/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException if required LRN fields are missing or malformed
 * @throws UnsupportedKerasConfigurationException if the layer configuration is not supported
 */
public KerasLRN(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    Map<String, Object> lrnParams = getInnerLayerConfigFromConfig(layerConfig);
    // JSON deserialization may box numeric fields as Integer or Double depending on how
    // they were written in the config (e.g. "alpha": 1 vs "alpha": 1.0). A direct
    // (double)/(int) cast on the boxed Object throws ClassCastException for the "wrong"
    // boxed type, so convert through Number instead.
    LocalResponseNormalization.Builder builder = new LocalResponseNormalization.Builder()
            .name(this.layerName)
            .dropOut(this.dropout)
            .alpha(((Number) lrnParams.get("alpha")).doubleValue())
            .beta(((Number) lrnParams.get("beta")).doubleValue())
            .k(((Number) lrnParams.get("k")).intValue())
            .n(((Number) lrnParams.get("n")).intValue());
    this.layer = builder.build();
    // LRN maps to a plain layer, not a graph vertex.
    this.vertex = null;
}
/**
 * Applies this layer space's hyperparameter values to the given
 * {@code LocalResponseNormalization.Builder}, after first delegating the
 * common options to the superclass.
 *
 * @param builder builder to configure
 * @param values  flattened hyperparameter value vector to sample from
 */
protected void setLayerOptionsBuilder(LocalResponseNormalization.Builder builder, double[] values) {
    super.setLayerOptionsBuilder(builder, values);
    // Each parameter space is optional; configure only those that were set.
    if (this.n != null) {
        builder.n(this.n.getValue(values));
    }
    if (this.k != null) {
        builder.k(this.k.getValue(values));
    }
    if (this.alpha != null) {
        builder.alpha(this.alpha.getValue(values));
    }
    if (this.beta != null) {
        builder.beta(this.beta.getValue(values));
    }
}