/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
public KerasLRN(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    Map<String, Object> lrnParams = getInnerLayerConfigFromConfig(layerConfig);

    LocalResponseNormalization.Builder builder = new LocalResponseNormalization.Builder()
            .name(this.layerName)
            .dropOut(this.dropout)
            .alpha((double) lrnParams.get("alpha"))
            .beta((double) lrnParams.get("beta"))
            .k((int) lrnParams.get("k"))
            .n((int) lrnParams.get("n"));
    this.layer = builder.build();
    this.vertex = null;
}
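// For reference, the builder above produces the same layer one would construct
// directly in DL4J. A minimal sketch with assumed example values (alpha=1e-4,
// beta=0.75, k=2, n=5 are illustrative, not values from any particular Keras config):
LocalResponseNormalization lrn = new LocalResponseNormalization.Builder()
        .name("lrn_1")
        .alpha(1e-4)
        .beta(0.75)
        .k(2)
        .n(5)
        .build();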
        .activation(Activation.RELU)
        .build())
.layer(new LocalResponseNormalization.Builder().name("lrn1").build())
.layer(new SubsamplingLayer.Builder(PoolingType.MAX)
        .kernelSize(3, 3)
        .build())
.layer(new LocalResponseNormalization.Builder().name("lrn2").build())
.layer(new SubsamplingLayer.Builder(PoolingType.MAX)
        .kernelSize(3, 3)
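// For context, a fragment like the one above normally sits inside a list
// builder. The following is a minimal self-contained sketch; the seed, input
// shape, channel counts, and output layer are assumptions for illustration,
// not part of the original excerpt.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.LocalResponseNormalization;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.PoolingType;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class LrnConfigSketch {
    public static MultiLayerConfiguration build() {
        return new NeuralNetConfiguration.Builder()
                .seed(42)                                  // assumed
                .list()
                .layer(new ConvolutionLayer.Builder(5, 5)  // conv block feeding the LRN
                        .nOut(64)                          // assumed channel count
                        .activation(Activation.RELU)
                        .build())
                .layer(new LocalResponseNormalization.Builder().name("lrn1").build())
                .layer(new SubsamplingLayer.Builder(PoolingType.MAX)
                        .kernelSize(3, 3)
                        .build())
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(10)                          // assumed class count
                        .activation(Activation.SOFTMAX)
                        .build())
                .setInputType(InputType.convolutional(32, 32, 3)) // assumed input shape
                .build();
    }
}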
.layer(1, new LocalResponseNormalization.Builder().name("lrn1").build())
.layer(2, maxPool("maxpool1", new int[]{3, 3}))
.layer(3, conv5x5("cnn2", 256, new int[]{1, 1}, new int[]{2, 2}, nonZeroBias))
.layer(4, new LocalResponseNormalization.Builder().name("lrn2").build())
.layer(5, maxPool("maxpool2", new int[]{3, 3}))
.layer(6, conv3x3("cnn3", 384, 0))
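// The maxPool, conv5x5, and conv3x3 helpers referenced above are factory
// methods defined elsewhere in the configuration class. A plausible sketch of
// their shape is below; the exact strides and padding here are assumptions,
// not taken from the original excerpt.
private SubsamplingLayer maxPool(String name, int[] kernel) {
    return new SubsamplingLayer.Builder(kernel, new int[]{2, 2}) // assumed stride
            .name(name)
            .build();
}

private ConvolutionLayer conv5x5(String name, int out, int[] stride, int[] pad, double bias) {
    return new ConvolutionLayer.Builder(new int[]{5, 5}, stride, pad)
            .name(name)
            .nOut(out)
            .biasInit(bias)
            .build();
}

private ConvolutionLayer conv3x3(String name, int out, double bias) {
    return new ConvolutionLayer.Builder(new int[]{3, 3}, new int[]{1, 1}, new int[]{1, 1}) // assumed stride/padding
            .name(name)
            .nOut(out)
            .biasInit(bias)
            .build();
}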