/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * <p>Reads the LRN hyperparameters (alpha, beta, k, n) from the inner layer config and
 * builds the corresponding DL4J {@code LocalResponseNormalization} layer.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     if required configuration entries are missing or malformed
 * @throws UnsupportedKerasConfigurationException if the configuration uses unsupported options
 */
public KerasLRN(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    Map<String, Object> lrnParams = getInnerLayerConfigFromConfig(layerConfig);
    // Keras configs are deserialized from JSON, so whole-number values may be boxed as
    // Integer rather than Double. A direct (double)/(int) cast of the Object would then
    // throw ClassCastException; converting via Number handles either boxed type safely.
    LocalResponseNormalization.Builder builder = new LocalResponseNormalization.Builder()
            .name(this.layerName)
            .dropOut(this.dropout)
            .alpha(((Number) lrnParams.get("alpha")).doubleValue())
            .beta(((Number) lrnParams.get("beta")).doubleValue())
            .k(((Number) lrnParams.get("k")).intValue())
            .n(((Number) lrnParams.get("n")).intValue());
    this.layer = builder.build();
    // LRN maps to a plain layer, not a graph vertex.
    this.vertex = null;
}
.layer(new ConvolutionLayer.Builder(11,11) .nIn(channels) .nOut(96) .kernelSize(3,3) .build()) .layer(new ConvolutionLayer.Builder(5,5) .nOut(256) .stride(1,1)
.activation(Activation.RELU) .build()) .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) .kernelSize(2, 2) .stride(2, 2) .activation(Activation.RELU).l2(0.0000005) .build()) .layer(4, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX) .kernelSize(2, 2) .stride(2, 2)
new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {3, 3}, new int[] {2, 2}, new int[] {1, 1}).build(), "stem-activation1") "inception-2-cnn2") .addLayer("inception-2-activation2", new ActivationLayer.Builder().activation(Activation.RELU).build(), "inception-2-batch2")
.layer(1, new LocalResponseNormalization.Builder().name("lrn1").build()) .layer(2, maxPool("maxpool1", new int[]{3,3})) .layer(3, conv5x5("cnn2", 256, new int[] {1,1}, new int[] {2,2}, nonZeroBias)) .layer(4, new LocalResponseNormalization.Builder().name("lrn2").build()) .layer(5, maxPool("maxpool2", new int[]{3,3})) .layer(6,conv3x3("cnn3", 384, 0))
new SubsamplingLayer.Builder(new int[] {3, 3}, new int[] {2, 2}, new int[] {0, 0}).build(), "cnn1") .addLayer("lrn1", new LocalResponseNormalization.Builder(5, 1e-4, 0.75).build(), "max1") .addLayer("cnn2", conv1x1(64, 64, 0.2), "lrn1").addLayer("cnn3", conv3x3(64, 192, 0.2), "cnn2") .addLayer("lrn2", new LocalResponseNormalization.Builder(5, 1e-4, 0.75).build(), "cnn3")
/**
 * Builds a {@code LocalResponseNormalization} layer whose options are populated from the
 * supplied hyperparameter values.
 *
 * @param values candidate hyperparameter values applied via {@code setLayerOptionsBuilder}
 * @return a newly built LocalResponseNormalization layer
 */
@Override
public LocalResponseNormalization getValue(double[] values) {
    LocalResponseNormalization.Builder layerBuilder = new LocalResponseNormalization.Builder();
    setLayerOptionsBuilder(layerBuilder, values);
    return layerBuilder.build();
}