.layer(new ConvolutionLayer.Builder(11, 11)
        .nIn(channels)
        .nOut(96)
        .activation(Activation.RELU)
        .build())
.layer(new LocalResponseNormalization.Builder().name("lrn1").build())
.layer(new SubsamplingLayer.Builder(PoolingType.MAX)
        .kernelSize(3, 3)
        .build())
.layer(new ConvolutionLayer.Builder(5, 5)
        .nOut(256)
        .stride(1, 1)
        .activation(Activation.RELU)
        .build())
.layer(new LocalResponseNormalization.Builder().name("lrn2").build())
.layer(new SubsamplingLayer.Builder(PoolingType.MAX)
        .kernelSize(3, 3)
        .build())
/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
public KerasLRN(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    Map<String, Object> lrnParams = getInnerLayerConfigFromConfig(layerConfig);

    LocalResponseNormalization.Builder builder = new LocalResponseNormalization.Builder()
            .name(this.layerName)
            .dropOut(this.dropout)
            .alpha((double) lrnParams.get("alpha"))
            .beta((double) lrnParams.get("beta"))
            .k((int) lrnParams.get("k"))
            .n((int) lrnParams.get("n"));
    this.layer = builder.build();
    this.vertex = null;
}
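For reference, here is a minimal sketch of the kind of configuration map this constructor parses. The outer "class_name"/"config" layout and the inner keys mirror the lrnParams lookups above; the concrete values and the custom class name "LRN" are illustrative assumptions, and depending on the model-import version additional bookkeeping keys may be required.

import java.util.HashMap;
import java.util.Map;

Map<String, Object> inner = new HashMap<>();
inner.put("name", "lrn_1");   // becomes this.layerName
inner.put("alpha", 1e-4);     // stored as Double, matching the (double) cast above
inner.put("beta", 0.75);
inner.put("k", 2);            // stored as Integer, matching the (int) cast above
inner.put("n", 5);

Map<String, Object> layerConfig = new HashMap<>();
layerConfig.put("class_name", "LRN");   // hypothetical custom layer name
layerConfig.put("config", inner);

KerasLRN lrn = new KerasLRN(layerConfig, false);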
.addLayer("stem-pool1",
        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {3, 3},
                new int[] {2, 2}, new int[] {1, 1}).build(), "stem-activation1")
.addLayer("stem-lrn1",
        new LocalResponseNormalization.Builder(1, 5, 1e-4, 0.75).build(), "stem-pool1")
// ...
        "inception-2-cnn2")
.addLayer("inception-2-activation2",
        new ActivationLayer.Builder().activation(Activation.RELU).build(), "inception-2-batch2")
.addLayer("inception-2-lrn1",
        new LocalResponseNormalization.Builder(1, 5, 1e-4, 0.75).build(), "inception-2-activation2")
.addLayer("inception-2-pool1",
        .activation(Activation.RELU)
        .build())
.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
        .kernelSize(2, 2)
        .stride(2, 2)
        .build())
.layer(2, new LocalResponseNormalization.Builder().build())
.layer(3, new ConvolutionLayer.Builder(5, 5)
        .nOut(64)
        .activation(Activation.RELU)
        .l2(0.0000005)
        .build())
.layer(4, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
        .kernelSize(2, 2)
        .stride(2, 2)
        .build())
.layer(5, new LocalResponseNormalization.Builder().build())
.layer(6, new DenseLayer.Builder().nOut(1024).dropOut(dropOut).activation(Activation.RELU).build())
.layer(7, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.layer(1, new LocalResponseNormalization.Builder().name("lrn1").build())
.layer(2, maxPool("maxpool1", new int[] {3, 3}))
.layer(3, conv5x5("cnn2", 256, new int[] {1, 1}, new int[] {2, 2}, nonZeroBias))
.layer(4, new LocalResponseNormalization.Builder().name("lrn2").build())
.layer(5, maxPool("maxpool2", new int[] {3, 3}))
.layer(6, conv3x3("cnn3", 384, 0))
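The maxPool, conv5x5, and conv3x3 helpers are not shown in this snippet. Below is a hedged sketch of definitions consistent with the call sites; the strides, padding, and bias handling are inferred assumptions, not the original code.

private static SubsamplingLayer maxPool(String name, int[] kernelSize) {
    // Max pooling; the AlexNet-style stride of 2 is assumed
    return new SubsamplingLayer.Builder(kernelSize, new int[] {2, 2}).name(name).build();
}

private static ConvolutionLayer conv5x5(String name, int out, int[] stride, int[] pad, double bias) {
    return new ConvolutionLayer.Builder(new int[] {5, 5}, stride, pad)
            .name(name).nOut(out).biasInit(bias).build();
}

private static ConvolutionLayer conv3x3(String name, int out, double bias) {
    // 3x3 convolution; unit stride and padding of 1 are assumed
    return new ConvolutionLayer.Builder(new int[] {3, 3}, new int[] {1, 1}, new int[] {1, 1})
            .name(name).nOut(out).biasInit(bias).build();
}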
.addLayer("max1",
        new SubsamplingLayer.Builder(new int[] {3, 3}, new int[] {2, 2}, new int[] {0, 0}).build(),
        "cnn1")
.addLayer("lrn1", new LocalResponseNormalization.Builder(5, 1e-4, 0.75).build(), "max1")
.addLayer("cnn2", conv1x1(64, 64, 0.2), "lrn1")
.addLayer("cnn3", conv3x3(64, 192, 0.2), "cnn2")
.addLayer("lrn2", new LocalResponseNormalization.Builder(5, 1e-4, 0.75).build(), "cnn3")
.addLayer("max2",
        new SubsamplingLayer.Builder(new int[] {3, 3}, new int[] {2, 2}, new int[] {0, 0}).build(),
        "lrn2");
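Here, too, conv1x1 and conv3x3 are project-local helpers. A plausible sketch follows, with the signatures inferred from the calls above; the ReLU activation and the kernel/stride/padding choices are assumptions.

private static ConvolutionLayer conv1x1(int in, int out, double bias) {
    return new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}, new int[] {0, 0})
            .nIn(in).nOut(out).biasInit(bias).activation(Activation.RELU).build();
}

private static ConvolutionLayer conv3x3(int in, int out, double bias) {
    return new ConvolutionLayer.Builder(new int[] {3, 3}, new int[] {1, 1}, new int[] {1, 1})
            .nIn(in).nOut(out).biasInit(bias).activation(Activation.RELU).build();
}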
@Override
public LocalResponseNormalization getValue(double[] values) {
    LocalResponseNormalization.Builder b = new LocalResponseNormalization.Builder();
    setLayerOptionsBuilder(b, values);
    return b.build();
}
protected void setLayerOptionsBuilder(LocalResponseNormalization.Builder builder, double[] values) {
    super.setLayerOptionsBuilder(builder, values);
    if (n != null)
        builder.n(n.getValue(values));
    if (k != null)
        builder.k(k.getValue(values));
    if (alpha != null)
        builder.alpha(alpha.getValue(values));
    if (beta != null)
        builder.beta(beta.getValue(values));
}
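A hedged usage sketch for these two methods: the enclosing Arbiter layer space holds one ParameterSpace per hyperparameter, and the optimization runner supplies the flattened double[] candidate that getValue(...) maps to a concrete layer. The builder methods below mirror the fields read in setLayerOptionsBuilder but are assumptions about the enclosing class, not confirmed API.

LocalResponseNormalizationLayerSpace space = new LocalResponseNormalizationLayerSpace.Builder()
        .n(new ContinuousParameterSpace(3, 7))             // window length
        .k(new ContinuousParameterSpace(1.0, 3.0))         // additive bias term
        .alpha(new ContinuousParameterSpace(1e-5, 1e-3))   // scaling
        .beta(new ContinuousParameterSpace(0.5, 1.0))      // exponent
        .build();

// During a search, the runner samples a point in the flattened space and materializes it:
// LocalResponseNormalization layer = space.getValue(candidatePoint);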