getModuleName(moduleLayerName) + "-cnn1-" + i); graph.addLayer(getModuleName(moduleLayerName) + "-transfer1-" + i, new ActivationLayer.Builder().activation(transferFunction).build(), getModuleName(moduleLayerName) + "-batch1-" + i); graph.addLayer(getModuleName(moduleLayerName) + "-reduce1-" + i, getModuleName(moduleLayerName) + "-reduce1-" + i); graph.addLayer(getModuleName(moduleLayerName) + "-transfer2-" + i, new ActivationLayer.Builder().activation(transferFunction).build(), getModuleName(moduleLayerName) + "-batch2-" + i); getModuleName(moduleLayerName) + "-cnn2"); graph.addLayer(getModuleName(moduleLayerName) + "-transfer3", new ActivationLayer.Builder().activation(transferFunction).build(), getModuleName(moduleLayerName) + "-batch3"); } catch (IndexOutOfBoundsException e) { getModuleName(moduleLayerName) + "-reduce2"); graph.addLayer(getModuleName(moduleLayerName) + "-transfer4", new ActivationLayer.Builder().activation(transferFunction).build(), getModuleName(moduleLayerName) + "-batch4"); } catch (IndexOutOfBoundsException e) {
.build(), "input1") .addLayer("stem-batch1", new BatchNormalization.Builder(false).nIn(64).nOut(64).build(), "stem-cnn1") .addLayer("stem-activation1", new ActivationLayer.Builder().activation(Activation.RELU).build(), .cudnnAlgoMode(ConvolutionLayer.AlgoMode.NO_WORKSPACE).build(), "stem-lrn1") .addLayer("inception-2-batch1", new BatchNormalization.Builder(false).nIn(64).nOut(64).build(), "inception-2-cnn1") .addLayer("inception-2-activation1", "inception-2-activation1") .addLayer("inception-2-batch2", new BatchNormalization.Builder(false).nIn(192).nOut(192).build(), "inception-2-cnn2") .addLayer("inception-2-activation2", new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}).nIn(320) .nOut(32).cudnnAlgoMode(ConvolutionLayer.AlgoMode.NO_WORKSPACE) .build(), new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}).nIn(640) .nOut(64).cudnnAlgoMode(ConvolutionLayer.AlgoMode.NO_WORKSPACE) .build(), new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}).nIn(1024) .nOut(96).cudnnAlgoMode(ConvolutionLayer.AlgoMode.NO_WORKSPACE) .build(),
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), previousBlock) .addVertex(nameLayer(blockName, "shortcut", i), graph.addLayer(blockName, new ActivationLayer.Builder().activation(Activation.TANH).build(), nameLayer(blockName, "shortcut", i)); else graph.addLayer(nameLayer(blockName, "activation", i), new ActivationLayer.Builder().activation(Activation.TANH).build(), nameLayer(blockName, "shortcut", i));
new ActivationLayer.Builder().activation(Activation.TANH).build(), input); new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), previousBlock) .addVertex(nameLayer(blockName, "shortcut", i), graph.addLayer(blockName, new ActivationLayer.Builder().activation(Activation.TANH).build(), nameLayer(blockName, "shortcut", i)); else graph.addLayer(nameLayer(blockName, "activation", i), new ActivationLayer.Builder().activation(Activation.TANH).build(), nameLayer(blockName, "shortcut", i));
new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), previousBlock) .addVertex(nameLayer(blockName, "shortcut", i), graph.addLayer(blockName, new ActivationLayer.Builder().activation(Activation.TANH).build(), nameLayer(blockName, "shortcut", i)); else graph.addLayer(nameLayer(blockName, "activation", i), new ActivationLayer.Builder().activation(Activation.TANH).build(), nameLayer(blockName, "shortcut", i));
.layer(2, new ConvolutionLayer.Builder(new int[] {7, 7}).nIn(16).nOut(16) .build()) .layer(3, new BatchNormalization.Builder().build()) .layer(4, new ActivationLayer.Builder().activation(Activation.RELU).build()) .layer(5, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, .layer(8, new BatchNormalization.Builder().build()) .layer(9, new ConvolutionLayer.Builder(new int[] {5, 5}).nOut(32).build()) .layer(10, new BatchNormalization.Builder().build()) .layer(11, new ActivationLayer.Builder().activation(Activation.RELU).build()) .layer(12, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, .layer(15, new BatchNormalization.Builder().build()) .layer(16, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(64).build()) .layer(17, new BatchNormalization.Builder().build()) .layer(18, new ActivationLayer.Builder().activation(Activation.RELU).build()) .layer(19, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, .layer(22, new BatchNormalization.Builder().build()) .layer(23, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(128).build()) .layer(24, new BatchNormalization.Builder().build()) .layer(25, new ActivationLayer.Builder().activation(Activation.RELU).build()) .layer(26, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, .layer(30, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(numLabels) .build()) .layer(31, new GlobalPoolingLayer.Builder(PoolingType.AVG).build()) .layer(32, new ActivationLayer.Builder().activation(Activation.SOFTMAX).build())
batchName + "2a") .addLayer(convName + "2b", new ConvolutionLayer.Builder(kernelSize).nOut(filters[1]) .cudnnAlgoMode(cudnnAlgoMode).convolutionMode(ConvolutionMode.Same).build(), activationName + "2a") new ConvolutionLayer.Builder(new int[] {1, 1}).nOut(filters[2]) .cudnnAlgoMode(cudnnAlgoMode).build(), activationName + "2b") .addLayer(convName, new ActivationLayer.Builder().activation(Activation.RELU).build(), shortcutName);
new ConvolutionLayer.Builder(kernelSize).nOut(filters[1]) .convolutionMode(ConvolutionMode.Same).build(), activationName + "2a") .addLayer(convName, new ActivationLayer.Builder().activation(Activation.RELU).build(), shortcutName);
/** * Constructor from parsed Keras layer configuration dictionary. * * @param layerConfig dictionary containing Keras layer configuration * @param enforceTrainingConfig whether to enforce training-related configuration options * @throws InvalidKerasConfigurationException * @throws UnsupportedKerasConfigurationException */ public KerasActivation(Map<String, Object> layerConfig, boolean enforceTrainingConfig) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException { super(layerConfig, enforceTrainingConfig); this.layer = new ActivationLayer.Builder().name(this.layerName).activation(getActivationFromConfig(layerConfig)) .build(); }
/**
 * Realizes a concrete {@code ActivationLayer} from a point in parameter space.
 *
 * <p>Superclass layer options are applied first, then the activation function
 * resolved from {@code activationFunction} for the same parameter values.
 *
 * @param parameterValues parameter values defining the point in parameter space
 * @return the configured {@code ActivationLayer}
 */
@Override
public ActivationLayer getValue(double[] parameterValues) {
    ActivationLayer.Builder builder = new ActivationLayer.Builder();
    super.setLayerOptionsBuilder(builder, parameterValues);
    return builder
            .activation(activationFunction.getValue(parameterValues))
            .build();
}