/**
 * Layer activation function.
 * Typical values include:<br>
 * "relu" (rectified linear), "tanh", "sigmoid", "softmax",
 * "hardtanh", "leakyrelu", "maxout", "softsign", "softplus"
 * @deprecated Use {@link #activation(Activation)} or {@link #activation(IActivation)}
 */
@Deprecated
public Builder activation(String activationFunction) {
    return activation(Activation.fromString(activationFunction));
}
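For context, a minimal sketch of migrating a call site off the deprecated String overload; the DenseLayer and its sizes are illustrative, not taken from this codebase:

import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.nd4j.linalg.activations.Activation;

// Both calls configure the same function; the String form is resolved
// through Activation.fromString("relu") and is deprecated.
DenseLayer old = new DenseLayer.Builder().nIn(128).nOut(64)
        .activation("relu")          // deprecated String overload
        .build();
DenseLayer current = new DenseLayer.Builder().nIn(128).nOut(64)
        .activation(Activation.RELU) // preferred enum overload
        .build();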
// Surrounding convolution/reduce/batch-norm layer definitions of the module elided in source;
// only the ActivationLayer ("transfer") additions are recoverable:
graph.addLayer(getModuleName(moduleLayerName) + "-transfer1-" + i,
                new ActivationLayer.Builder().activation(transferFunction).build(),
                getModuleName(moduleLayerName) + "-batch1-" + i);
graph.addLayer(getModuleName(moduleLayerName) + "-transfer2-" + i,
                new ActivationLayer.Builder().activation(transferFunction).build(),
                getModuleName(moduleLayerName) + "-batch2-" + i);
graph.addLayer(getModuleName(moduleLayerName) + "-transfer3",
                new ActivationLayer.Builder().activation(transferFunction).build(),
                getModuleName(moduleLayerName) + "-batch3");
graph.addLayer(getModuleName(moduleLayerName) + "-transfer4",
                new ActivationLayer.Builder().activation(transferFunction).build(),
                getModuleName(moduleLayerName) + "-batch4");
.addLayer("stem-batch1", new BatchNormalization.Builder(false).nIn(64).nOut(64).build(), "stem-cnn1") .addLayer("stem-activation1", new ActivationLayer.Builder().activation(Activation.RELU).build(), "stem-batch1") "inception-2-cnn1") .addLayer("inception-2-activation1", new ActivationLayer.Builder().activation(Activation.RELU).build(), "inception-2-batch1") .addLayer("inception-2-cnn2", "inception-2-cnn2") .addLayer("inception-2-activation2", new ActivationLayer.Builder().activation(Activation.RELU).build(), "inception-2-batch2") "inception-3b") .addLayer("3c-1x1-norm", FaceNetHelper.batchNorm(128, 128), "3c-1x1") .addLayer("3c-transfer1", new ActivationLayer.Builder().activation(transferFunction).build(), "3c-1x1-norm") .addLayer("3c-3x3", "3c-transfer1") .addLayer("3c-3x3-norm", FaceNetHelper.batchNorm(256, 256), "3c-3x3") .addLayer("3c-transfer2", new ActivationLayer.Builder().activation(transferFunction).build(), "3c-3x3-norm") "inception-3b") .addLayer("3c-2-1x1-norm", FaceNetHelper.batchNorm(32, 32), "3c-2-1x1")
// First-activation layer name elided in source
graph.addLayer(/* name elided */,
                new ActivationLayer.Builder().activation(Activation.TANH).build(), input);
graph.addLayer(nameLayer(blockName, "identity", i),
                new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), previousBlock);
graph.addVertex(nameLayer(blockName, "shortcut", i), /* merge vertex and its inputs elided */);
if (/* condition elided; the if-branch names the final output */)
    graph.addLayer(blockName,
                    new ActivationLayer.Builder().activation(Activation.TANH).build(),
                    nameLayer(blockName, "shortcut", i));
else
    graph.addLayer(nameLayer(blockName, "activation", i),
                    new ActivationLayer.Builder().activation(Activation.TANH).build(),
                    nameLayer(blockName, "shortcut", i));
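For orientation, a minimal self-contained sketch of this shortcut pattern, assuming DL4J's ElementWiseVertex as the merge operation (the actual vertex is elided above); layer names and sizes are illustrative:

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.graph.ElementWiseVertex;
import org.deeplearning4j.nn.conf.layers.*;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
        .graphBuilder()
        .addInputs("in")
        // processed branch
        .addLayer("block-dense", new DenseLayer.Builder().nIn(64).nOut(64).build(), "in")
        // identity branch, expressed as an IDENTITY ActivationLayer as in the helper above
        .addLayer("block-identity",
                new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "in")
        // element-wise add merges the two branches (assumed merge vertex)
        .addVertex("block-shortcut", new ElementWiseVertex(ElementWiseVertex.Op.Add),
                "block-dense", "block-identity")
        // tanh applied after the merge, as in the helper
        .addLayer("block-activation",
                new ActivationLayer.Builder().activation(Activation.TANH).build(), "block-shortcut")
        .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                .nIn(64).nOut(10).activation(Activation.SOFTMAX).build(), "block-activation")
        .setOutputs("out")
        .build();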
.layer(3, new BatchNormalization.Builder().build())
.layer(4, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(5, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[] {2, 2}).build())
// layers 6-8 elided
.layer(9, new ConvolutionLayer.Builder(new int[] {5, 5}).nOut(32).build())
.layer(10, new BatchNormalization.Builder().build())
.layer(11, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(12, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[] {2, 2}).build())
// layers 13-15 elided
.layer(16, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(64).build())
.layer(17, new BatchNormalization.Builder().build())
.layer(18, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(19, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[] {2, 2}).build())
// layers 20-22 elided
.layer(23, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(128).build())
.layer(24, new BatchNormalization.Builder().build())
.layer(25, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(26, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[] {2, 2}).build())
// layers 27-30 elided
.layer(31, new GlobalPoolingLayer.Builder(PoolingType.AVG).build())
.layer(32, new ActivationLayer.Builder().activation(Activation.SOFTMAX).build())
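The recurring motif above is Convolution -> BatchNormalization -> ReLU ActivationLayer -> average pooling. A minimal self-contained configuration using one such block; the sizes and the MNIST-like input shape are illustrative assumptions, not from this model:

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.*;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .list()
        .layer(0, new ConvolutionLayer.Builder(new int[] {5, 5}).nIn(1).nOut(16).build())
        .layer(1, new BatchNormalization.Builder().build())
        // activation kept in its own layer so it runs on the normalized pre-activations
        .layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
        .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[] {2, 2}).build())
        .layer(4, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                .nOut(10).activation(Activation.SOFTMAX).build())
        .setInputType(InputType.convolutional(28, 28, 1))
        .build();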
"stem-zero") .addLayer("stem-batch1", new BatchNormalization(), "stem-cnn1") .addLayer("stem-act1", new ActivationLayer.Builder().activation(Activation.RELU).build(), "stem-batch1") .addLayer("stem-maxpool1", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
.addLayer(batchName + "2a", new BatchNormalization(), convName + "2a") .addLayer(activationName + "2a", new ActivationLayer.Builder().activation(Activation.RELU).build(), batchName + "2a") .addLayer(batchName + "2b", new BatchNormalization(), convName + "2b") .addLayer(activationName + "2b", new ActivationLayer.Builder().activation(Activation.RELU).build(), batchName + "2b") .addLayer(convName, new ActivationLayer.Builder().activation(Activation.RELU).build(), shortcutName);
.addLayer(batchName + "2a", new BatchNormalization(), convName + "2a") .addLayer(activationName + "2a", new ActivationLayer.Builder().activation(Activation.RELU).build(), batchName + "2a") .addLayer(batchName + "2b", new BatchNormalization(), convName + "2b") .addLayer(activationName + "2b", new ActivationLayer.Builder().activation(Activation.RELU).build(), batchName + "2b") .addLayer(convName, new ActivationLayer.Builder().activation(Activation.RELU).build(), shortcutName);
/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     if the Keras configuration is malformed
 * @throws UnsupportedKerasConfigurationException if the configuration is valid but unsupported
 */
public KerasActivation(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
                throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    this.layer = new ActivationLayer.Builder().name(this.layerName)
                    .activation(getActivationFromConfig(layerConfig)).build();
}
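For illustration, the dictionary this constructor consumes mirrors Keras's own JSON serialization of an Activation layer. The key names below follow standard Keras output, but the exact fields the importer requires depend on its version, so treat this as a sketch:

import java.util.HashMap;
import java.util.Map;

Map<String, Object> innerConfig = new HashMap<>();
innerConfig.put("name", "activation_1");   // becomes this.layerName
innerConfig.put("activation", "relu");     // read by getActivationFromConfig(...)

Map<String, Object> layerConfig = new HashMap<>();
layerConfig.put("class_name", "Activation");
layerConfig.put("config", innerConfig);

// throws Invalid/UnsupportedKerasConfigurationException on malformed input
KerasActivation keras = new KerasActivation(layerConfig, false);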
public Builder activation(Activation activation) {
    return activation(activation.getActivationFunction());
}
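Since this overload just unwraps the enum via getActivationFunction(), an IActivation instance can equally be passed directly when the function needs configuration, for example a leaky ReLU with a custom slope (layer type and sizes illustrative):

import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.nd4j.linalg.activations.impl.ActivationLReLU;

// Equivalent path through the IActivation overload; ActivationLReLU(0.05)
// sets the negative-slope coefficient explicitly.
DenseLayer layer = new DenseLayer.Builder()
        .nIn(32).nOut(16)
        .activation(new ActivationLReLU(0.05))
        .build();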
@Override
public ActivationLayer getValue(double[] parameterValues) {
    ActivationLayer.Builder b = new ActivationLayer.Builder();
    super.setLayerOptionsBuilder(b, parameterValues);
    b.activation(activationFunction.getValue(parameterValues));
    return b.build();
}
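Typical use declares the activation function as a hyperparameter; a hedged sketch of the Arbiter API, assuming ActivationLayerSpace.Builder and DiscreteParameterSpace behave as their names suggest:

// The activation choice becomes a discrete hyperparameter of the search.
ActivationLayerSpace space = new ActivationLayerSpace.Builder()
        .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH, Activation.SOFTSIGN))
        .build();
// The optimization runner samples parameterValues in [0, 1] and calls
// space.getValue(parameterValues) to obtain the concrete ActivationLayer.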