ActivationLayer$Builder.build

How to use build method in org.deeplearning4j.nn.conf.layers.ActivationLayer$Builder

Best Java code snippets using org.deeplearning4j.nn.conf.layers.ActivationLayer$Builder.build (selected from 315 results)

origin: org.deeplearning4j/deeplearning4j-zoo

// Inception-style module: each ActivationLayer applies the module's transfer
// function to the output of the preceding batch-normalization layer.
graph.addLayer(getModuleName(moduleLayerName) + "-transfer1-" + i,
        new ActivationLayer.Builder().activation(transferFunction).build(),
        getModuleName(moduleLayerName) + "-batch1-" + i);
graph.addLayer(getModuleName(moduleLayerName) + "-transfer2-" + i,
        new ActivationLayer.Builder().activation(transferFunction).build(),
        getModuleName(moduleLayerName) + "-batch2-" + i);
graph.addLayer(getModuleName(moduleLayerName) + "-transfer3",
        new ActivationLayer.Builder().activation(transferFunction).build(),
        getModuleName(moduleLayerName) + "-batch3");
graph.addLayer(getModuleName(moduleLayerName) + "-transfer4",
        new ActivationLayer.Builder().activation(transferFunction).build(),
        getModuleName(moduleLayerName) + "-batch4");
origin: org.deeplearning4j/deeplearning4j-zoo

.addLayer("stem-batch1", new BatchNormalization.Builder(false).nIn(64).nOut(64).build(),
        "stem-cnn1")
.addLayer("stem-activation1", new ActivationLayer.Builder().activation(Activation.RELU).build(),
        "stem-batch1")
        "inception-2-cnn1")
.addLayer("inception-2-activation1",
        new ActivationLayer.Builder().activation(Activation.RELU).build(),
        "inception-2-batch1")
.addLayer("inception-2-cnn2",
        "inception-2-cnn2")
.addLayer("inception-2-activation2",
        new ActivationLayer.Builder().activation(Activation.RELU).build(),
        "inception-2-batch2")
"inception-3b")
.addLayer("3c-1x1-norm", FaceNetHelper.batchNorm(128, 128), "3c-1x1")
.addLayer("3c-transfer1", new ActivationLayer.Builder().activation(transferFunction).build(),
        "3c-1x1-norm")
.addLayer("3c-3x3",
        "3c-transfer1")
.addLayer("3c-3x3-norm", FaceNetHelper.batchNorm(256, 256), "3c-3x3")
.addLayer("3c-transfer2", new ActivationLayer.Builder().activation(transferFunction).build(),
        "3c-3x3-norm")
        "inception-3b")
.addLayer("3c-2-1x1-norm", FaceNetHelper.batchNorm(32, 32), "3c-2-1x1")
origin: org.deeplearning4j/deeplearning4j-zoo

// Residual-style block: a TANH ActivationLayer opens the block, an IDENTITY
// ActivationLayer carries the previous block into the shortcut vertex, and a
// TANH ActivationLayer follows the merge.
graph.addLayer(...,
        new ActivationLayer.Builder().activation(Activation.TANH).build(), input);
graph.addLayer(...,
                new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
                previousBlock)
        .addVertex(nameLayer(blockName, "shortcut", i), ...);
if (...)  // condition not captured in the indexed fragment
    graph.addLayer(blockName, new ActivationLayer.Builder().activation(Activation.TANH).build(),
            nameLayer(blockName, "shortcut", i));
else
    graph.addLayer(nameLayer(blockName, "activation", i),
            new ActivationLayer.Builder().activation(Activation.TANH).build(),
            nameLayer(blockName, "shortcut", i));
origin: org.deeplearning4j/deeplearning4j-zoo

// Repeating Conv -> BatchNorm -> ActivationLayer(RELU) -> average-pool pattern;
// the non-consecutive layer indices skip layers not shown in this fragment.
.layer(3, new BatchNormalization.Builder().build())
.layer(4, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(5, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG,
        new int[] {2, 2}).build())
.layer(9, new ConvolutionLayer.Builder(new int[] {5, 5}).nOut(32).build())
.layer(10, new BatchNormalization.Builder().build())
.layer(11, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(12, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG,
        new int[] {2, 2}).build())
.layer(16, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(64).build())
.layer(17, new BatchNormalization.Builder().build())
.layer(18, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(19, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG,
        new int[] {2, 2}).build())
.layer(23, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(128).build())
.layer(24, new BatchNormalization.Builder().build())
.layer(25, new ActivationLayer.Builder().activation(Activation.RELU).build())
.layer(26, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG,
        new int[] {2, 2}).build())
.layer(31, new GlobalPoolingLayer.Builder(PoolingType.AVG).build())
.layer(32, new ActivationLayer.Builder().activation(Activation.SOFTMAX).build())
origin: org.deeplearning4j/deeplearning4j-zoo

        "stem-zero")
.addLayer("stem-batch1", new BatchNormalization(), "stem-cnn1")
.addLayer("stem-act1", new ActivationLayer.Builder().activation(Activation.RELU).build(),
        "stem-batch1")
.addLayer("stem-maxpool1", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
origin: org.deeplearning4j/deeplearning4j-zoo

// ResNet-style block: a RELU ActivationLayer follows each BatchNormalization,
// and a final RELU is applied to the shortcut merge.
.addLayer(batchName + "2a", new BatchNormalization(), convName + "2a")
.addLayer(activationName + "2a",
        new ActivationLayer.Builder().activation(Activation.RELU).build(),
        batchName + "2a")
.addLayer(batchName + "2b", new BatchNormalization(), convName + "2b")
.addLayer(activationName + "2b",
        new ActivationLayer.Builder().activation(Activation.RELU).build(),
        batchName + "2b")
.addLayer(convName, new ActivationLayer.Builder().activation(Activation.RELU).build(),
        shortcutName);
origin: org.deeplearning4j/deeplearning4j-modelimport

/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig               dictionary containing Keras layer configuration
 * @param enforceTrainingConfig     whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     if the layer configuration is invalid
 * @throws UnsupportedKerasConfigurationException if the layer configuration is unsupported
 */
public KerasActivation(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
  super(layerConfig, enforceTrainingConfig);
  this.layer = new ActivationLayer.Builder().name(this.layerName).activation(getActivationFromConfig(layerConfig))
          .build();
}
origin: org.deeplearning4j/arbiter-deeplearning4j

// Arbiter hyperparameter search: materialize a concrete ActivationLayer
// from a candidate point in the search space.
@Override
public ActivationLayer getValue(double[] parameterValues) {
  ActivationLayer.Builder b = new ActivationLayer.Builder();
  super.setLayerOptionsBuilder(b, parameterValues);
  b.activation(activationFunction.getValue(parameterValues));
  return b.build();
}

Popular methods of ActivationLayer$Builder (combined in the sketch after this list)

  • activation
  • <init>
  • name
