/**
 * Builds the computation-graph configuration for a multi-branch text CNN:
 * one Convolution1D + global-max-pool branch per n-gram width in {@code ngramFilters},
 * merged and fed into a softmax classifier trained with multi-class cross entropy.
 *
 * @return the fully built {@link ComputationGraphConfiguration}
 */
public static ComputationGraphConfiguration getConf() {
    // Shared hyper-parameters: fixed seed for reproducibility, Adam optimizer,
    // ReLU-oriented weight initialisation.
    ComputationGraphConfiguration.GraphBuilder graph = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .updater(new Adam(0.01))
            .weightInit(WeightInit.RELU)
            .graphBuilder()
            .addInputs("in");

    // One parallel convolution + global-max-pool branch per n-gram width.
    String[] poolNames = new String[ngramFilters.length];
    for (int idx = 0; idx < ngramFilters.length; idx++) {
        int ngram = ngramFilters[idx];
        String filterName = String.format("ngram%d", ngram);
        poolNames[idx] = String.format("pool%d", ngram);
        graph = graph
                .addLayer(filterName,
                        new Convolution1DLayer.Builder()
                                .nOut(numFilters)
                                .kernelSize(ngram)
                                .activation(Activation.RELU)
                                .build(),
                        "in")
                .addLayer(poolNames[idx],
                        new GlobalPoolingLayer.Builder(PoolingType.MAX).build(),
                        filterName);
    }

    // Merge all pooled branches, classify with a softmax dense layer
    // (dropout applied to the merged features), and attach the MCXENT loss.
    return graph
            .addVertex("concat", new MergeVertex(), poolNames)
            .addLayer("predict",
                    new DenseLayer.Builder()
                            .nOut(numClasses)
                            .dropOut(dropoutRetain)
                            .activation(Activation.SOFTMAX)
                            .build(),
                    "concat")
            .addLayer("loss",
                    new LossLayer.Builder(LossFunctions.LossFunction.MCXENT).build(),
                    "predict")
            .setOutputs("loss")
            .setInputTypes(InputType.recurrent(W2V_VECTOR_SIZE, 1000))
            .build();
}
}
.addLayer("globalPool", new GlobalPoolingLayer.Builder() .poolingType(globalPoolingType) .build(), "merge") .addLayer("out", new OutputLayer.Builder() .lossFunction(LossFunctions.LossFunction.MSE)
.layer(30, new ConvolutionLayer.Builder(new int[] {3, 3}).nOut(numLabels) .build()) .layer(31, new GlobalPoolingLayer.Builder(PoolingType.AVG).build()) .layer(32, new ActivationLayer.Builder().activation(Activation.SOFTMAX).build())
/**
 * Constructs a DL4J global pooling layer from a parsed Keras layer configuration.
 *
 * @param layerConfig           dictionary containing the Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     if the Keras configuration is invalid
 * @throws UnsupportedKerasConfigurationException if the Keras configuration is unsupported
 */
public KerasGlobalPooling(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);
    this.dimensions = mapPoolingDimensions(this.className);
    // Pooling type and dimensions are derived from the Keras class name; the pooled
    // dimensions are collapsed, and name/dropout come from the parsed config in super().
    this.layer = new GlobalPoolingLayer.Builder(mapPoolingType(this.className))
            .poolingDimensions(this.dimensions)
            .collapseDimensions(true)
            .name(this.layerName)
            .dropOut(this.dropout)
            .build();
    this.vertex = null;
}
/**
 * Materialises a {@link GlobalPoolingLayer} from the given hyper-parameter vector,
 * applying only the parameter spaces that were actually configured (non-null).
 *
 * @param parameterValues flattened hyper-parameter values for this candidate
 * @return the configured global pooling layer
 */
@Override
public GlobalPoolingLayer getValue(double[] parameterValues) {
    GlobalPoolingLayer.Builder layerBuilder = new GlobalPoolingLayer.Builder();
    // Common layer options (name, dropout, etc.) are handled by the superclass.
    super.setLayerOptionsBuilder(layerBuilder, parameterValues);
    if (poolingType != null) {
        layerBuilder.poolingType(poolingType.getValue(parameterValues));
    }
    if (poolingDimensions != null) {
        layerBuilder.poolingDimensions(poolingDimensions.getValue(parameterValues));
    }
    if (collapseDimensions != null) {
        layerBuilder.collapseDimensions(collapseDimensions.getValue(parameterValues));
    }
    if (pNorm != null) {
        layerBuilder.pnorm(pNorm.getValue(parameterValues));
    }
    return layerBuilder.build();
}
.activation(Activation.IDENTITY) .build(), "maxpooling2d_5") .addLayer("globalpooling", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "convolution2d_6") .addLayer("loss", new LossLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).activation(Activation.SOFTMAX).build(), "globalpooling") .setOutputs("loss");