public static ComputationGraphConfiguration getConf() {
    ComputationGraphConfiguration.GraphBuilder builder = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .updater(new Adam(0.01))
            .weightInit(WeightInit.RELU)
            .graphBuilder()
            .addInputs("in");

    // One Convolution1D + global max-pooling branch per ngram size.
    String[] poolNames = new String[ngramFilters.length];
    int i = 0;
    for (int ngram : ngramFilters) {
        String filterName = String.format("ngram%d", ngram);
        poolNames[i] = String.format("pool%d", ngram);
        builder = builder.addLayer(filterName, new Convolution1DLayer.Builder()
                        .nOut(numFilters)
                        .kernelSize(ngram)
                        .activation(Activation.RELU)
                        .build(), "in")
                .addLayer(poolNames[i],
                        new GlobalPoolingLayer.Builder(PoolingType.MAX).build(), filterName);
        i++;
    }

    // Merge the pooled branches, then classify with a softmax layer and MCXENT loss.
    return builder.addVertex("concat", new MergeVertex(), poolNames)
            .addLayer("predict", new DenseLayer.Builder()
                    .nOut(numClasses)
                    .dropOut(dropoutRetain)
                    .activation(Activation.SOFTMAX)
                    .build(), "concat")
            .addLayer("loss",
                    new LossLayer.Builder(LossFunctions.LossFunction.MCXENT).build(), "predict")
            .setOutputs("loss")
            .setInputTypes(InputType.recurrent(W2V_VECTOR_SIZE, 1000))
            .build();
}
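For orientation, here is a minimal, hedged sketch of how a configuration like this is typically turned into a usable model. It assumes this entry point sits in the same class as getConf() (so the static call resolves) and that org.deeplearning4j.nn.graph.ComputationGraph is imported; the entry point itself is illustrative, not part of the listing above.

import org.deeplearning4j.nn.graph.ComputationGraph;

// Hypothetical entry point, placed in the same class as getConf().
public static void main(String[] args) {
    // Instantiate and initialize the graph from the configuration above.
    ComputationGraph model = new ComputationGraph(getConf());
    model.init();

    // summary() prints the topology: one Convolution1D + max-pool branch
    // per ngram size, merged into the softmax classifier.
    System.out.println(model.summary());
}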
/**
 * Constructor from layer name and name of the inbound layer.
 *
 * @param layerName             layer name
 * @param inboundLayerName      name of inbound layer
 * @param kerasLoss             name of Keras loss function
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws UnsupportedKerasConfigurationException if the loss function is unsupported
 *                                                and enforceTrainingConfig is true
 */
public KerasLoss(String layerName, String inboundLayerName, String kerasLoss,
                 boolean enforceTrainingConfig)
        throws UnsupportedKerasConfigurationException {
    this.className = KERAS_CLASS_NAME_LOSS;
    this.layerName = layerName;
    this.inputShape = null;
    this.dimOrder = DimOrder.NONE;
    this.inboundLayerNames = new ArrayList<>();
    this.inboundLayerNames.add(inboundLayerName);

    LossFunctions.LossFunction loss;
    try {
        loss = mapLossFunction(kerasLoss);
    } catch (UnsupportedKerasConfigurationException e) {
        // In lenient mode, fall back to MSE rather than failing the import.
        if (enforceTrainingConfig)
            throw e;
        log.warn("Unsupported Keras loss function. Replacing with MSE.");
        loss = LossFunctions.LossFunction.SQUARED_LOSS;
    }
    this.layer = new LossLayer.Builder(loss).name(layerName).build();
}
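The enforceTrainingConfig flag governs the fallback above. As a hedged illustration (the layer names "loss" and "dense_1" are hypothetical, and the KerasLoss package path varies across DL4J model-import versions):

// NOTE: package path is version-dependent; in recent DL4J releases:
// import org.deeplearning4j.nn.modelimport.keras.layers.KerasLoss;

// Lenient mode: an unrecognized Keras loss name logs a warning and is
// replaced with SQUARED_LOSS (MSE) instead of aborting the import.
KerasLoss lenient = new KerasLoss("loss", "dense_1", "some_custom_loss", false);

// Strict mode: the same unrecognized loss name propagates
// UnsupportedKerasConfigurationException to the caller.
// KerasLoss strict = new KerasLoss("loss", "dense_1", "some_custom_loss", true);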
                .activation(Activation.IDENTITY)
                .build(), "maxpooling2d_5")
        .addLayer("globalpooling",
                new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "convolution2d_6")
        .addLayer("loss",
                new LossLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .activation(Activation.SOFTMAX)
                        .build(), "globalpooling")
        .setOutputs("loss");