// Compute the activation type produced after this vertex's input preprocessor, if one is present
if (lv.getPreProcessor() != null) {
    InputPreProcessor ip = lv.getPreProcessor();
    afterPreproc = ip.getOutputType(layerInput);
}
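/*
 * A minimal sketch of what getOutputType does for a concrete preprocessor, shown outside the
 * method above for illustration. The CnnToFeedForwardPreProcessor dimensions and the input
 * shape are hypothetical values chosen for the example.
 */
InputPreProcessor preProc = new CnnToFeedForwardPreProcessor(28, 28, 1);  // hypothetical 28x28, single-channel input
InputType convIn = InputType.convolutional(28, 28, 1);
InputType ffOut = preProc.getOutputType(convIn);  // feed-forward type with 28*28*1 = 784 inputs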
/**
 * Set parameters to selectively override existing learning parameters.
 * Usage example: specify a lower learning rate. This will get applied to all layers.
 *
 * @param fineTuneConfiguration the fine-tune configuration to apply
 * @return GraphBuilder
 */
public GraphBuilder fineTuneConfiguration(FineTuneConfiguration fineTuneConfiguration) {
    this.fineTuneConfiguration = fineTuneConfiguration;
    this.editedConfigBuilder = new ComputationGraphConfiguration.GraphBuilder(origConfig,
            fineTuneConfiguration.appliedNeuralNetConfigurationBuilder());

    Map<String, GraphVertex> vertices = this.editedConfigBuilder.getVertices();
    for (Map.Entry<String, GraphVertex> gv : vertices.entrySet()) {
        if (gv.getValue() instanceof LayerVertex) {
            // Clone each layer's configuration, apply the fine-tune overrides, and put the edited vertex back
            LayerVertex lv = (LayerVertex) gv.getValue();
            NeuralNetConfiguration nnc = lv.getLayerConf().clone();
            fineTuneConfiguration.applyToNeuralNetConfiguration(nnc);
            vertices.put(gv.getKey(), new LayerVertex(nnc, lv.getPreProcessor()));
            nnc.getLayer().setLayerName(gv.getKey());
        }
    }
    return this;
}
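/*
 * A minimal usage sketch for the method above, assuming an existing ComputationGraph named
 * "pretrainedGraph". The learning-rate value and the use of FineTuneConfiguration.Builder's
 * learningRate setter are illustrative assumptions based on the Javadoc, not taken from this code.
 */
FineTuneConfiguration ftc = new FineTuneConfiguration.Builder()
        .learningRate(1e-4)  // assumed setter: apply a lower learning rate to all layers
        .build();

ComputationGraph fineTuned = new TransferLearning.GraphBuilder(pretrainedGraph)
        .fineTuneConfiguration(ftc)
        .build();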
// Re-add the edited layer with its original preprocessor and input vertices
LayerVertex lv = (LayerVertex) origConfig.getVertices().get(layerName);
String[] lvInputs = origConfig.getVertexInputs().get(layerName).toArray(new String[0]);
editedConfigBuilder.addLayer(layerName, layerImpl, lv.getPreProcessor(), lvInputs);
editedVertices.add(layerName);

// Repeat for the downstream (fan-out) vertex, which is also re-added with its original preprocessor and inputs
lv = (LayerVertex) origConfig.getVertices().get(fanoutVertexName);
lvInputs = origConfig.getVertexInputs().get(fanoutVertexName).toArray(new String[0]);
editedConfigBuilder.addLayer(fanoutVertexName, layerImpl, lv.getPreProcessor(), lvInputs);
editedVertices.add(fanoutVertexName);
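/*
 * A minimal usage sketch of the editing flow above via the public nOutReplace API, assuming an
 * existing ComputationGraph "pretrainedGraph" with a layer named "fc1" feeding a downstream layer;
 * the layer name, new layer size, and weight init are hypothetical.
 */
ComputationGraph edited = new TransferLearning.GraphBuilder(pretrainedGraph)
        .nOutReplace("fc1", 256, WeightInit.XAVIER)  // re-adds "fc1" and its fan-out vertex in the edited configuration
        .build();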