@Override public ComputationGraphConfiguration deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { ComputationGraphConfiguration conf = (ComputationGraphConfiguration) defaultDeserializer.deserialize(jp, ctxt); //Updater configuration changed after 0.8.0 release //Previously: enumerations and fields. Now: classes //Here, we manually create the appropriate Updater instances, if the IUpdater field is empty List<Layer> layerList = new ArrayList<>(); Map<String, GraphVertex> vertices = conf.getVertices(); for (Map.Entry<String, GraphVertex> entry : vertices.entrySet()) { if (entry.getValue() instanceof LayerVertex) { LayerVertex lv = (LayerVertex) entry.getValue(); layerList.add(lv.getLayerConf().getLayer()); } } Layer[] layers = layerList.toArray(new Layer[layerList.size()]); handleUpdaterBackwardCompatibility(layers); return conf; } }
/**
 * Set parameters to selectively override existing learning parameters.
 * Usage eg. specify a lower learning rate. This will get applied to all layers.
 *
 * @param fineTuneConfiguration configuration whose set fields override the originals
 * @return GraphBuilder
 */
public GraphBuilder fineTuneConfiguration(FineTuneConfiguration fineTuneConfiguration) {
    this.fineTuneConfiguration = fineTuneConfiguration;
    this.editedConfigBuilder = new ComputationGraphConfiguration.GraphBuilder(origConfig,
                    fineTuneConfiguration.appliedNeuralNetConfigurationBuilder());

    Map<String, GraphVertex> vertices = this.editedConfigBuilder.getVertices();
    for (Map.Entry<String, GraphVertex> gv : vertices.entrySet()) {
        // Only layer vertices carry a NeuralNetConfiguration to fine-tune.
        if (!(gv.getValue() instanceof LayerVertex)) {
            continue;
        }
        LayerVertex lv = (LayerVertex) gv.getValue();
        // Apply the overrides to a clone so the original configuration stays untouched.
        NeuralNetConfiguration nnc = lv.getLayerConf().clone();
        fineTuneConfiguration.applyToNeuralNetConfiguration(nnc);
        vertices.put(gv.getKey(), new LayerVertex(nnc, lv.getPreProcessor()));
        nnc.getLayer().setLayerName(gv.getKey());
    }
    return this;
}
// NOTE(review): mid-method fragment — braces are unbalanced here, so the enclosing
// control flow is not fully visible in this chunk.
Layer l = lv.getLayerConf().getLayer();
// Only install the supplied preprocessor if the vertex does not already have one.
if (lv.getPreProcessor() == null) {
    lv.setPreProcessor(preproc);
    // If a preprocessor is present (pre-existing or just set), compute the layer's
    // effective input type after preprocessing.
    if (lv.getPreProcessor() != null) {
        InputPreProcessor ip = lv.getPreProcessor();
        afterPreproc = ip.getOutputType(layerInput);
// NOTE(review): two unrelated fragments appear fused on this line; braces are
// unbalanced, so the enclosing loops are outside this view.
// Fragment 1: attach the mapped preprocessor to the vertex — only a LayerVertex
// can carry an InputPreProcessor, anything else is a configuration error.
if (gv instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) gv;
    lv.setPreProcessor(entry.getValue());
} else {
    throw new IllegalStateException(
// Fragment 2: propagate the pretrain flag, but only to pretrainable layers.
if (gv.getValue() instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) gv.getValue();
    Layer l = lv.getLayerConf().getLayer();
    if (l instanceof BasePretrainNetwork)
        lv.getLayerConf().setPretrain(pretrain);
// Wrap this vertex's layer in a FrozenLayer so its parameters are excluded from
// further training, preserving the original layer name.
Layer origLayerConf = currLayerVertex.getLayerConf().getLayer();
Layer newLayerConf = new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer(origLayerConf);
newLayerConf.setLayerName(origLayerConf.getLayerName());
// Clone the NeuralNetConfiguration and swap the frozen layer into the clone.
NeuralNetConfiguration newNNC = currLayerVertex.getLayerConf().clone();
currLayerVertex.setLayerConf(newNNC);
currLayerVertex.getLayerConf().setLayer(newLayerConf);
// NOTE(review): after setLayerConf above, newNNC and currLayerVertex.getLayerConf()
// are the same object — the snapshot, clear, and re-add below all operate on it.
// Confirm this copy-through-self ordering is intended.
List<String> vars = currLayerVertex.getLayerConf().variables(true);
currLayerVertex.getLayerConf().clearVariables();
for (String s : vars) {
    newNNC.variables(false).add(s);
/**
 * Returns a deep copy of this vertex: the layer configuration is always cloned,
 * and the preprocessor is cloned when present (a null preprocessor stays null).
 */
@Override
public GraphVertex clone() {
    InputPreProcessor preProcCopy = (preProcessor == null ? null : preProcessor.clone());
    return new LayerVertex(layerConf.clone(), preProcCopy);
}
// Re-add the edited layer under its original name, keeping its preprocessor and inputs.
LayerVertex lv = (LayerVertex) origConfig.getVertices().get(layerName);
String[] lvInputs = origConfig.getVertexInputs().get(layerName).toArray(new String[0]);
editedConfigBuilder.addLayer(layerName, layerImpl, lv.getPreProcessor(), lvInputs);
editedVertices.add(layerName);
// Do the same for the fan-out vertex.
// NOTE(review): the same layerImpl instance is passed to both addLayer calls, and
// addLayer renames the Layer it receives — confirm sharing one Layer object between
// two vertices is intended.
lv = (LayerVertex) origConfig.getVertices().get(fanoutVertexName);
lvInputs = origConfig.getVertexInputs().get(fanoutVertexName).toArray(new String[0]);
editedConfigBuilder.addLayer(fanoutVertexName, layerImpl, lv.getPreProcessor(), lvInputs);
editedVertices.add(fanoutVertexName);
/**
 * Add a layer and an {@link InputPreProcessor}, with the specified name and specified inputs.
 *
 * @param layerName Name/label of the layer to add
 * @param layer The layer configuration
 * @param preProcessor The InputPreProcessor to use with this layer.
 * @param layerInputs Inputs to this layer (must be 1 or more). Inputs may be other layers, GraphVertex objects,
 * or a combination of the two.
 */
public GraphBuilder addLayer(String layerName, Layer layer, InputPreProcessor preProcessor, String... layerInputs) {
    // Each layer gets its own NeuralNetConfiguration derived from the global defaults.
    NeuralNetConfiguration.Builder layerBuilder = globalConfiguration.clone();
    layerBuilder.layer(layer);
    addVertex(layerName, new LayerVertex(layerBuilder.build(), preProcessor), layerInputs);
    layer.setLayerName(layerName);
    return this;
}
// gv is known to be a LayerVertex at this point; pull out its layer configuration.
NeuralNetConfiguration c = ((LayerVertex) gv).getLayerConf();
Layer layer = c.getLayer();
// gv is known to be a LayerVertex at this point; pull out its layer configuration.
NeuralNetConfiguration c = ((LayerVertex) gv).getLayerConf();
Layer layer = c.getLayer();
// gv is known to be a LayerVertex at this point; pull out its layer configuration.
NeuralNetConfiguration c = ((LayerVertex) gv).getLayerConf();
Layer layer = c.getLayer();
// Resolve the vertex by name: a LayerVertex supplies both a NeuralNetConfiguration
// and a Layer; otherwise the name may refer to one of the network inputs.
if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
    nnc = lv.getLayerConf();
    layer = nnc.getLayer();
} else if (conf.getNetworkInputs().contains(vertexName)) {
// NOTE(review): gradient-check validation fragment — braces are unbalanced, so the
// full conditions around these throws are not visible in this chunk.
LayerVertex lv = (LayerVertex) gv;
if (lv.getLayerConf().getLayer() instanceof BaseLayer) {
    BaseLayer bl = (BaseLayer) lv.getLayerConf().getLayer();
    IUpdater u = bl.getIUpdater();
    if (u instanceof Sgd) {
        // NOTE(review): the message requires lr == 1.0 with SGD, but the visible
        // condition only checks the updater type — presumably the lr comparison is
        // in the truncated part of this if; verify against the full source.
        throw new IllegalStateException("When using SGD updater, must also use lr=1.0 for layer "
                        + layerCount + "; got " + u + " with lr=" + lr + " for layer \""
                        + lv.getLayerConf().getLayer().getLayerName() + "\"");
    // Dropout must be disabled for gradient checks: stochastic masking makes the
    // numeric gradient non-deterministic.
    double dropout = lv.getLayerConf().getLayer().getDropOut();
    if (lv.getLayerConf().isUseRegularization() && dropout != 0.0) {
        throw new IllegalStateException("Must have dropout == 0.0 for gradient checks - got dropout = "
                        + dropout + " for layer " + layerCount);
// Resolve the vertex by name: a LayerVertex supplies both a NeuralNetConfiguration
// and a Layer; otherwise the name may refer to one of the network inputs.
if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
    nnc = lv.getLayerConf();
    layer = nnc.getLayer();
} else if (conf.getNetworkInputs().contains(vertexName)) {
// Resolve the vertex by name: a LayerVertex supplies both a NeuralNetConfiguration
// and a Layer; otherwise the name may refer to one of the network inputs.
if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
    nnc = lv.getLayerConf();
    layer = nnc.getLayer();
} else if (conf.getNetworkInputs().contains(vertexName)) {
// Guard against vertices with no layer configuration (or no layer) attached.
if (lv.getLayerConf() != null && lv.getLayerConf().getLayer() != null) {
    Layer layer = lv.getLayerConf().getLayer();