@Override public ComputationGraphConfiguration deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { ComputationGraphConfiguration conf = (ComputationGraphConfiguration) defaultDeserializer.deserialize(jp, ctxt); //Updater configuration changed after 0.8.0 release //Previously: enumerations and fields. Now: classes //Here, we manually create the appropriate Updater instances, if the IUpdater field is empty List<Layer> layerList = new ArrayList<>(); Map<String, GraphVertex> vertices = conf.getVertices(); for (Map.Entry<String, GraphVertex> entry : vertices.entrySet()) { if (entry.getValue() instanceof LayerVertex) { LayerVertex lv = (LayerVertex) entry.getValue(); layerList.add(lv.getLayerConf().getLayer()); } } Layer[] layers = layerList.toArray(new Layer[layerList.size()]); handleUpdaterBackwardCompatibility(layers); return conf; } }
// Pull the layer configuration out of this graph vertex.
// NOTE(review): assumes gv was already verified to be a LayerVertex upstream — confirm.
NeuralNetConfiguration c = ((LayerVertex) gv).getLayerConf(); Layer layer = c.getLayer();
// Pull the layer configuration out of this graph vertex.
// NOTE(review): assumes gv was already verified to be a LayerVertex upstream — confirm.
NeuralNetConfiguration c = ((LayerVertex) gv).getLayerConf(); Layer layer = c.getLayer();
// Pull the layer configuration out of this graph vertex.
// NOTE(review): assumes gv was already verified to be a LayerVertex upstream — confirm.
NeuralNetConfiguration c = ((LayerVertex) gv).getLayerConf(); Layer layer = c.getLayer();
// Only LayerVertex entries carry a layer configuration; other vertex types are skipped.
if (gv.getValue() instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) gv.getValue();
    Layer l = lv.getLayerConf().getLayer();
    // The pretrain flag is only meaningful for pretrainable layers
    // (fragment continues past visible source — closing braces not shown here)
    if (l instanceof BasePretrainNetwork)
        lv.getLayerConf().setPretrain(pretrain);
// Fetch the layer definition held by this LayerVertex's configuration
Layer l = lv.getLayerConf().getLayer();
/**
 * Set parameters to selectively override existing learning parameters.
 * Usage eg. specify a lower learning rate. This will get applied to all layers.
 *
 * @param fineTuneConfiguration the fine-tune configuration whose values override the
 *                              corresponding values in every layer's configuration
 * @return GraphBuilder (this, for chaining)
 */
public GraphBuilder fineTuneConfiguration(FineTuneConfiguration fineTuneConfiguration) {
    this.fineTuneConfiguration = fineTuneConfiguration;
    this.editedConfigBuilder = new ComputationGraphConfiguration.GraphBuilder(origConfig,
                    fineTuneConfiguration.appliedNeuralNetConfigurationBuilder());

    Map<String, GraphVertex> vertices = this.editedConfigBuilder.getVertices();
    for (Map.Entry<String, GraphVertex> gv : vertices.entrySet()) {
        if (gv.getValue() instanceof LayerVertex) {
            LayerVertex lv = (LayerVertex) gv.getValue();
            // Clone so the original vertex's configuration is not mutated
            NeuralNetConfiguration nnc = lv.getLayerConf().clone();
            fineTuneConfiguration.applyToNeuralNetConfiguration(nnc);
            // Write through the entry rather than calling vertices.put() mid-iteration:
            // Entry.setValue is safe during entrySet iteration for any Map implementation,
            // whereas put() during iteration relies on HashMap-specific behavior for
            // existing keys and can throw ConcurrentModificationException on other maps.
            gv.setValue(new LayerVertex(nnc, lv.getPreProcessor()));
            nnc.getLayer().setLayerName(gv.getKey());
        }
    }
    return this;
}
// Resolve vertexName: a LayerVertex yields its layer configuration; otherwise it may be
// one of the graph's declared network inputs (fragment continues past visible source).
if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
    nnc = lv.getLayerConf();
    layer = nnc.getLayer();
} else if (conf.getNetworkInputs().contains(vertexName)) {
LayerVertex lv = (LayerVertex) gv;
if (lv.getLayerConf().getLayer() instanceof BaseLayer) {
    BaseLayer bl = (BaseLayer) lv.getLayerConf().getLayer();
    IUpdater u = bl.getIUpdater();
    // NOTE(review): the throw fires whenever the updater is Sgd, yet the message says
    // "must also use lr=1.0" — presumably an lr check exists outside this fragment; confirm.
    if (u instanceof Sgd) {
        throw new IllegalStateException("When using SGD updater, must also use lr=1.0 for layer "
                        + layerCount + "; got " + u + " with lr=" + lr + " for layer \""
                        + lv.getLayerConf().getLayer().getLayerName() + "\"");
        // Gradient checks require dropout disabled so the backward pass is deterministic
        double dropout = lv.getLayerConf().getLayer().getDropOut();
        if (lv.getLayerConf().isUseRegularization() && dropout != 0.0) {
            throw new IllegalStateException("Must have dropout == 0.0 for gradient checks - got dropout = "
                            + dropout + " for layer " + layerCount);
// Resolve vertexName: a LayerVertex yields its layer configuration; otherwise it may be
// one of the graph's declared network inputs (fragment continues past visible source).
if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
    nnc = lv.getLayerConf();
    layer = nnc.getLayer();
} else if (conf.getNetworkInputs().contains(vertexName)) {
// Resolve vertexName: a LayerVertex yields its layer configuration; otherwise it may be
// one of the graph's declared network inputs (fragment continues past visible source).
if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
    nnc = lv.getLayerConf();
    layer = nnc.getLayer();
} else if (conf.getNetworkInputs().contains(vertexName)) {
// Guard against vertices with no layer configuration (or no layer) attached
if (lv.getLayerConf() != null && lv.getLayerConf().getLayer() != null) {
    Layer layer = lv.getLayerConf().getLayer();
// Wrap the existing layer in a FrozenLayer so its parameters are excluded from updates,
// preserving the original layer name.
Layer origLayerConf = currLayerVertex.getLayerConf().getLayer();
Layer newLayerConf = new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer(origLayerConf);
newLayerConf.setLayerName(origLayerConf.getLayerName());
// Clone the configuration before mutating it, then swap in the frozen layer
NeuralNetConfiguration newNNC = currLayerVertex.getLayerConf().clone();
currLayerVertex.setLayerConf(newNNC);
currLayerVertex.getLayerConf().setLayer(newLayerConf);
// Rebuild the clone's variable list: snapshot, clear, then re-add each entry
// (fragment continues past visible source — loop body/braces not fully shown)
List<String> vars = currLayerVertex.getLayerConf().variables(true);
currLayerVertex.getLayerConf().clearVariables();
for (String s : vars) {
    newNNC.variables(false).add(s);