/** Returns a copy of this layer configuration; no extra deep-copying is done beyond {@code super.clone()}. */
@Override
public LocalResponseNormalization clone() {
    // The superclass handles all state that needs copying; only the cast is added here.
    return (LocalResponseNormalization) super.clone();
}
/** Returns a copy of this layer configuration; no extra deep-copying is done beyond {@code super.clone()}. */
@Override
public ActivationLayer clone() {
    // Superclass clone already copies everything this layer holds; just narrow the type.
    return (ActivationLayer) super.clone();
}
/**
 * Returns a copy of this layer configuration. The distribution and the learning-rate /
 * momentum schedule maps are deep-copied so the clone is independent of this instance.
 */
@Override
public BaseLayer clone() {
    BaseLayer copy = (BaseLayer) super.clone();
    // Deep-copy mutable members so mutating the clone cannot affect the original.
    if (copy.dist != null) {
        copy.dist = copy.dist.clone();
    }
    if (copy.learningRateSchedule != null) {
        copy.learningRateSchedule = new HashMap<>(copy.learningRateSchedule);
    }
    if (copy.momentumSchedule != null) {
        copy.momentumSchedule = new HashMap<>(copy.momentumSchedule);
    }
    return copy;
}
/**
 * Returns a copy of this builder, deep-copying the layer and step function (when set)
 * so the clone can be configured without touching the original.
 *
 * @throws RuntimeException if {@code super.clone()} reports {@link CloneNotSupportedException}
 */
@Override
public Builder clone() {
    try {
        Builder copy = (Builder) super.clone();
        if (copy.layer != null) {
            copy.layer = copy.layer.clone();
        }
        if (copy.stepFunction != null) {
            copy.stepFunction = copy.stepFunction.clone();
        }
        return copy;
    } catch (CloneNotSupportedException e) {
        // This class is expected to be Cloneable; surface the failure as unchecked.
        throw new RuntimeException(e);
    }
}
/**
 * Creates and returns a deep copy of the configuration. The layer, step function,
 * variable list, and per-parameter learning-rate/l1/l2 maps are all copied so the
 * clone shares no mutable state with this instance.
 *
 * @throws RuntimeException if {@code super.clone()} reports {@link CloneNotSupportedException}
 */
@Override
public NeuralNetConfiguration clone() {
    try {
        NeuralNetConfiguration copy = (NeuralNetConfiguration) super.clone();
        if (copy.layer != null) {
            copy.layer = copy.layer.clone();
        }
        if (copy.stepFunction != null) {
            copy.stepFunction = copy.stepFunction.clone();
        }
        // Copy each mutable collection so edits to the clone never leak back.
        if (copy.variables != null) {
            copy.variables = new ArrayList<>(copy.variables);
        }
        if (copy.learningRateByParam != null) {
            copy.learningRateByParam = new HashMap<>(copy.learningRateByParam);
        }
        if (copy.l1ByParam != null) {
            copy.l1ByParam = new HashMap<>(copy.l1ByParam);
        }
        if (copy.l2ByParam != null) {
            copy.l2ByParam = new HashMap<>(copy.l2ByParam);
        }
        return copy;
    } catch (CloneNotSupportedException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Returns a copy of this layer configuration with the kernel-size, stride, and
 * padding arrays cloned, so the copy's geometry can be edited independently.
 */
@Override
public SubsamplingLayer clone() {
    SubsamplingLayer copy = (SubsamplingLayer) super.clone();
    // Arrays are mutable; clone each non-null one to decouple the copy.
    if (copy.kernelSize != null) {
        copy.kernelSize = copy.kernelSize.clone();
    }
    if (copy.stride != null) {
        copy.stride = copy.stride.clone();
    }
    if (copy.padding != null) {
        copy.padding = copy.padding.clone();
    }
    return copy;
}
/** * Default layer setup: Create sequential layer network defined by the order of the layer list * * @param gb GraphBuilder object */ protected void makeDefaultLayerSetup(GraphBuilder gb) { String currentInput = "input"; gb.addInputs(currentInput); // Collect layers for (Layer layer : layers) { String lName = layer.getLayerName(); gb.addLayer(lName, layer.getBackend().clone(), currentInput); currentInput = lName; } gb.setOutputs(currentInput); }
// NOTE(review): fragment of an index-based wiring loop (enclosing method not visible here).
// Adds the layer's backend config (cloned, so the graph owns its own copy) after the
// previous node, then advances the chain — presumably mirrors makeDefaultLayerSetup; confirm.
gb.addLayer(lName, layers[idx].getBackend().clone(), currentInput); currentInput = lName;
// NOTE(review): fragment (enclosing method not visible). Stores a clone of the layer —
// a defensive copy so later mutation of the caller's instance cannot affect this list.
layers.add(l.clone());
// NOTE(review): fragment of a for-each wiring loop (enclosing method not visible here).
// Clones the backend config before handing it to the graph, then makes this layer the
// input for the next iteration — same pattern as makeDefaultLayerSetup; confirm context.
gb.addLayer(lName, layer.getBackend().clone(), currentInput); currentInput = lName;