@Override public void update(INDArray gradient, String paramType) { if (!logUpdate) { OneTimeLogger.info(log, "Frozen layers will not be updated. Warning will be issued only once per instance"); logUpdate = true; } //no op }
@Override public void update(Gradient gradient) { if (!logUpdate) { OneTimeLogger.info(log, "Frozen layers will not be updated. Warning will be issued only once per instance"); logUpdate = true; } //no op }
@Override public void setBackpropGradientsViewArray(INDArray gradients) { if (!logGradient) { OneTimeLogger.info(log, "Gradients for the frozen layer are not set and will therefore will not be updated.Warning will be issued only once per instance"); logGradient = true; } //no-op }
@Override
public void fit(INDArray data) {
    // Frozen layers cannot be trained; this is intentionally a no-op.
    // Fix: original message was missing the space in "fit.Warning", inconsistent
    // with the no-arg fit() overload's message in this class.
    if (!logFit) {
        OneTimeLogger.info(log, "Frozen layers cannot be fit. Warning will be issued only once per instance");
        logFit = true;
    }
}
@Override public Gradient error(INDArray input) { if (!logGradient) { OneTimeLogger.info(log, "Gradients for the frozen layer are not set and will therefore will not be updated.Warning will be issued only once per instance"); logGradient = true; } return zeroGradient; }
@Override public void fit() { if (!logFit) { OneTimeLogger.info(log, "Frozen layers cannot be fit. Warning will be issued only once per instance"); logFit = true; } //no op }
public void logTestMode(boolean training) {
    // Warn (once per instance) only when the caller requested training mode:
    // frozen layers always behave as if in test mode.
    if (!training || logTestMode) {
        return;
    }
    OneTimeLogger.info(log,
            "Frozen layer instance found! Frozen layers are treated as always in test mode. Warning will only be issued once per instance");
    logTestMode = true;
}
public void logTestMode(TrainingMode training) {
    // Warn (once per instance) only when the caller requested a non-TEST mode:
    // frozen layers always behave as if in test mode.
    // Fix: compare enum constants with == instead of equals() — idiomatic and
    // null-safe (the original would NPE if training were null).
    if (training == TrainingMode.TEST || logTestMode) {
        return;
    }
    OneTimeLogger.info(log,
            "Frozen layer instance found! Frozen layers are treated as always in test mode. Warning will only be issued once per instance");
    logTestMode = true;
}
@Override public void computeGradientAndScore() { if (!logGradient) { OneTimeLogger.info(log, "Gradients for the frozen layer are not set and will therefore will not be updated.Warning will be issued only once per instance"); logGradient = true; } insideLayer.score(); //no op }
@Override
public Layer clone() {
    // Cloning a frozen layer clones the wrapped layer and re-wraps it,
    // so the copy is frozen as well.
    OneTimeLogger.info(log, "Frozen layers are cloned as their original versions.");
    Layer clonedInner = insideLayer.clone();
    return new FrozenLayer(clonedInner);
}
@Override
public Pair<Gradient, Double> gradientAndScore() {
    // Returns the cached zero gradient paired with the inner layer's current score.
    // Fix: original message read "will therefore will not be updated.Warning" —
    // doubled "will" and a missing space after the period.
    if (!logGradient) {
        OneTimeLogger.info(log,
                "Gradients for the frozen layer are not set and will therefore not be updated. Warning will be issued only once per instance");
        logGradient = true;
    }
    return new Pair<>(zeroGradient, insideLayer.score());
}
return; OneTimeLogger.info(log, "Starting ComputationGraph with WorkspaceModes set to [training: {}; inference: {}]", configuration.getTrainingWorkspaceMode(), configuration.getInferenceWorkspaceMode());
return; OneTimeLogger.info(log, "Starting MultiLayerNetwork with WorkspaceModes set to [training: {}; inference: {}]", layerWiseConfigurations.getTrainingWorkspaceMode(), layerWiseConfigurations.getInferenceWorkspaceMode());