if (conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
    W = Dropout.applyDropConnect(this, CDAEParamInitializer.WEIGHT_KEY);
}
/**
 * Reconstructs the visible input.
 * A reconstruction is a propdown of the reconstructed hidden input.
 *
 * @param training true or false
 * @return the reconstruction of the visible input
 */
@Override
public INDArray activate(boolean training) {
    if (training && conf.getLayer().getDropOut() > 0.0) {
        Dropout.applyDropout(input, conf.getLayer().getDropOut());
    }
    //reconstruction: propUp the (possibly dropped-out) input to the hidden representation
    INDArray propUp = propUp(input, training);
    return propUp;
}
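// For reference, the masking that Dropout.applyDropout performs on the input
// can be sketched in plain Java. This is a minimal sketch, not the DL4J
// implementation (which runs an ND4J op in place on the input buffer); it
// assumes DL4J's convention that the configured dropOut value is the
// probability of *retaining* a unit, with inverted-dropout rescaling.
static void applyDropoutSketch(double[] activations, double retainProb, java.util.Random rng) {
    for (int i = 0; i < activations.length; i++) {
        if (rng.nextDouble() >= retainProb) {
            activations[i] = 0.0;             // drop this unit for this pass
        } else {
            activations[i] /= retainProb;     // rescale so the expected activation is unchanged
        }
    }
}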
public static void generalValidation(String layerName, Layer layer, boolean useRegularization,
                boolean useDropConnect, double dropOut, double l2, double l2Bias,
                double l1, double l1Bias, Distribution dist) {
    if (layer != null) {
        if (useDropConnect && Double.isNaN(dropOut) && Double.isNaN(layer.getDropOut()))
            OneTimeLogger.warn(log, "Layer \"" + layerName
                            + "\" dropConnect is set to true but dropout rate has not been added to configuration.");
        if (useDropConnect && layer.getDropOut() == 0.0)
            OneTimeLogger.warn(log, "Layer \"" + layerName
                            + "\" dropConnect is set to true but dropout rate is set to 0.0");

        if (layer instanceof BaseLayer) {
            BaseLayer bLayer = (BaseLayer) layer;
            configureBaseLayer(layerName, bLayer, useRegularization, useDropConnect, dropOut, l2, l2Bias,
                            l1, l1Bias, dist);
        } else if (layer instanceof FrozenLayer && ((FrozenLayer) layer).getLayer() instanceof BaseLayer) {
            BaseLayer bLayer = (BaseLayer) ((FrozenLayer) layer).getLayer();
            configureBaseLayer(layerName, bLayer, useRegularization, useDropConnect, dropOut, l2, l2Bias,
                            l1, l1Bias, dist);
        }
    }
}
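// For illustration, a configuration like the following would trigger the
// second warning above: drop connect enabled, but the dropout rate left at
// 0.0 so no weights would ever be dropped. This is a hypothetical sketch
// against the 0.9.x-era builder API that this validation code belongs to.
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .useDropConnect(true)                  // drop connect on...
                .list()
                .layer(0, new DenseLayer.Builder()
                                .nIn(10).nOut(10)
                                .dropOut(0.0)          // ...but no rate configured
                                .build())
                .build();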
public INDArray preOutput(INDArray v, boolean training) {
    INDArray hBias = getParam(PretrainParamInitializer.BIAS_KEY);
    INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY);
    if (training && conf.isUseDropConnect() && conf.getLayer().getDropOut() > 0) {
        W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY);
    }
    return v.mmul(W).addiRowVector(hBias);
}
/**
 * Apply drop connect to the given parameter of a layer.
 * The parameter array is duplicated first, so the layer's stored
 * weights are never mutated.
 *
 * @param layer    the layer holding the parameter
 * @param variable the key of the parameter to apply drop connect to
 * @return the masked copy of the parameter
 */
public static INDArray applyDropConnect(Layer layer, String variable) {
    INDArray result = layer.getParam(variable).dup();
    if (Nd4j.getRandom().getStatePointer() != null) {
        Nd4j.getExecutioner().exec(new DropOut(result, result, layer.conf().getLayer().getDropOut()));
    } else {
        Nd4j.getExecutioner().exec(new LegacyDropOut(result, result, layer.conf().getLayer().getDropOut()));
    }
    return result;
}
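// Conceptually, drop connect masks individual *weights* where dropout masks
// *activations*. A minimal plain-Java sketch of the masking applied above
// (illustrative only, assuming the same retain-probability convention as the
// dropout sketch; the real method delegates to an ND4J DropOut op on a dup
// of the weight matrix):
static double[][] applyDropConnectSketch(double[][] W, double retainProb, java.util.Random rng) {
    double[][] masked = new double[W.length][];
    for (int i = 0; i < W.length; i++) {
        masked[i] = W[i].clone();                     // copy, as dup() does: never mutate stored params
        for (int j = 0; j < masked[i].length; j++) {
            if (rng.nextDouble() >= retainProb) {
                masked[i][j] = 0.0;                   // drop this connection
            } else {
                masked[i][j] /= retainProb;           // inverted rescaling
            }
        }
    }
    return masked;
}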
protected void applyDropOutIfNecessary(boolean training) {
    if (layerConf().getDropOut() > 0 && !conf.isUseDropConnect() && training && !dropoutApplied) {
        if (Nd4j.getWorkspaceManager().checkIfWorkspaceExists(ComputationGraph.workspaceExternal)) {
            try (MemoryWorkspace ws = Nd4j.getWorkspaceManager()
                            .getWorkspaceForCurrentThread(ComputationGraph.workspaceExternal)
                            .notifyScopeBorrowed()) {
                input = input.isView() ? input.dup() : input.unsafeDuplication();
            }
        } else {
            input = input.isView() ? input.dup() : input.unsafeDuplication();
        }

        Dropout.applyDropout(input, layerConf().getDropOut());
        dropoutApplied = true;
    }
}
double dropout = n.getLayer().getDropOut();
if (n.isUseRegularization() && dropout != 0.0) {
    // stochastic dropout masks would make finite-difference gradient checks meaningless
    throw new IllegalStateException("Must have dropout == 0.0 for gradient checks - got dropout = "
                    + dropout);
}
double dropout = lv.getLayerConf().getLayer().getDropOut();
if (lv.getLayerConf().isUseRegularization() && dropout != 0.0) {
    throw new IllegalStateException("Must have dropout == 0.0 for gradient checks - got dropout = "
                    + dropout);
}
public INDArray encode(INDArray v, boolean training) {
    INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY);
    if (training && conf.isUseDropConnect() && conf.getLayer().getDropOut() > 0) {
        W = Dropout.applyDropConnect(this, PretrainParamInitializer.WEIGHT_KEY);
    }
    INDArray hBias = getParam(PretrainParamInitializer.BIAS_KEY);
    INDArray preAct = v.mmul(W).addiRowVector(hBias);
    //INDArray ret = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(conf.getLayer().getActivationFunction(), preAct));
    INDArray ret = layerConf().getActivationFn().getActivation(preAct, training);
    return ret;
}
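// A small shape walk-through of the pre-activation computed in encode(),
// using made-up dimensions (batch of 4, 3 visible units, 5 hidden units).
// The ND4J calls (mmul, addiRowVector) are the same ones used above.
INDArray v = Nd4j.rand(4, 3);                         // [batch, nVisible]
INDArray W = Nd4j.rand(3, 5);                         // [nVisible, nHidden]
INDArray hBias = Nd4j.zeros(1, 5);                    // [1, nHidden]
INDArray preAct = v.mmul(W).addiRowVector(hBias);     // [batch, nHidden]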
private void copyConfigToLayer(String layerName, Layer layer) {
    if (Double.isNaN(layer.getDropOut()))
        layer.setDropOut(dropOut);
@Override
public INDArray activate(boolean training) {
    if (training && conf.getLayer().getDropOut() > 0) {
        Dropout.applyDropout(input, conf.getLayer().getDropOut());
INDArray weights = getParam(ConvolutionParamInitializer.WEIGHT_KEY);
INDArray bias = getParam(ConvolutionParamInitializer.BIAS_KEY);
if (conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
    weights = Dropout.applyDropConnect(this, ConvolutionParamInitializer.WEIGHT_KEY);
if (conf.isUseDropConnect() && training && conf.getLayer().getDropOut() > 0) {
    inputWeights = Dropout.applyDropConnect(layer, inputWeightKey);
}
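// All of the guards above key off the `training` flag: dropout and drop
// connect fire only during fitting and are skipped at inference, so scoring
// stays deterministic. Illustrative usage, where `net` is a fitted
// MultiLayerNetwork and `features` is an input batch:
INDArray trainTimeOut = net.output(features, true);   // stochastic masks applied
INDArray testTimeOut = net.output(features, false);   // no dropout / drop connect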