public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    Map<String, INDArray> out = super.getGradientsFromFlattened(conf, gradientView);
    FeedForwardLayer layerConf = (FeedForwardLayer) conf.getLayer();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    int nWeightParams = nIn * nOut;
    int nUserWeightParams = numUsers * nOut;
    // User weights sit after the standard weights (nIn*nOut) and bias (nOut) in the flattened view
    INDArray userWeightGradientView = gradientView.get(NDArrayIndex.point(0),
                    NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nUserWeightParams))
                    .reshape('f', numUsers, nOut);
    out.put(USER_WEIGHT_KEY, userWeightGradientView);
    return out;
}
}
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    Map<String, INDArray> params = super.init(conf, paramsView, initializeParams);
    FeedForwardLayer layerConf = (FeedForwardLayer) conf.getLayer();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    int nWeightParams = nIn * nOut;
    int nUserWeightParams = numUsers * nOut;
    INDArray userWeightView = paramsView.get(NDArrayIndex.point(0),
                    NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nUserWeightParams));
    params.put(USER_WEIGHT_KEY, this.createUserWeightMatrix(conf, userWeightView, initializeParams));
    conf.addVariable(USER_WEIGHT_KEY);
    return params;
}
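For orientation, a minimal sketch of the flattened layout the two methods above assume; the sizes here are hypothetical, and numUsers is the field used above:

// Flattened view layout: [ weights: nIn*nOut | bias: nOut | user weights: numUsers*nOut ]
int nIn = 4, nOut = 3, numUsers = 5;
int biasOffset = nIn * nOut;                          // 12
int userWeightOffset = nIn * nOut + nOut;             // 15
int totalLength = userWeightOffset + numUsers * nOut; // 30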
@Override
public int numParams(Layer l) {
    FeedForwardLayer layerConf = (FeedForwardLayer) l;
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    return nIn * nOut + nOut; //weights + bias
}
@Override
public int numParams(NeuralNetConfiguration conf) {
    org.deeplearning4j.nn.conf.layers.FeedForwardLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) conf.getLayer();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut(); // also equal to numClasses
    return nIn * nOut + nOut + nIn * nOut; //weights + bias + embeddings
}
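A quick sanity check of the parameter count, with hypothetical sizes:

// nIn = 100, nOut = 10 (numClasses)
// weights:    100 * 10 = 1000
// bias:              10
// embeddings: 100 * 10 = 1000
// total:           2010 parameters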
map.put("Input size", String.valueOf(layer1.getNIn())); map.put("Output size", String.valueOf(layer1.getNOut())); map.put("Num Parameters", String.valueOf(layer1.initializer().numParams(c)));
map.put("Input size", String.valueOf(layer1.getNIn())); map.put("Output size", String.valueOf(layer1.getNOut())); map.put("Num Parameters", String.valueOf(layer1.initializer().numParams(c)));
map.put("Input size", String.valueOf(layer1.getNIn())); map.put("Output size", String.valueOf(layer1.getNOut())); map.put("Num Parameters", String.valueOf(layer1.initializer().numParams(c)));
protected INDArray createWeightMatrix(NeuralNetConfiguration conf, INDArray weightParamView, boolean initializeParameters) {
    org.deeplearning4j.nn.conf.layers.FeedForwardLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) conf.getLayer();
    if (initializeParameters) {
        Distribution dist = Distributions.createDistribution(layerConf.getDist());
        return createWeightMatrix(layerConf.getNIn(), layerConf.getNOut(), layerConf.getWeightInit(), dist,
                        weightParamView, true);
    } else {
        return createWeightMatrix(layerConf.getNIn(), layerConf.getNOut(), null, null, weightParamView, false);
    }
}
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    org.deeplearning4j.nn.conf.layers.FeedForwardLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) conf.getLayer();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    int nWeightParams = nIn * nOut;
    INDArray weightGradientView = gradientView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, nWeightParams))
                    .reshape('f', nIn, nOut);
    INDArray biasView = gradientView.get(NDArrayIndex.point(0),
                    NDArrayIndex.interval(nWeightParams, nWeightParams + nOut)); //Already a row vector
    Map<String, INDArray> out = new LinkedHashMap<>();
    out.put(WEIGHT_KEY, weightGradientView);
    out.put(BIAS_KEY, biasView);
    return out;
}
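The 'f' in the reshape matters: the flattened weights are interpreted column-major (Fortran order), so consecutive values fill a column before moving to the next. A self-contained ND4J sketch (the class name is ours, for illustration only):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class FortranOrderDemo {
    public static void main(String[] args) {
        INDArray flat = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6});
        // Column-major reshape: values 1,2 fill column 0; 3,4 column 1; 5,6 column 2
        INDArray w = flat.reshape('f', 2, 3);
        System.out.println(w);
        // prints (formatting may vary):
        // [[1, 3, 5],
        //  [2, 4, 6]]
    }
}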
if (currentLayer.numParams() > 0) {
    paramShape = "";
    in = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNIn());
    out = String.valueOf(((FeedForwardLayer) currentLayer.conf().getLayer()).getNOut());
    Set<String> paraNames = currentLayer.conf().getLearningRateByParam().keySet();
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    Map<String, INDArray> out = super.getGradientsFromFlattened(conf, gradientView);
    org.deeplearning4j.nn.conf.layers.FeedForwardLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) conf.getLayer();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();
    int nWeightParams = nIn * nOut;
    // Visible bias (length nIn) sits after the weights (nIn*nOut) and the standard bias (nOut)
    INDArray vBiasView = gradientView.get(NDArrayIndex.point(0),
                    NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nIn));
    out.put(VISIBLE_BIAS_KEY, vBiasView);
    return out;
}
}
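For orientation, the flattened layout this override reads from (hypothetical sizes; the weights and standard bias are handled by the superclass):

// [ weights: nIn*nOut | bias: nOut | visible bias: nIn ]
int nIn = 4, nOut = 3;
int vBiasOffset = nIn * nOut + nOut; // 15
int totalLength = vBiasOffset + nIn; // 19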
org.deeplearning4j.nn.conf.layers.FeedForwardLayer layer1 =
                (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) layer.conf().getLayer();
mainLine.append("nIn/nOut: [").append(layer1.getNIn()).append("/").append(layer1.getNOut()).append("]");
subLine.append(info.getLayerType());
fullLine.append("Inputs number: ").append(layer1.getNIn()).append("<br/>");
fullLine.append("Outputs number: ").append(layer1.getNOut()).append("<br/>");
} else {
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    if (!(conf.getLayer() instanceof org.deeplearning4j.nn.conf.layers.FeedForwardLayer))
        throw new IllegalArgumentException("unsupported layer type: " + conf.getLayer().getClass().getName());

    Map<String, INDArray> params = Collections.synchronizedMap(new LinkedHashMap<String, INDArray>());

    int length = numParams(conf);
    if (paramsView.length() != length)
        throw new IllegalStateException(
                        "Expected params view of length " + length + ", got length " + paramsView.length());

    org.deeplearning4j.nn.conf.layers.FeedForwardLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) conf.getLayer();
    int nIn = layerConf.getNIn();
    int nOut = layerConf.getNOut();

    int nWeightParams = nIn * nOut;
    INDArray weightView = paramsView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, nWeightParams));
    INDArray biasView = paramsView.get(NDArrayIndex.point(0),
                    NDArrayIndex.interval(nWeightParams, nWeightParams + nOut));

    params.put(WEIGHT_KEY, createWeightMatrix(conf, weightView, initializeParams));
    params.put(BIAS_KEY, createBias(conf, biasView, initializeParams));
    conf.addVariable(WEIGHT_KEY);
    conf.addVariable(BIAS_KEY);

    return params;
}
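A minimal usage sketch, assuming a 0.x-era DL4J DenseLayer configuration; the sizes are hypothetical and the fragment assumes the imports of the surrounding class:

NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                .layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
                .build();
int nParams = conf.getLayer().initializer().numParams(conf); // 4*3 + 3 = 15
INDArray paramsView = Nd4j.zeros(1, nParams);
Map<String, INDArray> params = conf.getLayer().initializer().init(conf, paramsView, true);
// params.get(WEIGHT_KEY) is a 4x3 view backed by paramsView; writes to one are visible in the other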
FeedForwardLayer ffl = (FeedForwardLayer) layer;
layerInfoRows.add(new String[] {i18N.getMessage("train.model.layerinfotable.layerNIn"),
                String.valueOf(ffl.getNIn())});
layerInfoRows.add(new String[] {i18N.getMessage("train.model.layerinfotable.layerSize"),
                String.valueOf(ffl.getNOut())});
int nIn = ffl.getNIn();
if (nIn > 0) {
    inputType = InputType.feedForward(nIn);
                (org.deeplearning4j.nn.conf.layers.FeedForwardLayer) clone.getLayer();
// Swap the input and output sizes of the cloned layer configuration
int newNIn = clonedLayerConf.getNOut();
int newNOut = clonedLayerConf.getNIn();
clonedLayerConf.setNIn(newNIn);
clonedLayerConf.setNOut(newNOut);