// NOTE(review): fragment — the assignment target (layerConf) and the enclosing method
// signature sit outside this view; presumably ConvolutionParamInitializer's weight setup.
(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
if (initializeParams) {
    // Distribution backing the configured WeightInit scheme (only used when the scheme samples).
    Distribution dist = Distributions.createDistribution(layerConf.getDist());
    int[] kernel = layerConf.getKernelSize();   // NOTE(review): unused in this branch — candidate for removal
    int[] stride = layerConf.getStride();       // NOTE(review): unused in this branch — candidate for removal
    int inputDepth = layerConf.getNIn();        // NOTE(review): unused in this branch — candidate for removal
    int outputDepth = layerConf.getNOut();      // NOTE(review): unused in this branch — candidate for removal
    // Initialise weights directly into the flattened parameter view, row-major ('c') order.
    return WeightInitUtil.initWeights(fanIn, fanOut, weightsShape, layerConf.getWeightInit(), dist, 'c', weightView);
} else {
    int[] kernel = layerConf.getKernelSize();
    // Parameters already populated elsewhere: only reshape the view to [nOut, nIn, kH, kW].
    return WeightInitUtil.reshapeWeights(
            new int[] {layerConf.getNOut(), layerConf.getNIn(), kernel[0], kernel[1]}, weightView, 'c');
/**
 * Returns the padding applied along the row dimension.
 *
 * @return number of padding rows configured on the wrapped DL4J layer
 */
@OptionMetadata( displayName = "number of rows in padding", description = "The number of rows in the padding (default = 0).", commandLineParamName = "paddingRows", commandLineParamSynopsis = "-paddingRows <int>", displayOrder = 8 )
public int getPaddingRows() {
    // Padding is stored as [rows, columns] on the backend layer.
    int[] padding = backend.getPadding();
    return padding[0];
}
// Backs this wrapper with a fresh, default-configured DL4J ConvolutionLayer.
@Override public void initializeBackend() { backend = new org.deeplearning4j.nn.conf.layers.ConvolutionLayer(); }
/**
 * Computes the total parameter count of a 2D convolution layer:
 * one weight per (input channel x output channel x kernel cell), plus one bias per output channel.
 *
 * @param l the layer configuration; must be a ConvolutionLayer
 * @return nIn * nOut * kernelRows * kernelColumns + nOut
 */
@Override
public int numParams(Layer l) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) l;
    int[] kernelSize = layerConf.getKernelSize();
    int inChannels = layerConf.getNIn();
    int outChannels = layerConf.getNOut();
    int weightCount = inChannels * outChannels * kernelSize[0] * kernelSize[1];
    return weightCount + outChannels;
}
// NOTE(review): fragment — the opening branch (presumably "if (layer instanceof ConvolutionLayer) {")
// and the declarations of "map" and "layer" are outside this view.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer;
// Expose the convolution geometry as human-readable strings.
map.put("Kernel size", Arrays.toString(layer1.getKernelSize()));
map.put("Stride", Arrays.toString(layer1.getStride()));
map.put("Padding", Arrays.toString(layer1.getPadding()));
} else if (layer instanceof SubsamplingLayer) {
    SubsamplingLayer layer1 = (SubsamplingLayer) layer;
// NOTE(review): fragment — the enclosing branch and the StringBuilder declarations
// (mainLine/subLine/fullLine) are outside this view; presumably a UI/HTML layer summary.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer.conf().getLayer();
// Compact one-line summary: kernel / stride / padding.
mainLine.append("K: " + Arrays.toString(layer1.getKernelSize()) + " S: "
        + Arrays.toString(layer1.getStride()) + " P: " + Arrays.toString(layer1.getPadding()));
subLine.append("nIn/nOut: [" + layer1.getNIn() + "/" + layer1.getNOut() + "]");
// Detailed HTML block, one property per line.
fullLine.append("Kernel size: ").append(Arrays.toString(layer1.getKernelSize())).append("<br/>");
fullLine.append("Stride: ").append(Arrays.toString(layer1.getStride())).append("<br/>");
fullLine.append("Padding: ").append(Arrays.toString(layer1.getPadding())).append("<br/>");
fullLine.append("Inputs number: ").append(layer1.getNIn()).append("<br/>");
fullLine.append("Outputs number: ").append(layer1.getNOut()).append("<br/>");
} else if (layer.conf().getLayer() instanceof SubsamplingLayer) {
    SubsamplingLayer layer1 = (SubsamplingLayer) layer.conf().getLayer();
// NOTE(review): fragment — the declarations of "weights"/"ret" and the start of the helper
// call these arguments belong to are outside this view; presumably the cuDNN-accelerated
// backpropGradient path of ConvolutionLayer.
int kW = weights.size(3);
int[] kernel = layerConf().getKernelSize();
int[] strides = layerConf().getStride();
int[] pad;
int[] outSize;
pad = layerConf().getPadding();
IActivation afn = layerConf().getActivationFn();
// Trailing arguments of a helper invocation whose opening is not visible here:
biasGradView, weightGradView, afn, layerConf().getCudnnAlgoMode(), layerConf().getCudnnBwdFilterAlgo(), layerConf().getCudnnBwdDataAlgo(), convolutionMode);
// Non-null means the helper handled the computation; fall through otherwise.
if (ret != null) {
    return ret;
// NOTE(review): fragment — forward-pass (preOutput) path; the enclosing method, the branch
// that "strides);" continues, and the declaration of "pad" are outside this view.
int kW = weights.size(3);
int[] kernel = layerConf().getKernelSize();
int[] strides = layerConf().getStride();
// Continuation of a call whose start is not visible here:
strides); } else { pad = layerConf().getPadding();
// Delegate to the (cuDNN) helper; a non-null result means it produced the pre-activation output.
INDArray ret = helper.preOutput(input, weights, bias, kernel, strides, pad,
        layerConf().getCudnnAlgoMode(), layerConf().getCudnnFwdAlgo(), convolutionMode);
if (ret != null) {
    return new Pair<>(ret, null);
/**
 * Returns the kernel extent along the row dimension.
 *
 * @return number of kernel rows configured on the wrapped DL4J layer
 */
@OptionMetadata( displayName = "number of rows in kernel", description = "The number of rows in the kernel (default = 5).", commandLineParamName = "rows", commandLineParamSynopsis = "-rows <int>", displayOrder = 4 )
public int getKernelSizeX() {
    // Kernel size is stored as [rows, columns] on the backend layer.
    int[] kernelSize = backend.getKernelSize();
    return kernelSize[0];
}
/**
 * Returns the stride along the row dimension.
 *
 * @return row stride configured on the wrapped DL4J layer
 */
@OptionMetadata( displayName = "number of rows in stride", description = "The stride along the rows (default = 1).", commandLineParamName = "strideRows", commandLineParamSynopsis = "-strideRows <int>", displayOrder = 6 )
public int getStrideRows() {
    // Stride is stored as [rows, columns] on the backend layer.
    int[] stride = backend.getStride();
    return stride[0];
}
/**
 * Builds the runtime convolution layer for this configuration.
 *
 * @param conf               the network configuration holding this layer
 * @param iterationListeners listeners to attach to the new layer
 * @param layerIndex         position of this layer within the network
 * @param layerParamsView    flattened view that will back the layer's parameters
 * @param initializeParams   whether parameter values should be (re)initialized into the view
 * @return the instantiated convolution layer
 */
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<IterationListener> iterationListeners,
        int layerIndex, INDArray layerParamsView, boolean initializeParams) {
    // Both nIn and nOut must be set before a convolution layer can be constructed.
    LayerValidation.assertNInNOutSet("ConvolutionLayer", getLayerName(), layerIndex, getNIn(), getNOut());
    org.deeplearning4j.nn.layers.convolution.ConvolutionLayer layer =
            new org.deeplearning4j.nn.layers.convolution.ConvolutionLayer(conf);
    layer.setListeners(iterationListeners);
    layer.setIndex(layerIndex);
    layer.setParamsViewArray(layerParamsView);
    // Parameter table is a set of views into the flattened array, optionally initialized here.
    Map<String, INDArray> params = initializer().init(conf, layerParamsView, initializeParams);
    layer.setParamTable(params);
    layer.setConf(conf);
    return layer;
}
/**
 * Reads the number of feature maps (filters) from a convolution layer configuration.
 *
 * @param conf the configuration whose layer must be a ConvolutionLayer
 * @return the number of kernels/filters (nOut) to apply
 */
public static int numFeatureMap(NeuralNetConfiguration conf) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer convLayer =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    return convLayer.getNOut();
}
/**
 * Applies the builder's global configuration to a single layer and validates it.
 * Null layers are tolerated: only the name-based validations run in that case.
 *
 * @param layer the layer to configure; may be null
 */
private void configureLayer(Layer layer) {
    // Resolve a display name for validation messages.
    String layerName = (layer == null || layer.getLayerName() == null)
            ? "Layer not named"
            : layer.getLayerName();
    learningRateValidation(layerName);
    if (layer != null) {
        copyConfigToLayer(layerName, layer);
    }
    // A FrozenLayer wraps another layer; the wrapped layer also inherits the global config.
    if (layer instanceof FrozenLayer) {
        copyConfigToLayer(layerName, ((FrozenLayer) layer).getLayer());
    }
    // Fall back to the globally configured convolution mode when none was set per-layer.
    if (layer instanceof ConvolutionLayer) {
        ConvolutionLayer convLayer = (ConvolutionLayer) layer;
        if (convLayer.getConvolutionMode() == null) {
            convLayer.setConvolutionMode(convolutionMode);
        }
    }
    if (layer instanceof SubsamplingLayer) {
        SubsamplingLayer subsamplingLayer = (SubsamplingLayer) layer;
        if (subsamplingLayer.getConvolutionMode() == null) {
            subsamplingLayer.setConvolutionMode(convolutionMode);
        }
    }
    LayerValidation.generalValidation(layerName, layer, useRegularization, useDropConnect, dropOut,
            l2, l2Bias, l1, l1Bias, dist);
}
/**
 * Returns the convolution mode, translated from the backend's representation.
 *
 * @return the wrapper-side convolution mode
 */
@OptionMetadata( displayName = "convolution mode", description = "The convolution mode (default = Truncate).", commandLineParamName = "mode", commandLineParamSynopsis = "-mode <string>", displayOrder = 2 )
public ConvolutionMode getConvolutionMode() {
    // Translate the DL4J enum into this wrapper's enum.
    org.deeplearning4j.nn.conf.ConvolutionMode backendMode = backend.getConvolutionMode();
    return ConvolutionMode.fromBackend(backendMode);
}
/**
 * Returns the cuDNN algorithm mode, translated from the backend's representation.
 *
 * @return the wrapper-side cuDNN algo mode
 */
@OptionMetadata( displayName = "CudnnAlgoMode", description = "The Cudnn algo mode (default = PREFER_FASTEST).", commandLineParamName = "cudnnAlgoMode", commandLineParamSynopsis = "-cudnnAlgoMode <string>", displayOrder = 3 )
public AlgoMode getCudnnAlgoMode() {
    // Translate the DL4J enum into this wrapper's enum.
    return AlgoMode.fromBackend( backend.getCudnnAlgoMode() );
}
// NOTE(review): fragment (duplicate of an earlier span) — the opening branch and the
// declarations of "map" and "layer" are outside this view.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer;
// Expose the convolution geometry as human-readable strings.
map.put("Kernel size", Arrays.toString(layer1.getKernelSize()));
map.put("Stride", Arrays.toString(layer1.getStride()));
map.put("Padding", Arrays.toString(layer1.getPadding()));
} else if (layer instanceof SubsamplingLayer) {
    SubsamplingLayer layer1 = (SubsamplingLayer) layer;
// NOTE(review): fragment (duplicate of an earlier span) — the enclosing branch and the
// StringBuilder declarations (mainLine/subLine/fullLine) are outside this view.
org.deeplearning4j.nn.conf.layers.ConvolutionLayer layer1 = (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer.conf().getLayer();
// Compact one-line summary: kernel / stride / padding.
mainLine.append("K: " + Arrays.toString(layer1.getKernelSize()) + " S: "
        + Arrays.toString(layer1.getStride()) + " P: " + Arrays.toString(layer1.getPadding()));
subLine.append("nIn/nOut: [" + layer1.getNIn() + "/" + layer1.getNOut() + "]");
// Detailed HTML block, one property per line.
fullLine.append("Kernel size: ").append(Arrays.toString(layer1.getKernelSize())).append("<br/>");
fullLine.append("Stride: ").append(Arrays.toString(layer1.getStride())).append("<br/>");
fullLine.append("Padding: ").append(Arrays.toString(layer1.getPadding())).append("<br/>");
fullLine.append("Inputs number: ").append(layer1.getNIn()).append("<br/>");
fullLine.append("Outputs number: ").append(layer1.getNOut()).append("<br/>");
} else if (layer.conf().getLayer() instanceof SubsamplingLayer) {
    SubsamplingLayer layer1 = (SubsamplingLayer) layer.conf().getLayer();
/**
 * Carves the flattened gradient view into per-parameter sub-views.
 * Layout of the view, in 'c' order: [bias (nOut) | weights (nOut * nIn * kH * kW)].
 *
 * @param conf         configuration whose layer must be a ConvolutionLayer
 * @param gradientView flattened gradient array covering all parameters of this layer
 * @return map from parameter key to its gradient view
 */
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    int[] kernelSize = layerConf.getKernelSize();
    int inChannels = layerConf.getNIn();
    int outChannels = layerConf.getNOut();
    // First outChannels entries hold the bias gradients.
    INDArray biasGradView =
            gradientView.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, outChannels));
    // The remainder reshapes to the weight gradient tensor [nOut, nIn, kH, kW].
    INDArray weightGradView =
            gradientView.get(NDArrayIndex.point(0), NDArrayIndex.interval(outChannels, numParams(conf)))
                    .reshape('c', outChannels, inChannels, kernelSize[0], kernelSize[1]);
    Map<String, INDArray> gradients = new LinkedHashMap<>();
    gradients.put(BIAS_KEY, biasGradView);
    gradients.put(WEIGHT_KEY, weightGradView);
    return gradients;
}
/**
 * Returns the kernel extent along the column dimension.
 *
 * @return number of kernel columns configured on the wrapped DL4J layer
 */
@OptionMetadata( displayName = "number of columns in kernel", description = "The number of columns in the kernel (default = 5).", commandLineParamName = "columns", commandLineParamSynopsis = "-columns <int>", displayOrder = 5 )
public int getKernelSizeY() {
    // Kernel size is stored as [rows, columns] on the backend layer.
    int[] kernelSize = backend.getKernelSize();
    return kernelSize[1];
}
/**
 * Returns the stride along the column dimension.
 *
 * @return column stride configured on the wrapped DL4J layer
 */
@OptionMetadata( displayName = "number of columns in stride", description = "The stride along the columns (default = 1).", commandLineParamName = "strideColumns", commandLineParamSynopsis = "-strideColumns <int>", displayOrder = 7 )
public int getStrideColumns() {
    // Stride is stored as [rows, columns] on the backend layer.
    int[] stride = backend.getStride();
    return stride[1];
}