throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank " + input.rank() + " array with shape " + Arrays.toString(input.shape()) + ". Missing preprocessor or wrong input type? " + layerId()); throw new DL4JInvalidInputException( "Input cardinality (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape()) + ") is invalid: does not match layer input cardinality (layer # inputs = "
/**
 * Creates a Builder configured for the given number of parties.
 *
 * @param parties number of parties that will contribute gradients; must be >= 1
 * @throws DL4JInvalidConfigException if {@code parties} is less than 1
 */
public Builder(int parties) {
    if (parties < 1)
        throw new DL4JInvalidConfigException(
                        "Number of parties for GradientsAccumulation should be positive value");

    this.parties = parties;
}
throw new DL4JInvalidInputException( "Invalid input data or configuration: kernel height and input height must satisfy 0 < kernel height <= input height + 2 * padding height. " + "\nGot kernel height = " + kernel[0] + ", input height = " + inH throw new DL4JInvalidInputException( "Invalid input data or configuration: kernel width and input width must satisfy 0 < kernel width <= input width + 2 * padding width. " + "\nGot kernel width = " + kernel[1] + ", input width = " + inW int truncated = (int) d; int sameSize = (int) Math.ceil(inW / ((double) strides[1])); throw new DL4JInvalidConfigException( "Invalid input data or configuration: Combination of kernel size, stride and padding are not valid for given input width, using ConvolutionMode.Strict\n" + "ConvolutionMode.Strict requires: output width = (input - kernelSize + 2*padding)/stride + 1 to be an integer. Got: ("
throw new DL4JException("Cannot convert sequence writables to one-hot: class index " + classIdx + " >= numClass (" + details.oneHotNumClasses + ")");
/**
 * Asserts that the layer nIn and nOut values are set for the layer
 *
 * @param layerType  Type of layer ("DenseLayer", etc)
 * @param layerName  Name of the layer (may be null if not set)
 * @param layerIndex Index of the layer
 * @param nIn        nIn value
 * @param nOut       nOut value
 * @throws DL4JInvalidConfigException if either nIn or nOut is not positive
 */
public static void assertNInNOutSet(String layerType, String layerName, int layerIndex, int nIn, int nOut) {
    if (nIn <= 0 || nOut <= 0) {
        // Substitute a placeholder so the error message stays informative when no name was set
        if (layerName == null)
            layerName = "(name not set)";
        throw new DL4JInvalidConfigException(layerType + " (index=" + layerIndex + ", name=" + layerName + ") nIn="
                        + nIn + ", nOut=" + nOut + "; nIn and nOut must be > 0");
    }
}
}
/**
 * Builds a joint iterator that pulls from several source iterators in parallel.
 *
 * @param iterators          source iterators to multiplex; must be non-empty
 * @param singleDeviceMode   whether all producers are pinned to a single device
 * @param bufferSize         prefetch buffer size per device
 * @param inequalityHandling strategy applied when sources are exhausted unevenly
 * @throws DL4JInvalidInputException if {@code iterators} is empty
 */
public JointParallelDataSetIterator(@NonNull List<DataSetIterator> iterators, boolean singleDeviceMode,
                int bufferSize, @NonNull InequalityHandling inequalityHandling) {
    super(iterators.size());

    // Nothing to produce from an empty source list - fail fast
    if (iterators.isEmpty())
        throw new DL4JInvalidInputException("You can't start ParallelDataSetIterator without input data");

    this.enforceSingleDevice = singleDeviceMode;
    this.bufferSizePerDevice = bufferSize;
    this.numProducers = iterators.size();
    this.inequalityHandling = inequalityHandling;

    initializeIterators(iterators);
}
throw new DL4JException( "Cannot calculate gradient and score with respect to labels: final layer is not an IOutputLayer");
/**
 * This method enables optional limit for max number of updates per message
 *
 * Default value: 1.0 (no limit)
 * @param boundary positive value in range 0..1
 * @return this Builder, to allow method chaining
 * @throws DL4JInvalidConfigException if {@code boundary} is zero or negative
 */
public Builder updatesBoundary(double boundary) {
    // Values >= 1.0 mean "no limit", which is the default - nothing to store
    if (boundary >= 1.0)
        return this;

    if (boundary <= 0.0)
        throw new DL4JInvalidConfigException("Boundary should have positive value");

    this.boundary = boundary;
    return this;
}
public Builder addSourceIterator(@NonNull DataSetIterator iterator) { if (!iterator.asyncSupported()) throw new DL4JInvalidInputException("Source iterators should support async mode"); //TODO: add strict equality check here, we don't want it equal if (!hasIterator(iterator)) iterators.add(iterator); else throw new DL4JInvalidInputException("You can't put equal iterators into this joint iterator"); return this; }
/**
 * This constructor will create ModelSavingCallback instance that will save models in specified folder
 *
 * PLEASE NOTE: Make sure you have write access to the target folder
 *
 * @param rootFolder       File object referring to target folder
 * @param fileNameTemplate template used when naming the saved model files
 * @throws DL4JInvalidConfigException if the folder is not a directory or the template is empty
 */
public ModelSavingCallback(@NonNull File rootFolder, @NonNull String fileNameTemplate) {
    // Validate both arguments before touching any state
    if (!rootFolder.isDirectory())
        throw new DL4JInvalidConfigException("rootFolder argument should point to valid folder");

    if (fileNameTemplate.isEmpty())
        throw new DL4JInvalidConfigException("Filename template can't be empty String");

    this.rootFolder = rootFolder;
    this.template = fileNameTemplate;
}
public FileSplitParallelDataSetIterator(@NonNull File rootFolder, @NonNull String pattern, @NonNull FileCallback callback, int numThreads, int bufferPerThread, @NonNull InequalityHandling inequalityHandling) { super(numThreads); if (!rootFolder.exists() || !rootFolder.isDirectory()) throw new DL4JInvalidInputException("Root folder should point to existing folder"); this.pattern = pattern; this.inequalityHandling = inequalityHandling; this.buffer = bufferPerThread; String modifiedPattern = pattern.replaceAll("\\%d", ".*."); IOFileFilter fileFilter = new RegexFileFilter(modifiedPattern); List<File> files = new ArrayList<>(FileUtils.listFiles(rootFolder, fileFilter, null)); log.debug("Files found: {}; Producers: {}", files.size(), numProducers); if (files.size() < 1) throw new DL4JInvalidInputException("No suitable files were found"); int numDevices = Nd4j.getAffinityManager().getNumberOfDevices(); int cnt = 0; for (List<File> part : Lists.partition(files, files.size() / numThreads)) { // discard remainder if (cnt >= numThreads) break; int cDev = cnt % numDevices; asyncIterators.add(new AsyncDataSetIterator(new FileSplitDataSetIterator(part, callback), bufferPerThread, true, cDev)); cnt++; } }
throw new DL4JInvalidConfigException("Invalid configuration: convolution mode is null for layer (idx=" + layerIdx + ", name=" + name + ", type=" + layerClass.getName() + ")"); throw new DL4JInvalidConfigException(getConfigErrorCommonLine1(layerIdx, layerName, layerClass, sH <= 0) + " Invalid strides: strides must be > 0 (strideH = " + sH + ", strideW = " + sW + ")" + "\n" + getConfigErrorCommonLastLine(inputType, kernelSize, stride, padding, outputDepth, throw new DL4JInvalidConfigException(getConfigErrorCommonLine1(layerIdx, layerName, layerClass, true) + " Invalid input configuration for kernel height. Require 0 < kH <= inHeight + 2*padH; got (kH=" + kH + ", inHeight=" + inHeight + ", padH=" + padH + ")\n" + getConfigErrorCommonLastLine( throw new DL4JInvalidConfigException(getConfigErrorCommonLine1(layerIdx, layerName, layerClass, false) + " Invalid input configuration for kernel width. Require 0 < kW <= inWidth + 2*padW; got (kW=" + kW + ", inWidth=" + inWidth + ", padW=" + padW + ")\n" + getConfigErrorCommonLastLine( int truncated = (int) d; int sameSize = (int) Math.ceil(inHeight / ((double) stride[0])); throw new DL4JInvalidConfigException(getConfigErrorCommonLine1(layerIdx, layerName, layerClass, true) + "\nCombination of kernel size, stride and padding are not valid for given input height, using ConvolutionMode.Strict\n" + "ConvolutionMode.Strict requires: output height = (input height - kernelSize + 2*padding)/stride + 1 in height dimension to be an integer. Got: (" int truncated = (int) d; int sameSize = (int) Math.ceil(inWidth / ((double) stride[1])); throw new DL4JInvalidConfigException(getConfigErrorCommonLine1(layerIdx, layerName, layerClass, false) + "\nCombination of kernel size, stride and padding are not valid for given input width, using ConvolutionMode.Strict\n" + "ConvolutionMode.Strict requires: output width = (input width - kernelSize + 2*padding)/stride + 1 in width dimension to be an integer. Got: ("
@Override public INDArray preOutput(boolean training) { if (input.columns() != 1) { //Assume shape is [numExamples,1], and each entry is an integer index throw new DL4JInvalidInputException( "Cannot do forward pass for embedding layer with input more than one column. " + "Expected input shape: [numExamples,1] with each entry being an integer index " + layerId()); } int[] indexes = new int[input.length()]; for (int i = 0; i < indexes.length; i++) indexes[i] = input.getInt(i, 0); INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY); INDArray bias = getParam(DefaultParamInitializer.BIAS_KEY); INDArray rows = Nd4j.pullRows(weights, 1, indexes); rows.addiRowVector(bias); return rows; }
this.trainerContext = new SymmetricTrainerContext(); if (this.accumulator == null) throw new DL4JInvalidConfigException( "Please specify GradientsAccumulator fo encoded gradients mode");
@Override public INDArray activate(boolean training) { if (input.rank() != 3) throw new DL4JInvalidInputException("Got rank " + input.rank() + " array as input to Subsampling1DLayer with shape " + Arrays.toString(input.shape()) + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId()); // add singleton fourth dimension to input INDArray origInput = input; input = input.reshape(input.size(0), input.size(1), input.size(2), 1); // call 2D SubsamplingLayer's activate method INDArray acts = super.activate(training); // remove singleton fourth dimension from input and output activations input = origInput; acts = acts.reshape(acts.size(0), acts.size(1), acts.size(2)); return acts; } }
/**
 * Fits the network on a MultiDataSet, which is supported only when it contains exactly
 * one features array and one labels array (it is then unwrapped into a plain DataSet).
 *
 * @param dataSet the MultiDataSet to fit on
 * @throws DL4JInvalidInputException if the MultiDataSet has multiple feature or label arrays
 */
@Override
public void fit(MultiDataSet dataSet) {
    if (dataSet.getFeatures().length == 1 && dataSet.getLabels().length == 1) {
        INDArray fMask = null;
        INDArray lMask = null;

        if (dataSet.getFeaturesMaskArrays() != null)
            fMask = dataSet.getFeaturesMaskArrays()[0];

        // FIX: previously this checked getFeaturesMaskArrays() != null before reading
        // getLabelsMaskArrays()[0], risking an NPE and ignoring a present labels mask
        if (dataSet.getLabelsMaskArrays() != null)
            lMask = dataSet.getLabelsMaskArrays()[0];

        INDArray features = dataSet.getFeatures()[0];
        INDArray labels = dataSet.getLabels()[0];

        DataSet ds = new DataSet(features, labels, fMask, lMask);
        fit(ds);
        // FIX: return after the successful fit; previously control fell through and the
        // exception below was thrown even for the supported single-input/single-output case
        return;
    }

    throw new DL4JInvalidInputException(
                    "MultiLayerNetwork can't handle MultiDataSet. Please consider use of ComputationGraph");
}
/**
 * Computes the pre-activation output (input * W + b) for this fully-connected layer.
 * Applies input dropout (and optionally DropConnect on the weights) when training.
 */
public INDArray preOutput(boolean training) {
    // Dropout must be applied to the input before the linear transform
    applyDropOutIfNecessary(training);
    INDArray b = getParam(DefaultParamInitializer.BIAS_KEY);
    INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY);

    //Input validation:
    if (input.rank() != 2 || input.columns() != W.rows()) {
        if (input.rank() != 2) {
            throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank "
                            + input.rank() + " array with shape " + Arrays.toString(input.shape())
                            + ". Missing preprocessor or wrong input type? " + layerId());
        }
        throw new DL4JInvalidInputException(
                        "Input size (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape())
                                        + ") is invalid: does not match layer input size (layer # inputs = "
                                        + W.size(0) + ") " + layerId());
    }

    // DropConnect: replace W with a dropped-out copy for this forward pass only
    if (conf.isUseDropConnect() && training && layerConf().getDropOut() > 0) {
        W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY);
    }

    INDArray ret = input.mmul(W).addiRowVector(b);

    // Zero out masked-off examples/timesteps, if a mask is present
    if (maskArray != null) {
        applyMask(ret);
    }

    return ret;
}
@Override public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) { if (epsilon.rank() != 3) throw new DL4JInvalidInputException("Got rank " + epsilon.rank() + " array as epsilon for Subsampling1DLayer backprop with shape " + Arrays.toString(epsilon.shape()) + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId()); // add singleton fourth dimension to input and next layer's epsilon INDArray origInput = input; input = input.reshape(input.size(0), input.size(1), input.size(2), 1); epsilon = epsilon.reshape(epsilon.size(0), epsilon.size(1), epsilon.size(2), 1); // call 2D SubsamplingLayer's backpropGradient method Pair<Gradient, INDArray> gradientEpsNext = super.backpropGradient(epsilon); INDArray epsNext = gradientEpsNext.getSecond(); // remove singleton fourth dimension from input and current epsilon input = origInput; epsNext = epsNext.reshape(epsNext.size(0), epsNext.size(1), epsNext.size(2)); return new Pair<>(gradientEpsNext.getFirst(), epsNext); }
@Override public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) { if (epsilon.rank() != 3) throw new DL4JInvalidInputException("Got rank " + epsilon.rank() + " array as epsilon for Convolution1DLayer backprop with shape " + Arrays.toString(epsilon.shape()) + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId()); // add singleton fourth dimension to input and next layer's epsilon epsilon = epsilon.reshape(epsilon.size(0), epsilon.size(1), epsilon.size(2), 1); INDArray origInput = input; input = input.reshape(input.size(0), input.size(1), input.size(2), 1); // call 2D ConvolutionLayer's backpropGradient method Pair<Gradient, INDArray> gradientEpsNext = super.backpropGradient(epsilon); INDArray epsNext = gradientEpsNext.getSecond(); // remove singleton fourth dimension from input and current epsilon epsNext = epsNext.reshape(epsNext.size(0), epsNext.size(1), epsNext.size(2)); input = origInput; return new Pair<>(gradientEpsNext.getFirst(), epsNext); }
INDArray labels2d = getLabels2d(); if (labels2d.size(1) != preOut.size(1)) { throw new DL4JInvalidInputException( "Labels array numColumns (size(1) = " + labels2d.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOut.size(1) + ") " + layerId());