throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank " + input.rank() + " array with shape " + Arrays.toString(input.shape()) + ". Missing preprocessor or wrong input type? " + layerId()); throw new DL4JInvalidInputException( "Input cardinality (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape()) + ") is invalid: does not match layer input cardinality (layer # inputs = "
/**
 * Builds a joint iterator over several source iterators, one producer per source.
 *
 * @param iterators          non-empty list of source iterators, one per producer
 * @param singleDeviceMode   if true, all producers are pinned to a single device
 * @param bufferSize         prefetch buffer size per device
 * @param inequalityHandling policy applied when sources yield unequal numbers of batches
 * @throws DL4JInvalidInputException if {@code iterators} is empty
 */
public JointParallelDataSetIterator(@NonNull List<DataSetIterator> iterators, boolean singleDeviceMode, int bufferSize, @NonNull InequalityHandling inequalityHandling) {
    super(iterators.size());

    // Record the requested configuration before any validation.
    this.inequalityHandling = inequalityHandling;
    this.numProducers = iterators.size();
    this.bufferSizePerDevice = bufferSize;
    this.enforceSingleDevice = singleDeviceMode;

    // No sources means nothing can ever be produced — fail fast.
    if (numProducers == 0)
        throw new DL4JInvalidInputException("You can't start ParallelDataSetIterator without input data");

    initializeIterators(iterators);
}
public Builder addSourceIterator(@NonNull DataSetIterator iterator) { if (!iterator.asyncSupported()) throw new DL4JInvalidInputException("Source iterators should support async mode"); //TODO: add strict equality check here, we don't want it equal if (!hasIterator(iterator)) iterators.add(iterator); else throw new DL4JInvalidInputException("You can't put equal iterators into this joint iterator"); return this; }
public FileSplitParallelDataSetIterator(@NonNull File rootFolder, @NonNull String pattern, @NonNull FileCallback callback, int numThreads, int bufferPerThread, @NonNull InequalityHandling inequalityHandling) { super(numThreads); if (!rootFolder.exists() || !rootFolder.isDirectory()) throw new DL4JInvalidInputException("Root folder should point to existing folder"); this.pattern = pattern; this.inequalityHandling = inequalityHandling; this.buffer = bufferPerThread; String modifiedPattern = pattern.replaceAll("\\%d", ".*."); IOFileFilter fileFilter = new RegexFileFilter(modifiedPattern); List<File> files = new ArrayList<>(FileUtils.listFiles(rootFolder, fileFilter, null)); log.debug("Files found: {}; Producers: {}", files.size(), numProducers); if (files.size() < 1) throw new DL4JInvalidInputException("No suitable files were found"); int numDevices = Nd4j.getAffinityManager().getNumberOfDevices(); int cnt = 0; for (List<File> part : Lists.partition(files, files.size() / numThreads)) { // discard remainder if (cnt >= numThreads) break; int cDev = cnt % numDevices; asyncIterators.add(new AsyncDataSetIterator(new FileSplitDataSetIterator(part, callback), bufferPerThread, true, cDev)); cnt++; } }
throw new DL4JInvalidInputException( "Invalid input data or configuration: kernel height and input height must satisfy 0 < kernel height <= input height + 2 * padding height. " + "\nGot kernel height = " + kernel[0] + ", input height = " + inH throw new DL4JInvalidInputException( "Invalid input data or configuration: kernel width and input width must satisfy 0 < kernel width <= input width + 2 * padding width. " + "\nGot kernel width = " + kernel[1] + ", input width = " + inW
throw new DL4JInvalidInputException("Model is unknown: " + model.getClass().getCanonicalName());
@Override public INDArray preOutput(boolean training) { if (input.columns() != 1) { //Assume shape is [numExamples,1], and each entry is an integer index throw new DL4JInvalidInputException( "Cannot do forward pass for embedding layer with input more than one column. " + "Expected input shape: [numExamples,1] with each entry being an integer index " + layerId()); } int[] indexes = new int[input.length()]; for (int i = 0; i < indexes.length; i++) indexes[i] = input.getInt(i, 0); INDArray weights = getParam(DefaultParamInitializer.WEIGHT_KEY); INDArray bias = getParam(DefaultParamInitializer.BIAS_KEY); INDArray rows = Nd4j.pullRows(weights, 1, indexes); rows.addiRowVector(bias); return rows; }
public INDArray preOutput(boolean training) { applyDropOutIfNecessary(training); INDArray b = getParam(DefaultParamInitializer.BIAS_KEY); INDArray W = getParam(DefaultParamInitializer.WEIGHT_KEY); //Input validation: if (input.rank() != 2 || input.columns() != W.rows()) { if (input.rank() != 2) { throw new DL4JInvalidInputException("Input that is not a matrix; expected matrix (rank 2), got rank " + input.rank() + " array with shape " + Arrays.toString(input.shape()) + ". Missing preprocessor or wrong input type? " + layerId()); } throw new DL4JInvalidInputException( "Input size (" + input.columns() + " columns; shape = " + Arrays.toString(input.shape()) + ") is invalid: does not match layer input size (layer # inputs = " + W.size(0) + ") " + layerId()); } if (conf.isUseDropConnect() && training && layerConf().getDropOut() > 0) { W = Dropout.applyDropConnect(this, DefaultParamInitializer.WEIGHT_KEY); } INDArray ret = input.mmul(W).addiRowVector(b); if (maskArray != null) { applyMask(ret); } return ret; }
@Override public INDArray activate(boolean training) { if (input.rank() != 3) throw new DL4JInvalidInputException("Got rank " + input.rank() + " array as input to Subsampling1DLayer with shape " + Arrays.toString(input.shape()) + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId()); // add singleton fourth dimension to input INDArray origInput = input; input = input.reshape(input.size(0), input.size(1), input.size(2), 1); // call 2D SubsamplingLayer's activate method INDArray acts = super.activate(training); // remove singleton fourth dimension from input and output activations input = origInput; acts = acts.reshape(acts.size(0), acts.size(1), acts.size(2)); return acts; } }
/**
 * Trains the network on a MultiDataSet, which is only supported when it carries
 * exactly one features array and one labels array (MultiLayerNetwork has a single
 * input and a single output). In that case it is unwrapped into a plain DataSet.
 *
 * @param dataSet data to fit; must contain exactly one features and one labels array
 * @throws DL4JInvalidInputException if the MultiDataSet has multiple inputs or outputs
 */
@Override
public void fit(MultiDataSet dataSet) {
    if (dataSet.getFeatures().length == 1 && dataSet.getLabels().length == 1) {
        INDArray fMask = null;
        INDArray lMask = null;

        if (dataSet.getFeaturesMaskArrays() != null)
            fMask = dataSet.getFeaturesMaskArrays()[0];
        // FIX: previously guarded on getFeaturesMaskArrays() != null before reading
        // getLabelsMaskArrays()[0], causing an NPE when only a features mask existed.
        if (dataSet.getLabelsMaskArrays() != null)
            lMask = dataSet.getLabelsMaskArrays()[0];

        INDArray features = dataSet.getFeatures()[0];
        INDArray labels = dataSet.getLabels()[0];

        DataSet ds = new DataSet(features, labels, fMask, lMask);
        fit(ds);
        // FIX: previously fell through and threw even after a successful fit.
        return;
    }
    throw new DL4JInvalidInputException(
                    "MultiLayerNetwork can't handle MultiDataSet. Please consider use of ComputationGraph");
}
INDArray labels2d = getLabels2d(); if (labels2d.size(1) != preOut.size(1)) { throw new DL4JInvalidInputException( "Labels array numColumns (size(1) = " + labels2d.size(1) + ") does not match output layer" + " number of outputs (nOut = " + preOut.size(1) + ") " + layerId());
@Override public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) { if (epsilon.rank() != 3) throw new DL4JInvalidInputException("Got rank " + epsilon.rank() + " array as epsilon for Subsampling1DLayer backprop with shape " + Arrays.toString(epsilon.shape()) + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId()); // add singleton fourth dimension to input and next layer's epsilon INDArray origInput = input; input = input.reshape(input.size(0), input.size(1), input.size(2), 1); epsilon = epsilon.reshape(epsilon.size(0), epsilon.size(1), epsilon.size(2), 1); // call 2D SubsamplingLayer's backpropGradient method Pair<Gradient, INDArray> gradientEpsNext = super.backpropGradient(epsilon); INDArray epsNext = gradientEpsNext.getSecond(); // remove singleton fourth dimension from input and current epsilon input = origInput; epsNext = epsNext.reshape(epsNext.size(0), epsNext.size(1), epsNext.size(2)); return new Pair<>(gradientEpsNext.getFirst(), epsNext); }
@Override public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) { if (epsilon.rank() != 3) throw new DL4JInvalidInputException("Got rank " + epsilon.rank() + " array as epsilon for Convolution1DLayer backprop with shape " + Arrays.toString(epsilon.shape()) + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId()); // add singleton fourth dimension to input and next layer's epsilon epsilon = epsilon.reshape(epsilon.size(0), epsilon.size(1), epsilon.size(2), 1); INDArray origInput = input; input = input.reshape(input.size(0), input.size(1), input.size(2), 1); // call 2D ConvolutionLayer's backpropGradient method Pair<Gradient, INDArray> gradientEpsNext = super.backpropGradient(epsilon); INDArray epsNext = gradientEpsNext.getSecond(); // remove singleton fourth dimension from input and current epsilon epsNext = epsNext.reshape(epsNext.size(0), epsNext.size(1), epsNext.size(2)); input = origInput; return new Pair<>(gradientEpsNext.getFirst(), epsNext); }
throw new DL4JInvalidInputException("Got rank " + input.rank() + " array as input to SubsamplingLayer with shape " + Arrays.toString(input.shape()) + ". Expected rank 4 array with shape [minibatchSize, depth, inputHeight, inputWidth]. "
if (layerName == null) layerName = "(not named)"; throw new DL4JInvalidInputException("Got rank " + input.rank() + " array as input to ConvolutionLayer (layer name = " + layerName + ", layer index = " + index + ") with shape " + Arrays.toString(input.shape()) + ". " if (layerName == null) layerName = "(not named)"; throw new DL4JInvalidInputException("Cannot do forward pass in Convolution layer (layer name = " + layerName + ", layer index = " + index + "): input array depth does not match CNN layer configuration" + " (data input depth = " + input.size(1) + ", [minibatch,inputDepth,height,width]="
throw new DL4JInvalidInputException("Received input with size(1) = " + input.size(1) + " (input array shape = " + Arrays.toString(input.shape()) + "); input.size(1) must match layer nIn size (nIn = " + inputWeights.size(0) + ")"); throw new DL4JInvalidInputException("Previous activations (stored state) number of examples = " + prevOutputActivations.size(0) + " but input array number of examples = " + input.size(0) + ". Possible cause: using rnnTimeStep() without calling"