/**
 * Appends {@code values} to {@code arr1} along the given dimension.
 * A dimension of -1 flattens both arrays and joins them into a single row vector.
 *
 * @param arr1      array to append to
 * @param values    values to append
 * @param dimension dimension to concatenate on, or -1 for flattened append
 * @return the combined array
 */
static INDArray append(INDArray arr1, INDArray values, int dimension) {
    return dimension == -1
            ? Nd4j.toFlattened(arr1, values)
            : Nd4j.concat(dimension, arr1, values);
}
/**
 * Inserts {@code values} into {@code arr1} at {@code index} along {@code dimension}.
 * With dimension == -1 both arrays are flattened first and a row vector is returned;
 * otherwise the source is split at {@code index} on that dimension and re-concatenated
 * with the new values in between.
 *
 * @param arr1      source array
 * @param index     position at which to insert
 * @param values    values to insert
 * @param dimension dimension to insert on, or -1 for flattened insertion
 * @return a new array containing the inserted values
 */
static INDArray insert(INDArray arr1, int index, INDArray values, int dimension) {
    if (dimension != -1) {
        // Split on the requested dimension and stitch the pieces back together.
        INDArray head = arr1.get(createIntervalOnDimension(dimension, false, 0, index));
        INDArray tail = arr1.get(createIntervalOnDimension(dimension, false, index, arr1.shape()[dimension]));
        return Nd4j.concat(dimension, head, values, tail);
    }
    // Flattened insertion: treat everything as 1d vectors.
    INDArray flatSource = Nd4j.toFlattened(arr1);
    INDArray flatValues = Nd4j.toFlattened(values);
    INDArray head = flatSource.get(NDArrayIndex.interval(0, index));
    INDArray tail = flatSource.get(NDArrayIndex.interval(index, flatSource.length()));
    return Nd4j.toFlattened(head, flatValues, tail);
}
protected INDArray handleParamsView(INDArray outputArray, INDArray paramView) { //minor optimization when the views are the same, just return if(paramView == null || paramView == outputArray) return outputArray; INDArray flat = Nd4j.toFlattened(order(), outputArray); if (flat.length() != paramView.length()) throw new RuntimeException("ParamView length does not match initialized weights length (view length: " + paramView.length() + ", view shape: " + Arrays.toString(paramView.shape()) + "; flattened length: " + flat.length()); paramView.assign(flat); return paramView.reshape(order(), outputArray.shape()); }
/**
 * Converts 4d data to a 2d representation: each tensor-along-dimension (3,2,0)
 * is flattened into one row of a (channels x height*width*instances) matrix,
 * which is then transposed in place.
 * NOTE(review): assumes data is laid out as [instances, channels, height, width]
 * based on the size(0..3) reads — confirm against callers.
 *
 * @param data 4d input array (must not be null)
 * @return the 2d (in-place transposed) representation
 */
public static INDArray tailor4d2d(@NonNull INDArray data) {
    long miniBatch = data.size(0);
    long depth = data.size(1);
    long h = data.size(2);
    long w = data.size(3);

    INDArray twoD = Nd4j.create(depth, h * w * miniBatch);
    long numTads = data.tensorssAlongDimension(3, 2, 0);
    for (int i = 0; i < numTads; i++) {
        // Each tensor-along-dimension becomes one flattened row of the 2d result
        twoD.putRow(i, Nd4j.toFlattened(data.tensorAlongDimension(i, 3, 2, 0)));
    }
    return twoD.transposei();
}
/**
 * Removes the range covered by {@code interval} from {@code arr1}.
 * Only the first and last entries of {@code interval} are consulted as the
 * (inclusive) bounds of the deleted range. With dimension == -1 the array is
 * treated as flat and a row vector is returned; otherwise the surviving pieces
 * are concatenated back along {@code dimension}.
 *
 * @param dimension dimension to delete on, or -1 for flattened deletion
 * @param arr1      source array
 * @param interval  indices whose first and last values bound the deleted range
 * @return a new array with the range removed
 */
static INDArray delete(int dimension, INDArray arr1, int... interval) {
    int first = interval[0];
    int last = interval[interval.length - 1];
    if (dimension == -1) {
        INDArray before = arr1.get(NDArrayIndex.interval(0, first));
        if (last == arr1.length() - 1) {
            // Deleted range runs to the end: nothing after it survives
            return Nd4j.toFlattened(before);
        }
        INDArray after = arr1.get(NDArrayIndex.interval(last + 1, arr1.length()));
        return Nd4j.toFlattened(before, after);
    }
    INDArray before = arr1.get(createIntervalOnDimension(dimension, false, 0, first));
    if (last == arr1.shape()[dimension] - 1) {
        // Deleted range runs to the end of this dimension
        return before;
    }
    INDArray after = arr1.get(createIntervalOnDimension(dimension, false, last + 1, arr1.shape()[dimension]));
    return Nd4j.concat(dimension, before, after);
}
// Demo output: print the axis-sorted array, then flatten the 4x5 random array
// into a row vector and print that too.
// NOTE(review): fragment — the enclosing method and the variables referenced
// here (axisSortedArray, fourByFiveRandomZeroToOne, print) are defined outside
// this view.
print("Ascended sorted array on zero axis: ", axisSortedArray); INDArray flattened = Nd4j.toFlattened(fourByFiveRandomZeroToOne); print("Flattened array", flattened);
@Override public INDArray params() { //C order flattening, to match the gradient flattening order return Nd4j.toFlattened('c', params.values()); }
/**
 * Returns all parameter arrays flattened ('f' order) into a single row vector.
 */
public INDArray params() {
    // Map value iteration order matches entry iteration order, so copying
    // values() preserves the original concatenation order.
    List<INDArray> parameterArrays = new ArrayList<>(params.values());
    return Nd4j.toFlattened('f', parameterArrays);
}
private void flattenGradient() { if (flatteningOrders != null) { //Arrays with non-default order get flattened to row vector first, then everything is flattened to f order //TODO revisit this, and make more efficient List<INDArray> toFlatten = new ArrayList<>(); for (Map.Entry<String, INDArray> entry : gradients.entrySet()) { if (flatteningOrders.containsKey(entry.getKey()) && flatteningOrders.get(entry.getKey()) != DEFAULT_FLATTENING_ORDER) { //Specific flattening order for this array, that isn't the default toFlatten.add(Nd4j.toFlattened(flatteningOrders.get(entry.getKey()), entry.getValue())); } else { //default flattening order for this array toFlatten.add(entry.getValue()); } } flattenedGradient = Nd4j.toFlattened(DEFAULT_FLATTENING_ORDER, toFlatten); } else { //Standard case: flatten all to f order flattenedGradient = Nd4j.toFlattened(DEFAULT_FLATTENING_ORDER, gradients.values()); } }
@Override public INDArray gradient(List<String> order) { List<INDArray> toFlatten = new ArrayList<>(); if (flatteningOrders == null) { for (String s : order) { if (!gradients.containsKey(s)) continue; toFlatten.add(gradients.get(s)); } } else { for (String s : order) { if (!gradients.containsKey(s)) continue; if (flatteningOrders.containsKey(s) && flatteningOrders.get(s) != DEFAULT_FLATTENING_ORDER) { //Arrays with non-default order get flattened to row vector first, then everything is flattened to f order //TODO revisit this, and make more efficient toFlatten.add(Nd4j.toFlattened(flatteningOrders.get(s), gradients.get(s))); } else { toFlatten.add(gradients.get(s)); } } } return Nd4j.toFlattened(DEFAULT_FLATTENING_ORDER, toFlatten); }
/**
 * Computes per-example scores for every DataSet produced by the iterator and
 * returns them concatenated ('f' order flattening) into a single row vector.
 *
 * @param iter                   source of DataSets to score
 * @param addRegularizationTerms whether to include regularization penalties in each score
 * @return row vector of per-example scores across all DataSets
 */
public INDArray scoreExamples(DataSetIterator iter, boolean addRegularizationTerms) {
    List<INDArray> perBatchScores = new ArrayList<>();
    while (iter.hasNext()) {
        // Delegate per-DataSet scoring to the overload
        perBatchScores.add(scoreExamples(iter.next(), addRegularizationTerms));
    }
    return Nd4j.toFlattened('f', perBatchScores);
}
/** * Returns a 1 x m vector where the vector is composed of * a flattened vector of all of the weights for the * various neuralNets(w,hbias NOT VBIAS) and output layer * * @return the params for this neural net */ public INDArray params(boolean backwardOnly) { if (backwardOnly) return params(); List<INDArray> params = new ArrayList<>(); for (Layer layer : getLayers()) { INDArray layerParams = layer.params(); if (layerParams != null) params.add(layerParams); //may be null: subsampling etc layers } return Nd4j.toFlattened('f', params); }
/** * Get the parameters for the ComputationGraph * * @param backwardOnly If true: backprop parameters only (i.e., no visible layer biases used in layerwise pretraining layers) */ public INDArray params(boolean backwardOnly) { if (backwardOnly) return flattenedParams; List<INDArray> list = new ArrayList<>(layers.length); for (int i = 0; i < topologicalOrder.length; i++) { if (!vertices[topologicalOrder[i]].hasLayer()) continue; Layer l = vertices[topologicalOrder[i]].getLayer(); INDArray layerParams = l.params(); if (layerParams != null) list.add(layerParams); //may be null: subsampling etc layers } return Nd4j.toFlattened('f', list); }
/**
 * Converts 4d data to a 2d representation: each tensor-along-dimension (3,2,0)
 * is flattened into one row of a (channels x height*width*instances) matrix,
 * which is then transposed in place.
 *
 * Fix: sizes and the height*width*instances product are now computed in long
 * arithmetic — the previous int arithmetic could overflow for large arrays.
 * This also matches the long-based sibling implementation of this method.
 *
 * @param data 4d input array (must not be null)
 * @return the 2d (in-place transposed) representation
 */
public static INDArray tailor4d2d(@NonNull INDArray data) {
    long instances = data.size(0);
    long channels = data.size(1);
    long height = data.size(2);
    long width = data.size(3);

    // long product avoids int overflow for large height*width*instances
    INDArray in2d = Nd4j.create(channels, height * width * instances);

    long tads = data.tensorssAlongDimension(3, 2, 0);
    for (int i = 0; i < tads; i++) {
        // Each tensor-along-dimension becomes one flattened row of the 2d result
        INDArray thisTAD = data.tensorAlongDimension(i, 3, 2, 0);
        in2d.putRow(i, Nd4j.toFlattened(thisTAD));
    }
    return in2d.transposei();
}
// Convert each row of the batch into an Instance: flatten the row to 1d, build
// a DenseInstance sized to the attribute list, then (below) copy values
// attribute-by-attribute. NOTE(review): Instance/DenseInstance are presumably
// Weka types — confirm against the file's imports. Fragment — the inner loop
// body continues past this view.
for (int i = 0; i < batchsize; i++) { INDArray row = ndArray.getRow(i); INDArray flattenedRow = Nd4j.toFlattened(row); Instance inst = new DenseInstance(atts.size()); for (int j = 0; j < flattenedRow.size(1); j++) {
// Flatten the initialized weights in the requested order and verify the supplied
// parameter view has a matching length before copying into it.
// NOTE(review): fragment — the exception message expression and the rest of this
// method continue past this view.
INDArray flat = Nd4j.toFlattened(order, ret); if (flat.length() != paramView.length()) throw new RuntimeException("ParamView length does not match initialized weights length (view length: "