/**
 * Human-readable dump of this dataset: the features and labels matrices
 * (and the feature/label mask arrays, when present), with the INDArray
 * row separator ";" rewritten as a newline so each row prints on its own line.
 *
 * @return the formatted dump, or an empty string when features or labels are null
 */
@Override
public String toString() {
    StringBuilder builder = new StringBuilder();
    if (features != null && labels != null) {
        // ";" is a literal, not a pattern: String.replace does the same job
        // without compiling a regex on every call (replaceAll did).
        builder.append("===========INPUT===================\n")
                .append(getFeatures().toString().replace(";", "\n"))
                .append("\n=================OUTPUT==================\n")
                .append(getLabels().toString().replace(";", "\n"));
        if (featuresMask != null) {
            builder.append("\n===========INPUT MASK===================\n")
                    .append(getFeaturesMaskArray().toString().replace(";", "\n"));
        }
        if (labelsMask != null) {
            builder.append("\n===========OUTPUT MASK===================\n")
                    .append(getLabelsMaskArray().toString().replace(";", "\n"));
        }
        return builder.toString();
    } else {
        log.info("Features or labels are null values");
        return "";
    }
}
/**
 * Combines the hash codes of features, labels, and both mask arrays using
 * the standard 31-multiplier recipe; null components contribute 0.
 */
@Override
public int hashCode() {
    int hash = 0;
    Object[] components = {getFeatures(), getLabels(), getFeaturesMaskArray(), getLabelsMaskArray()};
    for (Object component : components) {
        hash = 31 * hash + (component == null ? 0 : component.hashCode());
    }
    return hash;
}
arrays.add(getLabelsMaskArray()); dimensions.add(ArrayUtil.range(1, getLabelsMaskArray().rank()));
if (ds.getLabelsMaskArray() != null) { if (labelsMasksToMerge == null) { labelsMasksToMerge = new INDArray[data.size()]; labelsMasksToMerge[count] = ds.getLabelsMaskArray();
/**
 * Clone the dataset.
 *
 * Features, labels, and any mask arrays are duplicated via {@code dup()};
 * column and label names are carried over as-is.
 *
 * @return a clone of the dataset
 */
@Override
public DataSet copy() {
    DataSet clone = new DataSet(getFeatures().dup(), getLabels().dup());
    if (getLabelsMaskArray() != null) {
        clone.setLabelsMaskArray(getLabelsMaskArray().dup());
    }
    if (getFeaturesMaskArray() != null) {
        clone.setFeaturesMaskArray(getFeaturesMaskArray().dup());
    }
    clone.setColumnNames(getColumnNames());
    clone.setLabelNames(getLabelNames());
    return clone;
}
/**
 * Renders features and labels (and any mask arrays) as labelled sections,
 * with the ";" row separators of the array dumps rewritten as newlines.
 *
 * @return the formatted dump, or an empty string when features or labels are null
 */
@Override
public String toString() {
    // Guard clause: nothing meaningful to render without both arrays.
    if (features == null || labels == null) {
        log.info("Features or labels are null values");
        return "";
    }
    StringBuilder sb = new StringBuilder();
    sb.append("===========INPUT===================\n");
    sb.append(getFeatures().toString().replaceAll(";", "\n"));
    sb.append("\n=================OUTPUT==================\n");
    sb.append(getLabels().toString().replaceAll(";", "\n"));
    if (featuresMask != null) {
        sb.append("\n===========INPUT MASK===================\n");
        sb.append(getFeaturesMaskArray().toString().replaceAll(";", "\n"));
    }
    if (labelsMask != null) {
        sb.append("\n===========OUTPUT MASK===================\n");
        sb.append(getLabelsMaskArray().toString().replaceAll(";", "\n"));
    }
    return sb.toString();
}
/**
 * Hash over features, labels, and the two mask arrays; null components
 * contribute 0, combined with the conventional multiplier of 31.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int h = getFeatures() == null ? 0 : getFeatures().hashCode();
    h = prime * h + (getLabels() == null ? 0 : getLabels().hashCode());
    h = prime * h + (getFeaturesMaskArray() == null ? 0 : getFeaturesMaskArray().hashCode());
    h = prime * h + (getLabelsMaskArray() == null ? 0 : getLabelsMaskArray().hashCode());
    return h;
}
labelsMask.add(ds.getLabelsMaskArray()); hasFM = true; if (ds.getLabelsMaskArray() != null) hasLM = true;
data.getFeaturesMaskArray(), data.getLabelsMaskArray()); log.trace("Prediction took {}ms", System.currentTimeMillis() - predictionStart);
data.getFeaturesMaskArray(), data.getLabelsMaskArray()); log.trace("Prediction took {}ms", System.currentTimeMillis() - predictionStart);
arrays.add(getLabelsMaskArray()); dimensions.add(ArrayUtil.range(1, getLabelsMaskArray().rank()));
if(ds.getLabelsMaskArray() != null){ if(labelsMasksToMerge == null){ labelsMasksToMerge = new INDArray[data.size()]; labelsMasksToMerge[count] = ds.getLabelsMaskArray();
new INDArray[] {input.getLabels()}, null, new INDArray[] {input.getLabelsMaskArray()}); MultiDataSet ret = featurize(inbW); return new DataSet(ret.getFeatures()[0], input.getLabels(), ret.getLabelsMaskArrays()[0], input.getLabelsMaskArray()); throw new UnsupportedOperationException("Feature masks not supported with featurizing currently"); return new DataSet(origMLN.feedForwardToLayer(frozenInputLayer + 1, input.getFeatures(), false) .get(frozenInputLayer + 1), input.getLabels(), null, input.getLabelsMaskArray());
/**
 * Label the probabilities of the input.
 *
 * Iterates the full iterator, runs the network forward on each batch
 * (passing the feature/label masks through when the batch has them), and
 * stacks the per-batch outputs vertically. Iteration stops early at the
 * first batch whose features or labels are null.
 *
 * @param iterator test data to evaluate
 * @param train    whether to run the forward pass in training mode
 * @return a vector of probabilities given each label, typically of the form
 *         [0.5, 0.5] or some other probability distribution summing to one
 */
public INDArray output(DataSetIterator iterator, boolean train) {
    List<INDArray> outList = new ArrayList<>();
    while (iterator.hasNext()) {
        DataSet next = iterator.next();
        // Use getFeatures() consistently; the original mixed it with the
        // deprecated getFeatureMatrix() alias in the null check.
        INDArray features = next.getFeatures();
        if (features == null || next.getLabels() == null)
            break; // preserve original behavior: stop at first incomplete batch
        if (next.hasMaskArrays()) {
            INDArray fMask = next.getFeaturesMaskArray();
            INDArray lMask = next.getLabelsMaskArray();
            outList.add(this.output(features, train, fMask, lMask));
        } else {
            outList.add(output(features, train));
        }
    }
    return Nd4j.vstack(outList.toArray(new INDArray[0]));
}
/**
 * Clone the dataset.
 *
 * @return a clone of the dataset: duplicated features, labels, and masks,
 *         with column and label names carried across
 */
@Override
public DataSet copy() {
    DataSet duplicate = new DataSet(getFeatures().dup(), getLabels().dup());
    duplicate.setColumnNames(getColumnNames());
    duplicate.setLabelNames(getLabelNames());
    if (getFeaturesMaskArray() != null) {
        duplicate.setFeaturesMaskArray(getFeaturesMaskArray().dup());
    }
    if (getLabelsMaskArray() != null) {
        duplicate.setLabelsMaskArray(getLabelsMaskArray().dup());
    }
    return duplicate;
}
INDArray labels = t.getLabels(); INDArray inMask = t.getFeaturesMaskArray(); INDArray outMask = t.getLabelsMaskArray(); INDArray predicted = this.model.getNetwork().output(features, false, inMask, outMask); evaluation.evalTimeSeries(labels, predicted, outMask);
/**Calculate the score for each example in a DataSet individually. Unlike {@link #score(DataSet)} and {@link #score(DataSet, boolean)}
 * this method does not average/sum over examples. This method allows for examples to be scored individually (at test time only), which
 * may be useful for example for autoencoder architectures and the like.<br>
 * Each row of the output (assuming addRegularizationTerms == true) is equivalent to calling score(DataSet) with a single example.
 * @param data The data to score
 * @param addRegularizationTerms If true: add l1/l2 regularization terms (if any) to the score. If false: don't add regularization terms
 * @return An INDArray (column vector) of size input.numRows(); the ith entry is the score (loss value) of the ith example
 * @throws UnsupportedOperationException if the network's output layer is not an {@link IOutputLayer}
 */
public INDArray scoreExamples(DataSet data, boolean addRegularizationTerms) {
    boolean hasMaskArray = data.hasMaskArrays();
    if (hasMaskArray)
        setLayerMaskArrays(data.getFeaturesMaskArray(), data.getLabelsMaskArray());
    try {
        // getFeatures() replaces the deprecated getFeatureMatrix() alias.
        feedForward(data.getFeatures(), false);
        setLabels(data.getLabels());
        if (!(getOutputLayer() instanceof IOutputLayer)) {
            throw new UnsupportedOperationException(
                            "Cannot calculate score with respect to labels without an OutputLayer");
        }
        IOutputLayer ol = (IOutputLayer) getOutputLayer();
        ol.setLabels(data.getLabels());
        double l1 = (addRegularizationTerms ? calcL1(true) : 0.0);
        double l2 = (addRegularizationTerms ? calcL2(true) : 0.0);
        return ol.computeScoreForExamples(l1, l2);
    } finally {
        // Previously, an exception thrown after setLayerMaskArrays (e.g. the
        // UnsupportedOperationException above) skipped this cleanup and left
        // stale mask state on the layers. finally guarantees it runs.
        if (hasMaskArray)
            clearLayerMaskArrays();
    }
}
INDArray lables = t.getLabels(); INDArray outMask = t.getLabelsMaskArray(); INDArray predicted = mln.output(features, false);
boolean hasMaskArray = data.hasMaskArrays(); if (hasMaskArray) setLayerMaskArrays(data.getFeaturesMaskArray(), data.getLabelsMaskArray());
INDArray lMask = next.getLabelsMaskArray();