/**
 * Reduction: variance of {@code i_x} along the given dimensions.
 *
 * @param i_x           input variable
 * @param biasCorrected if true, divide by (n - 1) rather than n
 * @param dimensions    dimensions to reduce over
 * @return the single output variable of the variance op
 */
public SDVariable variance(SDVariable i_x, boolean biasCorrected, int... dimensions) {
    Variance op = new Variance(sameDiff(), i_x, dimensions, biasCorrected);
    SDVariable[] outputs = op.outputVariables();
    return outputs[0];
}
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v1) {
    // If out = var(in), then dL/dIn = dL/dOut * dOut/dIn,
    // with dOut/dIn = (in - mean) * 2/(n-1) when bias-corrected, or * 2/n otherwise.
    val n = f().getReductionLength(this);
    int origRank = Shape.rankFromShape(arg().getShape());
    // Broadcast the reduced mean and the incoming gradient back to the input's
    // original rank so they can be combined element-wise with the input.
    SDVariable broadcastableMean = f().reductionBroadcastableWithOrigShape(origRank, dimensions, f().mean(arg(), dimensions));
    SDVariable broadcastableGrad = f().reductionBroadcastableWithOrigShape(origRank, dimensions, i_v1.get(0));
    // dOut/dIn as derived above; n comes from the reduction length over `dimensions`.
    SDVariable dOutdIn = arg().sub(broadcastableMean).mul(2.0 / (biasCorrected ? (n - 1) : n));
    // Chain rule: dL/dIn = dOut/dIn * dL/dOut.
    SDVariable dLdIn = dOutdIn.mul(broadcastableGrad);
    return Arrays.asList(dLdIn);
}
/**
 * Create a variance accumulation over the whole of {@code x}.
 *
 * @param x             input array
 * @param biasCorrected whether to use the bias-corrected (n - 1) divisor
 */
public Variance(INDArray x, boolean biasCorrected) {
    super(x);
    this.biasCorrected = biasCorrected;
    // y/z/n are inherited fields — presumably populated by super(x); TODO confirm
    init(x, y, z, n);
}
/**
 * Builds a copy of this op restricted to a single vector along {@code dimension},
 * propagating both the bias-correction flag and the final-transform setting.
 *
 * @param index     index of the vector along the dimension
 * @param dimension dimension to slice along
 * @return a new Variance op over that vector
 */
@Override
public Op opForDimension(int index, int dimension) {
    INDArray xAlongDimension = x.vectorAlongDimension(index, dimension);
    Variance ret;
    if (y() != null)
        ret = new Variance(xAlongDimension, y.vectorAlongDimension(index, dimension), xAlongDimension.length());
    else
        // Reuse the already-computed slice instead of recomputing
        // x.vectorAlongDimension(index, dimension) a second time (original did).
        ret = new Variance(xAlongDimension);
    ret.setBiasCorrected(biasCorrected);
    ret.setApplyFinalTransform(applyFinalTransform());
    return ret;
}
double result = nativeOps.execSummaryStatsScalarDouble(xShapeInfoHostPointer, op.opNum(), (DoublePointer) x, (LongPointer) xShapeInfo, (DoublePointer) extraArgs, ((Variance) op).isBiasCorrected()); op.setFinalResult(result); } else if (op.y() != null) { float result = nativeOps.execSummaryStatsScalarFloat(xShapeInfoHostPointer, op.opNum(), (FloatPointer) x, (LongPointer) xShapeInfo, (FloatPointer) extraArgs, ((Variance) op).isBiasCorrected()); op.setFinalResult(result); } else if (op.y() != null) { float result = nativeOps.execSummaryStatsScalarHalf(xShapeInfoHostPointer, op.opNum(), (ShortPointer) x, (LongPointer) xShapeInfo, (ShortPointer) extraArgs, ((Variance) op).isBiasCorrected()); op.setFinalResult(result); } else if (op.y() != null) { (LongPointer) xShapeInfo, (DoublePointer) extraArgs, (DoublePointer) result, (LongPointer) resultShapeInfo, (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected()); } else { nativeOps.execReduceDouble(xShapeInfoHostPointer, op.opNum(), (DoublePointer) x, (LongPointer) xShapeInfo, (FloatPointer) extraArgs, (FloatPointer) result, (LongPointer) resultShapeInfo, (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected()); } else { nativeOps.execReduceFloat(xShapeInfoHostPointer, op.opNum(), (FloatPointer) x,
/**
 * Returns the variance of this ndarray, optionally along one or more dimensions.
 *
 * @param biasCorrected whether to apply the bias correction (divide by n - 1 rather than n)
 * @param dimension     the dimension(s) to compute the variance along
 * @return the variance along the specified dimension(s) of this ndarray
 */
@Override
public INDArray var(boolean biasCorrected, int... dimension) {
    return Nd4j.getExecutioner().exec(new Variance(this, biasCorrected), dimension);
}
/**
 * Complex-number accumulation is not implemented for this op.
 *
 * <p>(A commented-out bias-corrected implementation previously lived here; it was
 * dead code and has been removed — see version control history if needed.)
 *
 * @param accum the complex accumulated value (ignored)
 * @throws UnsupportedOperationException always
 */
@Override
public IComplexNumber getAndSetFinalResult(IComplexNumber accum) {
    throw new UnsupportedOperationException();
}
@Override
public void exec(int... dimension) {
    // A single dimension of Integer.MAX_VALUE signals a full reduction over the whole array.
    if (dimension.length == 1 && dimension[0] == Integer.MAX_VALUE) {
        exec();
        return;
    }
    // Result shape is the input shape with the reduced dimensions removed.
    int[] retShape = ArrayUtil.removeIndex(x.shape(), dimension);
    int nOps = x.tensorssAlongDimension(dimension);
    z = Nd4j.create(retShape);
    // Execute the op independently on each tensor along the given dimension(s),
    // writing each scalar result into the flat output at index i.
    for (int i = 0; i < nOps; i++) {
        double d = Nd4j.getExecutioner().execAndReturn(opForDimension(i, dimension)).getFinalResult().doubleValue();
        z.putScalar(i, d);
    }
}
@Override public float calculateFinalResult(float accum, long n) { //accumulation is sum_i (x_i-mean)^2 return (float) calculateFinalResult((double) accum, n); }
@Override
public void exec() {
    super.exec();
    // stdev = sqrt(variance) — the sqrt is applied in getAndSetFinalResult(...).
    // (Original comment had the relationship inverted.)
}
/**
 * Builds a copy of this op restricted to a single tensor along {@code dimension}.
 *
 * <p>Bug fix: the original {@code y() != null} branch constructed the sub-op
 * without {@code biasCorrected} and never set it afterwards, silently dropping
 * the bias-correction flag for pairwise inputs. Both branches now propagate it.
 *
 * @param index     index of the tensor along the dimension(s)
 * @param dimension dimension(s) to slice along
 * @return a new Variance op over that tensor
 */
@Override
public Variance opForDimension(int index, int... dimension) {
    INDArray xAlongDimension = x.tensorAlongDimension(index, dimension);
    Variance ret;
    if (y() != null)
        ret = new Variance(xAlongDimension, y.tensorAlongDimension(index, dimension), xAlongDimension.length(), biasCorrected);
    else
        ret = new Variance(xAlongDimension, biasCorrected);
    ret.setApplyFinalTransform(applyFinalTransform());
    return ret;
}
double result = nativeOps.execSummaryStatsScalarDouble(xShapeInfoHostPointer, op.opNum(), (DoublePointer) x, (IntPointer) xShapeInfo, (DoublePointer) extraArgs, ((Variance) op).isBiasCorrected()); op.setFinalResult(result); } else if (op.y() != null) { float result = nativeOps.execSummaryStatsScalarFloat(xShapeInfoHostPointer, op.opNum(), (FloatPointer) x, (IntPointer) xShapeInfo, (FloatPointer) extraArgs, ((Variance) op).isBiasCorrected()); op.setFinalResult(result); } else if (op.y() != null) { float result = nativeOps.execSummaryStatsScalarHalf(xShapeInfoHostPointer, op.opNum(), (ShortPointer) x, (IntPointer) xShapeInfo, (ShortPointer) extraArgs, ((Variance) op).isBiasCorrected()); op.setFinalResult(result); } else if (op.y() != null) { (IntPointer) xShapeInfo, (DoublePointer) extraArgs, (DoublePointer) result, (IntPointer) resultShapeInfo, (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected()); } else { nativeOps.execReduceDouble(xShapeInfoHostPointer, op.opNum(), (DoublePointer) x, (IntPointer) xShapeInfo, (FloatPointer) extraArgs, (FloatPointer) result, (IntPointer) resultShapeInfo, (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected()); } else { nativeOps.execReduceFloat(xShapeInfoHostPointer, op.opNum(), (FloatPointer) x,
/**
 * Returns the variance of this ndarray along the specified dimension(s).
 *
 * @param dimension the dimension(s) to compute the variance along
 * @return the variance along the specified dimension(s) of this ndarray
 */
@Override
public INDArray var(int... dimension) {
    return Nd4j.getExecutioner().exec(new Variance(this), dimension);
}
/**
 * Finalizes the float accumulation: the superclass produces the variance,
 * and the square root of it (the standard deviation) is stored and returned.
 *
 * @param accum the accumulated value
 * @return the standard deviation as a float
 */
@Override
public float getAndSetFinalResult(float accum) {
    double variance = super.getAndSetFinalResult(accum);
    float stdev = (float) FastMath.sqrt(variance);
    this.finalResult = stdev;
    return stdev;
}
/**
 * Finalizes the double accumulation: stdev = sqrt(variance), where the
 * variance is computed by the superclass.
 *
 * @param accum the accumulated value
 * @param n     number of accumulated elements
 * @return the standard deviation
 */
@Override
public double calculateFinalResult(double accum, long n) {
    double variance = super.calculateFinalResult(accum, n);
    return FastMath.sqrt(variance);
}
double res = nativeOps.execSummaryStatsScalarDouble(xShapeInfoHostPointer, op.opNum(), (DoublePointer) x, (LongPointer) xShapeInfo, (DoublePointer) extraArgs, ((Variance) op).isBiasCorrected()); (LongPointer) AtomicAllocator.getInstance().getPointer(op.z().shapeInfoDataBuffer(), context), (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected()); float res = nativeOps.execSummaryStatsScalarFloat(xShapeInfoHostPointer, op.opNum(), (FloatPointer) x, (LongPointer) xShapeInfo, (FloatPointer) extraArgs, ((Variance) op).isBiasCorrected()); (LongPointer) AtomicAllocator.getInstance().getPointer(op.z().shapeInfoDataBuffer(), context), (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected()); float res = nativeOps.execSummaryStatsScalarHalf(xShapeInfoHostPointer, op.opNum(), (ShortPointer) x, (LongPointer) xShapeInfo, (ShortPointer) extraArgs, ((Variance) op).isBiasCorrected()); (LongPointer) AtomicAllocator.getInstance().getPointer(op.z().shapeInfoDataBuffer(), context), (IntPointer) dimensionPointer, dimension.length, ((Variance) op).isBiasCorrected());
/**
 * Create a variance accumulation over {@code x} with a second input {@code y}.
 *
 * @param x             input array
 * @param y             second input array — NOTE(review): exact role depends on
 *                      the superclass accumulation contract; confirm
 * @param n             number of elements to accumulate over
 * @param biasCorrected whether to use the bias-corrected (n - 1) divisor
 */
public Variance(INDArray x, INDArray y, long n, boolean biasCorrected) {
    super(x, y, n);
    this.biasCorrected = biasCorrected;
    // z is an inherited field — presumably set by the superclass; TODO confirm
    init(x, y, z, n);
}
break; case "var": ret = new Variance(x, y, z, x.length(),(boolean) extraArgs[0]); break; default:
@Override public double getAndSetFinalResult(double accum) { //stdev is sqrt of variance: double d = FastMath.sqrt(super.getAndSetFinalResult(accum)); this.finalResult = d; return d; }