/**
 * Computes the element-wise absolute value of the given array.
 *
 * @param ndArray the input array
 * @return a new array containing |x| for every element x of {@code ndArray}
 */
public static INDArray abs(INDArray ndArray) {
    // Delegate to the two-argument overload; true requests a copy so the input is left untouched.
    return abs(ndArray, true);
}
@Override
public INDArray noOp() {
    // Produce the element-wise absolute value of this op's input array.
    INDArray input = x();
    return Transforms.abs(input);
}
@Override
public INDArray noOp() {
    // Return |x| computed over the op's input (Transforms.abs returns a new array).
    return Transforms.abs(this.x());
}
@Override
public INDArray noOp() {
    // Element-wise absolute value of the input array.
    final INDArray source = x();
    return Transforms.abs(source);
}
public static void main(String[] args) { INDArray nd = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[]{2, 6}); INDArray nd2 = Nd4j.create(new float[]{15,16,17,18,19,20,21,22,23,24,25,26,27,28}, new int[]{2, 7}); INDArray ndv; // a placeholder variable to print out and leave the original data unchanged //this normalizes data and helps activate artificial neurons in deep-learning nets and assigns it to var ndv ndv = sigmoid(nd); System.out.println(ndv); //this gives you absolute value ndv = abs(nd); System.out.println(ndv); //a hyperbolic function to transform data much like sigmoid. ndv = tanh(nd); System.out.println(ndv); // ndv = hardTanh(nd); // System.out.println(ndv); //exponentiation ndv = exp(nd); System.out.println(ndv); //square root ndv = sqrt(nd); System.out.println(ndv); } }
// Computes the per-element squared-error score array for this loss function.
// NOTE(review): depends on a field `trainMask` declared elsewhere — confirm its shape matches scoreArr.
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray scoreArr;
    // Apply the activation to a copy of the pre-activation output (dup() keeps preOutput intact).
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    // |labels - output|; squared on the next line, so the result is (labels - output)^2.
    // (The abs is redundant before squaring, but is preserved here unchanged.)
    INDArray yMinusyHat = Transforms.abs(labels.sub(output));
    scoreArr = yMinusyHat.mul(yMinusyHat);
    // NOTE(review): trainMask is applied unconditionally, while the per-call mask below is
    // applied only when non-null — confirm this asymmetry is intended.
    scoreArr = scoreArr.mul(trainMask);
    if (mask != null) {
        // Per-example mask: zero out (or scale) rows in place via a column-vector multiply.
        scoreArr.muliColumnVector(mask);
    }
    return scoreArr;
}
/**
 * Converts a relative finite-difference step into an absolute step for each element of {@code x}.
 *
 * @param relStep the relative step size; when {@code null}, defaults to sqrt(eps) relative to
 *                {@code x} — presumably machine epsilon for x's data type (TODO confirm
 *                against {@code getEpsRelativeTo})
 * @param x       the point at which the step is taken
 * @return sign(x) * relStep * max(|x|, 1), element-wise
 */
public static INDArray computeAbsoluteStep(INDArray relStep, INDArray x) {
    INDArray rel = relStep;
    if (rel == null) {
        // Default relative step: sqrt of the epsilon appropriate for x.
        rel = pow(Nd4j.scalar(getEpsRelativeTo(x)), 0.5);
    }
    // gte(0) yields {0,1}; *2 - 1 maps that to {-1,+1} (zero maps to +1).
    INDArray signX0 = x.gte(0).muli(2).subi(1);
    // Absolute step: sign(x) * rel * max(|x|, 1).
    return signX0.mul(rel).muli(max(abs(x), 1.0));
}
// Take |gradient| in place — safe here because the original gradient values aren't used again later.
Transforms.abs(gradient, false);
// Presumably accumulates u = element-wise max(u, |gradient|) over u's full length — confirm OldMax semantics.
Nd4j.getExecutioner().exec(new OldMax(u, gradient, u, u.length()));
print("Vector log", log);
// Element-wise absolute value; the input's name suggests values in [0, 1), in which
// case abs leaves them unchanged — confirm against where the array is created.
INDArray abs = abs(fourByFiveRandomZeroToOne);
print("Vector abs", abs);
// Select elements that are NOT central-difference and whose adjusted step magnitude is <= minDist.
INDArray adjustedCentral = and(not(central),lessThanOrEqual(abs(hAdjusted),minDist));
// For those elements: clamp the step to minDist and clear the one-sided flag (set to 0.0).
hAdjusted.put(adjustedCentral,minDist.get(adjustedCentral));
oneSided.put(adjustedCentral,Nd4j.scalar(0.0));
@Override
public DoubleTensor absInPlace() {
    // Mutate the backing tensor directly (false = no copy), then return this for chaining.
    Transforms.abs(tensor, false);
    return this;
}
/**
 * Abs function: element-wise absolute value.
 *
 * @param ndArray the input array
 * @return a new array containing |x| for each element x (the input is not modified)
 */
public static INDArray abs(INDArray ndArray) {
    return abs(ndArray, true);
}
@Override
public IntegerTensor absInPlace() {
    // In-place absolute value on the backing tensor; returns this so calls can be chained.
    Transforms.abs(this.tensor, false);
    return this;
}
@Override
public INDArray noOp() {
    // |x| of the op's input, computed into a new array.
    return Transforms.abs(x());
}
@Override
public INDArray noOp() {
    // Compute the absolute value of the input array element-wise.
    INDArray in = x();
    INDArray result = Transforms.abs(in);
    return result;
}
@Override
public INDArray noOp() {
    // Element-wise |x| over this op's input.
    return Transforms.abs(this.x());
}
/**
 * Drops the fractional part of every element, truncating toward zero
 * (e.g. 2.7 -> 2.0, -2.7 -> -2.0).
 *
 * @param tensor    the array whose values are truncated
 * @param duplicate when {@code true} the input is left untouched and a copy is returned;
 *                  when {@code false} the input itself is modified and returned
 * @return the truncated array
 */
public static INDArray castToInteger(INDArray tensor, boolean duplicate) {
    INDArray result = duplicate ? tensor.dup() : tensor;
    // Capture the sign first, since the abs below runs in place on result.
    INDArray sign = Transforms.sign(result);
    // Truncation toward zero: floor(|x|) * sign(x), all applied in place.
    Transforms.floor(Transforms.abs(result, false), false).muli(sign);
    return result;
} }
private INDArray activateHelperFullArray(INDArray inputArray, int[] poolDim) { switch (poolingType) { case MAX: return inputArray.max(poolDim); case AVG: return inputArray.mean(poolDim); case SUM: return inputArray.sum(poolDim); case PNORM: //P norm: https://arxiv.org/pdf/1311.1780.pdf //out = (1/N * sum( |in| ^ p) ) ^ (1/p) int pnorm = layerConf().getPnorm(); INDArray abs = Transforms.abs(inputArray, true); Transforms.pow(abs, pnorm, false); INDArray pNorm = abs.sum(poolDim); return Transforms.pow(pNorm, 1.0 / pnorm, false); default: throw new RuntimeException("Unknown or not supported pooling type: " + poolingType + " " + layerId()); } }
/**
 * Scores the network over the full iterator using the sum of element-wise relative errors,
 * |prediction - label| / label, optionally averaged over the number of examples seen.
 *
 * @param network the network to evaluate
 * @return the summed (or example-averaged, when {@code average} is set) relative error
 */
@Override
public double calculateScore(MultiLayerNetwork network) {
    dataSetIterator.reset();
    double lossTotal = 0.0;
    int exampleCount = 0;
    while (dataSetIterator.hasNext()) {
        DataSet batch = dataSetIterator.next();
        if (batch == null) {
            break; // iterator exhausted early
        }
        long batchSize = batch.getFeatures().size(0);
        INDArray predictions = network.output(batch.getFeatures(), false);
        INDArray labels = batch.getLabels();
        // Element-wise relative error; summed over every element of the batch.
        INDArray relativeError = Transforms.abs(predictions.sub(labels)).div(labels);
        exampleCount += batchSize;
        lossTotal += relativeError.sumNumber().doubleValue();
    }
    return average ? lossTotal / exampleCount : lossTotal;
}
// Take |gradient| in place — safe here because the original gradient values aren't used again later.
Transforms.abs(gradient,false);
// Presumably accumulates u = element-wise max(u, |gradient|) over u's full length — confirm Max op semantics.
Nd4j.getExecutioner().exec(new Max(u,gradient,u,u.length()));