/**
 * Restores this function's parameters from the given vector, which must
 * contain exactly three elements: amplitude, frequency, and phase.
 *
 * @param parameters Three-dimensional vector of (amplitude, frequency, phase).
 */
public void convertFromVector(
    Vector parameters)
{
    final int dimensionality = parameters.getDimensionality();
    if (dimensionality != 3)
    {
        throw new IllegalArgumentException(
            "Expected three parameters: amplitude, frequency, phase");
    }

    // Unpack in declaration order: amplitude, frequency, phase.
    this.amplitude = parameters.getElement(0);
    this.frequency = parameters.getElement(1);
    this.phase = parameters.getElement(2);
}
@Override final public Vector minus( final Vector v) { // I need to flip this so that if it the input is a dense vector, I // return a dense vector. If it's a sparse vector, then a sparse vector // is still returned. Vector result = v.clone(); result.negativeEquals(); result.plusEquals(this); return result; }
@Override public Vector computeLocalWeights( final Vector counts) { // Compute the local weights. final Vector result = super.computeLocalWeights(counts); final int dimensionality = result.getDimensionality(); if (dimensionality != 0) { final double average = counts.norm1() / dimensionality; final double divisor = Math.log(1.0 + average); result.scaleEquals(1.0 / divisor); } return result; }
@Override final public Vector plus( final Vector v) { // I need to flip this so that if it the input is a dense vector, I // return a dense vector. If it's a sparse vector, then a sparse vector // is still returned. Vector result = v.clone(); result.plusEquals(this); return result; }
/**
 * Computes the conjugate-gradient scale factor (beta) as
 * (g_k - g_{k-1})^T g_k / ||g_{k-1}||^2 (Polak–Ribière form).
 *
 * @param gradientCurrent Current gradient, g_k.
 * @param gradientPrevious Previous gradient, g_{k-1}.
 * @return The scale factor beta.
 */
protected double computeScaleFactor(
    Vector gradientCurrent,
    Vector gradientPrevious)
{
    final Vector gradientDelta = gradientCurrent.minus(gradientPrevious);
    final double numerator = gradientDelta.dotProduct(gradientCurrent);
    final double denominator = gradientPrevious.norm2Squared();
    return numerator / denominator;
}
// NOTE(review): these are non-contiguous fragments of a regularized
// least-squares / factorization-machine style coordinate update — the
// enclosing method, loops, and the declarations of `prediction`, `i`, `j`,
// `oldBias`, `biasChange`, `weightChange`, `oldFactor`, `factorRow`,
// `factorTimesInput`, `weights`, and `errors` are outside this view.
// `derivative` and `sumOfSquares` are declared twice below, confirming the
// lines were stitched from separate excerpts — TODO recover full method.
final double actual = example.getOutput();
final double error = actual - prediction; // residual for example i
errors.set(i, error);
// Closed-form regularized bias update: blend old bias with the error sum.
final double newBias = (oldBias * this.dataSize + errors.sum()) / (this.dataSize + this.biasRegularization);
this.result.setBias(newBias);
errors.increment(i, biasChange); // fold the bias change back into the residuals
final double oldWeight = weights.getElement(j);
final Vector inputs = this.inputsTransposed.get(j); // column j across all examples
final double sumOfSquares = derivative.norm2Squared();
// Regularized coordinate update for weight j; 0 if the direction is degenerate.
final double newWeight = sumOfSquares == 0.0 ? 0.0 : (oldWeight * sumOfSquares + derivative.dot(errors)) / (sumOfSquares + this.weightRegularization);
weights.set(j, newWeight);
errors.scaledPlusEquals(weightChange, inputs); // update residuals for the weight change
this.totalChange += Math.abs(weightChange); // convergence monitor
factorTimesInput.set(i, this.dataList.get(i).getInput().dot(factorRow));
// Gradient of the pairwise factor term w.r.t. this factor entry.
final Vector derivative = inputs.dotTimes(factorTimesInput);
derivative.scaledMinusEquals(oldFactor, inputs.dotTimes(inputs));
final double sumOfSquares = derivative.norm2Squared();
/**
 * Computes the Viterbi recursion for a given "delta" and "b"
 * @param delta
 * Previous value of the Viterbi recursion.
 * @param bn
 * Current observation likelihood.
 * @return
 * Updated "delta" and state backpointers.
 */
protected Pair<Vector,int[]> computeViterbiRecursion(
    Vector delta,
    Vector bn)
{
    final int numStates = delta.getDimensionality();
    final Vector nextDelta =
        VectorFactory.getDefault().createVector(numStates);
    final int[] backPointers = new int[numStates];

    // For each destination state, record the most likely predecessor and
    // its weight.
    for (int state = 0; state < numStates; state++)
    {
        final WeightedValue<Integer> best =
            this.findMostLikelyState(state, delta);
        backPointers[state] = best.getValue();
        nextDelta.setElement(state, best.getWeight());
    }

    // Fold in the observation likelihoods, then renormalize so the
    // recursion stays numerically stable.
    nextDelta.dotTimesEquals(bn);
    nextDelta.scaleEquals(1.0 / nextDelta.norm1());

    return DefaultPair.create(nextDelta, backPointers);
}
/**
 * Restores this distribution's parameters from the given vector, laid out
 * as [ covarianceDivisor | mean (d elements) | inverse-Wishart (1 + d*d) ].
 *
 * @param parameters Parameter vector of dimensionality 1 + d + 1 + d*d.
 */
public void convertFromVector(
    Vector parameters)
{
    final int d = this.getInputDimensionality();
    parameters.assertDimensionalityEquals(1 + d + 1 + d*d);

    // Element 0 is the covariance divisor.
    this.setCovarianceDivisor(parameters.getElement(0));

    // Elements 1..d hold the Gaussian mean.
    this.gaussian.setMean(parameters.subVector(1, d));

    // The remainder parameterizes the inverse-Wishart distribution.
    this.inverseWishart.convertFromVector(
        parameters.subVector(d + 1, parameters.getDimensionality() - 1));
}
@Override
public Vector convertToVector()
{
    // Pack the weight vector followed by the bias into a single
    // parameter vector of size (inputDimensionality + 1).
    final int weightCount = this.getInputDimensionality();
    final Vector parameters =
        VectorFactory.getDefault().createVector(weightCount + 1);
    for (int i = 0; i < weightCount; i++)
    {
        parameters.setElement(i, this.weightVector.getElement(i));
    }
    parameters.setElement(weightCount, this.bias);
    return parameters;
}
// NOTE(review): non-contiguous fragments from what looks like a PLSA/EM
// update. The leading "*" terms are continuations of a product expression
// whose left-hand side is outside this view — TODO recover the full
// statements before editing.
* latent.pDocumentGivenLatent.getElement(i) * latent.pTermGivenLatent.getElement(j);
latent.pLatent = latent.pDocumentGivenLatent.sum();
* latent.pDocumentGivenLatent.getElement(i) * latent.pTermGivenLatent.getElement(j);
/**
 * Sets the entry value to the first underlying vector.
 *
 * @param value Entry value to the first underlying vector.
 */
public void setFirstValue(
    double value)
{
    // Write through to this entry's index in the first underlying vector.
    this.getFirstVector().setElement(this.getIndex(), value);
}
// NOTE(review): fragments of an HMM forward-algorithm (alpha) recursion;
// the declarations of `b` and `logLikelihood` and the enclosing
// observation loop are outside this view — TODO confirm against the full
// method before editing.
Vector alpha = this.getInitialProbability().clone();
Matrix A = this.getTransitionProbability(); // presumably applied in the elided recursion step
int index = 0;
alpha.dotTimesEquals(b); // weight by current observation likelihoods
final double weight = alpha.norm1();
alpha.scaleEquals(1.0/weight); // renormalize for numerical stability
logLikelihood += Math.log(weight); // accumulate the normalizer into the log-likelihood
index++;
@Override final public Vector dotTimes( final Vector v) { // By switch from this.dotTimes(v) to v.dotTimes(this), we get sparse // vectors dotted with dense still being sparse and dense w/ dense is // still dense. The way this was originally implemented in the Foundry // (this.clone().dotTimesEquals(v)), if v is sparse, it returns a // dense vector type storing sparse data. Vector result = v.clone(); result.dotTimesEquals(this); return result; }
/** * Evaluates the weighted Euclidean distance between two vectors. * * @param first * The first vector. * @param second * The second vector. * @return * The weighted Euclidean distance between the two vectors. */ @Override public double evaluate( final Vectorizable first, final Vectorizable second) { // \sqrt(\sum_i w_i * (x_i - y_i)^2) // First compute the difference between the two vectors. final Vector difference = first.convertToVector().minus(second.convertToVector()); // Now square it. difference.dotTimesEquals(difference); // Now compute the square root of the weights times the squared // difference. return Math.sqrt(this.weights.dotProduct(difference)); }
/**
 * Sets the initial guess ("x0")
 *
 * @param initialGuess the initial guess ("x0")
 */
@Override
final public void setInitialGuess(Vector initialGuess)
{
    // Defensive copy: later mutation of the caller's vector must not
    // change the stored starting point.
    this.x0 = initialGuess.clone();
}
public Vector computeLocalWeights( final Vector counts) { // Since the counts are positive, the 1-norm of them is their sum. final Vector result = this.vectorFactory.copyVector(counts); final double countSum = counts.norm1(); if (countSum != 0.0) { result.scaleEquals(1.0 / countSum); } return result; }
/**
 * Initializes the categorizer's weights from the first example: the input
 * scaled to unit length and signed by its category. A zero-norm input
 * provides no direction, so the target is left untouched.
 *
 * @param target The categorizer whose weights are initialized.
 * @param input The first example's input vector.
 * @param actualCategory The first example's category (+1 for true, -1 for false).
 */
@Override
protected void initialize(
    final LinearBinaryCategorizer target,
    final Vector input,
    final boolean actualCategory)
{
    final double norm = input.norm2();
    if (norm != 0.0)
    {
        final Vector weights = this.getVectorFactory().copyVector(input);
        final double actual = actualCategory ? +1.0 : -1.0;
        // Reuse the already-computed norm instead of recomputing
        // input.norm2() a second time (as the original code did).
        weights.scaleEquals(actual / norm);
        target.setWeights(weights);
    }
}
/**
 * Restores this object's single scalar parameter from the given vector.
 *
 * @param parameters One-dimensional vector holding the value.
 */
@Override public void convertFromVector( final Vector parameters) { parameters.assertDimensionalityEquals(1); this.value = parameters.getElement(0); }
/**
 * Computes the conjugate-gradient scale factor (beta) as
 * -( (g_k - g_{k-1})^T g_k ) / ( g_{k-1}^T d ), where d is the current
 * line-search direction.
 *
 * @param gradientCurrent Current gradient, g_k.
 * @param gradientPrevious Previous gradient, g_{k-1}.
 * @return The scale factor beta.
 */
@Override
protected double computeScaleFactor(
    Vector gradientCurrent,
    Vector gradientPrevious)
{
    final Vector searchDirection = this.lineFunction.getDirection();
    final Vector gradientDelta = gradientCurrent.minus(gradientPrevious);
    final double numerator = gradientDelta.dotProduct(gradientCurrent);
    final double denominator = gradientPrevious.dotProduct(searchDirection);
    return -numerator / denominator;
}