/**
 * Convenience factory for building a {@code DefaultWeightedValue} without
 * spelling out the generic type argument at the call site.
 *
 * @param <ValueType>
 *      The type of the value being weighted.
 * @param value
 *      The value to wrap.
 * @param weight
 *      The weight to associate with the value.
 * @return
 *      A freshly constructed weighted value holding the given pair.
 */
public static <ValueType> DefaultWeightedValue<ValueType> create(
    final ValueType value,
    final double weight)
{
    final DefaultWeightedValue<ValueType> result =
        new DefaultWeightedValue<ValueType>(value, weight);
    return result;
}
/**
 * Appends one weighted example to the stored collection of examples.
 *
 * @param value
 *      The example value to record.
 * @param weight
 *      The weight associated with that example.
 */
public void add(
    final InputType value,
    final double weight)
{
    final DefaultWeightedValue<InputType> example =
        DefaultWeightedValue.create(value, weight);
    this.examples.add(example);
}
/**
 * Multiplies every example weight stored in the given kernel binary
 * categorizer by the supplied factor, updating the examples in place.
 *
 * @param target
 *      The kernel binary categorizer whose example weights are rescaled.
 * @param scale
 *      The multiplicative factor applied to each weight.
 */
public static void scaleEquals(
    final DefaultKernelBinaryCategorizer<?> target,
    final double scale)
{
    for (DefaultWeightedValue<?> example : target.getExamples())
    {
        example.setWeight(scale * example.getWeight());
    }
}

}
/**
 * Creates a copy of this {@code WeightedValue}. The weight (a primitive)
 * is copied by {@code super.clone()}; the value is then replaced with the
 * result of {@code ObjectUtil.cloneSmart(this.getValue())}.
 *
 * NOTE(review): the previous documentation described this as a shallow
 * copy in which "the value is not" copied, but the code passes the value
 * through {@code ObjectUtil.cloneSmart} — presumably cloning it when the
 * type supports cloning. Confirm cloneSmart's semantics before relying
 * on shallow-copy behavior.
 *
 * @return A new copy of this {@code WeightedValue}.
 */
@Override
public DefaultWeightedValue<ValueType> clone()
{
    // super.clone() produces the field-for-field copy; the cast is safe
    // because clone() preserves the runtime class.
    @SuppressWarnings("unchecked")
    final DefaultWeightedValue<ValueType> clone =
        (DefaultWeightedValue<ValueType>) super.clone();

    // Replace the shared value reference with a smart-cloned one.
    clone.setValue(ObjectUtil.cloneSmart(this.getValue()));

    return clone;
}
/**
 * Learns a data distribution over parameters by importance sampling:
 * draws proposals from the updater, scores each with a log importance
 * weight, then converts the log weights to (unnormalized) masses.
 *
 * @param data
 *      The observations used to score each proposed parameter.
 * @return
 *      A data distribution whose mass at each sampled parameter is
 *      proportional to its importance weight.
 */
@Override public DataDistribution<ParameterType> learn( final Collection<? extends ObservationType> data) {
    // One weighted sample per proposal; weights are kept in log space
    // until the very end to avoid premature overflow/underflow.
    ArrayList<DefaultWeightedValue<ParameterType>> weightedSamples = new ArrayList<DefaultWeightedValue<ParameterType>>( this.getNumSamples());
    double maxWeight = Double.NEGATIVE_INFINITY;
    for( int n = 0; n < this.getNumSamples(); n++ )
    {
        ParameterType parameter = this.getUpdater().makeProposal(random);
        // Log importance weight = log-likelihood minus log proposal value.
        double ll = this.getUpdater().computeLogLikelihood(parameter, data);
        double lq = this.getUpdater().computeLogImportanceValue(parameter);
        double weight = ll - lq;
        // Track the running maximum log weight for the shift below.
        if( maxWeight < weight )
        {
            maxWeight = weight;
        }
        weightedSamples.add( new DefaultWeightedValue<ParameterType>( parameter, weight ) );
    }
    // Shift the reference point so that exp(weight - maxWeight) stays
    // finite: the largest resulting mass is about
    // Double.MAX_VALUE / (2 * numSamples), so even the sum of all
    // masses cannot overflow.
    maxWeight -= Math.log(Double.MAX_VALUE/ this.getNumSamples() / 2.0 );
    DataDistribution<ParameterType> retval = new DefaultDataDistribution<ParameterType>( this.getNumSamples());
    for( DefaultWeightedValue<ParameterType> weightedSample : weightedSamples )
    {
        // Convert the shifted log weight back to a positive mass.
        double mass = Math.exp(weightedSample.getWeight() - maxWeight);
        retval.increment( weightedSample.getValue(), mass );
    }
    return retval;
}
// NOTE(review): fragment — these statements are excerpted from more than
// one enclosing method ('value' is declared twice, which would not
// compile in a single scope). Documented as-is; do not treat as one unit.

// Store this dimension's feature, together with the output label, into
// the pre-allocated entry at the current index, then advance.
final double value = input.getElement(dimension);
DefaultWeightedValue<OutputType> entry = values.get(index);
entry.setWeight(value);
entry.setValue(output);
index++;

// Extremes of the stored weights — presumably 'values' has been sorted
// by weight before these reads; TODO confirm in the enclosing method.
final double smallestValue = values.get(0).getWeight();
final double largestValue = values.get(totalCount - 1).getWeight();

// Unpack a (label, numeric value) pair from a weighted value.
final OutputType label = valueLabel.getValue();
final double value = valueLabel.getWeight();
// NOTE(review): fragment — the enclosing method (and the declaration of
// 'prototype') is outside this view.

// Record a misclassified category whose margin difference is
// non-negative as a weighted error.
if (difference >= 0.0 && !actual.equals(category))
    errors.add(DefaultWeightedValue.create(category, difference));
differenceSum += difference;

// Look up the prototype for this category; NOTE(review): the result is
// discarded here — in the full source it is presumably assigned to
// 'prototype'. Then apply an error-proportional correction to the
// prototype's weights and bias.
target.getPrototypes().get(category.getValue());
final double errorWeight = category.getWeight() / differenceSum;
prototype.getWeights().minusEquals(input.scale(errorWeight));
prototype.setBias(prototype.getBias() - errorWeight);
// NOTE(review): fragment — the enclosing method and the declarations of
// 'support', 'supportsMap', and 'weight' are outside this view.

// Create a new support vector for training point i and register it
// under its index.
support = new DefaultWeightedValue<InputType>( this.getPoint(i), weight);
supportsMap.put(i, support);

// NOTE(review): looks redundant — the constructor above already set this
// weight; possibly excerpted from a separate update path. Confirm.
support.setWeight(weight);
/**
 * Computes the squared 2-norm of the weight vector implied by the given
 * kernel binary categorizer, avoiding the square root that a plain
 * 2-norm computation would require.
 *
 * @param <InputType>
 *      The type of input accepted by the categorizer.
 * @param target
 *      The kernel binary categorizer to measure.
 * @return
 *      The squared 2-norm of the categorizer's implicit weight vector
 *      under its kernel.
 */
public static <InputType> double norm2Squared(
    final DefaultKernelBinaryCategorizer<InputType> target)
{
    final double bias = target.getBias();
    double normSquared = 0.0;

    // ||w||^2 accumulates weight_i * (f(x_i) - bias) over the supports,
    // where f is the categorizer's (bias-inclusive) evaluation.
    for (DefaultWeightedValue<InputType> support : target.getExamples())
    {
        final double output =
            target.evaluateAsDouble(support.getValue()) - bias;
        normSquared += support.getWeight() * output;
    }

    return normSquared;
}
/** * Gets the alpha (weight) value for the given training example index. Note * that alpha values are stored as weights that incorporate the label as the * sign of the weight. That is, weight = y * alpha where y is either +1 or * -1. * * @param i * The training example index. Must be between 0 and dataSize - 1. * @return * The current alpha value for i. */ private double getAlpha( final int i) { final DefaultWeightedValue<InputType> support = this.supportsMap.get(i); if (support == null) { // Not a support, so the alpha value is zero. return 0.0; } else { // The weight is the label (+1 or -1) times alpha. Alpha is always // greater than zero, so we just take the absolute value of the // weight to get it. return Math.abs(support.getWeight()); } }
/**
 * Creates a new instance of {@code WeightedValue} with the given value
 * and weight.
 *
 * @param value The value.
 * @param weight The weight.
 */
public DefaultWeightedValue(
    final ValueType value,
    final double weight)
{
    // The superclass stores the weight; the value is stored locally.
    super(weight);
    this.setValue(value);
}
/**
 * Re-weights the cached weighted values using this task's column of the
 * gamma responsibilities, re-fits the distribution learner on them, and
 * returns the learned distribution's probability function.
 *
 * @return
 *      The probability function of the distribution learned from the
 *      re-weighted values.
 */
public ProbabilityFunction<ObservationType> call()
{
    final int count = this.gammas.size();
    for (int i = 0; i < count; i++)
    {
        // Each value's new weight is the gamma entry for this
        // component's index.
        final double gamma = this.gammas.get(i).getElement(this.index);
        this.weightedValues.get(i).setWeight(gamma);
    }

    return this.distributionLearner.learn(this.weightedValues)
        .getProbabilityFunction();
}
/**
 * Learns a data distribution over parameters via importance sampling:
 * proposals are drawn from the updater, each is assigned a log
 * importance weight (log-likelihood minus log proposal value), and the
 * log weights are finally converted to unnormalized masses.
 *
 * @param data
 *      The observations used to score each proposed parameter.
 * @return
 *      A data distribution whose mass at each sampled parameter is
 *      proportional to its importance weight.
 */
@Override
public DataDistribution<ParameterType> learn(
    final Collection<? extends ObservationType> data)
{
    ArrayList<DefaultWeightedValue<ParameterType>> samples =
        new ArrayList<DefaultWeightedValue<ParameterType>>(
            this.getNumSamples());

    // Largest log weight seen so far; used below to shift into a range
    // where Math.exp cannot overflow.
    double logWeightMax = Double.NEGATIVE_INFINITY;

    for (int i = 0; i < this.getNumSamples(); i++)
    {
        ParameterType proposal = this.getUpdater().makeProposal(random);
        double logLikelihood =
            this.getUpdater().computeLogLikelihood(proposal, data);
        double logImportance =
            this.getUpdater().computeLogImportanceValue(proposal);
        double logWeight = logLikelihood - logImportance;

        if (logWeight > logWeightMax)
        {
            logWeightMax = logWeight;
        }

        samples.add(new DefaultWeightedValue<ParameterType>(
            proposal, logWeight));
    }

    // Shift the reference so exp(logWeight - logWeightMax) tops out near
    // Double.MAX_VALUE / (2 * numSamples), keeping the total mass finite.
    logWeightMax -= Math.log(Double.MAX_VALUE / this.getNumSamples() / 2.0);

    DataDistribution<ParameterType> result =
        new DefaultDataDistribution<ParameterType>(this.getNumSamples());
    for (DefaultWeightedValue<ParameterType> sample : samples)
    {
        double sampleMass = Math.exp(sample.getWeight() - logWeightMax);
        result.increment(sample.getValue(), sampleMass);
    }
    return result;
}
// NOTE(review): fragment — these statements are excerpted from more than
// one enclosing method ('value' is declared twice, which would not
// compile in a single scope). Documented as-is; do not treat as one unit.

// Store this dimension's feature, together with the output label, into
// the pre-allocated entry at the current index, then advance.
final double value = input.getElement(dimension);
DefaultWeightedValue<OutputType> entry = values.get(index);
entry.setWeight(value);
entry.setValue(output);
index++;

// Extremes of the stored weights — presumably 'values' has been sorted
// by weight before these reads; TODO confirm in the enclosing method.
final double smallestValue = values.get(0).getWeight();
final double largestValue = values.get(totalCount - 1).getWeight();

// Unpack a (label, numeric value) pair from a weighted value.
final OutputType label = valueLabel.getValue();
final double value = valueLabel.getWeight();
// NOTE(review): fragment — the enclosing method (and the declaration of
// 'prototype') is outside this view.

// Record a misclassified category whose margin difference is
// non-negative as a weighted error.
if (difference >= 0.0 && !actual.equals(category))
    errors.add(DefaultWeightedValue.create(category, difference));
differenceSum += difference;

// Look up the prototype for this category; NOTE(review): the result is
// discarded here — in the full source it is presumably assigned to
// 'prototype'. Then apply an error-proportional correction to the
// prototype's weights and bias.
target.getPrototypes().get(category.getValue());
final double errorWeight = category.getWeight() / differenceSum;
prototype.getWeights().minusEquals(input.scale(errorWeight));
prototype.setBias(prototype.getBias() - errorWeight);
/**
 * Scales all of the weights in the given kernel binary categorizer by
 * the given value, updating each stored example in place.
 *
 * @param target
 *      The kernel binary categorizer to update the weights on.
 * @param scale
 *      The scale to apply to all the weights.
 */
public static void scaleEquals(
    final DefaultKernelBinaryCategorizer<?> target,
    final double scale)
{
    // Multiply every example's weight by the scale factor, in place.
    for (DefaultWeightedValue<?> example : target.getExamples())
    {
        final double oldWeight = example.getWeight();
        final double newWeight = scale * oldWeight;
        example.setWeight(newWeight);
    }
}

}