/**
 * Terminal operation: counts the elements produced by the pipeline.
 *
 * <p>The pipeline is first handed to the terminator for optimization (e.g. so the
 * count can potentially be pushed down), then materialized as a DoubleStream and
 * counted.
 *
 * @param info                 optimizer metadata; must not be null
 * @param sqlStreamTerminator  terminator used to optimize the pipeline; must not be null
 * @param pipeline             the double pipeline to count; must not be null
 * @return the number of elements in the (optimized) pipeline
 */
@Override
public <T> long apply(
        final SqlStreamOptimizerInfo<ENTITY> info,
        final SqlStreamTerminator<ENTITY> sqlStreamTerminator,
        final DoublePipeline pipeline) {
    requireNonNull(info);
    requireNonNull(sqlStreamTerminator);
    requireNonNull(pipeline);
    // Let the terminator optimize the pipeline before materializing the count.
    return sqlStreamTerminator.optimize(pipeline).getAsDoubleStream().count();
}
/**
 * Counts the elements of the given pipeline after optimizing it.
 *
 * @param pipeline the double pipeline to count; must not be null
 * @return the number of elements in the optimized pipeline
 */
default long count(DoublePipeline pipeline) {
    requireNonNull(pipeline);
    // Optimize first, then materialize the stream and count it.
    return optimize(pipeline).getAsDoubleStream().count();
}
/**
 * Verifies that {@code Stats} agrees with the equivalent {@code DoubleStream} and
 * {@code DoubleSummaryStatistics} computations for every dataset in ALL_MANY_VALUES.
 */
public void testEquivalentStreams() {
    // For datasets of many double values created from an array, we test many combinations of finite
    // and non-finite values:
    for (ManyValues values : ALL_MANY_VALUES) {
        double[] array = values.asArray();
        Stats stats = Stats.of(array);
        // instance methods on Stats vs on instance methods on DoubleStream
        assertThat(stats.count()).isEqualTo(stream(array).count());
        assertEquivalent(stats.mean(), stream(array).average().getAsDouble());
        assertEquivalent(stats.sum(), stream(array).sum());
        assertEquivalent(stats.max(), stream(array).max().getAsDouble());
        assertEquivalent(stats.min(), stream(array).min().getAsDouble());
        // static method on Stats vs on instance method on DoubleStream
        assertEquivalent(Stats.meanOf(array), stream(array).average().getAsDouble());
        // instance methods on Stats vs instance methods on DoubleSummaryStatistics
        DoubleSummaryStatistics streamStats = stream(array).summaryStatistics();
        assertThat(stats.count()).isEqualTo(streamStats.getCount());
        assertEquivalent(stats.mean(), streamStats.getAverage());
        assertEquivalent(stats.sum(), streamStats.getSum());
        assertEquivalent(stats.max(), streamStats.getMax());
        assertEquivalent(stats.min(), streamStats.getMin());
    }
}
/**
 * Counts the elements of the underlying stream.
 *
 * @return the element count reported by the wrapped stream
 */
@Override
public long count() {
    // Delegate the terminal operation to the wrapped stream.
    return this.stream.count();
}
/**
 * Counts the elements of the underlying stream.
 *
 * @return the element count reported by the wrapped stream
 */
@Override
public long count() {
    // Plain delegation; the wrapped stream performs the terminal operation.
    return this.stream.count();
}
@Override public long count() { // This is a terminal operation return evalAndclose(() -> stream.count()); }
/**
 * Returns the fraction of values that are less than or equal to the threshold.
 *
 * @param values the sample to evaluate
 * @return the proportion of entries at or below {@code threshold}
 *         (NaN when {@code values} is empty, since 0/0 is NaN)
 * @throws MathIllegalArgumentException per the statistic's contract
 */
@Override
public double evaluate(double[] values) throws MathIllegalArgumentException {
    final long atOrBelow = Arrays.stream(values)
        .filter(v -> v <= threshold)
        .count();
    return atOrBelow / (double) values.length;
}
/**
 * Computes the relative frequency of each level within the input array.
 *
 * <p>For each level, the fraction of entries in {@code input} exactly equal to that
 * level (by {@code ==} comparison) is returned.
 *
 * @param input  the data to scan
 * @param levels the distinct values whose frequencies are wanted
 * @return an array, parallel to {@code levels}, of fractions in [0, 1]
 *         (NaN entries when {@code input} is empty)
 */
private static double[] count(double[] input, List<Double> levels) {
    final int n = input.length;
    final double[] fractions = new double[levels.size()];
    for (int i = 0; i < fractions.length; i++) {
        final double level = levels.get(i);
        // Exact match count, normalized in the same step.
        fractions[i] = Arrays.stream(input).filter(v -> v == level).count() / (double) n;
    }
    return fractions;
}
/**
 * Terminal operation: counts the elements produced by the pipeline.
 *
 * @param info                 optimizer metadata; must not be null
 * @param sqlStreamTerminator  terminator used to optimize the pipeline; must not be null
 * @param pipeline             the double pipeline to count; must not be null
 * @return the number of elements in the (optimized) pipeline
 */
@Override
public <T> long apply(
        final SqlStreamOptimizerInfo<ENTITY> info,
        final SqlStreamTerminator<ENTITY> sqlStreamTerminator,
        final DoublePipeline pipeline) {
    requireNonNull(info);
    requireNonNull(sqlStreamTerminator);
    requireNonNull(pipeline);
    // Optimization happens before the stream is materialized and counted.
    return sqlStreamTerminator.optimize(pipeline).getAsDoubleStream().count();
}
/**
 * Returns the percentage of entries in the window {@code [begin, begin + length)}
 * that are strictly greater than the threshold.
 *
 * <p>Bug fix: the previous implementation iterated {@code IntStream.range(begin, length)},
 * which only covers the intended window when {@code begin == 0}; the exclusive upper
 * bound must be {@code begin + length}. The local was also renamed from the misleading
 * {@code below} — the filter keeps values strictly above the threshold.
 *
 * @param values the sample array
 * @param begin  index of the first entry of the window
 * @param length number of entries in the window
 * @return 100 * (count of entries above {@code threshold}) / {@code length}
 *         (NaN when {@code length} is 0, since 0.0/0 is NaN)
 * @throws MathIllegalArgumentException per the statistic's contract
 */
@Override
public double evaluate(double[] values, int begin, int length) throws MathIllegalArgumentException {
    long above = IntStream.range(begin, begin + length)
        .mapToDouble(i -> values[i])
        .filter(val -> val > threshold)
        .count();
    return 100.0 * above / length;
}
/**
 * Counts the elements of the given pipeline after optimizing it.
 *
 * @param pipeline the double pipeline to count; must not be null
 * @return the number of elements in the optimized pipeline
 */
default long count(DoublePipeline pipeline) {
    requireNonNull(pipeline);
    // Optimize first, then materialize the stream and count it.
    return optimize(pipeline).getAsDoubleStream().count();
}
/** * Creates a a weighted linear regression. * @param x the independent variable * @param y the dependent variable * @param weights the weights to apply to x and y * @return */ private static SimpleRegression createWeightedLinearRegression( final double[] x, final double[] y, final double[] weights) { final double[] weightedX = new double[x.length]; final double[] weightedY = new double[y.length]; final long numZeroWeights = Arrays.stream(weights).filter(weight -> weight <= 0).count(); for (int i = 0; i < x.length; i++) { if (numZeroWeights >= 0.4 * weights.length) { // See: http://www.ncsu.edu/crsc/events/ugw07/Presentations/Crooks_Qiao/Crooks_Qiao_Alt_Presentation.pdf weightedX[i] = Math.sqrt(weights[i]) * x[i]; weightedY[i] = Math.sqrt(weights[i]) * y[i]; } else { weightedX[i] = x[i]; weightedY[i] = y[i]; } } return createLinearRegression(weightedX, weightedY); }
/**
 * Builds the human-readable report for the Kolmogorov-Smirnoff 1-sample test:
 * sample size, tie count (with a warning when ties make p-values inexact),
 * the reference distribution, the D statistic, and the p-value.
 */
@Override
public String summary() {
    StringBuilder sb = new StringBuilder();
    sb.append("\n > Kolmogorov-Smirnoff 1-sample test\n");
    // Ties = number of duplicate values in the sample (rows minus distinct values).
    int ties = (int) (v.rowCount() - v.stream().mapToDouble().distinct().count());
    sb.append(String.format("sample size: %d, ties: %d\n", v.rowCount(), ties));
    if (ties > 0)
        sb.append(" (warning: p-values will not be exact because of ties)\n");
    sb.append(String.format("densities: %s\n", cdf.name()));
    sb.append("D statistic: ").append(floatFlex(D)).append("\n");
    sb.append("p-value: ").append(floatFlex(pValue)).append(" ").append(Format.pValueStars(pValue)).append("\n");
    sb.append("\n");
    return sb.toString();
}
}
/**
 * Builds the human-readable report for the Kolmogorov-Smirnoff 2-sample test:
 * both sample sizes and tie counts (with a warning when ties make p-values
 * inexact), the D statistic, and the p-value.
 */
@Override
public String summary() {
    StringBuilder sb = new StringBuilder();
    sb.append("\n > Kolmogorov-Smirnoff 2-sample test\n");
    // Ties per sample = number of duplicate values (rows minus distinct values).
    int ties1 = (int) (v1.rowCount() - v1.stream().mapToDouble().distinct().count());
    int ties2 = (int) (v2.rowCount() - v2.stream().mapToDouble().distinct().count());
    sb.append(String.format("first sample size: %d, ties: %d\n", v1.rowCount(), ties1));
    sb.append(String.format("second sample size: %d, ties: %d\n", v2.rowCount(), ties2));
    if (ties1 + ties2 > 0)
        sb.append(" (warning: p-values will not be exact because of ties)\n");
    sb.append(String.format("D statistic: %.6f\n", D));
    sb.append(String.format("p-value: %.16f %s\n", pValue, Format.pValueStars(pValue)));
    sb.append("\n");
    return sb.toString();
}
}
/**
 * Counts the elements of the stream, running the terminal operation inside the
 * context's fork-join pool when one is configured.
 *
 * @return the element count
 */
@Override
public long count() {
    // Without a configured pool, evaluate directly on the calling thread.
    if (context.fjp == null) {
        return stream().count();
    }
    // Otherwise run the terminal operation through the context's pool.
    return context.terminate(stream()::count);
}
/**
 * Scores an entry against a set of search keywords.
 *
 * <p>Each keyword receives one combined score: the weighted sum of its field
 * scores over title, username, notes, tags and URL. The overall rank is the sum
 * of the per-keyword scores; keywords that score exactly 0 everywhere are
 * counted as unrelated.
 *
 * @param entry    the entry being ranked
 * @param keywords the search keywords
 */
public EntryQueryRelevance(Entry entry, String[] keywords) {
    this.entry = entry;
    // One combined, weighted score per keyword across all searchable fields.
    double[] keywordScores = new double[keywords.length];
    for (int i = 0; i < keywords.length; i++) {
        String keyword = keywords[i];
        keywordScores[i] =
            fieldScore(entry.getTitle(), keyword) * WEIGHT_KW_IN_TITLE
                + fieldScore(entry.getUsername(), keyword) * WEIGHT_KW_IN_USERNAME
                + fieldScore(entry.getNotes(), keyword) * WEIGHT_KW_IN_NOTES
                + fieldScore(entry.getTags(), keyword) * WEIGHT_KW_IN_TAGS
                + fieldScore(entry.getUrl(), keyword) * WEIGHT_KW_IN_URL;
    }
    // Keep DoubleStream.sum() so the summation semantics match the original exactly.
    rank = Arrays.stream(keywordScores).sum();
    int zeroScored = 0;
    for (double score : keywordScores) {
        if (score == 0) {
            zeroScored++;
        }
    }
    unrelatedKeywords = zeroScored;
}
/**
 * Verifies the contract of an empty time series: no size, no dates, empty lookups,
 * equality with an empty builder result, and empty date/value streams.
 */
public void test_emptySeries() {
    LocalDateDoubleTimeSeries test = LocalDateDoubleTimeSeries.empty();
    assertEquals(test.isEmpty(), true);
    assertEquals(test.size(), 0);
    // No date is contained in an empty series.
    assertEquals(test.containsDate(DATE_2010_01_01), false);
    assertEquals(test.containsDate(DATE_2011_01_01), false);
    assertEquals(test.containsDate(DATE_2012_01_01), false);
    // Lookups on an empty series yield empty optionals.
    assertEquals(test.get(DATE_2010_01_01), OptionalDouble.empty());
    assertEquals(test.get(DATE_2011_01_01), OptionalDouble.empty());
    assertEquals(test.get(DATE_2012_01_01), OptionalDouble.empty());
    // An empty builder must produce an equal (empty) series.
    assertEquals(test, LocalDateDoubleTimeSeries.builder().putAll(dates(), values()).build());
    // Both streams are empty.
    assertEquals(test.dates().count(), 0);
    assertEquals(test.values().count(), 0);
}
/**
 * With a flat implied volatility surface, the local volatility recovered by the
 * implied trinomial tree should also be flat at the same level (within tolerance).
 */
public void flatVolTest() {
    double tol = 2.0e-2;
    double constantVol = 0.15;
    ConstantSurface impliedVolSurface = ConstantSurface.of("impliedVol", constantVol);
    // Flat zero-rate curves, written as lambdas instead of anonymous classes.
    Function<Double, Double> zeroRate = x -> 0.05d;
    Function<Double, Double> zeroRate1 = x -> 0.02d;
    ImpliedTrinomialTreeLocalVolatilityCalculator calc =
        new ImpliedTrinomialTreeLocalVolatilityCalculator(45, 1d, INTERP_TIMESQ_LINEAR);
    InterpolatedNodalSurface localVolSurface =
        calc.localVolatilityFromImpliedVolatility(impliedVolSurface, 100d, zeroRate, zeroRate1);
    // Every node of the recovered surface must match the flat input vol within tol.
    assertEquals(
        localVolSurface.getZValues().stream().filter(d -> !DoubleMath.fuzzyEquals(d, constantVol, tol)).count(),
        0);
}
/**
 * Verifies that {@code Stats} agrees with the equivalent {@code DoubleStream} and
 * {@code DoubleSummaryStatistics} computations for every dataset in ALL_MANY_VALUES.
 */
public void testEquivalentStreams() {
    // For datasets of many double values created from an array, we test many combinations of finite
    // and non-finite values:
    for (ManyValues values : ALL_MANY_VALUES) {
        double[] array = values.asArray();
        Stats stats = Stats.of(array);
        // instance methods on Stats vs on instance methods on DoubleStream
        assertThat(stats.count()).isEqualTo(stream(array).count());
        assertEquivalent(stats.mean(), stream(array).average().getAsDouble());
        assertEquivalent(stats.sum(), stream(array).sum());
        assertEquivalent(stats.max(), stream(array).max().getAsDouble());
        assertEquivalent(stats.min(), stream(array).min().getAsDouble());
        // static method on Stats vs on instance method on DoubleStream
        assertEquivalent(Stats.meanOf(array), stream(array).average().getAsDouble());
        // instance methods on Stats vs instance methods on DoubleSummaryStatistics
        DoubleSummaryStatistics streamStats = stream(array).summaryStatistics();
        assertThat(stats.count()).isEqualTo(streamStats.getCount());
        assertEquivalent(stats.mean(), streamStats.getAverage());
        assertEquivalent(stats.sum(), streamStats.getSum());
        assertEquivalent(stats.max(), streamStats.getMax());
        assertEquivalent(stats.min(), streamStats.getMin());
    }
}
/**
 * With a price surface generated from a flat implied volatility, the local
 * volatility recovered from prices should also be flat at the same level
 * (within tolerance).
 */
public void flatVolPriceTest() {
    double tol = 2.0e-2;
    double constantVol = 0.15;
    double spot = 100d;
    double maxTime = 1d;
    int nSteps = 9;
    ConstantSurface impliedVolSurface = ConstantSurface.of("impliedVol", constantVol);
    // Zero interest rate, written as a lambda instead of an anonymous class.
    Function<Double, Double> zeroRate = x -> 0d;
    // Black call price at each surface point under the flat vol
    // (presumably x.first = expiry, x.second = strike — matches the price() argument order).
    Function<DoublesPair, ValueDerivatives> priceFunction = x -> {
        double price = BlackFormulaRepository.price(spot, x.getSecond(), x.getFirst(), constantVol, true);
        return ValueDerivatives.of(price, DoubleArray.EMPTY);
    };
    DeformedSurface priceSurface =
        DeformedSurface.of(DefaultSurfaceMetadata.of("price"), impliedVolSurface, priceFunction);
    ImpliedTrinomialTreeLocalVolatilityCalculator calc =
        new ImpliedTrinomialTreeLocalVolatilityCalculator(nSteps, maxTime, INTERP_TIMESQ_LINEAR);
    InterpolatedNodalSurface localVolSurface =
        calc.localVolatilityFromPrice(priceSurface, spot, zeroRate, zeroRate);
    // Every node of the recovered surface must match the flat input vol within tol.
    assertEquals(
        localVolSurface.getZValues().stream().filter(d -> !DoubleMath.fuzzyEquals(d, constantVol, tol)).count(),
        0);
}