/**
 * Records completion of an append request to the member.
 *
 * @param time The time in milliseconds for the append.
 */
public void completeAppend(long time) {
  // Record the observed latency, then drop the in-flight counter.
  timeStats.addValue(time);
  appending--;
}
/**
 * {@inheritDoc}
 *
 * <p>Declared {@code synchronized} so concurrent writers cannot interleave
 * updates to the superclass's statistics state.
 */
@Override
public synchronized void addValue(double v) {
  super.addValue(v);
}
/**
 * Adds a reply time to the history.
 *
 * <p>Simplified: the previous version did a {@code get} plus null-check before
 * calling {@code computeIfAbsent}, which is redundant — {@code computeIfAbsent}
 * already returns the existing statistics window when one is present, and
 * atomically creates it otherwise.
 *
 * @param type the message type
 * @param replyTime the reply time to add to the history
 */
private void addReplyTime(String type, long replyTime) {
  replySamples
      .computeIfAbsent(type, t -> new SynchronizedDescriptiveStatistics(WINDOW_SIZE))
      .addValue(replyTime);
}
@Test
public void testTableSampleBernoulli() {
  DescriptiveStatistics sampledRatios = new DescriptiveStatistics();
  int totalRows =
      computeExpected("SELECT orderkey FROM orders", ImmutableList.of(BIGINT))
          .getMaterializedRows()
          .size();
  for (int trial = 0; trial < 100; trial++) {
    List<MaterializedRow> rows =
        computeActual("SELECT orderkey FROM orders TABLESAMPLE BERNOULLI (50)")
            .getMaterializedRows();
    // Bernoulli sampling must never emit the same row twice.
    assertEquals(rows.size(), ImmutableSet.copyOf(rows).size(), "TABLESAMPLE produced duplicate rows");
    sampledRatios.addValue(rows.size() * 1.0 / totalRows);
  }
  // The observed sampling rate should concentrate around the requested 50%.
  double mean = sampledRatios.getGeometricMean();
  assertTrue(mean > 0.45 && mean < 0.55, format("Expected mean sampling rate to be ~0.5, but was %s", mean));
}
/**
 * Builds descriptive statistics over the numeric values of the given history nodes.
 *
 * @param selectedLists nodes whose values are aggregated
 * @return statistics populated from every node's value
 * @throws ClassCastException if a node's value is not a {@link Number}
 */
private DescriptiveStatistics getDescriptiveStatistics(
    final LinkedList<InMemoryHistoryNode> selectedLists) throws ClassCastException {
  final DescriptiveStatistics result = new DescriptiveStatistics();
  for (final InMemoryHistoryNode historyNode : selectedLists) {
    final double numericValue = ((Number) historyNode.getValue()).doubleValue();
    result.addValue(numericValue);
  }
  return result;
}
// NOTE(review): fragment of a timing harness — `test`, `start`, `stats`,
// `scaleFactor` and `post` are defined outside this excerpt; the `if` body
// also closes outside it.
test.run();
long end = System.nanoTime();
// Record elapsed time scaled to the reporting unit (presumably ns -> ms/us;
// confirm against scaleFactor's definition).
stats.addValue((double)(end - start) / scaleFactor);
// Optional post-run hook, skipped when not configured.
if (post != null) {
  post.run();
/**
 * Report a new heart beat for the specified node id.
 *
 * @param arrivalTime arrival time
 */
public void report(long arrivalTime) {
  checkArgument(arrivalTime >= 0, "arrivalTime must not be negative");
  // Sample the gap since the previous heartbeat, then advance the high-water mark.
  long previousHeartbeat = history.latestHeartbeatTime();
  long interArrivalGap = arrivalTime - previousHeartbeat;
  history.samples().addValue(interArrivalGap);
  history.setLatestHeartbeatTime(arrivalTime);
}
/**
 * Add a result to this aggregate result.
 *
 * @param time Time in nanoseconds
 * @param span Span. To be kept if the time taken was over 1 second
 */
public void addResult(long time, Span span) {
  if (span != null) {
    // Record the latency in milliseconds.
    stats.addValue(TimeUnit.NANOSECONDS.toMillis(time));
    // Keep the trace only when the operation took at least one full second.
    if (TimeUnit.NANOSECONDS.toSeconds(time) >= 1) {
      traces.add(span.getTracerId());
    }
  }
}
/**
 * Aggregates the requested per-column metric (skew, kurtosis or cardinality)
 * over every usable numeric column.
 *
 * @param st which metric to aggregate
 * @return aggregate statistics, or NA when no numeric column exists
 */
private SimpleStats calculateStats(StatsType st) {
  if (!isAnyNumeric()) {
    return SimpleStats.na();
  }
  DescriptiveStatistics accumulated = new DescriptiveStatistics();
  for (ColMeta column : _cols) {
    // Skip ignored, response, and non-numeric columns.
    if (column._ignored || column._response || !column._isNumeric) {
      continue;
    }
    double metric;
    switch (st) {
      case Skewness:
        metric = column._skew;
        break;
      case Kurtosis:
        metric = column._kurtosis;
        break;
      case Cardinality:
        metric = column._cardinality;
        break;
      default:
        throw new IllegalStateException("Unsupported type " + st);
    }
    accumulated.addValue(metric);
  }
  return SimpleStats.from(accumulated);
}
/** * Normalize (standardize) the sample, so it is has a mean of 0 and a standard deviation of 1. * * @param sample Sample to normalize. * @return normalized (standardized) sample. * @since 2.2 */ public static double[] normalize(final double[] sample) { DescriptiveStatistics stats = new DescriptiveStatistics(); // Add the data from the series to stats for (int i = 0; i < sample.length; i++) { stats.addValue(sample[i]); } // Compute mean and standard deviation double mean = stats.getMean(); double standardDeviation = stats.getStandardDeviation(); // initialize the standardizedSample, which has the same length as the sample double[] standardizedSample = new double[sample.length]; for (int i = 0; i < sample.length; i++) { // z = (x- mean)/standardDeviation standardizedSample[i] = (sample[i] - mean) / standardDeviation; } return standardizedSample; }
// NOTE(review): fragment of a compile-benchmark loop — `dur`, `i`, `WARMUP`
// and `stats` are defined outside this excerpt; the `if` body closes outside it.
System.out.printf("Compile time = %dms%n", dur);
// Only record timings once the warm-up iterations are done.
if (i>=WARMUP) {
  stats.addValue((double) dur);
@Test(dataProvider = "provideStandardErrors")
public void testMultiplePositions(double maxStandardError) {
  DescriptiveStatistics relativeErrors = new DescriptiveStatistics();
  for (int trial = 0; trial < 500; ++trial) {
    // Random cardinality in [1, max], with ~1.5x as many total values.
    int uniques = ThreadLocalRandom.current().nextInt(getUniqueValuesCount()) + 1;
    List<Object> sample = createRandomSample(uniques, (int) (uniques * 1.5));
    long estimated = estimateGroupByCount(sample, maxStandardError);
    double relativeError = (estimated - uniques) * 1.0 / uniques;
    relativeErrors.addValue(relativeError);
  }
  // Estimator should be unbiased and stay within the requested error bound.
  assertLessThan(relativeErrors.getMean(), 1.0e-2);
  assertLessThan(relativeErrors.getStandardDeviation(), 1.0e-2 + maxStandardError);
}
/**
 * Records a single sample by delegating to the backing
 * {@code DescriptiveStatistics} instance.
 */
@Override
public void addValue(double value) {
  descStats.addValue(value);
}
/**
 * Aggregates the usable entries of the given list into descriptive statistics.
 *
 * @param avg values to aggregate; {@code null} and NaN entries are skipped
 * @return statistics over the remaining values
 */
public DescriptiveStatistics getStats(List<Double> avg) {
  DescriptiveStatistics result = new DescriptiveStatistics();
  for (Double value : avg) {
    // Only finite, present values contribute to the aggregate.
    if (value != null && !Double.isNaN(value)) {
      result.addValue(value);
    }
  }
  return result;
}
}
/**
 * Records {@code val} in the statistics bucket selected by {@code depth} and
 * {@code p}. NOTE(review): whether a missing bucket is created on demand
 * depends on {@code getStats}, which is defined elsewhere — confirm there.
 */
public void put(int depth, Pair p, double val) {
  getStats(depth, p).addValue(val);
}
/**
 * Benchmarks a Stellar statement: executes warm-up rounds whose timings are
 * discarded, then benchmark rounds whose timings are collected.
 *
 * @param statement statement to execute
 * @param warmupRounds number of discarded warm-up executions
 * @param benchmarkRounds number of measured executions
 * @return timing statistics gathered over the benchmark rounds
 */
public static DescriptiveStatistics run(StellarStatement statement, int warmupRounds, int benchmarkRounds) {
  // Warm-up: run and throw the timings away.
  run(warmupRounds, statement, ts -> {});
  // Measured rounds: accumulate each timing.
  final DescriptiveStatistics stats = new DescriptiveStatistics();
  run(benchmarkRounds, statement, ts -> stats.addValue(ts));
  return stats;
}
/**
 * Computes the median spread between the daily high and open prices of a symbol.
 *
 * @param symbol ticker symbol to fetch quotes for
 * @param days number of days of history to fetch
 * @return the 50th percentile of (high - open) over the fetched daily quotes
 * @throws Exception if fetching or parsing the quotes fails
 */
public static double getMedianHighOpenSpread(String symbol, int days) throws Exception {
  // Quote interval of one day, in seconds.
  final int secondsPerDay = 60 * 60 * 24;
  YahooQuoteFetcher fetcher = new YahooQuoteFetcher();
  String rawQuotes = fetcher.fetchQuotes(symbol, days, secondsPerDay);
  List<Quote> dailyQuotes = fetcher.parseQuotes(rawQuotes, secondsPerDay);
  DescriptiveStatistics spreads = new DescriptiveStatistics();
  for (Quote quote : dailyQuotes) {
    spreads.addValue(quote.getHigh().subtract(quote.getOpen()).doubleValue());
  }
  return spreads.getPercentile(50);
}
public double findFrequency(double[] voltage, double samplingInterval) { int voltageLength = voltage.length; double[] frequency; double[] amplitude; int index = 0; double max = 0; Complex[] complex; DescriptiveStatistics stats = new DescriptiveStatistics(); for (int i = 0; i < voltageLength; i++) stats.addValue(voltage[i]); double voltageMean = stats.getMean(); for (int i = 0; i < voltageLength; i++) voltage[i] = voltage[i] - voltageMean; // remove DC component frequency = Arrays.copyOfRange(fftFrequency(voltageLength, samplingInterval), 0, voltageLength / 2); // take only the +ive half of the frequncy array FastFourierTransformer fastFourierTransformer = new FastFourierTransformer(DftNormalization.STANDARD); complex = fastFourierTransformer.transform(voltage, TransformType.FORWARD); amplitude = new double[complex.length / 2]; for (int i = 0; i < complex.length / 2; i++) { // take only the +ive half of the fft result amplitude[i] = complex[i].abs() / voltageLength; if (amplitude[i] > max) { // search for the tallest peak, the fundamental max = amplitude[i]; index = i; } } return frequency[index]; }
@Test(groups = "slow")
public void testError() throws Exception {
  DescriptiveStatistics errorStats = new DescriptiveStatistics();
  int buckets = 2048;
  for (int trial = 0; trial < 10000; ++trial) {
    // Fresh estimator per trial, fed ~5 distinct values per bucket.
    HyperLogLog estimator = new HyperLogLog(buckets);
    Set<Long> randomSet = makeRandomSet(5 * buckets);
    for (Long value : randomSet) {
      estimator.add(value);
    }
    double relativeError = (estimator.estimate() - randomSet.size()) * 1.0 / randomSet.size();
    errorStats.addValue(relativeError);
  }
  // The estimator should be unbiased with stddev within the theoretical bound.
  assertTrue(errorStats.getMean() < 1e-2);
  assertTrue(errorStats.getStandardDeviation() < 1.04 / Math.sqrt(buckets));
}