/** * Calculates percentile from {@link TDigest}. * <p>Handles cases where only one value in TDigest object. */ public static double calculatePercentile(@Nonnull TDigest tDigest, int percentile) { if (tDigest.size() == 1) { // Specialize cases where only one value in TDigest (cannot use quantile method) return tDigest.centroids().iterator().next().mean(); } else { return tDigest.quantile(percentile / 100.0); } }
/**
 * Verifies that the estimated serialized size of a percentile aggregator is within 30%
 * of its measured size, then resets the aggregator.
 *
 * @param sumNums number of values fed to the aggregator (forwarded to the factory)
 * @param sqrtNum secondary sizing parameter for the factory — semantics not visible here
 * @param compression t-digest compression, or {@code null} to use {@code DEFAULT_COMPRESSION}
 * @throws Exception if aggregator construction or sizing fails
 */
private void testPercentileSize(int sumNums, Integer sqrtNum, Integer compression) throws Exception {
  // Fall back to the default compression when none is supplied; use a final local
  // rather than reassigning the parameter.
  final Integer effectiveCompression = compression == null ? DEFAULT_COMPRESSION : compression;
  PercentileAggregator aggregator = createPercentileAggreator(sumNums, sqrtNum, effectiveCompression);
  double actual = getActualSize(aggregator);
  double estimate =
      getEstimateSize((int) aggregator.getState().getRegisters().size(), 1, effectiveCompression);
  // Allow up to 30% relative error between estimate and measured size.
  assertTrue(Math.abs(actual - estimate) / actual < 0.3);
  aggregator.reset();
}
@Override
public int size() {
  // NOTE(review): snapshot.size() appears to return a long; this narrowing cast
  // truncates values above Integer.MAX_VALUE — presumably acceptable for this
  // interface, but confirm.
  return (int) snapshot.size();
}
};
@Override
public int size() {
  // NOTE(review): snapshot.size() appears to return a long; this narrowing cast
  // truncates values above Integer.MAX_VALUE — presumably acceptable for this
  // interface, but confirm.
  return (int) snapshot.size();
}
};
/**
 * Returns the total number of recorded values, summed over every t-digest in every
 * thread's list of histogram bins.
 */
@Override
public long count() {
  return perThreadHistogramBins.values().stream()
      .flatMapToLong(bins -> bins.stream().mapToLong(bin -> bin.dist.size()))
      .sum();
}
/**
 * Returns the total number of recorded values, summed over every t-digest in every
 * thread's list of histogram bins.
 */
@Override
public long count() {
  return perThreadHistogramBins.values().stream().flatMap(List::stream).mapToLong(bin -> bin.dist.size()).sum();
}
private void internalProcessWavefrontHistogram(WavefrontHistogram hist, Context context) throws Exception { final JsonGenerator json = context.json; json.writeStartObject(); json.writeArrayFieldStart("bins"); for (WavefrontHistogram.MinuteBin bin : hist.bins(clear)) { final Collection<Centroid> centroids = bin.getDist().centroids(); json.writeStartObject(); // Count json.writeNumberField("count", bin.getDist().size()); // Start json.writeNumberField("startMillis", bin.getMinMillis()); // Duration json.writeNumberField("durationMillis", 60 * 1000); // Means json.writeArrayFieldStart("means"); for (Centroid c : centroids) { json.writeNumber(c.mean()); } json.writeEndArray(); // Counts json.writeArrayFieldStart("counts"); for (Centroid c : centroids) { json.writeNumber(c.count()); } json.writeEndArray(); json.writeEndObject(); } json.writeEndArray(); json.writeEndObject(); }
/**
 * Serializes a {@link WavefrontHistogram} into the context's JSON stream as an object
 * holding a "bins" array; each bin records its sample count, start time, fixed one-minute
 * duration, and parallel arrays of centroid means and counts.
 */
private void internalProcessWavefrontHistogram(WavefrontHistogram hist, Context context) throws Exception {
  final JsonGenerator json = context.json;
  json.writeStartObject();
  json.writeArrayFieldStart("bins");
  for (WavefrontHistogram.MinuteBin bin : hist.bins(clear)) {
    final Collection<Centroid> centroids = bin.getDist().centroids();
    json.writeStartObject();
    // Count of samples in this minute bin
    json.writeNumberField("count", bin.getDist().size());
    // Start timestamp of the bin (epoch millis)
    json.writeNumberField("startMillis", bin.getMinMillis());
    // Duration — every bin spans exactly one minute
    json.writeNumberField("durationMillis", 60 * 1000);
    // Means — centroid means, parallel to the counts array below
    json.writeArrayFieldStart("means");
    for (Centroid c : centroids) {
      json.writeNumber(c.mean());
    }
    json.writeEndArray();
    // Counts — per-centroid sample counts, parallel to the means array
    json.writeArrayFieldStart("counts");
    for (Centroid c : centroids) {
      json.writeNumber(c.count());
    }
    json.writeEndArray();
    json.writeEndObject();
  }
  json.writeEndArray();
  json.writeEndObject();
}
/**
 * Flushes a histogram to the reporting context. When Wavefront histograms are enabled,
 * a {@link WavefrontHistogram} is reported as a native t-digest distribution (one median
 * value and count per minute bin); any other histogram is exploded into summarizable and
 * sampling sub-metrics and then cleared.
 */
@Override
public void processHistogram(MetricName name, Histogram histogram, FlushProcessorContext context) throws Exception {
  if (histogram instanceof WavefrontHistogram && useWavefrontHistograms) {
    WavefrontHistogram wfHistogram = (WavefrontHistogram) histogram;
    wavefront.report.Histogram.Builder histogramBuilder = wavefront.report.Histogram.newBuilder();
    histogramBuilder.setBins(Lists.newLinkedList());
    histogramBuilder.setCounts(Lists.newLinkedList());
    long minMillis = Long.MAX_VALUE;
    // Nothing to report for an empty histogram; note this also skips sentCounter below.
    if (wfHistogram.count() == 0) return;
    for (WavefrontHistogram.MinuteBin bin : wfHistogram.bins(true)) {
      // Report each minute bin's median as its bin value.
      histogramBuilder.getBins().add(bin.getDist().quantile(.5));
      histogramBuilder.getCounts().add(Math.toIntExact(bin.getDist().size()));
      minMillis = Long.min(minMillis, bin.getMinMillis());
    }
    histogramBuilder.setType(HistogramType.TDIGEST);
    histogramBuilder.setDuration(Math.toIntExact(currentMillis.get() - minMillis));
    context.report(histogramBuilder.build());
  } else {
    context.reportSubMetric(histogram.count(), "count");
    for (Map.Entry<String, Double> stat : MetricsToTimeseries.explodeSummarizable(histogram, reportEmptyHistogramStats).entrySet()) {
      context.reportSubMetric(stat.getValue(), stat.getKey());
    }
    for (Map.Entry<String, Double> stat : MetricsToTimeseries.explodeSampling(histogram, reportEmptyHistogramStats).entrySet()) {
      context.reportSubMetric(stat.getValue(), stat.getKey());
    }
    histogram.clear();
  }
  sentCounter.inc();
}
/**
 * Flushes a histogram to the reporting context. When Wavefront histograms are enabled,
 * a {@link WavefrontHistogram} is reported as a native t-digest distribution (one median
 * value and count per minute bin); any other histogram is exploded into summarizable and
 * sampling sub-metrics and then cleared.
 */
@Override
public void processHistogram(MetricName name, Histogram histogram, FlushProcessorContext context) throws Exception {
  if (histogram instanceof WavefrontHistogram && useWavefrontHistograms) {
    WavefrontHistogram wavefrontHistogram = (WavefrontHistogram) histogram;
    wavefront.report.Histogram.Builder builder = wavefront.report.Histogram.newBuilder();
    builder.setBins(Lists.newLinkedList());
    builder.setCounts(Lists.newLinkedList());
    long minMillis = Long.MAX_VALUE;
    // Nothing to report for an empty histogram; note this also skips sentCounter.inc().
    if (wavefrontHistogram.count() == 0) return;
    for (WavefrontHistogram.MinuteBin minuteBin : wavefrontHistogram.bins(true)) {
      // Each minute bin's median is reported as its bin value.
      builder.getBins().add(minuteBin.getDist().quantile(.5));
      builder.getCounts().add(Math.toIntExact(minuteBin.getDist().size()));
      minMillis = Long.min(minMillis, minuteBin.getMinMillis());
    }
    builder.setType(HistogramType.TDIGEST);
    // Duration spans from the earliest bin to the current flush time.
    builder.setDuration(Math.toIntExact(currentMillis.get() - minMillis));
    context.report(builder.build());
  } else {
    context.reportSubMetric(histogram.count(), "count");
    for (Map.Entry<String, Double> entry : MetricsToTimeseries.explodeSummarizable(histogram, reportEmptyHistogramStats).entrySet()) {
      context.reportSubMetric(entry.getValue(), entry.getKey());
    }
    for (Map.Entry<String, Double> entry : MetricsToTimeseries.explodeSampling(histogram, reportEmptyHistogramStats).entrySet()) {
      context.reportSubMetric(entry.getValue(), entry.getKey());
    }
    histogram.clear();
  }
  sentCounter.inc();
}