/**
 * Total recorded count, read under this instance's monitor so the value is
 * consistent with other synchronized operations on this histogram.
 */
@Override
public synchronized long getTotalCount() {
    final long total = super.getTotalCount();
    return total;
}
if (getTotalCount() > integerValuesHistogram.getCountAtIndex(0)) {
((intervalHistogram.getEndTimeStamp() / 1000.0) - logReader.getStartTimeSec()), ((DoubleHistogram) intervalHistogram).getTotalCount(), ((DoubleHistogram) intervalHistogram).getValueAtPercentile(50.0) / config.outputValueUnitRatio, ((DoubleHistogram) intervalHistogram).getValueAtPercentile(90.0) / config.outputValueUnitRatio, ((DoubleHistogram) intervalHistogram).getMaxValue() / config.outputValueUnitRatio, accumulatedDoubleHistogram.getTotalCount(), accumulatedDoubleHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio, accumulatedDoubleHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio, ((intervalHistogram.getEndTimeStamp() / 1000.0) - logReader.getStartTimeSec()), ((DoubleHistogram) movingWindowSumHistogram).getTotalCount(), ((DoubleHistogram) movingWindowSumHistogram).getValueAtPercentile(config.movingWindowPercentileToReport) / config.outputValueUnitRatio, ((DoubleHistogram) movingWindowSumHistogram).getMaxValue() / config.outputValueUnitRatio
if (getTotalCount() > integerValuesHistogram.getCountAtIndex(0)) {
@Override protected int doHashCode() { // we cannot use state.hashCode at the moment because of: // https://github.com/HdrHistogram/HdrHistogram/issues/81 // TODO: upgrade the HDRHistogram library return Objects.hash(keyed, Arrays.hashCode(keys), state.getIntegerToDoubleValueConversionRatio(), state.getTotalCount()); } }
/**
 * Value at the requested percentile, or {@code Double.NaN} when the
 * underlying histogram holds no samples.
 */
@Override
public double percentile(double percent) {
    return state.getTotalCount() == 0 ? Double.NaN : state.getValueAtPercentile(percent);
}
/**
 * Percentile rank of {@code value} within {@code state}, clamped to [0, 100].
 * Returns {@code Double.NaN} when the histogram is empty.
 */
static double percentileRank(DoubleHistogram state, double value) {
    if (state.getTotalCount() == 0) {
        return Double.NaN;
    }
    final double rank = state.getPercentileAtOrBelowValue(value);
    // Math.min/Math.max propagate NaN, matching the original if-chain.
    return Math.min(100.0, Math.max(0.0, rank));
}
/**
 * Emits the next (percent, value) pair and advances the cursor; the value is
 * NaN for an empty histogram.
 */
@Override
public Percentile next() {
    final double percent = percents[i++];
    final double value = state.getTotalCount() == 0
            ? Double.NaN
            : state.getValueAtPercentile(percent);
    return new Percentile(percent, value);
}
/**
 * Materializes the aggregation: one entry per requested percentile, in order.
 * Null percentiles — and every entry when nothing was recorded — map to null.
 */
@UserAggregationResult
public List<Number> result() {
    final boolean useLongs = values != null;
    final long totalCount = useLongs ? values.getTotalCount() : doubles.getTotalCount();
    final boolean empty = totalCount == 0;
    final List<Number> result = new ArrayList<>(percentiles.size());
    for (Double percentile : percentiles) {
        if (percentile == null || empty) {
            result.add(null);
        } else if (useLongs) {
            result.add(values.getValueAtPercentile(percentile * 100D));
        } else {
            result.add(doubles.getValueAtPercentile(percentile * 100D));
        }
    }
    return result;
}
}
/**
 * Summary statistics plus the requested percentile values, keyed by the
 * percentile's string form. Null or unrecordable percentiles are omitted.
 */
@UserAggregationResult
public Map<String, Number> result() {
    final boolean useLongs = values != null;
    final long totalCount = useLongs ? values.getTotalCount() : doubles.getTotalCount();
    final boolean empty = totalCount == 0;
    final Map<String, Number> result = new LinkedHashMap<>(percentiles.size() + 6);
    result.put("min", useLongs ? (Number) values.getMinValue() : (Number) doubles.getMinValue());
    result.put("minNonZero", useLongs ? (Number) values.getMinNonZeroValue() : (Number) doubles.getMinNonZeroValue());
    result.put("max", useLongs ? (Number) values.getMaxValue() : (Number) doubles.getMaxValue());
    result.put("total", totalCount);
    result.put("mean", useLongs ? values.getMean() : doubles.getMean());
    result.put("stdev", useLongs ? values.getStdDeviation() : doubles.getStdDeviation());
    for (Double percentile : percentiles) {
        if (percentile == null || empty) {
            continue;
        }
        // Keep the two calls in separate branches: the long-histogram result
        // boxes as Long, the double-histogram result as Double; a shared
        // ternary would promote both to Double.
        if (useLongs) {
            result.put(percentile.toString(), values.getValueAtPercentile(percentile * 100D));
        } else {
            result.put(percentile.toString(), doubles.getValueAtPercentile(percentile * 100D));
        }
    }
    return result;
}
}
/**
 * Fraction (0.0–1.0) of all recorded samples that landed in the negative
 * histogram; a missing negative histogram counts as zero negatives.
 * Returns 0.0 when no samples have been recorded at all — previously the
 * empty case divided 0 by 0 and returned NaN.
 */
private double percentageNegative() {
    final long negativeCount = negativeHistogram == null ? 0 : negativeHistogram.getTotalCount();
    final long totalCount = positiveHistogram.getTotalCount() + negativeCount;
    if (totalCount == 0) {
        return 0.0; // guard: avoid 0/0 -> NaN for empty histograms
    }
    return (double) negativeCount / (double) totalCount;
}
// Synchronized pass-through: serializes total-count reads with other
// synchronized operations on this histogram instance.
// NOTE(review): assumes super's count is mutated under the same monitor — confirm.
@Override public synchronized long getTotalCount() { return super.getTotalCount(); }
@Override protected int doHashCode() { // we cannot use state.hashCode at the moment because of: // https://github.com/HdrHistogram/HdrHistogram/issues/81 // TODO: upgrade the HDRHistogram library return Objects.hash(keyed, Arrays.hashCode(keys), state.getIntegerToDoubleValueConversionRatio(), state.getTotalCount()); } }
/**
 * Looks up the value at {@code percent}; an empty histogram yields NaN.
 */
@Override
public double percentile(double percent) {
    if (state.getTotalCount() != 0) {
        return state.getValueAtPercentile(percent);
    }
    return Double.NaN;
}
/**
 * Computes where {@code value} falls within {@code state} as a rank in
 * [0, 100]; NaN when the histogram has no samples.
 */
static double percentileRank(DoubleHistogram state, double value) {
    final long samples = state.getTotalCount();
    if (samples == 0) {
        return Double.NaN;
    }
    double rank = state.getPercentileAtOrBelowValue(value);
    if (rank < 0) {
        return 0;
    }
    if (rank > 100) {
        return 100;
    }
    return rank;
}
/**
 * Value at the given percentile of the backing histogram; NaN if empty.
 */
@Override
public double percentile(double percent) {
    final boolean hasSamples = state.getTotalCount() != 0;
    return hasSamples ? state.getValueAtPercentile(percent) : Double.NaN;
}
/**
 * Resolves {@code percent} against the backing histogram state.
 * An empty histogram has no defined percentiles, so NaN is returned.
 */
@Override
public double percentile(double percent) {
    final long recorded = state.getTotalCount();
    if (recorded == 0) {
        return Double.NaN;
    }
    return state.getValueAtPercentile(percent);
}
/**
 * Rank (0–100) of {@code value} inside {@code state}; NaN for an empty
 * histogram. Out-of-range ranks from the library call are clamped.
 */
static double percentileRank(DoubleHistogram state, double value) {
    if (state.getTotalCount() == 0) {
        return Double.NaN;
    }
    // Math.max/Math.min both propagate NaN, so this clamp is behaviorally
    // identical to the two-branch comparison form.
    return Math.max(0.0, Math.min(100.0, state.getPercentileAtOrBelowValue(value)));
}
/**
 * Produces the next percentile in the configured sequence and advances the
 * internal index; the value component is NaN when no samples exist.
 */
@Override
public Percentile next() {
    final double percent = percents[i];
    i = i + 1;
    final double value;
    if (state.getTotalCount() == 0) {
        value = Double.NaN;
    } else {
        value = state.getValueAtPercentile(percent);
    }
    return new Percentile(percent, value);
}
/**
 * Returns the (percent, value) pair at the current cursor and moves the
 * cursor forward; an empty histogram maps every percentile to NaN.
 */
@Override
public Percentile next() {
    final boolean empty = state.getTotalCount() == 0;
    final double percent = percents[i++];
    return new Percentile(percent, empty ? Double.NaN : state.getValueAtPercentile(percent));
}