@Override
public synchronized double getValueAtPercentile(final double percentile) {
    return super.getValueAtPercentile(percentile);
}
/**
 * Get a copy of this histogram, corrected for coordinated omission.
 * <p>
 * To compensate for the loss of sampled values when a recorded value is larger than the expected
 * interval between value samples, the new histogram will include an auto-generated additional series of
 * decreasingly-smaller (down to the expectedIntervalBetweenValueSamples) value records for each count found
 * in the current histogram that is larger than the expectedIntervalBetweenValueSamples.
 *
 * Note: This is a post-correction method, as opposed to the at-recording correction method provided
 * by {@link #recordValueWithExpectedInterval(double, double) recordValueWithExpectedInterval}. The two
 * methods are mutually exclusive, and only one of the two should be used on a given data set to correct
 * for the same coordinated omission issue.
 * <p>
 * See notes in the description of the Histogram calls for an illustration of why this corrective behavior is
 * important.
 *
 * @param expectedIntervalBetweenValueSamples If expectedIntervalBetweenValueSamples is larger than 0, add
 *                                            auto-generated value records as appropriate if value is larger
 *                                            than expectedIntervalBetweenValueSamples
 * @return a copy of this histogram, corrected for coordinated omission.
 */
public DoubleHistogram copyCorrectedForCoordinatedOmission(final double expectedIntervalBetweenValueSamples) {
    final DoubleHistogram targetHistogram =
            new DoubleHistogram(configuredHighestToLowestValueRatio, getNumberOfSignificantValueDigits());
    targetHistogram.setTrackableValueRange(currentLowestValueInAutoRange, currentHighestValueLimitInAutoRange);
    targetHistogram.addWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
    return targetHistogram;
}
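// Illustrative usage sketch (not library source): post-correct a histogram whose samples were
// expected roughly every 10.0 time units. The recorded values below are made up for illustration.
// Assumes org.HdrHistogram.DoubleHistogram on the classpath.
static DoubleHistogram correctForStalls() {
    DoubleHistogram recorded = new DoubleHistogram(2);
    recorded.recordValue(1.0);
    recorded.recordValue(250.0);   // one stall far larger than the expected 10.0 interval
    // Backfills synthetic records (240.0, 230.0, ... down to the expected interval) for the stall:
    return recorded.copyCorrectedForCoordinatedOmission(10.0);
}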
@Override
public synchronized void add(final DoubleHistogram fromHistogram) throws ArrayIndexOutOfBoundsException {
    // Synchronize add(). Avoid deadlocks by synchronizing in order of construction identity count.
    if (integerValuesHistogram.identity < fromHistogram.integerValuesHistogram.identity) {
        synchronized (this) {
            synchronized (fromHistogram) {
                super.add(fromHistogram);
            }
        }
    } else {
        synchronized (fromHistogram) {
            synchronized (this) {
                super.add(fromHistogram);
            }
        }
    }
}
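// A generic sketch of the same lock-ordering idea (hypothetical helper, not part of HdrHistogram):
// if two threads run a.add(b) and b.add(a) and each locked "this" then "fromHistogram", they could
// deadlock; acquiring both monitors in one global order prevents that. System.identityHashCode can
// collide, which is why the library uses a strictly increasing construction-identity counter instead.
static void withBothLocked(Object a, Object b, Runnable action) {
    Object first  = System.identityHashCode(a) <= System.identityHashCode(b) ? a : b;
    Object second = (first == a) ? b : a;
    synchronized (first) {
        synchronized (second) {
            action.run();   // both monitors held, acquired in a consistent order
        }
    }
}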
/**
 * Copy this histogram into the target histogram, overwriting its contents.
 *
 * @param targetHistogram the histogram to copy into
 */
public void copyInto(final DoubleHistogram targetHistogram) {
    targetHistogram.reset();
    targetHistogram.add(this);
    targetHistogram.setStartTimeStamp(integerValuesHistogram.startTimeStampMsec);
    targetHistogram.setEndTimeStamp(integerValuesHistogram.endTimeStampMsec);
}
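// Usage sketch (names and values are illustrative): reuse one pre-allocated target across
// reporting intervals instead of allocating a fresh copy each time.
static void snapshotExample() {
    DoubleHistogram live = new DoubleHistogram(3);
    DoubleHistogram snapshot = new DoubleHistogram(3);
    live.recordValue(42.0);
    live.copyInto(snapshot);   // snapshot now mirrors live, including start/end timestamps
}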
@Override
public synchronized DoubleHistogram copyCorrectedForCoordinatedOmission(
        final double expectedIntervalBetweenValueSamples) {
    final DoubleHistogram targetHistogram = new DoubleHistogram(this);
    targetHistogram.addWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
    return targetHistogram;
}
/**
 * Create a copy of this histogram, complete with data and everything.
 *
 * @return A distinct copy of this histogram.
 */
public DoubleHistogram copy() {
    final DoubleHistogram targetHistogram =
            new DoubleHistogram(configuredHighestToLowestValueRatio, getNumberOfSignificantValueDigits());
    targetHistogram.setTrackableValueRange(currentLowestValueInAutoRange, currentHighestValueLimitInAutoRange);
    integerValuesHistogram.copyInto(targetHistogram.integerValuesHistogram);
    return targetHistogram;
}
// Excerpts from the log-processing path: initializing the accumulated and moving-window histograms,
// adding the interval histogram, and subtracting the expired window entry.
((DoubleHistogram) intervalHistogram).copy() : new DoubleHistogram(3);
accumulatedDoubleHistogram.reset();
accumulatedDoubleHistogram.setAutoResize(true);

new DoubleHistogram(3) : new Histogram(3);
accumulatedDoubleHistogram.add((DoubleHistogram) intervalHistogram);
} else {
    if (logUsesDoubleHistograms) {
        ((DoubleHistogram) movingWindowSumHistogram).add((DoubleHistogram) intervalHistogram);
    } else {
        ((Histogram) movingWindowSumHistogram).add((Histogram) intervalHistogram);
    }
}

if (movingWindowSumHistogram instanceof DoubleHistogram) {
    if (prevHist != null) {
        ((DoubleHistogram) movingWindowSumHistogram).subtract((DoubleHistogram) prevHist);
    }
}

// Interval and accumulated output columns, scaled by the configured output value unit ratio:
((DoubleHistogram) intervalHistogram).getTotalCount(),
((DoubleHistogram) intervalHistogram).getValueAtPercentile(50.0) / config.outputValueUnitRatio,
((DoubleHistogram) intervalHistogram).getValueAtPercentile(90.0) / config.outputValueUnitRatio,
((DoubleHistogram) intervalHistogram).getMaxValue() / config.outputValueUnitRatio,
accumulatedDoubleHistogram.getTotalCount(),
accumulatedDoubleHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
accumulatedDoubleHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
accumulatedDoubleHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio,
static SimilaritySummaryResult from(long length, AtomicLong similarityPairs, String writeRelationshipType,
                                    String writeProperty, boolean write, DoubleHistogram histogram) {
    return new SimilaritySummaryResult(
            length,
            similarityPairs.get(),
            write,
            writeRelationshipType,
            writeProperty,
            histogram.getMinValue(),
            histogram.getMaxValue(),
            histogram.getMean(),
            histogram.getStdDeviation(),
            histogram.getValueAtPercentile(25D),
            histogram.getValueAtPercentile(50D),
            histogram.getValueAtPercentile(75D),
            histogram.getValueAtPercentile(90D),
            histogram.getValueAtPercentile(95D),
            histogram.getValueAtPercentile(99D),
            histogram.getValueAtPercentile(99.9D),
            histogram.getValueAtPercentile(100D)
    );
}
}
@Override
public AbstractInternalHDRPercentiles doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
    DoubleHistogram merged = null;
    for (InternalAggregation aggregation : aggregations) {
        final AbstractInternalHDRPercentiles percentiles = (AbstractInternalHDRPercentiles) aggregation;
        if (merged == null) {
            merged = new DoubleHistogram(percentiles.state);
            merged.setAutoResize(true);
        }
        merged.add(percentiles.state);
    }
    return createReduced(getName(), keys, merged, keyed, pipelineAggregators(), getMetaData());
}
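// A minimal merge sketch along the lines of the reduce step above, assuming the caller supplies
// per-shard DoubleHistograms (hypothetical helper, not Elasticsearch code; needs java.util.List).
static DoubleHistogram mergeShards(List<DoubleHistogram> shardHistograms) {
    DoubleHistogram merged = null;
    for (DoubleHistogram shard : shardHistograms) {
        if (merged == null) {
            // Copy-construct to inherit the source's range/precision configuration (no data yet).
            merged = new DoubleHistogram(shard);
            merged.setAutoResize(true);
        }
        merged.add(shard);
    }
    return merged;   // null if the input list was empty
}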
@Override
public void collect(int doc, long bucket) throws IOException {
    states = bigArrays.grow(states, bucket + 1);
    DoubleHistogram state = states.get(bucket);
    if (state == null) {
        state = new DoubleHistogram(numberOfSignificantValueDigits);
        // Set the histogram to autosize so it can resize itself as
        // the data range increases. Resize operations should be
        // rare as the histogram buckets are exponential (on the top
        // level). In the future we could expose the range as an
        // option on the request so the histogram can be fixed at
        // initialisation and doesn't need resizing.
        state.setAutoResize(true);
        states.set(bucket, state);
    }
    if (values.advanceExact(doc)) {
        final int valueCount = values.docValueCount();
        for (int i = 0; i < valueCount; i++) {
            state.recordValue(values.nextValue());
        }
    }
}
};
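// Auto-resize sketch (values are made up): with setAutoResize(true) the trackable range grows
// as values arrive, so widely spread magnitudes can be recorded without pre-declaring a range.
static DoubleHistogram autoResizeExample() {
    DoubleHistogram state = new DoubleHistogram(3);
    state.setAutoResize(true);
    state.recordValue(0.00025);   // e.g. 250 microseconds expressed in seconds
    state.recordValue(7200.0);    // many orders of magnitude larger; the histogram resizes itself
    return state;
}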
@Override
public double percentile(double percent) {
    if (state.getTotalCount() == 0) {
        return Double.NaN;
    }
    return state.getValueAtPercentile(percent);
}
protected Object[] buildDoubleHistogramStatistics(DoubleHistogram intervalHistogram,
                                                  DoubleHistogram accumulatedHistogram) {
    double intervalThroughput = ((double) intervalHistogram.getTotalCount()) /
            (intervalHistogram.getEndTimeStamp() - intervalHistogram.getStartTimeStamp());
    double totalThroughput = ((double) accumulatedHistogram.getTotalCount()) /
            (accumulatedHistogram.getEndTimeStamp() - accumulatedHistogram.getStartTimeStamp());
    return new Object[]{
            ((intervalHistogram.getEndTimeStamp() / 1000.0) - logReader.getStartTimeSec()),
            (intervalHistogram.getEndTimeStamp() / 1000.0),
            intervalHistogram.getTotalCount(),
            intervalHistogram.getValueAtPercentile(25.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(75.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.9) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.99) / config.outputValueUnitRatio,
            intervalHistogram.getValueAtPercentile(99.999) / config.outputValueUnitRatio,
            intervalHistogram.getMinValue() / config.outputValueUnitRatio,
            intervalHistogram.getMaxValue() / config.outputValueUnitRatio,
            intervalHistogram.getMean() / config.outputValueUnitRatio,
            intervalHistogram.getStdDeviation() / config.outputValueUnitRatio,
            intervalThroughput / config.outputValueUnitRatio,
            accumulatedHistogram.getTotalCount(),
            accumulatedHistogram.getValueAtPercentile(25.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(75.0) / config.outputValueUnitRatio,
            accumulatedHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
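// Units sketch (hypothetical helper): assuming the start/end timestamps are in milliseconds, as the
// startTimeStampMsec/endTimeStampMsec field names above suggest, the raw count/elapsed ratio is
// per-millisecond; multiply by 1000 for operations per second.
static double opsPerSecond(DoubleHistogram h) {
    long elapsedMsec = h.getEndTimeStamp() - h.getStartTimeStamp();
    return (elapsedMsec > 0) ? (h.getTotalCount() * 1000.0) / elapsedMsec : 0.0;
}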
@UserAggregationResult
public Map<String, Number> result() {
    long totalCount = values != null ? values.getTotalCount() : doubles.getTotalCount();
    boolean empty = totalCount == 0;
    Map<String, Number> result = new LinkedHashMap<>(percentiles.size() + 6);
    result.put("min", values != null ? (Number) values.getMinValue() : (Number) doubles.getMinValue());
    result.put("minNonZero", values != null ? (Number) values.getMinNonZeroValue() : (Number) doubles.getMinNonZeroValue());
    result.put("max", values != null ? (Number) values.getMaxValue() : (Number) doubles.getMaxValue());
    result.put("total", totalCount);
    result.put("mean", values != null ? values.getMean() : doubles.getMean());
    result.put("stdev", values != null ? values.getStdDeviation() : doubles.getStdDeviation());
    for (Double percentile : percentiles) {
        if (percentile != null && !empty) {
            if (values != null) {
                result.put(percentile.toString(), values.getValueAtPercentile(percentile * 100D));
            } else {
                result.put(percentile.toString(), doubles.getValueAtPercentile(percentile * 100D));
            }
        }
    }
    return result;
}
}
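// Standalone sketch of the same result shape using only a DoubleHistogram (the integer-histogram
// "values" branch is omitted; map keys follow the code above, sample inputs are up to the caller;
// needs java.util.Map, java.util.LinkedHashMap, java.util.List).
static Map<String, Number> percentileMap(DoubleHistogram doubles, List<Double> percentiles) {
    Map<String, Number> out = new LinkedHashMap<>(percentiles.size() + 6);
    out.put("min", doubles.getMinValue());
    out.put("max", doubles.getMaxValue());
    out.put("total", doubles.getTotalCount());
    out.put("mean", doubles.getMean());
    for (Double p : percentiles) {
        if (p != null && doubles.getTotalCount() > 0) {
            out.put(p.toString(), doubles.getValueAtPercentile(p * 100D));   // p given as 0..1
        }
    }
    return out;
}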
@Override
public InternalAggregation buildEmptyAggregation() {
    DoubleHistogram state;
    state = new DoubleHistogram(numberOfSignificantValueDigits);
    state.setAutoResize(true);
    return new InternalHDRPercentiles(name, keys, state, keyed, format, pipelineAggregators(), metaData());
}
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
    out.writeNamedWriteable(format);
    out.writeDoubleArray(keys);
    out.writeLong(state.getHighestToLowestValueRatio());
    ByteBuffer stateBuffer = ByteBuffer.allocate(state.getNeededByteBufferCapacity());
    final int serializedLen = state.encodeIntoCompressedByteBuffer(stateBuffer);
    out.writeVInt(serializedLen);
    out.writeBytes(stateBuffer.array(), 0, serializedLen);
    out.writeBoolean(keyed);
}
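// Encoding sketch (not Elasticsearch code; needs java.nio.ByteBuffer and java.util.Arrays):
// size the buffer with getNeededByteBufferCapacity(), then keep only the serializedLen bytes
// actually produced by the compressed encoding, as the write path above does.
static byte[] toCompressedBytes(DoubleHistogram state) {
    ByteBuffer stateBuffer = ByteBuffer.allocate(state.getNeededByteBufferCapacity());
    int serializedLen = state.encodeIntoCompressedByteBuffer(stateBuffer);
    return Arrays.copyOf(stateBuffer.array(), serializedLen);
}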
protected Object[] buildDoubleHistogramStatistics(DoubleHistogram doubleIntervalHistogram,
                                                  DoubleHistogram accumulatedDoubleHistogram) {
    return new Object[]{
            ((doubleIntervalHistogram.getEndTimeStamp() / 1000.0) - logReader.getStartTimeSec()),
            // values recorded during the last reporting interval
            doubleIntervalHistogram.getTotalCount(),
            doubleIntervalHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            doubleIntervalHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            doubleIntervalHistogram.getMaxValue() / config.outputValueUnitRatio,
            // values recorded from the beginning until now
            accumulatedDoubleHistogram.getTotalCount(),
            accumulatedDoubleHistogram.getValueAtPercentile(50.0) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(90.0) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(99.0) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(99.9) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getValueAtPercentile(99.99) / config.outputValueUnitRatio,
            accumulatedDoubleHistogram.getMaxValue() / config.outputValueUnitRatio};
}
@Override
public synchronized void recordValue(final double value) throws ArrayIndexOutOfBoundsException {
    super.recordValue(value);
}
// Excerpts from the auto-ranging value-shift logic:
getIntegerToDoubleValueConversionRatio() * shiftMultiplier;

if (getTotalCount() > integerValuesHistogram.getCountAtIndex(0)) {

} catch (ArrayIndexOutOfBoundsException ex) {
    handleShiftValuesException(numberOfBinaryOrdersOfMagnitude, ex);
} finally {
    setTrackableValueRange(newLowestValueInAutoRange, newHighestValueLimitInAutoRange);
/**
 * Construct a new DoubleHistogram by decoding it from a compressed form in a ByteBuffer.
 *
 * @param buffer The buffer to decode from
 * @param minBarForHighestToLowestValueRatio Force highestTrackableValue to be set at least this high
 * @return The newly constructed DoubleHistogram
 * @throws DataFormatException on error parsing/decompressing the buffer
 */
public static DoubleHistogram decodeFromCompressedByteBuffer(
        final ByteBuffer buffer,
        final long minBarForHighestToLowestValueRatio) throws DataFormatException {
    return decodeFromCompressedByteBuffer(buffer, Histogram.class, minBarForHighestToLowestValueRatio);
}
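// Round-trip sketch (illustrative; needs java.nio.ByteBuffer and java.util.zip.DataFormatException):
// encode a histogram into a compressed buffer, rewind, and decode it back. The value 2 is passed
// only as a small floor for the highest-to-lowest ratio; the encoded configuration is preserved.
static DoubleHistogram roundTrip(DoubleHistogram original) throws DataFormatException {
    ByteBuffer buffer = ByteBuffer.allocate(original.getNeededByteBufferCapacity());
    original.encodeIntoCompressedByteBuffer(buffer);
    buffer.rewind();   // decode reads from the buffer's current position
    return DoubleHistogram.decodeFromCompressedByteBuffer(buffer, 2);
}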
@Override
public synchronized int getEstimatedFootprintInBytes() {
    return super.getEstimatedFootprintInBytes();
}