// Bucket boundary table taken from a default-sized EstimatedHistogram; serves
// as the canonical offset layout for histograms built with default sizing.
// NOTE(review): depends on EstimatedHistogram's no-arg constructor defaults —
// confirm the intended default bucket count against the class definition.
long[] offsets = new EstimatedHistogram().getBucketOffsets();
static EstimatedHistogram defaultPartitionSizeHistogram() { // EH of 150 can track a max value of 1697806495183, i.e., > 1.5PB return new EstimatedHistogram(150); }
static EstimatedHistogram defaultPartitionSizeHistogram() { // EH of 150 can track a max value of 1697806495183, i.e., > 1.5PB return new EstimatedHistogram(150); }
static EstimatedHistogram defaultPartitionSizeHistogram() { // EH of 150 can track a max value of 1697806495183, i.e., > 1.5PB return new EstimatedHistogram(150); }
static EstimatedHistogram defaultCellPerPartitionCountHistogram() { // EH of 114 can track a max value of 2395318855, i.e., > 2B columns return new EstimatedHistogram(114); }
static EstimatedHistogram defaultCellPerPartitionCountHistogram() { // EH of 114 can track a max value of 2395318855, i.e., > 2B columns return new EstimatedHistogram(114); }
static EstimatedHistogram defaultColumnCountHistogram() { // EH of 114 can track a max value of 2395318855, i.e., > 2B columns return new EstimatedHistogram(114); }
static EstimatedHistogram defaultCellPerPartitionCountHistogram() { // EH of 114 can track a max value of 2395318855, i.e., > 2B columns return new EstimatedHistogram(114); }
static EstimatedHistogram defaultRowSizeHistogram() { // EH of 150 can track a max value of 1697806495183, i.e., > 1.5PB return new EstimatedHistogram(150); }
static EstimatedHistogram defaultCellPerPartitionCountHistogram() { // EH of 114 can track a max value of 2395318855, i.e., > 2B columns return new EstimatedHistogram(114); }
static EstimatedHistogram defaultPartitionSizeHistogram() { // EH of 150 can track a max value of 1697806495183, i.e., > 1.5PB return new EstimatedHistogram(150); }
/**
 * Reads an EstimatedHistogram from {@code in}: an int bucket count followed by
 * that many (offset, count) long pairs.
 * <p>
 * NOTE(review): the offset read with the first pair lands in slot 0 but is
 * immediately overwritten by the second pair's offset, so the first offset on
 * the wire is effectively discarded — presumably a legacy-format quirk; confirm
 * against the matching serializer.
 *
 * @param in stream positioned at a serialized histogram
 * @return the deserialized histogram
 * @throws IOException if reading from the stream fails
 */
public EstimatedHistogram deserialize(DataInput in) throws IOException
{
    int bucketCount = in.readInt();
    long[] bucketOffsets = new long[bucketCount - 1];
    long[] bucketCounts = new long[bucketCount];
    for (int idx = 0; idx < bucketCount; idx++)
    {
        bucketOffsets[idx == 0 ? 0 : idx - 1] = in.readLong();
        bucketCounts[idx] = in.readLong();
    }
    return new EstimatedHistogram(bucketOffsets, bucketCounts);
}
// NOTE(review): garbled extraction — an early `return` fused with the tail of a
// histogram-building routine; the statements after the `return` are unreachable
// as written, and the locals (highhalf, lowhalf, ranges, values, count) are
// declared outside this view. Cannot reconstruct the enclosing method from
// here — restore it from the original source.
return new EstimatedHistogram(EMPTY_LONG_ARRAY, ZERO); System.arraycopy(highhalf, 0, ranges, lowhalf.length + 1, highhalf.length); final EstimatedHistogram hist = new EstimatedHistogram(ranges, new long[ranges.length + 1]); for (int i = 0 ; i < count ; i++) hist.add(values[i]);
// NOTE(review): garbled extraction — an early `return` fused with the tail of a
// histogram-building routine; the statements after the `return` are unreachable
// as written, and the locals (highhalf, lowhalf, ranges, values, count) are
// declared outside this view. Cannot reconstruct the enclosing method from
// here — restore it from the original source.
return new EstimatedHistogram(EMPTY_LONG_ARRAY, ZERO); System.arraycopy(highhalf, 0, ranges, lowhalf.length + 1, highhalf.length); final EstimatedHistogram hist = new EstimatedHistogram(ranges, new long[ranges.length + 1]); for (int i = 0 ; i < count ; i++) hist.add(values[i]);
// NOTE(review): garbled extraction — an early `return` fused with the tail of a
// histogram-building routine; the statements after the `return` are unreachable
// as written, and the locals (highhalf, lowhalf, ranges, values, count) are
// declared outside this view. Cannot reconstruct the enclosing method from
// here — restore it from the original source.
return new EstimatedHistogram(EMPTY_LONG_ARRAY, ZERO); System.arraycopy(highhalf, 0, ranges, lowhalf.length + 1, highhalf.length); final EstimatedHistogram hist = new EstimatedHistogram(ranges, new long[ranges.length + 1]); for (int i = 0 ; i < count ; i++) hist.add(values[i]);
/**
 * Reads an EstimatedHistogram from {@code in}: an int bucket count followed by
 * that many (offset, count) long pairs.
 * <p>
 * NOTE(review): the offset read with the first pair lands in slot 0 but is
 * immediately overwritten by the second pair's offset, so the first offset on
 * the wire is effectively discarded — presumably a legacy-format quirk; confirm
 * against the matching serializer.
 *
 * @param in stream positioned at a serialized histogram
 * @return the deserialized histogram
 * @throws IOException if reading from the stream fails
 */
public EstimatedHistogram deserialize(DataInputPlus in) throws IOException
{
    int bucketCount = in.readInt();
    long[] bucketOffsets = new long[bucketCount - 1];
    long[] bucketCounts = new long[bucketCount];
    for (int idx = 0; idx < bucketCount; idx++)
    {
        bucketOffsets[idx == 0 ? 0 : idx - 1] = in.readLong();
        bucketCounts[idx] = in.readLong();
    }
    return new EstimatedHistogram(bucketOffsets, bucketCounts);
}
/**
 * Reads an EstimatedHistogram from {@code in}: an int bucket count followed by
 * that many (offset, count) long pairs.
 * <p>
 * NOTE(review): the offset read with the first pair lands in slot 0 but is
 * immediately overwritten by the second pair's offset, so the first offset on
 * the wire is effectively discarded — presumably a legacy-format quirk; confirm
 * against the matching serializer.
 *
 * @param in stream positioned at a serialized histogram
 * @return the deserialized histogram
 * @throws IOException if reading from the stream fails
 */
public EstimatedHistogram deserialize(DataInputPlus in) throws IOException
{
    int bucketCount = in.readInt();
    long[] bucketOffsets = new long[bucketCount - 1];
    long[] bucketCounts = new long[bucketCount];
    for (int idx = 0; idx < bucketCount; idx++)
    {
        bucketOffsets[idx == 0 ? 0 : idx - 1] = in.readLong();
        bucketCounts[idx] = in.readLong();
    }
    return new EstimatedHistogram(bucketOffsets, bucketCounts);
}
/**
 * Reads an EstimatedHistogram from {@code in}: an int bucket count followed by
 * that many (offset, count) long pairs.
 * <p>
 * NOTE(review): the offset read with the first pair lands in slot 0 but is
 * immediately overwritten by the second pair's offset, so the first offset on
 * the wire is effectively discarded — presumably a legacy-format quirk; confirm
 * against the matching serializer.
 *
 * @param in stream positioned at a serialized histogram
 * @return the deserialized histogram
 * @throws IOException if reading from the stream fails
 */
public EstimatedHistogram deserialize(DataInputPlus in) throws IOException
{
    int bucketCount = in.readInt();
    long[] bucketOffsets = new long[bucketCount - 1];
    long[] bucketCounts = new long[bucketCount];
    for (int idx = 0; idx < bucketCount; idx++)
    {
        bucketOffsets[idx == 0 ? 0 : idx - 1] = in.readLong();
        bucketCounts[idx] = in.readLong();
    }
    return new EstimatedHistogram(bucketOffsets, bucketCounts);
}
/**
 * Reads an EstimatedHistogram from {@code in}: an int bucket count followed by
 * that many (offset, count) long pairs.
 * <p>
 * NOTE(review): the offset read with the first pair lands in slot 0 but is
 * immediately overwritten by the second pair's offset, so the first offset on
 * the wire is effectively discarded — presumably a legacy-format quirk; confirm
 * against the matching serializer.
 *
 * @param in stream positioned at a serialized histogram
 * @return the deserialized histogram
 * @throws IOException if reading from the stream fails
 */
public EstimatedHistogram deserialize(DataInputPlus in) throws IOException
{
    int bucketCount = in.readInt();
    long[] bucketOffsets = new long[bucketCount - 1];
    long[] bucketCounts = new long[bucketCount];
    for (int idx = 0; idx < bucketCount; idx++)
    {
        bucketOffsets[idx == 0 ? 0 : idx - 1] = in.readLong();
        bucketCounts[idx] = in.readLong();
    }
    return new EstimatedHistogram(bucketOffsets, bucketCounts);
}
/**
 * Converts raw bucket counts into the standard 7-element summary:
 * [p50, p75, p95, p98, p99, min, max].
 * <p>
 * If {@code counts} is empty (per {@code isEmpty}) every slot is NaN. If the
 * reconstructed histogram has overflowed, the percentile slots (0-4) are NaN
 * and a warning is printed, but min/max are still reported.
 *
 * @param counts per-bucket counts to interpret as an EstimatedHistogram
 * @return a 7-element array of percentiles followed by min and max
 */
public double[] metricPercentilesAsArray(long[] counts)
{
    double[] result = new double[7];
    if (isEmpty(counts))
    {
        Arrays.fill(result, Double.NaN);
        return result;
    }

    double[] offsetPercentiles = new double[] { 0.5, 0.75, 0.95, 0.98, 0.99 };
    EstimatedHistogram metric = new EstimatedHistogram(counts);

    if (metric.isOverflowed())
    {
        System.err.println(String.format("EstimatedHistogram overflowed larger than %s, unable to calculate percentiles", metric.getLargestBucketOffset()));
        // Idiomatic replacement for the manual fill loop; only the percentile
        // slots need NaN — result[5] and result[6] are assigned below.
        Arrays.fill(result, 0, offsetPercentiles.length, Double.NaN);
    }
    else
    {
        for (int i = 0; i < offsetPercentiles.length; i++)
            result[i] = metric.percentile(offsetPercentiles[i]);
    }

    result[5] = metric.min();
    result[6] = metric.max();
    return result;
}