/**
 * Computes summary statistics from a raw histogram bucket-count array.
 *
 * @param counts histogram bucket counts in the layout expected by
 *               {@link EstimatedHistogram}
 * @return a 7-element array laid out as
 *         {@code [p50, p75, p95, p98, p99, min, max]}. All seven slots are
 *         {@code NaN} when {@code counts} is empty; the five percentile slots
 *         are {@code NaN} when the histogram overflowed (min/max stay valid).
 */
public double[] metricPercentilesAsArray(long[] counts)
{
    // Slots 0-4 hold these percentiles; slot 5 = min, slot 6 = max.
    double[] offsetPercentiles = new double[] { 0.5, 0.75, 0.95, 0.98, 0.99 };
    double[] result = new double[7];

    if (isEmpty(counts))
    {
        Arrays.fill(result, Double.NaN);
        return result;
    }

    EstimatedHistogram metric = new EstimatedHistogram(counts);
    if (metric.isOverflowed())
    {
        System.err.println(String.format("EstimatedHistogram overflowed larger than %s, unable to calculate percentiles",
                                         metric.getLargestBucketOffset()));
        // Percentiles are meaningless once the histogram overflows; use the
        // same Arrays.fill idiom as the empty-input path above. min/max are
        // still valid and overwrite slots 5-6 below.
        Arrays.fill(result, Double.NaN);
    }
    else
    {
        for (int i = 0; i < offsetPercentiles.length; i++)
            result[i] = metric.percentile(offsetPercentiles[i]);
    }
    result[5] = metric.min();
    result[6] = metric.max();
    return result;
}
EstimatedHistogram columnCountHist = new EstimatedHistogram(estimatedColumnCount); if (partitionSizeHist.isOverflowed()) if (columnCountHist.isOverflowed())
EstimatedHistogram columnCountHist = new EstimatedHistogram(estimatedColumnCount); if (partitionSizeHist.isOverflowed()) if (columnCountHist.isOverflowed())
EstimatedHistogram columnCountHist = new EstimatedHistogram(estimatedColumnCount); if (partitionSizeHist.isOverflowed()) if (columnCountHist.isOverflowed())
long estimatedKeys = histogramCount > 0 && !sstableMetadata.estimatedPartitionSize.isOverflowed() ? histogramCount
long estimatedKeys = histogramCount > 0 && !sstableMetadata.estimatedPartitionSize.isOverflowed() ? histogramCount
long estimatedKeys = histogramCount > 0 && !sstableMetadata.estimatedPartitionSize.isOverflowed() ? histogramCount
long estimatedKeys = histogramCount > 0 && !sstableMetadata.estimatedPartitionSize.isOverflowed() ? histogramCount
long estimatedKeys = histogramCount > 0 && !sstableMetadata.estimatedRowSize.isOverflowed() ? histogramCount