/**
 * Gives estimated max size per aggregator. It is assumed that every aggregator will have enough overhead for its own
 * object header and for a pointer to a selector. We are adding an overhead-factor for each object as an additional
 * 16 bytes.
 * These 16 bytes (128 bits) are the object metadata for a 64-bit JVM process and consist of:
 * <ul>
 * <li>Class pointer which describes the object type: 64 bits</li>
 * <li>Flags which describe the state of the object including hashcode: 64 bits</li>
 * </ul>
 * The total size estimation consists of:
 * <ul>
 * <li>metrics length: {@code Integer.BYTES * len}</li>
 * <li>maxAggregatorIntermediateSize: {@code getMaxIntermediateSizeWithNulls()} per aggregator + overhead-factor (16 bytes)</li>
 * </ul>
 *
 * @param incrementalIndexSchema schema whose aggregator factories are measured
 *
 * @return long max aggregator size in bytes
 */
private static long getMaxBytesPerRowForAggregators(IncrementalIndexSchema incrementalIndexSchema)
{
  // 16-byte per-aggregator object overhead described in the Javadoc above:
  // class pointer (8 bytes) + flags/mark word (8 bytes) on a 64-bit JVM.
  final long aggregatorObjectOverhead = 2L * Long.BYTES;

  // Widen to long BEFORE multiplying so the per-metric bookkeeping term cannot
  // overflow int arithmetic for very large metric counts.
  long maxAggregatorIntermediateSize = (long) Integer.BYTES * incrementalIndexSchema.getMetrics().length;
  maxAggregatorIntermediateSize += Arrays.stream(incrementalIndexSchema.getMetrics())
                                         .mapToLong(aggregator -> aggregator.getMaxIntermediateSizeWithNulls()
                                                                  + aggregatorObjectOverhead)
                                         .sum();
  return maxAggregatorIntermediateSize;
}
// Copy schema-derived configuration into this index's own fields.
this.rollup = incrementalIndexSchema.isRollup();
this.virtualColumns = incrementalIndexSchema.getVirtualColumns();
this.metrics = incrementalIndexSchema.getMetrics();
// CopyOnWriteArrayList: presumably transformers can be registered while the list is being
// iterated by ingesting threads, so a snapshot-on-write list is used — TODO confirm with callers.
this.rowTransformers = new CopyOnWriteArrayList<>();
// Whether complex (non-primitive) metric values should be deserialized on ingest.
this.deserializeComplexMetrics = deserializeComplexMetrics;
Iterables.toArray( Iterables.transform( Arrays.asList(schema.getMetrics()), new Function<AggregatorFactory, AggregatorFactory>()
/**
 * Gives estimated max size per aggregator. It is assumed that every aggregator will have enough overhead for its own
 * object header and for a pointer to a selector. We are adding an overhead-factor for each object as an additional
 * 16 bytes.
 * These 16 bytes (128 bits) are the object metadata for a 64-bit JVM process and consist of:
 * <ul>
 * <li>Class pointer which describes the object type: 64 bits</li>
 * <li>Flags which describe the state of the object including hashcode: 64 bits</li>
 * </ul>
 * The total size estimation consists of:
 * <ul>
 * <li>metrics length: {@code Integer.BYTES * len}</li>
 * <li>maxAggregatorIntermediateSize: {@code getMaxIntermediateSizeWithNulls()} per aggregator + overhead-factor (16 bytes)</li>
 * </ul>
 *
 * @param incrementalIndexSchema schema whose aggregator factories are measured
 *
 * @return long max aggregator size in bytes
 */
private static long getMaxBytesPerRowForAggregators(IncrementalIndexSchema incrementalIndexSchema)
{
  // 16-byte per-aggregator object overhead described in the Javadoc above:
  // class pointer (8 bytes) + flags/mark word (8 bytes) on a 64-bit JVM.
  final long aggregatorObjectOverhead = 2L * Long.BYTES;

  // Widen to long BEFORE multiplying so the per-metric bookkeeping term cannot
  // overflow int arithmetic for very large metric counts.
  long maxAggregatorIntermediateSize = (long) Integer.BYTES * incrementalIndexSchema.getMetrics().length;
  maxAggregatorIntermediateSize += Arrays.stream(incrementalIndexSchema.getMetrics())
                                         .mapToLong(aggregator -> aggregator.getMaxIntermediateSizeWithNulls()
                                                                  + aggregatorObjectOverhead)
                                         .sum();
  return maxAggregatorIntermediateSize;
}
// Copy schema-derived configuration into this index's own fields.
this.rollup = incrementalIndexSchema.isRollup();
this.virtualColumns = incrementalIndexSchema.getVirtualColumns();
this.metrics = incrementalIndexSchema.getMetrics();
// CopyOnWriteArrayList: presumably transformers can be registered while the list is being
// iterated by ingesting threads, so a snapshot-on-write list is used — TODO confirm with callers.
this.rowTransformers = new CopyOnWriteArrayList<>();
// Whether complex (non-primitive) metric values should be deserialized on ingest.
this.deserializeComplexMetrics = deserializeComplexMetrics;