// Aggregator factories captured from the query at construction time, so later
// per-row/merge operations don't re-fetch the list.
// NOTE(review): relies on an enclosing-scope `query` reference that is not
// visible in this chunk — confirm it is a constructor parameter or field.
private final List<AggregatorFactory> aggs = query.getAggregatorSpecs();
/**
 * Records the number of aggregators in the query as the "numMetrics" metric dimension.
 */
@Override
public void numMetrics(TimeseriesQuery query)
{
  final int metricCount = query.getAggregatorSpecs().size();
  setDimension("numMetrics", String.valueOf(metricCount));
}
/**
 * Records how many of the query's aggregators are complex (non-primitive) as the
 * "numComplexMetrics" metric dimension.
 */
@Override
public void numComplexMetrics(TimeseriesQuery query)
{
  final int complexCount = DruidMetrics.findNumComplexAggs(query.getAggregatorSpecs());
  setDimension("numComplexMetrics", Integer.toString(complexCount));
}
/**
 * Builds the binary function used to merge two partial timeseries results for the
 * same time bucket: values are combined per-aggregator at the query's granularity.
 */
@Override
protected BinaryFn<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> createMergeFn(
    Query<Result<TimeseriesResultValue>> input
)
{
  TimeseriesQuery query = (TimeseriesQuery) input;
  return new TimeseriesBinaryFn(
      query.getGranularity(),
      query.getAggregatorSpecs()
  );
}
}; // NOTE(review): closes an enclosing anonymous class whose opening is outside this chunk
/**
 * Builds an "empty" timeseries result row: each aggregator is factorized against a
 * row source that supplies no data, so every metric takes its aggregator's initial
 * (empty-input) value. Used when a result row is required even though the query
 * matched no rows.
 *
 * @param query the timeseries query whose aggregators define the output metrics
 * @return a result timestamped at the start of the query's first interval, or at
 *         the epoch when the query has no intervals
 */
private Result<TimeseriesResultValue> getNullTimeseriesResultValue(TimeseriesQuery query)
{
  List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
  Aggregator[] aggregators = new Aggregator[aggregatorSpecs.size()];
  String[] aggregatorNames = new String[aggregatorSpecs.size()];
  for (int i = 0; i < aggregatorSpecs.size(); i++) {
    // A (null, null) row provides no input, leaving the aggregator at its initial state.
    aggregators[i] = aggregatorSpecs.get(i)
                                    .factorize(RowBasedColumnSelectorFactory.create(
                                        () -> new MapBasedRow(null, null),
                                        null
                                    ));
    aggregatorNames[i] = aggregatorSpecs.get(i).getName();
  }
  final DateTime start = query.getIntervals().isEmpty()
                         ? DateTimes.EPOCH
                         : query.getIntervals().get(0).getStart();
  TimeseriesResultBuilder bob = new TimeseriesResultBuilder(start);
  for (int i = 0; i < aggregatorSpecs.size(); i++) {
    // FIX: extract the computed value with get() before closing. The original passed
    // the Aggregator object itself as the metric value, which stores the aggregator
    // instance (not its result) in the row — then closed it.
    bob.addMetric(aggregatorNames[i], aggregators[i].get());
    aggregators[i].close();
  }
  return bob.build();
}
} else if (query instanceof TimeseriesQuery) {
  // Timeseries: the input fields of every aggregator are the required columns.
  TimeseriesQuery q = (TimeseriesQuery) query;
  dimensions.addAll(extractFieldsFromAggregations(q.getAggregatorSpecs()));
} else if (query instanceof GroupByQuery) {
  // NOTE(review): fragment is truncated here — the GroupByQuery branch body
  // continues outside this chunk.
  GroupByQuery q = (GroupByQuery) query;
// Accumulates a per-aggregator grand total while the result sequence streams through.
final Object[] grandTotals = new Object[query.getAggregatorSpecs().size()];
final Sequence<Result<TimeseriesResultValue>> mappedSequence = Sequences.map(
    finalSequence,
    resultValue -> {
      for (int i = 0; i < query.getAggregatorSpecs().size(); i++) {
        final AggregatorFactory aggregatorFactory = query.getAggregatorSpecs().get(i);
        final Object value = resultValue.getValue().getMetric(aggregatorFactory.getName());
        if (grandTotals[i] == null) {
          final Map<String, Object> totalsMap = new HashMap<>();
          // NOTE(review): this inner loop redeclares `i` inside the outer loop over `i`,
          // which does not compile in Java. The fragment appears truncated/garbled:
          // building the totals map normally happens AFTER the sequence has been fully
          // consumed, in a separate loop — verify against the full file before relying
          // on this chunk.
          for (int i = 0; i < query.getAggregatorSpecs().size(); i++) {
            totalsMap.put(query.getAggregatorSpecs().get(i).getName(), grandTotals[i]);
/**
 * Returns a function that post-processes one timeseries result row: optionally
 * computes post-aggregators from the raw (non-finalized) aggregator values, then
 * applies {@code fn} to each aggregator value. Note the ordering is significant:
 * raw values are put first so post-aggregators can depend on them, and the
 * manipulated values overwrite the raw ones afterwards.
 */
private Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn(
    final TimeseriesQuery query,
    final MetricManipulationFn fn,
    final boolean calculatePostAggs
)
{
  return result -> {
    final TimeseriesResultValue holder = result.getValue();
    final Map<String, Object> values = new HashMap<>(holder.getBaseObject());
    if (calculatePostAggs && !query.getPostAggregatorSpecs().isEmpty()) {
      // put non finalized aggregators for calculating dependent post Aggregators
      for (AggregatorFactory agg : query.getAggregatorSpecs()) {
        values.put(agg.getName(), holder.getMetric(agg.getName()));
      }
      for (PostAggregator postAgg : query.getPostAggregatorSpecs()) {
        values.put(postAgg.getName(), postAgg.compute(values));
      }
    }
    // Manipulated (e.g. finalized) aggregator values overwrite the raw ones above.
    for (AggregatorFactory agg : query.getAggregatorSpecs()) {
      values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName())));
    }
    return new Result<>(
        result.getTimestamp(),
        new TimeseriesResultValue(values)
    );
  };
}
} // NOTE(review): closes an enclosing scope (likely the class) opened outside this chunk
// Snapshot of the query's aggregator factories, taken once at construction.
// NOTE(review): `query` must be an enclosing-scope field/parameter — it is not
// visible in this chunk.
private final List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
/**
 * Computes the per-segment cache key for a timeseries query. Every field appended
 * here can change segment-level results: descending order, skip-empty-buckets,
 * granularity, filter, aggregators, virtual columns, and limit.
 * NOTE(review): post-aggregators are not part of the key — presumably because they
 * are recomputed from cached aggregator values at query time; confirm this matches
 * the cache strategy elsewhere in the toolchest.
 */
@Override
public byte[] computeCacheKey(TimeseriesQuery query)
{
  return new CacheKeyBuilder(TIMESERIES_QUERY)
      .appendBoolean(query.isDescending())
      .appendBoolean(query.isSkipEmptyBuckets())
      .appendCacheable(query.getGranularity())
      .appendCacheable(query.getDimensionsFilter())
      .appendCacheables(query.getAggregatorSpecs())
      .appendCacheable(query.getVirtualColumns())
      .appendInt(query.getLimit())
      .build();
}
/**
 * Creates a builder pre-populated with every field of the given query, so callers
 * can override individual fields and rebuild.
 */
public static TimeseriesQueryBuilder copy(TimeseriesQuery query)
{
  TimeseriesQueryBuilder builder = new TimeseriesQueryBuilder();
  builder = builder.dataSource(query.getDataSource());
  builder = builder.intervals(query.getQuerySegmentSpec());
  builder = builder.descending(query.isDescending());
  builder = builder.virtualColumns(query.getVirtualColumns());
  builder = builder.filters(query.getDimensionsFilter());
  builder = builder.granularity(query.getGranularity());
  builder = builder.aggregators(query.getAggregatorSpecs());
  builder = builder.postAggregators(query.getPostAggregatorSpecs());
  builder = builder.limit(query.getLimit());
  builder = builder.context(query.getContext());
  return builder;
}
// Aggregator factories captured from the query once, avoiding repeated lookups.
// NOTE(review): depends on an enclosing-scope `query` reference not visible here.
private final List<AggregatorFactory> aggs = query.getAggregatorSpecs();
/**
 * Emits the aggregator count of the query as the "numMetrics" dimension.
 */
@Override
public void numMetrics(TimeseriesQuery query)
{
  setDimension("numMetrics", Integer.toString(query.getAggregatorSpecs().size()));
}
/**
 * Emits the count of complex aggregators in the query as the "numComplexMetrics"
 * dimension.
 */
@Override
public void numComplexMetrics(TimeseriesQuery query)
{
  final List<AggregatorFactory> specs = query.getAggregatorSpecs();
  setDimension("numComplexMetrics", String.valueOf(DruidMetrics.findNumComplexAggs(specs)));
}
/**
 * Produces the pairwise merge function for timeseries results: two partial results
 * for the same bucket are combined aggregator-by-aggregator at the query granularity.
 */
@Override
protected BinaryFn<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> createMergeFn(
    Query<Result<TimeseriesResultValue>> input
)
{
  final TimeseriesQuery timeseriesQuery = (TimeseriesQuery) input;
  return new TimeseriesBinaryFn(timeseriesQuery.getGranularity(), timeseriesQuery.getAggregatorSpecs());
}
}; // closes the enclosing anonymous class; its opening is outside this chunk
/**
 * Produces the result row for a query that matched no data: every aggregator is
 * factorized over an empty row source, so each metric carries the aggregator's
 * initial (empty-input) value.
 *
 * @param query the timeseries query whose aggregators define the output metrics
 * @return a result timestamped at the start of the first query interval, or the
 *         epoch if the query has no intervals
 */
private Result<TimeseriesResultValue> getNullTimeseriesResultValue(TimeseriesQuery query)
{
  List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
  Aggregator[] aggregators = new Aggregator[aggregatorSpecs.size()];
  String[] aggregatorNames = new String[aggregatorSpecs.size()];
  for (int i = 0; i < aggregatorSpecs.size(); i++) {
    // The (null, null) row yields no input, so the aggregator stays at its initial state.
    aggregators[i] = aggregatorSpecs.get(i)
                                    .factorize(RowBasedColumnSelectorFactory.create(
                                        () -> new MapBasedRow(null, null),
                                        null
                                    ));
    aggregatorNames[i] = aggregatorSpecs.get(i).getName();
  }
  final DateTime start = query.getIntervals().isEmpty()
                         ? DateTimes.EPOCH
                         : query.getIntervals().get(0).getStart();
  TimeseriesResultBuilder bob = new TimeseriesResultBuilder(start);
  for (int i = 0; i < aggregatorSpecs.size(); i++) {
    // FIX: read the computed value with get() before close(); the original stored the
    // Aggregator instance itself as the metric value instead of its result.
    bob.addMetric(aggregatorNames[i], aggregators[i].get());
    aggregators[i].close();
  }
  return bob.build();
}
private Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn( final TimeseriesQuery query, final MetricManipulationFn fn, final boolean calculatePostAggs ) { return result -> { final TimeseriesResultValue holder = result.getValue(); final Map<String, Object> values = Maps.newHashMap(holder.getBaseObject()); if (calculatePostAggs && !query.getPostAggregatorSpecs().isEmpty()) { // put non finalized aggregators for calculating dependent post Aggregators for (AggregatorFactory agg : query.getAggregatorSpecs()) { values.put(agg.getName(), holder.getMetric(agg.getName())); } for (PostAggregator postAgg : query.getPostAggregatorSpecs()) { values.put(postAgg.getName(), postAgg.compute(values)); } } for (AggregatorFactory agg : query.getAggregatorSpecs()) { values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName()))); } return new Result<>( result.getTimestamp(), new TimeseriesResultValue(values) ); }; } }
// Snapshot of the query's aggregator factories taken at construction time.
// NOTE(review): `query` is an enclosing-scope reference not visible in this chunk.
private final List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
/**
 * Builds the per-segment cache key from every result-affecting field of the query:
 * descending flag, skip-empty-buckets, granularity, filter, aggregators, virtual
 * columns, and limit, in that fixed order.
 */
@Override
public byte[] computeCacheKey(TimeseriesQuery query)
{
  CacheKeyBuilder keyBuilder = new CacheKeyBuilder(TIMESERIES_QUERY);
  keyBuilder = keyBuilder.appendBoolean(query.isDescending());
  keyBuilder = keyBuilder.appendBoolean(query.isSkipEmptyBuckets());
  keyBuilder = keyBuilder.appendCacheable(query.getGranularity());
  keyBuilder = keyBuilder.appendCacheable(query.getDimensionsFilter());
  keyBuilder = keyBuilder.appendCacheables(query.getAggregatorSpecs());
  keyBuilder = keyBuilder.appendCacheable(query.getVirtualColumns());
  keyBuilder = keyBuilder.appendInt(query.getLimit());
  return keyBuilder.build();
}
/**
 * Returns a new builder initialized with all fields of {@code query}, allowing
 * callers to modify selected fields and rebuild an otherwise identical query.
 */
public static TimeseriesQueryBuilder copy(TimeseriesQuery query)
{
  return new TimeseriesQueryBuilder()
      .dataSource(query.getDataSource())
      .intervals(query.getQuerySegmentSpec())
      .descending(query.isDescending())
      .virtualColumns(query.getVirtualColumns())
      .filters(query.getDimensionsFilter())
      .granularity(query.getGranularity())
      .aggregators(query.getAggregatorSpecs())
      .postAggregators(query.getPostAggregatorSpecs())
      .limit(query.getLimit())
      .context(query.getContext());
}