public TimeseriesQuery build()
{
  return new TimeseriesQuery(
      dataSource,
      querySegmentSpec,
      descending,
      virtualColumns,
      dimFilter,
      granularity,
      aggregatorSpecs,
      postAggregatorSpecs,
      limit,
      context
  );
}
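// A minimal usage sketch (not from this file): building a TimeseriesQuery through
// Druids.newTimeseriesQueryBuilder(). The datasource name and interval below are
// illustrative, and the usual org.apache.druid.* imports are assumed.
TimeseriesQuery q = Druids.newTimeseriesQueryBuilder()
    .dataSource("wikipedia")             // hypothetical datasource
    .intervals("2020-01-01/2020-01-02")  // illustrative interval
    .granularity(Granularities.HOUR)
    .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
    .build();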
public static TimeseriesQueryBuilder copy(TimeseriesQuery query)
{
  return new TimeseriesQueryBuilder()
      .dataSource(query.getDataSource())
      .intervals(query.getQuerySegmentSpec())
      .descending(query.isDescending())
      .virtualColumns(query.getVirtualColumns())
      .filters(query.getDimensionsFilter())
      .granularity(query.getGranularity())
      .aggregators(query.getAggregatorSpecs())
      .postAggregators(query.getPostAggregatorSpecs())
      .limit(query.getLimit())
      .context(query.getContext());
}
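// Sketch: copy() enables "rebuild with one field changed" patterns, since TimeseriesQuery
// itself is immutable. `original` is assumed to be a TimeseriesQuery already in scope.
TimeseriesQuery limited = Druids.TimeseriesQueryBuilder.copy(original)
    .limit(1000)
    .build();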
@Override
public QueryRunner<Result<TimeseriesResultValue>> preMergeQueryDecoration(final QueryRunner<Result<TimeseriesResultValue>> runner)
{
  return intervalChunkingQueryRunnerDecorator.decorate(
      (queryPlus, responseContext) -> {
        TimeseriesQuery timeseriesQuery = (TimeseriesQuery) queryPlus.getQuery();
        if (timeseriesQuery.getDimensionsFilter() != null) {
          // Optimize the dimension filter before handing the query to the wrapped runner.
          timeseriesQuery = timeseriesQuery.withDimFilter(timeseriesQuery.getDimensionsFilter().optimize());
          queryPlus = queryPlus.withQuery(timeseriesQuery);
        }
        return runner.run(queryPlus, responseContext);
      },
      this
  );
}
@Override
public String toString()
{
  return "TimeseriesQuery{" +
         "dataSource='" + getDataSource() + '\'' +
         ", querySegmentSpec=" + getQuerySegmentSpec() +
         ", descending=" + isDescending() +
         ", virtualColumns=" + virtualColumns +
         ", dimFilter=" + dimFilter +
         ", granularity='" + getGranularity() + '\'' +
         ", aggregatorSpecs=" + aggregatorSpecs +
         ", postAggregatorSpecs=" + postAggregatorSpecs +
         ", limit=" + limit +
         ", context=" + getContext() +
         '}';
}
@Override
protected BinaryFn<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> createMergeFn(
    Query<Result<TimeseriesResultValue>> input
)
{
  TimeseriesQuery query = (TimeseriesQuery) input;
  // Merge pairs of per-segment results bucket by bucket, combining metrics with the query's aggregators.
  return new TimeseriesBinaryFn(query.getGranularity(), query.getAggregatorSpecs());
}
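// Sketch of what the merge fn does: two results falling in the same granularity bucket
// are combined metric by metric via AggregatorFactory.combine(). `r1`, `r2`, and the
// `aggs` list are assumed to be in scope; this is illustrative, not from this file.
TimeseriesBinaryFn mergeFn = new TimeseriesBinaryFn(Granularities.ALL, aggs);
Result<TimeseriesResultValue> merged = mergeFn.apply(r1, r2);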
@Override
public byte[] computeCacheKey(TimeseriesQuery query)
{
  return new CacheKeyBuilder(TIMESERIES_QUERY)
      .appendBoolean(query.isDescending())
      .appendBoolean(query.isSkipEmptyBuckets())
      .appendCacheable(query.getGranularity())
      .appendCacheable(query.getDimensionsFilter())
      .appendCacheables(query.getAggregatorSpecs())
      .appendCacheable(query.getVirtualColumns())
      .appendInt(query.getLimit())
      .build();
}
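// Sketch: fields not appended above (such as most of the query context) do not affect
// the cache key, so two queries differing only in, say, timeout should collide on the
// same key. `strategy` is assumed to come from toolChest.getCacheStrategy(query).
byte[] key1 = strategy.computeCacheKey(query);
byte[] key2 = strategy.computeCacheKey(query.withOverriddenContext(ImmutableMap.of("timeout", 1000)));
assert Arrays.equals(key1, key2);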
// Fragment of the engine's cursor-based processing: trailing arguments to a
// cursor-based query over the query's intervals, followed by the anonymous per-cursor
// Function whose fields capture query settings. (The leading call and the rest of the
// method body are elided in this excerpt.)
    query.getQuerySegmentSpec().getIntervals(),
    filter,
    query.getVirtualColumns(),
    query.isDescending(),
    query.getGranularity(),
    new Function<Cursor, Result<TimeseriesResultValue>>()
    {
      private final boolean skipEmptyBuckets = query.isSkipEmptyBuckets();
      private final List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
      // ...
if (query.getGranularity().equals(Granularities.ALL) && !query.isSkipEmptyBuckets()) {
  // (Granularity ALL handling elided in this excerpt: an empty result set is replaced
  // with a single "null" row so the query still returns one bucket.)
}

if (query.isGrandTotal()) {
  // Accumulate a running grand total per aggregator while the sequence is consumed.
  final Object[] grandTotals = new Object[query.getAggregatorSpecs().size()];
  final Sequence<Result<TimeseriesResultValue>> mappedSequence = Sequences.map(
      finalSequence,
      resultValue -> {
        for (int i = 0; i < query.getAggregatorSpecs().size(); i++) {
          final AggregatorFactory aggregatorFactory = query.getAggregatorSpecs().get(i);
          final Object value = resultValue.getValue().getMetric(aggregatorFactory.getName());
          if (grandTotals[i] == null) {
            grandTotals[i] = value;
          } else {
            grandTotals[i] = aggregatorFactory.combine(grandTotals[i], value);
          }
        }
        return resultValue;
      }
  );

  // Once the mapped sequence has been consumed, emit one extra row carrying the totals.
  final Map<String, Object> totalsMap = new HashMap<>();
  for (int i = 0; i < query.getAggregatorSpecs().size(); i++) {
    totalsMap.put(query.getAggregatorSpecs().get(i).getName(), grandTotals[i]);
  }
  // (Construction of the grand-total Result and its concatenation onto the sequence
  // is elided in this excerpt.)
}
private final List<AggregatorFactory> aggs = query.getAggregatorSpecs();
public Sequence<Result<TimeseriesResultValue>> process(final TimeseriesQuery query, final StorageAdapter adapter)
{
  if (adapter == null) {
    throw new SegmentMissingException(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
    );
  }

  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
  final int limit = query.getLimit();
  Sequence<Result<TimeseriesResultValue>> result = generateTimeseriesResult(adapter, query, filter);
  // A limit of Integer.MAX_VALUE means "no limit", so only truncate when one was set.
  if (limit < Integer.MAX_VALUE) {
    return result.limit(limit);
  }
  return result;
}
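// Sketch: process() is invoked once per segment with that segment's StorageAdapter, so
// a memory-unmapped segment surfaces as a SegmentMissingException rather than a silent
// empty result. `engine`, `query`, and `segment` are assumed to be in scope.
Sequence<Result<TimeseriesResultValue>> perSegment =
    engine.process(query, segment.asStorageAdapter());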
private Result<TimeseriesResultValue> getNullTimeseriesResultValue(TimeseriesQuery query)
{
  List<AggregatorFactory> aggregatorSpecs = query.getAggregatorSpecs();
  Aggregator[] aggregators = new Aggregator[aggregatorSpecs.size()];
  String[] aggregatorNames = new String[aggregatorSpecs.size()];
  for (int i = 0; i < aggregatorSpecs.size(); i++) {
    // Factorize each aggregator against an empty row so it reports its initial ("null") value.
    aggregators[i] = aggregatorSpecs.get(i)
                                    .factorize(RowBasedColumnSelectorFactory.create(() -> new MapBasedRow(null, null), null));
    aggregatorNames[i] = aggregatorSpecs.get(i).getName();
  }
  final DateTime start = query.getIntervals().isEmpty() ? DateTimes.EPOCH : query.getIntervals().get(0).getStart();
  TimeseriesResultBuilder bob = new TimeseriesResultBuilder(start);
  for (int i = 0; i < aggregatorSpecs.size(); i++) {
    bob.addMetric(aggregatorNames[i], aggregators[i]);
    aggregators[i].close();
  }
  return bob.build();
}
private Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn(
    final TimeseriesQuery query,
    final MetricManipulationFn fn,
    final boolean calculatePostAggs
)
{
  return result -> {
    final TimeseriesResultValue holder = result.getValue();
    final Map<String, Object> values = new HashMap<>(holder.getBaseObject());
    if (calculatePostAggs && !query.getPostAggregatorSpecs().isEmpty()) {
      // Put non-finalized aggregators first so dependent post-aggregators can be computed.
      for (AggregatorFactory agg : query.getAggregatorSpecs()) {
        values.put(agg.getName(), holder.getMetric(agg.getName()));
      }
      for (PostAggregator postAgg : query.getPostAggregatorSpecs()) {
        values.put(postAgg.getName(), postAgg.compute(values));
      }
    }
    for (AggregatorFactory agg : query.getAggregatorSpecs()) {
      values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName())));
    }
    return new Result<>(result.getTimestamp(), new TimeseriesResultValue(values));
  };
}
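// Sketch: a MetricManipulationFn that finalizes each aggregator's intermediate value
// (e.g. turning a HyperLogLog sketch into a number). Applied through the manipulator
// above, it transforms every aggregated metric in a result row.
MetricManipulationFn finalizing = (factory, object) -> factory.finalizeComputation(object);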
@Override
public TimeseriesQuery withOverriddenContext(Map<String, Object> contextOverrides)
{
  Map<String, Object> newContext = computeOverriddenContext(getContext(), contextOverrides);
  return Druids.TimeseriesQueryBuilder.copy(this).context(newContext).build();
}
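// Sketch: overriding context rebuilds the query via the builder, leaving the original
// untouched. Here the (real) skipEmptyBuckets context flag is flipped on a copy.
TimeseriesQuery noEmpties = query.withOverriddenContext(ImmutableMap.of("skipEmptyBuckets", true));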
private Sequence<Result<TimeseriesResultValue>> toFilteredQueryableTimeseriesResults(
    TimeseriesQuery query,
    List<SegmentId> segmentIds,
    List<Interval> queryIntervals,
    List<Iterable<Result<TimeseriesResultValue>>> results
)
{
  MultipleSpecificSegmentSpec spec = (MultipleSpecificSegmentSpec) query.getQuerySegmentSpec();
  List<Result<TimeseriesResultValue>> ret = new ArrayList<>();
  for (SegmentDescriptor descriptor : spec.getDescriptors()) {
    SegmentId id = SegmentId.dummy(
        StringUtils.format("%s_%s", queryIntervals.indexOf(descriptor.getInterval()), descriptor.getPartitionNumber())
    );
    int index = segmentIds.indexOf(id);
    if (index != -1) {
      // Wrap each segment's results in a BySegmentResultValueClass keyed by segment id and interval.
      Result result = new Result(
          results.get(index).iterator().next().getTimestamp(),
          new BySegmentResultValueClass(
              Lists.newArrayList(results.get(index)),
              id.toString(),
              descriptor.getInterval()
          )
      );
      ret.add(result);
    } else {
      throw new ISE("Descriptor %s not found in server", id);
    }
  }
  return Sequences.simple(ret);
}
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryFilteredSingleQueryableIndex(Blackhole blackhole)
{
  final QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
      factory,
      SegmentId.dummy("qIndex"),
      new QueryableIndexSegment(qIndexes.get(0), SegmentId.dummy("qIndex"))
  );

  DimFilter filter = new SelectorDimFilter("dimSequential", "399", null);
  Query filteredQuery = query.withDimFilter(filter);

  List<Result<TimeseriesResultValue>> results = TimeseriesBenchmark.runQuery(factory, runner, filteredQuery);
  for (Result<TimeseriesResultValue> result : results) {
    blackhole.consume(result);
  }
}
public boolean isSkipEmptyBuckets()
{
  return getContextBoolean(SKIP_EMPTY_BUCKETS, false);
}
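// Sketch: the flag lives in the query context (default false), so it is toggled via
// context rather than a dedicated setter. `query` is an existing TimeseriesQuery.
boolean skips = query.withOverriddenContext(ImmutableMap.of("skipEmptyBuckets", true))
                     .isSkipEmptyBuckets();  // true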