/**
 * Creates a {@link PagingSpec} with no paging identifiers and the given row threshold.
 *
 * @param threshold maximum number of rows the query should return
 * @return a fresh spec whose paging identifiers are unset
 */
public static PagingSpec newSpec(int threshold)
{
  return new PagingSpec(null, threshold);
}
+ query.getPagingSpec().getCacheKey().length + dimensionsBytesSize + metricBytesSize .put(granularityBytes) .put(filterBytes) .put(query.getPagingSpec().getCacheKey()) .put(isDescendingByte);
/**
 * Resolves the paging offset for the given segment identifier, taking the
 * query's scan direction into account.
 */
public PagingOffset getPagingOffset(String identifier)
{
  final boolean descending = isDescending();
  return pagingSpec.getOffset(identifier, descending);
}
/**
 * Checks that the paging spec is consistent with the query direction: descending
 * queries must carry negative offsets, ascending queries non-negative ones, and
 * the threshold must not be negative.
 */
private boolean checkPagingSpec(PagingSpec pagingSpec, boolean descending)
{
  for (Integer offset : pagingSpec.getPagingIdentifiers().values()) {
    final boolean offsetIsNegative = offset < 0;
    // offset sign must agree with scan direction
    if (offsetIsNegative != descending) {
      return false;
    }
  }
  return pagingSpec.getThreshold() >= 0;
}
/**
 * Builds the paging spec for the next cursor from merged per-segment identifiers.
 * When {@code fromNext} is unset the offsets are advanced here via
 * {@link PagingSpec#next}; otherwise the engine advances them and the
 * identifiers are passed through as-is.
 */
private PagingSpec toNextCursor(Map<String, Integer> merged, SelectQuery query, int threshold)
{
  final Map<String, Integer> identifiers =
      fromNext ? merged : PagingSpec.next(merged, query.isDescending());
  return new PagingSpec(identifiers, threshold, fromNext);
}
}
/**
 * Benchmarks a paginated Select query over a persisted queryable index, issuing
 * successive queries (advancing the paging spec each round) until a page comes
 * back empty.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryQueryableIndex(Blackhole blackhole)
{
  SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

  SegmentId segmentId = SegmentId.dummy("qIndex");
  QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
      factory,
      segmentId,
      new QueryableIndexSegment(qIndexes.get(0), segmentId)
  );

  boolean done = false;
  while (!done) {
    List<Result<SelectResultValue>> results = SelectBenchmark.runQuery(factory, runner, queryCopy);
    SelectResultValue result = results.get(0).getValue();

    // an empty page signals the cursor is exhausted
    if (result.getEvents().isEmpty()) {
      done = true;
    } else {
      for (EventHolder eh : result.getEvents()) {
        blackhole.consume(eh);
      }
      queryCopy = incrementQueryPagination(queryCopy, result);
    }
  }
}
/**
 * Verifies that paging identifiers produced by a Select over a union datasource
 * can be fed back into a follow-up query (via {@link PagingSpec#merge} and
 * {@code toNextCursor}) without error.
 */
@Test
public void testPagingIdentifiersForUnionDatasource()
{
  Druids.SelectQueryBuilder selectQueryBuilder = Druids
      .newSelectQueryBuilder()
      .dataSource(
          new UnionDataSource(
              ImmutableList.of(
                  new TableDataSource(QueryRunnerTestHelper.dataSource),
                  new TableDataSource("testing-2")
              )
          )
      )
      .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.dimensions))
      .pagingSpec(PagingSpec.newSpec(3));

  SelectQuery query = selectQueryBuilder.build();
  // parameterized instead of the raw QueryRunner type so the result element type is checked
  QueryRunner<Result<SelectResultValue>> unionQueryRunner = new UnionQueryRunner<>(runner);

  List<Result<SelectResultValue>> results =
      unionQueryRunner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
  Map<String, Integer> pagingIdentifiers = results.get(0).getValue().getPagingIdentifiers();

  // round-trip: advance the cursor and run the next page
  query = query.withPagingSpec(
      toNextCursor(PagingSpec.merge(Collections.singletonList(pagingIdentifiers)), query, 3)
  );
  unionQueryRunner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
}
/**
 * Builds the bounded priority queue of events for this result builder.
 * A non-positive threshold means "unbounded" (capped at Integer.MAX_VALUE);
 * descending queries invert the event ordering.
 */
@Override
protected Queue<EventHolder> instantiatePQueue()
{
  final int threshold = pagingSpec.getThreshold();
  final int capacity = threshold > 0 ? threshold : Integer.MAX_VALUE;
  return MinMaxPriorityQueue
      .orderedBy(descending ? comparator.reversed() : comparator)
      .maximumSize(capacity)
      .create();
}
Map<String, Integer> paging = pagingSpec.getPagingIdentifiers(); if (paging == null || paging.isEmpty()) { return segments;
@Test public void testSequentialPaging() { int[] asc = {2, 5, 8, 11, 14, 17, 20, 23, 25}; int[] dsc = {-3, -6, -9, -12, -15, -18, -21, -24, -26}; int[] expected = descending ? dsc : asc; SelectQuery query = newTestQuery().intervals(I_0112_0114_SPEC).build(); for (int offset : expected) { List<Result<SelectResultValue>> results = runner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList(); Assert.assertEquals(1, results.size()); SelectResultValue result = results.get(0).getValue(); Map<String, Integer> pagingIdentifiers = result.getPagingIdentifiers(); Assert.assertEquals(offset, pagingIdentifiers.get(SEGMENT_ID_I_0112_0114.toString()).intValue()); Map<String, Integer> next = PagingSpec.next(pagingIdentifiers, descending); query = query.withPagingSpec(new PagingSpec(next, 3, false)); } query = newTestQuery().intervals(I_0112_0114_SPEC).build(); for (int offset : expected) { List<Result<SelectResultValue>> results = runner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList(); Assert.assertEquals(1, results.size()); SelectResultValue result = results.get(0).getValue(); Map<String, Integer> pagingIdentifiers = result.getPagingIdentifiers(); Assert.assertEquals(offset, pagingIdentifiers.get(SEGMENT_ID_I_0112_0114.toString()).intValue()); // use identifier as-is but with fromNext=true query = query.withPagingSpec(new PagingSpec(pagingIdentifiers, 3, true)); } }
/**
 * Benchmarks a paginated Select query over an in-memory incremental index,
 * issuing successive queries (advancing the paging spec each round) until a
 * page comes back empty.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryIncrementalIndex(Blackhole blackhole)
{
  SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));

  SegmentId segmentId = SegmentId.dummy("incIndex");
  // element type aligned with the Select results consumed below (the sibling
  // queryable-index benchmark uses the same parameterization)
  QueryRunner<Result<SelectResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
      factory,
      segmentId,
      new IncrementalIndexSegment(incIndexes.get(0), segmentId)
  );

  boolean done = false;
  while (!done) {
    List<Result<SelectResultValue>> results = SelectBenchmark.runQuery(factory, runner, queryCopy);
    SelectResultValue result = results.get(0).getValue();

    // an empty page signals the cursor is exhausted
    if (result.getEvents().isEmpty()) {
      done = true;
    } else {
      for (EventHolder eh : result.getEvents()) {
        blackhole.consume(eh);
      }
      queryCopy = incrementQueryPagination(queryCopy, result);
    }
  }
}
/**
 * Checks that the paging spec is consistent with the query direction: every offset's
 * sign must match the scan direction (negative for descending, non-negative for
 * ascending — the XOR is true exactly when they disagree), and the threshold must
 * not be negative.
 */
private boolean checkPagingSpec(PagingSpec pagingSpec, boolean descending) { for (Integer value : pagingSpec.getPagingIdentifiers().values()) { if (descending ^ (value < 0)) { return false; } } return pagingSpec.getThreshold() >= 0; }
/**
 * Builds the bounded priority queue of events for this result builder.
 * A non-positive threshold means "unbounded" (capped at Integer.MAX_VALUE);
 * descending queries invert the event ordering.
 */
@Override
protected Queue<EventHolder> instantiatePQueue()
{
  int threshold = pagingSpec.getThreshold();
  // Comparator.reversed() (standard since Java 8) replaces the Comparators.inverse
  // helper, matching the sibling implementation of this method.
  return MinMaxPriorityQueue.orderedBy(descending ? comparator.reversed() : comparator)
      .maximumSize(threshold > 0 ? threshold : Integer.MAX_VALUE)
      .create();
}
Map<String, Integer> paging = pagingSpec.getPagingIdentifiers(); if (paging == null || paging.isEmpty()) { return segments;
/**
 * Don't run this benchmark with a query that doesn't use {@link Granularities#ALL},
 * this pagination function probably doesn't work correctly in that case.
 *
 * Advances every per-segment paging offset by one and installs the result as the
 * query's new paging spec.
 */
private SelectQuery incrementQueryPagination(SelectQuery query, SelectResultValue prevResult)
{
  Map<String, Integer> pagingIdentifiers = prevResult.getPagingIdentifiers();
  // iterate entries (not keySet + get) to avoid a second lookup per key; presize the copy
  Map<String, Integer> newPagingIdentifiers = new HashMap<>(pagingIdentifiers.size());
  for (Map.Entry<String, Integer> entry : pagingIdentifiers.entrySet()) {
    newPagingIdentifiers.put(entry.getKey(), entry.getValue() + 1);
  }
  return query.withPagingSpec(new PagingSpec(newPagingIdentifiers, pagingThreshold));
}
queryBuilder.pagingSpec(PagingSpec.newSpec(pagingThreshold)); query = queryBuilder.build();
/**
 * Resolves the paging offset for the given segment identifier, taking the
 * query's scan direction into account.
 */
public PagingOffset getPagingOffset(String identifier) { return pagingSpec.getOffset(identifier, isDescending()); }
+ query.getPagingSpec().getCacheKey().length + dimensionsBytesSize + metricBytesSize .put(granularityBytes) .put(filterBytes) .put(query.getPagingSpec().getCacheKey()) .put(isDescendingByte);
final PagingSpec pagingSpec = new PagingSpec(null, threshold);
public void queryMultiQueryableIndex(Blackhole blackhole) SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));