/**
 * Returns the paging offset for the given segment identifier, honoring the
 * query's sort direction.
 *
 * @param identifier segment identifier to look up in the paging spec
 * @return the offset recorded for that identifier
 */
public PagingOffset getPagingOffset(String identifier)
{
  final boolean descending = isDescending();
  return pagingSpec.getOffset(identifier, descending);
}
/**
 * Builds the binary merge function used to combine per-segment select results.
 * The merge behavior is fully determined by the query's granularity, paging
 * spec, and sort direction.
 */
@Override
protected BinaryFn<Result<SelectResultValue>, Result<SelectResultValue>, Result<SelectResultValue>> createMergeFn(
    Query<Result<SelectResultValue>> input
)
{
  final SelectQuery selectQuery = (SelectQuery) input;
  return new SelectBinaryFn(
      selectQuery.getGranularity(),
      selectQuery.getPagingSpec(),
      selectQuery.isDescending()
  );
}
};
/**
 * Human-readable representation of the query; lists every field that defines it.
 */
@Override
public String toString()
{
  final StringBuilder sb = new StringBuilder("SelectQuery{");
  sb.append("dataSource='").append(getDataSource()).append('\'');
  sb.append(", querySegmentSpec=").append(getQuerySegmentSpec());
  sb.append(", descending=").append(isDescending());
  sb.append(", dimFilter=").append(dimFilter);
  sb.append(", granularity=").append(getGranularity());
  sb.append(", dimensions=").append(dimensions);
  sb.append(", metrics=").append(metrics);
  sb.append(", virtualColumns=").append(virtualColumns);
  sb.append(", pagingSpec=").append(pagingSpec);
  sb.append('}');
  return sb.toString();
}
.filter(identifier -> SegmentId.tryParse(dataSource, identifier) != null) .map(SegmentId.makeIntervalExtractor(dataSource)) .sorted(query.isDescending() ? Comparators.intervalsByEndThenStart() : Comparators.intervalsByStartThenEnd()) .forEach(interval -> { if (query.isDescending()) { long granularEnd = granularity.bucketStart(interval.getEnd()).getMillis(); Long currentEnd = granularThresholds.get(granularEnd); if (query.isDescending()) { while (it.hasNext()) { Interval interval = it.next().getInterval();
/**
 * Produces the paging spec for the next page of results.
 *
 * @param merged    merged paging identifiers from the current page
 * @param query     the query whose sort direction decides how offsets advance
 * @param threshold row threshold for the next page
 * @return a new PagingSpec pointing at the next cursor position
 */
private PagingSpec toNextCursor(Map<String, Integer> merged, SelectQuery query, int threshold)
{
  // When the identifiers were not already advanced ("fromNext"), advance them here.
  final Map<String, Integer> identifiers =
      fromNext ? merged : PagingSpec.next(merged, query.isDescending());
  return new PagingSpec(identifiers, threshold, fromNext);
}
}
final byte isDescendingByte = query.isDescending() ? (byte) 1 : 0;
/**
 * Runs the query repeatedly with DAY granularity, checking the merged paging
 * identifiers of both result rows against each row of {@code expectedOffsets},
 * then advances the query's paging spec for the next iteration.
 *
 * @param query           initial select query (re-paged each iteration)
 * @param expectedOffsets per-iteration expected offsets, one entry per segment
 */
private void runDayGranularityTest(SelectQuery query, int[][] expectedOffsets)
{
  for (final int[] expected : expectedOffsets) {
    final List<Result<SelectResultValue>> results =
        runner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
    Assert.assertEquals(2, results.size());

    final Map<String, Integer> merged = PagingSpec.merge(
        Arrays.asList(
            results.get(0).getValue().getPagingIdentifiers(),
            results.get(1).getValue().getPagingIdentifiers()
        )
    );

    for (int i = 0; i < 4; i++) {
      // XOR gate: ascending runs verify the non-negative expected offsets,
      // descending runs verify the negative ones.
      if (query.isDescending() ^ expected[i] >= 0) {
        Assert.assertEquals(expected[i], merged.get(segmentIdentifiers.get(i)).intValue());
      }
    }

    query = query.withPagingSpec(toNextCursor(merged, query, 3));
  }
}
/**
 * Runs the query repeatedly with ALL granularity, checking the single result
 * row's paging identifiers and event count against each row of
 * {@code expectedOffsets}, then advances the paging spec for the next pass.
 *
 * @param query           initial select query (re-paged each iteration)
 * @param expectedOffsets per-iteration expectations: four per-segment offsets
 *                        followed by the expected event count at index 4
 */
private void runAllGranularityTest(SelectQuery query, int[][] expectedOffsets)
{
  for (final int[] expected : expectedOffsets) {
    final List<Result<SelectResultValue>> results =
        runner.run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
    Assert.assertEquals(1, results.size());

    final SelectResultValue value = results.get(0).getValue();
    final Map<String, Integer> pagingIdentifiers = value.getPagingIdentifiers();
    final Map<String, Integer> merged =
        PagingSpec.merge(Collections.singletonList(pagingIdentifiers));

    for (int i = 0; i < 4; i++) {
      // XOR gate: ascending runs verify the non-negative expected offsets,
      // descending runs verify the negative ones.
      if (query.isDescending() ^ expected[i] >= 0) {
        Assert.assertEquals(
            expected[i],
            pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue()
        );
      }
    }
    Assert.assertEquals(expected[4], value.getEvents().size());

    query = query.withPagingSpec(toNextCursor(merged, query, 3));
  }
}
filter, query.getVirtualColumns(), query.isDescending(), query.getGranularity(), new Function<Cursor, Result<SelectResultValue>>()
cursor.getTime(), query.getPagingSpec(), query.isDescending() );
/**
 * Creates a builder pre-populated with every field of the given query, so a
 * caller can derive a modified copy without touching the original.
 *
 * @param query the query to copy
 * @return a builder whose state mirrors {@code query}
 */
public static SelectQueryBuilder copy(SelectQuery query)
{
  final SelectQueryBuilder builder = new SelectQueryBuilder()
      .dataSource(query.getDataSource())
      .intervals(query.getQuerySegmentSpec())
      .descending(query.isDescending())
      .filters(query.getFilter())
      .granularity(query.getGranularity())
      .dimensionSpecs(query.getDimensions())
      .metrics(query.getMetrics())
      .virtualColumns(query.getVirtualColumns())
      .pagingSpec(query.getPagingSpec())
      .context(query.getContext());
  return builder;
}
/**
 * Returns the paging offset for the given segment identifier, honoring the
 * query's sort direction.
 *
 * @param identifier segment identifier to look up in the paging spec
 * @return the offset recorded for that identifier
 */
public PagingOffset getPagingOffset(String identifier)
{
  final boolean descending = isDescending();
  return pagingSpec.getOffset(identifier, descending);
}
/**
 * Builds the binary merge function used to combine per-segment select results.
 * The merge behavior is fully determined by the query's granularity, paging
 * spec, and sort direction.
 */
@Override
protected BinaryFn<Result<SelectResultValue>, Result<SelectResultValue>, Result<SelectResultValue>> createMergeFn(
    Query<Result<SelectResultValue>> input
)
{
  final SelectQuery selectQuery = (SelectQuery) input;
  return new SelectBinaryFn(
      selectQuery.getGranularity(),
      selectQuery.getPagingSpec(),
      selectQuery.isDescending()
  );
}
};
/**
 * Human-readable representation of the query; lists every field that defines it.
 */
@Override
public String toString()
{
  final StringBuilder sb = new StringBuilder("SelectQuery{");
  sb.append("dataSource='").append(getDataSource()).append('\'');
  sb.append(", querySegmentSpec=").append(getQuerySegmentSpec());
  sb.append(", descending=").append(isDescending());
  sb.append(", dimFilter=").append(dimFilter);
  sb.append(", granularity=").append(getGranularity());
  sb.append(", dimensions=").append(dimensions);
  sb.append(", metrics=").append(metrics);
  sb.append(", virtualColumns=").append(virtualColumns);
  sb.append(", pagingSpec=").append(pagingSpec);
  sb.append('}');
  return sb.toString();
}
final byte isDescendingByte = query.isDescending() ? (byte) 1 : 0;
); Collections.sort( intervals, query.isDescending() ? Comparators.intervalsByEndThenStart() : Comparators.intervalsByStartThenEnd() ); if (query.isDescending()) { long granularEnd = granularity.bucketStart(interval.getEnd()).getMillis(); Long currentEnd = granularThresholds.get(granularEnd); if (query.isDescending()) { while (it.hasNext()) { Interval interval = it.next().getInterval();
filter, query.getVirtualColumns(), query.isDescending(), query.getGranularity(), new Function<Cursor, Result<SelectResultValue>>()
cursor.getTime(), query.getPagingSpec(), query.isDescending() );
/**
 * Creates a builder pre-populated with every field of the given query, so a
 * caller can derive a modified copy without touching the original.
 *
 * @param query the query to copy
 * @return a builder whose state mirrors {@code query}
 */
public static SelectQueryBuilder copy(SelectQuery query)
{
  final SelectQueryBuilder builder = new SelectQueryBuilder()
      .dataSource(query.getDataSource())
      .intervals(query.getQuerySegmentSpec())
      .descending(query.isDescending())
      .filters(query.getFilter())
      .granularity(query.getGranularity())
      .dimensionSpecs(query.getDimensions())
      .metrics(query.getMetrics())
      .virtualColumns(query.getVirtualColumns())
      .pagingSpec(query.getPagingSpec())
      .context(query.getContext());
  return builder;
}