public QuerySegmentSpec getQuerySegmentSpec()
{
  // Expose this query's configured intervals as a multiple-interval segment spec.
  final QuerySegmentSpec spec = new MultipleIntervalSegmentSpec(intervals);
  return spec;
}
/**
 * Shorthand factory for a {@link MultipleIntervalSegmentSpec} built from varargs intervals.
 * Test-helper naming ("QSS") is kept for compatibility with existing callers.
 */
public static QuerySegmentSpec QSS(final Interval... intervals)
{
  final List<Interval> intervalList = Arrays.asList(intervals);
  return new MultipleIntervalSegmentSpec(intervalList);
}
/**
 * Builds the "basic.B" search query: an AND of two IN filters, one on "dimUniform"
 * and one on "dimHyperUnique", each matching a 10% sample of a 100k value space.
 */
private static SearchQueryBuilder basicB(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec =
      new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

  // The two dimensions sample the same space but start at different offsets (1 vs 0).
  final List<String> dimUniformFilterVals = basicBSampledFilterVals(1);
  final List<String> dimHyperUniqueFilterVals = basicBSampledFilterVals(0);

  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter("dimUniform", dimUniformFilterVals, null));
  dimFilters.add(new InDimFilter("dimHyperUnique", dimHyperUniqueFilterVals, null));

  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(intervalSpec)
               .query("")
               .dimensions(Lists.newArrayList("dimUniform", "dimHyperUnique"))
               .filters(new AndDimFilter(dimFilters));
}

/**
 * Returns stringified values {start, start + step, ...}, capped at 10% of a 100k space
 * (10000 values with a stride of 10), staying below 100001.
 */
private static List<String> basicBSampledFilterVals(final int start)
{
  final int resultNum = (int) (100000 * 0.1);
  final int step = 100000 / resultNum;
  final List<String> vals = new ArrayList<>(resultNum);
  for (int i = start; i < 100001 && vals.size() < resultNum; i += step) {
    vals.add(String.valueOf(i));
  }
  return vals;
}
private void setupQueries() { // queries for the basic schema Map<String, Druids.SelectQueryBuilder> basicQueries = new LinkedHashMap<>(); BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic"); { // basic.A QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval())); Druids.SelectQueryBuilder queryBuilderA = Druids.newSelectQueryBuilder() .dataSource(new TableDataSource("blah")) .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) .metrics(Collections.emptyList()) .intervals(intervalSpec) .granularity(Granularities.ALL) .descending(false); basicQueries.put("A", queryBuilderA); } SCHEMA_QUERY_MAP.put("basic", basicQueries); }
/**
 * Builds the "basic.D" search query: three ANDed filters (IN, selector, exclusive
 * numeric bound) all applied to the "dimUniform" dimension.
 */
private static SearchQueryBuilder basicD(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec =
      new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

  // Sample 10% of the 100k value space with a fixed stride: "1", "11", "21", ...
  final int resultNum = (int) (100000 * 0.1);
  final int step = 100000 / resultNum;
  final List<String> dimUniformFilterVals = new ArrayList<>();
  int candidate = 1;
  while (candidate < 100001 && dimUniformFilterVals.size() < resultNum) {
    dimUniformFilterVals.add(String.valueOf(candidate));
    candidate += step;
  }

  final String dimName = "dimUniform";
  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, null));
  dimFilters.add(new SelectorDimFilter(dimName, "3", null));
  dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, null, null));

  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(intervalSpec)
               .query("")
               .dimensions(Collections.singletonList("dimUniform"))
               .filters(new AndDimFilter(dimFilters));
}
/**
 * Builds the "basic.A" search query: an unfiltered search for the literal "123"
 * across the schema's full data interval.
 */
private static SearchQueryBuilder basicA(final BenchmarkSchemaInfo basicSchema)
{
  final List<Interval> dataIntervals = Collections.singletonList(basicSchema.getDataInterval());
  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(new MultipleIntervalSegmentSpec(dataIntervals))
               .query("123");
}
// NOTE(review): this line does not compile as written — the method signature is not
// followed by an opening '{', and the method body (loops, filters, return) appears to
// be truncated after the first statement. Restore the full basicC(..) implementation
// from the original source before building.
private static SearchQueryBuilder basicC(final BenchmarkSchemaInfo basicSchema) final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
@Override
public Sequence<Result<SearchResultValue>> run(
    QueryPlus<Result<SearchResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Split the query into two fixed date ranges and concatenate the per-range results.
  final QueryPlus<Result<SearchResultValue>> firstRange = queryPlus.withQuerySegmentSpec(
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-01-12/2011-02-28")))
  );
  final QueryPlus<Result<SearchResultValue>> secondRange = queryPlus.withQuerySegmentSpec(
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-03-01/2011-04-15")))
  );
  return Sequences.concat(
      runner.run(firstRange, responseContext),
      runner.run(secondRange, responseContext)
  );
}
}
@Test
public void testQueryMaxTimeout()
{
  // A query whose context timeout (1000) exceeds the enforced cap (100) must be rejected.
  exception.expect(IAE.class);
  exception.expectMessage("configured [timeout = 1000] is more than enforced limit of maxQueryTimeout [100].");

  final Query<?> query = new TestQuery(
      new TableDataSource("test"),
      new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))),
      false,
      ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1000)
  );
  QueryContexts.verifyMaxQueryTimeout(query, 100);
}
@Test
public void testMaxScatterGatherBytes()
{
  // A query whose context maxScatterGatherBytes (1000) exceeds the cap (100) must be rejected.
  exception.expect(IAE.class);
  exception.expectMessage("configured [maxScatterGatherBytes = 1000] is more than enforced limit of [100].");

  final Query<?> query = new TestQuery(
      new TableDataSource("test"),
      new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))),
      false,
      ImmutableMap.of(QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, 1000)
  );
  QueryContexts.withMaxScatterGatherBytes(query, 100);
}
}
@Override
public void run()
{
  final HashMap<String, Object> context = new HashMap<>();
  // Issue the same query the configured number of times, verifying results on every pass.
  for (int iteration = 0; iteration < numTimesToQuery; iteration++) {
    TestHelper.assertExpectedResults(
        expected,
        runner.run(
            QueryPlus.wrap(
                query.withQuerySegmentSpec(
                    new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval))
                )
            ),
            context
        )
    );
    // Let the caller observe each completed query (e.g. to coordinate concurrent test threads).
    if (queryCompletedCallback != null) {
      queryCompletedCallback.run();
    }
  }
}
},
@Override public Sequence<T> apply(Interval singleInterval) { return new AsyncQueryRunner<T>( //Note: it is assumed that toolChest.mergeResults(..) gives a query runner that is //not lazy i.e. it does most of its work on call to run() method toolChest.mergeResults( new MetricsEmittingQueryRunner<T>( emitter, toolChest, baseRunner, QueryMetrics::reportIntervalChunkTime, queryMetrics -> queryMetrics.chunkInterval(singleInterval) ).withWaitMeasuredFromNow() ), executor, queryWatcher ).run( queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(singleInterval))), responseContext ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus<Row> queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus<Row> queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( (queryPlus3, responseContext1) -> new MergeSequence<>( queryPlus3.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList( runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1) ) ) ) ).run(queryPlus, responseContext); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus<Row> queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus<Row> queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( (queryPlus3, responseContext1) -> new MergeSequence<>( queryPlus3.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList( runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1) ) ) ) ).run(queryPlus, responseContext); } }
new TimeBoundaryQuery( new TableDataSource("dummy"), new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, null,