/**
 * Returns this query's intervals wrapped in a {@link MultipleIntervalSegmentSpec}.
 */
public QuerySegmentSpec getQuerySegmentSpec()
{
  return new MultipleIntervalSegmentSpec(intervals);
}
/**
 * Test shorthand: wraps the given intervals in a {@link MultipleIntervalSegmentSpec}.
 *
 * @param intervals the intervals the spec should cover, in the order given
 */
public static QuerySegmentSpec QSS(final Interval... intervals)
{
  return new MultipleIntervalSegmentSpec(Arrays.asList(intervals));
}
private void setupQueries() { // queries for the basic schema Map<String, Druids.SelectQueryBuilder> basicQueries = new LinkedHashMap<>(); BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic"); { // basic.A QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval())); Druids.SelectQueryBuilder queryBuilderA = Druids.newSelectQueryBuilder() .dataSource(new TableDataSource("blah")) .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) .metrics(Collections.emptyList()) .intervals(intervalSpec) .granularity(Granularities.ALL) .descending(false); basicQueries.put("A", queryBuilderA); } SCHEMA_QUERY_MAP.put("basic", basicQueries); }
/**
 * Builds search query "basic.D": an AND of three filters (IN over an evenly spaced
 * 10% sample of the uniform dimension's values, a selector on "3", and an exclusive
 * numeric bound (100, 10000)) over the schema's full data interval.
 *
 * <p>Fix: the dimension name was hard-coded as {@code "dimUniform"} in the
 * {@code .dimensions(...)} call instead of reusing {@code dimName}, and the value
 * cardinality 100000 appeared as two separate magic numbers (100000 / 100001).
 * Both are now single definitions; runtime behavior is unchanged.
 *
 * @param basicSchema schema whose data interval bounds the query
 */
private static SearchQueryBuilder basicD(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(basicSchema.getDataInterval())
  );

  // dimUniform values are assumed to span [1, cardinality]; sample 10% of them,
  // evenly spaced (step of 10). TODO confirm cardinality matches the schema generator.
  final int cardinality = 100000;
  final int resultNum = (int) (cardinality * 0.1);
  final int step = cardinality / resultNum;
  final List<String> dimUniformFilterVals = new ArrayList<>();
  for (int i = 1; i < cardinality + 1 && dimUniformFilterVals.size() < resultNum; i += step) {
    dimUniformFilterVals.add(String.valueOf(i));
  }

  final String dimName = "dimUniform";
  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, null));
  dimFilters.add(new SelectorDimFilter(dimName, "3", null));
  dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, null, null));

  return Druids.newSearchQueryBuilder()
      .dataSource("blah")
      .granularity(Granularities.ALL)
      .intervals(intervalSpec)
      .query("")
      .dimensions(Collections.singletonList(dimName)) // was a duplicate "dimUniform" literal
      .filters(new AndDimFilter(dimFilters));
}
/**
 * Builds search query "basic.A": an unfiltered search for the term "123" over the
 * schema's full data interval with ALL granularity.
 *
 * @param basicSchema schema whose data interval bounds the query
 */
private static SearchQueryBuilder basicA(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec fullIntervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(basicSchema.getDataInterval())
  );

  return Druids.newSearchQueryBuilder()
      .dataSource("blah")
      .granularity(Granularities.ALL)
      .intervals(fullIntervalSpec)
      .query("123");
}
/**
 * Splits the incoming search query into two consecutive interval chunks
 * (2011-01-12/2011-02-28 and 2011-03-01/2011-04-15) and concatenates the
 * delegate runner's results for each chunk.
 */
@Override
public Sequence<Result<SearchResultValue>> run(
    QueryPlus<Result<SearchResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  final QueryPlus<Result<SearchResultValue>> firstChunk = queryPlus.withQuerySegmentSpec(
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-01-12/2011-02-28")))
  );
  final QueryPlus<Result<SearchResultValue>> secondChunk = queryPlus.withQuerySegmentSpec(
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-03-01/2011-04-15")))
  );
  return Sequences.concat(runner.run(firstChunk, responseContext), runner.run(secondChunk, responseContext));
}
}
/**
 * Verifies that {@code QueryContexts.verifyMaxQueryTimeout} rejects a query whose
 * configured timeout (1000) exceeds the enforced maximum (100) with an IAE
 * carrying the expected message.
 */
@Test
public void testQueryMaxTimeout()
{
  exception.expect(IAE.class);
  exception.expectMessage("configured [timeout = 1000] is more than enforced limit of maxQueryTimeout [100].");

  final Query<?> query = new TestQuery(
      new TableDataSource("test"),
      new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))),
      false,
      ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1000)
  );

  QueryContexts.verifyMaxQueryTimeout(query, 100);
}
/**
 * Verifies that {@code QueryContexts.withMaxScatterGatherBytes} rejects a query whose
 * configured maxScatterGatherBytes (1000) exceeds the enforced limit (100) with an
 * IAE carrying the expected message.
 */
@Test
public void testMaxScatterGatherBytes()
{
  exception.expect(IAE.class);
  exception.expectMessage("configured [maxScatterGatherBytes = 1000] is more than enforced limit of [100].");

  final Query<?> query = new TestQuery(
      new TableDataSource("test"),
      new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))),
      false,
      ImmutableMap.of(QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, 1000)
  );

  QueryContexts.withMaxScatterGatherBytes(query, 100);
}
}
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)) ) ); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus<Row> queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus<Row> queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( (queryPlus3, responseContext1) -> new MergeSequence<>( queryPlus3.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList( runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1) ) ) ) ).run(queryPlus, responseContext); } }
@Override public Sequence<Row> run(QueryPlus<Row> queryPlus, Map<String, Object> responseContext) { // simulate two daily segments final QueryPlus<Row> queryPlus1 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus<Row> queryPlus2 = queryPlus.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( (queryPlus3, responseContext1) -> new MergeSequence<>( queryPlus3.getQuery().getResultOrdering(), Sequences.simple( Arrays.asList( runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1) ) ) ) ).run(queryPlus, responseContext); } }
/**
 * Verifies that the host finder routes a time-boundary query over "test" to the
 * broker whose host is "foo", using the rendezvous-hash Avatica balancer.
 */
@Test
public void testFindServer()
{
  final QueryHostFinder hostFinder = new QueryHostFinder(
      brokerSelector,
      new RendezvousHashAvaticaConnectionBalancer()
  );

  final TimeBoundaryQuery timeBoundaryQuery = new TimeBoundaryQuery(
      new TableDataSource("test"),
      new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-08-31/2011-09-01"))),
      null,
      null,
      null
  );
  final Server server = hostFinder.findServer(timeBoundaryQuery);

  Assert.assertEquals("foo", server.getHost());
}
}
/**
 * Verifies that a multi-interval timeseries query with context priority 5 is
 * routed to "hotBroker".
 */
@Test
public void testPrioritySelect2()
{
  final MultipleIntervalSegmentSpec threeYearSpec = new MultipleIntervalSegmentSpec(
      Arrays.asList(
          Intervals.of("2011-08-31/2011-09-01"),
          Intervals.of("2012-08-31/2012-09-01"),
          Intervals.of("2013-08-31/2013-09-01")
      )
  );

  final String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
          .dataSource("test")
          .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
          .intervals(threeYearSpec)
          .context(ImmutableMap.of("priority", 5))
          .build()
  ).lhs;

  Assert.assertEquals("hotBroker", brokerName);
}
/**
 * Verifies that a multi-interval timeseries query with context priority -1 is
 * routed to "hotBroker".
 */
@Test
public void testPrioritySelect()
{
  final MultipleIntervalSegmentSpec threeYearSpec = new MultipleIntervalSegmentSpec(
      Arrays.asList(
          Intervals.of("2011-08-31/2011-09-01"),
          Intervals.of("2012-08-31/2012-09-01"),
          Intervals.of("2013-08-31/2013-09-01")
      )
  );

  final String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
          .dataSource("test")
          .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
          .intervals(threeYearSpec)
          .context(ImmutableMap.of("priority", -1))
          .build()
  ).lhs;

  Assert.assertEquals("hotBroker", brokerName);
}
/**
 * Verifies that a multi-interval timeseries query (intervals listed newest-first,
 * no explicit priority) is routed to "coldBroker".
 */
@Test
public void testSelectMultiInterval()
{
  final MultipleIntervalSegmentSpec newestFirstSpec = new MultipleIntervalSegmentSpec(
      Arrays.asList(
          Intervals.of("2013-08-31/2013-09-01"),
          Intervals.of("2012-08-31/2012-09-01"),
          Intervals.of("2011-08-31/2011-09-01")
      )
  );

  final String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
          .dataSource("test")
          .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
          .intervals(newestFirstSpec)
          .build()
  ).lhs;

  Assert.assertEquals("coldBroker", brokerName);
}
/**
 * Verifies that a multi-interval timeseries query (intervals listed oldest-first,
 * no explicit priority) is routed to "coldBroker".
 */
@Test
public void testSelectMultiInterval2()
{
  final MultipleIntervalSegmentSpec oldestFirstSpec = new MultipleIntervalSegmentSpec(
      Arrays.asList(
          Intervals.of("2011-08-31/2011-09-01"),
          Intervals.of("2012-08-31/2012-09-01"),
          Intervals.of("2013-08-31/2013-09-01")
      )
  );

  final String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
          .dataSource("test")
          .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
          .intervals(oldestFirstSpec)
          .build()
  ).lhs;

  Assert.assertEquals("coldBroker", brokerName);
}