.dataSource("blah") .granularity(Granularities.ALL) .intervals(intervalSpec) .query("") .dimensions(Collections.singletonList("dimUniform")) .filters(new AndDimFilter(dimFilters));
@Test
public void testFragmentSearch()
{
  final Druids.SearchQueryBuilder builder = testBuilder();
  final Map<String, Set<String>> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);

  // Default (case-insensitive) fragment matching: both casings should be hit.
  SearchQuery query = builder.fragments(Arrays.asList("auto", "ve")).build();
  expectedResults.put(qualityDimension, Sets.newHashSet("automotive", "AutoMotive"));
  checkSearchQuery(query, expectedResults);

  // Case-sensitive fragment matching: only the all-lowercase value should be hit.
  query = builder.fragments(Arrays.asList("auto", "ve"), true).build();
  expectedResults.put(qualityDimension, Sets.newHashSet("automotive"));
  checkSearchQuery(query, expectedResults);
}
/**
 * Sets the search fragments, using the default case-insensitive matching.
 * Delegates to {@link #fragments(List, boolean)} with {@code caseSensitive = false}.
 *
 * @param q the fragments to search for
 * @return this builder
 */
public SearchQueryBuilder fragments(List<String> q) { return fragments(q, false); }
/**
 * Returns a copy of this query with the given data source substituted in;
 * all other fields are carried over unchanged.
 */
@Override
public Query<Result<SearchResultValue>> withDataSource(DataSource dataSource)
{
  final Druids.SearchQueryBuilder builder = Druids.SearchQueryBuilder.copy(this);
  builder.dataSource(dataSource);
  return builder.build();
}
/**
 * Creates a new builder pre-populated with every field of the given query,
 * so callers can tweak individual fields and rebuild.
 *
 * @param query the query to copy field values from
 * @return a fresh builder mirroring {@code query}
 */
public static SearchQueryBuilder copy(SearchQuery query)
{
  // Each setter returns this builder, so statement-per-field is equivalent to chaining.
  final SearchQueryBuilder builder = new SearchQueryBuilder();
  builder.dataSource(query.getDataSource());
  builder.filters(query.getDimensionsFilter());
  builder.granularity(query.getGranularity());
  builder.limit(query.getLimit());
  builder.intervals(query.getQuerySegmentSpec());
  builder.dimensions(query.getDimensions());
  builder.query(query.getQuery());
  builder.sortSpec(query.getSort());
  builder.context(query.getContext());
  return builder;
}
.dataSource(DATA_SOURCE) .filters(DIM_FILTER) .granularity(GRANULARITY) .limit(1000) .intervals(SEG_SPEC) .dimensions(Collections.singletonList(TOP_DIM)) .query("how") .context(CONTEXT); builder.build(), Intervals.of("2011-01-01/2011-01-02"), makeSearchResults(TOP_DIM, DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); SearchQuery query = builder .intervals("2011-01-01/2011-01-10") .dimensions(new DefaultDimensionSpec(TOP_DIM, "new_dim")) .build(); TestHelper.assertExpectedResults( makeSearchResults(
.dataSource(DATA_SOURCE) .filters(DIM_FILTER) .granularity(GRANULARITY) .limit(1000) .intervals(SEG_SPEC) .dimensions(Collections.singletonList(TOP_DIM)) .query("how") .context(CONTEXT); builder.build(), Intervals.of("2011-01-01/2011-01-02"), makeSearchResults(TOP_DIM, DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) );
.dimensions( new DefaultDimensionSpec("table", "table") .dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.allGran) .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) .context(ImmutableMap.of("searchStrategy", "cursorOnly")) .build();
SearchQuery query = Druids .newSearchQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.dayGran) .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) .dimensions(new ListFilteredDimensionSpec( new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), null )) .build();
final List<Interval> intervals = Collections.singletonList(interval); final SearchQuery query = Druids.newSearchQueryBuilder() .dataSource(dataSource) .intervals(intervals) .granularity(granularity) .limit(10000) .query("wow") .build(); final QueryRunner<Result<SearchResultValue>> runner = serverManager.getQueryRunnerForIntervals( query,
.dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.allGran) .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) .query("a") .build();
.dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.allGran) .filters( new ExtractionDimFilter( QueryRunnerTestHelper.qualityDimension, .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) .dimensions( new ExtractionDimensionSpec( QueryRunnerTestHelper.qualityDimension, .query("☃") .build();
@Test
public void testSearchAll()
{
  // Every market value should be returned, with its row count.
  final List<SearchHit> expectedHits = new ArrayList<>(
      Arrays.asList(
          new SearchHit(QueryRunnerTestHelper.marketDimension, "spot", 837),
          new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186),
          new SearchHit(QueryRunnerTestHelper.marketDimension, "upfront", 186)
      )
  );

  // An explicit empty search string matches everything...
  final SearchQuery withEmptyQuery = Druids.newSearchQueryBuilder()
                                           .dataSource(QueryRunnerTestHelper.dataSource)
                                           .granularity(QueryRunnerTestHelper.allGran)
                                           .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
                                           .dimensions(QueryRunnerTestHelper.marketDimension)
                                           .query("")
                                           .build();
  checkSearchQuery(withEmptyQuery, expectedHits);

  // ...and so does omitting the search string entirely.
  final SearchQuery withoutQuery = Druids.newSearchQueryBuilder()
                                         .dataSource(QueryRunnerTestHelper.dataSource)
                                         .granularity(QueryRunnerTestHelper.allGran)
                                         .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
                                         .dimensions(QueryRunnerTestHelper.marketDimension)
                                         .build();
  checkSearchQuery(withoutQuery, expectedHits);
}
@Test
public void testEquals()
{
  // Two independently built but identically configured queries must be equal.
  Query query1 = makeEqualityTestQuery();
  Query query2 = makeEqualityTestQuery();
  Assert.assertEquals(query1, query2);
}

/**
 * Builds the search query used by {@code testEquals}. Extracted so the test
 * constructs two equal instances without duplicating the builder chain.
 */
private static Query makeEqualityTestQuery()
{
  return Druids.newSearchQueryBuilder()
               .dataSource(QueryRunnerTestHelper.dataSource)
               .granularity(QueryRunnerTestHelper.allGran)
               .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
               .dimensions(
                   new DefaultDimensionSpec(
                       QueryRunnerTestHelper.qualityDimension,
                       QueryRunnerTestHelper.qualityDimension
                   )
               )
               .query("a")
               .build();
}
/**
 * Builds a benchmark search query over two dimensions ("dimUniform" and
 * "dimHyperUnique"), each restricted by an IN filter containing roughly 10%
 * of a 100,000-value keyspace.
 *
 * @param basicSchema schema supplying the data interval to query over
 * @return a configured builder; the caller invokes {@code build()}
 */
private static SearchQueryBuilder basicB(final BenchmarkSchemaInfo basicSchema)
{
  final QuerySegmentSpec intervalSpec =
      new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

  // The two dimensions sample the same keyspace but start at different offsets
  // (1 vs 0), preserving the original benchmark's filter contents exactly.
  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter("dimUniform", sampleFilterVals(1, 100000, 0.1), null));
  dimFilters.add(new InDimFilter("dimHyperUnique", sampleFilterVals(0, 100000, 0.1), null));

  return Druids.newSearchQueryBuilder()
               .dataSource("blah")
               .granularity(Granularities.ALL)
               .intervals(intervalSpec)
               .query("")
               .dimensions(Lists.newArrayList("dimUniform", "dimHyperUnique"))
               .filters(new AndDimFilter(dimFilters));
}

/**
 * Returns evenly spaced string values sampled from {@code [start, cardinality]},
 * capped at {@code cardinality * fraction} entries. Extracted from two
 * previously duplicated loops in {@code basicB}.
 *
 * @param start       first candidate value (inclusive)
 * @param cardinality size of the keyspace being sampled
 * @param fraction    fraction of the keyspace to keep (determines count and step)
 */
private static List<String> sampleFilterVals(final int start, final int cardinality, final double fraction)
{
  final int resultNum = (int) (cardinality * fraction);
  final int step = cardinality / resultNum;
  final List<String> vals = new ArrayList<>(resultNum);
  for (int i = start; i < cardinality + 1 && vals.size() < resultNum; i += step) {
    vals.add(String.valueOf(i));
  }
  return vals;
}