@Override
public List<AggregatorFactory> getRequiredColumns()
{
  return Collections.singletonList(new CountAggregatorFactory(name));
}
private BufferArrayGrouper newGrouper(
    TestColumnSelectorFactory columnSelectorFactory,
    int bufferSize
)
{
  final ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
  final BufferArrayGrouper grouper = new BufferArrayGrouper(
      Suppliers.ofInstance(buffer),
      columnSelectorFactory,
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("valueSum", "value"),
          new CountAggregatorFactory("count")
      },
      1000
  );
  grouper.init();
  return grouper;
}
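A minimal sketch of how a test might drive the helper above. The wiring is an assumption based on how Druid's grouper tests typically feed rows (a TestColumnSelectorFactory that exposes setRow, obtained from GrouperTestUtil); it is not taken from the original source.

// Sketch only; assumes GrouperTestUtil.newColumnSelectorFactory() and setRow(Row) as in Druid's grouper tests.
TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
BufferArrayGrouper grouper = newGrouper(columnSelectorFactory, 1024 * 1024);
columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
grouper.aggregate(12);  // aggregate the current row under arbitrary integer key 12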
public IntervalChunkingQueryRunnerTest()
{
  queryBuilder = Druids.newTimeseriesQueryBuilder()
                       .dataSource("test")
                       .aggregators(Collections.singletonList(new CountAggregatorFactory("count")));
}
@Test
public void testSerde() throws IOException
{
  final FinalizingFieldAccessPostAggregator original = new FinalizingFieldAccessPostAggregator("foo", "bar");
  final FinalizingFieldAccessPostAggregator decorated = original.decorate(
      ImmutableMap.of("bar", new CountAggregatorFactory("bar"))
  );
  final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
  Assert.assertEquals(
      original,
      objectMapper.readValue(objectMapper.writeValueAsString(decorated), PostAggregator.class)
  );
}
public AsyncQueryRunnerTest()
{
  this.executor = Executors.newSingleThreadExecutor();
  query = Druids.newTimeseriesQueryBuilder()
                .dataSource("test")
                .intervals("2014/2015")
                .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
                .build();
}
@Override
public Aggregator apply(Cursor input)
{
  // Wrap a plain count in FilteredAggregatorFactory so only rows matching the filter are counted.
  Aggregator agg = new FilteredAggregatorFactory(
      new CountAggregatorFactory("count"),
      maybeOptimize(filter)
  ).factorize(input.getColumnSelectorFactory());
  // Advance the cursor to the end, aggregating each row it yields.
  for (; !input.isDone(); input.advance()) {
    agg.aggregate();
  }
  return agg;
}
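Once the cursor is exhausted, the count can be read back from the returned aggregator. A hedged sketch under the assumption that a cursor is supplied by the surrounding harness:

// Sketch only: 'cursor' is a placeholder for whatever the surrounding code provides.
Aggregator agg = apply(cursor);
long matchedRows = agg.getLong();  // the running count, as a long
agg.close();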
@Override
public IncrementalIndex createIndex()
{
  return new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt"))
      .setMaxRowCount(1000)
      .buildOnheap();
}
private IncrementalIndex getSingleDimIndex(String dimName, List<String> values) throws Exception
{
  IncrementalIndex toPersist1 = new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(1000)
      .buildOnheap();
  addDimValuesToIndex(toPersist1, dimName, values);
  return toPersist1;
}
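For context, a hedged sketch of what a helper like addDimValuesToIndex could do: add one MapBasedInputRow per value, so the "count" metric records one row each. The body below is an illustrative assumption, not the original helper.

// Hypothetical body for addDimValuesToIndex: one row per dimension value.
private void addDimValuesToIndex(IncrementalIndex index, String dimName, List<String> values) throws Exception
{
  for (String val : values) {
    index.add(new MapBasedInputRow(
        System.currentTimeMillis(),
        Collections.singletonList(dimName),
        ImmutableMap.of(dimName, val)
    ));
  }
}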
@Test
public void testComparator()
{
  CountAggregator agg = new CountAggregator();
  Object first = agg.get();
  agg.aggregate();
  Comparator comp = new CountAggregatorFactory("null").getComparator();
  // The comparator orders by count: 'first' holds 0, the post-aggregate value is 1.
  Assert.assertEquals(-1, comp.compare(first, agg.get()));
  Assert.assertEquals(0, comp.compare(first, first));
  Assert.assertEquals(0, comp.compare(agg.get(), agg.get()));
  Assert.assertEquals(1, comp.compare(agg.get(), first));
}
@Test
public void testSelectMatchesNothing()
{
  String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity("all")
            .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
            .intervals(Collections.singletonList(Intervals.of("2010-08-31/2010-09-01")))
            .build()
  ).lhs;
  Assert.assertEquals("hotBroker", brokerName);
}
private static IndexBuilder overrideIndexBuilderSchema(IndexBuilder indexBuilder)
{
  IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMetrics(
          new CountAggregatorFactory("count"),
          new HyperUniquesAggregatorFactory("hyperion", "dim1"),
          new DoubleMaxAggregatorFactory("dmax", "dim0")
      )
      .build();
  return indexBuilder.schema(schema);
}
@Test
public void testBasicSelect2()
{
  Pair<String, Server> p = brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity("all")
            .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
            .intervals(Collections.singletonList(Intervals.of("2013-08-31/2013-09-01")))
            .build()
  );
  Assert.assertEquals("hotBroker", p.lhs);
  Assert.assertEquals("hotHost:8080", p.rhs.getHost());
}
@Test
public void testFilterOnTimeFloorMisaligned() throws Exception
{
  // floor(__time TO month) can never equal a timestamp with a seconds component,
  // so the plan carries an empty interval set (QSS()) and the query returns no rows.
  testQuery(
      "SELECT COUNT(*) FROM druid.foo "
      + "WHERE floor(__time TO month) = TIMESTAMP '2000-01-01 00:00:01'",
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
                .dataSource(CalciteTests.DATASOURCE1)
                .intervals(QSS())
                .granularity(Granularities.ALL)
                .aggregators(AGGS(new CountAggregatorFactory("a0")))
                .context(TIMESERIES_CONTEXT_DEFAULT)
                .build()
      ),
      ImmutableList.of()
  );
}
@Test
public void testIncrementalIndexRowSizeComplex()
{
  IncrementalIndex index = new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt"))
      .setMaxRowCount(10000)
      .setMaxBytesInMemory(1000)
      .buildOnheap();
  long time = System.currentTimeMillis();
  IncrementalIndex.IncrementalIndexRowResult tndResult = index.toIncrementalIndexRow(toMapRow(
      time + 1,
      "billy", "nelson",
      "joe", Arrays.asList("123", "abcdef")
  ));
  IncrementalIndexRow td1 = tndResult.getIncrementalIndexRow();
  Assert.assertEquals(74, td1.estimateBytesInMemory());
}
@Test
public void testIncrementalIndexRowSizeBasic()
{
  IncrementalIndex index = new IncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("cnt"))
      .setMaxRowCount(10000)
      .setMaxBytesInMemory(1000)
      .buildOnheap();
  long time = System.currentTimeMillis();
  IncrementalIndex.IncrementalIndexRowResult tndResult =
      index.toIncrementalIndexRow(toMapRow(time, "billy", "A", "joe", "B"));
  IncrementalIndexRow td1 = tndResult.getIncrementalIndexRow();
  Assert.assertEquals(44, td1.estimateBytesInMemory());
}
@Test
public void testUnqualifiedTableName() throws Exception
{
  testQuery(
      "SELECT COUNT(*) FROM foo",
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
                .dataSource(CalciteTests.DATASOURCE1)
                .intervals(QSS(Filtration.eternity()))
                .granularity(Granularities.ALL)
                .aggregators(AGGS(new CountAggregatorFactory("a0")))
                .context(TIMESERIES_CONTEXT_DEFAULT)
                .build()
      ),
      ImmutableList.of(
          new Object[]{6L}
      )
  );
}
@Test
public void testCountStar() throws Exception
{
  testQuery(
      "SELECT COUNT(*) FROM druid.foo",
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
                .dataSource(CalciteTests.DATASOURCE1)
                .intervals(QSS(Filtration.eternity()))
                .granularity(Granularities.ALL)
                .aggregators(AGGS(new CountAggregatorFactory("a0")))
                .context(TIMESERIES_CONTEXT_DEFAULT)
                .build()
      ),
      ImmutableList.of(
          new Object[]{6L}
      )
  );
}
private IncrementalIndex getIndexWithDimsFromSchemata(List<DimensionSchema> dims)
{
  IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withDimensionsSpec(new DimensionsSpec(dims, null, null))
      .withMetrics(new CountAggregatorFactory("count"))
      .build();
  return new IncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setMaxRowCount(1000)
      .buildOnheap();
}
private IncrementalIndex getIndexWithDims(List<String> dims)
{
  IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withDimensionsSpec(new DimensionsSpec(makeDimensionSchemas(dims), null, null))
      .withMetrics(new CountAggregatorFactory("count"))
      .build();
  return new IncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setMaxRowCount(1000)
      .buildOnheap();
}
private static class TestFirehoseFactory implements FirehoseFactory<InputRowParser>
{
  public TestFirehoseFactory()
  {
  }

  @Override
  @SuppressWarnings("unchecked")
  public Firehose connect(InputRowParser parser, File temporaryDirectory) throws ParseException
  {
    return new TestFirehose(parser);
  }
}
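A hedged sketch of how a factory like the one above is typically consumed: connect, then drain rows until hasMore() returns false. The parser and directory below are placeholders for whatever the test supplies, not values from the original source.

// Sketch only; 'parser' and 'tmpDir' are placeholders.
try (Firehose firehose = new TestFirehoseFactory().connect(parser, tmpDir)) {
  while (firehose.hasMore()) {
    InputRow row = firehose.nextRow();  // each parsed row feeds whatever the test exercises
    // ... use row ...
  }
}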