@Override
public AggregatorFactory getCombiningFactory()
{
  return new DoubleMinAggregatorFactory(name, name, null, macroTable);
}
@Override
public List<AggregatorFactory> getRequiredColumns()
{
  return Collections.singletonList(new DoubleMinAggregatorFactory(fieldName, fieldName, expression, macroTable));
}
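// A minimal sketch (the aggregator names here are hypothetical, not from the source) of how
// the two factories above relate: getRequiredColumns() points at the raw input column, while
// getCombiningFactory() produces a factory that re-reads the column named after the output.
DoubleMinAggregatorFactory outer = new DoubleMinAggregatorFactory("minIndex", "index");
AggregatorFactory combining = outer.getCombiningFactory();
// combining aggregates over "minIndex" (the partial results) rather than over "index".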
private static AggregatorFactory createMinAggregatorFactory(
    final ValueType aggregationType,
    final String name,
    final String fieldName,
    final String expression,
    final ExprMacroTable macroTable
)
{
  switch (aggregationType) {
    case LONG:
      return new LongMinAggregatorFactory(name, fieldName, expression, macroTable);
    case FLOAT:
      return new FloatMinAggregatorFactory(name, fieldName, expression, macroTable);
    case DOUBLE:
      return new DoubleMinAggregatorFactory(name, fieldName, expression, macroTable);
    default:
      throw new ISE("Cannot create aggregator factory for type[%s]", aggregationType);
  }
}
}
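// A minimal usage sketch for the factory method above, assuming the caller has already
// resolved the aggregation type; "minPrice" and "price" are hypothetical names, and
// ExprMacroTable.nil() stands in for whatever macro table the real call site would supply.
AggregatorFactory doubleMin =
    createMinAggregatorFactory(ValueType.DOUBLE, "minPrice", "price", null, ExprMacroTable.nil());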
@Test
public void testEqualsAndHashCode()
{
  DoubleMinAggregatorFactory one = new DoubleMinAggregatorFactory("name1", "fieldName1");
  DoubleMinAggregatorFactory oneMore = new DoubleMinAggregatorFactory("name1", "fieldName1");
  DoubleMinAggregatorFactory two = new DoubleMinAggregatorFactory("name2", "fieldName2");

  Assert.assertEquals(one.hashCode(), oneMore.hashCode());
  Assert.assertTrue(one.equals(oneMore));
  Assert.assertFalse(one.equals(two));
}
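// A hedged companion sketch to the equals/hashCode test above: a JSON round-trip check,
// assuming a Jackson mapper set up the way Druid tests commonly set one up
// (DefaultObjectMapper plus an injectable ExprMacroTable). Details are illustrative only.
@Test
public void testSerde() throws Exception
{
  ObjectMapper mapper = new DefaultObjectMapper();
  mapper.setInjectableValues(
      new InjectableValues.Std().addValue(ExprMacroTable.class, ExprMacroTable.nil())
  );

  DoubleMinAggregatorFactory factory = new DoubleMinAggregatorFactory("name1", "fieldName1");
  String json = mapper.writeValueAsString(factory);
  AggregatorFactory deserialized = mapper.readValue(json, AggregatorFactory.class);

  Assert.assertEquals(factory, deserialized);
}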
Lists.newArrayList(
    new DoubleMaxAggregatorFactory("maxIndex", "index"),
    new DoubleMinAggregatorFactory("minIndex", "index")
private TimeseriesQuery makeTimeseriesQuery()
{
  return Druids.newTimeseriesQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .intervals(fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(addRowsIndexConstant)
      .build();
}
Lists.newArrayList(
    new DoubleMaxAggregatorFactory("maxIndex", "index"),
    new DoubleMinAggregatorFactory("minIndex", "index")
private TopNQuery makeTopNQuery()
{
  return new TopNQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .dimension(marketDimension)
      .metric(indexMetric)
      .threshold(3)
      .intervals(fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Collections.singletonList(addRowsIndexConstant))
      .build();
}
@Test
public void testQuerySerialization() throws IOException
{
  Query query = new TopNQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .dimension(marketDimension)
      .metric(indexMetric)
      .threshold(4)
      .intervals(fullOnIntervalSpec)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonDoubleAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Collections.singletonList(addRowsIndexConstant))
      .build();

  String json = jsonMapper.writeValueAsString(query);
  Query serdeQuery = jsonMapper.readValue(json, Query.class);
  Assert.assertEquals(query, serdeQuery);
}
private void testFullOnTimeseries(
    QueryRunner runner,
    List<Result<TimeseriesResultValue>> expectedResults,
    String failMsg
)
{
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .intervals(fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(addRowsIndexConstant)
      .build();

  failMsg += " timeseries ";
  HashMap<String, Object> context = new HashMap<>();
  Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query), context).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
private void testFilteredTimeseries(
    QueryRunner runner,
    List<Result<TimeseriesResultValue>> expectedResults,
    String failMsg
)
{
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .intervals(fullOnInterval)
      .filters(marketDimension, "spot")
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(addRowsIndexConstant)
      .build();

  failMsg += " filtered timeseries ";
  HashMap<String, Object> context = new HashMap<>();
  Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query), context).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
/** See {@link #runTests} */
@SuppressWarnings("unused")
private void testFullOnTopN(QueryRunner runner, List<Result<TopNResultValue>> expectedResults, String failMsg)
{
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .dimension(marketDimension)
      .metric(indexMetric)
      .threshold(3)
      .intervals(fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Collections.singletonList(addRowsIndexConstant))
      .build();

  failMsg += " topN ";
  HashMap<String, Object> context = new HashMap<>();
  Iterable<Result<TopNResultValue>> actualResults = runner.run(QueryPlus.wrap(query), context).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
/** See {@link #runTests} */
@SuppressWarnings("unused")
private void testFilteredTopN(QueryRunner runner, List<Result<TopNResultValue>> expectedResults, String failMsg)
{
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .dimension(marketDimension)
      .filters(marketDimension, "spot")
      .metric(indexMetric)
      .threshold(3)
      .intervals(fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Collections.singletonList(addRowsIndexConstant))
      .build();

  failMsg += " filtered topN ";
  HashMap<String, Object> context = new HashMap<>();
  Iterable<Result<TopNResultValue>> actualResults = runner.run(QueryPlus.wrap(query), context).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
Lists.newArrayList(
    new DoubleMaxAggregatorFactory("maxIndex", "index"),
    new DoubleMinAggregatorFactory("minIndex", "index"),
    new DoubleFirstAggregatorFactory("first", "index")
Lists.newArrayList(
    new DoubleMaxAggregatorFactory("maxIndex", "index"),
    new DoubleMinAggregatorFactory("minIndex", "index")
Lists.newArrayList(
    new DoubleMaxAggregatorFactory("maxIndex", "index"),
    new DoubleMinAggregatorFactory("minIndex", "index")
Lists.newArrayList(
    new DoubleMaxAggregatorFactory("maxIndex", "index"),
    new DoubleMinAggregatorFactory("minIndex", "index")
private TopNQuery makeFilteredTopNQuery()
{
  return new TopNQueryBuilder()
      .dataSource(dataSource)
      .granularity(allGran)
      .dimension(marketDimension)
      .metric(indexMetric)
      .threshold(3)
      .filters(
          new AndDimFilter(
              new SelectorDimFilter(marketDimension, "spot", null),
              new SelectorDimFilter(placementDimension, "preferred", null)
          )
      )
      .intervals(fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Collections.singletonList(addRowsIndexConstant))
      .build();
}
@Test
public void testFullOnTimeseriesMaxMin()
{
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(Granularities.ALL)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .aggregators(
          Arrays.asList(
              new DoubleMaxAggregatorFactory("maxIndex", "index"),
              new DoubleMinAggregatorFactory("minIndex", "index")
          )
      )
      .descending(descending)
      .build();

  DateTime expectedEarliest = DateTimes.of("2011-01-12");
  DateTime expectedLast = DateTimes.of("2011-04-15");

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query), CONTEXT).toList();

  Result<TimeseriesResultValue> result = results.iterator().next();

  Assert.assertEquals(expectedEarliest, result.getTimestamp());
  Assert.assertFalse(
      StringUtils.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast),
      result.getTimestamp().isAfter(expectedLast)
  );

  final TimeseriesResultValue value = result.getValue();

  Assert.assertEquals(result.toString(), 1870.061029, value.getDoubleMetric("maxIndex"), 1870.061029 * 1e-6);
  Assert.assertEquals(result.toString(), 59.021022, value.getDoubleMetric("minIndex"), 59.021022 * 1e-6);
}
@Override
@Test
public void testFullOnTimeseriesMaxMin()
{
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(Granularities.ALL)
      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
      .aggregators(
          new DoubleMaxAggregatorFactory("maxIndex", "index"),
          new DoubleMinAggregatorFactory("minIndex", "index")
      )
      .descending(descending)
      .build();

  DateTime expectedEarliest = DateTimes.of("1970-01-01");
  DateTime expectedLast = DateTimes.of("2011-04-15");

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query), CONTEXT).toList();

  Result<TimeseriesResultValue> result = results.iterator().next();

  Assert.assertEquals(expectedEarliest, result.getTimestamp());
  Assert.assertFalse(
      StringUtils.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast),
      result.getTimestamp().isAfter(expectedLast)
  );

  final TimeseriesResultValue value = result.getValue();

  Assert.assertEquals(result.toString(), 1870.061029, value.getDoubleMetric("maxIndex"), 1870.061029 * 1e-6);
  Assert.assertEquals(result.toString(), 59.021022, value.getDoubleMetric("minIndex"), 59.021022 * 1e-6);
}