public Result<TimeseriesResultValue> build()
{
  return new Result<TimeseriesResultValue>(
      timestamp,
      new TimeseriesResultValue(metricValues)
  );
}
}
private Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn(
    final TimeseriesQuery query,
    final MetricManipulationFn fn,
    final boolean calculatePostAggs
)
{
  return result -> {
    final TimeseriesResultValue holder = result.getValue();
    final Map<String, Object> values = new HashMap<>(holder.getBaseObject());
    if (calculatePostAggs && !query.getPostAggregatorSpecs().isEmpty()) {
      // Put the non-finalized aggregator values in first, so dependent post-aggregators can be computed.
      for (AggregatorFactory agg : query.getAggregatorSpecs()) {
        values.put(agg.getName(), holder.getMetric(agg.getName()));
      }
      for (PostAggregator postAgg : query.getPostAggregatorSpecs()) {
        values.put(postAgg.getName(), postAgg.compute(values));
      }
    }
    for (AggregatorFactory agg : query.getAggregatorSpecs()) {
      values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName())));
    }
    return new Result<>(
        result.getTimestamp(),
        new TimeseriesResultValue(values)
    );
  };
}
}
@Test
public void testSimpleDataIngestAndTimeseriesQuery() throws Exception
{
  AggregationTestHelper timeseriesQueryAggregationTestHelper = AggregationTestHelper.createTimeseriesQueryAggregationTestHelper(
      sm.getJacksonModules(),
      tempFolder
  );

  Sequence seq = timeseriesQueryAggregationTestHelper.runQueryOnSegments(
      ImmutableList.of(s1, s2),
      readFileFromClasspathAsString("timeseries_query.json")
  );

  Result<TimeseriesResultValue> result = (Result<TimeseriesResultValue>) Iterables.getOnlyElement(seq.toList());
  Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp());
  Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketch_count"), 0.01);
  Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchEstimatePostAgg"), 0.01);
  Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
  Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, result.getValue().getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, result.getValue().getDoubleMetric("non_existing_col_validation"), 0.01);
}
@Override
public Result<TimeseriesResultValue> apply(Result<TimeseriesResultValue> arg1, Result<TimeseriesResultValue> arg2)
{
  if (arg1 == null) {
    return arg2;
  }

  if (arg2 == null) {
    return arg1;
  }

  TimeseriesResultValue arg1Val = arg1.getValue();
  TimeseriesResultValue arg2Val = arg2.getValue();
  Map<String, Object> retVal = new LinkedHashMap<String, Object>();

  for (AggregatorFactory factory : aggregations) {
    final String metricName = factory.getName();
    retVal.put(metricName, factory.combine(arg1Val.getMetric(metricName), arg2Val.getMetric(metricName)));
  }

  return (gran instanceof AllGranularity)
         ? new Result<TimeseriesResultValue>(
             arg1.getTimestamp(),
             new TimeseriesResultValue(retVal)
         )
         : new Result<TimeseriesResultValue>(
             gran.bucketStart(arg1.getTimestamp()),
             new TimeseriesResultValue(retVal)
         );
}
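// Hedged sketch (not from the source): the per-metric merge above boils down to
// AggregatorFactory.combine(). The factory, metric name, and literal values here are
// illustrative assumptions, shown with a longSum aggregator.
AggregatorFactory rowsAggregator = new LongSumAggregatorFactory("rows", "rows");
Object merged = rowsAggregator.combine(3L, 4L); // 7L: the two partial per-bucket sums folded together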
Assert.assertEquals(
    result.toString(),
    QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L,
    value.getLongMetric("rows").longValue()
);
Assert.assertEquals(
    result.toString(),
    Doubles.tryParse(expectedIndex[count]).doubleValue(),
    value.getDoubleMetric("index").doubleValue(),
    value.getDoubleMetric("index").doubleValue() * 1e-6
);
Assert.assertEquals(
    new Double(expectedIndex[count]) + 13L + 1L,
    value.getDoubleMetric("addRowsIndexConstant"),
    value.getDoubleMetric("addRowsIndexConstant") * 1e-6
);
Assert.assertEquals(
    value.getDoubleMetric("uniques"),
    9.0d,
    0.02
);
Assert.assertEquals(
    result.toString(),
    0.0D,
    value.getDoubleMetric("index").doubleValue(),
    value.getDoubleMetric("index").doubleValue() * 1e-6
);
Assert.assertEquals(
    result.toString(),
@Override
public Object[] apply(final Result<TimeseriesResultValue> result)
{
  final Map<String, Object> row = result.getValue().getBaseObject();
  final Object[] retVal = new Object[fieldList.size()];
  for (final RelDataTypeField field : fieldList) {
    final String outputName = druidQuery.getOutputRowSignature().getRowOrder().get(field.getIndex());
    if (outputName.equals(timeOutputName)) {
      retVal[field.getIndex()] = coerce(result.getTimestamp(), field.getType().getSqlTypeName());
    } else {
      retVal[field.getIndex()] = coerce(row.get(outputName), field.getType().getSqlTypeName());
    }
  }
  return retVal;
}
}
@Nullable
public Long sumMetric(final Task task, final DimFilter filter, final String metric)
{
  // Do a query.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("test_ds")
                                .filters(filter)
                                .aggregators(
                                    ImmutableList.of(
                                        new LongSumAggregatorFactory(metric, metric)
                                    )
                                )
                                .granularity(Granularities.ALL)
                                .intervals("2000/3000")
                                .build();

  List<Result<TimeseriesResultValue>> results =
      task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();

  if (results.isEmpty()) {
    return 0L;
  } else {
    return results.get(0).getValue().getLongMetric(metric);
  }
}
}
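// Hedged usage sketch for the helper above; the task variable, filter, and metric name are
// illustrative assumptions, not from the source. Sums the "rows" metric for rows where dim1 = "foo".
Long matchingRows = sumMetric(task, new SelectorDimFilter("dim1", "foo", null), "rows");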
@Override
public Function<Result<TimeseriesResultValue>, Object> prepareForCache(boolean isResultLevelCache)
{
  return input -> {
    TimeseriesResultValue results = input.getValue();
    final List<Object> retVal = Lists.newArrayListWithCapacity(1 + aggs.size());

    retVal.add(input.getTimestamp().getMillis());
    for (AggregatorFactory agg : aggs) {
      retVal.add(results.getMetric(agg.getName()));
    }
    if (isResultLevelCache) {
      for (PostAggregator postAgg : query.getPostAggregatorSpecs()) {
        retVal.add(results.getMetric(postAgg.getName()));
      }
    }
    return retVal;
  };
}
for (int i = 0; i < query.getAggregatorSpecs().size(); i++) {
  final AggregatorFactory aggregatorFactory = query.getAggregatorSpecs().get(i);
  final Object value = resultValue.getValue().getMetric(aggregatorFactory.getName());
  if (grandTotals[i] == null) {
    grandTotals[i] = value;
  } else {
    // Presumed completion of the truncated snippet: fold the value into the running grand total.
    grandTotals[i] = aggregatorFactory.combine(grandTotals[i], value);
  }
}

// ... (elided) the accumulated totals map is later wrapped into the grand-total result:
    new TimeseriesResultValue(totalsMap)
);
final int expectedVal = elementsPerThread * taskCount;
for (Result<TimeseriesResultValue> result : results) {
  Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
  for (int i = 0; i < dimensionCount; ++i) {
    Assert.assertEquals(
        StringUtils.format("Failed long sum on dimension %d", i),
        expectedVal,
        result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue()
    );
    Assert.assertEquals(
        StringUtils.format("Failed double sum on dimension %d", i),
        expectedVal,
        result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue()
    );
  }
}
private static void assertTimeseriesResultValue(String msg, Result expected, Result actual)
{
  // Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't
  // always generate exactly the same results (different merge ordering / float vs double).
  Assert.assertEquals(StringUtils.format("%s: timestamp", msg), expected.getTimestamp(), actual.getTimestamp());

  TimeseriesResultValue expectedVal = (TimeseriesResultValue) expected.getValue();
  TimeseriesResultValue actualVal = (TimeseriesResultValue) actual.getValue();

  final Map<String, Object> expectedMap = (Map<String, Object>) expectedVal.getBaseObject();
  final Map<String, Object> actualMap = (Map<String, Object>) actualVal.getBaseObject();

  assertRow(
      msg,
      new MapBasedRow(expected.getTimestamp(), expectedMap),
      new MapBasedRow(actual.getTimestamp(), actualMap)
  );
}
@Nullable
public Long sumMetric(final Task task, final DimFilter filter, final String metric)
{
  // Do a query.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("test_ds")
                                .filters(filter)
                                .aggregators(
                                    ImmutableList.of(
                                        new LongSumAggregatorFactory(metric, metric)
                                    )
                                )
                                .granularity(Granularities.ALL)
                                .intervals("2000/3000")
                                .build();

  List<Result<TimeseriesResultValue>> results =
      task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();

  if (results.isEmpty()) {
    return 0L;
  } else {
    return results.get(0).getValue().getLongMetric(metric);
  }
}
public static List<Result<TimeseriesResultValue>> timeseriesResult(final Map<String, ?> map)
{
  return ImmutableList.of(
      new Result<>(DateTimes.of("2000"), new TimeseriesResultValue((Map<String, Object>) map))
  );
}
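// Hedged usage sketch for the helper above; the metric names and values are illustrative assumptions.
// Builds a one-element result list timestamped 2000-01-01 and reads a metric back out.
List<Result<TimeseriesResultValue>> rows = timeseriesResult(ImmutableMap.of("rows", 10L, "idx", 6619L));
long rowCount = rows.get(0).getValue().getLongMetric("rows"); // 10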
private Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn(
    final TimeseriesQuery query,
    final MetricManipulationFn fn,
    final boolean calculatePostAggs
)
{
  return result -> {
    final TimeseriesResultValue holder = result.getValue();
    final Map<String, Object> values = Maps.newHashMap(holder.getBaseObject());
    if (calculatePostAggs && !query.getPostAggregatorSpecs().isEmpty()) {
      // Put the non-finalized aggregator values in first, so dependent post-aggregators can be computed.
      for (AggregatorFactory agg : query.getAggregatorSpecs()) {
        values.put(agg.getName(), holder.getMetric(agg.getName()));
      }
      for (PostAggregator postAgg : query.getPostAggregatorSpecs()) {
        values.put(postAgg.getName(), postAgg.compute(values));
      }
    }
    for (AggregatorFactory agg : query.getAggregatorSpecs()) {
      values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName())));
    }
    return new Result<>(
        result.getTimestamp(),
        new TimeseriesResultValue(values)
    );
  };
}
}
Assert.assertEquals(
    result.toString(),
    QueryRunnerTestHelper.skippedDay.equals(result.getTimestamp()) ? 0L : 2L,
    value.getLongMetric("rows").longValue()
);
Assert.assertEquals(
    result.toString(),
    QueryRunnerTestHelper.skippedDay.equals(result.getTimestamp()) ? 0.0d : 2.0d,
    value.getDoubleMetric("uniques"),
@Test
public void testTimeseriesNoAggregators()
{
  Granularity gran = Granularities.DAY;
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource(QueryRunnerTestHelper.dataSource)
                                .granularity(gran)
                                .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
                                .descending(descending)
                                .build();

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query), CONTEXT).toList();

  final DateTime expectedLast = descending
                                ? QueryRunnerTestHelper.earliest
                                : QueryRunnerTestHelper.last;

  Result lastResult = null;
  for (Result<TimeseriesResultValue> result : results) {
    DateTime current = result.getTimestamp();
    Assert.assertFalse(
        StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast),
        descending ? current.isBefore(expectedLast) : current.isAfter(expectedLast)
    );
    Assert.assertEquals(ImmutableMap.of(), result.getValue().getBaseObject());
    lastResult = result;
  }
  Assert.assertEquals(lastResult.toString(), expectedLast, lastResult.getTimestamp());
}
@Override
public void run()
{
  QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
      factory.createRunner(incrementalIndexSegment),
      factory.getToolchest()
  );
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("xxx")
                                .granularity(Granularities.ALL)
                                .intervals(ImmutableList.of(queryInterval))
                                .aggregators(queryAggregatorFactories)
                                .build();
  Map<String, Object> context = new HashMap<String, Object>();
  List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query), context).toList();
  for (Result<TimeseriesResultValue> result : results) {
    if (someoneRan.get()) {
      Assert.assertTrue(result.getValue().getDoubleMetric("doubleSumResult0") > 0);
    }
  }
  if (currentlyRunning.get() > 0) {
    concurrentlyRan.set(true);
  }
}
}
Assert.assertEquals("result size", 1, results.size()); Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); Assert.assertEquals("result count metric", 1, (long) results.get(0).getValue().getLongMetric("rows")); Assert.assertEquals("result size", 1, results.size()); Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); Assert.assertEquals("result count metric", 2, (long) results.get(0).getValue().getLongMetric("rows"));