@Test
public void testSimpleDataIngestAndTimeseriesQuery() throws Exception
{
  // Build a timeseries-specific aggregation test helper wired with this module's Jackson modules.
  final AggregationTestHelper helper = AggregationTestHelper.createTimeseriesQueryAggregationTestHelper(
      sm.getJacksonModules(),
      tempFolder
  );

  // Run the JSON-defined timeseries query over the two pre-built segments.
  final Sequence seq = helper.runQueryOnSegments(
      ImmutableList.of(s1, s2),
      readFileFromClasspathAsString("timeseries_query.json")
  );

  // The query is expected to produce exactly one result row.
  final Result<TimeseriesResultValue> row =
      (Result<TimeseriesResultValue>) Iterables.getOnlyElement(seq.toList());

  Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), row.getTimestamp());

  final TimeseriesResultValue value = row.getValue();
  // Sketch count and the estimate post-aggregators should all agree at ~50 distinct items;
  // A-not-B and a metric over a non-existent column should both estimate 0.
  Assert.assertEquals(50.0, value.getDoubleMetric("sketch_count"), 0.01);
  Assert.assertEquals(50.0, value.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
  Assert.assertEquals(50.0, value.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
  Assert.assertEquals(50.0, value.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, value.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, value.getDoubleMetric("non_existing_col_validation"), 0.01);
}
// NOTE(review): fragment — continuation of a larger Assert.assertEquals(...) sequence whose call
// heads lie outside this excerpt. Asserts "index" and "addRowsIndexConstant" doubles with a
// relative tolerance of 1e-6 (delta = actual * 1e-6), and "uniques" with an absolute 0.02 delta.
result.toString(), Doubles.tryParse(expectedIndex[count]).doubleValue(), value.getDoubleMetric("index").doubleValue(), value.getDoubleMetric("index").doubleValue() * 1e-6 ); Assert.assertEquals( new Double(expectedIndex[count]) + 13L + 1L, value.getDoubleMetric("addRowsIndexConstant"), value.getDoubleMetric("addRowsIndexConstant") * 1e-6 ); Assert.assertEquals( value.getDoubleMetric("uniques"), 9.0d, 0.02 result.toString(), 0.0D, value.getDoubleMetric("index").doubleValue(), value.getDoubleMetric("index").doubleValue() * 1e-6 ); Assert.assertEquals( result.toString(), new Double(expectedIndex[count]) + 1L, value.getDoubleMetric("addRowsIndexConstant"), value.getDoubleMetric("addRowsIndexConstant") * 1e-6 ); Assert.assertEquals(
@Override
public void run()
{
  // Wrap the factory's runner so aggregator results are finalized before we assert on them.
  final QueryRunner<Result<TimeseriesResultValue>> finalizedRunner =
      new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
          factory.createRunner(incrementalIndexSegment),
          factory.getToolchest()
      );

  final TimeseriesQuery tsQuery = Druids.newTimeseriesQueryBuilder()
                                        .dataSource("xxx")
                                        .granularity(Granularities.ALL)
                                        .intervals(ImmutableList.of(queryInterval))
                                        .aggregators(queryAggregatorFactories)
                                        .build();

  final Map<String, Object> responseContext = new HashMap<String, Object>();
  final List<Result<TimeseriesResultValue>> rows =
      finalizedRunner.run(QueryPlus.wrap(tsQuery), responseContext).toList();

  for (Result<TimeseriesResultValue> row : rows) {
    // NOTE(review): someoneRan presumably flips true once a writer thread has added rows —
    // only then is a strictly positive sum guaranteed. Confirm against the thread that sets it.
    if (someoneRan.get()) {
      Assert.assertTrue(row.getValue().getDoubleMetric("doubleSumResult0") > 0);
    }
  }

  // Record that another query thread was in flight while this one ran.
  if (currentlyRunning.get() > 0) {
    concurrentlyRan.set(true);
  }
}
}
// NOTE(review): fragment — argument list of an assertEquals whose call head is outside this
// excerpt: failure message, expected value, and the per-dimension double-sum metric truncated
// to an int for comparison.
StringUtils.format("Failed double sum on dimension %d", i), expectedVal, result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue() );
@Test
public void testFullOnTimeseriesMaxMin()
{
  // Full-interval, ALL-granularity query computing the max and min of the "index" metric.
  final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                      .dataSource(QueryRunnerTestHelper.dataSource)
                                      .granularity(Granularities.ALL)
                                      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
                                      .aggregators(
                                          Arrays.asList(
                                              new DoubleMaxAggregatorFactory("maxIndex", "index"),
                                              new DoubleMinAggregatorFactory("minIndex", "index")
                                          )
                                      )
                                      .descending(descending)
                                      .build();

  final DateTime expectedEarliest = DateTimes.of("2011-01-12");
  final DateTime expectedLast = DateTimes.of("2011-04-15");

  final Iterable<Result<TimeseriesResultValue>> results =
      runner.run(QueryPlus.wrap(query), CONTEXT).toList();

  // ALL granularity yields a single bucket stamped at the earliest data time.
  final Result<TimeseriesResultValue> row = results.iterator().next();
  Assert.assertEquals(expectedEarliest, row.getTimestamp());
  Assert.assertFalse(
      StringUtils.format("Timestamp[%s] > expectedLast[%s]", row.getTimestamp(), expectedLast),
      row.getTimestamp().isAfter(expectedLast)
  );

  // Compare doubles with a relative tolerance of 1e-6.
  final TimeseriesResultValue value = row.getValue();
  Assert.assertEquals(row.toString(), 1870.061029, value.getDoubleMetric("maxIndex"), 1870.061029 * 1e-6);
  Assert.assertEquals(row.toString(), 59.021022, value.getDoubleMetric("minIndex"), 59.021022 * 1e-6);
}
@Override
@Test
public void testFullOnTimeseriesMaxMin()
{
  // Full-interval, ALL-granularity query computing the max and min of the "index" metric.
  final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                      .dataSource(QueryRunnerTestHelper.dataSource)
                                      .granularity(Granularities.ALL)
                                      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
                                      .aggregators(
                                          new DoubleMaxAggregatorFactory("maxIndex", "index"),
                                          new DoubleMinAggregatorFactory("minIndex", "index")
                                      )
                                      .descending(descending)
                                      .build();

  // NOTE(review): unlike the base test, this variant expects the bucket timestamp at the
  // epoch — presumably because this runner rolls results up to 1970-01-01; confirm.
  final DateTime expectedEarliest = DateTimes.of("1970-01-01");
  final DateTime expectedLast = DateTimes.of("2011-04-15");

  final Iterable<Result<TimeseriesResultValue>> results =
      runner.run(QueryPlus.wrap(query), CONTEXT).toList();

  final Result<TimeseriesResultValue> row = results.iterator().next();
  Assert.assertEquals(expectedEarliest, row.getTimestamp());
  Assert.assertFalse(
      StringUtils.format("Timestamp[%s] > expectedLast[%s]", row.getTimestamp(), expectedLast),
      row.getTimestamp().isAfter(expectedLast)
  );

  // Compare doubles with a relative tolerance of 1e-6.
  final TimeseriesResultValue value = row.getValue();
  Assert.assertEquals(row.toString(), 1870.061029, value.getDoubleMetric("maxIndex"), 1870.061029 * 1e-6);
  Assert.assertEquals(row.toString(), 59.021022, value.getDoubleMetric("minIndex"), 59.021022 * 1e-6);
}
// NOTE(review): fragment — middle of an assertEquals outside this excerpt. The skipped day
// has no rows, so its "uniques" estimate is 0.0; every other bucket expects 2.0.
result.toString(), QueryRunnerTestHelper.skippedDay.equals(result.getTimestamp()) ? 0.0d : 2.0d, value.getDoubleMetric( "uniques" ),
"Failed double sum on dimension " + i, 2 * rows, result.getValue().getDoubleMetric("doubleSumResult" + i).intValue() );