/**
 * Advances the given instant by exactly one bucket, i.e. adds this
 * granularity's fixed duration (as returned by {@code getDuration()}).
 * The chronology/zone of {@code time} is preserved by Joda's {@code plus}.
 */
@Override
public DateTime increment(DateTime time)
{
  return time.plus(getDuration());
}
/**
 * Returns the start of the duration-sized bucket containing {@code time},
 * with buckets aligned to {@code origin}.
 *
 * <p>Fix: the previous {@code t % duration - origin} could be less than or
 * equal to {@code -duration} (pre-epoch timestamps make {@code t % duration}
 * negative in Java), and a single {@code offset += duration} correction then
 * left the offset negative — yielding a "bucket start" AFTER the input time.
 * {@link Math#floorMod} keeps the offset in {@code [0, duration)} for every
 * combination of {@code t} and {@code origin}, and is identical to the old
 * computation in the common case ({@code t >= 0}, {@code origin} already
 * normalized into {@code [0, duration)}).
 */
@Override
public DateTime bucketStart(DateTime time)
{
  final long t = time.getMillis();
  final long duration = getDurationMillis();
  // Distance from the most recent origin-aligned boundary; always in [0, duration).
  final long offset = Math.floorMod(t - origin, duration);
  // Preserve the input's chronology (and thus its time zone).
  return new DateTime(t - offset, time.getChronology());
}
/**
 * Returns the {@code BUCKET_GRANULARITY}-aligned interval that contains the
 * start instant of the given segment's interval.
 */
private static Interval getBucketInterval(DataSegment segment)
{
  return BUCKET_GRANULARITY.bucket(segment.getInterval().getStart());
}
}
/**
 * Verifies JSON (de)serialization of {@code DurationGranularity}: plain
 * duration, duration with an origin timestamp, a full round trip, and
 * rejection of a zero duration.
 */
@Test
public void testSerializeDuration() throws Exception
{
  final ObjectMapper objectMapper = new DefaultObjectMapper();

  // A bare duration with no origin.
  Granularity parsed = objectMapper.readValue(
      "{ \"type\": \"duration\", \"duration\": \"3600000\" }",
      Granularity.class
  );
  Assert.assertEquals(new DurationGranularity(3600000, null), parsed);

  // Origin supplied as a timestamp; equality with origin 2 indicates the
  // timestamp's millis are reduced modulo the 5 ms duration.
  parsed = objectMapper.readValue(
      "{ \"type\": \"duration\", \"duration\": \"5\", \"origin\": \"2012-09-01T00:00:00.002Z\" }",
      Granularity.class
  );
  Assert.assertEquals(new DurationGranularity(5, 2), parsed);

  // Serialize-then-deserialize must produce an equal instance.
  final DurationGranularity roundTrip = new DurationGranularity(5, 2);
  Assert.assertEquals(
      roundTrip,
      objectMapper.readValue(objectMapper.writeValueAsString(roundTrip), Granularity.class)
  );

  // A zero duration must be rejected during deserialization.
  try {
    objectMapper.readValue("{ \"type\": \"duration\", \"duration\": \"0\" }", Granularity.class);
    Assert.fail();
  }
  catch (JsonMappingException ignored) {
    // expected: duration must be strictly positive
  }
}
/**
 * {@code toDateTime} must reconstruct an instant from its millis unchanged,
 * regardless of the granularity's duration and origin.
 */
@Test
public void testDurationToDateTime()
{
  final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00");
  final Granularity granularity = new DurationGranularity(
      new Period("PT12H5M").toStandardDuration().getMillis(),
      origin
  );

  final DateTime instant = DateTimes.of("2012-01-01T05:00:04.123-08:00");
  Assert.assertEquals(instant, granularity.toDateTime(instant.getMillis()));
}
/**
 * Verifies {@code bucketStart} truncation for a PT12H5M duration granularity
 * anchored at a fixed origin, including an input that already sits exactly on
 * a bucket boundary (the last case).
 */
@Test
public void testDurationTruncate()
{
  final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00");
  final Granularity granularity = new DurationGranularity(
      new Period("PT12H5M").toStandardDuration().getMillis(),
      origin
  );

  final DateTime truncatedBeforeOrigin = granularity.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00"));
  final DateTime truncatedNearOrigin = granularity.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00"));
  final DateTime truncatedAfterOrigin = granularity.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00"));
  final DateTime truncatedOnBoundary = granularity.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00"));

  assertSameDateTime(
      Lists.newArrayList(
          DateTimes.of("2012-01-01T04:50:00.000-08:00"),
          DateTimes.of("2012-01-02T05:00:00.000-08:00"),
          DateTimes.of("2012-01-02T17:05:00.000-08:00"),
          DateTimes.of("2012-02-03T22:25:00.000-08:00")
      ),
      Lists.newArrayList(truncatedBeforeOrigin, truncatedNearOrigin, truncatedAfterOrigin, truncatedOnBoundary)
  );
}
/**
 * Returns the start of the duration-sized bucket containing {@code time},
 * with buckets aligned to {@code origin}.
 *
 * <p>Fix: the previous {@code t % duration - origin} could be less than or
 * equal to {@code -duration} (pre-epoch timestamps make {@code t % duration}
 * negative in Java), and a single {@code offset += duration} correction then
 * left the offset negative — yielding a "bucket start" AFTER the input time.
 * {@link Math#floorMod} keeps the offset in {@code [0, duration)} for every
 * combination of {@code t} and {@code origin}, and is identical to the old
 * computation in the common case ({@code t >= 0}, {@code origin} already
 * normalized into {@code [0, duration)}).
 */
@Override
public DateTime bucketStart(DateTime time)
{
  final long t = time.getMillis();
  final long duration = getDurationMillis();
  // Distance from the most recent origin-aligned boundary; always in [0, duration).
  final long offset = Math.floorMod(t - origin, duration);
  // Preserve the input's chronology (and thus its time zone).
  return new DateTime(t - offset, time.getChronology());
}
/**
 * Advances the given instant by exactly one bucket, i.e. adds this
 * granularity's fixed duration (as returned by {@code getDuration()}).
 * The chronology/zone of {@code time} is preserved by Joda's {@code plus}.
 */
@Override
public DateTime increment(DateTime time)
{
  return time.plus(getDuration());
}
/**
 * Returns the {@code BUCKET_GRANULARITY}-aligned interval that contains the
 * start instant of the given segment's interval.
 */
private static Interval getBucketInterval(DataSegment segment)
{
  return BUCKET_GRANULARITY.bucket(segment.getInterval().getStart());
}
}
actual.getGranularitySpec(), new ArbitraryGranularitySpec( new DurationGranularity(86400000, null), ImmutableList.of(Intervals.of("2014/2015"))
.setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new DurationGranularity(86400L, 0L)) .build();