public Bucket build()
{
  ArrayList<DataSegment> segmentsList = new ArrayList<>(segments.size());
  double[] leftSum = new double[segments.size()];
  double[] rightSum = new double[segments.size()];
  int i = 0;
  for (SegmentAndSum segmentAndSum : segments) {
    segmentsList.add(segmentAndSum.dataSegment);
    leftSum[i] = segmentAndSum.leftSum;
    rightSum[i] = segmentAndSum.rightSum;
    ++i;
  }
  // The bucket ends at the latest segment end, falling back to the bucket
  // interval's own end when there are no segments.
  long bucketEndMillis = segmentsList
      .stream()
      .mapToLong(s -> s.getInterval().getEndMillis())
      .max()
      .orElseGet(interval::getEndMillis);
  return new Bucket(Intervals.utc(interval.getStartMillis(), bucketEndMillis), segmentsList, leftSum, rightSum);
}
}
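All of these snippets lean on the `Intervals.utc` helper. A minimal sketch of what such a helper looks like (an assumption inferred from its usage here, not a verbatim copy of the real utility class): it builds a Joda-Time `Interval` over `[startMillis, endMillis)` pinned to the UTC chronology, so the result does not depend on the JVM's default time zone.

import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

public final class Intervals
{
  // Hypothetical sketch: construct a half-open [startMillis, endMillis) interval
  // in the UTC chronology, independent of the JVM default time zone.
  public static Interval utc(long startMillis, long endMillis)
  {
    return new Interval(startMillis, endMillis, ISOChronology.getInstanceUTC());
  }

  private Intervals() {}
}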
if (!uncoveredIntervalsOverflowed && startMillis != intervalStart) {
  // Record the gap preceding this segment, unless the configured limit on
  // reported uncovered intervals has already been reached.
  if (uncoveredIntervalsLimit > uncoveredIntervals.size()) {
    uncoveredIntervals.add(Intervals.utc(startMillis, intervalStart));
  } else {
    uncoveredIntervalsOverflowed = true;
  }
}

if (!uncoveredIntervalsOverflowed && startMillis < endMillis) {
  // Record any trailing gap after the last covered segment, under the same limit.
  if (uncoveredIntervalsLimit > uncoveredIntervals.size()) {
    uncoveredIntervals.add(Intervals.utc(startMillis, endMillis));
  } else {
    uncoveredIntervalsOverflowed = true;
  }
}
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
      new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
  );
  return Sequences.empty();
}
},
@Override
public Sequence<Result<TimeseriesResultValue>> run(QueryPlus queryPlus, Map context)
{
  ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
      new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
  );
  return Sequences.empty();
}
},
@Test
public void testIterableNone()
{
  // Granularities.NONE buckets at millisecond resolution, so the iterator yields
  // 1000 one-millisecond intervals: [0, 1), [1, 2), ..., [999, 1000).
  final Iterator<Interval> iterator = Granularities.NONE.getIterable(Intervals.utc(0, 1000)).iterator();
  int count = 0;
  while (iterator.hasNext()) {
    Assert.assertEquals(count, iterator.next().getStartMillis());
    count++;
  }
}
public static int D(final String dayString)
{
  return (int) (Intervals.utc(T("1970"), T(dayString)).toDurationMillis() / (86400L * 1000L));
}
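A few hypothetical checks of `D`'s contract, assuming the companion test helper `T(...)` parses an ISO-8601 string to epoch millis: `D` returns the number of whole days between 1970-01-01 UTC and the given day.

// Hypothetical assertions illustrating D's behavior:
Assert.assertEquals(0, D("1970"));          // 1970-01-01 is day zero
Assert.assertEquals(1, D("1970-01-02"));    // one full day after the epoch
Assert.assertEquals(365, D("1971"));        // 1970 is not a leap year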
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  if ((int) context.get("count") < 3) {
    ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
        new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
    );
    context.put("count", (int) context.get("count") + 1);
    return Sequences.empty();
  } else {
    return Sequences.simple(
        Collections.singletonList(
            new Result<>(
                DateTimes.nowUtc(),
                new TimeseriesResultValue(new HashMap<>())
            )
        )
    );
  }
}
},
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> context
)
{
  if ((int) context.get("count") == 0) {
    ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(
        new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1)
    );
    context.put("count", 1);
    return Sequences.empty();
  } else {
    return Sequences.simple(
        Collections.singletonList(
            new Result<>(
                DateTimes.nowUtc(),
                new TimeseriesResultValue(new HashMap<>())
            )
        )
    );
  }
}
},
public static List<Interval> toIntervals(final RangeSet<Long> rangeSet)
{
  final List<Interval> retVal = new ArrayList<>();
  for (Range<Long> range : rangeSet.asRanges()) {
    final long start;
    final long end;

    if (range.hasLowerBound()) {
      final long millis = range.lowerEndpoint();
      // An open lower bound excludes its endpoint, so start one millisecond later.
      start = millis + (range.lowerBoundType() == BoundType.OPEN ? 1 : 0);
    } else {
      start = Filtration.eternity().getStartMillis();
    }

    if (range.hasUpperBound()) {
      final long millis = range.upperEndpoint();
      // A closed upper bound includes its endpoint, so extend the half-open end by one.
      end = millis + (range.upperBoundType() == BoundType.OPEN ? 0 : 1);
    } else {
      end = Filtration.eternity().getEndMillis();
    }

    retVal.add(Intervals.utc(start, end));
  }
  return retVal;
}
}
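A minimal usage sketch, assuming Guava's `TreeRangeSet` as the `RangeSet` implementation. The `BoundType` adjustments above normalize open/closed endpoints into half-open `[start, end)` millisecond intervals:

RangeSet<Long> rangeSet = TreeRangeSet.create();
rangeSet.add(Range.closed(1000L, 2000L));      // [1000, 2000], both endpoints inclusive
rangeSet.add(Range.openClosed(5000L, 6000L));  // (5000, 6000], lower endpoint exclusive

// closed(1000, 2000)     -> Intervals.utc(1000, 2001)
// openClosed(5000, 6000) -> Intervals.utc(5001, 6001)
List<Interval> intervals = toIntervals(rangeSet);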
@Test
public void testFilteredAggregatorDontOptimizeOnNonTimeColumn()
{
  // Filter is not on __time, so no optimizations should be made.
  LongSumAggregatorFactory longSumAggregatorFactory = new LongSumAggregatorFactory("test", "test");
  FilteredAggregatorFactory aggregatorFactory = new FilteredAggregatorFactory(
      longSumAggregatorFactory,
      new IntervalDimFilter("not_time", Collections.singletonList(Intervals.utc(1000, 2000)), null)
  );

  Interval exclude = Intervals.utc(2000, 3000);
  Interval include = Intervals.utc(1500, 1600);
  Interval partial = Intervals.utc(1500, 2500);

  AggregatorFactory excludedAgg = aggregatorFactory.optimizeForSegment(getOptimizationContext(exclude));
  Assert.assertEquals(aggregatorFactory, excludedAgg);

  AggregatorFactory includedAgg = aggregatorFactory.optimizeForSegment(getOptimizationContext(include));
  Assert.assertEquals(aggregatorFactory, includedAgg);

  AggregatorFactory partialAgg = aggregatorFactory.optimizeForSegment(getOptimizationContext(partial));
  Assert.assertEquals(aggregatorFactory, partialAgg);
}
new IntervalDimFilter(
    ColumnHolder.TIME_COLUMN_NAME,
    Collections.singletonList(Intervals.utc(1000, 2000)),
    null
)

Interval exclude = Intervals.utc(2000, 3000);
Interval include = Intervals.utc(1500, 1600);
Interval partial = Intervals.utc(1500, 2500);

// For the partially overlapping segment, the __time filter is narrowed to the
// overlap of the original interval [1000, 2000) and the segment [1500, 2500):
new IntervalDimFilter(
    ColumnHolder.TIME_COLUMN_NAME,
    Collections.singletonList(Intervals.utc(1500, 2000)),
    null
)
private void testPersist(final Object commitMetadata) throws Exception
{
  // ... (the enclosing call is elided in this hunk; only these arguments remain)
  Intervals.utc(0, TimeUnit.HOURS.toMillis(1)),
  schema,
  tuningConfig.getShardSpec(),
Intervals.utc(0, TimeUnit.HOURS.toMillis(1)),
schema,
tuningConfig.getShardSpec(),
@Test
public void testDimensionsSpecSerde() throws Exception
{
  LocatedSegmentDescriptor expected = new LocatedSegmentDescriptor(
      new SegmentDescriptor(Intervals.utc(100, 200), "version", 100),
      65535,
      Arrays.asList(
          new DruidServerMetadata("server1", "host1", null, 30000L, ServerType.HISTORICAL, "tier1", 0),
          new DruidServerMetadata("server2", "host2", null, 40000L, ServerType.HISTORICAL, "tier1", 1),
          new DruidServerMetadata("server3", "host3", null, 50000L, ServerType.REALTIME, "tier2", 2)
      )
  );

  LocatedSegmentDescriptor actual = mapper.readValue(
      mapper.writeValueAsString(expected),
      LocatedSegmentDescriptor.class
  );

  Assert.assertEquals(expected, actual);
}
}
Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
);
);

QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
    Collections.singletonList(Intervals.utc(0, 1000000))
);
Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
);

GroupByQuery query = GroupByQuery
Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
);

GroupByQuery innerQuery = GroupByQuery
Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
);

GroupByQuery query = GroupByQuery
Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
);