/**
 * Builds the metadata for a segment that merges all of {@code segments}: the interval comes from
 * {@code computeMergedInterval} (defined elsewhere in this file), and the dimension/metric lists are
 * the case-insensitive union of the inputs' dimensions and metrics.
 *
 * @param dataSource data source name for the merged segment
 * @param version    version string for the merged segment
 * @param segments   segments whose metadata is merged
 * @return a merged {@link DataSegment} with a {@link NoneShardSpec} and the current binary version
 */
private static DataSegment computeMergedSegment(
    final String dataSource,
    final String version,
    final List<DataSegment> segments
)
{
  final Interval mergedInterval = computeMergedInterval(segments);

  // Case-insensitive ordering so differently-cased duplicates collapse to one entry.
  final Set<String> dimensionUnion = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
  final Set<String> metricUnion = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
  segments.forEach(
      segment -> {
        dimensionUnion.addAll(segment.getDimensions());
        metricUnion.addAll(segment.getMetrics());
      }
  );

  return DataSegment.builder()
                    .dataSource(dataSource)
                    .interval(mergedInterval)
                    .version(version)
                    .binaryVersion(IndexIO.CURRENT_VERSION_ID)
                    .shardSpec(NoneShardSpec.instance())
                    .dimensions(Lists.newArrayList(dimensionUnion))
                    .metrics(Lists.newArrayList(metricUnion))
                    .build();
}
}
/**
 * Creates a one-day "foo" test segment shifted {@code offset} days from 2013-01-01.
 *
 * @param offset number of days to shift the segment's interval
 * @return a segment spanning [2013-01-01 + offset, 2013-01-02 + offset) with fixed dims/metrics
 */
private DataSegment makeSegment(int offset)
{
  // Both endpoints shift together, so the segment always covers exactly one day.
  final Interval shiftedDay = new Interval(
      DateTimes.of("2013-01-01").plusDays(offset),
      DateTimes.of("2013-01-02").plusDays(offset)
  );
  return DataSegment.builder()
                    .dataSource("foo")
                    .interval(shiftedDay)
                    .version(DateTimes.nowUtc().toString())
                    .dimensions(ImmutableList.of("dim1", "dim2"))
                    .metrics(ImmutableList.of("met1", "met2"))
                    .loadSpec(ImmutableMap.of("type", "local"))
                    .build();
}
/**
 * Builds a minimal "test_load_queue_peon" segment over the given interval, with empty
 * dimensions/metrics and a fixed version, binary version, and size.
 *
 * @param intervalStr interval in a form accepted by {@code Intervals.of}
 * @return the test segment
 */
private DataSegment dataSegmentWithInterval(String intervalStr)
{
  final DataSegment.Builder builder = DataSegment.builder();
  builder.dataSource("test_load_queue_peon");
  builder.interval(Intervals.of(intervalStr));
  builder.loadSpec(ImmutableMap.of());
  builder.version("2015-05-27T03:38:35.683Z");
  builder.dimensions(ImmutableList.of());
  builder.metrics(ImmutableList.of());
  builder.shardSpec(NoneShardSpec.instance());
  builder.binaryVersion(9);
  builder.size(1200);
  return builder.build();
}
/**
 * Builds the metadata for a segment that merges all of {@code segments}: the interval comes from
 * {@code computeMergedInterval} (defined elsewhere in this file), while dimensions and metrics are
 * unioned case-insensitively across the inputs.
 *
 * @param dataSource data source name for the merged segment
 * @param version    version string for the merged segment
 * @param segments   segments whose metadata is merged
 * @return a merged {@link DataSegment} with a {@link NoneShardSpec} and the current binary version
 */
private static DataSegment computeMergedSegment(
    final String dataSource,
    final String version,
    final List<DataSegment> segments
)
{
  final Interval mergedInterval = computeMergedInterval(segments);

  // Case-insensitive ordering deduplicates names that differ only by case.
  final Set<String> dimensionUnion = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
  final Set<String> metricUnion = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
  segments.forEach(
      segment -> {
        dimensionUnion.addAll(segment.getDimensions());
        metricUnion.addAll(segment.getMetrics());
      }
  );

  return DataSegment.builder()
                    .dataSource(dataSource)
                    .interval(mergedInterval)
                    .version(version)
                    .binaryVersion(IndexIO.CURRENT_VERSION_ID)
                    .shardSpec(NoneShardSpec.instance())
                    .dimensions(Lists.newArrayList(dimensionUnion))
                    .metrics(Lists.newArrayList(metricUnion))
                    .build();
}
}
/**
 * Returns a copy of this segment with its metric list replaced; all other fields are carried over
 * unchanged via the copy-constructing builder.
 *
 * @param metrics replacement metric names
 * @return a new {@link DataSegment} identical to this one except for {@code metrics}
 */
public DataSegment withMetrics(List<String> metrics)
{
  return builder(this)
      .metrics(metrics)
      .build();
}
/**
 * Builds a minimal "test_curator_druid_coordinator" segment over the given interval and version,
 * with a local load spec, empty dimensions/metrics, binary version 9, and zero size.
 *
 * @param intervalStr interval in a form accepted by {@code Intervals.of}
 * @param version     version string for the segment
 * @return the test segment
 */
private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String version)
{
  final ImmutableMap<String, Object> localLoadSpec = ImmutableMap.of(
      "type", "local",
      "path", "somewhere"
  );
  return DataSegment.builder()
                    .dataSource("test_curator_druid_coordinator")
                    .interval(Intervals.of(intervalStr))
                    .loadSpec(localLoadSpec)
                    .version(version)
                    .dimensions(ImmutableList.of())
                    .metrics(ImmutableList.of())
                    .shardSpec(NoneShardSpec.instance())
                    .binaryVersion(9)
                    .size(0)
                    .build();
}
}
/**
 * Registers a test segment with the given shard spec on both the server and the timeline.
 * The segment is built from this test's {@code dataSource} plus the supplied interval,
 * dimensions, metrics, and version; the timeline entry is the chunk the shard spec creates
 * for the segment's server selector.
 *
 * @param timeline  timeline to register the segment's chunk in
 * @param server    server that reports the segment
 * @param interval  interval string accepted by {@code Intervals.of}
 * @param dims      dimension names
 * @param metrics   metric names
 * @param version   segment version
 * @param shardSpec shard spec used for the segment and its timeline chunk
 */
private void addSegmentWithShardSpec(
    VersionedIntervalTimeline<String, ServerSelector> timeline,
    DruidServer server,
    String interval,
    List<String> dims,
    List<String> metrics,
    String version,
    ShardSpec shardSpec
)
{
  final DataSegment segment = DataSegment.builder()
                                         .dataSource(dataSource)
                                         .interval(Intervals.of(interval))
                                         .version(version)
                                         .dimensions(dims)
                                         .metrics(metrics)
                                         .shardSpec(shardSpec)
                                         .size(1)
                                         .build();
  server.addDataSegment(segment);

  final ServerSelector selector = new ServerSelector(
      segment,
      new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
  );
  timeline.add(Intervals.of(interval), version, shardSpec.createChunk(selector));
}
/**
 * Registers a test segment on both the server and the timeline. The segment is built from this
 * test's {@code dataSource} plus the supplied interval, dimensions, metrics, and version, and the
 * timeline entry wraps the segment's server selector in a single-element partition chunk.
 *
 * @param timeline timeline to register the segment's chunk in
 * @param server   server that reports the segment
 * @param interval interval string accepted by {@code Intervals.of}
 * @param dims     dimension names
 * @param metrics  metric names
 * @param version  segment version
 */
private void addSegment(
    VersionedIntervalTimeline<String, ServerSelector> timeline,
    DruidServer server,
    String interval,
    List<String> dims,
    List<String> metrics,
    String version
)
{
  final DataSegment segment = DataSegment.builder()
                                         .dataSource(dataSource)
                                         .interval(Intervals.of(interval))
                                         .version(version)
                                         .dimensions(dims)
                                         .metrics(metrics)
                                         .size(1)
                                         .build();
  server.addDataSegment(segment);

  final ServerSelector selector = new ServerSelector(
      segment,
      new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
  );
  // Diamond operator: the type argument is inferred, no need to repeat ServerSelector.
  timeline.add(Intervals.of(interval), version, new SingleElementPartitionChunk<>(selector));
}
/**
 * Builds a minimal "test_overlord_server_view" segment over the given interval and version, with a
 * local load spec, empty dimensions/metrics, binary version 9, and zero size.
 *
 * @param intervalStr interval in a form accepted by {@code Intervals.of}
 * @param version     version string for the segment
 * @return the test segment
 */
private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String version)
{
  final DataSegment.Builder builder = DataSegment.builder();
  builder.dataSource("test_overlord_server_view");
  builder.interval(Intervals.of(intervalStr));
  builder.loadSpec(
      ImmutableMap.of(
          "type", "local",
          "path", "somewhere"
      )
  );
  builder.version(version);
  builder.dimensions(ImmutableList.of());
  builder.metrics(ImmutableList.of());
  builder.shardSpec(NoneShardSpec.instance());
  builder.binaryVersion(9);
  builder.size(0);
  return builder.build();
}
/**
 * Builds a minimal "test_segment_loader" segment over the given interval, with a local load spec,
 * empty dimensions/metrics, a fixed version, binary version 9, and a size of 10 bytes.
 *
 * @param intervalStr interval in a form accepted by {@code Intervals.of}
 * @return the test segment
 */
private DataSegment dataSegmentWithInterval(String intervalStr)
{
  final ImmutableMap<String, Object> localLoadSpec = ImmutableMap.of(
      "type", "local",
      "path", "somewhere"
  );
  return DataSegment.builder()
                    .dataSource("test_segment_loader")
                    .interval(Intervals.of(intervalStr))
                    .loadSpec(localLoadSpec)
                    .version("2015-05-27T03:38:35.683Z")
                    .dimensions(ImmutableList.of())
                    .metrics(ImmutableList.of())
                    .shardSpec(NoneShardSpec.instance())
                    .binaryVersion(9)
                    .size(10L)
                    .build();
}
}
/**
 * Builds a minimal "test_broker_server_view" segment over the given interval and version, with a
 * local load spec, empty dimensions/metrics, binary version 9, and zero size.
 *
 * @param intervalStr interval in a form accepted by {@code Intervals.of}
 * @param version     version string for the segment
 * @return the test segment
 */
private DataSegment dataSegmentWithIntervalAndVersion(String intervalStr, String version)
{
  final ImmutableMap<String, Object> localLoadSpec = ImmutableMap.of(
      "type", "local",
      "path", "somewhere"
  );
  return DataSegment.builder()
                    .dataSource("test_broker_server_view")
                    .interval(Intervals.of(intervalStr))
                    .loadSpec(localLoadSpec)
                    .version(version)
                    .dimensions(ImmutableList.of())
                    .metrics(ImmutableList.of())
                    .shardSpec(NoneShardSpec.instance())
                    .binaryVersion(9)
                    .size(0)
                    .build();
}
/**
 * Returns a copy of this segment whose metric list is replaced with {@code metrics}; every other
 * field is preserved by the copy-constructing builder.
 *
 * @param metrics replacement metric names
 * @return a new {@link DataSegment} identical to this one except for {@code metrics}
 */
public DataSegment withMetrics(List<String> metrics)
{
  final DataSegment updated = builder(this).metrics(metrics).build();
  return updated;
}