/**
 * Builds the identifier corresponding to an existing {@link DataSegment}.
 *
 * <p>The identifier simply mirrors the segment's coordinates: dataSource,
 * interval, version, and shard spec are copied verbatim.
 *
 * @param segment segment to identify
 * @return an identifier carrying the segment's coordinates
 */
public static SegmentIdWithShardSpec fromDataSegment(final DataSegment segment)
{
  return new SegmentIdWithShardSpec(
      segment.getDataSource(),
      segment.getInterval(),
      segment.getVersion(),
      segment.getShardSpec()
  );
}
}
return new SegmentIdWithShardSpec( dataSource, interval,
/**
 * Allocates a segment identifier for the row by bucketing its timestamp at the
 * configured granularity and handing out the next partition number for that bucket.
 *
 * <p>Partition counters are kept per bucket-start millis in {@code counters};
 * access is serialized by synchronizing on that map. The {@code sequenceName},
 * {@code previousSegmentId}, and {@code skipSegmentLineageCheck} arguments are
 * ignored by this allocator.
 *
 * @param row                     row whose timestamp selects the bucket
 * @param sequenceName            unused
 * @param previousSegmentId       unused
 * @param skipSegmentLineageCheck unused
 * @return a new identifier in the row's bucket with a fresh partition number
 */
@Override
public SegmentIdWithShardSpec allocate(
    final InputRow row,
    final String sequenceName,
    final String previousSegmentId,
    final boolean skipSegmentLineageCheck
)
{
  synchronized (counters) {
    final DateTime dateTimeTruncated = granularity.bucketStart(row.getTimestamp());
    final long timestampTruncated = dateTimeTruncated.getMillis();
    // computeIfAbsent replaces the original containsKey/put pair: one lookup
    // instead of three, with identical behavior under the enclosing lock.
    final int partitionNum = counters
        .computeIfAbsent(timestampTruncated, ignored -> new AtomicInteger())
        .getAndIncrement();
    return new SegmentIdWithShardSpec(
        dataSource,
        granularity.bucket(dateTimeTruncated),
        VERSION,
        new NumberedShardSpec(partitionNum, 0)
    );
  }
}
}
/**
 * Test shorthand: builds a segment identifier for {@code AppenderatorTester.DATASOURCE}
 * with a {@link LinearShardSpec} at the given partition.
 *
 * @param interval     interval in ISO form, e.g. {@code "2000/2001"}
 * @param version      segment version string
 * @param partitionNum linear shard partition number
 */
private static SegmentIdWithShardSpec SI(String interval, String version, int partitionNum)
{
  return new SegmentIdWithShardSpec(
      AppenderatorTester.DATASOURCE,
      Intervals.of(interval),
      version,
      new LinearShardSpec(partitionNum)
  );
}
/**
 * Resolves the segment identifier for a row timestamp, creating and registering a new
 * one on first sight of the bucket.
 *
 * @param timestamp row timestamp in millis
 * @return the (possibly newly registered) identifier for the timestamp's bucket,
 *         or {@code null} if the rejection policy refuses the timestamp
 */
private SegmentIdWithShardSpec getSegmentIdentifier(long timestamp)
{
  // Guard: rows outside the acceptance window produce no segment.
  if (!rejectionPolicy.accept(timestamp)) {
    return null;
  }

  final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
  final VersioningPolicy versioningPolicy = config.getVersioningPolicy();

  final DateTime bucketStart = segmentGranularity.bucketStart(DateTimes.utc(timestamp));
  final long bucketMillis = bucketStart.getMillis();

  // Fast path: an identifier for this bucket already exists.
  final SegmentIdWithShardSpec existing = segments.get(bucketMillis);
  if (existing != null) {
    return existing;
  }

  // Slow path: mint a new identifier for the bucket and register it.
  final Interval interval = new Interval(bucketStart, segmentGranularity.increment(bucketStart));
  final SegmentIdWithShardSpec identifier = new SegmentIdWithShardSpec(
      schema.getDataSource(),
      interval,
      versioningPolicy.getVersion(interval),
      config.getShardSpec()
  );
  addSegment(identifier);
  return identifier;
}
return new SegmentIdWithShardSpec( dataSource, interval, return null; } else if (maxId.getShardSpec() instanceof LinearShardSpec) { return new SegmentIdWithShardSpec( dataSource, maxId.getInterval(), ); } else if (maxId.getShardSpec() instanceof NumberedShardSpec) { return new SegmentIdWithShardSpec( dataSource, maxId.getInterval(),
/**
 * Happy path: add every row under one sequence, publish, wait for the driver to drop
 * the sequence, register handoff, and check the handed-off identifiers and metadata.
 */
@Test(timeout = 60_000L)
public void testSimple() throws Exception
{
  final TestCommitterSupplier<Integer> committerSupplier = new TestCommitterSupplier<>();

  Assert.assertNull(driver.startJob());

  for (int i = 0; i < ROWS.size(); i++) {
    committerSupplier.setMetadata(i + 1);
    Assert.assertTrue(driver.add(ROWS.get(i), "dummy", committerSupplier, false, true).isOk());
  }

  final SegmentsAndMetadata published = driver.publish(
      makeOkPublisher(),
      committerSupplier.get(),
      ImmutableList.of("dummy")
  ).get(PUBLISH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);

  // Poll until the "dummy" sequence disappears from the driver's active segments.
  // NOTE(review): sleep-based busy-wait, bounded only by the 60s test timeout.
  while (driver.getSegments().containsKey("dummy")) {
    Thread.sleep(100);
  }

  final SegmentsAndMetadata segmentsAndMetadata =
      driver.registerHandoff(published)
            .get(HANDOFF_CONDITION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);

  Assert.assertEquals(
      ImmutableSet.of(
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)),
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0))
      ),
      asIdentifiers(segmentsAndMetadata.getSegments())
  );

  Assert.assertEquals(3, segmentsAndMetadata.getCommitMetadata());
}
/**
 * Add all rows, push-and-clear once, publish everything, then verify segment states,
 * the published identifiers, and that no commit metadata was recorded.
 */
@Test
public void testSimple() throws Exception
{
  Assert.assertNull(driver.startJob());

  for (InputRow row : ROWS) {
    Assert.assertTrue(driver.add(row, "dummy").isOk());
  }

  // Both hourly buckets should be open for appending before the push...
  checkSegmentStates(2, SegmentState.APPENDING);
  driver.pushAllAndClear(TIMEOUT);
  // ...and pushed-and-dropped afterwards.
  checkSegmentStates(2, SegmentState.PUSHED_AND_DROPPED);

  final SegmentsAndMetadata published =
      driver.publishAll(makeOkPublisher()).get(TIMEOUT, TimeUnit.MILLISECONDS);

  Assert.assertEquals(
      ImmutableSet.of(
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)),
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0))
      ),
      published.getSegments()
               .stream()
               .map(SegmentIdWithShardSpec::fromDataSegment)
               .collect(Collectors.toSet())
  );

  Assert.assertNull(published.getCommitMetadata());
}
/**
 * Push after every single row: each add leaves exactly one appending segment, and each
 * push-and-clear converts it to pushed-and-dropped, growing the dropped count by one.
 * Finally publishes everything and verifies the three resulting identifiers (the second
 * hourly bucket receives two partitions because it is pushed twice).
 */
@Test
public void testIncrementalPush() throws Exception
{
  Assert.assertNull(driver.startJob());

  int pushedCount = 0;
  for (InputRow row : ROWS) {
    Assert.assertTrue(driver.add(row, "dummy").isOk());

    checkSegmentStates(1, SegmentState.APPENDING);
    checkSegmentStates(pushedCount, SegmentState.PUSHED_AND_DROPPED);

    driver.pushAllAndClear(TIMEOUT);

    checkSegmentStates(0, SegmentState.APPENDING);
    checkSegmentStates(++pushedCount, SegmentState.PUSHED_AND_DROPPED);
  }

  final SegmentsAndMetadata published =
      driver.publishAll(makeOkPublisher()).get(TIMEOUT, TimeUnit.MILLISECONDS);

  Assert.assertEquals(
      ImmutableSet.of(
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)),
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)),
          new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(1, 0))
      ),
      published.getSegments()
               .stream()
               .map(SegmentIdWithShardSpec::fromDataSegment)
               .collect(Collectors.toSet())
  );

  Assert.assertNull(published.getCommitMetadata());
}
new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(handedoffFromSequence0.getSegments()) new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(handedoffFromSequence1.getSegments())
Interval interval = action.getPreferredSegmentGranularity().bucket(action.getTimestamp()); ShardSpec shardSpec = new NumberedShardSpec(segmentAllocatePartitionCounter++, 0); return (RetType) new SegmentIdWithShardSpec(action.getDataSource(), interval, "latestVersion", shardSpec);
new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) new SegmentIdWithShardSpec(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(i - 1, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments())
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id2, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id3, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME),
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id2, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id3, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id4, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(THE_DISTANT_FUTURE), assertSameIdentifier( id5, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(THE_DISTANT_FUTURE),
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id2, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME),
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id2, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME),
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME),
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME),
new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME), assertSameIdentifier( id2, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(THE_DISTANT_FUTURE), assertSameIdentifier( id3, new SegmentIdWithShardSpec( DATA_SOURCE, Granularities.HOUR.bucket(PARTY_TIME),
final Set<DataSegment> segments = checker.findUsedSegments( ImmutableSet.of( new SegmentIdWithShardSpec("foo", Intervals.of("2000/P1D"), "a", new LinearShardSpec(1)), new SegmentIdWithShardSpec("foo", Intervals.of("2001/P1D"), "b", new LinearShardSpec(0)), new SegmentIdWithShardSpec("bar", Intervals.of("2002/P1D"), "b", new LinearShardSpec(0))