// Verifies that registerHandoff() times out when handoff notifications are suppressed:
// all rows are added and published successfully, but since handoff never completes,
// waiting on the handoff future for HANDOFF_CONDITION_TIMEOUT_MILLIS must throw
// TimeoutException (declared via expected = TimeoutException.class).
@Test(timeout = 60_000L, expected = TimeoutException.class)
public void testHandoffTimeout() throws Exception
{
  final TestCommitterSupplier<Integer> committerSupplier = new TestCommitterSupplier<>();
  // Disable handoff so the registerHandoff future can never resolve.
  segmentHandoffNotifierFactory.disableHandoff();

  // No restored metadata is expected on a fresh start.
  Assert.assertNull(driver.startJob());

  for (int i = 0; i < ROWS.size(); i++) {
    // Commit metadata tracks the number of rows added so far.
    committerSupplier.setMetadata(i + 1);
    Assert.assertTrue(driver.add(ROWS.get(i), "dummy", committerSupplier, false, true).isOk());
  }

  final SegmentsAndMetadata published = driver.publish(
      makeOkPublisher(),
      committerSupplier.get(),
      ImmutableList.of("dummy")
  ).get(PUBLISH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);

  // Poll until the published sequence is removed from the driver's active segment set.
  while (driver.getSegments().containsKey("dummy")) {
    Thread.sleep(100);
  }

  // Handoff is disabled, so this get() is expected to time out and throw.
  driver.registerHandoff(published).get(HANDOFF_CONDITION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
}
driver.persist( new Committer()
); driver = new StreamAppenderatorDriver( createDropFailAppenderator(), allocator, ); driver.startJob(); Assert.assertNull(driver.startJob()); Assert.assertTrue(driver.add(ROWS.get(i), "dummy", committerSupplier, false, true).isOk()); final SegmentsAndMetadata published = driver.publish( StreamAppenderatorDriverTest.makeOkPublisher(), committerSupplier.get(), ).get(PUBLISH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS); driver.registerHandoff(published).get();
final Object restoredMetadata = driver.startJob(); if (restoredMetadata == null) { nextOffsets.putAll(ioConfig.getStartPartitions().getPartitionSequenceNumberMap()); if (row != null && task.withinMinMaxRecordTime(row)) { final String sequenceName = sequenceNames.get(record.partition()); final AppenderatorDriverAddResult addResult = driver.add( row, sequenceName, driver.persist(committerSupplier.get()); driver.moveSegmentOut(sequence, new ArrayList<>(segments)); }); driver.persist(committerSupplier.get()); // persist pending data final SegmentsAndMetadata published = driver.publish( publisher, committerSupplier.get(), ); final Future<SegmentsAndMetadata> handoffFuture = driver.registerHandoff(published); SegmentsAndMetadata handedOff = null; if (tuningConfig.getHandoffConditionTimeout() == 0) {
final Object restoredMetadata = driver.startJob(); if (restoredMetadata == null) { final AppenderatorDriverAddResult addResult = driver.add( row, sequenceToUse.getSequenceName(), driver.persistAsync(committerSupplier.get()), new FutureCallback<Object>() log.info("Persisting all pending data"); try { driver.persist(committerSupplier.get()); // persist pending data driver.close();
Assert.assertNull(driver.startJob()); Assert.assertTrue(driver.add(ROWS.get(0), "dummy", committerSupplier, false, true).isOk()); final SegmentsAndMetadata segmentsAndMetadata = driver.publishAndRegisterHandoff( makeOkPublisher(), committerSupplier.get(), Assert.assertTrue(driver.add(ROWS.get(i), "dummy", committerSupplier, false, true).isOk()); final SegmentsAndMetadata segmentsAndMetadata = driver.publishAndRegisterHandoff( makeOkPublisher(), committerSupplier.get(), driver.persist(committerSupplier.get()); final SegmentsAndMetadata segmentsAndMetadata = driver.publishAndRegisterHandoff( makeOkPublisher(), committerSupplier.get(),
Assert.assertNull(driver.startJob()); Assert.assertTrue(driver.add(ROWS.get(0), "sequence_0", committerSupplier, false, true).isOk()); Assert.assertTrue(driver.add(ROWS.get(i), "sequence_1", committerSupplier, false, true).isOk()); final ListenableFuture<SegmentsAndMetadata> futureForSequence0 = driver.publishAndRegisterHandoff( makeOkPublisher(), committerSupplier.get(), ); final ListenableFuture<SegmentsAndMetadata> futureForSequence1 = driver.publishAndRegisterHandoff( makeOkPublisher(), committerSupplier.get(),
/**
 * Publishes the segments belonging to the given sequences and then registers the
 * published segments for handoff, returning a future that resolves once handoff
 * has completed.
 *
 * @param publisher     transactional publisher used to commit the segments
 * @param committer     committer whose metadata is persisted alongside the publish
 * @param sequenceNames names of the sequences to publish and hand off
 *
 * @return future holding the handed-off segments and commit metadata
 */
public ListenableFuture<SegmentsAndMetadata> publishAndRegisterHandoff(
    final TransactionalSegmentPublisher publisher,
    final Committer committer,
    final Collection<String> sequenceNames
)
{
  // Publish first; once that future resolves, chain the handoff registration onto it.
  final ListenableFuture<SegmentsAndMetadata> publishFuture =
      publish(publisher, committer, sequenceNames);
  return ListenableFutures.transformAsync(publishFuture, published -> registerHandoff(published));
}
/**
 * Wires up a fresh driver backed by test doubles before each test case runs.
 */
@Before
public void setUp() throws Exception
{
  // Strict mock: any unexpected interaction with the killer fails the test.
  dataSegmentKiller = createStrictMock(DataSegmentKiller.class);
  segmentHandoffNotifierFactory = new TestSegmentHandoffNotifierFactory();
  allocator = new TestSegmentAllocator(DATA_SOURCE, Granularities.HOUR);
  appenderatorTester = new AppenderatorTester(MAX_ROWS_IN_MEMORY);

  driver = new StreamAppenderatorDriver(
      appenderatorTester.getAppenderator(),
      allocator,
      segmentHandoffNotifierFactory,
      new TestUsedSegmentChecker(appenderatorTester),
      dataSegmentKiller,
      OBJECT_MAPPER,
      new FireDepartmentMetrics()
  );

  // Move the mock into replay state so tests can assert its expectations.
  EasyMock.replay(dataSegmentKiller);
}
+ "foo_2000-01-01T01:00:00.000Z_2000-01-01T02:00:00.000Z_abc123]]"); driver = new StreamAppenderatorDriver( createPersistFailAppenderator(), allocator, ); driver.startJob(); Assert.assertNull(driver.startJob()); Assert.assertTrue(driver.add(ROWS.get(i), "dummy", committerSupplier, false, true).isOk()); driver.publish( StreamAppenderatorDriverTest.makeOkPublisher(), committerSupplier.get(),
/**
 * Publishes the segments belonging to {@code sequenceName} through the given driver
 * and queues the resulting handoff future onto {@code pendingHandoffs}.
 *
 * @param driver            driver holding the segments to publish
 * @param publisher         transactional publisher used to commit the segments
 * @param committerSupplier supplies the committer whose metadata is persisted
 * @param sequenceName      the single sequence to publish
 */
private void publishSegments(
    StreamAppenderatorDriver driver,
    TransactionalSegmentPublisher publisher,
    Supplier<Committer> committerSupplier,
    String sequenceName
)
{
  final ListenableFuture<SegmentsAndMetadata> published = driver.publish(
      publisher,
      committerSupplier.get(),
      Collections.singletonList(sequenceName)
  );
  // Chain handoff registration onto the publish future and track it for later waiting.
  pendingHandoffs.add(ListenableFutures.transformAsync(published, sam -> driver.registerHandoff(sam)));
}
/**
 * Builds a {@link StreamAppenderatorDriver} wired to the task toolbox: segment
 * allocation and used-segment checks go through the task action client, and
 * handoff, killing, and serialization use the toolbox-provided components.
 *
 * @param appenderator the appenderator the driver will manage
 * @param toolbox      task toolbox supplying cluster-facing services
 * @param metrics      metrics sink for the driver
 *
 * @return a fully wired driver instance
 */
public StreamAppenderatorDriver newDriver(
    final Appenderator appenderator,
    final TaskToolbox toolbox,
    final FireDepartmentMetrics metrics
)
{
  // Allocate segment identifiers via SegmentAllocateAction so allocation is
  // coordinated through the task action client.
  final ActionBasedSegmentAllocator segmentAllocator = new ActionBasedSegmentAllocator(
      toolbox.getTaskActionClient(),
      dataSchema,
      (schema, row, sequenceName, previousSegmentId, skipSegmentLineageCheck) -> new SegmentAllocateAction(
          schema.getDataSource(),
          row.getTimestamp(),
          schema.getGranularitySpec().getQueryGranularity(),
          schema.getGranularitySpec().getSegmentGranularity(),
          sequenceName,
          previousSegmentId,
          skipSegmentLineageCheck
      )
  );

  return new StreamAppenderatorDriver(
      appenderator,
      segmentAllocator,
      toolbox.getSegmentHandoffNotifierFactory(),
      new ActionBasedUsedSegmentChecker(toolbox.getTaskActionClient()),
      toolbox.getDataSegmentKiller(),
      toolbox.getObjectMapper(),
      metrics
  );
}
+ "foo_2000-01-01T01:00:00.000Z_2000-01-01T02:00:00.000Z_abc123]]"); driver = new StreamAppenderatorDriver( createPushFailAppenderator(), allocator, ); driver.startJob(); Assert.assertNull(driver.startJob()); Assert.assertTrue(driver.add(ROWS.get(i), "dummy", committerSupplier, false, true).isOk()); driver.publish( StreamAppenderatorDriverTest.makeOkPublisher(), committerSupplier.get(),
Assert.assertNull(driver.startJob()); final AppenderatorDriverAddResult addResult = driver.add(row, "dummy", committerSupplier, false, true); Assert.assertTrue(addResult.isOk()); if (addResult.getNumRowsInSegment() > MAX_ROWS_PER_SEGMENT) { driver.moveSegmentOut("dummy", ImmutableList.of(addResult.getSegmentIdentifier())); final SegmentsAndMetadata published = driver.publish( makeOkPublisher(), committerSupplier.get(), ).get(PUBLISH_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS); while (driver.getSegments().containsKey("dummy")) { Thread.sleep(100); final SegmentsAndMetadata segmentsAndMetadata = driver.registerHandoff(published) .get(HANDOFF_CONDITION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS); Assert.assertEquals(numSegments, segmentsAndMetadata.getSegments().size());
driver.publish( sequenceMetadata.createPublisher(toolbox, ioConfig.isUseTransaction()), sequenceMetadata.getCommitterSupplier(stream, lastPersistedOffsets).get(),
/**
 * Scans all sequences and, for each one that has been closed but not yet queued for
 * publishing, persists pending data and schedules the sequence for publish + handoff.
 *
 * @param committerSupplier supplies the committer whose metadata is persisted
 *
 * @throws InterruptedException if the persist call is interrupted
 */
private void maybePersistAndPublishSequences(Supplier<Committer> committerSupplier) throws InterruptedException
{
  for (SequenceMetadata sequenceMetadata : sequences) {
    sequenceMetadata.updateAssignments(currOffsets);

    // Skip sequences that are still open or are already queued for publishing.
    if (sequenceMetadata.isOpen() || publishingSequences.contains(sequenceMetadata.getSequenceName())) {
      continue;
    }

    publishingSequences.add(sequenceMetadata.getSequenceName());
    try {
      final Object result = driver.persist(committerSupplier.get());
      log.info(
          "Persist completed with results: [%s], adding sequence [%s] to publish queue",
          result,
          sequenceMetadata
      );
      publishAndRegisterHandoff(sequenceMetadata);
    } catch (InterruptedException e) {
      // Propagate the interrupt so the caller can shut down cleanly.
      log.warn("Interrupted while persisting sequence [%s]", sequenceMetadata);
      throw e;
    }
  }
}