/**
 * Returns whether a push is required, given the row limits configured in the supplied tuning config.
 * Convenience overload: delegates to the two-argument {@code isPushRequired} with the config's
 * maxRowsPerSegment and maxTotalRows; the actual decision logic lives in the delegate.
 *
 * @param tuningConfig config supplying the maxRowsPerSegment and maxTotalRows limits
 *
 * @return the delegate's result — presumably true once accumulated rows reach a limit; confirm in the delegate
 */
public boolean isPushRequired(AppenderatorConfig tuningConfig)
{
  return isPushRequired(tuningConfig.getMaxRowsPerSegment(), tuningConfig.getMaxTotalRows());
}
/**
 * Persist all data indexed through this driver so far, asynchronously.
 * <p>
 * Should be called after all data has been added through {@link #add(InputRow, String, Supplier, boolean, boolean)}.
 *
 * @param committer committer representing all data that has been added so far
 *
 * @return future containing the commitMetadata that was persisted
 */
public ListenableFuture<Object> persistAsync(final Committer committer)
{
  // Decorate the committer first (wrapCommitter presumably attaches driver-level state —
  // see its definition), then hand the wrapped committer to the appenderator.
  final Committer wrappedCommitter = wrapCommitter(committer);
  return appenderator.persistAll(wrappedCommitter);
}
/**
 * Add a row to this driver.
 * Delegates to {@code append} with the two trailing boolean flags fixed — fourth argument false,
 * fifth true; presumably skipSegmentLineageCheck=false and allowIncrementalPersists=true, but
 * confirm against append()'s parameter list.
 *
 * @param row               the row to add
 * @param sequenceName      name of the sequence the row belongs to
 * @param committerSupplier supplies the committer representing all data added so far
 *
 * @return result of the underlying append call
 *
 * @throws IOException if the underlying append fails with an I/O error
 */
public AppenderatorDriverAddResult add(
    InputRow row,
    String sequenceName,
    final Supplier<Committer> committerSupplier
) throws IOException
{
  return append(row, sequenceName, committerSupplier, false, true);
}
/**
 * Registers the given identifier as the current appending segment for its interval,
 * and remembers it as the most recently added segment.
 *
 * @param identifier identifier of the segment to register
 */
void add(SegmentIdWithShardSpec identifier)
{
  final long intervalStart = identifier.getInterval().getStartMillis();
  // One SegmentsOfInterval entry per interval start millis; created lazily on first use.
  final SegmentsOfInterval segmentsOfInterval = intervalToSegmentStates.computeIfAbsent(
      intervalStart,
      start -> new SegmentsOfInterval(identifier.getInterval())
  );
  segmentsOfInterval.setAppendingSegment(SegmentWithState.newSegment(identifier));
  lastSegmentId = identifier.toString();
}
@Before
public void setup()
{
  // Fixture wiring. Order matters: the tester must exist before the driver is built,
  // and the strict mock is created before it is handed to the driver and replayed.
  appenderatorTester = new AppenderatorTester(MAX_ROWS_IN_MEMORY);
  allocator = new TestSegmentAllocator(DATA_SOURCE, Granularities.HOUR);
  // Strict mock: any unexpected call on the killer fails the test.
  dataSegmentKiller = createStrictMock(DataSegmentKiller.class);
  driver = new BatchAppenderatorDriver(
      appenderatorTester.getAppenderator(),
      allocator,
      new TestUsedSegmentChecker(appenderatorTester),
      dataSegmentKiller
  );
  // Replayed with no expectations recorded: these tests expect no segments to be killed.
  EasyMock.replay(dataSegmentKiller);
}
@Test
public void testRestart()
{
  // First startJob on a fresh driver should find no previously committed metadata.
  Assert.assertNull(driver.startJob());
  driver.close();
  appenderatorTester.getAppenderator().close();
  // Restarting after close should likewise find no persisted metadata.
  Assert.assertNull(driver.startJob());
}
/**
 * Starts the job: ensures the base persist directory exists, locks it, bootstraps any
 * sinks persisted by a previous run, then initializes executors and the flush timer.
 *
 * @return whatever bootstrapSinksFromDisk() recovered — presumably previously committed
 *         metadata; confirm against that method
 */
@Override
public Object startJob()
{
  // BUGFIX: the mkdirs() return value was previously ignored, so a failure to create the
  // persist directory went unnoticed until some later write failed obscurely. mkdirs()
  // returns false both on failure AND when the directory already exists, so only treat it
  // as an error if the directory is still absent afterwards.
  if (!tuningConfig.getBasePersistDirectory().mkdirs()
      && !tuningConfig.getBasePersistDirectory().isDirectory()) {
    throw new IllegalStateException(
        "Could not create base persist directory: " + tuningConfig.getBasePersistDirectory()
    );
  }
  lockBasePersistDirectory();
  // Recover sinks from disk before starting background work that may use them.
  final Object retVal = bootstrapSinksFromDisk();
  initializeExecutors();
  resetNextFlush();
  return retVal;
}
@Override public Object startJob() { handoffNotifier.start(); Object retVal = appenderator.startJob(); initializeExecutors(); startPersistThread(); // Push pending sinks bootstrapped from previous run mergeAndPush(); return retVal; }
@Override
public SegmentIdWithShardSpec apply(DataSegment input)
{
  // Straight conversion from a DataSegment back to its segment identifier.
  return SegmentIdWithShardSpec.fromDataSegment(input);
}
}
@Before
public void setUp()
{
  // These three fixtures are independent of one another, so initialization order is arbitrary.
  // Strict mock: any unexpected call on the killer fails the test.
  dataSegmentKiller = createStrictMock(DataSegmentKiller.class);
  segmentHandoffNotifierFactory = new TestSegmentHandoffNotifierFactory();
  allocator = new TestSegmentAllocator(DATA_SOURCE, Granularities.HOUR);
}
/**
 * Same as {@link #add(SegmentIdWithShardSpec, InputRow, Supplier, boolean)}, with allowIncrementalPersists set to true.
 *
 * @param identifier        segment to add the row to
 * @param row               the row to add
 * @param committerSupplier supplies the committer representing all data added so far
 *
 * @return result of the four-argument add
 *
 * @throws IndexSizeExceededException as thrown by the delegate
 * @throws SegmentNotWritableException as thrown by the delegate
 */
default AppenderatorAddResult add(
    SegmentIdWithShardSpec identifier,
    InputRow row,
    Supplier<Committer> committerSupplier
) throws IndexSizeExceededException, SegmentNotWritableException
{
  return add(identifier, row, committerSupplier, true);
}
@Override
public Appenderator build(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics)
{
  // Builds an offline appenderator from this factory's injected dependencies
  // (pusher, mapper, indexIO, indexMerger).
  return Appenderators.createOffline(schema, config, metrics, dataSegmentPusher, objectMapper, indexIO, indexMerger);
}
}
/**
 * Test helper: returns a FailableAppenderator configured via {@code disablePush()} —
 * presumably making push attempts fail, for exercising push-failure paths.
 */
static Appenderator createPushFailAppenderator()
{
  final FailableAppenderator appenderator = new FailableAppenderator();
  return appenderator.disablePush();
}
/**
 * Test helper: returns a FailableAppenderator configured via {@code disablePersist()} —
 * presumably making persist attempts fail, for exercising persist-failure paths.
 */
static Appenderator createPersistFailAppenderator()
{
  final FailableAppenderator appenderator = new FailableAppenderator();
  return appenderator.disablePersist();
}
/**
 * Test helper: returns a FailableAppenderator configured via {@code interruptPush()} —
 * presumably making push appear interrupted, for exercising interruption handling.
 */
static Appenderator createPushInterruptAppenderator()
{
  final FailableAppenderator appenderator = new FailableAppenderator();
  return appenderator.interruptPush();
}
/**
 * Change the segment state to {@link SegmentState#APPEND_FINISHED}. The current state should be
 * {@link SegmentState#APPENDING}; checkStateTransition is expected to reject anything else.
 */
public void finishAppending()
{
  final SegmentState target = SegmentState.APPEND_FINISHED;
  // Validate the APPENDING -> APPEND_FINISHED transition before committing it.
  checkStateTransition(this.state, SegmentState.APPENDING, target);
  this.state = target;
}
/**
 * Creates a SegmentWithState for a freshly allocated segment, starting in the
 * {@link SegmentState#APPENDING} state with no pushed DataSegment yet.
 *
 * @param segmentIdentifier identifier of the new segment
 */
static SegmentWithState newSegment(SegmentIdWithShardSpec segmentIdentifier)
{
  // Delegate to the two-argument factory with the initial APPENDING state.
  return newSegment(segmentIdentifier, SegmentState.APPENDING);
}
/**
 * Returns a result representing a failed add: no segment identifier, zero counts, a false
 * flag, and no final argument — confirm the exact field meanings against the
 * AppenderatorDriverAddResult constructor.
 */
public static AppenderatorDriverAddResult fail()
{
  return new AppenderatorDriverAddResult(null, 0, 0, false, null);
}
/**
 * Change the segment state to {@link SegmentState#PUSHED_AND_DROPPED}. The current state should be
 * {@link SegmentState#APPENDING}. This method should be called after the segment of
 * {@link #segmentIdentifier} is completely pushed and dropped.
 *
 * @param dataSegment pushed {@link DataSegment}
 */
public void pushAndDrop(DataSegment dataSegment)
{
  final SegmentState target = SegmentState.PUSHED_AND_DROPPED;
  // Validate the APPENDING -> PUSHED_AND_DROPPED transition before committing it.
  checkStateTransition(this.state, SegmentState.APPENDING, target);
  this.state = target;
  // Keep a reference to the pushed segment alongside the terminal state.
  this.dataSegment = dataSegment;
}
/**
 * Creates a SegmentWithState in the given state, with no DataSegment attached (third
 * constructor argument is null).
 *
 * @param segmentIdentifier identifier of the segment
 * @param state             initial state for the segment
 */
static SegmentWithState newSegment(SegmentIdWithShardSpec segmentIdentifier, SegmentState state)
{
  return new SegmentWithState(segmentIdentifier, state, null);
}