/**
 * JMH benchmark: measures the average time to append {@code rowsPerSegment}
 * pre-built rows into the incremental index. Each add's resulting row count is
 * fed to the Blackhole so dead-code elimination cannot skip the work.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void addRows(Blackhole blackhole) throws Exception
{
  int i = 0;
  while (i < rowsPerSegment) {
    final InputRow currentRow = rows.get(i);
    final int rowCount = incIndex.add(currentRow).getRowCount();
    blackhole.consume(rowCount);
    i++;
  }
}
}
@Override public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException { final SegmentIdWithShardSpec identifier = getSegmentIdentifier(row.getTimestampFromEpoch()); if (identifier == null) { return Plumber.THROWAWAY; } try { final Appenderator.AppenderatorAddResult addResult = appenderator.add(identifier, row, committerSupplier); lastCommitterSupplier = committerSupplier; return new IncrementalIndexAddResult(addResult.getNumRowsInSegment(), 0, addResult.getParseException()); } catch (SegmentNotWritableException e) { // Segment already started handoff return Plumber.NOT_WRITABLE; } }
// Snapshot the sink's state right after the add so the caller can compute the
// row/byte growth attributable to this single add.
sinkRowsInMemoryAfterAdd = addResult.getRowCount();
bytesInMemoryAfterAdd = addResult.getBytesInMemory();
// NOTE(review): fragment of a larger method — identifier, sink, and
// isPersistRequired are defined by enclosing code not visible here.
return new AppenderatorAddResult(identifier, sink.getNumRows(), isPersistRequired, addResult.getParseException());
// A row count of -1 signals the row was not added and should be counted as thrown away.
if (addResult.getRowCount() == -1) {
  metrics.incrementThrownAway();
  log.debug("Discarded row[%s], considering thrownAway due to %s.", inputRow, addResult.getReasonOfNotAdded());
  return;
// NOTE(review): as excerpted, this duplicate-detection branch (-2) sits after an
// unconditional return inside the same block and is unreachable — a closing '}'
// before this 'if' appears to have been lost in extraction; confirm against the
// full method before merging.
if (addResult.getRowCount() == -2) {
  metrics.incrementDedup();
  log.debug("Discarded row[%s], considering duplication.", inputRow);
/**
 * Routes the row to the sink covering its timestamp and appends it there,
 * persisting afterwards when the sink is full or the flush deadline has passed.
 *
 * @param row               the row to ingest
 * @param committerSupplier supplies the committer used when a persist is triggered
 * @return the sink's add result, or {@link Plumber#THROWAWAY} when no sink
 *         covers the row's timestamp
 */
@Override
public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
{
  final long messageTimestamp = row.getTimestampFromEpoch();
  final Sink sink = getSink(messageTimestamp);
  metrics.reportMessageMaxTimestamp(messageTimestamp);

  // No sink covers this timestamp: drop the row.
  if (sink == null) {
    return Plumber.THROWAWAY;
  }

  final IncrementalIndexAddResult addResult = sink.add(row, false);
  if (config.isReportParseExceptions() && addResult.getParseException() != null) {
    throw addResult.getParseException();
  }

  final boolean sinkFull = !sink.canAppendRow();
  if (sinkFull || System.currentTimeMillis() > nextFlush) {
    persist(committerSupplier.get());
  }
  return addResult;
}
// Fragment: pushes one parsed row into the plumber and updates ingestion metrics
// based on the resulting row count.
if (inputRow != null) {
  IncrementalIndexAddResult addResult = plumber.add(inputRow, committerSupplier);
  int numRows = addResult.getRowCount();
  // -2 marks a duplicate row; any other negative count means it was thrown away.
  if (numRows == -2) {
    metrics.incrementDedup();
  } else if (numRows < 0) {
    metrics.incrementThrownAway();
    log.debug("Throwing away event[%s] due to %s", inputRow, addResult.getReasonOfNotAdded());
  } else {
    metrics.incrementProcessed();
// Each result whose row had an unparseable numeric column should carry a
// ParseException naming the offending raw value and the target type.
// Unparseable long ("asdj"):
Assert.assertEquals(ParseException.class, result.getParseException().getClass());
Assert.assertEquals(
    "Found unparseable columns in row: [MapBasedInputRow{timestamp=1970-01-01T00:00:00.000Z, event={string=A, float=19.0, long=asdj, double=21.0}, dimensions=[string, float, long, double]}], exceptions: [could not convert value [asdj] to long,]",
    result.getParseException().getMessage()
);
// Unparseable float ("aaa"):
Assert.assertEquals(ParseException.class, result.getParseException().getClass());
Assert.assertEquals(
    "Found unparseable columns in row: [MapBasedInputRow{timestamp=1970-01-01T00:00:00.000Z, event={string=A, float=aaa, long=20, double=21.0}, dimensions=[string, float, long, double]}], exceptions: [could not convert value [aaa] to float,]",
    result.getParseException().getMessage()
);
// Unparseable double (empty string):
Assert.assertEquals(ParseException.class, result.getParseException().getClass());
Assert.assertEquals(
    "Found unparseable columns in row: [MapBasedInputRow{timestamp=1970-01-01T00:00:00.000Z, event={string=A, float=19.0, long=20, double=}, dimensions=[string, float, long, double]}], exceptions: [could not convert value [] to double,]",
    result.getParseException().getMessage()
);
/**
 * JMH benchmark: average per-row time to ingest {@code maxRows} long-typed rows
 * into the incremental index; row counts are consumed by the Blackhole.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@OperationsPerInvocation(maxRows)
public void normalLongs(Blackhole blackhole) throws Exception
{
  for (int rowIndex = 0; rowIndex < maxRows; rowIndex++) {
    final InputRow currentRow = longRows.get(rowIndex);
    final int rowCount = incIndex.add(currentRow).getRowCount();
    blackhole.consume(rowCount);
  }
}
// A row count of -1 signals the row was not added and should be counted as thrown away.
if (addResult.getRowCount() == -1) {
  metrics.incrementThrownAway();
  log.debug("Discarded row[%s], considering thrownAway due to %s.", inputRow, addResult.getReasonOfNotAdded());
  return;
// NOTE(review): as excerpted, this duplicate-detection branch (-2) follows an
// unconditional return in the same block and can never execute — a closing '}'
// before this 'if' appears to have been dropped during extraction; verify
// against the full method.
if (addResult.getRowCount() == -2) {
  metrics.incrementDedup();
  log.debug("Discarded row[%s], considering duplication.", inputRow);
// Capture post-add sink state so the delta caused by this add can be derived.
sinkRowsInMemoryAfterAdd = addResult.getRowCount();
bytesInMemoryAfterAdd = addResult.getBytesInMemory();
// NOTE(review): excerpt of a larger method — identifier, sink, and
// isPersistRequired come from enclosing code not shown here.
return new AppenderatorAddResult(identifier, sink.getNumRows(), isPersistRequired, addResult.getParseException());
// NOTE(review): garbled excerpt — the class body's opening '{' between the
// declaration and the field was lost in extraction; restore it when merging.
public class Sink implements Iterable<FireHydrant>
// Sentinel result returned when a row arrives after the in-memory index has
// been swapped out for persistence (rowCount -1, bytesInMemory -1, no parse
// exception, with an explanatory reason string).
private static final IncrementalIndexAddResult ALREADY_SWAPPED = new IncrementalIndexAddResult(-1, -1, null, "write after index swapped");
/**
 * Ingests a row via the sink responsible for its timestamp. Throws the row's
 * parse exception when strict parse reporting is enabled, and persists when the
 * sink can take no more rows or the scheduled flush time has elapsed.
 */
@Override
public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
{
  final long timestamp = row.getTimestampFromEpoch();
  final Sink sink = getSink(timestamp);
  metrics.reportMessageMaxTimestamp(timestamp);

  if (sink == null) {
    // Outside the window of every active sink — the row is dropped.
    return Plumber.THROWAWAY;
  }

  final IncrementalIndexAddResult addResult = sink.add(row, false);
  if (config.isReportParseExceptions() && addResult.getParseException() != null) {
    throw addResult.getParseException();
  }

  final boolean flushDeadlinePassed = System.currentTimeMillis() > nextFlush;
  if (flushDeadlinePassed || !sink.canAppendRow()) {
    persist(committerSupplier.get());
  }
  return addResult;
}
/**
 * JMH benchmark: average per-row time to ingest {@code maxRows} float-typed
 * rows into the float incremental index; results go to the Blackhole.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@OperationsPerInvocation(maxRows)
public void normalFloats(Blackhole blackhole) throws Exception
{
  for (int rowIndex = 0; rowIndex < maxRows; rowIndex++) {
    final InputRow currentRow = floatRows.get(rowIndex);
    final int rowCount = incFloatIndex.add(currentRow).getRowCount();
    blackhole.consume(rowCount);
  }
}
// Fragment: feeds one parsed row to the plumber and classifies the outcome for
// ingestion metrics.
if (inputRow != null) {
  IncrementalIndexAddResult addResult = plumber.add(inputRow, committerSupplier);
  int numRows = addResult.getRowCount();
  // -2 indicates a duplicate; any other negative value means the row was discarded.
  if (numRows == -2) {
    metrics.incrementDedup();
  } else if (numRows < 0) {
    metrics.incrementThrownAway();
    log.debug("Throwing away event[%s] due to %s", inputRow, addResult.getReasonOfNotAdded());
  } else {
    metrics.incrementProcessed();
/**
 * Converts the input row to its internal representation, folds it into the
 * facts table, and returns the resulting row count, memory footprint, and any
 * parse errors accumulated during conversion or insertion.
 *
 * @param row                      the row to add
 * @param skipMaxRowsInMemoryCheck when true, bypasses the in-memory row limit check
 * @return combined result of the add, including a merged ParseException if any
 *         column failed to parse
 * @throws IndexSizeExceededException if the index cannot accept more rows
 */
public IncrementalIndexAddResult add(InputRow row, boolean skipMaxRowsInMemoryCheck) throws IndexSizeExceededException
{
  final IncrementalIndexRowResult rowResult = toIncrementalIndexRow(row);
  final AddToFactsResult factsResult = addToFacts(
      metrics,
      deserializeComplexMetrics,
      reportParseExceptions,
      row,
      numEntries,
      bytesInMemory,
      rowResult.getIncrementalIndexRow(),
      in,
      rowSupplier,
      skipMaxRowsInMemoryCheck
  );
  updateMaxIngestedTime(row.getTimestamp());
  // Merge parse problems found during row conversion with those from the facts add.
  final ParseException parseException = getCombinedParseException(
      row,
      rowResult.getParseExceptionMessages(),
      factsResult.getParseExceptionMessages()
  );
  return new IncrementalIndexAddResult(factsResult.getRowCount(), factsResult.getBytesInMemory(), parseException);
}
/**
 * JMH benchmark: average per-row time to ingest {@code maxRows} string-typed
 * rows into the string incremental index; results are consumed by the Blackhole.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@OperationsPerInvocation(maxRows)
public void normalStrings(Blackhole blackhole) throws Exception
{
  for (int rowIndex = 0; rowIndex < maxRows; rowIndex++) {
    final InputRow currentRow = stringRows.get(rowIndex);
    final int rowCount = incStrIndex.add(currentRow).getRowCount();
    blackhole.consume(rowCount);
  }
}
}
@Override public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException { final SegmentIdentifier identifier = getSegmentIdentifier(row.getTimestampFromEpoch()); if (identifier == null) { return Plumber.THROWAWAY; } try { final Appenderator.AppenderatorAddResult addResult = appenderator.add(identifier, row, committerSupplier); lastCommitterSupplier = committerSupplier; return new IncrementalIndexAddResult(addResult.getNumRowsInSegment(), 0, addResult.getParseException()); } catch (SegmentNotWritableException e) { // Segment already started handoff return Plumber.NOT_WRITABLE; } }
/**
 * JMH benchmark: builds a fresh incremental index with filtered aggregators
 * each invocation and measures the average time to ingest all input rows.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void ingest(Blackhole blackhole) throws Exception
{
  // Recreate the index per invocation so each run starts from an empty index.
  incIndexFilteredAgg = makeIncIndex(filteredMetrics);
  for (final InputRow currentRow : inputRows) {
    final int rowCount = incIndexFilteredAgg.add(currentRow).getRowCount();
    blackhole.consume(rowCount);
  }
}
// NOTE(review): garbled excerpt — the opening '{' of the class body is missing
// between the declaration and the field; restore it when merging.
public class Sink implements Iterable<FireHydrant>
// Sentinel add-result for writes arriving after the in-memory index was
// swapped for persistence (rowCount -1, bytesInMemory -1).
private static final IncrementalIndexAddResult ALREADY_SWAPPED = new IncrementalIndexAddResult(-1, -1, null, "write after index swapped");
// Fragment: add the row to the index and capture the resulting row count for
// the enclosing code (not visible here) to act on.
int numRows = index.add(inputRow).getRowCount();