/**
 * Adds a new row. The row might correspond with another row that already exists, in which case this will
 * update that row instead of inserting a new one.
 * <p>
 * Calls to add() are thread safe.
 *
 * @param row the row of data to add
 * @return the add result (carries the number of rows in the data set after adding the InputRow)
 * @throws IndexSizeExceededException if the index cannot hold any more rows
 */
public IncrementalIndexAddResult add(InputRow row) throws IndexSizeExceededException { return add(row, false); }
/**
 * Per-iteration JMH setup: builds a fresh incremental index and pre-loads it with
 * {@code rowsPerSegment} rows so each iteration measures against an identical index.
 */
@Setup(Level.Iteration)
public void setup2() throws IOException
{
  incIndex = makeIncIndex();
  for (int rowIndex = 0; rowIndex < rowsPerSegment; rowIndex++) {
    incIndex.add(rows.get(rowIndex));
  }
}
/**
 * Writer task: bumps the running-thread counter, inserts {@code elementsPerThread} long rows,
 * then marks that at least one writer ran.
 * <p>
 * Fixes: the decrement now lives in a {@code finally} block so the counter cannot leak when
 * {@code add} throws, and the deprecated {@code Throwables.propagate} is replaced by a plain
 * {@code RuntimeException} wrap (identical effect for a checked cause).
 */
@Override
public void run()
{
  currentlyRunning.incrementAndGet();
  try {
    for (int i = 0; i < elementsPerThread; i++) {
      incrementalIndex.add(getLongRow(timestamp + i, 1, dimensionCount));
    }
  }
  catch (IndexSizeExceededException e) {
    throw new RuntimeException(e);
  }
  finally {
    // Always undo the increment, even on failure, so the gauge stays accurate.
    currentlyRunning.decrementAndGet();
  }
  someoneRan.set(true);
}
}
/**
 * Parses a single input line into the accumulator.
 * Records the wall-clock start time the first time any line is processed.
 *
 * @param line the raw line to parse
 * @return always {@code true} to keep processing subsequent lines
 */
@Override
public boolean processLine(String line) throws IOException
{
  if (!runOnce) {
    // Latch the start timestamp exactly once, on the first line seen.
    startTime.set(System.currentTimeMillis());
    runOnce = true;
  }
  retVal.add(parser.parse(line));
  lineCount++;
  return true;
}
/**
 * Inserts one single-dimension row per value into the given index, all at timestamp 1.
 *
 * @param index   the index receiving the rows
 * @param dimName the dimension each value is stored under
 * @param values  the dimension values; one row is added per value
 */
private void addDimValuesToIndex(IncrementalIndex index, String dimName, List<String> values) throws Exception
{
  final List<String> dimensionList = Collections.singletonList(dimName);
  for (String value : values) {
    index.add(new MapBasedInputRow(1, dimensionList, ImmutableMap.of(dimName, value)));
  }
}
/**
 * Writer task: inserts {@code elementsPerThread} rows and then releases the latch.
 * <p>
 * Fix: {@code latch.countDown()} is moved into a {@code finally} block — previously an
 * {@link Error} escaping the try (not caught by {@code catch (Exception)}) would skip the
 * count-down and leave the awaiting test hanging forever.
 */
@Override
public void run()
{
  try {
    for (int i = 0; i < elementsPerThread; i++) {
      index.add(getRow(timestamp + i, i, dimensionCount));
    }
  }
  catch (Exception e) {
    // Best-effort: report and continue; the caller inspects the index afterwards.
    e.printStackTrace();
  }
  finally {
    // Unconditionally release the latch so the awaiting thread can never hang.
    latch.countDown();
  }
}
}
/**
 * Benchmark: average time to ingest {@code rowsPerSegment} rows into the incremental index.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void addRows(Blackhole blackhole) throws Exception
{
  for (int i = 0; i < rowsPerSegment; i++) {
    // Feed the row count to the blackhole so the JIT cannot eliminate the add.
    blackhole.consume(incIndex.add(rows.get(i)).getRowCount());
  }
}
}
/**
 * Builds the string predicate; as a side effect, adds one "billy" row to the index
 * before returning an always-true predicate.
 *
 * @return a predicate that matches every string
 * @throws RuntimeException wrapping {@link IndexSizeExceededException} if the index is full
 */
@Override
public Predicate<String> makeStringPredicate()
{
  final MapBasedInputRow row = new MapBasedInputRow(
      timestamp,
      Collections.singletonList("billy"),
      ImmutableMap.of("billy", "v31234")
  );
  try {
    index.add(row);
  }
  catch (IndexSizeExceededException isee) {
    throw new RuntimeException(isee);
  }
  return Predicates.alwaysTrue();
}
/**
 * Seeds the index with two distinct rows at the given timestamp, each carrying
 * dimensions "dim1" and "dim2".
 *
 * @param timestamp the event timestamp used for both rows
 * @param index     the index to populate
 * @throws IndexSizeExceededException if the index cannot hold the rows
 */
public static void populateIndex(long timestamp, IncrementalIndex index) throws IndexSizeExceededException
{
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2"),
      ImmutableMap.of("dim1", "1", "dim2", "2")
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2"),
      ImmutableMap.of("dim1", "3", "dim2", "4")
  ));
}
@Override
public void run()
{
  // Per-thread RNG; ThreadLocalRandom avoids contention between writer threads.
  final Random random = ThreadLocalRandom.current();
  try {
    // Each add-thread inserts its equal share of the MAX_ROWS total.
    for (int j = 0; j < MAX_ROWS / addThreadCount; ++j) {
      index.add(new MapBasedInputRow(
          0,
          Collections.singletonList("billy"),
          // Random long values make the "billy" dimension (mostly) unique per row.
          ImmutableMap.of("billy", random.nextLong(), "max", 1)
      ));
    }
  }
  catch (Exception e) {
    throw new RuntimeException(e);
  }
}
});
/**
 * Adds a row to the current hydrant's incremental index, optionally bypassing the
 * in-memory max-rows guard.
 * <p>
 * The writability/swap/dedup checks and the add itself all happen under {@code hydrantLock},
 * so the row cannot race with a hydrant swap.
 *
 * @param row                      the row of data to add
 * @param skipMaxRowsInMemoryCheck whether the underlying index should skip its max-rows check
 * @return the underlying index's add result, or a sentinel: {@code Plumber.NOT_WRITABLE} if the
 *         sink is closed for writes, {@code ALREADY_SWAPPED} if the hydrant's index was swapped
 *         out, {@code Plumber.DUPLICATE} if the row is in the dedup set
 * @throws IndexSizeExceededException if the underlying index cannot hold the row
 * @throws IAE if there is no current hydrant
 */
public IncrementalIndexAddResult add(InputRow row, boolean skipMaxRowsInMemoryCheck) throws IndexSizeExceededException
{
  if (currHydrant == null) {
    throw new IAE("No currHydrant but given row[%s]", row);
  }
  synchronized (hydrantLock) {
    if (!writable) {
      return Plumber.NOT_WRITABLE;
    }
    IncrementalIndex index = currHydrant.getIndex();
    if (index == null) {
      return ALREADY_SWAPPED; // the hydrant was swapped without being replaced
    }
    if (checkInDedupSet(row)) {
      return Plumber.DUPLICATE;
    }
    return index.add(row, skipMaxRowsInMemoryCheck);
  }
}
/**
 * Benchmark: average per-row ingest time for long-typed rows.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@OperationsPerInvocation(maxRows)
public void normalLongs(Blackhole blackhole) throws Exception
{
  for (int rowIndex = 0; rowIndex < maxRows; rowIndex++) {
    final InputRow row = longRows.get(rowIndex);
    // Consume the row count so the add cannot be dead-code-eliminated.
    blackhole.consume(incIndex.add(row).getRowCount());
  }
}
/**
 * Benchmark: average per-row ingest time for float-typed rows.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@OperationsPerInvocation(maxRows)
public void normalFloats(Blackhole blackhole) throws Exception
{
  for (int rowIndex = 0; rowIndex < maxRows; rowIndex++) {
    final InputRow row = floatRows.get(rowIndex);
    // Consume the row count so the add cannot be dead-code-eliminated.
    blackhole.consume(incFloatIndex.add(row).getRowCount());
  }
}
/**
 * Benchmark: average per-row ingest time for string-typed rows.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@OperationsPerInvocation(maxRows)
public void normalStrings(Blackhole blackhole) throws Exception
{
  for (int rowIndex = 0; rowIndex < maxRows; rowIndex++) {
    final InputRow row = stringRows.get(rowIndex);
    // Consume the row count so the add cannot be dead-code-eliminated.
    blackhole.consume(incStrIndex.add(row).getRowCount());
  }
}
}
/**
 * Verifies rollup: adding the identical row three times must collapse into a single stored row.
 */
@Test
public void sameRow() throws IndexSizeExceededException
{
  final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex());
  final MapBasedInputRow row = new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  );
  for (int i = 0; i < 3; i++) {
    index.add(row);
  }
  Assert.assertEquals(1, index.size());
}
}
/**
 * A row whose dimension list repeats a name ("joe") after a well-formed first row
 * must be rejected with an {@link ISE}.
 */
@Test(expected = ISE.class)
public void testDuplicateDimensions() throws IndexSizeExceededException
{
  final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex());
  final long eventTime = System.currentTimeMillis() - 1;
  // First row is well-formed and establishes the dimensions.
  index.add(new MapBasedInputRow(
      eventTime,
      Lists.newArrayList("billy", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  ));
  // Second row lists "joe" twice; this add is expected to throw.
  index.add(new MapBasedInputRow(
      eventTime,
      Lists.newArrayList("billy", "joe", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  ));
}
/**
 * Even the very first row must be rejected with an {@link ISE} when its dimension
 * list repeats a name ("joe").
 */
@Test(expected = ISE.class)
public void testDuplicateDimensionsFirstOccurrence() throws IndexSizeExceededException
{
  final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex());
  final MapBasedInputRow rowWithDuplicateDim = new MapBasedInputRow(
      System.currentTimeMillis() - 1,
      Lists.newArrayList("billy", "joe", "joe"),
      ImmutableMap.of("billy", "A", "joe", "B")
  );
  index.add(rowWithDuplicateDim);
}
@Override
public IncrementalIndex accumulate(IncrementalIndex accumulated, T in)
{
  if (in instanceof MapBasedRow) {
    try {
      MapBasedRow row = (MapBasedRow) in;
      // Re-wrap the result row as an input row so it can be fed back into the index.
      accumulated.add(
          new MapBasedInputRow(
              row.getTimestamp(),
              dimensions,
              row.getEvent()
          )
      );
    }
    catch (IndexSizeExceededException e) {
      // NOTE(review): only the message survives — the cause is dropped. Confirm whether
      // ResourceLimitExceededException offers a constructor that preserves the cause.
      throw new ResourceLimitExceededException(e.getMessage());
    }
  }
  else {
    // Anything other than MapBasedRow cannot be accumulated here.
    throw new ISE("Unable to accumulate something of type [%s]", in.getClass());
  }
  return accumulated;
}
};
/**
 * Benchmark: ingest all input rows into a freshly built filtered-aggregator index.
 * A new index is created per invocation so every measurement starts from empty.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void ingest(Blackhole blackhole) throws Exception
{
  incIndexFilteredAgg = makeIncIndex(filteredMetrics);
  for (final InputRow row : inputRows) {
    // Consume the row count so the add cannot be dead-code-eliminated.
    blackhole.consume(incIndexFilteredAgg.add(row).getRowCount());
  }
}
/**
 * Builds an incremental index with the given dimension schema, ingests the rows,
 * persists it to a temporary folder, and reloads it as a queryable (on-disk) index.
 *
 * @param schema the dimension schema for the index
 * @param rows   the rows to ingest before persisting
 * @return the reloaded queryable index, registered with the closer for cleanup
 */
private QueryableIndex persistAndLoad(List<DimensionSchema> schema, InputRow... rows) throws IOException
{
  final IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null, new DimensionsSpec(schema, null, null));
  for (final InputRow row : rows) {
    toPersist.add(row);
  }
  final File persistDir = temporaryFolder.newFolder();
  return closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist, persistDir, indexSpec, null)));
}