@Override
public List<String> getDimensionNames()
{
  // Pure delegation: the wrapped index owns the dimension-name list.
  return index.getDimensionNames();
}
@Override
public List<String> getColumnNames()
{
  // Column order contract: all dimensions first, then all metrics.
  final List<String> dimensions = getDimensionNames();
  final List<String> metrics = getMetricNames();
  final List<String> columnNames = new ArrayList<>(dimensions.size() + metrics.size());
  columnNames.addAll(dimensions);
  columnNames.addAll(metrics);
  return columnNames;
}
@Override
public Indexed<String> getAvailableDimensions()
{
  // Expose the wrapped index's dimension names through the Indexed interface.
  final List<String> dimensionNames = index.getDimensionNames();
  return new ListIndexed<>(dimensionNames);
}
private void flushIndexToContextAndClose(BytesWritable key, IncrementalIndex index, Context context) throws IOException, InterruptedException { final List<String> dimensions = index.getDimensionNames(); Iterator<Row> rows = index.iterator(); while (rows.hasNext()) { context.progress(); Row row = rows.next(); InputRow inputRow = getInputRowFromRow(row, dimensions); // reportParseExceptions is true as any unparseable data is already handled by the mapper. InputRowSerde.SerializeResult serializeResult = InputRowSerde.toBytes(typeHelperMap, inputRow, combiningAggs); context.write( key, new BytesWritable(serializeResult.getSerializedRow()) ); } index.close(); }
@Test
public void testCaseSensitivity() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(defaultAggregatorFactories));
  populateIndex(timestamp, index);

  Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames());
  Assert.assertEquals(2, index.size());

  // Each expected pair is {dim1 value, dim2 value} for the row at that position.
  final String[][] expectedDims = {{"1", "2"}, {"3", "4"}};
  final Iterator<Row> rows = index.iterator();
  for (String[] expected : expectedDims) {
    final Row row = rows.next();
    Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
    Assert.assertEquals(Collections.singletonList(expected[0]), row.getDimension("dim1"));
    Assert.assertEquals(Collections.singletonList(expected[1]), row.getDimension("dim2"));
  }
}
// Fragment of a larger (multi-threaded) test method — enclosing definition not visible here.
// Checks the dimension count, then the row count: with rollup enabled, identical rows added by
// all threads collapse into one (elementsPerThread total); without rollup, every thread's rows
// are kept (elementsPerThread * threadCount). Finally obtains an iterator for row-level checks
// that presumably follow in the truncated remainder — confirm against the full test source.
Assert.assertEquals(dimensionCount, index.getDimensionNames().size()); Assert.assertEquals(elementsPerThread * (isRollup ? 1 : threadCount), index.size()); Iterator<Row> iterator = index.iterator();
@Test
public void testgetDimensions()
{
  // Build a minimal on-heap index with two declared string dimensions and a single count metric.
  final DimensionsSpec dimensionsSpec = new DimensionsSpec(
      DimensionsSpec.getDefaultSchemas(Arrays.asList("dim0", "dim1")),
      null,
      null
  );
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMetrics(new CountAggregatorFactory("count"))
      .withDimensionsSpec(dimensionsSpec)
      .build();
  final IncrementalIndex<Aggregator> incrementalIndex = new IncrementalIndex.Builder()
      .setIndexSchema(schema)
      .setMaxRowCount(1000000)
      .buildOnheap();
  closerRule.closeLater(incrementalIndex);

  // The declared dimension order must be reported back verbatim.
  Assert.assertEquals(Arrays.asList("dim0", "dim1"), incrementalIndex.getDimensionNames());
}
Assert.assertEquals(Arrays.asList("dim1", "dim2", "dim3"), index.getDimensionNames()); Assert.assertEquals( Arrays.asList(
@Override
public List<String> getDimensionNames()
{
  // Pure delegation: the wrapped index owns the dimension-name list.
  return index.getDimensionNames();
}
@Override
public List<String> getColumnNames()
{
  // Column order contract: all dimensions first, then all metrics.
  final List<String> dimensions = getDimensionNames();
  final List<String> metrics = getMetricNames();
  final List<String> columnNames = new ArrayList<>(dimensions.size() + metrics.size());
  columnNames.addAll(dimensions);
  columnNames.addAll(metrics);
  return columnNames;
}
@Override
public Indexed<String> getAvailableDimensions()
{
  // Expose the wrapped index's dimension names through the Indexed interface.
  final List<String> dimensionNames = index.getDimensionNames();
  return new ListIndexed<>(dimensionNames);
}
private void flushIndexToContextAndClose(BytesWritable key, IncrementalIndex index, Context context) throws IOException, InterruptedException { final List<String> dimensions = index.getDimensionNames(); Iterator<Row> rows = index.iterator(); while (rows.hasNext()) { context.progress(); Row row = rows.next(); InputRow inputRow = getInputRowFromRow(row, dimensions); // reportParseExceptions is true as any unparseable data is already handled by the mapper. InputRowSerde.SerializeResult serializeResult = InputRowSerde.toBytes(typeHelperMap, inputRow, combiningAggs); context.write( key, new BytesWritable(serializeResult.getSerializedRow()) ); } index.close(); }