// Row count of the wrapped QueryableIndex; pure delegation, no caching or recomputation.
@Override public int getNumRows() { return index.getNumRows(); }
/**
 * Wraps the given {@link QueryableIndex}, snapshotting its row count, dimension list, and
 * metadata once at construction time so later reads don't re-query the index.
 */
public QueryableIndexIndexableAdapter(QueryableIndex input)
{
  this.input = input;
  this.numRows = input.getNumRows();
  this.availableDimensions = ImmutableList.copyOf(input.getAvailableDimensions());
  this.metadata = input.getMetadata();
}
.mapToLong(queryableIndexAndDataSegment -> queryableIndexAndDataSegment.lhs.getNumRows()) .sum(); final long totalSizeBytes = queryableIndexAndSegments
/**
 * Fetches the given segment's files to a fresh local temp directory, loads it as a
 * {@link QueryableIndex}, records it (and its temp dir) for later cleanup/use, and returns a
 * {@link WindowedStorageAdapter} scoped to the segment's interval.
 *
 * @throws RuntimeException wrapping any {@link IOException} raised while fetching or loading
 */
@Override
public WindowedStorageAdapter apply(WindowedDataSegment segment)
{
  try {
    logger.info("Getting storage path for segment [%s]", segment.getSegment().getId());
    Path path = new Path(JobHelper.getURIFromSegment(segment.getSegment()));
    logger.info("Fetch segment files from [%s]", path);
    // NOTE(review): Guava's Files.createTempDir() is deprecated; consider
    // java.nio.file.Files.createTempDirectory(...) when imports can be adjusted.
    File dir = Files.createTempDir();
    // Track the temp dir so it can be deleted when this mapper finishes.
    tmpSegmentDirs.add(dir);
    logger.info("Locally storing fetched segment at [%s]", dir);
    JobHelper.unzipNoGuava(path, context.getConfiguration(), dir, context, null);
    logger.info("finished fetching segment files");
    QueryableIndex index = HadoopDruidIndexerConfig.INDEX_IO.loadIndex(dir);
    // Track the open index and running row total for downstream accounting.
    indexes.add(index);
    numRows += index.getNumRows();
    return new WindowedStorageAdapter(
        new QueryableIndexStorageAdapter(index),
        segment.getInterval()
    );
  }
  catch (IOException ex) {
    // Throwables.propagate is deprecated in Guava; wrapping directly preserves the cause
    // with identical semantics for a checked exception.
    throw new RuntimeException(ex);
  }
}
}
ReferenceCountingSegment segment = hydrant.getIncrementedSegment(); try { numRowsExcludingCurrIndex.addAndGet(segment.asQueryableIndex().getNumRows());
); final int totalRows = index.getNumRows();
), ImmutableMultiset.copyOf( IntStream.range(0, index.getNumRows()) .mapToObj(rowNumber -> getRow(dictionaryColumn, rowNumber)) for (int i = 0; i < index.getNumRows(); i++) { final List<String> row = getRow(dictionaryColumn, i); if (row.isEmpty() || row.stream().anyMatch(NullHandling::isNullOrEquivalent)) {
// Delegates straight to the underlying index's row count.
@Override public int getNumRows() { return index.getNumRows(); }
/**
 * Adapter over a {@link QueryableIndex}. The row count, available dimensions, and metadata
 * are captured eagerly here rather than read lazily from the index on each access.
 */
public QueryableIndexIndexableAdapter(QueryableIndex input)
{
  this.input = input;
  this.numRows = input.getNumRows();
  this.availableDimensions = ImmutableList.copyOf(input.getAvailableDimensions());
  this.metadata = input.getMetadata();
}
.mapToLong(queryableIndexAndDataSegment -> queryableIndexAndDataSegment.lhs.getNumRows()) .sum(); final long totalSizeBytes = queryableIndexAndSegments
/**
 * Fetches the given segment's files to a fresh local temp directory, loads it as a
 * {@link QueryableIndex}, records it (and its temp dir) for later cleanup/use, and returns a
 * {@link WindowedStorageAdapter} scoped to the segment's interval.
 *
 * @throws RuntimeException wrapping any {@link IOException} raised while fetching or loading
 */
@Override
public WindowedStorageAdapter apply(WindowedDataSegment segment)
{
  try {
    // getIdentifier() is deprecated; getId() matches the sibling implementation elsewhere
    // in this codebase and logs the same segment identifier.
    logger.info("Getting storage path for segment [%s]", segment.getSegment().getId());
    Path path = new Path(JobHelper.getURIFromSegment(segment.getSegment()));
    logger.info("Fetch segment files from [%s]", path);
    // NOTE(review): Guava's Files.createTempDir() is deprecated; consider
    // java.nio.file.Files.createTempDirectory(...) when imports can be adjusted.
    File dir = Files.createTempDir();
    // Track the temp dir so it can be deleted when this mapper finishes.
    tmpSegmentDirs.add(dir);
    logger.info("Locally storing fetched segment at [%s]", dir);
    JobHelper.unzipNoGuava(path, context.getConfiguration(), dir, context, null);
    logger.info("finished fetching segment files");
    QueryableIndex index = HadoopDruidIndexerConfig.INDEX_IO.loadIndex(dir);
    // Track the open index and running row total for downstream accounting.
    indexes.add(index);
    numRows += index.getNumRows();
    return new WindowedStorageAdapter(
        new QueryableIndexStorageAdapter(index),
        segment.getInterval()
    );
  }
  catch (IOException ex) {
    // Throwables.propagate is deprecated in Guava; wrapping directly preserves the cause
    // with identical semantics for a checked exception.
    throw new RuntimeException(ex);
  }
}
}
ReferenceCountingSegment segment = hydrant.getIncrementedSegment(); try { numRowsExcludingCurrIndex.addAndGet(segment.asQueryableIndex().getNumRows());
); final int totalRows = index.getNumRows();