/**
 * Returns the static row of the wrapped iterator.
 * <p>
 * Also resets {@code lastClustering}, since the static row logically precedes
 * any clustered row (presumably the field tracks the last clustered position
 * seen — TODO confirm against the enclosing class).
 */
public Row staticRow()
{
    Row result = rowIterator.staticRow();
    lastClustering = null;
    return result;
}
/**
 * Returns the static row of the wrapped iterator.
 * <p>
 * Also resets {@code lastClustering}, since the static row logically precedes
 * any clustered row (presumably the field tracks the last clustered position
 * seen — TODO confirm against the enclosing class).
 */
public Row staticRow()
{
    Row result = rowIterator.staticRow();
    lastClustering = null;
    return result;
}
/**
 * Returns the static row of the wrapped iterator.
 * <p>
 * Also resets {@code lastClustering}, since the static row logically precedes
 * any clustered row (presumably the field tracks the last clustered position
 * seen — TODO confirm against the enclosing class).
 */
public Row staticRow()
{
    Row result = rowIterator.staticRow();
    lastClustering = null;
    return result;
}
/**
 * Returns whether the provided iterator has no data.
 */
public default boolean isEmpty()
{
    // The static row is checked first, before any clustered row is consumed,
    // mirroring the short-circuit order of the original conjunction.
    if (!staticRow().isEmpty())
        return false;
    return !hasNext();
}
}
/**
 * Converts {@code iterator} to its legacy representation by delegating to the
 * metadata-aware overload, using the iterator's own metadata and static row.
 */
public static Pair<LegacyRangeTombstoneList, Iterator<LegacyCell>> fromRowIterator(final RowIterator iterator)
{
    CFMetaData metadata = iterator.metadata();
    Row staticRow = iterator.staticRow();
    return fromRowIterator(metadata, iterator, staticRow);
}
/**
 * Returns whether the provided iterator has no data.
 */
public default boolean isEmpty()
{
    // The static row is checked first, before any clustered row is consumed,
    // mirroring the short-circuit order of the original conjunction.
    if (!staticRow().isEmpty())
        return false;
    return !hasNext();
}
}
/**
 * Converts {@code iterator} to its legacy representation by delegating to the
 * metadata-aware overload, using the iterator's own metadata and static row.
 */
public static Pair<LegacyRangeTombstoneList, Iterator<LegacyCell>> fromRowIterator(final RowIterator iterator)
{
    CFMetaData metadata = iterator.metadata();
    Row staticRow = iterator.staticRow();
    return fromRowIterator(metadata, iterator, staticRow);
}
/**
 * Returns whether the provided iterator has no data.
 */
public default boolean isEmpty()
{
    // The static row is checked first, before any clustered row is consumed,
    // mirroring the short-circuit order of the original conjunction.
    if (!staticRow().isEmpty())
        return false;
    return !hasNext();
}
}
/**
 * Returns whether the provided iterator has no data.
 */
public default boolean isEmpty()
{
    // The static row is checked first, before any clustered row is consumed,
    // mirroring the short-circuit order of the original conjunction.
    if (!staticRow().isEmpty())
        return false;
    return !hasNext();
}
}
/**
 * Converts {@code iterator} to its legacy representation by delegating to the
 * metadata-aware overload, using the iterator's own metadata and static row.
 */
public static Pair<LegacyRangeTombstoneList, Iterator<LegacyCell>> fromRowIterator(final RowIterator iterator)
{
    CFMetaData metadata = iterator.metadata();
    Row staticRow = iterator.staticRow();
    return fromRowIterator(metadata, iterator, staticRow);
}
/**
 * Converts {@code iterator} to its legacy representation by delegating to the
 * metadata-aware overload, using the iterator's own metadata and static row.
 */
public static Pair<LegacyRangeTombstoneList, Iterator<LegacyCell>> fromRowIterator(final RowIterator iterator)
{
    CFMetaData metadata = iterator.metadata();
    Row staticRow = iterator.staticRow();
    return fromRowIterator(metadata, iterator, staticRow);
}
public static void digest(RowIterator iterator, MessageDigest digest) { // TODO: we're not computing digest the same way that old nodes. This is // currently ok as this is only used for schema digest and the is no exchange // of schema digest between different versions. If this changes however, // we'll need to agree on a version. digest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(digest); iterator.columns().statics.digest(digest); FBUtilities.updateWithBoolean(digest, iterator.isReverseOrder()); iterator.staticRow().digest(digest); while (iterator.hasNext()) iterator.next().digest(digest); }
public static void digest(RowIterator iterator, MessageDigest digest, MessageDigest altDigest, Set<ByteBuffer> columnsToExclude) { // TODO: we're not computing digest the same way that old nodes. This is // currently ok as this is only used for schema digest and the is no exchange // of schema digest between different versions. If this changes however, // we'll need to agree on a version. digest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(digest); iterator.columns().statics.digest(digest); FBUtilities.updateWithBoolean(digest, iterator.isReverseOrder()); iterator.staticRow().digest(digest); if (altDigest != null) { // Compute the "alternative digest" here. altDigest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(altDigest, columnsToExclude); iterator.columns().statics.digest(altDigest, columnsToExclude); FBUtilities.updateWithBoolean(altDigest, iterator.isReverseOrder()); iterator.staticRow().digest(altDigest, columnsToExclude); } while (iterator.hasNext()) { Row row = iterator.next(); row.digest(digest); if (altDigest != null) row.digest(altDigest, columnsToExclude); } }
public static void digest(RowIterator iterator, MessageDigest digest, MessageDigest altDigest, Set<ByteBuffer> columnsToExclude) { // TODO: we're not computing digest the same way that old nodes. This is // currently ok as this is only used for schema digest and the is no exchange // of schema digest between different versions. If this changes however, // we'll need to agree on a version. digest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(digest); iterator.columns().statics.digest(digest); FBUtilities.updateWithBoolean(digest, iterator.isReverseOrder()); iterator.staticRow().digest(digest); if (altDigest != null) { // Compute the "alternative digest" here. altDigest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(altDigest, columnsToExclude); iterator.columns().statics.digest(altDigest, columnsToExclude); FBUtilities.updateWithBoolean(altDigest, iterator.isReverseOrder()); iterator.staticRow().digest(altDigest, columnsToExclude); } while (iterator.hasNext()) { Row row = iterator.next(); row.digest(digest); if (altDigest != null) row.digest(altDigest, columnsToExclude); } }
public static void digest(RowIterator iterator, MessageDigest digest, MessageDigest altDigest, Set<ByteBuffer> columnsToExclude) { // TODO: we're not computing digest the same way that old nodes. This is // currently ok as this is only used for schema digest and the is no exchange // of schema digest between different versions. If this changes however, // we'll need to agree on a version. digest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(digest); iterator.columns().statics.digest(digest); FBUtilities.updateWithBoolean(digest, iterator.isReverseOrder()); iterator.staticRow().digest(digest); if (altDigest != null) { // Compute the "alternative digest" here. altDigest.update(iterator.partitionKey().getKey().duplicate()); iterator.columns().regulars.digest(altDigest, columnsToExclude); iterator.columns().statics.digest(altDigest, columnsToExclude); FBUtilities.updateWithBoolean(altDigest, iterator.isReverseOrder()); iterator.staticRow().digest(altDigest, columnsToExclude); } while (iterator.hasNext()) { Row row = iterator.next(); row.digest(digest); if (altDigest != null) row.digest(altDigest, columnsToExclude); } }
/**
 * Materializes {@code rows} into a {@code Holder}: the clustered rows go into a
 * BTree (re-reversed when the iterator is in reverse order, so the tree is in
 * comparator order), alongside the given deletion info and the static row.
 * Encoding stats are collected only when {@code buildEncodingStats} is set,
 * otherwise {@code EncodingStats.NO_STATS} is used.
 */
protected static Holder build(RowIterator rows, DeletionInfo deletion, boolean buildEncodingStats, int initialRowCapacity)
{
    CFMetaData metadata = rows.metadata();
    PartitionColumns columns = rows.columns();
    boolean reversed = rows.isReverseOrder();

    BTree.Builder<Row> rowBuilder = BTree.builder(metadata.comparator, initialRowCapacity);
    rowBuilder.auto(false);
    while (rows.hasNext())
        rowBuilder.add(rows.next());
    if (reversed)
        rowBuilder.reverse();

    // Read the static row only after the iterator has been fully consumed,
    // matching the original call order.
    Row staticRow = rows.staticRow();
    Object[] tree = rowBuilder.build();
    EncodingStats stats = buildEncodingStats
                        ? EncodingStats.Collector.collect(staticRow, BTree.iterator(tree), deletion)
                        : EncodingStats.NO_STATS;
    return new Holder(columns, tree, deletion, staticRow, stats);
}
/**
 * Materializes {@code rows} into a {@code Holder}: the clustered rows go into a
 * BTree (re-reversed when the iterator is in reverse order, so the tree is in
 * comparator order), alongside the given deletion info and the static row.
 * Encoding stats are collected only when {@code buildEncodingStats} is set,
 * otherwise {@code EncodingStats.NO_STATS} is used.
 */
protected static Holder build(RowIterator rows, DeletionInfo deletion, boolean buildEncodingStats, int initialRowCapacity)
{
    CFMetaData metadata = rows.metadata();
    PartitionColumns columns = rows.columns();
    boolean reversed = rows.isReverseOrder();

    BTree.Builder<Row> rowBuilder = BTree.builder(metadata.comparator, initialRowCapacity);
    rowBuilder.auto(false);
    while (rows.hasNext())
        rowBuilder.add(rows.next());
    if (reversed)
        rowBuilder.reverse();

    // Read the static row only after the iterator has been fully consumed,
    // matching the original call order.
    Row staticRow = rows.staticRow();
    Object[] tree = rowBuilder.build();
    EncodingStats stats = buildEncodingStats
                        ? EncodingStats.Collector.collect(staticRow, BTree.iterator(tree), deletion)
                        : EncodingStats.NO_STATS;
    return new Holder(columns, tree, deletion, staticRow, stats);
}
/**
 * Materializes {@code rows} into a {@code Holder}: the clustered rows go into a
 * BTree (re-reversed when the iterator is in reverse order, so the tree is in
 * comparator order), alongside the given deletion info and the static row.
 * Encoding stats are collected only when {@code buildEncodingStats} is set,
 * otherwise {@code EncodingStats.NO_STATS} is used.
 */
protected static Holder build(RowIterator rows, DeletionInfo deletion, boolean buildEncodingStats, int initialRowCapacity)
{
    CFMetaData metadata = rows.metadata();
    PartitionColumns columns = rows.columns();
    boolean reversed = rows.isReverseOrder();

    BTree.Builder<Row> rowBuilder = BTree.builder(metadata.comparator, initialRowCapacity);
    rowBuilder.auto(false);
    while (rows.hasNext())
        rowBuilder.add(rows.next());
    if (reversed)
        rowBuilder.reverse();

    // Read the static row only after the iterator has been fully consumed,
    // matching the original call order.
    Row staticRow = rows.staticRow();
    Object[] tree = rowBuilder.build();
    EncodingStats stats = buildEncodingStats
                        ? EncodingStats.Collector.collect(staticRow, BTree.iterator(tree), deletion)
                        : EncodingStats.NO_STATS;
    return new Holder(columns, tree, deletion, staticRow, stats);
}
/**
 * Materializes {@code rows} into a {@code Holder}: the clustered rows go into a
 * BTree (re-reversed when the iterator is in reverse order, so the tree is in
 * comparator order), alongside the given deletion info and the static row.
 * Encoding stats are collected only when {@code buildEncodingStats} is set,
 * otherwise {@code EncodingStats.NO_STATS} is used.
 */
protected static Holder build(RowIterator rows, DeletionInfo deletion, boolean buildEncodingStats, int initialRowCapacity)
{
    CFMetaData metadata = rows.metadata();
    PartitionColumns columns = rows.columns();
    boolean reversed = rows.isReverseOrder();

    BTree.Builder<Row> rowBuilder = BTree.builder(metadata.comparator, initialRowCapacity);
    rowBuilder.auto(false);
    while (rows.hasNext())
        rowBuilder.add(rows.next());
    if (reversed)
        rowBuilder.reverse();

    // Read the static row only after the iterator has been fully consumed,
    // matching the original call order.
    Row staticRow = rows.staticRow();
    Object[] tree = rowBuilder.build();
    EncodingStats stats = buildEncodingStats
                        ? EncodingStats.Collector.collect(staticRow, BTree.iterator(tree), deletion)
                        : EncodingStats.NO_STATS;
    return new Holder(columns, tree, deletion, staticRow, stats);
}
/**
 * Reads back the current values for the given counter marks from {@code cfs}
 * and updates each mark via {@code updateForRow}.
 * <p>
 * Builds a name-based read for exactly the clusterings/columns the marks cover,
 * executes it against memtables and disk, then walks the result in lockstep
 * with the (peeking) mark iterator, starting with the static row.
 * NOTE(review): assumes marks are ordered consistently with the read's
 * clustering order so the lockstep walk lines up — confirm against callers.
 */
private void updateWithCurrentValuesFromCFS(List<PartitionUpdate.CounterMark> marks, ColumnFamilyStore cfs)
{
    ColumnFilter.Builder builder = ColumnFilter.selectionBuilder();
    BTreeSet.Builder<Clustering> names = BTreeSet.builder(cfs.metadata.comparator);
    for (PartitionUpdate.CounterMark mark : marks)
    {
        // Static rows are fetched implicitly; only clustered names are listed.
        // Reference comparison against the STATIC_CLUSTERING sentinel is
        // intentional here.
        if (mark.clustering() != Clustering.STATIC_CLUSTERING)
            names.add(mark.clustering());
        if (mark.path() == null)
            builder.add(mark.column());
        else
            builder.select(mark.column(), mark.path());
    }

    int nowInSec = FBUtilities.nowInSeconds();
    ClusteringIndexNamesFilter filter = new ClusteringIndexNamesFilter(names.build(), false);
    SinglePartitionReadCommand cmd = SinglePartitionReadCommand.create(cfs.metadata, nowInSec, key(), builder.build(), filter);
    PeekingIterator<PartitionUpdate.CounterMark> markIter = Iterators.peekingIterator(marks.iterator());
    // try-with-resources guarantees the execution controller and the row
    // iterator are released even on early return below.
    try (ReadExecutionController controller = cmd.executionController();
         RowIterator partition = UnfilteredRowIterators.filter(cmd.queryMemtableAndDisk(cfs, controller), nowInSec))
    {
        updateForRow(markIter, partition.staticRow(), cfs);

        while (partition.hasNext())
        {
            // Once every mark is consumed, the remaining rows are irrelevant.
            if (!markIter.hasNext())
                return;

            updateForRow(markIter, partition.next(), cfs);
        }
    }
}