/**
 * Deserializes a subset of {@code superset} encoded as a 64-bit exclusion mask
 * (a set bit means the corresponding superset column is absent from the subset).
 *
 * @param superset the columns the subset was encoded against.
 * @param in the input to read the encoded subset from.
 * @return the decoded subset of {@code superset}.
 * @throws IOException if reading from {@code in} fails.
 */
public Columns deserializeSubset(Columns superset, DataInputPlus in) throws IOException
{
    long encoded = in.readUnsignedVInt();

    // 0 means nothing was excluded: the subset is the superset itself.
    if (encoded == 0L)
        return superset;

    // 64 columns or more cannot be represented by a single 64-bit mask; the vint
    // then carries the subset size and a different encoding follows.
    if (superset.size() >= 64)
        return deserializeLargeSubset(in, superset, (int) encoded);

    BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder());
    int firstComplexIdx = 0;
    int bit = 0;
    for (ColumnDefinition column : superset)
    {
        // A clear bit means the column is included in the subset.
        if ((encoded & (1L << bit)) == 0)
        {
            builder.add(column);
            if (column.isSimple())
                ++firstComplexIdx;
        }
        ++bit;
    }
    return new Columns(builder.build(), firstComplexIdx);
}
/**
 * Returns a new {@code Columns} object holding the same columns as the provided set.
 *
 * @param s the set from which to create the new {@code Columns}.
 * @return the newly created {@code Columns} containing the columns from {@code s}.
 */
public static Columns from(Collection<ColumnDefinition> s)
{
    // Build the backing BTree from the collection, then locate the first complex
    // column so simple/complex ranges can be addressed separately.
    Object[] tree = BTree.<ColumnDefinition>builder(Comparator.naturalOrder()).addAll(s).build();
    return new Columns(tree, findFirstComplexIdx(tree));
}
public Columns deserialize(DataInputPlus in, CFMetaData metadata) throws IOException { int length = (int)in.readUnsignedVInt(); BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder()); builder.auto(false); for (int i = 0; i < length; i++) { ByteBuffer name = ByteBufferUtil.readWithVIntLength(in); ColumnDefinition column = metadata.getColumnDefinition(name); if (column == null) { // If we don't find the definition, it could be we have data for a dropped column, and we shouldn't // fail deserialization because of that. So we grab a "fake" ColumnDefinition that ensure proper // deserialization. The column will be ignore later on anyway. column = metadata.getDroppedColumnDefinition(name); if (column == null) throw new RuntimeException("Unknown column " + UTF8Type.instance.getString(name) + " during deserialization"); } builder.add(column); } return new Columns(builder.build()); }
rowBuilder.add(row);
/**
 * Deserializes a partition update in the 3.0 serialization format: table metadata
 * first, then the partition header, then every unfiltered (rows and range
 * tombstone markers), from which the row tree and deletion info are rebuilt.
 *
 * @param in the input to read from.
 * @param version the serialization version the data was written with.
 * @param flag flag describing the provenance of the serialized data.
 * @return the deserialized update.
 * @throws IOException if reading from {@code in} fails.
 */
private static PartitionUpdate deserialize30(DataInputPlus in, int version, SerializationHelper.Flag flag) throws IOException
{
    CFMetaData metadata = CFMetaData.serializer.deserialize(in, version);
    UnfilteredRowIteratorSerializer.Header header = UnfilteredRowIteratorSerializer.serializer.deserializeHeader(metadata, null, in, version, flag);
    // Nothing beyond the header was written for an empty update.
    if (header.isEmpty)
        return emptyUpdate(metadata, header.key);

    // Updates are expected to be serialized forward with a valid row estimate.
    assert !header.isReversed;
    assert header.rowEstimate >= 0;

    MutableDeletionInfo.Builder deletionBuilder = MutableDeletionInfo.builder(header.partitionDeletion, metadata.comparator, false);
    BTree.Builder<Row> rows = BTree.builder(metadata.comparator, header.rowEstimate);
    // auto(false): rows are added as read — presumably already in comparator order
    // on the wire, so the builder's sorting pass is skipped. TODO confirm.
    rows.auto(false);

    // try-with-resources: the iterator wraps the stream and must be closed even if
    // a row fails to deserialize.
    try (UnfilteredRowIterator partition = UnfilteredRowIteratorSerializer.serializer.deserialize(in, version, metadata, flag, header))
    {
        while (partition.hasNext())
        {
            Unfiltered unfiltered = partition.next();
            // Rows go into the row tree; everything else is a range tombstone marker.
            if (unfiltered.kind() == Unfiltered.Kind.ROW)
                rows.add((Row)unfiltered);
            else
                deletionBuilder.add((RangeTombstoneMarker)unfiltered);
        }
    }

    MutableDeletionInfo deletionInfo = deletionBuilder.build();
    // The same deletionInfo instance is shared between the Holder and the update.
    return new PartitionUpdate(metadata,
                               header.key,
                               new Holder(header.sHeader.columns(), rows.build(), deletionInfo, header.staticRow, header.sHeader.stats()),
                               deletionInfo,
                               false);
}
/**
 * Deserializes a partition update in the 3.0 serialization format: table metadata
 * first, then the partition header, then every unfiltered (rows and range
 * tombstone markers), from which the row tree and deletion info are rebuilt.
 *
 * @param in the input to read from.
 * @param version the serialization version the data was written with.
 * @param flag flag describing the provenance of the serialized data.
 * @return the deserialized update.
 * @throws IOException if reading from {@code in} fails.
 */
private static PartitionUpdate deserialize30(DataInputPlus in, int version, SerializationHelper.Flag flag) throws IOException
{
    CFMetaData metadata = CFMetaData.serializer.deserialize(in, version);
    UnfilteredRowIteratorSerializer.Header header = UnfilteredRowIteratorSerializer.serializer.deserializeHeader(metadata, null, in, version, flag);
    // Nothing beyond the header was written for an empty update.
    if (header.isEmpty)
        return emptyUpdate(metadata, header.key);

    // Updates are expected to be serialized forward with a valid row estimate.
    assert !header.isReversed;
    assert header.rowEstimate >= 0;

    MutableDeletionInfo.Builder deletionBuilder = MutableDeletionInfo.builder(header.partitionDeletion, metadata.comparator, false);
    BTree.Builder<Row> rows = BTree.builder(metadata.comparator, header.rowEstimate);
    // auto(false): rows are added as read — presumably already in comparator order
    // on the wire, so the builder's sorting pass is skipped. TODO confirm.
    rows.auto(false);

    // try-with-resources: the iterator wraps the stream and must be closed even if
    // a row fails to deserialize.
    try (UnfilteredRowIterator partition = UnfilteredRowIteratorSerializer.serializer.deserialize(in, version, metadata, flag, header))
    {
        while (partition.hasNext())
        {
            Unfiltered unfiltered = partition.next();
            // Rows go into the row tree; everything else is a range tombstone marker.
            if (unfiltered.kind() == Unfiltered.Kind.ROW)
                rows.add((Row)unfiltered);
            else
                deletionBuilder.add((RangeTombstoneMarker)unfiltered);
        }
    }

    MutableDeletionInfo deletionInfo = deletionBuilder.build();
    // The same deletionInfo instance is shared between the Holder and the update.
    return new PartitionUpdate(metadata,
                               header.key,
                               new Holder(header.sHeader.columns(), rows.build(), deletionInfo, header.staticRow, header.sHeader.stats()),
                               deletionInfo,
                               false);
}
/**
 * Deserializes a subset of {@code superset} encoded as a 64-bit exclusion mask
 * (a set bit means the corresponding superset column is absent from the subset).
 *
 * @param superset the columns the subset was encoded against.
 * @param in the input to read the encoded subset from.
 * @return the decoded subset of {@code superset}.
 * @throws IOException if reading from {@code in} fails.
 */
public Columns deserializeSubset(Columns superset, DataInputPlus in) throws IOException
{
    long encoded = in.readUnsignedVInt();

    // 0 means nothing was excluded: the subset is the superset itself.
    if (encoded == 0L)
        return superset;

    // 64 columns or more cannot be represented by a single 64-bit mask; the vint
    // then carries the subset size and a different encoding follows.
    if (superset.size() >= 64)
        return deserializeLargeSubset(in, superset, (int) encoded);

    BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder());
    int firstComplexIdx = 0;
    int bit = 0;
    for (ColumnDefinition column : superset)
    {
        // A clear bit means the column is included in the subset.
        if ((encoded & (1L << bit)) == 0)
        {
            builder.add(column);
            if (column.isSimple())
                ++firstComplexIdx;
        }
        ++bit;
    }
    return new Columns(builder.build(), firstComplexIdx);
}
/**
 * Deserializes a partition update in the 3.0 serialization format: table metadata
 * first, then the partition header, then every unfiltered (rows and range
 * tombstone markers), from which the row tree and deletion info are rebuilt.
 *
 * @param in the input to read from.
 * @param version the serialization version the data was written with.
 * @param flag flag describing the provenance of the serialized data.
 * @return the deserialized update.
 * @throws IOException if reading from {@code in} fails.
 */
private static PartitionUpdate deserialize30(DataInputPlus in, int version, SerializationHelper.Flag flag) throws IOException
{
    CFMetaData metadata = CFMetaData.serializer.deserialize(in, version);
    UnfilteredRowIteratorSerializer.Header header = UnfilteredRowIteratorSerializer.serializer.deserializeHeader(metadata, null, in, version, flag);
    // Nothing beyond the header was written for an empty update.
    if (header.isEmpty)
        return emptyUpdate(metadata, header.key);

    // Updates are expected to be serialized forward with a valid row estimate.
    assert !header.isReversed;
    assert header.rowEstimate >= 0;

    MutableDeletionInfo.Builder deletionBuilder = MutableDeletionInfo.builder(header.partitionDeletion, metadata.comparator, false);
    BTree.Builder<Row> rows = BTree.builder(metadata.comparator, header.rowEstimate);
    // auto(false): rows are added as read — presumably already in comparator order
    // on the wire, so the builder's sorting pass is skipped. TODO confirm.
    rows.auto(false);

    // try-with-resources: the iterator wraps the stream and must be closed even if
    // a row fails to deserialize.
    try (UnfilteredRowIterator partition = UnfilteredRowIteratorSerializer.serializer.deserialize(in, version, metadata, flag, header))
    {
        while (partition.hasNext())
        {
            Unfiltered unfiltered = partition.next();
            // Rows go into the row tree; everything else is a range tombstone marker.
            if (unfiltered.kind() == Unfiltered.Kind.ROW)
                rows.add((Row)unfiltered);
            else
                deletionBuilder.add((RangeTombstoneMarker)unfiltered);
        }
    }

    MutableDeletionInfo deletionInfo = deletionBuilder.build();
    // The same deletionInfo instance is shared between the Holder and the update.
    return new PartitionUpdate(metadata,
                               header.key,
                               new Holder(header.sHeader.columns(), rows.build(), deletionInfo, header.staticRow, header.sHeader.stats()),
                               deletionInfo,
                               false);
}
/**
 * Deserializes a partition update in the 3.0 serialization format: table metadata
 * first, then the partition header, then every unfiltered (rows and range
 * tombstone markers), from which the row tree and deletion info are rebuilt.
 *
 * @param in the input to read from.
 * @param version the serialization version the data was written with.
 * @param flag flag describing the provenance of the serialized data.
 * @return the deserialized update.
 * @throws IOException if reading from {@code in} fails.
 */
private static PartitionUpdate deserialize30(DataInputPlus in, int version, SerializationHelper.Flag flag) throws IOException
{
    CFMetaData metadata = CFMetaData.serializer.deserialize(in, version);
    UnfilteredRowIteratorSerializer.Header header = UnfilteredRowIteratorSerializer.serializer.deserializeHeader(metadata, null, in, version, flag);
    // Nothing beyond the header was written for an empty update.
    if (header.isEmpty)
        return emptyUpdate(metadata, header.key);

    // Updates are expected to be serialized forward with a valid row estimate.
    assert !header.isReversed;
    assert header.rowEstimate >= 0;

    MutableDeletionInfo.Builder deletionBuilder = MutableDeletionInfo.builder(header.partitionDeletion, metadata.comparator, false);
    BTree.Builder<Row> rows = BTree.builder(metadata.comparator, header.rowEstimate);
    // auto(false): rows are added as read — presumably already in comparator order
    // on the wire, so the builder's sorting pass is skipped. TODO confirm.
    rows.auto(false);

    // try-with-resources: the iterator wraps the stream and must be closed even if
    // a row fails to deserialize.
    try (UnfilteredRowIterator partition = UnfilteredRowIteratorSerializer.serializer.deserialize(in, version, metadata, flag, header))
    {
        while (partition.hasNext())
        {
            Unfiltered unfiltered = partition.next();
            // Rows go into the row tree; everything else is a range tombstone marker.
            if (unfiltered.kind() == Unfiltered.Kind.ROW)
                rows.add((Row)unfiltered);
            else
                deletionBuilder.add((RangeTombstoneMarker)unfiltered);
        }
    }

    MutableDeletionInfo deletionInfo = deletionBuilder.build();
    // The same deletionInfo instance is shared between the Holder and the update.
    return new PartitionUpdate(metadata,
                               header.key,
                               new Holder(header.sHeader.columns(), rows.build(), deletionInfo, header.staticRow, header.sHeader.stats()),
                               deletionInfo,
                               false);
}
/**
 * Deserializes a subset of {@code superset} encoded as a 64-bit exclusion mask
 * (a set bit means the corresponding superset column is absent from the subset).
 *
 * @param superset the columns the subset was encoded against.
 * @param in the input to read the encoded subset from.
 * @return the decoded subset of {@code superset}.
 * @throws IOException if reading from {@code in} fails.
 */
public Columns deserializeSubset(Columns superset, DataInputPlus in) throws IOException
{
    long encoded = in.readUnsignedVInt();

    // 0 means nothing was excluded: the subset is the superset itself.
    if (encoded == 0L)
        return superset;

    // 64 columns or more cannot be represented by a single 64-bit mask; the vint
    // then carries the subset size and a different encoding follows.
    if (superset.size() >= 64)
        return deserializeLargeSubset(in, superset, (int) encoded);

    BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder());
    int firstComplexIdx = 0;
    int bit = 0;
    for (ColumnDefinition column : superset)
    {
        // A clear bit means the column is included in the subset.
        if ((encoded & (1L << bit)) == 0)
        {
            builder.add(column);
            if (column.isSimple())
                ++firstComplexIdx;
        }
        ++bit;
    }
    return new Columns(builder.build(), firstComplexIdx);
}
/**
 * Appends {@code addCount} values from {@code add} directly after the current
 * contents of {@code values}, then delegates to {@code mergeAll(int)} to merge
 * the two runs.
 *
 * @param add the values to merge in; {@code addCount} of them will be consumed.
 * @param addCount the number of values {@code add} yields.
 * @return this builder, for chaining.
 */
private Builder<V> mergeAll(Iterable<V> add, int addCount)
{
    // Merging only makes sense when the builder maintains sorted order itself.
    assert auto;
    // ensure the existing contents are in order
    autoEnforce();
    int curCount = count;
    // we make room for curCount * 2 + addCount, so that we can copy the current values to the end
    // if necessary for continuing the merge, and have the new values directly after the current value range
    // (the max(..., curCount * 3) keeps growth at least geometric for small addCount)
    if (values.length < curCount * 2 + addCount)
        values = Arrays.copyOf(values, max(curCount * 2 + addCount, curCount * 3));
    if (add instanceof BTreeSet)
    {
        // use btree set's fast toArray method, to append directly
        ((BTreeSet) add).toArray(values, curCount);
    }
    else
    {
        // consider calling toArray() and System.arraycopy
        int i = curCount;
        for (V v : add)
            values[i++] = v;
    }
    // mergeAll(int) performs the actual merge of [0, curCount) with the appended run.
    return mergeAll(addCount);
}
/**
 * Deserializes a subset of {@code superset} encoded as a 64-bit exclusion mask
 * (a set bit means the corresponding superset column is absent from the subset).
 *
 * @param superset the columns the subset was encoded against.
 * @param in the input to read the encoded subset from.
 * @return the decoded subset of {@code superset}.
 * @throws IOException if reading from {@code in} fails.
 */
public Columns deserializeSubset(Columns superset, DataInputPlus in) throws IOException
{
    long encoded = in.readUnsignedVInt();

    // 0 means nothing was excluded: the subset is the superset itself.
    if (encoded == 0L)
        return superset;

    // 64 columns or more cannot be represented by a single 64-bit mask; the vint
    // then carries the subset size and a different encoding follows.
    if (superset.size() >= 64)
        return deserializeLargeSubset(in, superset, (int) encoded);

    BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder());
    int firstComplexIdx = 0;
    int bit = 0;
    for (ColumnDefinition column : superset)
    {
        // A clear bit means the column is included in the subset.
        if ((encoded & (1L << bit)) == 0)
        {
            builder.add(column);
            if (column.isSimple())
                ++firstComplexIdx;
        }
        ++bit;
    }
    return new Columns(builder.build(), firstComplexIdx);
}
/**
 * Appends {@code addCount} values from {@code add} directly after the current
 * contents of {@code values}, then delegates to {@code mergeAll(int)} to merge
 * the two runs.
 *
 * @param add the values to merge in; {@code addCount} of them will be consumed.
 * @param addCount the number of values {@code add} yields.
 * @return this builder, for chaining.
 */
private Builder<V> mergeAll(Iterable<V> add, int addCount)
{
    // Merging only makes sense when the builder maintains sorted order itself.
    assert auto;
    // ensure the existing contents are in order
    autoEnforce();
    int curCount = count;
    // we make room for curCount * 2 + addCount, so that we can copy the current values to the end
    // if necessary for continuing the merge, and have the new values directly after the current value range
    // (the max(..., curCount * 3) keeps growth at least geometric for small addCount)
    if (values.length < curCount * 2 + addCount)
        values = Arrays.copyOf(values, max(curCount * 2 + addCount, curCount * 3));
    if (add instanceof BTreeSet)
    {
        // use btree set's fast toArray method, to append directly
        ((BTreeSet) add).toArray(values, curCount);
    }
    else
    {
        // consider calling toArray() and System.arraycopy
        int i = curCount;
        for (V v : add)
            values[i++] = v;
    }
    // mergeAll(int) performs the actual merge of [0, curCount) with the appended run.
    return mergeAll(addCount);
}
/**
 * Appends {@code addCount} values from {@code add} directly after the current
 * contents of {@code values}, then delegates to {@code mergeAll(int)} to merge
 * the two runs.
 *
 * @param add the values to merge in; {@code addCount} of them will be consumed.
 * @param addCount the number of values {@code add} yields.
 * @return this builder, for chaining.
 */
private Builder<V> mergeAll(Iterable<V> add, int addCount)
{
    // Merging only makes sense when the builder maintains sorted order itself.
    assert auto;
    // ensure the existing contents are in order
    autoEnforce();
    int curCount = count;
    // we make room for curCount * 2 + addCount, so that we can copy the current values to the end
    // if necessary for continuing the merge, and have the new values directly after the current value range
    // (the max(..., curCount * 3) keeps growth at least geometric for small addCount)
    if (values.length < curCount * 2 + addCount)
        values = Arrays.copyOf(values, max(curCount * 2 + addCount, curCount * 3));
    if (add instanceof BTreeSet)
    {
        // use btree set's fast toArray method, to append directly
        ((BTreeSet) add).toArray(values, curCount);
    }
    else
    {
        // consider calling toArray() and System.arraycopy
        int i = curCount;
        for (V v : add)
            values[i++] = v;
    }
    // mergeAll(int) performs the actual merge of [0, curCount) with the appended run.
    return mergeAll(addCount);
}
/**
 * Appends {@code addCount} values from {@code add} directly after the current
 * contents of {@code values}, then delegates to {@code mergeAll(int)} to merge
 * the two runs.
 *
 * @param add the values to merge in; {@code addCount} of them will be consumed.
 * @param addCount the number of values {@code add} yields.
 * @return this builder, for chaining.
 */
private Builder<V> mergeAll(Iterable<V> add, int addCount)
{
    // Merging only makes sense when the builder maintains sorted order itself.
    assert auto;
    // ensure the existing contents are in order
    autoEnforce();
    int curCount = count;
    // we make room for curCount * 2 + addCount, so that we can copy the current values to the end
    // if necessary for continuing the merge, and have the new values directly after the current value range
    // (the max(..., curCount * 3) keeps growth at least geometric for small addCount)
    if (values.length < curCount * 2 + addCount)
        values = Arrays.copyOf(values, max(curCount * 2 + addCount, curCount * 3));
    if (add instanceof BTreeSet)
    {
        // use btree set's fast toArray method, to append directly
        ((BTreeSet) add).toArray(values, curCount);
    }
    else
    {
        // consider calling toArray() and System.arraycopy
        int i = curCount;
        for (V v : add)
            values[i++] = v;
    }
    // mergeAll(int) performs the actual merge of [0, curCount) with the appended run.
    return mergeAll(addCount);
}
public Columns deserialize(DataInputPlus in, CFMetaData metadata) throws IOException { int length = (int)in.readUnsignedVInt(); BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder()); builder.auto(false); for (int i = 0; i < length; i++) { ByteBuffer name = ByteBufferUtil.readWithVIntLength(in); ColumnDefinition column = metadata.getColumnDefinition(name); if (column == null) { // If we don't find the definition, it could be we have data for a dropped column, and we shouldn't // fail deserialization because of that. So we grab a "fake" ColumnDefinition that ensure proper // deserialization. The column will be ignore later on anyway. column = metadata.getDroppedColumnDefinition(name); if (column == null) throw new RuntimeException("Unknown column " + UTF8Type.instance.getString(name) + " during deserialization"); } builder.add(column); } return new Columns(builder.build()); }
/**
 * Materializes the content of {@code iterator} into an immutable {@code Holder}.
 *
 * @param iterator the iterator to consume; it is fully exhausted by this call.
 * @param initialRowCapacity sizing hint for the row builder.
 * @param ordered whether rows already arrive in builder order, allowing the
 *        builder's sorting pass to be disabled.
 * @return a holder with the iterator's rows, deletion info, static row and stats.
 */
protected static Holder build(UnfilteredRowIterator iterator, int initialRowCapacity, boolean ordered)
{
    CFMetaData metadata = iterator.metadata();
    boolean reversed = iterator.isReverseOrder();
    PartitionColumns columns = iterator.columns();

    MutableDeletionInfo.Builder deletions = MutableDeletionInfo.builder(iterator.partitionLevelDeletion(), metadata.comparator, reversed);
    BTree.Builder<Row> rowBuilder = BTree.builder(metadata.comparator, initialRowCapacity);
    rowBuilder.auto(!ordered);

    while (iterator.hasNext())
    {
        Unfiltered next = iterator.next();
        // Range tombstone markers feed the deletion info; rows feed the row tree.
        if (next.kind() != Unfiltered.Kind.ROW)
            deletions.add((RangeTombstoneMarker)next);
        else
            rowBuilder.add((Row)next);
    }

    // A reverse iterator produced the rows backwards; flip them before building.
    if (reversed)
        rowBuilder.reverse();

    return new Holder(columns, rowBuilder.build(), deletions.build(), iterator.staticRow(), iterator.stats());
}
public Columns deserialize(DataInputPlus in, CFMetaData metadata) throws IOException { int length = (int)in.readUnsignedVInt(); BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder()); builder.auto(false); for (int i = 0; i < length; i++) { ByteBuffer name = ByteBufferUtil.readWithVIntLength(in); ColumnDefinition column = metadata.getColumnDefinition(name); if (column == null) { // If we don't find the definition, it could be we have data for a dropped column, and we shouldn't // fail deserialization because of that. So we grab a "fake" ColumnDefinition that ensure proper // deserialization. The column will be ignore later on anyway. column = metadata.getDroppedColumnDefinition(name); if (column == null) throw new RuntimeException("Unknown column " + UTF8Type.instance.getString(name) + " during deserialization"); } builder.add(column); } return new Columns(builder.build()); }
/**
 * Materializes the content of {@code iterator} into an immutable {@code Holder}.
 *
 * @param iterator the iterator to consume; it is fully exhausted by this call.
 * @param initialRowCapacity sizing hint for the row builder.
 * @param ordered whether rows already arrive in builder order, allowing the
 *        builder's sorting pass to be disabled.
 * @return a holder with the iterator's rows, deletion info, static row and stats.
 */
protected static Holder build(UnfilteredRowIterator iterator, int initialRowCapacity, boolean ordered)
{
    CFMetaData metadata = iterator.metadata();
    boolean reversed = iterator.isReverseOrder();
    PartitionColumns columns = iterator.columns();

    MutableDeletionInfo.Builder deletions = MutableDeletionInfo.builder(iterator.partitionLevelDeletion(), metadata.comparator, reversed);
    BTree.Builder<Row> rowBuilder = BTree.builder(metadata.comparator, initialRowCapacity);
    rowBuilder.auto(!ordered);

    while (iterator.hasNext())
    {
        Unfiltered next = iterator.next();
        // Range tombstone markers feed the deletion info; rows feed the row tree.
        if (next.kind() != Unfiltered.Kind.ROW)
            deletions.add((RangeTombstoneMarker)next);
        else
            rowBuilder.add((Row)next);
    }

    // A reverse iterator produced the rows backwards; flip them before building.
    if (reversed)
        rowBuilder.reverse();

    return new Holder(columns, rowBuilder.build(), deletions.build(), iterator.staticRow(), iterator.stats());
}
public Columns deserialize(DataInputPlus in, CFMetaData metadata) throws IOException { int length = (int)in.readUnsignedVInt(); BTree.Builder<ColumnDefinition> builder = BTree.builder(Comparator.naturalOrder()); builder.auto(false); for (int i = 0; i < length; i++) { ByteBuffer name = ByteBufferUtil.readWithVIntLength(in); ColumnDefinition column = metadata.getColumnDefinition(name); if (column == null) { // If we don't find the definition, it could be we have data for a dropped column, and we shouldn't // fail deserialization because of that. So we grab a "fake" ColumnDefinition that ensure proper // deserialization. The column will be ignore later on anyway. column = metadata.getDroppedColumnDefinition(name); if (column == null) throw new RuntimeException("Unknown column " + UTF8Type.instance.getString(name) + " during deserialization"); } builder.add(column); } return new Columns(builder.build()); }