/** * This is not a stable API, prefer using {@link Schema#newPartialRow()} * to create a new partial row. * @param schema the schema to use for this row */ public PartialRow(Schema schema) { this.schema = schema; this.columnsBitSet = new BitSet(this.schema.getColumnCount()); this.nullsBitSet = schema.hasNullableColumns() ? new BitSet(this.schema.getColumnCount()) : null; this.rowAlloc = new byte[schema.getRowSize()]; // Pre-fill the array with nulls. We'll only replace cells that have varlen values. this.varLengthData = Arrays.asList(new ByteBuffer[this.schema.getColumnCount()]); }
/**
 * Verifies that the column at the given index is not set to null.
 *
 * @param columnIndex index of the column to check
 * @throws IllegalArgumentException if the column is null
 */
private void checkNull(int columnIndex) {
  // A schema without nullable columns can never contain a null cell.
  if (schema.hasNullableColumns() && isNull(columnIndex)) {
    String columnName = schema.getColumnByIndex(columnIndex).getName();
    throw new IllegalArgumentException("The requested column (name: " + columnName +
        ", index: " + columnIndex + ") is null");
  }
}
/** * Initializes the state of the encoder based on the schema and number of operations to encode. * * @param schema the schema of the table which the operations belong to. * @param numOperations the number of operations. */ private void init(Schema schema, int numOperations) { this.schema = schema; // Set up the encoded data. // Estimate a maximum size for the data. This is conservative, but avoids // having to loop through all the operations twice. final int columnBitSetSize = Bytes.getBitSetSize(schema.getColumnCount()); int sizePerRow = 1 /* for the op type */ + schema.getRowSize() + columnBitSetSize; if (schema.hasNullableColumns()) { // nullsBitSet is the same size as the columnBitSet sizePerRow += columnBitSetSize; } // TODO: would be more efficient to use a buffer which "chains" smaller allocations // instead of a doubling buffer like BAOS. this.rows = ByteBuffer.allocate(sizePerRow * numOperations) .order(ByteOrder.LITTLE_ENDIAN); this.indirect = new ArrayList<>(schema.getVarLengthColumnCount() * numOperations); }
private void encodeRow(PartialRow row, ChangeType type) { rows.put(type.toEncodedByte()); rows.put(Bytes.fromBitSet(row.getColumnsBitSet(), schema.getColumnCount())); if (schema.hasNullableColumns()) { rows.put(Bytes.fromBitSet(row.getNullsBitSet(), schema.getColumnCount())); } int colIdx = 0; byte[] rowData = row.getRowAlloc(); int currentRowOffset = 0; for (ColumnSchema col : row.getSchema().getColumns()) { // Keys should always be specified, maybe check? if (row.isSet(colIdx) && !row.isSetToNull(colIdx)) { if (col.getType() == Type.STRING || col.getType() == Type.BINARY) { ByteBuffer varLengthData = row.getVarLengthData().get(colIdx); varLengthData.reset(); rows.putLong(indirectWrittenBytes); int bbSize = varLengthData.remaining(); rows.putLong(bbSize); indirect.add(varLengthData); indirectWrittenBytes += bbSize; } else { // This is for cols other than strings rows.put(rowData, currentRowOffset, col.getTypeSize()); } } currentRowOffset += col.getTypeSize(); colIdx++; } }
this.indirectData = indirectData; int columnOffsetsSize = schema.getColumnCount(); if (schema.hasNullableColumns()) { columnOffsetsSize++;
/**
 * Moves the internal pointer to the given row and refreshes the cached
 * nulls bitset for that row.
 *
 * @param rowIndex index of the row to point at
 */
void advancePointerTo(int rowIndex) {
  index = rowIndex;
  offset = rowSize * rowIndex;

  // Decode the nulls bitset only when pointing at a real row; a reset pointer
  // has no row data to read from.
  boolean pointsAtRow = (rowIndex != INDEX_RESET_LOCATION);
  if (pointsAtRow && schema.hasNullableColumns()) {
    int columnCount = schema.getColumnCount();
    nullsBitSet = Bytes.toBitSet(
        rowData.getRawArray(),
        rowData.getRawOffset() + getCurrentRowDataOffsetForColumn(columnCount),
        columnCount);
  }
}