public void readBinarys(int total, ColumnVector c, int rowId, int level, VectorizedValuesReader data) {
  int left = total;
  while (left > 0) {
    if (this.currentCount == 0) this.readNextGroup();
    int n = Math.min(left, this.currentCount);
    switch (mode) {
      case RLE:
        // An RLE run repeats one definition level, so either the whole run is
        // defined (read n binary values) or the whole run is null.
        if (currentValue == level) {
          data.readBinary(n, c, rowId);
        } else {
          c.putNulls(rowId, n);
        }
        break;
      case PACKED:
        // A bit-packed run carries one definition level per slot, so each
        // slot is checked individually.
        for (int i = 0; i < n; ++i) {
          if (currentBuffer[currentBufferIdx++] == level) {
            data.readBinary(1, c, rowId + i);
          } else {
            c.putNull(rowId + i);
          }
        }
        break;
    }
    rowId += n;
    left -= n;
    currentCount -= n;
  }
}
private void readBinaryBatch(int rowId, int num, ColumnVector column) throws IOException {
  // This is where we implement support for the valid type conversions.
  // TODO: implement remaining type conversions
  VectorizedValuesReader data = (VectorizedValuesReader) dataColumn;
  if (column.isArray()) {
    defColumn.readBinarys(num, column, rowId, maxDefLevel, data);
  } else if (column.dataType() == DataTypes.TimestampType) {
    for (int i = 0; i < num; i++) {
      if (defColumn.readInteger() == maxDefLevel) {
        column.putLong(rowId + i,
            // Read 12 bytes for INT96
            ParquetRowConverter.binaryToSQLTimestamp(data.readBinary(12)));
      } else {
        column.putNull(rowId + i);
      }
    }
  } else {
    throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
  }
}
private void readFixedLenByteArrayBatch(int rowId, int num, ColumnVector column, int arrayLen)
    throws IOException {
  VectorizedValuesReader data = (VectorizedValuesReader) dataColumn;
  // Decimals stored as FIXED_LEN_BYTE_ARRAY dispatch on precision: small
  // precisions fit in an int or a long; larger ones keep the raw bytes.
  if (DecimalType.is32BitDecimalType(column.dataType())) {
    for (int i = 0; i < num; i++) {
      if (defColumn.readInteger() == maxDefLevel) {
        column.putInt(rowId + i,
            (int) ParquetRowConverter.binaryToUnscaledLong(data.readBinary(arrayLen)));
      } else {
        column.putNull(rowId + i);
      }
    }
  } else if (DecimalType.is64BitDecimalType(column.dataType())) {
    for (int i = 0; i < num; i++) {
      if (defColumn.readInteger() == maxDefLevel) {
        column.putLong(rowId + i,
            ParquetRowConverter.binaryToUnscaledLong(data.readBinary(arrayLen)));
      } else {
        column.putNull(rowId + i);
      }
    }
  } else {
    for (int i = 0; i < num; i++) {
      if (defColumn.readInteger() == maxDefLevel) {
        column.putByteArray(rowId + i, data.readBinary(arrayLen).getBytes());
      } else {
        column.putNull(rowId + i);
      }
    }
  }
}
// TimestampType branch with optional timezone conversion for INT96 values:
// when no conversion is configured the raw value is stored as-is, otherwise
// it is shifted to UTC via convertTz.
if (convertTz == null) {
  for (int i = 0; i < num; i++) {
    if (defColumn.readInteger() == maxDefLevel) {
      long rawTime = ParquetRowConverter.binaryToSQLTimestamp(data.readBinary(12));
      column.putLong(rowId + i, rawTime);
    } else {
      column.putNull(rowId + i);
    }
  }
} else {
  for (int i = 0; i < num; i++) {
    if (defColumn.readInteger() == maxDefLevel) {
      long rawTime = ParquetRowConverter.binaryToSQLTimestamp(data.readBinary(12));
      long adjTime = DateTimeUtils.convertTz(rawTime, convertTz, UTC);
      column.putLong(rowId + i, adjTime);
    } else {
      column.putNull(rowId + i);
    }
  }
}