@Override
public void setLong(int ordinal, long value) {
  assert (!columns[ordinal].isConstant);
  columns[ordinal].putNotNull(rowId);
  columns[ordinal].putLong(rowId, value);
}
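The write is deliberately two steps: clear the slot's null flag, then store the value, because nullability lives in a mask separate from the data. A minimal sketch of that layout, using a hypothetical NullableLongColumn class (the names are illustrative, not Spark's):

public class NullableLongColumn {
  final long[] values;
  final boolean[] isNull;

  NullableLongColumn(int capacity) {
    values = new long[capacity];
    isNull = new boolean[capacity];
    java.util.Arrays.fill(isNull, true);  // every slot starts out null
  }

  // Same two-step write as setLong above: putNotNull, then putLong.
  void setLong(int rowId, long value) {
    isNull[rowId] = false;
    values[rowId] = value;
  }
}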
public final int appendLong(long v) {
  reserve(elementsAppended + 1);
  putLong(elementsAppended, v);
  return elementsAppended++;
}
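The append path reserves capacity before every write, so putLong itself never bounds-checks, and the post-increment returns the row id that was just filled. A standalone sketch of the same reserve-then-put pattern, again with a hypothetical LongColumn class:

import java.util.Arrays;

final class LongColumn {
  private long[] data = new long[4];
  private int elementsAppended = 0;

  // Grow the backing array so at least `capacity` elements fit.
  private void reserve(int capacity) {
    if (capacity > data.length) {
      data = Arrays.copyOf(data, Math.max(capacity, data.length * 2));
    }
  }

  private void putLong(int rowId, long value) {
    data[rowId] = value;
  }

  // Mirrors appendLong above: reserve, write, return the row id used.
  public int appendLong(long v) {
    reserve(elementsAppended + 1);
    putLong(elementsAppended, v);
    return elementsAppended++;
  }
}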
public final void putDecimal(int rowId, Decimal value, int precision) {
  if (precision <= Decimal.MAX_INT_DIGITS()) {
    putInt(rowId, (int) value.toUnscaledLong());
  } else if (precision <= Decimal.MAX_LONG_DIGITS()) {
    putLong(rowId, value.toUnscaledLong());
  } else {
    BigInteger bigInteger = value.toJavaBigDecimal().unscaledValue();
    putByteArray(rowId, bigInteger.toByteArray());
  }
}
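putDecimal picks the narrowest physical store the precision allows: an int for up to Decimal.MAX_INT_DIGITS() digits, a long up to Decimal.MAX_LONG_DIGITS(), and the unscaled big-endian bytes beyond that. A sketch of the same decision with plain java.math types, assuming those limits are 9 and 18 digits:

import java.math.BigDecimal;

public class DecimalStorage {
  static String storageFor(BigDecimal value, int precision) {
    if (precision <= 9) {            // assumed Decimal.MAX_INT_DIGITS()
      return "int: " + (int) value.unscaledValue().longValueExact();
    } else if (precision <= 18) {    // assumed Decimal.MAX_LONG_DIGITS()
      return "long: " + value.unscaledValue().longValueExact();
    } else {
      // Arbitrary precision: fall back to the unscaled two's-complement bytes.
      return "bytes[" + value.unscaledValue().toByteArray().length + "]";
    }
  }

  public static void main(String[] args) {
    System.out.println(storageFor(new BigDecimal("123.45"), 5));
    System.out.println(storageFor(new BigDecimal("12345678901.23"), 13));
    System.out.println(storageFor(new BigDecimal("123456789012345678901.23"), 23));
  }
}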
public void readLongs(int total, ColumnVector c, int rowId, int level,
    VectorizedValuesReader data) {
  int left = total;
  while (left > 0) {
    if (this.currentCount == 0) this.readNextGroup();
    int n = Math.min(left, this.currentCount);
    switch (mode) {
      case RLE:
        if (currentValue == level) {
          data.readLongs(n, c, rowId);
        } else {
          c.putNulls(rowId, n);
        }
        break;
      case PACKED:
        for (int i = 0; i < n; ++i) {
          if (currentBuffer[currentBufferIdx++] == level) {
            c.putLong(rowId + i, data.readLong());
          } else {
            c.putNull(rowId + i);
          }
        }
        break;
    }
    rowId += n;
    left -= n;
    currentCount -= n;
  }
}
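The level stream is Parquet's RLE/bit-packed hybrid: an RLE group repeats one level value for a whole run, while a PACKED group carries one level per row, which is why the RLE branch can read or null an entire run at once but the PACKED branch must loop. A simplified sketch of that control flow over pre-decoded runs (the Run record is illustrative, not Parquet's wire format):

import java.util.List;

public class HybridLevels {
  // Illustrative pre-decoded run: RLE repeats one level, PACKED has one per row.
  record Run(boolean rle, int repeatedLevel, int[] packedLevels, int count) {}

  // For each row, report whether its value is defined (level == maxDefLevel).
  static boolean[] definedFlags(List<Run> runs, int total, int maxDefLevel) {
    boolean[] defined = new boolean[total];
    int rowId = 0;
    for (Run run : runs) {
      if (run.rle()) {
        // One comparison covers the whole run, like the RLE branch above.
        boolean d = run.repeatedLevel() == maxDefLevel;
        for (int i = 0; i < run.count(); i++) defined[rowId + i] = d;
      } else {
        // Packed runs need a per-row comparison, like the PACKED branch.
        for (int i = 0; i < run.count(); i++) {
          defined[rowId + i] = run.packedLevels()[i] == maxDefLevel;
        }
      }
      rowId += run.count();
    }
    return defined;
  }

  public static void main(String[] args) {
    List<Run> runs = List.of(
        new Run(true, 1, null, 3),                  // three defined rows
        new Run(false, 0, new int[]{1, 0, 1}, 3));  // defined, null, defined
    System.out.println(java.util.Arrays.toString(definedFlags(runs, 6, 1)));
  }
}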
private void readBinaryBatch(int rowId, int num, ColumnVector column) throws IOException {
  // This is where we implement support for the valid type conversions.
  // TODO: implement remaining type conversions
  VectorizedValuesReader data = (VectorizedValuesReader) dataColumn;
  if (column.isArray()) {
    defColumn.readBinarys(num, column, rowId, maxDefLevel, data);
  } else if (column.dataType() == DataTypes.TimestampType) {
    for (int i = 0; i < num; i++) {
      if (defColumn.readInteger() == maxDefLevel) {
        // Read 12 bytes for INT96
        column.putLong(rowId + i,
            ParquetRowConverter.binaryToSQLTimestamp(data.readBinary(12)));
      } else {
        column.putNull(rowId + i);
      }
    }
  } else {
    throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
  }
}
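An INT96 timestamp is 12 little-endian bytes: 8 bytes of nanoseconds within the day followed by a 4-byte Julian day number. A plain-Java sketch of the conversion binaryToSQLTimestamp performs, assuming the standard epoch constants (this is a sketch, not Spark's exact code):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class Int96 {
  // Julian day number of 1970-01-01, and microseconds per day.
  static final int JULIAN_DAY_OF_EPOCH = 2440588;
  static final long MICROS_PER_DAY = 86_400_000_000L;

  // Convert the 12-byte INT96 layout to microseconds since the epoch.
  static long toEpochMicros(byte[] int96) {
    ByteBuffer buf = ByteBuffer.wrap(int96).order(ByteOrder.LITTLE_ENDIAN);
    long nanosOfDay = buf.getLong();  // first 8 bytes
    int julianDay = buf.getInt();     // last 4 bytes
    return (julianDay - JULIAN_DAY_OF_EPOCH) * MICROS_PER_DAY + nanosOfDay / 1000;
  }

  public static void main(String[] args) {
    // Midnight on the epoch day itself should come out as 0 microseconds.
    byte[] epoch = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN)
        .putLong(0L).putInt(JULIAN_DAY_OF_EPOCH).array();
    System.out.println(toEpochMicros(epoch)); // 0
  }
}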
private void readLongBatch(int rowId, int num, ColumnVector column) throws IOException {
  // This is where we implement support for the valid type conversions.
  if (column.dataType() == DataTypes.LongType ||
      DecimalType.is64BitDecimalType(column.dataType())) {
    defColumn.readLongs(
        num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn);
  } else if (column.dataType() == DataTypes.TimestampType) {
    for (int i = 0; i < num; i++) {
      if (defColumn.readInteger() == maxDefLevel) {
        column.putLong(rowId + i, DateTimeUtils.fromMillis(dataColumn.readLong()));
      } else {
        column.putNull(rowId + i);
      }
    }
  } else {
    throw new UnsupportedOperationException("Unsupported conversion to: " + column.dataType());
  }
}
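The timestamp branch exists because Parquet TIMESTAMP_MILLIS data is INT64 milliseconds while Spark's internal timestamps are microseconds; DateTimeUtils.fromMillis is just that unit conversion. A sketch of the equivalent arithmetic:

public class MillisToMicros {
  // DateTimeUtils.fromMillis equivalent: epoch milliseconds -> microseconds.
  static long fromMillis(long millis) {
    return millis * 1000L;
  }

  public static void main(String[] args) {
    System.out.println(fromMillis(1_000L)); // 1_000_000 micros == 1 second
  }
}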
public void putLong(int rowId, long value) {
  vector.putLong(rowId, value);
}
for (int i = 0; i < num; i++) {
  if (defColumn.readInteger() == maxDefLevel) {
    column.putLong(rowId + i,
        ParquetRowConverter.binaryToUnscaledLong(data.readBinary(arrayLen)));
  } else {
    column.putNull(rowId + i);
  }
}
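Fixed-length decimals carry the unscaled value as big-endian two's-complement bytes, so the decode is a shift-accumulate followed by a sign extension to 64 bits. A sketch of the logic binaryToUnscaledLong implements (not Spark's exact code):

public class UnscaledLong {
  // Big-endian two's-complement bytes -> signed long (1 <= bytes.length <= 8).
  static long binaryToUnscaledLong(byte[] bytes) {
    long unscaled = 0L;
    for (byte b : bytes) {
      unscaled = (unscaled << 8) | (b & 0xffL);
    }
    // Sign-extend from the top bit of the encoded width.
    int bits = 8 * bytes.length;
    return (unscaled << (64 - bits)) >> (64 - bits);
  }

  public static void main(String[] args) {
    System.out.println(binaryToUnscaledLong(new byte[]{0x01, 0x00}));        // 256
    System.out.println(binaryToUnscaledLong(new byte[]{(byte) 0xff, 0x00})); // -256
  }
}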
// Dictionary-encoded INT64 columns: decode ids straight to longs.
for (int i = rowId; i < rowId + num; ++i) {
  if (!column.isNullAt(i)) {
    column.putLong(i, dictionary.decodeToLong(dictionaryIds.getDictId(i)));
  }
}

// Timestamps stored as INT64 millis: convert to microseconds while decoding.
for (int i = rowId; i < rowId + num; ++i) {
  if (!column.isNullAt(i)) {
    column.putLong(i,
        DateTimeUtils.fromMillis(dictionary.decodeToLong(dictionaryIds.getDictId(i))));
  }
}

// INT96 timestamps and fixed-length decimals: decode the dictionary entry to a
// Binary first, then convert it.
for (int i = rowId; i < rowId + num; ++i) {
  if (!column.isNullAt(i)) {
    Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(i));
    column.putLong(i, ParquetRowConverter.binaryToSQLTimestamp(v));
  }
}
for (int i = rowId; i < rowId + num; ++i) {
  if (!column.isNullAt(i)) {
    Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(i));
    column.putLong(i, ParquetRowConverter.binaryToUnscaledLong(v));
  }
}
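Dictionary decoding is a level of indirection: the data pages hold small integer ids, and each id is looked up in the page's dictionary at materialization time. A standalone sketch of the pattern with a plain long[] dictionary (the names are illustrative):

public class DictDecode {
  // dictionary: distinct values from the dictionary page.
  // ids: one dictionary index per row; isNull marks rows without a value.
  static long[] decode(long[] dictionary, int[] ids, boolean[] isNull) {
    long[] out = new long[ids.length];
    for (int i = 0; i < ids.length; ++i) {
      if (!isNull[i]) {
        out[i] = dictionary[ids[i]];  // same shape as decodeToLong(getDictId(i))
      }
    }
    return out;
  }

  public static void main(String[] args) {
    long[] dict = {100L, 200L, 300L};
    int[] ids = {2, 0, 0, 1};
    boolean[] nulls = {false, false, true, false};
    System.out.println(java.util.Arrays.toString(decode(dict, ids, nulls)));
    // [300, 100, 0, 200] -- the null row keeps the default 0
  }
}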
if (t instanceof org.apache.spark.sql.types.CalendarIntervalType) {
  CalendarInterval c = (CalendarInterval) value;
  vector.getChildColumn(0).putInt(rowId, c.months);
  vector.getChildColumn(1).putLong(rowId, c.microseconds);
} else if (t instanceof org.apache.spark.sql.types.DateType) {
  putInt(rowId, (int) value);
}
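Because column vectors have no dedicated interval type, a CalendarInterval is flattened into a struct of two child vectors: an int months column and a long microseconds column. A sketch of that layout with plain arrays (illustrative, not Spark's classes):

public class IntervalColumns {
  // Child 0: months (int); child 1: microseconds (long).
  final int[] months;
  final long[] micros;

  IntervalColumns(int capacity) {
    months = new int[capacity];
    micros = new long[capacity];
  }

  // Mirrors the two putInt/putLong child writes above.
  void putInterval(int rowId, int m, long us) {
    months[rowId] = m;
    micros[rowId] = us;
  }

  public static void main(String[] args) {
    IntervalColumns col = new IntervalColumns(4);
    col.putInterval(0, 14, 3_600_000_000L); // 14 months, 1 hour
    System.out.println(col.months[0] + " months, " + col.micros[0] + " micros");
  }
}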