@Override
public void setInt(int ordinal, int value) {
  assert (!columns[ordinal].isConstant);
  columns[ordinal].putNotNull(rowId);
  columns[ordinal].putInt(rowId, value);
}
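// Note on the write pattern above: WritableColumnVector tracks nulls in a
// separate mask, and putInt only stores the value, so putNotNull must clear
// the slot's null bit first; otherwise a slot previously marked null would
// still read back as null.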
public final int appendInt(int v) {
  reserve(elementsAppended + 1);
  putInt(elementsAppended, v);
  return elementsAppended++;
}
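// Hedged usage sketch of the append path, assuming Spark's on-heap vector
// implementation (org.apache.spark.sql.execution.vectorized.OnHeapColumnVector):
// reserve() grows capacity as needed, and row ids are handed out sequentially.
WritableColumnVector col = new OnHeapColumnVector(16, DataTypes.IntegerType);
for (int v = 0; v < 100; v++) {
  int id = col.appendInt(v); // id == v here: each append writes at elementsAppended
}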
public final void putDecimal(int rowId, Decimal value, int precision) {
  if (precision <= Decimal.MAX_INT_DIGITS()) {
    putInt(rowId, (int) value.toUnscaledLong());
  } else if (precision <= Decimal.MAX_LONG_DIGITS()) {
    putLong(rowId, value.toUnscaledLong());
  } else {
    BigInteger bigInteger = value.toJavaBigDecimal().unscaledValue();
    putByteArray(rowId, bigInteger.toByteArray());
  }
}
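// Minimal sketch of the dispatch above, assuming Spark's thresholds
// Decimal.MAX_INT_DIGITS() == 9 and Decimal.MAX_LONG_DIGITS() == 18: up to
// 9 digits the unscaled value fits an int, up to 18 a long, and anything
// wider falls back to the BigInteger's two's-complement bytes.
static String decimalStorageFor(int precision) {
  if (precision <= 9) return "int";         // e.g. Decimal(7, 2)  -> putInt
  else if (precision <= 18) return "long";  // e.g. Decimal(12, 2) -> putLong
  else return "byte[]";                     // e.g. Decimal(25, 5) -> putByteArray
}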
for (int i = 0; i < n; ++i) {
  if (currentBuffer[currentBufferIdx++] == level) {
    values.putInt(rowId + i, data.readInteger());
  } else {
    nulls.putNull(rowId + i);
  }
}
for (int i = 0; i < n; ++i) {
  if (currentBuffer[currentBufferIdx++] == level) {
    c.putInt(rowId + i, data.readInteger());
  } else {
    c.putNull(rowId + i);
  }
}
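// Both loops above are the same Parquet pattern: currentBuffer holds
// definition levels, and an entry equal to the max level means a value is
// present in the data stream, while anything lower means null. Illustrative
// sketch with plain arrays standing in for the Spark reader classes:
int[] defLevels = {1, 0, 1, 1};  // max definition level == 1
int[] encoded = {10, 20, 30};    // only non-null slots consume a value
Integer[] out = new Integer[defLevels.length];
int next = 0;
for (int i = 0; i < defLevels.length; i++) {
  out[i] = (defLevels[i] == 1) ? encoded[next++] : null; // -> {10, null, 20, 30}
}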
public void putInt(int rowId, int value) { vector.putInt(rowId, value); }
public void putDictionaryInt(int rowId, int value) {
  // Stores a raw dictionary id; it is decoded to the real value in a later pass.
  vector.getDictionaryIds().putInt(rowId, value);
}
for (int i = 0; i < num; i++) {
  if (defColumn.readInteger() == maxDefLevel) {
    column.putInt(rowId + i,
        (int) ParquetRowConverter.binaryToUnscaledLong(data.readBinary(arrayLen)));
  } else {
    column.putNull(rowId + i);
  }
}
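// Hedged sketch of what ParquetRowConverter.binaryToUnscaledLong computes for
// the fixed-length bytes read above: the big-endian two's-complement integer
// they encode. A plain BigInteger version (the Spark one avoids allocation):
static long unscaledLongFrom(byte[] bigEndianBytes) {
  return new java.math.BigInteger(bigEndianBytes).longValueExact();
}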
// INT32 column: decode each dictionary id straight to an int.
for (int i = rowId; i < rowId + num; ++i) {
  if (!column.isNullAt(i)) {
    column.putInt(i, dictionary.decodeToInt(dictionaryIds.getDictId(i)));
  }
}

// 32-bit decimal column: decode the id to its binary form, then to the unscaled int.
for (int i = rowId; i < rowId + num; ++i) {
  if (!column.isNullAt(i)) {
    Binary v = dictionary.decodeToBinary(dictionaryIds.getDictId(i));
    column.putInt(i, (int) ParquetRowConverter.binaryToUnscaledLong(v));
  }
}
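// Hedged sketch of the two-phase dictionary decode these loops belong to:
// phase one writes raw dictionary ids (see putDictionaryInt above), phase two
// replaces each id with the value it refers to. Plain arrays stand in for the
// Parquet Dictionary and the Spark vectors:
int[] dict = {100, 200, 300};        // dictionary.decodeToInt(id) ~ dict[id]
int[] ids = {2, 0, 1, 2};            // what phase one wrote
int[] decoded = new int[ids.length];
for (int i = 0; i < ids.length; i++) {
  decoded[i] = dict[ids[i]];         // -> {300, 100, 200, 300}
}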
if (t instanceof CalendarIntervalType) {
  // An interval is stored as a struct of two child vectors:
  // months in an int child and microseconds in a long child.
  vector.getChildColumn(0).putInt(rowId, c.months);
  vector.getChildColumn(1).putLong(rowId, c.microseconds);
} else if (t instanceof org.apache.spark.sql.types.DateType) {
  // Dates are stored as an int: days since the Unix epoch.
  // `daysSinceEpoch` is a stand-in for the caller's day count.
  vector.putInt(rowId, daysSinceEpoch);
}
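// Hedged illustration, assuming the two-field CalendarInterval(months,
// microseconds) constructor that matches the c.months / c.microseconds
// fields above: an interval of 14 months and one second lands as
//   child 0 (int)  <- 14
//   child 1 (long) <- 1_000_000
CalendarInterval example = new CalendarInterval(14, 1_000_000L);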