/**
 * Factory hook used by the parent vector when a new child column is needed;
 * always produces an on-heap vector so the whole tree stays in one memory mode.
 */
@Override
protected OnHeapColumnVector reserveNewColumn(int capacity, DataType type) {
  OnHeapColumnVector column = new OnHeapColumnVector(capacity, type);
  return column;
}
}
/**
 * Creates an on-heap vector able to hold {@code capacity} elements of {@code type}.
 * Backing arrays are allocated via {@code reserveInternal} before {@code reset()}
 * clears per-batch state (null counts, offsets) so the vector starts empty.
 */
public OnHeapColumnVector(int capacity, DataType type) { super(capacity, type); reserveInternal(capacity); reset(); }
/**
 * Copies {@code length} bytes of {@code value} starting at {@code offset} into the
 * child data vector, then records the resulting (offset, length) pair for
 * {@code rowId}. Returns the start position of the bytes within the child vector.
 */
@Override
public int putByteArray(int rowId, byte[] value, int offset, int length) {
  int startOffset = arrayData().appendBytes(length, value, offset);
  arrayLengths[rowId] = length;
  arrayOffsets[rowId] = startOffset;
  return startOffset;
}
columnVectors = OffHeapColumnVector.allocateColumns(capacity, resultSchema); } else { columnVectors = OnHeapColumnVector.allocateColumns(capacity, resultSchema); OnHeapColumnVector missingCol = new OnHeapColumnVector(capacity, dt); missingCol.putNulls(0, capacity); missingCol.setIsConstant(); orcVectorWrappers[i] = missingCol; } else { for (int i = 0; i < partitionValues.numFields(); i++) { DataType dt = partitionSchema.fields()[i].dataType(); OnHeapColumnVector partitionCol = new OnHeapColumnVector(capacity, dt); ColumnVectorUtils.populate(partitionCol, partitionValues, i); partitionCol.setIsConstant(); orcVectorWrappers[partitionIdx + i] = partitionCol;
/**
 * Allocates one on-heap column vector per field of {@code schema}.
 * {@code capacity} is the initial element count (not a byte size) for every
 * column; each vector grows on demand as rows are appended.
 */
public static OnHeapColumnVector[] allocateColumns(int capacity, StructType schema) {
  return allocateColumns(capacity, schema.fields());
}
/**
 * Marks {@code count} rows starting at {@code rowId} as non-null.
 * Skips the work entirely when the vector has never recorded a null,
 * since the null bitmap is already all zeroes in that case.
 */
@Override
public void putNotNulls(int rowId, int count) {
  if (!hasNull()) return;
  int end = rowId + count;
  for (int r = rowId; r < end; r++) {
    nulls[r] = (byte) 0;
  }
}
/**
 * Checks whether the key stored in bucket {@code idx} matches {@code key1}.
 * Only the single long key column (index 0) is consulted, per this map's
 * (long key, long value) restriction.
 */
private boolean equals(int idx, long key1) {
  int row = buckets[idx];
  return key1 == columnVectors[0].getLong(row);
}
}
@Override protected void reserveInternal(int newCapacity) { if (isArray() || type instanceof MapType) { int[] newLengths = new int[newCapacity]; int[] newOffsets = new int[newCapacity];
columnVectors = OffHeapColumnVector.allocateColumns(capacity, resultSchema); } else { columnVectors = OnHeapColumnVector.allocateColumns(capacity, resultSchema); OnHeapColumnVector missingCol = new OnHeapColumnVector(capacity, dt); missingCol.putNulls(0, capacity); missingCol.setIsConstant(); orcVectorWrappers[i] = missingCol; } else { for (int i = 0; i < partitionValues.numFields(); i++) { DataType dt = partitionSchema.fields()[i].dataType(); OnHeapColumnVector partitionCol = new OnHeapColumnVector(capacity, dt); ColumnVectorUtils.populate(partitionCol, partitionValues, i); partitionCol.setIsConstant(); orcVectorWrappers[partitionIdx + i] = partitionCol;
/**
 * Allocates one on-heap column vector per field of {@code schema}.
 * {@code capacity} is the initial element count (not a byte size) for every
 * column; each vector grows on demand as rows are appended.
 */
public static OnHeapColumnVector[] allocateColumns(int capacity, StructType schema) {
  return allocateColumns(capacity, schema.fields());
}
/**
 * Marks {@code count} rows starting at {@code rowId} as non-null.
 * Skips the work entirely when the vector has never recorded a null,
 * since the null bitmap is already all zeroes in that case.
 */
@Override
public void putNotNulls(int rowId, int count) {
  if (!hasNull()) return;
  int end = rowId + count;
  for (int r = rowId; r < end; r++) {
    nulls[r] = (byte) 0;
  }
}
/**
 * Checks whether the key stored in bucket {@code idx} matches {@code key1}.
 * Only the single long key column (index 0) is consulted, per this map's
 * (long key, long value) restriction.
 */
private boolean equals(int idx, long key1) {
  int row = buckets[idx];
  return key1 == columnVectors[0].getLong(row);
}
}
@Override protected void reserveInternal(int newCapacity) { if (isArray() || type instanceof MapType) { int[] newLengths = new int[newCapacity]; int[] newOffsets = new int[newCapacity];
/**
 * Creates an on-heap vector able to hold {@code capacity} elements of {@code type}.
 * Backing arrays are allocated via {@code reserveInternal} before {@code reset()}
 * clears per-batch state (null counts, offsets) so the vector starts empty.
 */
public OnHeapColumnVector(int capacity, DataType type) { super(capacity, type); reserveInternal(capacity); reset(); }
/**
 * Factory hook used by the parent vector when a new child column is needed;
 * always produces an on-heap vector so the whole tree stays in one memory mode.
 */
@Override
protected OnHeapColumnVector reserveNewColumn(int capacity, DataType type) {
  OnHeapColumnVector column = new OnHeapColumnVector(capacity, type);
  return column;
}
}
/**
 * Copies {@code length} bytes of {@code value} starting at {@code offset} into the
 * child data vector, then records the resulting (offset, length) pair for
 * {@code rowId}. Returns the start position of the bytes within the child vector.
 */
@Override
public int putByteArray(int rowId, byte[] value, int offset, int length) {
  int startOffset = arrayData().appendBytes(length, value, offset);
  arrayLengths[rowId] = length;
  arrayOffsets[rowId] = startOffset;
  return startOffset;
}
/**
 * Builds a columnar hash map for single long-key / long-value aggregation.
 *
 * Preconditions (enforced by asserts): the schema is exactly two LongType
 * fields, and {@code capacity} is a positive power of two (so probing can use
 * bit-masking elsewhere in this class).
 *
 * The bucket array is sized to {@code capacity / loadFactor} and every slot is
 * initialized to -1, the sentinel meaning "empty bucket". {@code maxSteps}
 * bounds the linear-probe length on lookups/inserts.
 */
public AggregateHashMap(StructType schema, int capacity, double loadFactor, int maxSteps) { // We currently only support single key-value pair that are both longs assert (schema.size() == 2 && schema.fields()[0].dataType() == LongType && schema.fields()[1].dataType() == LongType); // capacity should be a power of 2 assert (capacity > 0 && ((capacity & (capacity - 1)) == 0)); this.maxSteps = maxSteps; numBuckets = (int) (capacity / loadFactor); columnVectors = OnHeapColumnVector.allocateColumns(capacity, schema); aggBufferRow = new MutableColumnarRow(columnVectors); buckets = new int[numBuckets]; Arrays.fill(buckets, -1); }
/**
 * Creates an on-heap vector for {@code capacity} elements of {@code type},
 * registering itself with the parent as {@link MemoryMode#ON_HEAP}.
 * Backing arrays are allocated via {@code reserveInternal} before {@code reset()}
 * clears per-batch state so the vector starts empty.
 */
protected OnHeapColumnVector(int capacity, DataType type) { super(capacity, type, MemoryMode.ON_HEAP); reserveInternal(capacity); reset(); }