@Override
final long getLong(int rowId) {
  // Straight delegation: read the 64-bit value at rowId from the backing accessor.
  final long value = accessor.get(rowId);
  return value;
} }
assertEquals(max.intOut[i], ac2New.getObject(i)); assertEquals(min.intOut[i], ac3New.getObject(i)); assertEquals(counts[i], ac4New.get(i)); assertEquals(counts1[i], ac5New.get(i)); assertEquals(counts[i], ac9New.get(i)); assertEquals(counts[i], ac13New.get(i)); assertEquals(counts[i], ac17New.get(i));
@Override
final long getLong(int rowId) {
  // Thin wrapper over the accessor; no caching or transformation of the value.
  return accessor.get(rowId);
} }
@Override
public long getLong(int index) {
  // Forward the read to the underlying accessor at the requested index.
  final long result = ac.get(index);
  return result;
}
public void getFieldById(int fieldId, NullableBigIntHolder holder) {
  // Resolve the field's vector, then fill the holder (value + null flag)
  // from the row at the current cursor position.
  final BigIntVector fieldVector = (BigIntVector) vectors[fieldId];
  fieldVector.get(currentIndex, holder);
}
public void getFieldById(int fieldId, BigIntHolder holder) {
  // BUG FIX: the previous implementation called get(currentIndex) and discarded the
  // returned long, so the caller's holder was never populated. Store the value into
  // the holder, mirroring the NullableBigIntHolder overload of this method.
  holder.value = ((BigIntVector) vectors[fieldId]).get(currentIndex);
}
public void read(NullableBigIntHolder h) {
  // Copy the value and null indicator at the current position into the holder.
  final int position = idx();
  vector.get(position, h);
}
break; case BIGINT: generator.writeNumber(BigIntVector.get(buffer, index)); break; case FLOAT4:
private void decodeTimestamp(IntVector input, TimeStampMilliVector output, BigIntVector dictionary) { // dates are stored as int32 in parquet dictionaries if (hasSv2) { final SelectionVector2 sv2 = incoming.getSelectionVector2(); for (int i = 0; i < recordsConsumedCurrentBatch; ++i) { final int svIndex = sv2.getIndex(i); if (input.isNull(svIndex)) { output.setNull(svIndex); } else { final int id = input.get(svIndex); output.setSafe(svIndex, dictionary.get(id)); } } } else { for (int i = 0; i < recordsConsumedCurrentBatch; ++i) { if (input.isNull(i)) { output.setNull(i); } else { int id = input.get(i); output.setSafe(i, dictionary.get(id)); } } } }
/**
 * Builds a sorted, de-duplicated global dictionary of long values for the given
 * column by merging every page dictionary (and the existing global dictionary,
 * if one was previously built) into a single BigIntVector container.
 *
 * @param dictionaries     per-page parquet dictionaries to merge
 * @param existingDict     previously built global dictionary, or null
 * @param columnDescriptor column whose path names the output field
 * @param bufferAllocator  allocator for the result container
 * @return a VectorContainer holding the merged, sorted values
 */
private static VectorContainer buildLongGlobalDictionary(List<Dictionary> dictionaries, VectorContainer existingDict, ColumnDescriptor columnDescriptor, BufferAllocator bufferAllocator) {
  // Collect every value across all page dictionaries into a sorted, de-duplicated set.
  final SortedSet<Long> mergedValues = Sets.newTreeSet();
  for (Dictionary pageDictionary : dictionaries) {
    for (int id = 0; id <= pageDictionary.getMaxId(); ++id) {
      mergedValues.add(pageDictionary.decodeToLong(id));
    }
  }
  // Fold in the values of a previously built global dictionary, if any.
  if (existingDict != null) {
    final BigIntVector existingValues = existingDict.getValueAccessorById(BigIntVector.class, 0).getValueVector();
    for (int i = 0; i < existingDict.getRecordCount(); ++i) {
      mergedValues.add(existingValues.get(i));
    }
  }
  // Materialize the merged values into a fresh 64-bit signed-int vector.
  final Field field = new Field(SchemaPath.getCompoundPath(columnDescriptor.getPath()).getAsUnescapedPath(), true, new ArrowType.Int(64, true), null);
  final VectorContainer result = new VectorContainer(bufferAllocator);
  final BigIntVector longVector = result.addOrGet(field);
  longVector.allocateNew();
  int recordCount = 0;
  for (long value : mergedValues) {
    longVector.setSafe(recordCount++, value);
  }
  longVector.setValueCount(recordCount);
  result.setRecordCount(recordCount);
  result.buildSchema(BatchSchema.SelectionVectorMode.NONE);
  return result;
}
final BigIntVector longVector = vectorContainer.getValueAccessorById(BigIntVector.class, 0).getValueVector(); for (int i = 0; i < vectorContainer.getRecordCount(); ++i) { valueLookup.put(longVector.get(i), i);
/**
 * Runs a COUNT(*) query over the test data file and verifies the returned count.
 * Every batch is released, and the loader cleared, whether or not it carried rows.
 */
@Test
public void testRead() throws Exception {
  List<QueryDataBatch> results = testSqlWithResults(
      String.format("SELECT count(*) FROM dfs.\"%s\"", dataFile.getPath()));
  RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
  for (QueryDataBatch batch : results) {
    try {
      batchLoader.load(batch.getHeader().getDef(), batch.getData());
      if (batchLoader.getRecordCount() <= 0) {
        continue; // empty batch — nothing to verify
      }
      BigIntVector countV = batchLoader.getValueAccessorById(BigIntVector.class, 0).getValueVector();
      // assertEquals reports expected vs actual on failure, unlike assertTrue(a == b).
      assertEquals("Total of " + NUM_RECORDS + " records expected in count", NUM_RECORDS, countV.get(0));
      batchLoader.clear();
    } finally {
      // BUG FIX: the original only released non-empty batches — the `continue` for an
      // empty batch skipped batch.release(), leaking its buffers.
      batch.release();
    }
  }
}
recordsWritten += recordWrittenV.get(i);
previous2 = current2; partPrevious = partCurrent; current1 = vv1.get(i); current2 = vv2.get(i); partCurrent = pVector.get(i);