// Hands ownership of every configured buffer pair to its destination vector.
void transferOut() {
  for (final TransferPair pair : transferPairList) {
    pair.transfer();
  }
}
// Moves buffer ownership for each registered transfer pair.
private void doTransfers() {
  for (final TransferPair pair : tx) {
    pair.transfer();
  }
}
// Moves this vector's buffers into `to`, leaving this vector empty.
// NOTE(review): ordering matters — the destination is cleared before any
// ownership is handed over, and this vector is cleared only at the end.
@Override public void transfer() {
  // Release whatever buffers the destination currently holds.
  to.clear();
  // Hand ownership of the type buffer to the destination's allocator.
  to.typeBuffer = typeBuffer.transferOwnership(to.allocator).buffer;
  // Delegate transfer of the internal struct-backed data.
  internalStructVectorTransferPair.transfer();
  to.valueCount = valueCount;
  // Source no longer owns any buffers.
  clear();
}
// Emits the selected batch by transferring buffer ownership for its pair.
@Override
public void output(int batchIndex) {
  final TransferPair pair = pairs[batchIndex];
  pair.transfer();
}
private void transferOut() { // transfer intermediate outputs to the ValueVectorReadExpression // for other splits to read the output for(ExpressionSplit split : Iterables.concat(javaSplits, gandivaSplits)) { split.transferOut(); } // transfer final output for(TransferPair tp : transferPairs) { tp.transfer(); } }
// Emits `length` records by transferring (not copying) every column's buffers.
private void transferAll(int length) {
  recordsLeft -= length;
  recordCount = length;
  for (final TransferPair pair : transfers) {
    pair.transfer();
  }
  outgoing.setRecordCount(recordCount);
  state = State.CAN_PRODUCE;
}
// Takes ownership of the left-side input buffers and marks us producible.
@Override
public void consumeDataLeft(int records) throws Exception {
  state.is(State.CAN_CONSUME_L);
  for (final TransferPair pair : leftTransfers) {
    pair.transfer();
  }
  recordCount = records;
  state = State.CAN_PRODUCE;
}
// Copies records by transferring whole-batch ownership; this copier never
// handles a partial batch.
@Override
public int copyRecords(int index, int recordCount) {
  assert index == 0 && recordCount == incoming.getRecordCount()
      : "Straight copier cannot split batch";
  for (final TransferPair pair : pairs) {
    pair.transfer();
  }
  return recordCount;
}
// Emits `length` records when only part of the remaining input fits in
// this output batch; uses setAllCount to size every vector.
private void transferPartial(int length) {
  recordsLeft -= length;
  recordCount = length;
  for (final TransferPair pair : transfers) {
    pair.transfer();
  }
  outgoing.setAllCount(recordCount);
  state = State.CAN_PRODUCE;
}
// Takes ownership of the right-side input buffers and marks us producible.
@Override
public void consumeDataRight(int records) throws Exception {
  state.is(State.CAN_CONSUME_R);
  for (final TransferPair pair : rightTransfers) {
    pair.transfer();
  }
  recordCount = records;
  state = State.CAN_PRODUCE;
}
// Moves each child's buffers to the destination, propagates the value
// count, then resets the source vector.
@Override
public void transfer() {
  for (final TransferPair child : pairs) {
    child.transfer();
  }
  to.valueCount = from.valueCount;
  from.clear();
}
// Promotes the on-deck batch: moves its buffers into the at-bat container
// and carries the record count across.
private void transitionOnDeckToAtBat() {
  for (final TransferPair pair : onDeckAtBatTransfers) {
    pair.transfer();
  }
  atBatInput.setRecordCount(onDeckInput.getRecordCount());
}
/**
 * Transfer vectors to destination HyperVectorWrapper.
 * Both this and destination must be of same type and have same number of vectors.
 *
 * @param destination destination HyperVectorWrapper.
 * @throws IllegalArgumentException if destination is not a HyperVectorWrapper, has a
 *         different field type, or holds a different number of vectors
 */
@Override
public void transfer(VectorWrapper<?> destination) {
  Preconditions.checkArgument(destination instanceof HyperVectorWrapper);
  Preconditions.checkArgument(getField().getType().equals(destination.getField().getType()));
  Preconditions.checkArgument(vectors.size() == ((HyperVectorWrapper<?>) destination).vectors.size());
  // Fixed misspelled local ("destionationVectors"); cast is safe because the
  // checks above guarantee a matching HyperVectorWrapper.
  @SuppressWarnings("unchecked")
  final List<ValueVector> destinationVectors =
      (List<ValueVector>) (((HyperVectorWrapper<?>) destination).vectors);
  for (int i = 0; i < vectors.size(); ++i) {
    // Pairwise transfer: vector i of this wrapper moves into vector i of the destination.
    vectors.get(i).makeTransferPair(destinationVectors.get(i)).transfer();
  }
}
}
// Moves this wrapper's vector buffers into the destination wrapper's vector.
// The destination must be a SimpleVectorWrapper of the same field type.
@Override
public void transfer(VectorWrapper<?> destination) {
  Preconditions.checkArgument(destination instanceof SimpleVectorWrapper);
  Preconditions.checkArgument(getField().getType().equals(destination.getField().getType()));
  final SimpleVectorWrapper<?> target = (SimpleVectorWrapper<?>) destination;
  vector.makeTransferPair(target.vector).transfer();
}
// Moves this vector's buffers into `to`, leaving this vector empty.
// NOTE(review): ordering matters — the destination is cleared first, and
// this vector releases its buffers only after everything has moved.
@Override public void transfer() {
  // Drop whatever buffers the destination currently holds.
  to.clear();
  // Move the data buffers via the child transfer pair.
  dataPair.transfer();
  // Hand ownership of the validity (null-tracking) buffer to the
  // destination's allocator.
  to.validityBuffer = validityBuffer.transferOwnership(to.allocator).buffer;
  to.setValueCount(valueCount);
  // Source vector releases its buffers.
  clear();
}
/**
 * Adds a child vector keyed by {@code v}'s minor-type name to the internal struct
 * and moves {@code v}'s buffers into the newly created child.
 *
 * @param v source vector whose buffers are transferred into the new child
 * @return the child vector now owning the data
 * @throws IllegalStateException if a child for this minor type already exists
 */
public FieldVector addVector(FieldVector v) {
  // Locale.ROOT: enum names contain 'I'; default-locale lowercasing (e.g. under a
  // Turkish locale) would yield a dotless-i and break child-name lookups.
  final String name = v.getMinorType().name().toLowerCase(java.util.Locale.ROOT);
  // Pass the template to checkState directly so formatting only happens on failure,
  // instead of eagerly building the message with String.format on every call.
  Preconditions.checkState(internalStruct.getChild(name) == null, "%s vector already exists", name);
  final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass());
  v.makeTransferPair(newVector).transfer();
  internalStruct.putChild(name, newVector);
  // Notify the schema-change listener, if one is registered.
  if (callBack != null) {
    callBack.doWork();
  }
  return newVector;
}
/**
 * Moves this wrapper's buffers into a new vector owned by the given allocator
 * and wraps it.
 *
 * @param allocator allocator that will own the transferred buffers
 * @param callback schema-change callback passed to the transfer pair
 * @return a new wrapper around the destination vector
 */
@SuppressWarnings("unchecked")
@Override
public VectorWrapper<T> cloneAndTransfer(BufferAllocator allocator, CallBack callback) {
  // Removed a no-op catch (RuntimeException ex) { throw ex; } that only
  // obscured the control flow; unchecked exceptions propagate regardless.
  final TransferPair tp = vector.getTransferPair(vector.getField().getName(), allocator, callback);
  tp.transfer();
  return new SimpleVectorWrapper<T>((T) tp.getTo());
}
private VectorContainer transferBatch(VectorAccessible batch) { @SuppressWarnings("resource") // TODO better way to write this? VectorContainer container = new VectorContainer(); final List<ValueVector> vectors = Lists.newArrayList(); for (VectorWrapper<?> v : batch) { if (v.isHyper()) { throw new UnsupportedOperationException("Record batch data can't be created based on a hyper batch."); } TransferPair tp = v.getValueVector().getTransferPair(allocator); tp.transfer(); vectors.add(tp.getTo()); } container.addCollection(vectors); container.setRecordCount(batch.getRecordCount()); container.buildSchema(SelectionVectorMode.NONE); return container; }
@Override public int outputData() { final int count = incoming.getRecordCount(); if (randomVector == null) { // if nothing is projected, just set the count and return } else if (straightCopy || count == randomVector.getValueCount()){ for(TransferPair tp : transferPairs){ tp.transfer(); } } else { final long addr = sv2.memoryAddress(); for (FieldBufferCopier copier : copiers) { copier.copy(addr, count); } } state = State.CAN_CONSUME; output.setAllCount(count); return count; }
// Converts the incoming batch to its output form and hands the converted
// buffers to the outgoing vectors.
@Override
public int outputData() throws Exception {
  state.is(State.CAN_PRODUCE);
  final int records = incoming.getRecordCount();
  // Materialize each converted column for this batch.
  for (final JsonConverter<?> converter : converters) {
    converter.convert(records);
  }
  // Move the converted buffers to the outgoing vectors and size them.
  for (final TransferPair pair : transfers) {
    pair.transfer();
    pair.getTo().setValueCount(records);
  }
  outgoing.setRecordCount(records);
  state = State.CAN_CONSUME;
  return records;
}