public Builder mergeFrom(org.apache.drill.exec.proto.UserBitShared.SerializedField other) {
  if (other == org.apache.drill.exec.proto.UserBitShared.SerializedField.getDefaultInstance()) {
    return this;
  }
  if (other.hasMajorType()) {
    mergeMajorType(other.getMajorType());
  }
  if (other.hasNamePart()) {
    mergeNamePart(other.getNamePart());
  }
  if (other.hasValueCount()) {
    setValueCount(other.getValueCount());
  }
  if (other.hasVarByteLength()) {
    setVarByteLength(other.getVarByteLength());
  }
  if (other.hasBufferLength()) {
    setBufferLength(other.getBufferLength());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
@Override
public void load(SerializedField metadata, DrillBuf buffer) {
  final List<SerializedField> children = metadata.getChildList();
  final SerializedField offsetField = children.get(0);
  offsets.load(offsetField, buffer);
  int bufOffset = offsetField.getBufferLength();
  for (int i = 1; i < children.size(); i++) {
    final SerializedField child = children.get(i);
    final MaterializedField fieldDef = MaterializedField.create(child);
    ValueVector vector = getChild(fieldDef.getName());
    if (vector == null) {
      // if we arrive here, we didn't have a matching vector.
      vector = BasicTypeHelper.getNewVector(fieldDef, allocator);
      putChild(fieldDef.getName(), vector);
    }
    final int vectorLength = child.getBufferLength();
    vector.load(child, buffer.slice(bufOffset, vectorLength));
    bufOffset += vectorLength;
  }
  assert bufOffset == buffer.writerIndex();
}
public org.apache.drill.exec.proto.UserBitShared.SerializedField buildPartial() {
  org.apache.drill.exec.proto.UserBitShared.SerializedField result =
      new org.apache.drill.exec.proto.UserBitShared.SerializedField(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
    !isSameSchema(vector.getField().getChildren(), field.getChildList())) {
} else if (field.getValueCount() == 0) {
  AllocationHelper.allocate(vector, 0, 0, 0);
} else {
  vector.load(field, buf.slice(bufOffset, field.getBufferLength()));
  bufOffset += field.getBufferLength();
  newVectors.add(vector);
final MajorType majorType = field.getMajorType();
final MinorType minorType = majorType.getMinorType();
builder.setColumnName(field.getNamePart().getName());
builder.setLabel(field.getNamePart().getName());
builder.setPrecision(Types.getPrecision(field.getMajorType()));
final Map<String, String> record = Maps.newHashMap();
for (VectorWrapper<?> vw : loader) {
  final String field = vw.getValueVector().getMetadata().getNamePart().getName();
  final TypeProtos.MinorType fieldMinorType = vw.getValueVector().getMetadata().getMajorType().getMinorType();
  final Accessor accessor = vw.getValueVector().getAccessor();
  final Object value = i < accessor.getValueCount() ? accessor.getObject(i) : null;
MaterializedField currentChild = childMap.get(newChild.getNamePart().getName());
if (!currentChild.getType().equals(newChild.getMajorType())) {
  return false;
}
if (currentChild.getChildren().size() != newChild.getChildCount()) {
  return false;
}
if (!isSameSchema(currentChild.getChildren(), newChild.getChildList())) {
  return false;
}
builder.addChild(input.mergeObject(
    org.apache.drill.exec.proto.UserBitShared.SerializedField.newBuilder(),
    org.apache.drill.exec.proto.SchemaUserBitShared.SerializedField.MERGE));
public void mergeFrom(com.dyuproject.protostuff.Input input,
    org.apache.drill.exec.proto.UserBitShared.RecordBatchDef.Builder builder) throws java.io.IOException {
  for (int number = input.readFieldNumber(this);; number = input.readFieldNumber(this)) {
    switch (number) {
      case 0:
        return;
      case 1:
        builder.setRecordCount(input.readInt32());
        break;
      case 2:
        builder.addField(input.mergeObject(
            org.apache.drill.exec.proto.UserBitShared.SerializedField.newBuilder(),
            org.apache.drill.exec.proto.SchemaUserBitShared.SerializedField.MERGE));
        break;
      case 3:
        builder.setCarriesTwoByteSelectionVector(input.readBool());
        break;
      case 4:
        builder.setAffectedRowsCount(input.readInt32());
        break;
      default:
        input.handleUnknownField(number, this);
    }
  }
}

public boolean isInitialized(org.apache.drill.exec.proto.UserBitShared.RecordBatchDef.Builder builder)
@Override
public void load(SerializedField metadata, DrillBuf buf) {
  final List<SerializedField> fields = metadata.getChildList();
  valueCount = metadata.getValueCount();
  int bufOffset = 0;
  for (final SerializedField child : fields) {
    final MaterializedField fieldDef = MaterializedField.create(child);
    ValueVector vector = getChild(fieldDef.getName());
    if (vector == null) {
      // if we arrive here, we didn't have a matching vector.
      vector = BasicTypeHelper.getNewVector(fieldDef, allocator);
      putChild(fieldDef.getName(), vector);
    }
    if (child.getValueCount() == 0) {
      vector.clear();
    } else {
      vector.load(child, buf.slice(bufOffset, child.getBufferLength()));
    }
    bufOffset += child.getBufferLength();
  }
  // We should have consumed all bytes written into the buffer
  // during deserialization.
  assert bufOffset == buf.writerIndex();
}
private UserBitShared.SerializedField createDummyField(UserBitShared.SerializedField field) {
  UserBitShared.SerializedField.Builder newDummyFieldBuilder = UserBitShared.SerializedField.newBuilder()
      .setVarByteLength(0)
      .setBufferLength(0)
      .setValueCount(0)
      .setNamePart(field.getNamePart())
      .setMajorType(field.getMajorType());
  int index = 0;
  for (UserBitShared.SerializedField childField : field.getChildList()) {
    // make sure we make a copy of all children, so we do not corrupt the
    // original fieldList. This will recursively call itself.
    newDummyFieldBuilder.addChild(index, createDummyField(childField));
    index++;
  }
  UserBitShared.SerializedField newDummyField = newDummyFieldBuilder.build();
  return newDummyField;
}
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder(org.apache.drill.exec.proto.UserBitShared.SerializedField prototype) {
public Builder toBuilder() { return newBuilder(this); }
@Override
public void load(UserBitShared.SerializedField metadata, DrillBuf buffer) {
  final UserBitShared.SerializedField offsetMetadata = metadata.getChild(0);
  offsets.load(offsetMetadata, buffer);
  final int offsetLength = offsetMetadata.getBufferLength();

  final UserBitShared.SerializedField bitMetadata = metadata.getChild(1);
  final int bitLength = bitMetadata.getBufferLength();
  bits.load(bitMetadata, buffer.slice(offsetLength, bitLength));

  final UserBitShared.SerializedField vectorMetadata = metadata.getChild(2);
  if (isEmptyType()) {
    addOrGetVector(VectorDescriptor.create(vectorMetadata.getMajorType()));
  }
  final int vectorLength = vectorMetadata.getBufferLength();
  vector.load(vectorMetadata, buffer.slice(offsetLength + bitLength, vectorLength));
}
@Override
public void load(SerializedField metadata, DrillBuf buffer) {
  Preconditions.checkArgument(this.field.getName().equals(metadata.getNamePart().getName()),
      "The field %s doesn't match the provided metadata %s.", this.field, metadata);
  final int actualLength = metadata.getBufferLength();
  final int valueCount = metadata.getValueCount();
  final int expectedLength = valueCount * VALUE_WIDTH;
  assert actualLength == expectedLength :
      String.format("Expected to load %d bytes but actually loaded %d bytes", expectedLength, actualLength);

  clear();
  if (data != null) {
    data.release(1);
  }
  data = buffer.slice(0, actualLength);
  data.retain(1);
  data.writerIndex(actualLength);
}
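All of the load() implementations above share one contract: a SerializedField carries the value count, buffer length, and (for composite vectors) child metadata, and each vector consumes exactly getBufferLength() bytes of the batch buffer. The following is a minimal caller-side sketch of that contract, not code from Drill itself; the names serializedField, batchBuffer, allocator, and bufOffset are assumed inputs standing in for an already-deserialized field description, its backing DrillBuf, and the operator's BufferAllocator.

// Hedged sketch only: rebuild one vector from its serialized metadata.
// serializedField, batchBuffer, allocator, and bufOffset are hypothetical
// caller-side variables, not part of the snippets above.
final MaterializedField fieldDef = MaterializedField.create(serializedField);
final ValueVector vector = BasicTypeHelper.getNewVector(fieldDef, allocator);

// load() reads exactly getBufferLength() bytes, so the caller slices the
// batch buffer and advances a running offset, mirroring the composite
// load() implementations shown earlier.
vector.load(serializedField, batchBuffer.slice(bufOffset, serializedField.getBufferLength()));
bufOffset += serializedField.getBufferLength();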