/**
 * Builds a {@link MaterializedField} from its wire-level protobuf
 * representation, recursively materializing every child field.
 *
 * @param serField the serialized (protobuf) field description
 * @return a new MaterializedField mirroring {@code serField}
 */
public static MaterializedField create(SerializedField serField) {
  // LinkedHashSet keeps the children in their serialized order while
  // still de-duplicating them.
  final LinkedHashSet<MaterializedField> childSet = new LinkedHashSet<>();
  for (final SerializedField childField : serField.getChildList()) {
    childSet.add(MaterializedField.create(childField));
  }
  return new MaterializedField(
      serField.getNamePart().getName(),
      serField.getMajorType(),
      childSet);
}
@Override public void load(SerializedField metadata, DrillBuf buffer) { final List<SerializedField> children = metadata.getChildList(); final SerializedField offsetField = children.get(0); offsets.load(offsetField, buffer); int bufOffset = offsetField.getBufferLength(); for (int i = 1; i < children.size(); i++) { final SerializedField child = children.get(i); final MaterializedField fieldDef = MaterializedField.create(child); ValueVector vector = getChild(fieldDef.getName()); if (vector == null) { // if we arrive here, we didn't have a matching vector. vector = BasicTypeHelper.getNewVector(fieldDef, allocator); putChild(fieldDef.getName(), vector); } final int vectorLength = child.getBufferLength(); vector.load(child, buffer.slice(bufOffset, vectorLength)); bufOffset += vectorLength; } assert bufOffset == buffer.writerIndex(); }
/**
 * Recursively converts a wire-format {@link SerializedField} — including
 * all of its children — into a {@link MaterializedField}.
 *
 * @param serField the serialized field to materialize
 * @return the materialized equivalent of {@code serField}
 */
public static MaterializedField create(SerializedField serField) {
  // Preserve the serialized child order while de-duplicating.
  final LinkedHashSet<MaterializedField> materializedChildren = new LinkedHashSet<>();
  for (final SerializedField serializedChild : serField.getChildList()) {
    materializedChildren.add(create(serializedChild));
  }
  return new MaterializedField(serField.getNamePart().getName(),
      serField.getMajorType(), materializedChildren);
}
! isSameSchema(vector.getField().getChildren(), field.getChildList())) {
if (!isSameSchema(currentChild.getChildren(), newChild.getChildList())) { return false;
@Override public void load(SerializedField metadata, DrillBuf buf) { final List<SerializedField> fields = metadata.getChildList(); valueCount = metadata.getValueCount(); int bufOffset = 0; for (final SerializedField child : fields) { final MaterializedField fieldDef = MaterializedField.create(child); ValueVector vector = getChild(fieldDef.getName()); if (vector == null) { // if we arrive here, we didn't have a matching vector. vector = BasicTypeHelper.getNewVector(fieldDef, allocator); putChild(fieldDef.getName(), vector); } if (child.getValueCount() == 0) { vector.clear(); } else { vector.load(child, buf.slice(bufOffset, child.getBufferLength())); } bufOffset += child.getBufferLength(); } // We should have consumed all bytes written into the buffer // during deserialization. assert bufOffset == buf.writerIndex(); }
@Override public void load(SerializedField metadata, DrillBuf buffer) { final List<SerializedField> children = metadata.getChildList(); final SerializedField offsetField = children.get(0); offsets.load(offsetField, buffer); int bufOffset = offsetField.getBufferLength(); for (int i = 1; i < children.size(); i++) { final SerializedField child = children.get(i); final MaterializedField fieldDef = MaterializedField.create(child); ValueVector vector = getChild(fieldDef.getName()); if (vector == null) { // if we arrive here, we didn't have a matching vector. vector = BasicTypeHelper.getNewVector(fieldDef, allocator); putChild(fieldDef.getName(), vector); } final int vectorLength = child.getBufferLength(); vector.load(child, buffer.slice(bufOffset, vectorLength)); bufOffset += vectorLength; } assert bufOffset == buffer.writerIndex(); }
private UserBitShared.SerializedField createDummyField(UserBitShared.SerializedField field) { UserBitShared.SerializedField.Builder newDummyFieldBuilder = UserBitShared.SerializedField.newBuilder() .setVarByteLength(0) .setBufferLength(0) .setValueCount(0) .setNamePart(field.getNamePart()) .setMajorType(field.getMajorType()); int index = 0; for (UserBitShared.SerializedField childField : field.getChildList()) { // make sure we make a copy of all children, so we do not corrupt the // original fieldList. This will recursively call itself. newDummyFieldBuilder.addChild(index, createDummyField(childField)); index++; } UserBitShared.SerializedField newDummyField = newDummyFieldBuilder.build(); return newDummyField; }
/**
 * Protostuff serializer for {@code UserBitShared.SerializedField}: writes
 * every field of {@code message} that is set to {@code output}, using the
 * protobuf tag number of each field. Generated-style code; avoid hand-editing
 * the write sequence.
 */
public void writeTo(com.dyuproject.protostuff.Output output,
    org.apache.drill.exec.proto.UserBitShared.SerializedField message) throws java.io.IOException
{
  // Optional sub-messages are written only when present on the message.
  if(message.hasMajorType())
    output.writeObject(1, message.getMajorType(), org.apache.drill.common.types.SchemaTypeProtos.MajorType.WRITE, false);

  if(message.hasNamePart())
    output.writeObject(2, message.getNamePart(), org.apache.drill.exec.proto.SchemaUserBitShared.NamePart.WRITE, false);

  // Repeated child fields; the trailing 'true' marks a repeated write.
  for(org.apache.drill.exec.proto.UserBitShared.SerializedField child : message.getChildList())
    output.writeObject(3, child, org.apache.drill.exec.proto.SchemaUserBitShared.SerializedField.WRITE, true);

  if(message.hasValueCount())
    output.writeInt32(4, message.getValueCount(), false);
  if(message.hasVarByteLength())
    output.writeInt32(5, message.getVarByteLength(), false);
  // NOTE(review): tag 6 is skipped here — presumably reserved/removed in the
  // .proto definition; bufferLength is tag 7. Confirm against UserBitShared.proto.
  if(message.hasBufferLength())
    output.writeInt32(7, message.getBufferLength(), false);
}
// Next method's signature only; its body is outside this view.
public boolean isInitialized(org.apache.drill.exec.proto.UserBitShared.SerializedField message)
@Override public void load(SerializedField metadata, DrillBuf buf) { final List<SerializedField> fields = metadata.getChildList(); valueCount = metadata.getValueCount(); int bufOffset = 0; for (final SerializedField child : fields) { final MaterializedField fieldDef = MaterializedField.create(child); ValueVector vector = getChild(fieldDef.getName()); if (vector == null) { // if we arrive here, we didn't have a matching vector. vector = BasicTypeHelper.getNewVector(fieldDef, allocator); putChild(fieldDef.getName(), vector); } if (child.getValueCount() == 0) { vector.clear(); } else { vector.load(child, buf.slice(bufOffset, child.getBufferLength())); } bufOffset += child.getBufferLength(); } // We should have consumed all bytes written into the buffer // during deserialization. assert bufOffset == buf.writerIndex(); }
/**
 * Serializes a {@code UserBitShared.SerializedField} to a protostuff
 * {@code Output}, emitting each set field under its protobuf tag number.
 * Generated-style code; avoid hand-editing the write sequence.
 */
public void writeTo(com.dyuproject.protostuff.Output output,
    org.apache.drill.exec.proto.UserBitShared.SerializedField message) throws java.io.IOException
{
  // Optional sub-messages: written only when the has-flag is set.
  if(message.hasMajorType())
    output.writeObject(1, message.getMajorType(), org.apache.drill.common.types.SchemaTypeProtos.MajorType.WRITE, false);

  if(message.hasNamePart())
    output.writeObject(2, message.getNamePart(), org.apache.drill.exec.proto.SchemaUserBitShared.NamePart.WRITE, false);

  // Repeated children; the final 'true' argument flags a repeated write.
  for(org.apache.drill.exec.proto.UserBitShared.SerializedField child : message.getChildList())
    output.writeObject(3, child, org.apache.drill.exec.proto.SchemaUserBitShared.SerializedField.WRITE, true);

  if(message.hasValueCount())
    output.writeInt32(4, message.getValueCount(), false);
  if(message.hasVarByteLength())
    output.writeInt32(5, message.getVarByteLength(), false);
  // NOTE(review): tag 6 is not emitted — presumably reserved/removed in the
  // .proto schema; bufferLength carries tag 7. Verify against UserBitShared.proto.
  if(message.hasBufferLength())
    output.writeInt32(7, message.getBufferLength(), false);
}
// Next method's signature only; its body is outside this view.
public boolean isInitialized(org.apache.drill.exec.proto.UserBitShared.SerializedField message)