public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Recover whatever was successfully parsed before the failure so it can still be merged.
    parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto) e.getUnfinishedMessage();
    throw e;
  } finally {
    // Runs before the rethrow above: even a partially parsed message is merged in.
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}

// Tracks which optional fields have been explicitly set on this builder.
private int bitField0_;
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto) {
    return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Fragments from the wire-parsing path: when a singular message field that is
// already set is encountered again on the wire, the newly read value is merged
// into the prior one via a sub-builder rather than overwriting it.
subBuilder.mergeFrom(mergedInputDescriptor_);
mergedInputDescriptor_ = subBuilder.buildPartial();

subBuilder.mergeFrom(processorDescriptor_);
processorDescriptor_ = subBuilder.buildPartial();

subBuilder.mergeFrom(ioDescriptor_);
ioDescriptor_ = subBuilder.buildPartial();
/**
 * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
 */
public Builder mergeProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
  if (processorDescriptorBuilder_ == null) {
    if (((bitField0_ & 0x00000100) == 0x00000100) &&
        processorDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) {
      processorDescriptor_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(processorDescriptor_).mergeFrom(value).buildPartial();
    } else {
      processorDescriptor_ = value;
    }
    onChanged();
  } else {
    processorDescriptorBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000100;
  return this;
}
/**
 * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
 */
public Builder mergeMergedInputDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
  if (mergedInputDescriptorBuilder_ == null) {
    if (((bitField0_ & 0x00000004) == 0x00000004) &&
        mergedInputDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) {
      mergedInputDescriptor_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(mergedInputDescriptor_).mergeFrom(value).buildPartial();
    } else {
      mergedInputDescriptor_ = value;
    }
    onChanged();
  } else {
    mergedInputDescriptorBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000004;
  return this;
}
/**
 * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
 */
public Builder mergeIoDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
  if (ioDescriptorBuilder_ == null) {
    if (((bitField0_ & 0x00000002) == 0x00000002) &&
        ioDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) {
      ioDescriptor_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(ioDescriptor_).mergeFrom(value).buildPartial();
    } else {
      ioDescriptor_ = value;
    }
    onChanged();
  } else {
    ioDescriptorBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000002;
  return this;
}
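// The three merge*Descriptor methods above follow standard proto2 merge semantics
// for a singular message field: if the field already holds a non-default value, the
// incoming message is merged into it field-by-field (fields set in `value` win);
// otherwise `value` replaces the field outright. A minimal caller-side sketch —
// `b` stands for a hypothetical enclosing builder, and `setClassName` is assumed
// from EntityDescriptorProto's schema:
//
//   EntityDescriptorProto first = EntityDescriptorProto.newBuilder()
//       .setClassName("org.example.Proc")   // assumed field, illustrative value
//       .build();
//   b.mergeProcessorDescriptor(first);      // field unset: stored as-is
//   b.mergeProcessorDescriptor(second);     // field set: merged field-by-field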
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto prototype) {
  return newBuilder().mergeFrom(prototype);
}

public Builder toBuilder() {
  return newBuilder(this);
}
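// newBuilder(prototype) and toBuilder() are the usual protobuf round-trip between
// an immutable message and a mutable builder; toBuilder() is just newBuilder(this).
// A sketch, assuming `proto` is an existing EntityDescriptorProto instance:
//
//   EntityDescriptorProto.Builder b = proto.toBuilder();  // builder seeded with proto's fields
//   EntityDescriptorProto updated = b.build();            // structurally equal copy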