/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ != null) {
    // A nested builder exists; route the update through it.
    taskAttemptReportBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    taskAttemptReport_ = value;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ != null) {
    // Delegate to the materialized nested builder.
    taskAttemptReportBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    taskAttemptReport_ = value;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ != null) {
    taskAttemptReportBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    // Store the message directly and notify listeners of the change.
    taskAttemptReport_ = value;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder mergeTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ != null) {
    taskAttemptReportBuilder_.mergeFrom(value);
  } else {
    // Merge only when a non-default value is already present; otherwise replace.
    boolean hasExisting = ((bitField0_ & 0x00000001) == 0x00000001)
        && taskAttemptReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    if (hasExisting) {
      taskAttemptReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.newBuilder(taskAttemptReport_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      taskAttemptReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder mergeTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ != null) {
    // The nested builder handles its own merge semantics.
    taskAttemptReportBuilder_.mergeFrom(value);
  } else {
    boolean fieldSet = (bitField0_ & 0x00000001) == 0x00000001;
    if (fieldSet
        && taskAttemptReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) {
      // Field-wise merge of the incoming message into the existing one.
      taskAttemptReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.newBuilder(taskAttemptReport_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      taskAttemptReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    taskAttemptReport_ = value;
    onChanged();
    bitField0_ |= 0x00000001;
    return this;
  }
  // Nested builder path: it performs its own null validation.
  taskAttemptReportBuilder_.setMessage(value);
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder mergeTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ != null) {
    taskAttemptReportBuilder_.mergeFrom(value);
  } else {
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto defaultMsg =
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    if (((bitField0_ & 0x00000001) == 0x00000001) && taskAttemptReport_ != defaultMsg) {
      // Combine the current and incoming messages field by field.
      taskAttemptReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.newBuilder(taskAttemptReport_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      taskAttemptReport_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder mergeTaskAttemptReport(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto value) {
  if (taskAttemptReportBuilder_ == null) {
    // Replace outright unless a non-default value was previously set.
    if (((bitField0_ & 0x00000001) == 0x00000001)
        && taskAttemptReport_ != org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) {
      taskAttemptReport_ =
          org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.newBuilder(taskAttemptReport_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      taskAttemptReport_ = value;
    }
    onChanged();
  } else {
    taskAttemptReportBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder builderForValue) {
  // Build once; exactly one branch consumes the result, matching the original control flow.
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto built = builderForValue.build();
  if (taskAttemptReportBuilder_ != null) {
    taskAttemptReportBuilder_.setMessage(built);
  } else {
    taskAttemptReport_ = built;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder builderForValue) {
  if (taskAttemptReportBuilder_ != null) {
    // Hand the freshly built message to the nested builder.
    taskAttemptReportBuilder_.setMessage(builderForValue.build());
  } else {
    taskAttemptReport_ = builderForValue.build();
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder clearTaskAttemptReport() {
  if (taskAttemptReportBuilder_ != null) {
    taskAttemptReportBuilder_.clear();
  } else {
    // Reset to the shared default instance and notify listeners.
    taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    onChanged();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder builderForValue) {
  if (taskAttemptReportBuilder_ == null) {
    // No nested builder: store the built message and mark the change.
    taskAttemptReport_ = builderForValue.build();
    onChanged();
    bitField0_ |= 0x00000001;
    return this;
  }
  taskAttemptReportBuilder_.setMessage(builderForValue.build());
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder clearTaskAttemptReport() {
  if (taskAttemptReportBuilder_ == null) {
    taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    onChanged();
  } else {
    // Nested builder owns the state; delegate the reset.
    taskAttemptReportBuilder_.clear();
  }
  // Drop the has-bit so the field reads as unset.
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder clearTaskAttemptReport() {
  if (taskAttemptReportBuilder_ != null) {
    taskAttemptReportBuilder_.clear();
  } else {
    taskAttemptReport_ =
        org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    onChanged();
  }
  // Clear the presence bit for this field.
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder setTaskAttemptReport(
    org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder builderForValue) {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto message = builderForValue.build();
  if (taskAttemptReportBuilder_ == null) {
    taskAttemptReport_ = message;
    onChanged();
  } else {
    taskAttemptReportBuilder_.setMessage(message);
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public Builder clearTaskAttemptReport() {
  if (taskAttemptReportBuilder_ == null) {
    // Restore the default instance so the field reads as empty.
    taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance();
    onChanged();
  } else {
    taskAttemptReportBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder getTaskAttemptReportBuilder() {
  // Mark the field present and dirty before exposing the mutable builder.
  bitField0_ |= 0x00000001;
  onChanged();
  return getTaskAttemptReportFieldBuilder().getBuilder();
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder getTaskAttemptReportBuilder() {
  bitField0_ |= 0x00000001;  // field is now considered set
  onChanged();
  return getTaskAttemptReportFieldBuilder().getBuilder();
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder getTaskAttemptReportBuilder() {
  // Exposing the builder counts as a write: set the has-bit and flag a change.
  bitField0_ |= 0x00000001;
  onChanged();
  return getTaskAttemptReportFieldBuilder().getBuilder();
}
/**
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code>
 */
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.Builder getTaskAttemptReportBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  // Lazily materializes the single-field builder on first access.
  return getTaskAttemptReportFieldBuilder().getBuilder();
}
/**