// Opening fragment of the protobuf-generated Builder.buildPartial() for
// MRProtos.TaskAttemptReportProto: allocates the result message from this
// builder and snapshots bitField0_ into from_/to_ locals ahead of the
// per-field copy. The remainder of the method body is truncated in this view
// (brace never closed here).
// NOTE(review): machine-generated code — change the .proto and regenerate
// rather than hand-editing this file.
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto buildPartial() { org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0;
// Generated Builder.clearTaskAttemptReport() for a message holding an
// optional task_attempt_report field (field number 1). Without a nested
// builder it resets the field to the default instance and fires onChanged();
// with one it delegates to the builder's clear(). Either way the presence
// bit (0x00000001) is cleared. The trailing unterminated "/**" opens the
// Javadoc of the next generated accessor, which is truncated in this view.
// NOTE(review): machine-generated code — do not hand-edit; regenerate from
// the .proto.
/** * <code>optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;</code> */ public Builder clearTaskAttemptReport() { if (taskAttemptReportBuilder_ == null) { taskAttemptReport_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance(); onChanged(); } else { taskAttemptReportBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } /**
// Tail fragment of the generated getSerializedSize(): size contributions for
// fields 7 (diagnostic_info), 8 (state_string), 12 (node_manager_host) and
// 15 (container_id), then unknown fields, memoization into
// memoizedSerializedSize, and the return. The ".computeBytesSize(...)"
// calls are orphaned here — their "size += com.google.protobuf
// .CodedOutputStream" receivers and the has-field guards that normally
// precede them are missing from this view (truncated fragment).
// NOTE(review): machine-generated code — regenerate from the .proto.
.computeBytesSize(7, getDiagnosticInfoBytes()); .computeBytesSize(8, getStateStringBytes()); .computeBytesSize(12, getNodeManagerHostBytes()); .computeMessageSize(15, containerId_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
// Duplicate of the buildPartial() opening fragment above — allocates the
// TaskAttemptReportProto result and snapshots bitField0_; body truncated in
// this view. The repetition suggests this file is a mangled or de-duplicated
// extract of the generated MRProtos source.
// NOTE(review): machine-generated code — regenerate from the .proto.
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto buildPartial() { org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0;
// Fragment of the generated equals(Object): pairwise presence-and-value
// comparison for task_attempt_id, task_attempt_state, progress (compared via
// Float.floatToIntBits for bit-exact float equality), start_time,
// finish_time and counters. The closing braces of the if-blocks, the method
// header, and the final return are all missing from this view (truncated).
// NOTE(review): machine-generated code — regenerate from the .proto.
result = result && (hasTaskAttemptId() == other.hasTaskAttemptId()); if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId()); result = result && (hasTaskAttemptState() == other.hasTaskAttemptState()); if (hasTaskAttemptState()) { result = result && (getTaskAttemptState() == other.getTaskAttemptState()); result = result && (hasProgress() == other.hasProgress()); if (hasProgress()) { result = result && (Float.floatToIntBits(getProgress()) == Float.floatToIntBits(other.getProgress())); result = result && (hasStartTime() == other.hasStartTime()); if (hasStartTime()) { result = result && (getStartTime() == other.getStartTime()); result = result && (hasFinishTime() == other.hasFinishTime()); if (hasFinishTime()) { result = result && (getFinishTime() == other.getFinishTime()); result = result && (hasCounters() == other.hasCounters()); if (hasCounters()) { result = result && getCounters() .equals(other.getCounters());
// Two further duplicates of the buildPartial() opening fragment (result
// allocation + bitField0_ snapshot); both truncated before the per-field
// copy. Presumably an artifact of how this extract was produced — TODO
// confirm against the original generated MRProtos.java.
// NOTE(review): machine-generated code — regenerate from the .proto.
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto buildPartial() { org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0;
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto buildPartial() { org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0;
// Three duplicates of the equals(Object) fragment: presence-and-value
// checks for task_attempt_id, task_attempt_state, progress
// (Float.floatToIntBits), start_time, finish_time and counters. Closing
// braces, header and return are truncated in this view.
// NOTE(review): machine-generated code — regenerate from the .proto.
result = result && (hasTaskAttemptId() == other.hasTaskAttemptId()); if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId()); result = result && (hasTaskAttemptState() == other.hasTaskAttemptState()); if (hasTaskAttemptState()) { result = result && (getTaskAttemptState() == other.getTaskAttemptState()); result = result && (hasProgress() == other.hasProgress()); if (hasProgress()) { result = result && (Float.floatToIntBits(getProgress()) == Float.floatToIntBits(other.getProgress())); result = result && (hasStartTime() == other.hasStartTime()); if (hasStartTime()) { result = result && (getStartTime() == other.getStartTime()); result = result && (hasFinishTime() == other.hasFinishTime()); if (hasFinishTime()) { result = result && (getFinishTime() == other.getFinishTime()); result = result && (hasCounters() == other.hasCounters()); if (hasCounters()) { result = result && getCounters() .equals(other.getCounters());
result = result && (hasTaskAttemptId() == other.hasTaskAttemptId()); if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId()); result = result && (hasTaskAttemptState() == other.hasTaskAttemptState()); if (hasTaskAttemptState()) { result = result && (getTaskAttemptState() == other.getTaskAttemptState()); result = result && (hasProgress() == other.hasProgress()); if (hasProgress()) { result = result && (Float.floatToIntBits(getProgress()) == Float.floatToIntBits(other.getProgress())); result = result && (hasStartTime() == other.hasStartTime()); if (hasStartTime()) { result = result && (getStartTime() == other.getStartTime()); result = result && (hasFinishTime() == other.hasFinishTime()); if (hasFinishTime()) { result = result && (getFinishTime() == other.getFinishTime()); result = result && (hasCounters() == other.hasCounters()); if (hasCounters()) { result = result && getCounters() .equals(other.getCounters());
result = result && (hasTaskAttemptId() == other.hasTaskAttemptId()); if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId()); result = result && (hasTaskAttemptState() == other.hasTaskAttemptState()); if (hasTaskAttemptState()) { result = result && (getTaskAttemptState() == other.getTaskAttemptState()); result = result && (hasProgress() == other.hasProgress()); if (hasProgress()) { result = result && (Float.floatToIntBits(getProgress()) == Float.floatToIntBits(other.getProgress())); result = result && (hasStartTime() == other.hasStartTime()); if (hasStartTime()) { result = result && (getStartTime() == other.getStartTime()); result = result && (hasFinishTime() == other.hasFinishTime()); if (hasFinishTime()) { result = result && (getFinishTime() == other.getFinishTime()); result = result && (hasCounters() == other.hasCounters()); if (hasCounters()) { result = result && getCounters() .equals(other.getCounters());
// Duplicate of the getSerializedSize() tail fragment: byte/message size for
// fields 7, 8, 12 and 15, unknown-field size, memoization, return. The
// "size += CodedOutputStream" receivers and has-field guards are truncated.
// NOTE(review): machine-generated code — regenerate from the .proto.
.computeBytesSize(7, getDiagnosticInfoBytes()); .computeBytesSize(8, getStateStringBytes()); .computeBytesSize(12, getNodeManagerHostBytes()); .computeMessageSize(15, containerId_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
// Three duplicates of a fragment of the generated hashCode(): the standard
// protobuf 19/37/53 mixing pattern over the descriptor, then each present
// field (task_attempt_id, task_attempt_state via hashEnum, progress via
// Float.floatToIntBits, start/finish time via hashLong, counters,
// diagnostic_info). Method header, closing braces and return are truncated
// in this view.
// NOTE(review): machine-generated code — regenerate from the .proto.
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTaskAttemptId()) { hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getTaskAttemptId().hashCode(); if (hasTaskAttemptState()) { hash = (37 * hash) + TASK_ATTEMPT_STATE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getTaskAttemptState()); if (hasProgress()) { hash = (37 * hash) + PROGRESS_FIELD_NUMBER; hash = (53 * hash) + Float.floatToIntBits( getProgress()); if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getStartTime()); if (hasFinishTime()) { hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getFinishTime()); if (hasCounters()) { hash = (37 * hash) + COUNTERS_FIELD_NUMBER; hash = (53 * hash) + getCounters().hashCode(); if (hasDiagnosticInfo()) { hash = (37 * hash) + DIAGNOSTIC_INFO_FIELD_NUMBER; hash = (53 * hash) + getDiagnosticInfo().hashCode();
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTaskAttemptId()) { hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getTaskAttemptId().hashCode(); if (hasTaskAttemptState()) { hash = (37 * hash) + TASK_ATTEMPT_STATE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getTaskAttemptState()); if (hasProgress()) { hash = (37 * hash) + PROGRESS_FIELD_NUMBER; hash = (53 * hash) + Float.floatToIntBits( getProgress()); if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getStartTime()); if (hasFinishTime()) { hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getFinishTime()); if (hasCounters()) { hash = (37 * hash) + COUNTERS_FIELD_NUMBER; hash = (53 * hash) + getCounters().hashCode(); if (hasDiagnosticInfo()) { hash = (37 * hash) + DIAGNOSTIC_INFO_FIELD_NUMBER; hash = (53 * hash) + getDiagnosticInfo().hashCode();
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasTaskAttemptId()) { hash = (37 * hash) + TASK_ATTEMPT_ID_FIELD_NUMBER; hash = (53 * hash) + getTaskAttemptId().hashCode(); if (hasTaskAttemptState()) { hash = (37 * hash) + TASK_ATTEMPT_STATE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getTaskAttemptState()); if (hasProgress()) { hash = (37 * hash) + PROGRESS_FIELD_NUMBER; hash = (53 * hash) + Float.floatToIntBits( getProgress()); if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getStartTime()); if (hasFinishTime()) { hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER; hash = (53 * hash) + hashLong(getFinishTime()); if (hasCounters()) { hash = (37 * hash) + COUNTERS_FIELD_NUMBER; hash = (53 * hash) + getCounters().hashCode(); if (hasDiagnosticInfo()) { hash = (37 * hash) + DIAGNOSTIC_INFO_FIELD_NUMBER; hash = (53 * hash) + getDiagnosticInfo().hashCode();
// Two more duplicates of the getSerializedSize() tail fragment (fields 7,
// 8, 12, 15; unknown fields; memoization; return). Receivers and has-field
// guards truncated as in the earlier copies.
// NOTE(review): machine-generated code — regenerate from the .proto.
.computeBytesSize(7, getDiagnosticInfoBytes()); .computeBytesSize(8, getStateStringBytes()); .computeBytesSize(12, getNodeManagerHostBytes()); .computeMessageSize(15, containerId_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
.computeBytesSize(7, getDiagnosticInfoBytes()); .computeBytesSize(8, getStateStringBytes()); .computeBytesSize(12, getNodeManagerHostBytes()); .computeMessageSize(15, containerId_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
// Fragment of the generated writeTo(CodedOutputStream): calls
// getSerializedSize() first (to populate memoized sizes), writes field 1
// (task_attempt_id) when its presence bit is set, then fields 7, 8, 12 and
// 15 and the unknown fields. The has-field guards for fields 7-15 and the
// closing braces are missing from this view (truncated fragment).
// NOTE(review): machine-generated code — regenerate from the .proto.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, taskAttemptId_); output.writeBytes(7, getDiagnosticInfoBytes()); output.writeBytes(8, getStateStringBytes()); output.writeBytes(12, getNodeManagerHostBytes()); output.writeMessage(15, containerId_); getUnknownFields().writeTo(output);
// Two duplicates of a fragment of the generated Builder.mergeFrom(other):
// short-circuits on the default instance, then for each field present in
// "other" either merges (message fields: task_attempt_id, counters), sets
// (scalar/enum fields: task_attempt_state, progress, start/finish time), or
// copies the backing string reference and presence bit directly
// (diagnostic_info -> 0x00000040, state_string -> 0x00000080) followed by
// onChanged(). Closing braces and the trailing fields/return are truncated
// in this view.
// NOTE(review): machine-generated code — regenerate from the .proto.
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto other) { if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) return this; if (other.hasTaskAttemptId()) { mergeTaskAttemptId(other.getTaskAttemptId()); if (other.hasTaskAttemptState()) { setTaskAttemptState(other.getTaskAttemptState()); if (other.hasProgress()) { setProgress(other.getProgress()); if (other.hasStartTime()) { setStartTime(other.getStartTime()); if (other.hasFinishTime()) { setFinishTime(other.getFinishTime()); if (other.hasCounters()) { mergeCounters(other.getCounters()); if (other.hasDiagnosticInfo()) { bitField0_ |= 0x00000040; diagnosticInfo_ = other.diagnosticInfo_; onChanged(); if (other.hasStateString()) { bitField0_ |= 0x00000080; stateString_ = other.stateString_; onChanged();
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto other) { if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptReportProto.getDefaultInstance()) return this; if (other.hasTaskAttemptId()) { mergeTaskAttemptId(other.getTaskAttemptId()); if (other.hasTaskAttemptState()) { setTaskAttemptState(other.getTaskAttemptState()); if (other.hasProgress()) { setProgress(other.getProgress()); if (other.hasStartTime()) { setStartTime(other.getStartTime()); if (other.hasFinishTime()) { setFinishTime(other.getFinishTime()); if (other.hasCounters()) { mergeCounters(other.getCounters()); if (other.hasDiagnosticInfo()) { bitField0_ |= 0x00000040; diagnosticInfo_ = other.diagnosticInfo_; onChanged(); if (other.hasStateString()) { bitField0_ |= 0x00000080; stateString_ = other.stateString_; onChanged();
// Duplicate of the writeTo(CodedOutputStream) fragment above: sizes first,
// field 1 guarded by its presence bit, then fields 7, 8, 12, 15 and unknown
// fields; guards for the later fields and the closing braces are truncated.
// NOTE(review): machine-generated code — regenerate from the .proto.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, taskAttemptId_); output.writeBytes(7, getDiagnosticInfoBytes()); output.writeBytes(8, getStateStringBytes()); output.writeBytes(12, getNodeManagerHostBytes()); output.writeMessage(15, containerId_); getUnknownFields().writeTo(output);