// Builds a TaskAttemptIdProto from this builder's current state WITHOUT
// required-field validation (generated protobuf 2.x builder code; build()
// adds the isInitialized check on top of this).
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto buildPartial() {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto result = new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  // Bit 0x1 is the has-bit for the taskId field; propagate it to the message.
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  // taskId_ is copied unconditionally; the has-bit above controls whether
  // hasTaskId() reports it as present. A nested builder, if set, wins.
  if (taskIdBuilder_ == null) {
    result.taskId_ = taskId_;
  } else {
    result.taskId_ = taskIdBuilder_.build();
  }
  // Bit 0x2 is the has-bit for the id field.
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.id_ = id_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
/**
 * <code>optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;</code>
 */
public Builder clearSuccessfulAttempt() {
  // Reset whichever representation is live: the inline field (then notify the
  // parent via onChanged) or the lazily-created nested builder.
  if (successfulAttemptBuilder_ == null) {
    successfulAttempt_ = org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto.getDefaultInstance();
    onChanged();
  } else {
    successfulAttemptBuilder_.clear();
  }
  // Clear the has-bit (0x80) for field number 8.
  bitField0_ = (bitField0_ & ~0x00000080);
  return this;
}
// NOTE(review): the trailing javadoc opener below belongs to the next
// generated accessor, which falls outside this excerpt — left intact.
/**
/**
 * Serializes every set field of this message, in ascending field-number
 * order, followed by any unknown fields, to {@code output}.
 *
 * @throws java.io.IOException if the underlying stream fails
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Protobuf contract: the memoized size must be computed before writing.
  getSerializedSize();
  final int bits = bitField0_;
  // Field 1 (taskId) is written only when its has-bit (0x1) is set.
  if ((bits & 0x00000001) != 0) {
    output.writeMessage(1, taskId_);
  }
  // Field 2 (id) is written only when its has-bit (0x2) is set.
  if ((bits & 0x00000002) != 0) {
    output.writeInt32(2, id_);
  }
  getUnknownFields().writeTo(output);
}
// NOTE(review): the twelve lines below are repeated one-line fragments
// excerpted mid-way from generated protobuf equals() methods — each `if`
// block is missing its closing brace, which lies outside this excerpt.
// The enclosing methods compare field presence (hasX() == other.hasX())
// immediately before each of these value comparisons. Left byte-identical.
// Fragments from a taskAttemptId field comparison:
if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId());
if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId());
if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId());
if (hasTaskAttemptId()) { result = result && getTaskAttemptId() .equals(other.getTaskAttemptId());
// Fragments from a successfulAttempt field comparison:
if (hasSuccessfulAttempt()) { result = result && getSuccessfulAttempt() .equals(other.getSuccessfulAttempt());
if (hasSuccessfulAttempt()) { result = result && getSuccessfulAttempt() .equals(other.getSuccessfulAttempt());
if (hasSuccessfulAttempt()) { result = result && getSuccessfulAttempt() .equals(other.getSuccessfulAttempt());
if (hasSuccessfulAttempt()) { result = result && getSuccessfulAttempt() .equals(other.getSuccessfulAttempt());
// Fragments from an attemptId field comparison:
if (hasAttemptId()) { result = result && getAttemptId() .equals(other.getAttemptId());
if (hasAttemptId()) { result = result && getAttemptId() .equals(other.getAttemptId());
if (hasAttemptId()) { result = result && getAttemptId() .equals(other.getAttemptId());
if (hasAttemptId()) { result = result && getAttemptId() .equals(other.getAttemptId());
/**
 * Two TaskAttemptIdProto messages are equal when their field-presence flags
 * agree, every present field compares equal, and their unknown-field sets
 * match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto other =
      (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto) obj;
  // Presence must agree; when present, values must match.
  if (hasTaskId() != other.hasTaskId()) {
    return false;
  }
  if (hasTaskId() && !getTaskId().equals(other.getTaskId())) {
    return false;
  }
  if (hasId() != other.hasId()) {
    return false;
  }
  if (hasId() && getId() != other.getId()) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
// Generated structural equality for TaskAttemptIdProto: identity fast-path,
// type check, then per-field presence + value comparison, and finally the
// unknown-field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Non-TaskAttemptIdProto arguments fall back to Object identity semantics.
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto other = (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto) obj;
  boolean result = true;
  // taskId: presence flags must agree; value compared only when present.
  result = result && (hasTaskId() == other.hasTaskId());
  if (hasTaskId()) {
    result = result && getTaskId()
        .equals(other.getTaskId());
  }
  // id: same presence-then-value pattern, primitive compared with ==.
  result = result && (hasId() == other.hasId());
  if (hasId()) {
    result = result && (getId()
        == other.getId());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
/**
 * Two KillTaskAttemptRequestProto messages are equal when their
 * taskAttemptId presence and value agree and their unknown-field sets match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto other =
      (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) obj;
  // Presence must agree; when present, values must match.
  if (hasTaskAttemptId() != other.hasTaskAttemptId()) {
    return false;
  }
  if (hasTaskAttemptId() && !getTaskAttemptId().equals(other.getTaskAttemptId())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
// Generated structural equality for KillTaskAttemptRequestProto: identity
// fast-path, type check, presence + value comparison of the single field,
// then unknown-field sets.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Non-matching types fall back to Object identity semantics.
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto other = (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto) obj;
  boolean result = true;
  // taskAttemptId: presence flags must agree; value compared only when present.
  result = result && (hasTaskAttemptId() == other.hasTaskAttemptId());
  if (hasTaskAttemptId()) {
    result = result && getTaskAttemptId()
        .equals(other.getTaskAttemptId());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
/**
 * Assembles a TaskAttemptIdProto from this builder's state. Unlike build(),
 * no required-field validation is performed.
 */
public org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto buildPartial() {
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto result =
      new org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto(this);
  final int fromBits = bitField0_;
  int toBits = 0;
  // Propagate the has-bit for taskId (0x1) into the built message.
  if ((fromBits & 0x00000001) != 0) {
    toBits |= 0x00000001;
  }
  // A nested builder, when present, supplies the built sub-message;
  // otherwise the inline field is copied as-is.
  result.taskId_ = (taskIdBuilder_ == null) ? taskId_ : taskIdBuilder_.build();
  // Propagate the has-bit for id (0x2).
  if ((fromBits & 0x00000002) != 0) {
    toBits |= 0x00000002;
  }
  result.id_ = id_;
  result.bitField0_ = toBits;
  onBuilt();
  return result;
}