/**
 * Computes (and memoizes) the byte size of this message's wire encoding.
 * A cached value of -1 means "not yet computed".
 */
public int getSerializedSize() {
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }

  int total = 0;
  // Field 1 (job_id): sub-message, counted only when its has-bit is set.
  if ((bitField0_ & 0x00000001) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, jobId_);
  }
  // Field 2 (task_type): enum, sized by its wire number.
  if ((bitField0_ & 0x00000002) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, taskType_.getNumber());
  }
  // Field 3 (id): int32.
  if ((bitField0_ & 0x00000004) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, id_);
  }
  // Unknown fields carried over from parsing contribute to the size too.
  total += getUnknownFields().getSerializedSize();

  memoizedSerializedSize = total;
  return total;
}
/**
 * Serializes the set fields of this message, in field-number order,
 * followed by any preserved unknown fields.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Memoize sizes up front; nested message writes rely on cached sizes.
  getSerializedSize();
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeMessage(1, jobId_);              // field 1: job_id
  }
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeEnum(2, taskType_.getNumber());  // field 2: task_type
  }
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeInt32(3, id_);                   // field 3: id
  }
  getUnknownFields().writeTo(output);
}
/**
 * Serializes the set fields of this message, in field-number order,
 * followed by any preserved unknown fields.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Memoize sizes up front; nested message writes rely on cached sizes.
  getSerializedSize();
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeMessage(1, jobId_);              // field 1: job_id
  }
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeEnum(2, taskType_.getNumber());  // field 2: task_type
  }
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeInt32(3, id_);                   // field 3: id
  }
  getUnknownFields().writeTo(output);
}
/**
 * Two TaskIdProto messages are equal when each field's presence matches
 * and, where present, the values match (including unknown fields).
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other =
      (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) obj;

  if (hasJobId() != other.hasJobId()) {
    return false;
  }
  if (hasJobId() && !getJobId().equals(other.getJobId())) {
    return false;
  }
  if (hasTaskType() != other.hasTaskType()) {
    return false;
  }
  if (hasTaskType() && getTaskType() != other.getTaskType()) {
    return false;
  }
  if (hasId() != other.hasId()) {
    return false;
  }
  if (hasId() && getId() != other.getId()) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
/**
 * Two TaskIdProto messages are equal when each field's presence matches
 * and, where present, the values match (including unknown fields).
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other =
      (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) obj;

  if (hasJobId() != other.hasJobId()) {
    return false;
  }
  if (hasJobId() && !getJobId().equals(other.getJobId())) {
    return false;
  }
  if (hasTaskType() != other.hasTaskType()) {
    return false;
  }
  if (hasTaskType() && getTaskType() != other.getTaskType()) {
    return false;
  }
  if (hasId() != other.hasId()) {
    return false;
  }
  if (hasId() && getId() != other.getId()) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
/**
 * Two TaskIdProto messages are equal when each field's presence matches
 * and, where present, the values match (including unknown fields).
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other =
      (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) obj;

  if (hasJobId() != other.hasJobId()) {
    return false;
  }
  if (hasJobId() && !getJobId().equals(other.getJobId())) {
    return false;
  }
  if (hasTaskType() != other.hasTaskType()) {
    return false;
  }
  if (hasTaskType() && getTaskType() != other.getTaskType()) {
    return false;
  }
  if (hasId() != other.hasId()) {
    return false;
  }
  if (hasId() && getId() != other.getId()) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
/**
 * Two TaskIdProto messages are equal when each field's presence matches
 * and, where present, the values match (including unknown fields).
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other =
      (org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto) obj;

  if (hasJobId() != other.hasJobId()) {
    return false;
  }
  if (hasJobId() && !getJobId().equals(other.getJobId())) {
    return false;
  }
  if (hasTaskType() != other.hasTaskType()) {
    return false;
  }
  if (hasTaskType() && getTaskType() != other.getTaskType()) {
    return false;
  }
  if (hasId() != other.hasId()) {
    return false;
  }
  if (hasId() && getId() != other.getId()) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
/**
 * Computes (and memoizes) a hash consistent with equals: descriptor,
 * then each present field tagged by its field number, then unknown fields.
 * NOTE: the multiplier sequence must not change — cached hashes and
 * equals-consistency depend on this exact arithmetic order.
 */
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memo.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (hasJobId()) {
    h = (37 * h) + JOB_ID_FIELD_NUMBER;
    h = (53 * h) + getJobId().hashCode();
  }
  if (hasTaskType()) {
    h = (37 * h) + TASK_TYPE_FIELD_NUMBER;
    h = (53 * h) + hashEnum(getTaskType());
  }
  if (hasId()) {
    h = (37 * h) + ID_FIELD_NUMBER;
    h = (53 * h) + getId();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
/**
 * Computes (and memoizes) a hash consistent with equals: descriptor,
 * then each present field tagged by its field number, then unknown fields.
 * NOTE: the multiplier sequence must not change — cached hashes and
 * equals-consistency depend on this exact arithmetic order.
 */
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memo.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (hasJobId()) {
    h = (37 * h) + JOB_ID_FIELD_NUMBER;
    h = (53 * h) + getJobId().hashCode();
  }
  if (hasTaskType()) {
    h = (37 * h) + TASK_TYPE_FIELD_NUMBER;
    h = (53 * h) + hashEnum(getTaskType());
  }
  if (hasId()) {
    h = (37 * h) + ID_FIELD_NUMBER;
    h = (53 * h) + getId();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
/**
 * Computes (and memoizes) a hash consistent with equals: descriptor,
 * then each present field tagged by its field number, then unknown fields.
 * NOTE: the multiplier sequence must not change — cached hashes and
 * equals-consistency depend on this exact arithmetic order.
 */
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memo.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (hasJobId()) {
    h = (37 * h) + JOB_ID_FIELD_NUMBER;
    h = (53 * h) + getJobId().hashCode();
  }
  if (hasTaskType()) {
    h = (37 * h) + TASK_TYPE_FIELD_NUMBER;
    h = (53 * h) + hashEnum(getTaskType());
  }
  if (hasId()) {
    h = (37 * h) + ID_FIELD_NUMBER;
    h = (53 * h) + getId();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
/**
 * Computes (and memoizes) the byte size of this message's wire encoding.
 * A cached value of -1 means "not yet computed".
 */
public int getSerializedSize() {
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }

  int total = 0;
  // Field 1 (job_id): sub-message, counted only when its has-bit is set.
  if ((bitField0_ & 0x00000001) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, jobId_);
  }
  // Field 2 (task_type): enum, sized by its wire number.
  if ((bitField0_ & 0x00000002) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, taskType_.getNumber());
  }
  // Field 3 (id): int32.
  if ((bitField0_ & 0x00000004) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, id_);
  }
  // Unknown fields carried over from parsing contribute to the size too.
  total += getUnknownFields().getSerializedSize();

  memoizedSerializedSize = total;
  return total;
}
/**
 * Computes (and memoizes) the byte size of this message's wire encoding.
 * A cached value of -1 means "not yet computed".
 */
public int getSerializedSize() {
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }

  int total = 0;
  // Field 1 (job_id): sub-message, counted only when its has-bit is set.
  if ((bitField0_ & 0x00000001) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, jobId_);
  }
  // Field 2 (task_type): enum, sized by its wire number.
  if ((bitField0_ & 0x00000002) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, taskType_.getNumber());
  }
  // Field 3 (id): int32.
  if ((bitField0_ & 0x00000004) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, id_);
  }
  // Unknown fields carried over from parsing contribute to the size too.
  total += getUnknownFields().getSerializedSize();

  memoizedSerializedSize = total;
  return total;
}
/**
 * Computes (and memoizes) a hash consistent with equals: descriptor,
 * then each present field tagged by its field number, then unknown fields.
 * NOTE: the multiplier sequence must not change — cached hashes and
 * equals-consistency depend on this exact arithmetic order.
 */
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memo.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (hasJobId()) {
    h = (37 * h) + JOB_ID_FIELD_NUMBER;
    h = (53 * h) + getJobId().hashCode();
  }
  if (hasTaskType()) {
    h = (37 * h) + TASK_TYPE_FIELD_NUMBER;
    h = (53 * h) + hashEnum(getTaskType());
  }
  if (hasId()) {
    h = (37 * h) + ID_FIELD_NUMBER;
    h = (53 * h) + getId();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
/**
 * Computes (and memoizes) the byte size of this message's wire encoding.
 * A cached value of -1 means "not yet computed".
 */
public int getSerializedSize() {
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }

  int total = 0;
  // Field 1 (job_id): sub-message, counted only when its has-bit is set.
  if ((bitField0_ & 0x00000001) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, jobId_);
  }
  // Field 2 (task_type): enum, sized by its wire number.
  if ((bitField0_ & 0x00000002) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, taskType_.getNumber());
  }
  // Field 3 (id): int32.
  if ((bitField0_ & 0x00000004) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, id_);
  }
  // Unknown fields carried over from parsing contribute to the size too.
  total += getUnknownFields().getSerializedSize();

  memoizedSerializedSize = total;
  return total;
}
/**
 * Merges the set fields of {@code other} into this builder.
 * The job_id sub-message is merged recursively; task_type and id
 * are overwritten when present on {@code other}.
 */
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasJobId()) {
    mergeJobId(other.getJobId());
  }
  if (other.hasTaskType()) {
    setTaskType(other.getTaskType());
  }
  if (other.hasId()) {
    setId(other.getId());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Serializes the set fields of this message, in field-number order,
 * followed by any preserved unknown fields.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Memoize sizes up front; nested message writes rely on cached sizes.
  getSerializedSize();
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeMessage(1, jobId_);              // field 1: job_id
  }
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeEnum(2, taskType_.getNumber());  // field 2: task_type
  }
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeInt32(3, id_);                   // field 3: id
  }
  getUnknownFields().writeTo(output);
}
/**
 * Merges the set fields of {@code other} into this builder.
 * The job_id sub-message is merged recursively; task_type and id
 * are overwritten when present on {@code other}.
 */
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasJobId()) {
    mergeJobId(other.getJobId());
  }
  if (other.hasTaskType()) {
    setTaskType(other.getTaskType());
  }
  if (other.hasId()) {
    setId(other.getId());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges the set fields of {@code other} into this builder.
 * The job_id sub-message is merged recursively; task_type and id
 * are overwritten when present on {@code other}.
 */
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasJobId()) {
    mergeJobId(other.getJobId());
  }
  if (other.hasTaskType()) {
    setTaskType(other.getTaskType());
  }
  if (other.hasId()) {
    setId(other.getId());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Serializes the set fields of this message, in field-number order,
 * followed by any preserved unknown fields.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Memoize sizes up front; nested message writes rely on cached sizes.
  getSerializedSize();
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeMessage(1, jobId_);              // field 1: job_id
  }
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeEnum(2, taskType_.getNumber());  // field 2: task_type
  }
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeInt32(3, id_);                   // field 3: id
  }
  getUnknownFields().writeTo(output);
}
/**
 * Merges the set fields of {@code other} into this builder.
 * The job_id sub-message is merged recursively; task_type and id
 * are overwritten when present on {@code other}.
 */
public Builder mergeFrom(org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasJobId()) {
    mergeJobId(other.getJobId());
  }
  if (other.hasTaskType()) {
    setTaskType(other.getTaskType());
  }
  if (other.hasId()) {
    setId(other.getId());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}