public int getSerializedSize() {
  // Return the memoized size if already computed; -1 marks it stale.
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  // Field 1 (checksum) is present when bit 0 of bitField0_ is set.
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, checksum_);
  }
  // Field 2 (chunkOffset) is present when bit 1 of bitField0_ is set.
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
        .computeUInt64Size(2, chunkOffset_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Populate the memoized size before writing.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, checksum_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeUInt64(2, chunkOffset_);
  }
  getUnknownFields().writeTo(output);
}
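// ---------------------------------------------------------------------
// Usage sketch (not part of the generated file): getSerializedSize() and
// writeTo(...) are the serialization half of the message; toByteArray()
// drives both. Assumes ChecksumProto/ChecksumTypeProto are the companion
// types from HdfsProtos, as in stock Hadoop -- adjust for your build.
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto;

public class ReadOpChecksumInfoExample {
  public static void main(String[] args) throws Exception {
    ChecksumProto cs = ChecksumProto.newBuilder()
        .setType(ChecksumTypeProto.CHECKSUM_CRC32C)
        .setBytesPerChecksum(512)
        .build();

    ReadOpChecksumInfoProto info = ReadOpChecksumInfoProto.newBuilder()
        .setChecksum(cs)
        .setChunkOffset(0L)
        .build();

    // toByteArray() calls getSerializedSize() and then writeTo(...).
    byte[] wire = info.toByteArray();

    // Round trip: parsing the bytes yields an equal message.
    ReadOpChecksumInfoProto parsed = ReadOpChecksumInfoProto.parseFrom(wire);
    System.out.println(parsed.getChunkOffset()); // 0
  }
}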
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto) obj;

  boolean result = true;
  // Two messages are equal only if each field has the same presence bit
  // and, when present, the same value.
  result = result && (hasChecksum() == other.hasChecksum());
  if (hasChecksum()) {
    result = result && getChecksum().equals(other.getChecksum());
  }
  result = result && (hasChunkOffset() == other.hasChunkOffset());
  if (hasChunkOffset()) {
    result = result && (getChunkOffset() == other.getChunkOffset());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
@java.lang.Override
public int hashCode() {
  // Memoized: 0 means "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasChecksum()) {
    hash = (37 * hash) + CHECKSUM_FIELD_NUMBER;
    hash = (53 * hash) + getChecksum().hashCode();
  }
  if (hasChunkOffset()) {
    hash = (37 * hash) + CHUNKOFFSET_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getChunkOffset());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
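// Usage sketch (illustrative, reusing the types from the example above):
// hashCode() folds in exactly what equals() compares -- field presence,
// field numbers, values, and unknown fields -- so equal messages always
// hash alike and can serve as HashMap/HashSet keys.
ChecksumProto cs = ChecksumProto.newBuilder()
    .setType(ChecksumTypeProto.CHECKSUM_CRC32C)
    .setBytesPerChecksum(512)
    .build();
ReadOpChecksumInfoProto a = ReadOpChecksumInfoProto.newBuilder()
    .setChecksum(cs).setChunkOffset(4096L).build();
ReadOpChecksumInfoProto b = ReadOpChecksumInfoProto.newBuilder()
    .setChecksum(cs).setChunkOffset(4096L).build();
System.out.println(a.equals(b));                  // true
System.out.println(a.hashCode() == b.hashCode()); // true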
public int getSerializedSize() {
  // Same logic as above, but compiled against the relocated ("shaded")
  // protobuf runtime under io.prestosql.hadoop.$internal.
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, checksum_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream
        .computeUInt64Size(2, chunkOffset_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto other) {
  if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
    return this;
  }
  // Message-typed fields merge recursively; scalar fields set on `other`
  // overwrite the current value.
  if (other.hasChecksum()) {
    mergeChecksum(other.getChecksum());
  }
  if (other.hasChunkOffset()) {
    setChunkOffset(other.getChunkOffset());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
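// Usage sketch (illustrative): the asymmetry above is the standard
// protobuf merge rule -- mergeChecksum(...) combines submessages field by
// field, while setChunkOffset(...) lets a scalar set on `other` replace
// the current value. buildPartial() sidesteps the initialization check so
// `update` can carry a single field.
ReadOpChecksumInfoProto base = ReadOpChecksumInfoProto.newBuilder()
    .setChecksum(cs)            // ChecksumProto as in the first sketch
    .setChunkOffset(0L)
    .build();
ReadOpChecksumInfoProto update = ReadOpChecksumInfoProto.newBuilder()
    .setChunkOffset(8192L)
    .buildPartial();            // skips the required-field check

ReadOpChecksumInfoProto merged = base.toBuilder().mergeFrom(update).build();
System.out.println(merged.getChunkOffset()); // 8192 -- set scalar wins
System.out.println(merged.hasChecksum());    // true -- untouched field kept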
public void writeTo(io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, checksum_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeUInt64(2, chunkOffset_);
  }
  getUnknownFields().writeTo(output);
}
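// Usage sketch (illustrative): the shaded and stock classes serialize the
// same .proto definition, so the wire bytes are identical either way. On
// the HDFS data-transfer wire the message travels embedded in a
// length-delimited response; the generated delimited helpers show the shape:
ReadOpChecksumInfoProto msg = ReadOpChecksumInfoProto.newBuilder()
    .setChecksum(cs)            // ChecksumProto as in the first sketch
    .setChunkOffset(1024L)
    .build();

java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
msg.writeDelimitedTo(out);      // varint length prefix, then writeTo(...) bytes

ReadOpChecksumInfoProto back = ReadOpChecksumInfoProto.parseDelimitedFrom(
    new java.io.ByteArrayInputStream(out.toByteArray()));
System.out.println(back.getChunkOffset()); // 1024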