/**
 * Merges every field that is set on {@code other} into this builder.
 *
 * <p>The {@code checksum} message field is merged via {@link #mergeChecksum};
 * the scalar {@code chunkOffset} is simply overwritten. Fields not set on
 * {@code other} leave this builder unchanged, and merging the default
 * instance is a no-op. Unknown fields are carried over as well.
 *
 * @param other the message whose set fields are folded into this builder
 * @return this builder, to allow call chaining
 */
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto other) {
  if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasChecksum()) {
    mergeChecksum(other.getChecksum());
  }
  if (other.hasChunkOffset()) {
    setChunkOffset(other.getChunkOffset());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Copies the populated fields of {@code other} into this builder.
 *
 * <p>Merging the default instance does nothing. Otherwise: the
 * {@code checksum} sub-message is combined through {@link #mergeChecksum},
 * the {@code chunkOffset} scalar is replaced when present, and any unknown
 * fields on {@code other} are absorbed.
 *
 * @param other source message; only its set fields affect this builder
 * @return this builder, for fluent chaining
 */
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto other) {
  if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasChecksum()) {
    mergeChecksum(other.getChecksum());
  }
  if (other.hasChunkOffset()) {
    setChunkOffset(other.getChunkOffset());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Folds the set fields of {@code other} into this builder.
 *
 * <p>No-op when {@code other} is the default instance. The message-typed
 * {@code checksum} field is merged via {@link #mergeChecksum}; the scalar
 * {@code chunkOffset} field is overwritten if set. Unknown fields from
 * {@code other} are merged in before returning.
 *
 * @param other the message to merge from
 * @return this builder, enabling chained calls
 */
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto other) {
  if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasChecksum()) {
    mergeChecksum(other.getChecksum());
  }
  if (other.hasChunkOffset()) {
    setChunkOffset(other.getChunkOffset());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}