/**
 * Serializes this ExportRequest message to the given protobuf output stream.
 *
 * <p>Generated protobuf code: fields are written in ascending tag order (1..6),
 * and each optional field is emitted only when its presence bit is set in
 * {@code bitField0_}. Do not hand-edit; regenerate from the .proto instead.
 *
 * @param output the protobuf coded output stream to write the wire bytes to
 * @throws java.io.IOException if the underlying stream fails
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Must be called before writing: memoizes the message size so nested
  // message fields can be length-prefixed correctly by the runtime.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    // field 1: scan (message)
    output.writeMessage(1, scan_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    // field 2: outputPath (string, written as bytes)
    output.writeBytes(2, getOutputPathBytes());
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    // field 3: compressed (bool)
    output.writeBool(3, compressed_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    // field 4: compressType (string, written as bytes)
    output.writeBytes(4, getCompressTypeBytes());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    // field 5: compressCodec (string, written as bytes)
    output.writeBytes(5, getCompressCodecBytes());
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    // field 6: fsToken (message)
    output.writeMessage(6, fsToken_);
  }
  // Preserve any fields that were on the wire but unknown to this schema.
  getUnknownFields().writeTo(output);
}
.computeMessageSize(6, fsToken_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
hash = (53 * hash) + getFsToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash;
getUnknownFields().equals(other.getUnknownFields()); return result;
getUnknownFields().equals(other.getUnknownFields()); return result;
hash = (53 * hash) + getFsToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash;
hash = (53 * hash) + getFsToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash;
.computeMessageSize(6, fsToken_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
.computeMessageSize(6, fsToken_); size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size;
/**
 * Merges the set fields of {@code other} into this builder (protobuf
 * merge semantics: scalar/string fields from {@code other} overwrite,
 * message fields are recursively merged, unset fields are left alone).
 *
 * <p>Generated protobuf code. Do not hand-edit; regenerate from the .proto.
 *
 * @param other the ExportRequest to merge from; the default instance is a no-op
 * @return this builder, for chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) {
  // Merging the default instance cannot change anything; skip the work.
  if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) return this;
  if (other.hasScan()) {
    // Message field: recursive merge rather than replace.
    mergeScan(other.getScan());
  }
  if (other.hasOutputPath()) {
    // String field: copy the backing value directly and set its has-bit
    // (avoids a decode/encode round trip through the String accessor).
    bitField0_ |= 0x00000002;
    outputPath_ = other.outputPath_;
    onChanged();
  }
  if (other.hasCompressed()) {
    setCompressed(other.getCompressed());
  }
  if (other.hasCompressType()) {
    bitField0_ |= 0x00000008;
    compressType_ = other.compressType_;
    onChanged();
  }
  if (other.hasCompressCodec()) {
    bitField0_ |= 0x00000010;
    compressCodec_ = other.compressCodec_;
    onChanged();
  }
  if (other.hasFsToken()) {
    // Message field: recursive merge rather than replace.
    mergeFsToken(other.getFsToken());
  }
  // Carry over unrecognized wire fields so they survive a round trip.
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges the set fields of {@code other} into this builder (protobuf
 * merge semantics: scalar/string fields from {@code other} overwrite,
 * message fields are recursively merged, unset fields are left alone).
 *
 * <p>Generated protobuf code. Do not hand-edit; regenerate from the .proto.
 *
 * @param other the ExportRequest to merge from; the default instance is a no-op
 * @return this builder, for chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) {
  // Merging the default instance cannot change anything; skip the work.
  if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) return this;
  if (other.hasScan()) {
    // Message field: recursive merge rather than replace.
    mergeScan(other.getScan());
  }
  if (other.hasOutputPath()) {
    // String field: copy the backing value directly and set its has-bit
    // (avoids a decode/encode round trip through the String accessor).
    bitField0_ |= 0x00000002;
    outputPath_ = other.outputPath_;
    onChanged();
  }
  if (other.hasCompressed()) {
    setCompressed(other.getCompressed());
  }
  if (other.hasCompressType()) {
    bitField0_ |= 0x00000008;
    compressType_ = other.compressType_;
    onChanged();
  }
  if (other.hasCompressCodec()) {
    bitField0_ |= 0x00000010;
    compressCodec_ = other.compressCodec_;
    onChanged();
  }
  if (other.hasFsToken()) {
    // Message field: recursive merge rather than replace.
    mergeFsToken(other.getFsToken());
  }
  // Carry over unrecognized wire fields so they survive a round trip.
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Serializes this ExportRequest message to the given protobuf output stream.
 *
 * <p>Generated protobuf code: fields are written in ascending tag order (1..6),
 * and each optional field is emitted only when its presence bit is set in
 * {@code bitField0_}. Do not hand-edit; regenerate from the .proto instead.
 *
 * @param output the protobuf coded output stream to write the wire bytes to
 * @throws java.io.IOException if the underlying stream fails
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Must be called before writing: memoizes the message size so nested
  // message fields can be length-prefixed correctly by the runtime.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    // field 1: scan (message)
    output.writeMessage(1, scan_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    // field 2: outputPath (string, written as bytes)
    output.writeBytes(2, getOutputPathBytes());
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    // field 3: compressed (bool)
    output.writeBool(3, compressed_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    // field 4: compressType (string, written as bytes)
    output.writeBytes(4, getCompressTypeBytes());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    // field 5: compressCodec (string, written as bytes)
    output.writeBytes(5, getCompressCodecBytes());
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    // field 6: fsToken (message)
    output.writeMessage(6, fsToken_);
  }
  // Preserve any fields that were on the wire but unknown to this schema.
  getUnknownFields().writeTo(output);
}
/**
 * Serializes this ExportRequest message to the given protobuf output stream.
 *
 * <p>Generated protobuf code: fields are written in ascending tag order (1..6),
 * and each optional field is emitted only when its presence bit is set in
 * {@code bitField0_}. Do not hand-edit; regenerate from the .proto instead.
 *
 * @param output the protobuf coded output stream to write the wire bytes to
 * @throws java.io.IOException if the underlying stream fails
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  // Must be called before writing: memoizes the message size so nested
  // message fields can be length-prefixed correctly by the runtime.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    // field 1: scan (message)
    output.writeMessage(1, scan_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    // field 2: outputPath (string, written as bytes)
    output.writeBytes(2, getOutputPathBytes());
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    // field 3: compressed (bool)
    output.writeBool(3, compressed_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    // field 4: compressType (string, written as bytes)
    output.writeBytes(4, getCompressTypeBytes());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    // field 5: compressCodec (string, written as bytes)
    output.writeBytes(5, getCompressCodecBytes());
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    // field 6: fsToken (message)
    output.writeMessage(6, fsToken_);
  }
  // Preserve any fields that were on the wire but unknown to this schema.
  getUnknownFields().writeTo(output);
}
getUnknownFields().equals(other.getUnknownFields()); return result;
/**
 * Merges the set fields of {@code other} into this builder (protobuf
 * merge semantics: scalar/string fields from {@code other} overwrite,
 * message fields are recursively merged, unset fields are left alone).
 *
 * <p>Generated protobuf code. Do not hand-edit; regenerate from the .proto.
 *
 * @param other the ExportRequest to merge from; the default instance is a no-op
 * @return this builder, for chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) {
  // Merging the default instance cannot change anything; skip the work.
  if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) return this;
  if (other.hasScan()) {
    // Message field: recursive merge rather than replace.
    mergeScan(other.getScan());
  }
  if (other.hasOutputPath()) {
    // String field: copy the backing value directly and set its has-bit
    // (avoids a decode/encode round trip through the String accessor).
    bitField0_ |= 0x00000002;
    outputPath_ = other.outputPath_;
    onChanged();
  }
  if (other.hasCompressed()) {
    setCompressed(other.getCompressed());
  }
  if (other.hasCompressType()) {
    bitField0_ |= 0x00000008;
    compressType_ = other.compressType_;
    onChanged();
  }
  if (other.hasCompressCodec()) {
    bitField0_ |= 0x00000010;
    compressCodec_ = other.compressCodec_;
    onChanged();
  }
  if (other.hasFsToken()) {
    // Message field: recursive merge rather than replace.
    mergeFsToken(other.getFsToken());
  }
  // Carry over unrecognized wire fields so they survive a round trip.
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}