/**
 * Assembles the {@link ExportProtos.ExportRequest} sent to the Export coprocessor.
 * <p>
 * Compression settings are read from the job {@link Configuration} using the
 * standard {@code FileOutputFormat} keys, falling back to {@code DEFAULT_TYPE}
 * and {@code DEFAULT_CODEC}. When a delegation token is supplied it is
 * serialized into the request's {@code fsToken} field so the server side can
 * authenticate filesystem access on the caller's behalf.
 *
 * @param conf      job configuration holding the compression options
 * @param dir       output directory written into the request verbatim
 * @param scan      scan describing the rows to export
 * @param userToken filesystem delegation token, or {@code null} when security
 *                  is disabled (the {@code fsToken} field is then left unset)
 * @return a fully populated export request
 * @throws IOException if the scan cannot be converted to its protobuf form
 */
private static ExportProtos.ExportRequest getConfiguredRequest(Configuration conf,
    Path dir, final Scan scan, final Token<?> userToken) throws IOException {
  final boolean compressed = conf.getBoolean(FileOutputFormat.COMPRESS, false);
  final String type = conf.get(FileOutputFormat.COMPRESS_TYPE, DEFAULT_TYPE.toString());
  final String codec = conf.get(FileOutputFormat.COMPRESS_CODEC, DEFAULT_CODEC.getName());
  LOG.info("compressed=" + compressed
      + ", compression type=" + type
      + ", compression codec=" + codec
      + ", userToken=" + userToken);
  ExportProtos.ExportRequest.Builder builder = ExportProtos.ExportRequest.newBuilder()
      .setScan(ProtobufUtil.toScan(scan))
      .setOutputPath(dir.toString())
      .setCompressed(compressed)
      .setCompressCodec(codec)
      .setCompressType(type);
  if (userToken != null) {
    // Token's identity, credential, kind and service travel inside the request.
    builder.setFsToken(DelegationToken.newBuilder()
        .setIdentifier(ByteStringer.wrap(userToken.getIdentifier()))
        .setPassword(ByteStringer.wrap(userToken.getPassword()))
        .setKind(userToken.getKind().toString())
        .setService(userToken.getService().toString())
        .build());
  }
  return builder.build();
}
if (hasFsToken()) { result = result && getFsToken() .equals(other.getFsToken());
/**
 * Protobuf-generated {@code Builder.clear()}: resets every field of this
 * message builder to its default and drops all has-bits.
 * <p>
 * Field-by-field, in declaration order:
 * <ul>
 *   <li>{@code region} (bit 0x01): reset to the {@code RegionSpecifier}
 *       default instance, or cleared via its sub-builder when one exists.</li>
 *   <li>{@code familyPath} (bit 0x02): repeated field reset to an empty list,
 *       or cleared via its repeated-field builder.</li>
 *   <li>{@code assignSeqNum} (bit 0x04): reset to {@code false}.</li>
 *   <li>{@code fsToken} (bit 0x08): reset to the {@code DelegationToken}
 *       default instance, or cleared via its sub-builder.</li>
 *   <li>{@code bulkToken} (bit 0x10): reset to the empty string.</li>
 *   <li>{@code copyFile} (bit 0x20): reset to {@code false}.</li>
 * </ul>
 * NOTE(review): generated code (protoc) — do not hand-edit; regenerate from
 * the .proto definition instead.
 *
 * @return {@code this}, for chaining
 */
public Builder clear() { super.clear(); if (regionBuilder_ == null) { region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); } else { regionBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (familyPathBuilder_ == null) { familyPath_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { familyPathBuilder_.clear(); } assignSeqNum_ = false; bitField0_ = (bitField0_ & ~0x00000004); if (fsTokenBuilder_ == null) { fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); } else { fsTokenBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000008); bulkToken_ = ""; bitField0_ = (bitField0_ & ~0x00000010); copyFile_ = false; bitField0_ = (bitField0_ & ~0x00000020); return this; }
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = fsToken_.toBuilder();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = fsToken_.toBuilder();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000020) == 0x00000020)) { subBuilder = fsToken_.toBuilder();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000020) == 0x00000020)) { subBuilder = fsToken_.toBuilder();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = fsToken_.toBuilder();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = fsToken_.toBuilder();
if (hasFsToken()) { result = result && getFsToken() .equals(other.getFsToken());
DelegationToken.newBuilder().build(); if(userToken != null) { protoDT = DelegationToken.newBuilder() .setIdentifier(ByteStringer.wrap(userToken.getIdentifier())) .setPassword(ByteStringer.wrap(userToken.getPassword()))
DelegationToken.newBuilder().build(); if(userToken != null) { protoDT = DelegationToken.newBuilder() .setIdentifier(ByteStringer.wrap(userToken.getIdentifier())) .setPassword(ByteStringer.wrap(userToken.getPassword()))
if (hasFsToken()) { result = result && getFsToken() .equals(other.getFsToken());
if (hasFsToken()) { result = result && getFsToken() .equals(other.getFsToken());
if (hasFsToken()) { result = result && getFsToken() .equals(other.getFsToken());
hash = (53 * hash) + getFsToken().hashCode();
hash = (53 * hash) + getFsToken().hashCode();
hash = (53 * hash) + getFsToken().hashCode();
/**
 * Builds the {@link ExportProtos.ExportRequest} for the Export coprocessor:
 * reads compression settings from {@code conf} (via the {@code FileOutputFormat}
 * keys, defaulting to {@code DEFAULT_TYPE}/{@code DEFAULT_CODEC}), converts the
 * scan to its protobuf form, and — when {@code userToken} is non-null — embeds
 * the delegation token (identifier, password, kind, service) as {@code fsToken}.
 * <p>
 * NOTE(review): this method appears to be a byte-identical duplicate of an
 * earlier {@code getConfiguredRequest} in this source — confirm whether these
 * are two copies in one class (would not compile) or fragments from two files,
 * and deduplicate if possible.
 *
 * @param conf      job configuration carrying the compression options
 * @param dir       export output directory, stored in the request as a string
 * @param scan      scan selecting the rows to export
 * @param userToken filesystem delegation token; may be {@code null}, in which
 *                  case {@code fsToken} is left unset
 * @return the populated request
 * @throws IOException if scan-to-protobuf conversion fails
 */
private static ExportProtos.ExportRequest getConfiguredRequest(Configuration conf, Path dir, final Scan scan, final Token<?> userToken) throws IOException { boolean compressed = conf.getBoolean(FileOutputFormat.COMPRESS, false); String compressionType = conf.get(FileOutputFormat.COMPRESS_TYPE, DEFAULT_TYPE.toString()); String compressionCodec = conf.get(FileOutputFormat.COMPRESS_CODEC, DEFAULT_CODEC.getName()); DelegationToken protoToken = null; if (userToken != null) { protoToken = DelegationToken.newBuilder() .setIdentifier(ByteStringer.wrap(userToken.getIdentifier())) .setPassword(ByteStringer.wrap(userToken.getPassword())) .setKind(userToken.getKind().toString()) .setService(userToken.getService().toString()).build(); } LOG.info("compressed=" + compressed + ", compression type=" + compressionType + ", compression codec=" + compressionCodec + ", userToken=" + userToken); ExportProtos.ExportRequest.Builder builder = ExportProtos.ExportRequest.newBuilder() .setScan(ProtobufUtil.toScan(scan)) .setOutputPath(dir.toString()) .setCompressed(compressed) .setCompressCodec(compressionCodec) .setCompressType(compressionType); if (protoToken != null) { builder.setFsToken(protoToken); } return builder.build(); }
hash = (53 * hash) + getFsToken().hashCode();