@Override
public void export(RpcController controller, ExportProtos.ExportRequest request,
    RpcCallback<ExportProtos.ExportResponse> done) {
  Region region = env.getRegion();
  Configuration conf = HBaseConfiguration.create(env.getConfiguration());
  // Register ResultSerialization so the writer can serialize scan Results.
  conf.setStrings("io.serializations", conf.get("io.serializations"),
      ResultSerialization.class.getName());
  try {
    Scan scan = validateKey(region.getRegionInfo(), request);
    Token userToken = null;
    if (userProvider.isHadoopSecurityEnabled() && !request.hasFsToken()) {
      LOG.warn("Hadoop security is enabled, but no user token was found in the request");
    } else if (userProvider.isHadoopSecurityEnabled()) {
      // Rebuild the filesystem delegation token passed in by the client.
      userToken = new Token(request.getFsToken().getIdentifier().toByteArray(),
          request.getFsToken().getPassword().toByteArray(),
          new Text(request.getFsToken().getKind()),
          new Text(request.getFsToken().getService()));
    }
    ExportProtos.ExportResponse response = processData(region, conf, userProvider, scan,
        userToken, getWriterOptions(conf, region.getRegionInfo(), request));
    done.run(response);
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
    LOG.error(e.toString(), e);
  }
}
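// A minimal client-side sketch (not part of the source above) of how this
// endpoint is typically invoked: one export() RPC per region, fanned out via
// Table#coprocessorService. The table name, output path, and the empty scan
// are illustrative assumptions; error handling and security setup are elided.
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ExportProtos;

public final class ExportClientSketch {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("demo"))) { // hypothetical table
      ExportProtos.ExportRequest request = ExportProtos.ExportRequest.newBuilder()
          .setScan(ClientProtos.Scan.getDefaultInstance()) // full-table scan
          .setOutputPath("/tmp/export-demo")               // hypothetical output dir
          .build();
      // One RPC per region; responses come back keyed by region start key.
      Map<byte[], ExportProtos.ExportResponse> responses = table.coprocessorService(
          ExportProtos.ExportService.class, null, null,
          service -> {
            CoprocessorRpcUtils.BlockingRpcCallback<ExportProtos.ExportResponse> callback =
                new CoprocessorRpcUtils.BlockingRpcCallback<>();
            service.export(null, request, callback);
            return callback.get();
          });
      responses.values().forEach(System.out::println);
    }
  }
}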
hash = (53 * hash) + getFsToken().hashCode();
result = result && getFsToken().equals(other.getFsToken());
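// The two one-line fragments above come from the generated hashCode() and
// equals() of ExportProtos.ExportRequest. A hedged reconstruction of the
// surrounding generated pattern follows; member names such as memoizedHashCode
// and FS_TOKEN_FIELD_NUMBER follow protobuf-java's standard codegen and are
// assumed, not taken from this excerpt.
@Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasFsToken()) {
    hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getFsToken().hashCode();
  }
  // ... the other fields are folded in the same way ...
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

@Override
public boolean equals(final Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof ExportProtos.ExportRequest)) {
    return super.equals(obj);
  }
  ExportProtos.ExportRequest other = (ExportProtos.ExportRequest) obj;
  boolean result = true;
  result = result && (hasFsToken() == other.hasFsToken());
  if (hasFsToken()) {
    result = result && getFsToken().equals(other.getFsToken());
  }
  // ... the other fields are compared the same way ...
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}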
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) {
    return this;
  }
  // Message fields are merged recursively; scalar fields overwrite.
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasOutputPath()) {
    bitField0_ |= 0x00000002;
    outputPath_ = other.outputPath_;
    onChanged();
  }
  if (other.hasCompressed()) {
    setCompressed(other.getCompressed());
  }
  if (other.hasCompressType()) {
    bitField0_ |= 0x00000008;
    compressType_ = other.compressType_;
    onChanged();
  }
  if (other.hasCompressCodec()) {
    bitField0_ |= 0x00000010;
    compressCodec_ = other.compressCodec_;
    onChanged();
  }
  if (other.hasFsToken()) {
    mergeFsToken(other.getFsToken());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
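// A hedged usage sketch of the merge semantics implemented above: only fields
// that `other` has set are copied, message fields merge field-by-field, and
// unset fields are left untouched. Setter names are assumed from the generated
// builder; the paths are illustrative.
ExportProtos.ExportRequest base = ExportProtos.ExportRequest.newBuilder()
    .setScan(ClientProtos.Scan.getDefaultInstance())
    .setOutputPath("/tmp/base")          // hypothetical path
    .build();
ExportProtos.ExportRequest overlay = ExportProtos.ExportRequest.newBuilder()
    .setOutputPath("/tmp/overlay")       // hypothetical path
    .setCompressed(true)
    .build();
// outputPath is overwritten, compressed is picked up, and scan survives
// from `base` because `overlay` never set it.
ExportProtos.ExportRequest merged = base.toBuilder().mergeFrom(overlay).build();
assert merged.getOutputPath().equals("/tmp/overlay");
assert merged.getCompressed();
assert merged.hasScan();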