public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest buildPartial() {
  org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result =
      new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  // Copy each field and carry its has-bit across; the bit layout matches writeTo() below.
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; }
  result.scan_ = scanBuilder_ == null ? scan_ : scanBuilder_.build();
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; }
  result.outputPath_ = outputPath_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; }
  result.compressed_ = compressed_;
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; }
  result.compressType_ = compressType_;
  if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; }
  result.compressCodec_ = compressCodec_;
  if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; }
  result.fsToken_ = fsTokenBuilder_ == null ? fsToken_ : fsTokenBuilder_.build();
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other =
      (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) obj;

  boolean result = true;
  result = result && (hasScan() == other.hasScan());
  if (hasScan()) {
    result = result && getScan().equals(other.getScan());
  }
  result = result && (hasOutputPath() == other.hasOutputPath());
  if (hasOutputPath()) {
    result = result && getOutputPath().equals(other.getOutputPath());
  }
  result = result && (hasCompressed() == other.hasCompressed());
  if (hasCompressed()) {
    result = result && (getCompressed() == other.getCompressed());
  }
  result = result && (hasCompressType() == other.hasCompressType());
  if (hasCompressType()) {
    result = result && getCompressType().equals(other.getCompressType());
  }
  result = result && (hasCompressCodec() == other.hasCompressCodec());
  if (hasCompressCodec()) {
    result = result && getCompressCodec().equals(other.getCompressCodec());
  }
  result = result && (hasFsToken() == other.hasFsToken());
  if (hasFsToken()) {
    result = result && getFsToken().equals(other.getFsToken());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  // Field numbers and wire types match writeTo() below.
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, scan_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, getOutputPathBytes());
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(3, compressed_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(4, getCompressTypeBytes());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(5, getCompressCodecBytes());
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(6, fsToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasScan()) {
    hash = (37 * hash) + SCAN_FIELD_NUMBER;
    hash = (53 * hash) + getScan().hashCode();
  }
  if (hasOutputPath()) {
    hash = (37 * hash) + OUTPUTPATH_FIELD_NUMBER;
    hash = (53 * hash) + getOutputPath().hashCode();
  }
  if (hasCompressed()) {
    hash = (37 * hash) + COMPRESSED_FIELD_NUMBER;
    hash = (53 * hash) + hashBoolean(getCompressed());
  }
  if (hasCompressType()) {
    hash = (37 * hash) + COMPRESSTYPE_FIELD_NUMBER;
    hash = (53 * hash) + getCompressType().hashCode();
  }
  if (hasCompressCodec()) {
    hash = (37 * hash) + COMPRESSCODEC_FIELD_NUMBER;
    hash = (53 * hash) + getCompressCodec().hashCode();
  }
  if (hasFsToken()) {
    hash = (37 * hash) + FSTOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getFsToken().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
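// --- Sketch, not part of the generated file: demonstrates the equals()/hashCode()
// contract implemented above. The newBuilder()/setter names are the standard
// protobuf-generated ones and are assumed here; buildPartial() is used so the
// example does not trip required-field validation.
private static void demoEqualsHashCodeContract() {
  org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest a =
      org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.newBuilder()
          .setOutputPath("/tmp/export")
          .setCompressed(true)
          .buildPartial();
  org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest b =
      org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.newBuilder()
          .setOutputPath("/tmp/export")
          .setCompressed(true)
          .buildPartial();
  // Field-wise equality implies equal messages and equal (memoized) hash codes.
  assert a.equals(b) && a.hashCode() == b.hashCode();
}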
@Override
public void export(RpcController controller, ExportProtos.ExportRequest request,
    RpcCallback<ExportProtos.ExportResponse> done) {
  Region region = env.getRegion();
  Configuration conf = HBaseConfiguration.create(env.getConfiguration());
  conf.setStrings("io.serializations", conf.get("io.serializations"),
      ResultSerialization.class.getName());
  try {
    Scan scan = validateKey(region.getRegionInfo(), request);
    Token userToken = null;
    if (userProvider.isHadoopSecurityEnabled() && !request.hasFsToken()) {
      LOG.warn("Hadoop security is enabled, but no user token was found in the request");
    } else if (userProvider.isHadoopSecurityEnabled()) {
      userToken = new Token(request.getFsToken().getIdentifier().toByteArray(),
          request.getFsToken().getPassword().toByteArray(),
          new Text(request.getFsToken().getKind()),
          new Text(request.getFsToken().getService()));
    }
    ExportProtos.ExportResponse response = processData(region, conf, userProvider, scan,
        userToken, getWriterOptions(conf, region.getRegionInfo(), request));
    done.run(response);
  } catch (IOException e) {
    CoprocessorRpcUtils.setControllerException(controller, e);
    LOG.error(e.toString(), e);
  }
}
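// --- Sketch, not part of the endpoint: one way a client could invoke the export()
// RPC above on every region of a table. Assumes the generated service class is
// ExportProtos.ExportService and that `request` was built elsewhere; the
// coprocessorService/ServerRpcController/BlockingRpcCallback APIs are standard HBase.
private static Map<byte[], ExportProtos.ExportResponse> callExport(
    org.apache.hadoop.hbase.client.Table table,
    final ExportProtos.ExportRequest request) throws Throwable {
  return table.coprocessorService(ExportProtos.ExportService.class,
      null, null, // null start/end keys select the full table range
      new org.apache.hadoop.hbase.client.coprocessor.Batch.Call<
          ExportProtos.ExportService, ExportProtos.ExportResponse>() {
        @Override
        public ExportProtos.ExportResponse call(ExportProtos.ExportService service)
            throws IOException {
          org.apache.hadoop.hbase.ipc.ServerRpcController controller =
              new org.apache.hadoop.hbase.ipc.ServerRpcController();
          CoprocessorRpcUtils.BlockingRpcCallback<ExportProtos.ExportResponse> callback =
              new CoprocessorRpcUtils.BlockingRpcCallback<>();
          service.export(controller, request, callback);
          if (controller.failedOnException()) {
            throw controller.getFailedOn(); // surface the server-side IOException
          }
          return callback.get();
        }
      });
}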
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasOutputPath()) {
    bitField0_ |= 0x00000002;
    outputPath_ = other.outputPath_;
    onChanged();
  }
  if (other.hasCompressed()) {
    setCompressed(other.getCompressed());
  }
  if (other.hasCompressType()) {
    bitField0_ |= 0x00000008;
    compressType_ = other.compressType_;
    onChanged();
  }
  if (other.hasCompressCodec()) {
    bitField0_ |= 0x00000010;
    compressCodec_ = other.compressCodec_;
    onChanged();
  }
  if (other.hasFsToken()) {
    mergeFsToken(other.getFsToken());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, scan_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, getOutputPathBytes());
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeBool(3, compressed_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeBytes(4, getCompressTypeBytes());
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeBytes(5, getCompressCodecBytes());
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    output.writeMessage(6, fsToken_);
  }
  getUnknownFields().writeTo(output);
}
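// --- Sketch: round-trips a fully initialized request through the wire format that
// writeTo() above produces. toByteArray() and parseFrom(byte[]) are the standard
// generated helpers (toByteArray() calls getSerializedSize(), then writeTo()).
private static void demoRoundTrip(ExportProtos.ExportRequest request)
    throws com.google.protobuf.InvalidProtocolBufferException {
  byte[] wire = request.toByteArray();
  ExportProtos.ExportRequest parsed = ExportProtos.ExportRequest.parseFrom(wire);
  // Unknown fields survive the round trip, so equals() holds for the pair.
  assert parsed.equals(request);
}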
private static CompressionCodec getCompressionCodec(final Configuration conf,
    final ExportProtos.ExportRequest request) {
  try {
    Class<? extends CompressionCodec> codecClass;
    if (request.hasCompressCodec()) {
      codecClass = conf.getClassByName(request.getCompressCodec())
          .asSubclass(CompressionCodec.class);
    } else {
      codecClass = DEFAULT_CODEC;
    }
    return ReflectionUtils.newInstance(codecClass, conf);
  } catch (ClassNotFoundException e) {
    throw new IllegalArgumentException("Compression codec " + request.getCompressCodec()
        + " was not found.", e);
  }
}
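// --- Sketch: the lookup above resolves a codec class name carried in the request.
// GzipCodec ships with Hadoop, so this example resolves without extra configuration;
// buildPartial() skips required-field checks for the sake of the illustration.
private static CompressionCodec demoGzipLookup() {
  Configuration conf = new Configuration();
  ExportProtos.ExportRequest req = ExportProtos.ExportRequest.newBuilder()
      .setCompressCodec("org.apache.hadoop.io.compress.GzipCodec")
      .buildPartial();
  return getCompressionCodec(conf, req); // returns a GzipCodec instance
}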
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method descriptor for wrong service type.");
  }
  switch (method.getIndex()) {
    case 0:
      return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}