// NOTE(review): truncated fragment of BulkDeleteEndpoint.delete(...) — the method
// signature precedes this excerpt and the body continues beyond it (the trailing
// 'if' blocks are unterminated here). Code left byte-identical; comments only.
long totalVersionsDeleted = 0L;
// Region this coprocessor endpoint is running against.
Region region = env.getRegion();
// Max rows buffered per delete batch, as requested by the client RPC.
int rowBatchSize = request.getRowBatchSize();
// Optional timestamp restriction: null means "no timestamp given" on the request.
Long timestamp = null;
if (request.hasTimestamp()) {
timestamp = request.getTimestamp();
DeleteType deleteType = request.getDeleteType();
boolean hasMore = true;
RegionScanner scanner = null;
try {
// Rebuild the client's Scan from its protobuf form.
Scan scan = ProtobufUtil.toScan(request.getScan());
// presumably a fast path for full-row deletes with no filter — TODO confirm
// against the continuation of this method, which is outside this excerpt.
if (scan.getFilter() == null && deleteType == DeleteType.ROW) {
/**
 * Builds a BulkDeleteRequest from this builder's current state without
 * enforcing required fields (generated protobuf 2.x builder method).
 *
 * NOTE(review): auto-generated by protoc — regenerate from the .proto file
 * rather than hand-editing. Bit positions: 0=scan, 1=deleteType, 2=timestamp,
 * 3=rowBatchSize; has-bits are copied from the builder into the message.
 */
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
// Bit 0: scan. Take either the raw field or the nested builder's built message.
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (scanBuilder_ == null) {
result.scan_ = scan_;
} else {
result.scan_ = scanBuilder_.build();
}
// Bit 1: deleteType (enum).
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.deleteType_ = deleteType_;
// Bit 2: timestamp (optional uint64).
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.timestamp_ = timestamp_;
// Bit 3: rowBatchSize (uint32).
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.rowBatchSize_ = rowBatchSize_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
/**
 * Returns the prototype (default instance) of the request message for the
 * given service method. This service exposes a single method, so only
 * method index 0 is valid.
 *
 * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
 *         different service.
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
  }
  // Sole method on this service: delete (index 0).
  if (method.getIndex() == 0) {
    return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
  }
  throw new java.lang.AssertionError("Can't get here.");
}
// NOTE(review): duplicate of the truncated BulkDeleteEndpoint.delete(...)
// fragment seen earlier in this chunk; begins mid-method and the trailing 'if'
// is unterminated here. Code left byte-identical; comments only.
long totalVersionsDeleted = 0L;
// Region this coprocessor endpoint is running against.
Region region = env.getRegion();
// Max rows buffered per delete batch, as requested by the client RPC.
int rowBatchSize = request.getRowBatchSize();
// Optional timestamp restriction: null means "no timestamp given" on the request.
Long timestamp = null;
if (request.hasTimestamp()) {
timestamp = request.getTimestamp();
DeleteType deleteType = request.getDeleteType();
boolean hasMore = true;
RegionScanner scanner = null;
try {
// Rebuild the client's Scan from its protobuf form.
Scan scan = ProtobufUtil.toScan(request.getScan());
// presumably a fast path for full-row deletes with no filter — TODO confirm
// against the continuation of this method, which is outside this excerpt.
if (scan.getFilter() == null && deleteType == DeleteType.ROW) {
// NOTE(review): garbled interior of the generated equals(Object) method — the
// method header precedes this excerpt and the closing braces of the 'if'
// blocks were lost in extraction (braces are unbalanced as shown). Code left
// byte-identical; comments only. Field-by-field comparison guarded by
// presence bits, in declaration order: scan, deleteType, timestamp,
// rowBatchSize, then unknown fields.
result = result && (hasScan() == other.hasScan());
if (hasScan()) {
result = result && getScan()
.equals(other.getScan());
result = result && (hasDeleteType() == other.hasDeleteType());
if (hasDeleteType()) {
result = result && (getDeleteType() == other.getDeleteType());
result = result && (hasTimestamp() == other.hasTimestamp());
if (hasTimestamp()) {
result = result && (getTimestamp() == other.getTimestamp());
result = result && (hasRowBatchSize() == other.hasRowBatchSize());
if (hasRowBatchSize()) {
result = result && (getRowBatchSize() == other.getRowBatchSize());
getUnknownFields().equals(other.getUnknownFields());
return result;
// NOTE(review): duplicate of the garbled equals(Object) interior above —
// header and closing braces missing from this view. Code left byte-identical;
// comments only. Compares presence bits and values for scan, deleteType,
// timestamp, rowBatchSize, then unknown fields.
result = result && (hasScan() == other.hasScan());
if (hasScan()) {
result = result && getScan()
.equals(other.getScan());
result = result && (hasDeleteType() == other.hasDeleteType());
if (hasDeleteType()) {
result = result && (getDeleteType() == other.getDeleteType());
result = result && (hasTimestamp() == other.hasTimestamp());
if (hasTimestamp()) {
result = result && (getTimestamp() == other.getTimestamp());
result = result && (hasRowBatchSize() == other.hasRowBatchSize());
if (hasRowBatchSize()) {
result = result && (getRowBatchSize() == other.getRowBatchSize());
getUnknownFields().equals(other.getUnknownFields());
return result;
/**
 * Builds a BulkDeleteRequest from this builder's current state without
 * enforcing required fields (generated protobuf 2.x builder method).
 *
 * NOTE(review): auto-generated by protoc — regenerate from the .proto file
 * rather than hand-editing. Bit positions: 0=scan, 1=deleteType, 2=timestamp,
 * 3=rowBatchSize; has-bits are copied from the builder into the message.
 */
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
// Bit 0: scan. Take either the raw field or the nested builder's built message.
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (scanBuilder_ == null) {
result.scan_ = scan_;
} else {
result.scan_ = scanBuilder_.build();
}
// Bit 1: deleteType (enum).
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.deleteType_ = deleteType_;
// Bit 2: timestamp (optional uint64).
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.timestamp_ = timestamp_;
// Bit 3: rowBatchSize (uint32).
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.rowBatchSize_ = rowBatchSize_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
/**
 * Builds a BulkDeleteRequest from this builder's current state without
 * enforcing required fields (generated protobuf 2.x builder method).
 *
 * NOTE(review): auto-generated by protoc — regenerate from the .proto file
 * rather than hand-editing. Bit positions: 0=scan, 1=deleteType, 2=timestamp,
 * 3=rowBatchSize; has-bits are copied from the builder into the message.
 */
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
// Bit 0: scan. Take either the raw field or the nested builder's built message.
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (scanBuilder_ == null) {
result.scan_ = scan_;
} else {
result.scan_ = scanBuilder_.build();
}
// Bit 1: deleteType (enum).
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.deleteType_ = deleteType_;
// Bit 2: timestamp (optional uint64).
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.timestamp_ = timestamp_;
// Bit 3: rowBatchSize (uint32).
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.rowBatchSize_ = rowBatchSize_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
/**
 * Standard generated-protobuf hashCode: folds each present field into the
 * hash using its field number, memoizing the result (0 is the "not yet
 * computed" sentinel). The multiply-by-prime sequence must stay in this
 * exact order to keep hashes stable across processes.
 */
@java.lang.Override
public int hashCode() {
// Memoized value; 0 means "not computed yet".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasScan()) {
hash = (37 * hash) + SCAN_FIELD_NUMBER;
hash = (53 * hash) + getScan().hashCode();
}
if (hasDeleteType()) {
hash = (37 * hash) + DELETETYPE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getDeleteType());
}
if (hasTimestamp()) {
hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getTimestamp());
}
if (hasRowBatchSize()) {
hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER;
hash = (53 * hash) + getRowBatchSize();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
/**
 * Standard generated-protobuf hashCode (duplicate definition elsewhere in
 * this file): folds each present field into the hash using its field number,
 * memoizing the result (0 is the "not yet computed" sentinel). The
 * multiply-by-prime sequence must stay in this exact order.
 */
@java.lang.Override
public int hashCode() {
// Memoized value; 0 means "not computed yet".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasScan()) {
hash = (37 * hash) + SCAN_FIELD_NUMBER;
hash = (53 * hash) + getScan().hashCode();
}
if (hasDeleteType()) {
hash = (37 * hash) + DELETETYPE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getDeleteType());
}
if (hasTimestamp()) {
hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getTimestamp());
}
if (hasRowBatchSize()) {
hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER;
hash = (53 * hash) + getRowBatchSize();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
/**
 * Verifies that every required field (scan, deleteType, rowBatchSize) is set
 * and that the nested scan message is itself initialized. The answer is
 * memoized in {@code memoizedIsInitialized}: -1 = unknown, 0 = no, 1 = yes.
 */
public final boolean isInitialized() {
  byte cached = memoizedIsInitialized;
  if (cached != -1) {
    return cached == 1;
  }
  // Short-circuit checks, in the same order the generator emits them.
  if (!hasScan()
      || !hasDeleteType()
      || !hasRowBatchSize()
      || !getScan().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Verifies that every required field (scan, deleteType, rowBatchSize) is set
 * and that the nested scan message is itself initialized. Memoized via
 * {@code memoizedIsInitialized}: -1 = unknown, 0 = no, 1 = yes.
 */
public final boolean isInitialized() {
  byte cached = memoizedIsInitialized;
  if (cached != -1) {
    return cached == 1;
  }
  // Short-circuit checks, in the same order the generator emits them.
  if (!hasScan()
      || !hasDeleteType()
      || !hasRowBatchSize()
      || !getScan().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes the set fields to the wire in field-number order:
 * scan (1, message), deleteType (2, enum), timestamp (3, uint64),
 * rowBatchSize (4, uint32), then any unknown fields.
 *
 * @throws java.io.IOException if the underlying stream fails.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Computing the size first caches per-field lengths the writer relies on.
  getSerializedSize();
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeMessage(1, scan_);
  }
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeEnum(2, deleteType_.getNumber());
  }
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeUInt64(3, timestamp_);
  }
  if ((bitField0_ & 0x00000008) != 0) {
    output.writeUInt32(4, rowBatchSize_);
  }
  getUnknownFields().writeTo(output);
}
/**
 * Serializes the set fields to the wire in field-number order:
 * scan (1, message), deleteType (2, enum), timestamp (3, uint64),
 * rowBatchSize (4, uint32), followed by unknown fields.
 *
 * @throws java.io.IOException if the underlying stream fails.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Computing the size first caches per-field lengths the writer relies on.
  getSerializedSize();
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeMessage(1, scan_);
  }
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeEnum(2, deleteType_.getNumber());
  }
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeUInt64(3, timestamp_);
  }
  if ((bitField0_ & 0x00000008) != 0) {
    output.writeUInt32(4, rowBatchSize_);
  }
  getUnknownFields().writeTo(output);
}
/**
 * Merges another BulkDeleteRequest into this builder: every field present on
 * {@code other} is copied over. The nested scan message is merged
 * field-by-field (not replaced wholesale); scalar fields are overwritten.
 *
 * @return this builder, for chaining.
 */
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasDeleteType()) {
    setDeleteType(other.getDeleteType());
  }
  if (other.hasTimestamp()) {
    setTimestamp(other.getTimestamp());
  }
  if (other.hasRowBatchSize()) {
    setRowBatchSize(other.getRowBatchSize());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges another BulkDeleteRequest into this builder: every field present on
 * {@code other} is copied over. The nested scan message is merged rather
 * than replaced; scalar fields are overwritten.
 *
 * @return this builder, for chaining.
 */
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasDeleteType()) {
    setDeleteType(other.getDeleteType());
  }
  if (other.hasTimestamp()) {
    setTimestamp(other.getTimestamp());
  }
  if (other.hasRowBatchSize()) {
    setRowBatchSize(other.getRowBatchSize());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Returns the prototype (default instance) of the request message for the
 * given service method; only method index 0 exists on this service.
 *
 * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
 *         different service.
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
  }
  if (method.getIndex() == 0) {
    return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
  }
  throw new java.lang.AssertionError("Can't get here.");
}
/**
 * Returns the prototype (default instance) of the request message for the
 * given service method; only method index 0 exists on this service.
 *
 * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
 *         different service.
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
  }
  if (method.getIndex() == 0) {
    return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
  }
  throw new java.lang.AssertionError("Can't get here.");
}
/**
 * Returns the prototype (default instance) of the request message for the
 * given service method; only method index 0 exists on this service.
 *
 * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
 *         different service.
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
  }
  if (method.getIndex() == 0) {
    return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
  }
  throw new java.lang.AssertionError("Can't get here.");
}
/**
 * Returns the prototype (default instance) of the request message for the
 * given service method; only method index 0 exists on this service.
 *
 * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
 *         different service.
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
  }
  if (method.getIndex() == 0) {
    return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
  }
  throw new java.lang.AssertionError("Can't get here.");
}