/**
 * Returns the default (empty) request prototype for the given service method.
 *
 * <p>Every method of this service (indices 0-6) accepts the same request type,
 * {@code AggregateRequest}, so all cases fall through to one return.
 *
 * @param method descriptor of the method being invoked; must belong to this service
 * @return the default {@code AggregateRequest} instance
 * @throws java.lang.IllegalArgumentException if {@code method} belongs to a different service
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
      "Service.getRequestPrototype() given method " +
      "descriptor for wrong service type.");
  }
  switch (method.getIndex()) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
      // All seven aggregate RPCs share the AggregateRequest message.
      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}
/**
 * Returns the default (empty) request prototype for the given service method.
 *
 * <p>Every method of this service (indices 0-6) accepts the same request type,
 * {@code AggregateRequest}, so all cases fall through to one return.
 *
 * @param method descriptor of the method being invoked; must belong to this service
 * @return the default {@code AggregateRequest} instance
 * @throws java.lang.IllegalArgumentException if {@code method} belongs to a different service
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
      "Service.getRequestPrototype() given method " +
      "descriptor for wrong service type.");
  }
  switch (method.getIndex()) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
      // All seven aggregate RPCs share the AggregateRequest message.
      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}
/**
 * Returns the default (empty) request prototype for the given service method.
 *
 * <p>Every method of this service (indices 0-6) accepts the same request type,
 * {@code AggregateRequest}, so all cases fall through to one return.
 *
 * @param method descriptor of the method being invoked; must belong to this service
 * @return the default {@code AggregateRequest} instance
 * @throws java.lang.IllegalArgumentException if {@code method} belongs to a different service
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
      "Service.getRequestPrototype() given method " +
      "descriptor for wrong service type.");
  }
  switch (method.getIndex()) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
      // All seven aggregate RPCs share the AggregateRequest message.
      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}
/**
 * Returns the default (empty) request prototype for the given service method.
 *
 * <p>Every method of this service (indices 0-6) accepts the same request type,
 * {@code AggregateRequest}, so all cases fall through to one return.
 *
 * @param method descriptor of the method being invoked; must belong to this service
 * @return the default {@code AggregateRequest} instance
 * @throws java.lang.IllegalArgumentException if {@code method} belongs to a different service
 */
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
      "Service.getRequestPrototype() given method " +
      "descriptor for wrong service type.");
  }
  switch (method.getIndex()) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
      // All seven aggregate RPCs share the AggregateRequest message.
      return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object properly
 * </pre>
 *
 * <p>Clears field 1: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterClassName() {
  // Unset the has-bit for field 1, then restore the default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object properly
 * </pre>
 *
 * <p>Clears field 1: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterClassName() {
  // Unset the has-bit for field 1, then restore the default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object properly
 * </pre>
 *
 * <p>Clears field 1: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterClassName() {
  // Unset the has-bit for field 1, then restore the default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * Merges every field that is set on {@code other} into this builder;
 * merging the default (empty) instance is a no-op.
 *
 * @param other the message whose set fields are copied into this builder
 * @return this builder, for call chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasInterpreterClassName()) {
    // Copy the raw field, mark it present, and notify parent builders.
    bitField0_ |= 0x00000001;
    interpreterClassName_ = other.interpreterClassName_;
    onChanged();
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasInterpreterSpecificBytes()) {
    setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges every field that is set on {@code other} into this builder;
 * merging the default (empty) instance is a no-op.
 *
 * @param other the message whose set fields are copied into this builder
 * @return this builder, for call chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasInterpreterClassName()) {
    // Copy the raw field, mark it present, and notify parent builders.
    bitField0_ |= 0x00000001;
    interpreterClassName_ = other.interpreterClassName_;
    onChanged();
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasInterpreterSpecificBytes()) {
    setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges every field that is set on {@code other} into this builder;
 * merging the default (empty) instance is a no-op.
 *
 * @param other the message whose set fields are copied into this builder
 * @return this builder, for call chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasInterpreterClassName()) {
    // Copy the raw field, mark it present, and notify parent builders.
    bitField0_ |= 0x00000001;
    interpreterClassName_ = other.interpreterClassName_;
    onChanged();
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasInterpreterSpecificBytes()) {
    setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Returns the shared immutable default instance of {@code AggregateRequest}.
 *
 * @return the default (all-fields-unset) {@code AggregateRequest}
 */
public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
}
/**
 * Returns the shared immutable default instance of {@code AggregateRequest}.
 *
 * @return the default (all-fields-unset) {@code AggregateRequest}
 */
public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
}
/**
 * Returns the shared immutable default instance of {@code AggregateRequest}.
 *
 * @return the default (all-fields-unset) {@code AggregateRequest}
 */
public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
}
/**
 * <code>optional bytes interpreter_specific_bytes = 3;</code>
 *
 * <p>Clears field 3: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterSpecificBytes() {
  // Unset the has-bit for field 3, then restore the default byte string.
  bitField0_ &= ~0x00000004;
  interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
  onChanged();
  return this;
}
/**
 * <code>optional bytes interpreter_specific_bytes = 3;</code>
 *
 * <p>Clears field 3: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterSpecificBytes() {
  // Unset the has-bit for field 3, then restore the default byte string.
  bitField0_ &= ~0x00000004;
  interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
  onChanged();
  return this;
}
/**
 * <code>optional bytes interpreter_specific_bytes = 3;</code>
 *
 * <p>Clears field 3: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterSpecificBytes() {
  // Unset the has-bit for field 3, then restore the default byte string.
  bitField0_ &= ~0x00000004;
  interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
  onChanged();
  return this;
}
/**
 * <code>required string interpreter_class_name = 1;</code>
 *
 * <pre>
 * ** The request passed to the AggregateService consists of three parts
 *  (1) the (canonical) classname of the ColumnInterpreter implementation
 *  (2) the Scan query
 *  (3) any bytes required to construct the ColumnInterpreter object properly
 * </pre>
 *
 * <p>Clears field 1: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterClassName() {
  // Unset the has-bit for field 1, then restore the default value.
  bitField0_ &= ~0x00000001;
  interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
  onChanged();
  return this;
}
/**
/**
 * Merges every field that is set on {@code other} into this builder;
 * merging the default (empty) instance is a no-op.
 *
 * @param other the message whose set fields are copied into this builder
 * @return this builder, for call chaining
 */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) {
    return this;
  }
  if (other.hasInterpreterClassName()) {
    // Copy the raw field, mark it present, and notify parent builders.
    bitField0_ |= 0x00000001;
    interpreterClassName_ = other.interpreterClassName_;
    onChanged();
  }
  if (other.hasScan()) {
    mergeScan(other.getScan());
  }
  if (other.hasInterpreterSpecificBytes()) {
    setInterpreterSpecificBytes(other.getInterpreterSpecificBytes());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Returns the shared immutable default instance of {@code AggregateRequest}.
 *
 * @return the default (all-fields-unset) {@code AggregateRequest}
 */
public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance();
}
/**
 * <code>optional bytes interpreter_specific_bytes = 3;</code>
 *
 * <p>Clears field 3: resets the value to its default and drops the has-bit.
 *
 * @return this builder, for call chaining
 */
public Builder clearInterpreterSpecificBytes() {
  // Unset the has-bit for field 3, then restore the default byte string.
  bitField0_ &= ~0x00000004;
  interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
  onChanged();
  return this;
}