/**
 * Constructor for a UTF-8 prefix string.
 * <p>
 * Equivalent to calling the {@code byte[]} constructor with
 * {@code Bytes.UTF8(prefix)}.
 * @param prefix The column qualifier prefix to match, encoded as UTF-8.
 */
public ColumnPrefixFilter(final String prefix) {
  this(Bytes.UTF8(prefix));
}
/**
 * Sets a regular expression to filter results based on the row key.
 * <p>
 * NOTE(review): the pattern string itself is always encoded as UTF-8 here,
 * while {@code charset} only describes how the server decodes row keys —
 * confirm this matches the server's expectation for non-ASCII patterns.
 * @param regexp The regular expression with which to filter the row keys.
 * @param charset The charset used to decode the bytes of the row key into a
 * string.  The RegionServer must support this charset, otherwise it will
 * unexpectedly close the connection the first time you attempt to use this
 * scanner.
 * @see #KeyRegexpFilter(byte[], Charset)
 */
public KeyRegexpFilter(final String regexp, final Charset charset) {
  this(Bytes.UTF8(regexp), charset);
}
/**
 * Constructor for UTF-8 strings.
 * <p>
 * Both bounds are inclusive.  Equivalent to
 * {@link #ColumnRangeFilter(byte[], boolean, byte[], boolean)
 * ColumnRangeFilter}{@code (start_column, true, stop_column, true)}
 * @param start_column The first column (inclusive) to return.
 * @param stop_column The last column (inclusive) to return.
 */
public ColumnRangeFilter(final String start_column, final String stop_column) {
  this(Bytes.UTF8(start_column), true, Bytes.UTF8(stop_column), true);
}
/**
 * Constructor for UTF-8 strings.
 * @param start_column The column from which to start returning values.
 * If {@code null}, start scanning from the beginning of the row.
 * @param start_inclusive If {@code true}, the start column is inclusive.
 * @param stop_column The column up to which to return values.
 * If {@code null}, continue scanning until the end of the row.
 * @param stop_inclusive If {@code true}, the stop column is inclusive.
 */
public ColumnRangeFilter(final String start_column,
                         final boolean start_inclusive,
                         final String stop_column,
                         final boolean stop_inclusive) {
  // The contract above allows null to mean "unbounded", but Bytes.UTF8(null)
  // would throw an NPE, so translate null through unchanged.  The byte[]
  // constructor is documented to accept null for either bound.
  this(start_column == null ? null : Bytes.UTF8(start_column), start_inclusive,
       stop_column == null ? null : Bytes.UTF8(stop_column), stop_inclusive);
}
/**
 * Constructor that takes the column offset as a UTF-8 string.
 * @param limit The maximum number of columns to return per row.
 * @param columnOffset The column qualifier (encoded as UTF-8) from which
 * to start returning columns.
 */
public ColumnPaginationFilter(final int limit, final String columnOffset) {
  this.columnOffset = Bytes.UTF8(columnOffset);
  this.limit = limit;
}
/** * Sets a regular expression to filter results based on the row key. * @param regexp The regular expression with which to filter the row keys. * @param charset The charset used to decode the bytes of the row key into a * string. The RegionServer must support this charset, otherwise it will * unexpectedly close the connection the first time you attempt to use this * scanner. */ public KeyRegexpFilter(final byte[] regexp, final Charset charset) { this.regexp = regexp; this.charset = Bytes.UTF8(charset.name()); }
/** * Constructor for UTF-8 prefix strings. */ public MultipleColumnPrefixFilter(final String[] prefixes) { this.prefixes = new byte[prefixes.length][]; for (int i = 0; i < prefixes.length; i++) { this.prefixes[i] = Bytes.UTF8(prefixes[i]); } this.prefixesLength = estimatePrefixesLength(); }
@Override
int predictSerializedSize() {
  // Encoded once so the size computation mirrors what serializeOld writes.
  final byte[] substr_bytes = Bytes.UTF8(substr);
  // parent size + code byte + name-length byte + name
  // + 2-byte length prefix + payload.
  return super.predictSerializedSize()
      + 1 + 1 + NAME.length
      + 2 + substr_bytes.length;
}
@Override
int predictSerializedSize() {
  // Encoded once so the size computation mirrors what serializeOld writes.
  final byte[] operator_bytes = Bytes.UTF8(bit_operator.name());
  // code byte + byte-array header/value + 2-byte length prefix + op name.
  return 1 + 3 + value.length + 2 + operator_bytes.length;
}
/** CDH3b3-specific header for Hadoop "security". */ private ChannelBuffer headerCDH3b3() { // CDH3 b3 includes a temporary patch that is non-backwards compatible // and results in clients getting disconnected as soon as they send the // header, because the HBase RPC protocol provides no mechanism to send // an error message back to the client during the initial "hello" stage // of a connection. final byte[] user = Bytes.UTF8(System.getProperty("user.name", "asynchbase")); final byte[] buf = new byte[4 + 1 + 4 + 4 + user.length]; final ChannelBuffer header = commonHeader(buf, HRPC3); // Length of the encoded string (useless). header.writeInt(4 + user.length); // 4 // String as encoded by `WritableUtils.writeString'. header.writeInt(user.length); // 4 header.writeBytes(user); // length bytes return header; }
@Override void serializeOld(ChannelBuffer buf) { super.serializeOld(buf); // super.predictSerializedSize() // Write code buf.writeByte(0); // 1 buf.writeByte((byte) NAME.length); // 1 buf.writeBytes(NAME); // NAME.length // writeUTF the expr byte[] expr_bytes = Bytes.UTF8(expr); buf.writeShort(expr_bytes.length); // 2 buf.writeBytes(expr_bytes); // expr.length // writeUTF the charset byte[] charset_bytes = Bytes.UTF8(charset.name()); buf.writeShort(charset_bytes.length); // 2 buf.writeBytes(charset_bytes); // charset.length }
@Override void serializeOld(ChannelBuffer buf) { super.serializeOld(buf); // super.predictSerializedSize() // Write class code buf.writeByte(CODE); // 1 // Write value HBaseRpc.writeByteArray(buf, value); // 3 + value.length // Write op final byte[] op_name = Bytes.UTF8(bit_operator.name()); buf.writeShort(op_name.length); // 2 buf.writeBytes(op_name); // op_name.length }
@Override void serializeOld(ChannelBuffer buf) { super.serializeOld(buf); // super.predictSerializedSize() // Write code buf.writeByte(0); // 1 buf.writeByte((byte) NAME.length); // 1 buf.writeBytes(NAME); // NAME.length // writeUTF the substring byte[] expr_bytes = Bytes.UTF8(substr); buf.writeShort(expr_bytes.length); // 2 buf.writeBytes(expr_bytes); // expr.length }
@Override void serializeOld(ChannelBuffer buf) { // Write the filter name buf.writeByte((byte) name().length); // 1 buf.writeBytes(name()); // name().length // writeUTF comparison operation buf.writeShort(compare_op.name().length()); // 2 buf.writeBytes(Bytes.UTF8(compare_op.name())); // compare_op.name().length // Write the comparator comparator.serializeOld(buf); }
/**
 * Sends the RPC connection header to the RegionServer.
 * <p>
 * Wire layout written here: 4-byte total-length placeholder (patched at the
 * end via {@code setInt}), the protocol class name as a 1-byte length plus
 * bytes, a "user" field (1-byte present-flag, 2-byte length, bytes), and a
 * final 1-byte flag saying no "realUser" field follows.
 * @param channel The channel on which to send the header.
 */
private void sendRPCHeader(final Channel channel) {
  final byte[] user_bytes =
    Bytes.UTF8(client_auth_provider.getClientUsername());
  final String klass = "org.apache.hadoop.hbase.ipc.HRegionInterface";
  final byte[] class_bytes = Bytes.UTF8(klass);
  // 4 (length) + 1 (class len) + class + 1 (user flag) + 2 (user len)
  // + user + 1 (realUser flag).
  final byte[] buf = new byte[
    4 + 1 + class_bytes.length + 1 + 2 + user_bytes.length + 1];
  ChannelBuffer out_buffer = ChannelBuffers.wrappedBuffer(buf);
  out_buffer.clear();
  // Skip 4 bytes reserved for the total length, patched below.
  out_buffer.writerIndex(out_buffer.writerIndex() + 4);
  out_buffer.writeByte(class_bytes.length);  // 1
  out_buffer.writeBytes(class_bytes);  // 44
  // This is part of the protocol header:
  // true if a user field exists (1 is true in boolean).
  out_buffer.writeByte(1);
  out_buffer.writeShort(user_bytes.length);
  out_buffer.writeBytes(user_bytes);
  // true if a realUser field exists.
  out_buffer.writeByte(0);
  // Patch the total length: everything after the 4-byte length itself.
  out_buffer.setInt(0, out_buffer.writerIndex() - 4);
  // NOTE(review): wrap() presumably applies negotiated SASL wrapping when
  // required -- confirm against its definition.
  out_buffer = wrap(out_buffer);
  Channels.write(channel, out_buffer);
}