PrefixFilter.<init>

Best code snippets using org.apache.hadoop.hbase.filter.PrefixFilter.<init> (showing top 15 results out of 315)

Common ways to obtain PrefixFilter:

private void myMethod() {
 PrefixFilter p = new PrefixFilter(prefix);
}
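
For orientation, a minimal sketch of wiring the constructor into a client-side Scan; the method name, the Connection parameter, and the table name "my_table" are hypothetical and not taken from the snippets below:

// Sketch only: assumes an existing org.apache.hadoop.hbase.client.Connection
// and a (hypothetical) table named "my_table".
private void scanByPrefix(Connection conn) throws IOException {
 try (Table table = conn.getTable(TableName.valueOf("my_table"))) {
  Scan scan = new Scan();
  // Return only rows whose key starts with the given prefix.
  scan.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
  try (ResultScanner scanner = table.getScanner(scan)) {
   for (Result result : scanner) {
    System.out.println(Bytes.toString(result.getRow()));
   }
  }
 }
}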
origin: apache/hbase

@Test
public void testPrefixFilter() throws Exception {
 // Grab rows from group one (half of total)
 long expectedRows = numRows / 2;
 long expectedKeys = colsPerRow;
 Scan s = new Scan();
 s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
 verifyScan(s, expectedRows, expectedKeys);
}
origin: apache/hbase

@Test
public void testPrefixFilter() throws Exception {
 // null prefix
 PrefixFilter prefixFilter = new PrefixFilter(null);
 assertTrue(prefixFilter.areSerializedFieldsEqual(
  ProtobufUtil.toFilter(ProtobufUtil.toFilter(prefixFilter))));
 // non-null prefix
 prefixFilter = new PrefixFilter(Bytes.toBytes("abc"));
 assertTrue(prefixFilter.areSerializedFieldsEqual(
  ProtobufUtil.toFilter(ProtobufUtil.toFilter(prefixFilter))));
}
origin: apache/hbase

public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
 Preconditions.checkArgument(filterArguments.size() == 1,
               "Expected 1 but got: %s", filterArguments.size());
 byte [] prefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0));
 return new PrefixFilter(prefix);
}
origin: apache/hbase

private static Filter getExportFilter(String[] args) {
 Filter exportFilter;
 String filterCriteria = (args.length > 5) ? args[5]: null;
 if (filterCriteria == null) return null;
 if (filterCriteria.startsWith("^")) {
  String regexPattern = filterCriteria.substring(1, filterCriteria.length());
  exportFilter = new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(regexPattern));
 } else {
  exportFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
 }
 return exportFilter;
}
origin: apache/hbase

public Filter getOrderingFilter() {
 List<Filter> filters = new ArrayList<>();
 filters.add(new PrefixFilter(Bytes.toBytes("yyy")));
 filters.add(new PageFilter(MAX_PAGES));
 Filter filterMPONE =
  new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
 return filterMPONE;
}
origin: apache/hbase

/**
 * @param pbBytes A pb serialized {@link PrefixFilter} instance
 * @return An instance of {@link PrefixFilter} made from <code>bytes</code>
 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
 * @see #toByteArray
 */
public static PrefixFilter parseFrom(final byte [] pbBytes)
throws DeserializationException {
 FilterProtos.PrefixFilter proto;
 try {
  proto = FilterProtos.PrefixFilter.parseFrom(pbBytes);
 } catch (InvalidProtocolBufferException e) {
  throw new DeserializationException(e);
 }
 return new PrefixFilter(proto.hasPrefix()?proto.getPrefix().toByteArray():null);
}
origin: apache/hbase

private static void setRowPrefixFilter(Scan scan, String rowPrefixes) {
 if (rowPrefixes != null && !rowPrefixes.isEmpty()) {
  String[] rowPrefixArray = rowPrefixes.split(",");
  Arrays.sort(rowPrefixArray);
  FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  for (String prefix : rowPrefixArray) {
   Filter filter = new PrefixFilter(Bytes.toBytes(prefix));
   filterList.addFilter(filter);
  }
  scan.setFilter(filterList);
  byte[] startPrefixRow = Bytes.toBytes(rowPrefixArray[0]);
  byte[] lastPrefixRow = Bytes.toBytes(rowPrefixArray[rowPrefixArray.length -1]);
  setStartAndStopRows(scan, startPrefixRow, lastPrefixRow);
 }
}
origin: apache/hbase

private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
  throws IOException {
 // Defaults FilterList.Operator.MUST_PASS_ALL.
 FilterList allFilters = new FilterList();
 allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
 // Only return rows where this column value exists in the row.
 SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
   Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
 filter.setFilterIfMissing(true);
 allFilters.addFilter(filter);
 Scan scan = new Scan();
 scan.addFamily(Bytes.toBytes("trans-blob"));
 scan.addFamily(Bytes.toBytes("trans-type"));
 scan.addFamily(Bytes.toBytes("trans-date"));
 scan.addFamily(Bytes.toBytes("trans-tags"));
 scan.addFamily(Bytes.toBytes("trans-group"));
 scan.setFilter(allFilters);
 return r.getScanner(scan);
}
origin: apache/hbase

@Test
public void testFilters() throws IOException {
 try {
  this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
  HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
  byte [] prefix = Bytes.toBytes("ab");
  Filter newFilter = new PrefixFilter(prefix);
  Scan scan = new Scan();
  scan.setFilter(newFilter);
  rowPrefixFilter(scan);
  byte[] stopRow = Bytes.toBytes("bbc");
  newFilter = new WhileMatchFilter(new InclusiveStopFilter(stopRow));
  scan = new Scan();
  scan.setFilter(newFilter);
  rowInclusiveStopFilter(scan, stopRow);
 } finally {
  HBaseTestingUtility.closeRegionAndWAL(this.region);
 }
}
origin: apache/hbase

private Filter getMPALLFilter() {
 List<Filter> filters = new ArrayList<>();
 filters.add(new PageFilter(MAX_PAGES));
 filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
 Filter filterMPALL =
  new FilterList(FilterList.Operator.MUST_PASS_ALL, filters);
 return filterMPALL;
}
origin: apache/hbase

@Test
public void testPrefixFilterWithReverseScan() throws Exception {
 // Grab rows from group one (half of total)
 long expectedRows = this.numRows / 2;
 long expectedKeys = this.colsPerRow;
 Scan s = new Scan();
 s.setReversed(true);
 s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
 verifyScan(s, expectedRows, expectedKeys);
}
origin: apache/hbase

@Before
public void setUp() throws Exception {
 this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX));
}
origin: apache/hbase

@Test
public void testPrefixFilter() throws Exception {
 // Grab rows from group one (half of total)
 long expectedRows = this.numRows / 2;
 long expectedKeys = this.colsPerRow;
 Scan s = new Scan();
 s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
 verifyScan(s, expectedRows, expectedKeys);
}
origin: apache/hbase

private static Filter getRowFilter(String[] args) {
 Filter rowFilter = null;
 String filterCriteria = (args.length > 3) ? args[3]: null;
 if (filterCriteria == null) return null;
 if (filterCriteria.startsWith("^")) {
  String regexPattern = filterCriteria.substring(1, filterCriteria.length());
  rowFilter = new RowFilter(CompareOperator.EQUAL, new RegexStringComparator(regexPattern));
 } else {
  rowFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
 }
 return rowFilter;
}
origin: apache/hbase

private Filter getFilterMPONE() {
 List<Filter> filters = new ArrayList<>();
 filters.add(new PageFilter(MAX_PAGES));
 filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
 Filter filterMPONE =
  new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
 return filterMPONE;
}

Popular methods of PrefixFilter

  • getPrefix
  • areSerializedFieldsEqual
  • filterAllRemaining
  • filterCell
  • isReversed
  • parseFrom
  • setReversed
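
For reference, a minimal sketch exercising a few of the methods listed above; the prefix value "row-" is illustrative:

PrefixFilter filter = new PrefixFilter(Bytes.toBytes("row-"));
byte[] prefix = filter.getPrefix();          // the prefix passed to the constructor
filter.setReversed(true);                    // mark the filter for use with a reversed scan
boolean reversed = filter.isReversed();      // true after setReversed(true)
boolean done = filter.filterAllRemaining();  // false until the scan has moved past the prefix range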

