final int limit) throws IOException {
  Scan scan = tableUtil.buildScan()
    .setStartRow(startRow)
    .setStopRow(stopRow)
    .setCaching(scanCacheRows)
    .build();
@Override
protected StateScanner scanStates(byte[] startRow, byte[] stopRow) throws IOException {
  Scan scan = tableUtil.buildScan()
    .setStartRow(startRow)
    .setStopRow(stopRow)
    .setMaxVersions(1)
    .addColumn(QueueEntryRow.COLUMN_FAMILY, stateColumnName)
    .setCaching(MAX_SCAN_ROWS)
    .build();
private static ScanBuilder getScanBuilder(HBaseTableUtil tableUtil, String rowType) {
  ScanBuilder scan = tableUtil.buildScan();

  // FIX: get scan based on start row and stop row
  // ReplicationStatusKey startKey = new ReplicationStatusKey(Bytes.toBytes(prefix));
  // scan.setStartRow(startKey.getKey());
  // scan.setStopRow(Bytes.stopKeyForPrefix(startKey.getKey()));

  scan.addColumn(Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.TIME_FAMILY), Bytes.toBytes(rowType));
  scan.setMaxVersions(1);
  return scan;
}
@Override
public QueueScanner createScanner(ConsumerConfig consumerConfig, HTable hTable,
                                  Scan scan, int numRows) throws IOException {
  // Roughly divide the caching by the number of buckets, slightly padded so we
  // don't need another RPC in case the estimate is not exactly right.
  ScanBuilder distributedScan = tableUtil.buildScan(scan);
  int caching = (int) (1.1 * numRows / distributorBuckets);
  distributedScan.setCaching(caching);
  ResultScanner scanner = DistributedScanner.create(hTable, distributedScan.build(),
                                                    rowKeyDistributor, scansExecutor);
  return new HBaseQueueScanner(scanner, numRows, rowKeyConverter);
}
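// A minimal sketch (not CDAP's actual implementation) of the row-key bucketing that
// DistributedScanner builds on: writes prepend a deterministic bucket byte to each row
// key, so one logical range scan fans out into one physical scan per bucket and the
// results are merged in key order. This is also why the caching above divides numRows
// by distributorBuckets: each bucket scan only needs its share of rows. All names
// below are illustrative.
public final class BucketedKeyDemo {
  private final int buckets;

  BucketedKeyDemo(int buckets) {
    this.buckets = buckets;
  }

  // The physical key stored in HBase: <bucket byte><original key>.
  byte[] toDistributedKey(byte[] originalKey) {
    byte bucket = (byte) (Math.abs(java.util.Arrays.hashCode(originalKey) % buckets));
    byte[] distributed = new byte[originalKey.length + 1];
    distributed[0] = bucket;
    System.arraycopy(originalKey, 0, distributed, 1, originalKey.length);
    return distributed;
  }
}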
protected Scanner scanPersisted(co.cask.cdap.api.dataset.table.Scan scan) throws Exception {
  ScanBuilder hScan = tableUtil.buildScan();
  hScan.addFamily(columnFamily);

  // Resolve cache-blocks: scan properties take precedence over runtime arguments,
  // then dataset properties, then the default (false).
  if (scan.getProperties().containsKey(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)) {
    hScan.setCacheBlocks(Boolean.valueOf(scan.getProperties().get(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)));
  } else if (arguments.containsKey(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)) {
    hScan.setCacheBlocks(Boolean.valueOf(arguments.get(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)));
  } else if (properties.containsKey(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)) {
    hScan.setCacheBlocks(Boolean.valueOf(properties.get(CONFIG_HBASE_CLIENT_CACHE_BLOCKS)));
  } else {
    hScan.setCacheBlocks(false);
  }

  // Resolve scanner caching with the same precedence; default to 1000 rows per RPC.
  if (scan.getProperties().containsKey(CONFIG_HBASE_CLIENT_SCANNER_CACHING)) {
    hScan.setCaching(Integer.valueOf(scan.getProperties().get(CONFIG_HBASE_CLIENT_SCANNER_CACHING)));
  } else if (arguments.containsKey(CONFIG_HBASE_CLIENT_SCANNER_CACHING)) {
    hScan.setCaching(Integer.valueOf(arguments.get(CONFIG_HBASE_CLIENT_SCANNER_CACHING)));
  } else if (properties.containsKey(CONFIG_HBASE_CLIENT_SCANNER_CACHING)) {
    hScan.setCaching(Integer.valueOf(properties.get(CONFIG_HBASE_CLIENT_SCANNER_CACHING)));
  } else {
    hScan.setCaching(1000);
  }

  byte[] startRow = scan.getStartRow();
  byte[] stopRow = scan.getStopRow();
  if (startRow != null) {
    hScan.setStartRow(startRow);
  }
  if (stopRow != null) {
    hScan.setStopRow(stopRow);
  }
  hScan.setAttribute(TxConstants.TX_OPERATION_ATTRIBUTE_KEY, getEncodedTx());
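// The same precedence chain repeats above for each setting. A minimal standalone
// sketch of that three-level fallback, with hypothetical maps standing in for the
// scan properties, runtime arguments, and dataset properties the real code consults:
import java.util.Map;

final class ConfigLookup {
  // Returns the first value found for key, checking scan properties, then runtime
  // arguments, then dataset properties, and finally falling back to a default.
  static String lookup(String key, String defaultValue, Map<String, String> scanProps,
                       Map<String, String> arguments, Map<String, String> properties) {
    if (scanProps.containsKey(key)) {
      return scanProps.get(key);
    }
    if (arguments.containsKey(key)) {
      return arguments.get(key);
    }
    if (properties.containsKey(key)) {
      return properties.get(key);
    }
    return defaultValue;
  }
}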
private ScanBuilder configureRangeScan(ScanBuilder scan, @Nullable byte[] startRow,
                                       @Nullable byte[] stopRow, @Nullable FuzzyRowFilter filter) {
  // todo: should be configurable
  scan.setCaching(1000);

  if (startRow != null) {
    scan.setStartRow(startRow);
  }
  if (stopRow != null) {
    scan.setStopRow(stopRow);
  }
  scan.addFamily(columnFamily);
  if (filter != null) {
    List<Pair<byte[], byte[]>> fuzzyPairs =
      Lists.newArrayListWithExpectedSize(filter.getFuzzyKeysData().size());
    for (ImmutablePair<byte[], byte[]> pair : filter.getFuzzyKeysData()) {
      if (rowKeyDistributor != null) {
        fuzzyPairs.addAll(rowKeyDistributor.getDistributedFilterPairs(pair));
      } else {
        // Make a copy of the filter pair because the key and mask get modified in HBase's FuzzyRowFilter.
        fuzzyPairs.add(Pair.newPair(Arrays.copyOf(pair.getFirst(), pair.getFirst().length),
                                    Arrays.copyOf(pair.getSecond(), pair.getSecond().length)));
      }
    }
    scan.setFilter(new org.apache.hadoop.hbase.filter.FuzzyRowFilter(fuzzyPairs));
  }
  return scan;
}
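// A minimal sketch of how the fuzzy key/mask pairs consumed above can be built.
// The fixed-width layout <4-byte bucket><8-byte timestamp> is an illustrative
// assumption, not taken from the snippet: mask byte 1 marks a position that may
// hold any value, mask byte 0 marks a position that must match the key exactly.
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public final class FuzzyScanDemo {
  // Matches every row whose trailing 8 bytes equal the given timestamp,
  // regardless of the 4-byte bucket prefix.
  public static Scan buildFuzzyScan(long timestamp) {
    byte[] key = new byte[4 + 8];
    Bytes.putLong(key, 4, timestamp);

    byte[] mask = new byte[4 + 8];
    for (int i = 0; i < 4; i++) {
      mask[i] = 1; // bucket bytes are "don't care"
    }

    List<Pair<byte[], byte[]>> pairs = new ArrayList<>();
    pairs.add(Pair.newPair(key, mask));
    return new Scan().setFilter(new FuzzyRowFilter(pairs));
  }
}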
/**
 * Scans the HBase table to get a list of {@link TopicId}.
 */
private List<TopicId> scanTopics(ScanBuilder scanBuilder) throws IOException {
  Scan scan = scanBuilder.setFilter(new FirstKeyOnlyFilter()).setCaching(scanCacheRows).build();
  try {
    List<TopicId> topicIds = new ArrayList<>();
    try (ResultScanner resultScanner = hTable.getScanner(scan)) {
      for (Result result : resultScanner) {
        TopicId topicId = MessagingUtils.toTopicId(result.getRow());
        byte[] value = result.getValue(columnFamily, COL);
        Map<String, String> properties = GSON.fromJson(Bytes.toString(value), MAP_TYPE);
        TopicMetadata metadata = new TopicMetadata(topicId, properties);
        if (metadata.exists()) {
          topicIds.add(topicId);
        }
      }
    }
    return topicIds;
  } catch (IOException e) {
    throw exceptionHandler.handle(e);
  }
}
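// The MAP_TYPE constant used above is assumed to be a Gson TypeToken for
// Map<String, String>. A minimal sketch of that (de)serialization in isolation:
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.Map;

final class TopicPropertiesCodec {
  private static final Gson GSON = new Gson();
  private static final Type MAP_TYPE = new TypeToken<Map<String, String>>() { }.getType();

  // Decodes a JSON object such as {"ttl":"3600"} into a property map.
  static Map<String, String> decode(String json) {
    return GSON.fromJson(json, MAP_TYPE);
  }

  static String encode(Map<String, String> properties) {
    return GSON.toJson(properties, MAP_TYPE);
  }
}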
private static void dumpReplicationStateTable() throws Exception {
  System.out.println("\nThese are all the HBase regions on the cluster:");
  HBaseTableUtil tableUtil = new HBaseTableUtilFactory(cConf).get();
  HTable hTable = tableUtil.createHTable(hConf, getReplicationStateTableId(tableUtil));
  ScanBuilder scan = tableUtil.buildScan();
  scan.addColumn(Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.TIME_FAMILY),
                 Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.WRITE_TIME_ROW_TYPE));
  scan.addColumn(Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.TIME_FAMILY),
                 Bytes.toBytes(ReplicationConstants.ReplicationStatusTool.REPLICATE_TIME_ROW_TYPE));
  Result result;
  try (ResultScanner resultScanner = hTable.getScanner(scan.build())) {
    while ((result = resultScanner.next()) != null) {
      ReplicationStatusKey key = new ReplicationStatusKey(result.getRow());
      String rowType = key.getRowType();
      String region = key.getRegionName();
      UUID rsID = key.getRsID();
      Long writeTime = getTimeFromResult(result, ReplicationConstants.ReplicationStatusTool.WRITE_TIME_ROW_TYPE);
      Long replicateTime = getTimeFromResult(result, ReplicationConstants.ReplicationStatusTool.REPLICATE_TIME_ROW_TYPE);
      System.out.println("Key=>rowType:" + rowType + ":region:" + region + ":RSID:" + rsID
                           + " writeTime:" + writeTime + ":replicateTime:" + replicateTime);
    }
  } finally {
    hTable.close();
  }
}
@Override
public List<TopicId> listTopics(NamespaceId namespaceId) throws IOException {
  byte[] startRow = MessagingUtils.topicScanKey(namespaceId);
  ScanBuilder scanBuilder = tableUtil.buildScan()
    .setStartRow(startRow)
    .setStopRow(Bytes.stopKeyForPrefix(startRow));
  return scanTopics(scanBuilder);
}
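// Bytes.stopKeyForPrefix (a CDAP utility) turns a prefix into an exclusive stop key,
// so the scan above covers exactly the rows starting with startRow. A standalone
// sketch of the idea: increment the last byte that is not 0xFF and truncate there.
import java.util.Arrays;

final class StopKeyDemo {
  // Returns the exclusive stop key for a prefix scan, or null if the prefix is all
  // 0xFF bytes, in which case the scan is unbounded on the right.
  static byte[] stopKeyForPrefix(byte[] prefix) {
    for (int i = prefix.length - 1; i >= 0; i--) {
      if ((prefix[i] & 0xFF) != 0xFF) {
        byte[] stop = Arrays.copyOf(prefix, i + 1);
        stop[i]++;
        return stop;
      }
    }
    return null;
  }

  public static void main(String[] args) {
    // "ns1" -> "ns2": every key with prefix "ns1" sorts before "ns2".
    System.out.println(Arrays.toString(stopKeyForPrefix(new byte[] { 'n', 's', '1' })));
  }
}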
private void setFilterIfNeeded(ScanBuilder scan, @Nullable Filter filter) {
  if (filter == null) {
    return;
  }

  if (filter instanceof FuzzyRowFilter) {
    FuzzyRowFilter fuzzyRowFilter = (FuzzyRowFilter) filter;
    List<Pair<byte[], byte[]>> fuzzyPairs =
      Lists.newArrayListWithExpectedSize(fuzzyRowFilter.getFuzzyKeysData().size());
    for (ImmutablePair<byte[], byte[]> pair : fuzzyRowFilter.getFuzzyKeysData()) {
      fuzzyPairs.add(Pair.newPair(pair.getFirst(), pair.getSecond()));
    }
    scan.setFilter(new org.apache.hadoop.hbase.filter.FuzzyRowFilter(fuzzyPairs));
  } else {
    throw new IllegalArgumentException("Unsupported filter: " + filter);
  }
}
Scan scan = scanBuilder.setCaching(metadataScanSize).build();
try (ResultScanner scanner = metadataTable.getScanner(scan)) {
  for (Result result : scanner) {