@Override
public RowScanner build() {
  // Build the underlying scanner first, then wrap it so reads go through the Rtx view.
  RowScanner delegate = rsb.build();
  return new RtxRowScanner(delegate);
}
@Override
public RowScannerBuilder byRow() {
  // Short stable id derived from the scan parameters, used to correlate trace log lines.
  String scanId = Integer.toHexString(Math.abs(Objects.hash(span, columns, txid)));
  return () -> {
    log.trace("txid: {} scanId: {} scanner().over({}).fetch({}).byRow().build()", txid, scanId,
        Hex.encNonAscii(span), Hex.encNonAscii(columns));
    RowScanner scanner = wrappedBuilder.byRow().build();
    // Only pay the cost of the tracing wrapper when trace output will actually be emitted.
    if (TracingCellScanner.log.isTraceEnabled()) {
      return new TracingRowScanner(scanner, txid, scanId);
    }
    return scanner;
  };
}
}
@Override
public RowScanner build() {
  log.trace("txid: {} scanId: {} scanner().over({}).fetch({}).byRow().build()", txid, scanId,
      Hex.encNonAscii(span), Hex.encNonAscii(columns));
  RowScanner scanner = wrappedBuilder.byRow().build();
  // Wrap for per-row tracing only when the trace level is actually enabled.
  if (TracingCellScanner.log.isTraceEnabled()) {
    return new TracingRowScanner(scanner, txid, scanId);
  }
  return scanner;
}
};
/** * Get the number of RDF Statements that have been loaded into the Fluo app * that have not been processed yet. * * @param fluo - The connection to Fluo that will be used to fetch the metadata. (not null) * @return The number of RDF Statements that have been loaded into the Fluo * app that have not been processed yet. */ public BigInteger countStatements(final FluoClient fluo) { checkNotNull(fluo); try(Snapshot sx = fluo.newSnapshot()) { // Limit the scan to the Triples binding set column. final Iterator<ColumnScanner> rows = sx.scanner().fetch(FluoQueryColumns.TRIPLES).byRow().build().iterator(); BigInteger count = BigInteger.valueOf(0L); while(rows.hasNext()) { rows.next(); count = count.add( BigInteger.ONE ); } return count; } } }
/** * Get the number of RDF Statements that have been loaded into the Fluo app * that have not been processed yet. * * @param fluo - The connection to Fluo that will be used to fetch the metadata. (not null) * @return The number of RDF Statements that have been loaded into the Fluo * app that have not been processed yet. */ public BigInteger countStatements(final FluoClient fluo) { checkNotNull(fluo); try(Snapshot sx = fluo.newSnapshot()) { // Limit the scan to the Triples binding set column. final Iterator<ColumnScanner> rows = sx.scanner().fetch(FluoQueryColumns.TRIPLES).byRow().build().iterator(); BigInteger count = BigInteger.valueOf(0L); while(rows.hasNext()) { rows.next(); count = count.add( BigInteger.ONE ); } return count; } } }
@Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { try { ByteArrayInputStream bais = new ByteArrayInputStream( context.getConfiguration().get(PROPS_CONF_KEY).getBytes(StandardCharsets.UTF_8)); env = new Environment(new FluoConfiguration(bais)); ti = new TransactionImpl(env, context.getConfiguration().getLong(TIMESTAMP_CONF_KEY, -1)); // TODO this uses non public Accumulo API! RangeInputSplit ris = (RangeInputSplit) split; Span span = SpanUtil.toSpan(ris.getRange()); HashSet<Column> columns = new HashSet<>(); for (String fam : context.getConfiguration().getStrings(FAMS_CONF_KEY, new String[0])) { columns.add(new Column(fam)); } rowIterator = ti.scanner().over(span).fetch(columns).byRow().build().iterator(); } catch (Exception e) { throw new IOException(e); } }
@Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { try { ByteArrayInputStream bais = new ByteArrayInputStream( context.getConfiguration().get(PROPS_CONF_KEY).getBytes(StandardCharsets.UTF_8)); env = new Environment(new FluoConfiguration(bais)); ti = new TransactionImpl(env, context.getConfiguration().getLong(TIMESTAMP_CONF_KEY, -1)); // TODO this uses non public Accumulo API! RangeInputSplit ris = (RangeInputSplit) split; Span span = SpanUtil.toSpan(ris.getRange()); HashSet<Column> columns = new HashSet<>(); for (String fam : context.getConfiguration().getStrings(FAMS_CONF_KEY, new String[0])) { columns.add(new Column(fam)); } rowIterator = ti.scanner().over(span).fetch(columns).byRow().build().iterator(); } catch (Exception e) { throw new IOException(e); } }
private BigInteger countBindingSets(final SnapshotBase sx, final String nodeId, final Column bindingSetColumn) { checkNotNull(sx); checkNotNull(nodeId); checkNotNull(bindingSetColumn); NodeType type = NodeType.fromNodeId(nodeId).get(); Bytes prefixBytes = Bytes.of(type.getNodeTypePrefix()); // Limit the scan to the binding set column and node id. final RowScanner rows = sx.scanner().over(Span.prefix(prefixBytes)).fetch(bindingSetColumn).byRow().build(); BigInteger count = BigInteger.valueOf(0L); for (ColumnScanner columns : rows) { String row = BindingSetRow.makeFromShardedRow(prefixBytes, columns.getRow()).getNodeId(); if (row.equals(nodeId)) { count = count.add(BigInteger.ONE); } } return count; }
private BigInteger countBindingSets(final SnapshotBase sx, final String nodeId, final Column bindingSetColumn) { checkNotNull(sx); checkNotNull(nodeId); checkNotNull(bindingSetColumn); NodeType type = NodeType.fromNodeId(nodeId).get(); Bytes prefixBytes = Bytes.of(type.getNodeTypePrefix()); // Limit the scan to the binding set column and node id. final RowScanner rows = sx.scanner().over(Span.prefix(prefixBytes)).fetch(bindingSetColumn).byRow().build(); BigInteger count = BigInteger.valueOf(0L); for (ColumnScanner columns : rows) { String row = BindingSetRow.makeFromShardedRow(prefixBytes, columns.getRow()).getNodeId(); if (row.equals(nodeId)) { count = count.add(BigInteger.ONE); } } return count; }
/**
 * Retrieve all of the information about Periodic Query results already registered
 * with Fluo. This is returned in the form of {@link CommandNotification}s that
 * can be registered with the {@link NotificationCoordinatorExecutor}.
 *
 * @param sx - snapshot for reading results from Fluo
 * @return - collection of CommandNotifications that indicate Periodic Query information
 *           registered with system
 */
public Collection<CommandNotification> getNotifications(Snapshot sx) {
    Set<PeriodicQueryMetadata> periodicMetadata = new HashSet<>();
    // Scan only periodic-query node id entries under the periodic query row prefix.
    RowScanner scanner = sx.scanner().fetch(FluoQueryColumns.PERIODIC_QUERY_NODE_ID)
            .over(Span.prefix(IncrementalUpdateConstants.PERIODIC_QUERY_PREFIX)).byRow().build();
    for (ColumnScanner colScanner : scanner) {
        for (ColumnValue value : colScanner) {
            // The cell's value is the periodic query's node id; load its metadata.
            periodicMetadata.add(dao.readPeriodicQueryMetadata(sx, value.getsValue()));
        }
    }
    return getCommandNotifications(sx, periodicMetadata);
}
/**
 * Retrieve all of the information about Periodic Query results already registered
 * with Fluo. This is returned in the form of {@link CommandNotification}s that
 * can be registered with the {@link NotificationCoordinatorExecutor}.
 *
 * @param sx - snapshot for reading results from Fluo
 * @return - collection of CommandNotifications that indicate Periodic Query information
 *           registered with system
 */
public Collection<CommandNotification> getNotifications(Snapshot sx) {
    Set<PeriodicQueryMetadata> metadataSet = new HashSet<>();
    // Restrict the scan to the periodic-query node id column within the periodic prefix.
    RowScanner rowScanner = sx.scanner().fetch(FluoQueryColumns.PERIODIC_QUERY_NODE_ID)
            .over(Span.prefix(IncrementalUpdateConstants.PERIODIC_QUERY_PREFIX)).byRow().build();
    for (ColumnScanner row : rowScanner) {
        for (ColumnValue cell : row) {
            // Each cell value names a periodic query node; read its metadata from the snapshot.
            PeriodicQueryMetadata metadata = dao.readPeriodicQueryMetadata(sx, cell.getsValue());
            metadataSet.add(metadata);
        }
    }
    return getCommandNotifications(sx, metadataSet);
}
// NOTE(review): truncated fragment — the rest of deleteBatch (the batch loop, the deletes,
// and the Optional<RowColumn> continuation it returns) is outside this view; only the scan
// setup is visible. Sets up a by-row scan over `span`, restricted to `column`, for batched
// deletion; `batchSize` presumably caps how many entries one call removes — TODO confirm.
private Optional<RowColumn> deleteBatch(TransactionBase tx, Optional<String> nodeId, Span span, Column column, int batchSize) { RowScanner rs = tx.scanner().over(span).fetch(column).byRow().build(); try { Iterator<ColumnScanner> colScannerIter = rs.iterator();
// NOTE(review): fragment of a larger method — builds a by-row scan over the sibling span,
// fetching only the sibling column; the loop that consumes colScannerIter is outside this view.
final RowScanner rs = tx.scanner().over(siblingSpan).fetch(siblingColumn).byRow().build(); final Iterator<ColumnScanner> colScannerIter = rs.iterator();
// NOTE(review): truncated fragment (duplicate of another deleteBatch head in this chunk) —
// the batch loop, the deletes, and the returned Optional<RowColumn> continuation are not
// visible here. Sets up a by-row scan over `span`, restricted to `column`.
private Optional<RowColumn> deleteBatch(TransactionBase tx, Optional<String> nodeId, Span span, Column column, int batchSize) { RowScanner rs = tx.scanner().over(span).fetch(column).byRow().build(); try { Iterator<ColumnScanner> colScannerIter = rs.iterator();
// NOTE(review): fragment of a larger method — scans the sibling span for the sibling column,
// grouped by row; the consumer of colScannerIter lies outside this view.
final RowScanner rs = tx.scanner().over(siblingSpan).fetch(siblingColumn).byRow().build(); final Iterator<ColumnScanner> colScannerIter = rs.iterator();
// NOTE(review): fragment of a larger method — reads the batch size from the batch descriptor
// and opens a by-row scan over `span` for `column`; the loop that applies batchSize to
// colScannerIter is outside this view.
final int batchSize = batch.getBatchSize(); final RowScanner rs = tx.scanner().over(span).fetch(column).byRow().build(); final Iterator<ColumnScanner> colScannerIter = rs.iterator();
@Test
public void testMultipleIteratorsFromSameRowScanner() {
  Set<RowColumnValue> expected = genData();
  try (Snapshot snap = client.newSnapshot()) {
    RowScanner rowScanner = snap.scanner().byRow().build();
    // Two independent iterators over the same RowScanner must each see the full data set.
    Iterator<ColumnScanner> first = rowScanner.iterator();
    Iterator<ColumnScanner> second = rowScanner.iterator();
    HashSet<RowColumnValue> seenByFirst = new HashSet<>();
    HashSet<RowColumnValue> seenBySecond = new HashSet<>();
    // Advance both iterators in lockstep so they can be compared row by row.
    while (first.hasNext()) {
      ColumnScanner cs1 = first.next();
      Assert.assertTrue(second.hasNext());
      ColumnScanner cs2 = second.next();
      for (ColumnValue cv : cs1) {
        seenByFirst.add(new RowColumnValue(cs1.getRow(), cv.getColumn(), cv.getValue()));
      }
      for (ColumnValue cv : cs2) {
        seenBySecond.add(new RowColumnValue(cs2.getRow(), cv.getColumn(), cv.getValue()));
      }
    }
    // The second iterator must be exhausted exactly when the first one is.
    Assert.assertFalse(second.hasNext());
    Assert.assertEquals(expected, seenByFirst);
    Assert.assertEquals(expected, seenBySecond);
  }
}
// NOTE(review): fragment (duplicate of another batch-scan head in this chunk) — captures the
// batch size and opens a by-row scan over `span` for `column`; the consuming loop is not
// visible here.
final int batchSize = batch.getBatchSize(); final RowScanner rs = tx.scanner().over(span).fetch(column).byRow().build(); final Iterator<ColumnScanner> colScannerIter = rs.iterator();
/**
 * Lists the row key of every entry currently in the Fluo table.
 *
 * @param fluoClient - Client used to open a read snapshot. (not null assumed)
 * @return The row key of each row in the table.
 */
private List<Bytes> getFluoTableEntries(final FluoClient fluoClient) {
    try (Snapshot snapshot = fluoClient.newSnapshot()) {
        // An empty prefix span matches every row in the table.
        final RowScanner rscanner = snapshot.scanner().over(Span.prefix("")).byRow().build();
        final List<Bytes> rows = new ArrayList<>();
        for (final ColumnScanner cscanner : rscanner) {
            rows.add(cscanner.getRow());
        }
        return rows;
    }
}
}
/**
 * Collects the row key of every entry in the Fluo table.
 *
 * @param fluoClient - Client used to open a read snapshot. (not null assumed)
 * @return One entry per row currently stored in the table.
 */
private List<Bytes> getFluoTableEntries(final FluoClient fluoClient) {
    try (Snapshot snapshot = fluoClient.newSnapshot()) {
        final List<Bytes> rowIds = new ArrayList<>();
        // The empty prefix covers the whole table, so this scan visits every row.
        final RowScanner fullScan = snapshot.scanner().over(Span.prefix("")).byRow().build();
        for (final ColumnScanner row : fullScan) {
            rowIds.add(row.getRow());
        }
        return rowIds;
    }
}