/** Restricts the scan to the given columns by delegating to the wrapped builder. */
@Override
public ScannerBuilder fetch(Collection<Column> columns) {
  // The delegate may hand back a new builder instance, so retain its return value.
  ScannerBuilder updated = sb.fetch(columns);
  sb = updated;
  return this;
}
/** Restricts the scan to the given columns by delegating to the wrapped builder. */
@Override
public ScannerBuilder fetch(Column... columns) {
  // The delegate may hand back a new builder instance, so retain its return value.
  ScannerBuilder updated = sb.fetch(columns);
  sb = updated;
  return this;
}
/**
 * Restricts the scan to the given columns. An immutable snapshot of the
 * collection is kept locally and forwarded to the wrapped builder.
 */
@Override
public ScannerBuilder fetch(Collection<Column> columns) {
  // requireNonNull returns its argument, so the null check and copy can be fused.
  this.columns = ImmutableSet.copyOf(Objects.requireNonNull(columns));
  wrappedBuilder.fetch(this.columns);
  return this;
}
/**
 * Restricts the scan to the given columns. An immutable snapshot of the
 * varargs array is kept locally and forwarded to the wrapped builder.
 */
@Override
public ScannerBuilder fetch(Column... columns) {
  // requireNonNull returns its argument, so the null check and copy can be fused.
  this.columns = ImmutableSet.copyOf(Objects.requireNonNull(columns));
  wrappedBuilder.fetch(this.columns);
  return this;
}
/**
 * Restricts the scan to the given columns. An immutable snapshot of the
 * collection is kept locally and forwarded to the wrapped builder.
 */
@Override
public ScannerBuilder fetch(Collection<Column> columns) {
  Objects.requireNonNull(columns);
  // Snapshot once so the local field and the delegate see the same frozen set.
  ImmutableSet<Column> snapshot = ImmutableSet.copyOf(columns);
  this.columns = snapshot;
  wrappedBuilder.fetch(snapshot);
  return this;
}
/**
 * Restricts the scan to the given columns. An immutable snapshot of the
 * varargs array is kept locally and forwarded to the wrapped builder.
 */
@Override
public ScannerBuilder fetch(Column... columns) {
  Objects.requireNonNull(columns);
  // Snapshot once so the local field and the delegate see the same frozen set.
  ImmutableSet<Column> snapshot = ImmutableSet.copyOf(columns);
  this.columns = snapshot;
  wrappedBuilder.fetch(snapshot);
  return this;
}
/** * Get the number of RDF Statements that have been loaded into the Fluo app * that have not been processed yet. * * @param fluo - The connection to Fluo that will be used to fetch the metadata. (not null) * @return The number of RDF Statements that have been loaded into the Fluo * app that have not been processed yet. */ public BigInteger countStatements(final FluoClient fluo) { checkNotNull(fluo); try(Snapshot sx = fluo.newSnapshot()) { // Limit the scan to the Triples binding set column. final Iterator<ColumnScanner> rows = sx.scanner().fetch(FluoQueryColumns.TRIPLES).byRow().build().iterator(); BigInteger count = BigInteger.valueOf(0L); while(rows.hasNext()) { rows.next(); count = count.add( BigInteger.ONE ); } return count; } } }
/** * Get the number of RDF Statements that have been loaded into the Fluo app * that have not been processed yet. * * @param fluo - The connection to Fluo that will be used to fetch the metadata. (not null) * @return The number of RDF Statements that have been loaded into the Fluo * app that have not been processed yet. */ public BigInteger countStatements(final FluoClient fluo) { checkNotNull(fluo); try(Snapshot sx = fluo.newSnapshot()) { // Limit the scan to the Triples binding set column. final Iterator<ColumnScanner> rows = sx.scanner().fetch(FluoQueryColumns.TRIPLES).byRow().build().iterator(); BigInteger count = BigInteger.valueOf(0L); while(rows.hasNext()) { rows.next(); count = count.add( BigInteger.ONE ); } return count; } } }
@Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { try { ByteArrayInputStream bais = new ByteArrayInputStream( context.getConfiguration().get(PROPS_CONF_KEY).getBytes(StandardCharsets.UTF_8)); env = new Environment(new FluoConfiguration(bais)); ti = new TransactionImpl(env, context.getConfiguration().getLong(TIMESTAMP_CONF_KEY, -1)); // TODO this uses non public Accumulo API! RangeInputSplit ris = (RangeInputSplit) split; Span span = SpanUtil.toSpan(ris.getRange()); HashSet<Column> columns = new HashSet<>(); for (String fam : context.getConfiguration().getStrings(FAMS_CONF_KEY, new String[0])) { columns.add(new Column(fam)); } cellIterator = ti.scanner().over(span).fetch(columns).build().iterator(); } catch (Exception e) { throw new IOException(e); } }
@Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { try { ByteArrayInputStream bais = new ByteArrayInputStream( context.getConfiguration().get(PROPS_CONF_KEY).getBytes(StandardCharsets.UTF_8)); env = new Environment(new FluoConfiguration(bais)); ti = new TransactionImpl(env, context.getConfiguration().getLong(TIMESTAMP_CONF_KEY, -1)); // TODO this uses non public Accumulo API! RangeInputSplit ris = (RangeInputSplit) split; Span span = SpanUtil.toSpan(ris.getRange()); HashSet<Column> columns = new HashSet<>(); for (String fam : context.getConfiguration().getStrings(FAMS_CONF_KEY, new String[0])) { columns.add(new Column(fam)); } rowIterator = ti.scanner().over(span).fetch(columns).byRow().build().iterator(); } catch (Exception e) { throw new IOException(e); } }
/**
 * Prepares this record reader for a split: reconstructs the Fluo environment from
 * properties serialized into the job configuration, opens a transaction at the
 * configured timestamp, and builds a cell iterator over the split's range,
 * limited to the configured column families.
 */
@Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    try {
      // Fluo properties were serialized into the job configuration as a UTF-8 string.
      ByteArrayInputStream bais = new ByteArrayInputStream( context.getConfiguration().get(PROPS_CONF_KEY).getBytes(StandardCharsets.UTF_8));
      env = new Environment(new FluoConfiguration(bais));
      // -1 is the fallback when no timestamp was configured — TODO confirm how
      // TransactionImpl treats a negative timestamp.
      ti = new TransactionImpl(env, context.getConfiguration().getLong(TIMESTAMP_CONF_KEY, -1));
      // TODO this uses non public Accumulo API!
      RangeInputSplit ris = (RangeInputSplit) split;
      Span span = SpanUtil.toSpan(ris.getRange());
      // Restrict the scan to the configured column families (empty array = no restriction keys configured).
      HashSet<Column> columns = new HashSet<>();
      for (String fam : context.getConfiguration().getStrings(FAMS_CONF_KEY, new String[0])) {
        columns.add(new Column(fam));
      }
      cellIterator = ti.scanner().over(span).fetch(columns).build().iterator();
    } catch (Exception e) {
      // Surface any setup failure to the MapReduce framework as an IOException.
      throw new IOException(e);
    }
}
/**
 * Prepares this record reader for a split: reconstructs the Fluo environment from
 * properties serialized into the job configuration, opens a transaction at the
 * configured timestamp, and builds a row-oriented iterator over the split's range,
 * limited to the configured column families.
 */
@Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
    try {
      // Fluo properties were serialized into the job configuration as a UTF-8 string.
      ByteArrayInputStream bais = new ByteArrayInputStream( context.getConfiguration().get(PROPS_CONF_KEY).getBytes(StandardCharsets.UTF_8));
      env = new Environment(new FluoConfiguration(bais));
      // -1 is the fallback when no timestamp was configured — TODO confirm how
      // TransactionImpl treats a negative timestamp.
      ti = new TransactionImpl(env, context.getConfiguration().getLong(TIMESTAMP_CONF_KEY, -1));
      // TODO this uses non public Accumulo API!
      RangeInputSplit ris = (RangeInputSplit) split;
      Span span = SpanUtil.toSpan(ris.getRange());
      // Restrict the scan to the configured column families (empty array = no restriction keys configured).
      HashSet<Column> columns = new HashSet<>();
      for (String fam : context.getConfiguration().getStrings(FAMS_CONF_KEY, new String[0])) {
        columns.add(new Column(fam));
      }
      rowIterator = ti.scanner().over(span).fetch(columns).byRow().build().iterator();
    } catch (Exception e) {
      // Surface any setup failure to the MapReduce framework as an IOException.
      throw new IOException(e);
    }
}
/**
 * Print all statements in the repo for demo and diagnostic purposes.
 *
 * @param fluoClient client used to open a read-only snapshot of the triples table
 * @throws Exception if the snapshot or scan fails
 */
public static void printTriples(final FluoClient fluoClient) throws Exception {
    try (Snapshot snapshot = fluoClient.newSnapshot()) {
        // Only the "triples"/"SPO" column holds the statements we want to display.
        final Column spoColumn = new Column("triples", "SPO");
        final CellScanner cscanner = snapshot.scanner().fetch(spoColumn).build();
        for (final RowColumnValue rcv : cscanner) {
            System.out.println("Triple: " + rcv.getsRow());
        }
    }
}
/**
 * Print all statements in the repo for demo and diagnostic purposes.
 *
 * @param fluoClient client used to open a read-only snapshot of the triples table
 * @throws Exception if the snapshot or scan fails
 */
public static void printTriples(final FluoClient fluoClient) throws Exception {
    try (Snapshot snapshot = fluoClient.newSnapshot()) {
        // Only the "triples"/"SPO" column holds the statements we want to display.
        final Column spoColumn = new Column("triples", "SPO");
        final CellScanner cscanner = snapshot.scanner().fetch(spoColumn).build();
        for (final RowColumnValue rcv : cscanner) {
            System.out.println("Triple: " + rcv.getsRow());
        }
    }
}
private BigInteger countBindingSets(final SnapshotBase sx, final String nodeId, final Column bindingSetColumn) { checkNotNull(sx); checkNotNull(nodeId); checkNotNull(bindingSetColumn); NodeType type = NodeType.fromNodeId(nodeId).get(); Bytes prefixBytes = Bytes.of(type.getNodeTypePrefix()); // Limit the scan to the binding set column and node id. final RowScanner rows = sx.scanner().over(Span.prefix(prefixBytes)).fetch(bindingSetColumn).byRow().build(); BigInteger count = BigInteger.valueOf(0L); for (ColumnScanner columns : rows) { String row = BindingSetRow.makeFromShardedRow(prefixBytes, columns.getRow()).getNodeId(); if (row.equals(nodeId)) { count = count.add(BigInteger.ONE); } } return count; }
private BigInteger countBindingSets(final SnapshotBase sx, final String nodeId, final Column bindingSetColumn) { checkNotNull(sx); checkNotNull(nodeId); checkNotNull(bindingSetColumn); NodeType type = NodeType.fromNodeId(nodeId).get(); Bytes prefixBytes = Bytes.of(type.getNodeTypePrefix()); // Limit the scan to the binding set column and node id. final RowScanner rows = sx.scanner().over(Span.prefix(prefixBytes)).fetch(bindingSetColumn).byRow().build(); BigInteger count = BigInteger.valueOf(0L); for (ColumnScanner columns : rows) { String row = BindingSetRow.makeFromShardedRow(prefixBytes, columns.getRow()).getNodeId(); if (row.equals(nodeId)) { count = count.add(BigInteger.ONE); } } return count; }
/**
 * Retrieve all of the information about Periodic Query results already registered
 * with Fluo. This is returned in the form of {@link CommandNotification}s that
 * can be registered with the {@link NotificationCoordinatorExecutor}.
 *
 * @param sx - snapshot for reading results from Fluo
 * @return - collection of CommandNotifications that indicate Periodic Query information registered with system
 */
public Collection<CommandNotification> getNotifications(Snapshot sx) {
    Set<PeriodicQueryMetadata> periodicMetadata = new HashSet<>();
    // Scan only the periodic query node id column under the periodic query prefix.
    RowScanner scanner = sx.scanner().fetch(FluoQueryColumns.PERIODIC_QUERY_NODE_ID)
            .over(Span.prefix(IncrementalUpdateConstants.PERIODIC_QUERY_PREFIX)).byRow().build();
    // Enhanced for over the RowScanner replaces the manual Iterator while-loop.
    for (ColumnScanner colScanner : scanner) {
        Iterator<ColumnValue> values = colScanner.iterator();
        while (values.hasNext()) {
            // Each stored value is a node id whose metadata describes one periodic query.
            PeriodicQueryMetadata metadata = dao.readPeriodicQueryMetadata(sx, values.next().getsValue());
            periodicMetadata.add(metadata);
        }
    }
    return getCommandNotifications(sx, periodicMetadata);
}
/**
 * Opens a Fluo client and snapshot, scans the span/columns selected by the
 * options, and writes the results to the given stream.
 *
 * @param options - scan options selecting the span and columns
 * @param sConfig - Fluo connection configuration
 * @param out - destination for the scan output
 * @throws IOException if the scan output cannot be written
 */
public static void scanFluo(ScanOpts options, FluoConfiguration sConfig, PrintStream out) throws IOException {
    // A single try-with-resources replaces the nested blocks; resources are
    // still closed in reverse declaration order (snapshot before client).
    try (FluoClient client = FluoFactory.newClient(sConfig);
         Snapshot s = client.newSnapshot()) {
        Span span = getSpan(options);
        Collection<Column> columns = getColumns(options);
        CellScanner cellScanner = s.scanner().over(span).fetch(columns).build();
        scan(options, out, cellScanner);
    }
}
/**
 * Returns an iterator over the export entries in this bucket, optionally
 * resuming at {@code continueRow} (inclusive) when it is non-null.
 */
public Iterator<ExportEntry> getExportIterator(Bytes continueRow) {
    Span bucketSpan = Span.prefix(bucketRow);
    Span span;
    if (continueRow == null) {
        // No resume point: cover the whole bucket.
        span = bucketSpan;
    } else {
        // Resume scanning at the given row/export column, keeping the bucket's end bound.
        span = new Span(new RowColumn(continueRow, EXPORT_COL), true,
                bucketSpan.getEnd(), bucketSpan.isEndInclusive());
    }
    CellScanner scanner = ttx.scanner().over(span).fetch(EXPORT_COL).build();
    return new ExportIterator(scanner);
}
/**
 * Counts the values stored in {@code bsColumn} for rows belonging to the given
 * query node.
 *
 * @param fluoClient - client used to open the counting transaction
 * @param nodeId - id of the query node whose results are counted
 * @param bsColumn - binding set column to scan
 * @return the number of column values whose row matches {@code nodeId}
 */
private int countResults(FluoClient fluoClient, String nodeId, Column bsColumn) {
    try (Transaction tx = fluoClient.newTransaction()) {
        int count = 0;

        // NOTE(review): unchecked Optional.get() — assumes nodeId always maps to a
        // known NodeType; throws NoSuchElementException otherwise.
        Optional<NodeType> type = NodeType.fromNodeId(nodeId);
        Bytes prefixBytes = Bytes.of(type.get().getNodeTypePrefix());

        RowScanner scanner = tx.scanner().over(Span.prefix(prefixBytes)).fetch(bsColumn).byRow().build();
        // Enhanced for over the RowScanner replaces the manual Iterator while-loop.
        for (ColumnScanner colScanner : scanner) {
            // Rows are sharded; strip the shard prefix before comparing node ids.
            BindingSetRow bsRow = BindingSetRow.makeFromShardedRow(prefixBytes, colScanner.getRow());
            if (bsRow.getNodeId().equals(nodeId)) {
                Iterator<ColumnValue> vals = colScanner.iterator();
                while (vals.hasNext()) {
                    vals.next();
                    count++;
                }
            }
        }

        // NOTE(review): this transaction only reads; commit() is retained for
        // behavioral compatibility but a Snapshot may be sufficient here.
        tx.commit();
        return count;
    }
}