/**
 * Creates an iterator over a single row of the source iterator.
 *
 * <p>SingleRowIter must be passed a PeekingIterator so that it can peek at the next entry
 * to see if it belongs in the current row or not.
 */
public SingleRowIter(PeekingIterator<Entry<Key,Value>> source) {
  this.source = source;
  // Remember which row we are serving; null means the source was already empty.
  if (source.hasNext()) {
    currentRow = source.peek().getKey().getRow();
  }
}
/** Returns the next entry of the current row, clearing the row marker when it ends. */
private Entry<Key,Value> _next() {
  if (currentRow == null) {
    throw new NoSuchElementException();
  }
  count++;
  Entry<Key,Value> entry = source.next();
  // The row is finished when the source is drained or the next entry has a different row id.
  boolean rowEnded =
      !source.hasNext() || !source.peek().getKey().getRow().equals(currentRow);
  if (rowEnded) {
    currentRow = null; // disables any further reads of this row
  }
  return entry;
}
/**
 * Returns true if there is at least one more row to get.
 *
 * <p>If the last row hasn't been fully read, this method reads through the end of that row
 * so it can determine whether the underlying iterator has another row. The last row is
 * disabled from future use.
 */
@Override
public boolean hasNext() {
  if (lastIter != null) {
    // Drain whatever remains of the previous row and fold its entry count into ours.
    lastIter.consume();
    count += lastIter.getCount();
    lastIter = null;
  }
  return iter.hasNext();
}
// NOTE(review): fragment — the loop body continues beyond this excerpt. Presumably iterates
// a bulk-load map and resolves the tablets each load entry's extent overlaps; TODO confirm
// against the full method before documenting further.
while (lmi.hasNext()) { loadMapEntry = lmi.next(); List<TabletMetadata> tablets = findOverlappingTablets(loadMapEntry.getKey(), tabletIter);
@Override public boolean hasTop() { if (!inner.hasNext()) return false; Key k = inner.peek().getKey(); return seekRng.contains(k); // do not return entries past the seek() range }
/** Hands back the current row's next entry; nulls out the row id once it is exhausted. */
private Entry<Key,Value> _next() {
  if (currentRow == null) {
    throw new NoSuchElementException();
  }
  count++;
  Entry<Key,Value> kv = source.next();
  // Peek ahead: if nothing follows, or the next key starts a different row, this row is done.
  if (!source.hasNext() || !source.peek().getKey().getRow().equals(currentRow)) {
    currentRow = null;
  }
  return kv;
}
/**
 * Constructs the single-row iterator.
 *
 * <p>A PeekingIterator is required so the iterator can peek at the next entry and decide
 * whether it still belongs to the current row.
 */
public SingleRowIter(PeekingIterator<Entry<Key,Value>> source) {
  this.source = source;
  if (source.hasNext()) {
    // Pin the row id of the first entry; an empty source leaves currentRow null.
    currentRow = source.peek().getKey().getRow();
  }
}
/**
 * Pulls the Key-Value pairs representing the next row from the supplied
 * {@code PeekingIterator} and returns them as a {@code SortedMap}.
 *
 * @param row iterator positioned at the first entry of a row
 * @return every entry sharing that row id, in key order
 */
public static SortedMap<Key, Value> buildRowMap(PeekingIterator<Entry<Key, Value>> row) {
  TreeMap<Key, Value> aggregatedRow = new TreeMap<>();
  Text rowid = null;
  // Consume entries until the row id changes or the iterator is exhausted.
  while (row.hasNext()) {
    Key nextKey = row.peek().getKey();
    if (rowid != null && !rowid.equals(nextKey.getRow())) {
      break; // next entry starts a new row
    }
    Entry<Key, Value> entry = row.next();
    if (rowid == null) {
      rowid = entry.getKey().getRow(); // the first entry fixes the row id
    }
    aggregatedRow.put(entry.getKey(), entry.getValue());
  }
  return aggregatedRow;
}
/**
 * Maps one row's columns: records the visibility of each PROP column and applies it to the
 * PROPMETA columns that follow it.
 */
@Override
protected void safeMap(Text row, PeekingIterator<Map.Entry<Key, Value>> value, Context context)
    throws IOException, InterruptedException {
  context.setStatus(row.toString());
  String propertyVisibility = null;
  while (value.hasNext()) {
    Map.Entry<Key, Value> column = value.next();
    String family = column.getKey().getColumnFamily().toString();
    if (family.equals("PROP")) {
      // Remember the visibility of the most recent property column.
      propertyVisibility = AccumuloGraph.accumuloVisibilityToVisibility(
          column.getKey().getColumnVisibilityParsed()).getVisibilityString();
    } else if (family.equals("PROPMETA")) {
      updateMetadata(column.getKey(), column.getValue(), propertyVisibility, context);
    }
  }
}
/**
 * Returns true if there is at least one more row to get.
 *
 * <p>When the previous row was only partially read, it is consumed to completion first so
 * the underlying iterator can report whether another row follows; that previous row
 * iterator is then disabled from future use.
 */
@Override
public boolean hasNext() {
  if (lastIter != null) {
    lastIter.consume(); // read through the rest of the previous row
    count += lastIter.getCount();
    lastIter = null;
  }
  return iter.hasNext();
}
/**
 * Reads a single byte from the chunk stream, refilling the internal buffer from the
 * underlying source when it is exhausted.
 *
 * @return the next byte as an int in 0-255, or -1 at end of stream
 * @throws IOException if refilling from the source fails
 */
public int read() throws IOException {
  if (source == null)
    return -1;
  // Fix: read() runs once per byte, so guard debug logging to avoid unconditional
  // string concatenation on this hot path.
  if (log.isDebugEnabled())
    log.debug("pos: " + pos + " count: " + count);
  if (pos >= count) {
    if (fill() <= 0) {
      if (log.isDebugEnabled()) {
        log.debug("done reading input stream at key: "
            + (currentKey == null ? "null" : currentKey.toString()));
        if (source != null && source.hasNext())
          log.debug("next key: " + source.peek().getKey());
      }
      clear();
      return -1;
    }
  }
  return buf[pos++] & 0xff;
}
// NOTE(review): RecordReader.nextKeyValue for file-data chunks. Clears currentK, then
// accumulates the leading non-CHUNK_CF columns of the row into currentK; once the first
// CHUNK_CF entry is reached (still un-consumed, via peek) it records that key and hands
// the peeking iterator to the ChunkInputStream so the value bytes can be streamed lazily.
// Returns true even when the row ends before any CHUNK_CF column appears.
// The trailing "} };" closes the enclosing anonymous class — do not detach it.
@Override public boolean nextKeyValue() throws IOException, InterruptedException { currentK.clear(); if (peekingScannerIterator.hasNext()) { ++numKeysRead; Entry<Key,Value> entry = peekingScannerIterator.peek(); while (!entry.getKey().getColumnFamily().equals(FileDataIngest.CHUNK_CF)) { currentK.add(entry); peekingScannerIterator.next(); if (!peekingScannerIterator.hasNext()) return true; entry = peekingScannerIterator.peek(); } currentKey = entry.getKey(); ((ChunkInputStream) currentV).setSource(peekingScannerIterator); if (log.isTraceEnabled()) log.trace("Processing key/value pair: " + DefaultFormatter.formatEntry(entry, true)); return true; } return false; } };
// NOTE(review): fragment — the loop body continues beyond this excerpt. Peeks at the next
// key/value without consuming it; what is done with currentKeyValue is not visible here.
// TODO confirm against the full method.
protected void peekInSource() { while (kvIter.hasNext()) { Entry<Key,Value> currentKeyValue = kvIter.peek();
// NOTE(review): fragment — end-of-stream handling excerpted from the middle of a larger
// read method (compare the full read() elsewhere in this file): when fill() yields no
// bytes, the stopping key is logged at debug level and stream state is reset via clear().
if (fill() <= 0) { log.debug("done reading input stream at key: " + (currentKey == null ? "null" : currentKey.toString())); if (source != null && source.hasNext()) log.debug("next key: " + source.peek().getKey()); clear();
@Override public boolean next(Text rowKey, AccumuloHiveRow row) throws IOException { Text key = recordReader.createKey(); PeekingIterator<Map.Entry<Key,Value>> iter = recordReader.createValue(); if (recordReader.next(key, iter)) { row.clear(); row.setRowId(key.toString()); List<Key> keys = Lists.newArrayList(); List<Value> values = Lists.newArrayList(); while (iter.hasNext()) { // collect key/values for this row. Map.Entry<Key,Value> kv = iter.next(); keys.add(kv.getKey()); values.add(kv.getValue()); } if (iteratorCount == 0) { // no encoded values, we can push directly to row. pushToValue(keys, values, row); } else { for (int i = 0; i < iteratorCount; i++) { // each iterator creates a level of encoding. SortedMap<Key,Value> decoded = PrimitiveComparisonFilter.decodeRow(keys.get(0), values.get(0)); keys = Lists.newArrayList(decoded.keySet()); values = Lists.newArrayList(decoded.values()); } pushToValue(keys, values, row); // after decoding we can push to value. } return true; } else { return false; } }
@Override public boolean next(Text rowKey, AccumuloHiveRow row) throws IOException { Text key = recordReader.createKey(); PeekingIterator<Map.Entry<Key,Value>> iter = recordReader.createValue(); if (recordReader.next(key, iter)) { row.clear(); row.setRowId(key.toString()); List<Key> keys = Lists.newArrayList(); List<Value> values = Lists.newArrayList(); while (iter.hasNext()) { // collect key/values for this row. Map.Entry<Key,Value> kv = iter.next(); keys.add(kv.getKey()); values.add(kv.getValue()); } if (iteratorCount == 0) { // no encoded values, we can push directly to row. pushToValue(keys, values, row); } else { for (int i = 0; i < iteratorCount; i++) { // each iterator creates a level of encoding. SortedMap<Key,Value> decoded = PrimitiveComparisonFilter.decodeRow(keys.get(0), values.get(0)); keys = Lists.newArrayList(decoded.keySet()); values = Lists.newArrayList(decoded.values()); } pushToValue(keys, values, row); // after decoding we can push to value. } return true; } else { return false; } }
// NOTE(review): fragment — excerpted from the middle of a chunk-stream setup method; the
// inner while loop is cut off before its close. Resets per-chunk state (visibilities,
// buffer position/count), marks end-of-data when the source is empty, and skips forward
// past non-CHUNK_CF keys. TODO confirm the surrounding control flow in the full method.
currentVis = new TreeSet<Text>(); count = pos = 0; if (!source.hasNext()) { log.debug("source has no next"); gotEndMarker = true; while (!currentKey.getColumnFamily().equals(FileDataIngest.CHUNK_CF)) { log.debug("skipping key: " + currentKey.toString()); if (!source.hasNext()) return; entry = source.next();
/**
 * Using the provided PeekingIterator, constructs an AccumuloItem from the Key-Value pairs
 * it returns. If {@code skipBadRow} is true, rows not containing AccumuloItems are skipped
 * until either no more Key-Value pairs remain or an AccumuloItem is found; otherwise the
 * method stops at the first bad row.
 *
 * @param iter source of Key-Value pairs, positioned at the start of a row
 * @param skipBadRow whether rows that are not AccumuloItems should be skipped
 * @return the next AccumuloItem, or null if none could be built
 */
public static AccumuloItem buildFromIterator(PeekingIterator<Entry<Key, Value>> iter, boolean skipBadRow) {
  while (iter.hasNext()) {
    Entry<Key, Value> first = iter.peek();
    AccumuloItem item = constructItemFromFirstPair(first.getKey(), first.getValue());
    if (item != null) {
      iter.next(); // consume the header pair before loading the rest of the row
      item.populateFromRow(iter);
      return item;
    }
    if (!skipBadRow) {
      break;
    }
    buildRowMap(iter); // drain the bad row so the next pass starts on a fresh row
  }
  return null;
}
/**
 * Positions the chunk input stream at the file data for the given hash.
 *
 * <p>Leading non-CHUNK columns (the reference entries) are saved into {@code lastRefs};
 * the peeking iterator is then handed to the stream positioned at the first CHUNK_CF
 * entry.
 *
 * @param hash row id of the file data to read
 * @return the shared ChunkInputStream, reset to the requested data
 * @throws IOException if the stream cannot be re-initialized
 */
public ChunkInputStream getData(String hash) throws IOException {
  scanner.setRange(new Range(hash));
  scanner.setBatchSize(1);
  lastRefs.clear();
  PeekingIterator<Entry<Key,Value>> pi = new PeekingIterator<Entry<Key,Value>>(scanner.iterator());
  // Fix: also stop when the iterator is exhausted — previously a row containing no
  // CHUNK_CF column made pi.peek() throw NoSuchElementException after the refs ran out.
  while (pi.hasNext() && !pi.peek().getKey().getColumnFamily().equals(FileDataIngest.CHUNK_CF)) {
    lastRefs.add(pi.peek());
    pi.next();
  }
  cis.clear();
  cis.setSource(pi);
  return cis;
}