/**
 * Returns {@code properties} with every leading {@link ConstantProperty} removed.
 * Only the contiguous prefix of constants is dropped; constants appearing after
 * the first non-constant property are retained.
 */
public static <T> List<LocalProperty<T>> stripLeadingConstants(List<? extends LocalProperty<T>> properties) {
    PeekingIterator<? extends LocalProperty<T>> remaining = peekingIterator(properties.iterator());
    // Advance past the leading run of constants without consuming anything else.
    for (; remaining.hasNext(); remaining.next()) {
        if (!(remaining.peek() instanceof ConstantProperty)) {
            break;
        }
    }
    // Snapshot whatever the iterator has not yet consumed.
    return ImmutableList.copyOf(remaining);
}
// NOTE(review): tail of a larger expression -- the anonymous class this method
// belongs to, and the enclosing statement closed by the trailing "};", start
// outside this view. Code left byte-identical; comments only.
@Override
protected Entry<Cut<C>, Range<C>> computeNext() {
    // Underlying range iterator exhausted: signal end of this derived iterator.
    if (!backingItr.hasNext()) {
        return endOfData();
    }
    Range<C> range = backingItr.next();
    // Emit the range keyed by its upper bound, but only while that upper bound
    // is still above the window's lower bound; otherwise iteration terminates.
    return upperBoundWindow.lowerBound.isLessThan(range.upperBound)
        ? Maps.immutableEntry(range.upperBound, range)
        : endOfData();
}
};
// NOTE(review): this method appears truncated in this view -- several opened
// braces are never closed here. Code left byte-identical; comments only.
@Override
protected void _run(String[] arguments) throws Exception {
    // Peeking iterator over the CLI arguments; flags come as "-name value" pairs.
    PeekingIterator<String> a=Iterators.peekingIterator(Iterators.forArray(arguments));
    if (!a.hasNext()) usage();
    while(a.hasNext() && a.peek().startsWith("-")) {
        // Strip the leading '-'; intern() makes the later '==' comparison valid.
        String flagName=a.next().substring(1).intern();
        if (!a.hasNext()) usage();
        String flagValue=a.next();
        // Reference comparison works only because flagName was intern()ed above;
        // equals() would be the conventional (and safer) comparison.
        if (flagName=="clusterId") {
            cluster=applicationContext.getBean(flagValue,Cluster.class);
            if (!a.hasNext()) usage();
            // First non-flag token names the flow bean; all remaining arguments
            // are forwarded to the flow itself.
            String flowId=a.next();
            Flow f=enrichedContext.getBean(flowId,Flow.class);
            List<String> flowArgs=Lists.newArrayList();
            Iterators.addAll(flowArgs, a);
            cluster.runFlow(jar, f, flowArgs);
/**
 * Returns the next element of the k-way merge: the smallest head element among
 * the already-queued iterators and the next iterator from the lazy source.
 * The consumed iterator is re-queued if it still has elements.
 */
@Override
public T next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    // Select the source iterator currently holding the smallest head element.
    final PeekingIterator<T> source;
    if (!iterOfIterators.hasNext()) {
        // No fresh iterators left: the queue head is the winner.
        source = firstElementComparedPQueue.remove();
    } else if (firstElementComparedPQueue.isEmpty()) {
        // Queue empty: take the next fresh iterator.
        source = iterOfIterators.next();
    } else {
        // Both available: compare their head elements (ties favor the queue).
        T queueHead = firstElementComparedPQueue.peek().peek();
        T freshHead = iterOfIterators.peek().peek();
        source = comparator.compare(queueHead, freshHead) <= 0
            ? firstElementComparedPQueue.remove()
            : iterOfIterators.next();
    }
    T result = source.next();
    // Iterators that are not yet exhausted go (back) into the priority queue.
    if (source.hasNext()) {
        firstElementComparedPQueue.add(source);
    }
    return result;
}
/**
 * Collects writes from {@code resultIterator} into a batch, stopping at the
 * configured batch size but then extending the batch with any immediately
 * following entries that share a start timestamp already in the batch.
 */
private WriteBatch getBatchOfWrites(SweepableCellsRow row, PeekingIterator<Map.Entry<Cell, Value>> resultIterator, long sweepTs) {
    WriteBatch writeBatch = new WriteBatch();
    // Fill the batch up to the size limit.
    while (resultIterator.hasNext() && writeBatch.writesByStartTs.size() < SweepQueueUtils.SWEEP_BATCH_SIZE) {
        Map.Entry<Cell, Value> entry = resultIterator.next();
        SweepableCellsTable.SweepableCellsColumn col = computeColumn(entry);
        long startTs = getTimestamp(row, col);
        // A transaction known to commit after the sweep timestamp short-circuits
        // the batch: only this single write is returned.
        if (knownToBeCommittedAfterSweepTs(startTs, sweepTs)) {
            writeBatch.add(ImmutableList.of(getWriteInfo(startTs, entry.getValue())));
            return writeBatch;
        }
        writeBatch.merge(getWrites(row, col, entry.getValue()));
    }
    // there may be entries remaining with the same start timestamp as the last processed one. If that is the case
    // we want to include these ones as well. This is OK since there are at most MAX_CELLS_GENERIC - 1 of them.
    while (resultIterator.hasNext()) {
        // Peek first so an entry with a new timestamp is left for the next batch.
        Map.Entry<Cell, Value> entry = resultIterator.peek();
        SweepableCellsTable.SweepableCellsColumn col = computeColumn(entry);
        long timestamp = getTimestamp(row, col);
        if (writeBatch.writesByStartTs.containsKey(timestamp)) {
            writeBatch.merge(getWrites(row, col, entry.getValue()));
            resultIterator.next();
        } else {
            break;
        }
    }
    return writeBatch;
}
/**
 * Returns the next combined element: the first remaining element is seeded via
 * {@code fn.apply(element, null)}, and every subsequent element that the
 * comparator deems equal to the running result is folded in with {@code fn}.
 */
@Override
public InType next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    // Seed the accumulator from the first element.
    InType accumulated = fn.apply(it.next(), null);
    // Fold in all consecutive elements comparing equal to the running result.
    while (hasNext() && comparator.compare(accumulated, it.peek()) == 0) {
        accumulated = fn.apply(accumulated, it.next());
    }
    return accumulated;
}
/**
 * Inserts every row produced by {@code values} using a single INSERT statement
 * derived from the first row, executing JDBC batch updates of {@code batchSize}
 * rows at a time. Every row must have exactly the same key set as the first
 * row; a mismatch fails the state check.
 *
 * Fix: reuse the already-peeked {@code first} row for the key snapshot instead
 * of peeking the iterator a second time; also repaired the TODO comment typo.
 */
public void insertMaps(Iterator<Map<String,Object>> values, int batchSize) {
    // Nothing to insert.
    if (!values.hasNext()) {
        return;
    }
    PeekingIterator<Map<String,Object>> vs = peekingIterator(values);
    // Derive the SQL and the expected column set from the first row WITHOUT
    // consuming it, so it still participates in the first batch.
    Map<String,Object> first = vs.peek();
    final String sql = writerStrategy.insertStatement(new StringBuilder(), definition, first).toString();
    ImmutableList<String> keys = ImmutableList.copyOf(first.keySet());
    Iterator<List<Map<String,Object>>> it = partition(vs, batchSize);
    while (it.hasNext()) {
        List<Map<String,Object>> batch = it.next();
        final List<Object[]> batchValues = Lists.newArrayListWithExpectedSize(batch.size());
        for (Map<String,Object> b : batch) {
            // Each row must expose the same columns as the first row, otherwise
            // the shared statement would bind the wrong values.
            ImmutableList<String> actualKeys = ImmutableList.copyOf(b.keySet());
            check.state(actualKeys.equals(keys), "Keys don't match up to {} for {}", keys, actualKeys);
            batchValues.add(writerStrategy.fillValues(definition, b).toArray());
        }
        /*
         * TODO this will keep making a prepared statement per batch.
         * Hopefully the JDBC driver has some caching for this.
         */
        sqlExecutor.batchUpdate(sql, batchValues);
    }
}
// NOTE(review): this test method appears truncated in this view (no closing
// brace). Code left byte-identical; comments only.
public void testGet_collections() {
    // ArbitraryInstances should hand back canonical empty instances for the
    // collection-framework types, and trivially "equal" difference views.
    assertEquals(ImmutableSet.of().iterator(), ArbitraryInstances.get(Iterator.class));
    assertFalse(ArbitraryInstances.get(PeekingIterator.class).hasNext());
    assertFalse(ArbitraryInstances.get(ListIterator.class).hasNext());
    assertEquals(ImmutableSet.of(), ArbitraryInstances.get(Iterable.class));
    assertEquals(ImmutableSortedSet.of(), ArbitraryInstances.get(SortedSet.class));
    assertEquals(ImmutableSortedSet.of(), ArbitraryInstances.get(ImmutableSortedSet.class));
    assertEquals(ImmutableList.of(), ArbitraryInstances.get(Collection.class));
    assertEquals(ImmutableList.of(), ArbitraryInstances.get(ImmutableCollection.class));
    assertEquals(ImmutableList.of(), ArbitraryInstances.get(List.class));
    assertEquals(ImmutableList.of(), ArbitraryInstances.get(ImmutableList.class));
    assertEquals(ImmutableMap.of(), ArbitraryInstances.get(Map.class));
    assertTrue(ArbitraryInstances.get(MapDifference.class).areEqual());
    assertTrue(ArbitraryInstances.get(SortedMapDifference.class).areEqual());
    assertEquals(Range.all(), ArbitraryInstances.get(Range.class));
    assertTrue(ArbitraryInstances.get(NavigableSet.class).isEmpty());
    assertTrue(ArbitraryInstances.get(NavigableMap.class).isEmpty());
// Scripted-mock test: the order and count of these expectations mirror the
// call sequence inside the combining iterator's next(), so the code is left
// byte-identical (reordering expectations could break strict-mock verification).
@Test
public void testNext() {
    // Four hasNext() probes cover: the guard, the seed, and two loop checks.
    boolean expected = true;
    EasyMock.expect(peekIterator.hasNext()).andReturn(expected).times(4);
    String defaultString = "S1";
    String resString = "S2";
    // Seed: first element combined with null produces the running result.
    EasyMock.expect(peekIterator.next()).andReturn(defaultString);
    EasyMock.expect(binaryFn.apply(EasyMock.eq(defaultString), EasyMock.isNull()))
        .andReturn(resString);
    // Second element compares equal (0), so it is folded into the result.
    EasyMock.expect(peekIterator.next()).andReturn(defaultString);
    EasyMock.expect(comparator.compare(EasyMock.eq(resString), EasyMock.eq(defaultString)))
        .andReturn(0);
    EasyMock.expect(peekIterator.next()).andReturn(defaultString);
    EasyMock.expect(binaryFn.apply(EasyMock.eq(resString), EasyMock.eq(defaultString)))
        .andReturn(resString);
    // Third element compares unequal (1), terminating the fold.
    EasyMock.expect(comparator.compare(EasyMock.eq(resString), EasyMock.eq(defaultString)))
        .andReturn(1);
    EasyMock.replay(peekIterator);
    EasyMock.replay(binaryFn);
    EasyMock.replay(comparator);
    String actual = testingIterator.next();
    Assert.assertEquals(resString, actual);
    EasyMock.verify(peekIterator);
    EasyMock.verify(comparator);
    EasyMock.verify(binaryFn);
}
// NOTE(review): this fragment is garbled/truncated -- it begins mid-expression
// (the variable receiving the Builder is outside this view, presumably
// mergedRangesBuilder), and braces after "break;" do not balance; lines appear
// to have been dropped during extraction. Code left byte-identical.
new ImmutableList.Builder<>(ranges.size());
// Sort ranges lexicographically so overlapping/adjacent ranges are neighbors.
Collections.sort(ranges, Range.<C>rangeLexOrdering());
PeekingIterator<Range<C>> peekingItr = Iterators.peekingIterator(ranges.iterator());
while (peekingItr.hasNext()) {
    Range<C> range = peekingItr.next();
    // Coalesce every following range that connects to the current one.
    while (peekingItr.hasNext()) {
        Range<C> nextRange = peekingItr.peek();
        if (range.isConnected(nextRange)) {
            // Connected ranges may only touch, never truly overlap.
            checkArgument(
                range.intersection(nextRange).isEmpty(),
                "Overlapping ranges not permitted but found %s overlapping %s",
                range, nextRange);
            range = range.span(peekingItr.next());
        } else {
            break;
            mergedRangesBuilder.add(range);
            ImmutableList<Range<C>> mergedRanges = mergedRangesBuilder.build();
            // Canonicalize the trivial results: empty set and the all-range.
            if (mergedRanges.isEmpty()) {
                return of();
            } else if (mergedRanges.size() == 1
                && Iterables.getOnlyElement(mergedRanges).equals(Range.all())) {
                return all();
// NOTE(review): fragment of a larger method -- braces do not balance, the
// result of Iterators.peekingIterator(...) below is discarded (positiveItr is
// presumably assigned in a line lost from this view), and the enclosing
// method signature is not visible. Code left byte-identical.
if (complementLowerBoundWindow.hasLowerBound()) {
    // Restrict candidate positive ranges to those at or after the window's
    // lower endpoint (inclusive only for a CLOSED bound).
    positiveRanges = positiveRangesByUpperBound
        .tailMap(
            complementLowerBoundWindow.lowerEndpoint(),
            complementLowerBoundWindow.lowerBoundType() == BoundType.CLOSED)
        .values();
} else {
    Iterators.peekingIterator(positiveRanges.iterator());
    final Cut<C> firstComplementRangeLowerBound;
    // The complement starts at belowAll only when the window admits it and no
    // positive range already starts there.
    if (complementLowerBoundWindow.contains(Cut.<C>belowAll())
        && (!positiveItr.hasNext() || positiveItr.peek().lowerBound != Cut.<C>belowAll())) {
        firstComplementRangeLowerBound = Cut.belowAll();
    } else if (positiveItr.hasNext()) {
        // Otherwise the first gap begins where the first positive range ends.
        firstComplementRangeLowerBound = positiveItr.next().upperBound;
    } else {
        return Iterators.emptyIterator();
/**
 * Parses a CASE expression:
 * CASE (WHEN condition THEN value)* ELSE value END.
 * The ELSE branch is mandatory in this grammar.
 */
private Expression parseCase(PeekingIterator<Token> tokens) throws ParserException {
    // Capture the CASE token up front for source-location reporting.
    Token caseToken = tokens.peek();
    assertNext(tokens, Tokens.CASE);
    ImmutableList.Builder<Expression> whenConditions = ImmutableList.builder();
    ImmutableList.Builder<Expression> thenValues = ImmutableList.builder();
    // Each WHEN <condition> THEN <value> clause appends one parallel pair.
    while (tokens.peek().is(Tokens.WHEN)) {
        tokens.next();
        whenConditions.add(parseExpression(tokens));
        assertNext(tokens, Tokens.THEN);
        thenValues.add(parseExpression(tokens));
    }
    assertNext(tokens, Tokens.ELSE);
    Expression fallback = parseExpression(tokens);
    assertNext(tokens, Tokens.END);
    return new CaseExpression(whenConditions.build(), thenValues.build(), fallback, caseToken.getLoc());
}
/**
 * Parses statements until the token stream is exhausted.
 * Every statement must be followed by the separator token.
 *
 * @throws ParserException if a statement or its terminator is malformed
 */
public ImmutableList<Statement> parseStatements(PeekingIterator<Token> tokens) throws ParserException {
    ImmutableList.Builder<Statement> parsed = ImmutableList.builder();
    while (tokens.hasNext()) {
        parsed.add(parseStatement(tokens));
        // Each statement is terminated by the separator token.
        assertNext(tokens, String.valueOf(Tokens.COMMA_COLON));
    }
    return parsed.build();
}
// NOTE(review): fragment of a larger method -- it begins mid-expression (the
// variable receiving the first ternary is outside this view, presumably the
// iteration starting point) and is truncated at the end. Code left byte-identical.
complementLowerBoundWindow.hasUpperBound()
    ? complementLowerBoundWindow.upperEndpoint()
    : Cut.<C>aboveAll();
// Whether the window's upper endpoint itself is included in the scan.
boolean inclusive = complementLowerBoundWindow.hasUpperBound()
    && complementLowerBoundWindow.upperBoundType() == BoundType.CLOSED;
// Positive ranges whose upper bounds precede the starting point, used to
// locate the first complement-range lower bound when walking downward.
final PeekingIterator<Range<C>> positiveItr =
    Iterators.peekingIterator(
        positiveRangesByUpperBound
            .headMap(startingPoint, inclusive)
            .iterator());
Cut<C> cut;
if (positiveItr.hasNext()) {
    // If the last positive range reaches aboveAll, the first gap starts at its
    // lower bound; otherwise find the next positive range's lower bound.
    cut = (positiveItr.peek().upperBound == Cut.<C>aboveAll())
        ? positiveItr.next().lowerBound
        : positiveRangesByLowerBound.higherKey(positiveItr.peek().upperBound);
} else if (!complementLowerBoundWindow.contains(Cut.<C>belowAll())
    || positiveRangesByLowerBound.containsKey(Cut.belowAll())) {
    // No gap exists below the window: the complement view is empty here.
    return Iterators.emptyIterator();
} else {
    cut = positiveRangesByLowerBound.higherKey(Cut.<C>belowAll());
/**
 * Returns a descending iterator over (upper bound, range) entries restricted
 * to {@code upperBoundWindow}. Candidate ranges are taken in descending order
 * of lower bound; the first candidate is skipped when its upper bound already
 * exceeds the window, and iteration stops once upper bounds fall out of it.
 */
@Override
Iterator<Entry<Cut<C>, Range<C>>> descendingEntryIterator() {
    Collection<Range<C>> candidates;
    if (upperBoundWindow.hasUpperBound()) {
        // Only ranges whose lower bound is strictly below the window's upper
        // endpoint can intersect the window.
        candidates =
            rangesByLowerBound
                .headMap(upperBoundWindow.upperEndpoint(), false)
                .descendingMap()
                .values();
    } else {
        candidates = rangesByLowerBound.descendingMap().values();
    }
    final PeekingIterator<Range<C>> backingItr = Iterators.peekingIterator(candidates.iterator());
    // Skip the first candidate if its upper bound lies beyond the window.
    if (backingItr.hasNext()
        && upperBoundWindow.upperBound.isLessThan(backingItr.peek().upperBound)) {
        backingItr.next();
    }
    return new AbstractIterator<Entry<Cut<C>, Range<C>>>() {
        @Override
        protected Entry<Cut<C>, Range<C>> computeNext() {
            if (!backingItr.hasNext()) {
                return endOfData();
            }
            Range<C> range = backingItr.next();
            // Yield until upper bounds drop below the window's lower bound.
            return upperBoundWindow.lowerBound.isLessThan(range.upperBound)
                ? Maps.immutableEntry(range.upperBound, range)
                : endOfData();
        }
    };
}
// NOTE(review): this fragment is garbled/truncated -- the "} else {" below has
// no visible matching "if" (lines appear lost in extraction) and braces do not
// balance. Code left byte-identical; comments only.
List<Revision> pmax = newArrayListWithCapacity(capacity);
// Walk the existing vector in parallel with the incoming revisions, both
// presumably ordered by cluster id -- TODO confirm against upstream source.
PeekingIterator<Revision> it = peekingIterator(vector.iterator());
for (Revision r : revisions) {
    while (it.hasNext() && it.peek().getClusterId() < r.getClusterId()) {
        pmax.add(it.next());
        // NOTE(review): this second next() discards an element immediately
        // after adding the previous one -- likely an artifact of the garbled
        // extraction rather than intended behavior; verify before relying on it.
        it.next();
    } else {
        // Drain whatever remains of the original vector into the result.
        Iterators.addAll(pmax, it);
        return new RevisionVector(toArray(pmax, Revision.class), false, false);
// NOTE(review): tail of a larger expression -- the anonymous class and the
// call closed by the trailing "});" start outside this view. Byte-identical.
@Override
public boolean process(Node node) {
    // Running total of weighted counts accumulated over the traversal so far.
    sum += node.weightedCount;
    // Emit one value for every requested quantile threshold that the running
    // total has now crossed (iterator yields the quantile thresholds).
    while (iterator.hasNext() && sum > iterator.peek() * weightedCount) {
        iterator.next();
        // we know the max value ever seen, so cap the percentile to provide better error
        // bounds in this case
        long value = Math.min(node.getUpperBound(), max);
        builder.add(value);
    }
    // Keep traversing only while quantiles remain unresolved.
    return iterator.hasNext();
}
});
// NOTE(review): this method appears truncated in this view -- opened braces
// are not all closed here. Code left byte-identical; comments only.
public void merge(List<StackTraceElement> stackTraceElements, Thread.State threadState) {
    // Walk the stack root-first (hence the reverse), merging each frame into
    // the profile tree level by level.
    PeekingIterator<StackTraceElement> i =
        Iterators.peekingIterator(Lists.reverse(stackTraceElements).iterator());
    ProfileNode lastMatchedNode = null;
    List<ProfileNode> mergeIntoNodes = rootNodes;
    while (i.hasNext()) {
        StackTraceElement stackTraceElement = i.next();
        String fullClassName = stackTraceElement.getClassName();
        int index = fullClassName.lastIndexOf('.');
        int lineNumber = stackTraceElement.getLineNumber();
        // Only the leaf frame (nothing left to peek) records the thread state;
        // interior frames carry NONE.
        Profile.LeafThreadState leafThreadState =
            i.hasNext() ? Profile.LeafThreadState.NONE : getThreadState(threadState);