Refine search
private String columnDefinitions(List<DataTypeTest.Input<?>> inputs)
{
    // Emit one "col_<index> <insert type>" definition per input, joined by ",\n".
    Stream<String> definitions = range(0, inputs.size())
            .mapToObj(i -> format("col_%d %s", i, inputs.get(i).getInsertType()));
    return Joiner.on(",\n").join(definitions.iterator());
}
}
private static void dropColumns(StringBuilder sql, String columnPrefix, String... columnNames)
{
    // Append "<prefix><name>" for every column, comma-separating consecutive entries.
    for (int i = 0; i < columnNames.length; i++) {
        if (i > 0) {
            sql.append(", ");
        }
        sql.append(columnPrefix).append(columnNames[i]);
    }
}
private void files(Path dir, Throwing.Consumer<Path> consumer) throws Exception
{
    // Walk `dir` recursively and hand every regular "*.svg" file to `consumer`,
    // in sorted path order; the walk stream is closed when the try block exits.
    try (Stream<Path> walk = Files.walk(dir)) {
        Stream<Path> svgFiles = walk
                .filter(Files::isRegularFile)
                .filter(candidate -> candidate.toString().endsWith(".svg"))
                .sorted();
        for (Iterator<Path> it = svgFiles.iterator(); it.hasNext(); ) {
            consumer.accept(it.next());
        }
    }
}
private Iterator<InternalHiveSplit> createInternalHiveSplitIterator(Path path, FileSystem fileSystem, InternalHiveSplitFactory splitFactory, boolean splittable)
{
    // Lazily enumerate the files under `path` (recursing only when the walker is
    // configured for it) and turn each into an InternalHiveSplit, dropping files
    // for which the factory produces no split.
    HiveFileIterator files = new HiveFileIterator(
            path,
            fileSystem,
            directoryLister,
            namenodeStats,
            recursiveDirWalkerEnabled ? RECURSE : IGNORED);
    return Streams.stream(files)
            .map(file -> splitFactory.createInternalHiveSplit(file, splittable))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .iterator();
}
private static List<Integer> readSlots(List<String> slotStrings) { List<Integer> slots = new ArrayList<>(); for (String slotString : slotStrings) { if (slotString.startsWith(TOKEN_SLOT_IN_TRANSITION)) { // not interesting continue; } if (slotString.contains("-")) { // slot range Iterator<String> it = DASH_PATTERN.splitAsStream(slotString).iterator(); int from = Integer.parseInt(it.next()); int to = Integer.parseInt(it.next()); for (int slot = from; slot <= to; slot++) { slots.add(slot); } continue; } slots.add(Integer.parseInt(slotString)); } return Collections.unmodifiableList(slots); }
public void send(Map<Integer, NodeInfo> taskToNode, Map<NodeInfo, IConnection> connections)
{
    // Group outgoing messages by destination node, then push each non-empty
    // bundle over that node's connection; a node without a live connection is
    // only logged and its bundle is not sent.
    HashMap<NodeInfo, Stream<TaskMessage>> bundles = groupBundleByDestination(taskToNode);
    bundles.forEach((node, bundle) -> {
        IConnection connection = connections.get(node);
        if (connection == null) {
            LOG.warn("Connection not available for hostPort {}", node);
            return;
        }
        Iterator<TaskMessage> messages = bundle.iterator();
        if (messages.hasNext()) {
            connection.send(messages);
        }
    });
}
// NOTE(review): incomplete excerpt — the enclosing method is not visible;
// `pipeline`, `files`, `styles`, `scripts`, `css`, `js`, `shouldProcess`,
// `count` and `total` all come from the surrounding scope.
// Compile stylesheet sources first, then script sources; `compile` returns an
// int that is accumulated into `count` (presumably a processed-file count —
// TODO confirm against the full method).
count += compile(pipeline, files.stream().filter(styles).iterator(), MediaType.css, css, shouldProcess, count, total);
// Collect the CSS results first, then append the JS results.
List<File> result = new ArrayList<>(css.getResult());
count += compile(pipeline, files.stream().filter(scripts).iterator(), MediaType.js, js, shouldProcess, count, total);
result.addAll(js.getResult());
/**
 * Joins each elem in the {@link Stream} with the given glue.
 * For example, given a list of {@code Integers}, you can create
 * a comma-separated list by calling {@code join(numbers, ", ")}.
 *
 * @see StringUtils#join(Iterable, String)
 */
public static <X> String join(Stream<X> l, String glue) {
    StringBuilder joined = new StringBuilder();
    Iterator<X> elements = l.iterator();
    while (elements.hasNext()) {
        // Append the element, then the glue only when another element follows.
        joined.append(elements.next());
        if (elements.hasNext()) {
            joined.append(glue);
        }
    }
    return joined.toString();
}
// NOTE(review): garbled excerpt — lines from at least two code paths appear merged:
// the orphaned `.map(originalSolutions::get)` chain below attaches to no expression,
// and the final lambda is cut off mid-body. Recover the original from version
// control before editing; comments here only annotate what is visible.
MultiCriteriaLabelSetting stationRouter = new MultiCriteriaLabelSetting(accessEgressGraphExplorer, flagEncoder, reverse, maxWalkDistancePerLeg, false, false, false, maxVisitedNodesForRequest, new ArrayList<>());
stationRouter.setBetaWalkTime(betaWalkTime);
// Label search launched from destNode toward startNode — presumably the
// access/egress leg; verify direction semantics against calcLabels.
Iterator<Label> stationIterator = stationRouter.calcLabels(destNode, startNode, initialTime, blockedRouteTypes).iterator();
List<Label> stationLabels = new ArrayList<>();
while (stationIterator.hasNext()) {
    Label label = stationIterator.next();
    // Keep only labels that actually reach the start node.
    if (label.adjNode == startNode) {
        stationLabels.add(label);
        smallestStationLabelWeight = Long.MAX_VALUE;
        Iterator<Label> iterator = router.calcLabels(startNode, destNode, initialTime, blockedRouteTypes).iterator();
        Map<Label, Label> originalSolutions = new HashMap<>();
        highestWeightForDominationTest = router.weight(discoveredSolutions.get(discoveredSolutions.size()-1));
    } else {
        // Tightest bound over feasible solutions (possibly transfer-limited).
        highestWeightForDominationTest = discoveredSolutions.stream().filter(s -> !s.impossible && (ignoreTransfers || s.nTransfers <= 1)).mapToLong(router::weight).min().orElse(Long.MAX_VALUE);
        // NOTE(review): the chain below has no receiver — fragment boundary.
        .map(originalSolutions::get)
        .map(l -> new TripFromLabel(gtfsStorage, realtimeFeed).getTransitions(arriveBy, flagEncoder, graphExplorer, l)).collect(Collectors.toList());
List<List<Label.Transition>> paths = pathsToStations.stream().map(p -> {
    if (arriveBy) {
        List<Label.Transition> pp = new ArrayList<>(p.subList(1, p.size()));
@SafeVarargs @Override public final Iterable<EntityRef> getEntitiesWith(Class<? extends Component>... componentClasses) { return () -> entityStore.keySet().stream() //Keep entities which have all of the required components .filter(id -> Arrays.stream(componentClasses) .allMatch(component -> componentStore.get(id, component) != null)) .map(id -> getEntity(id)) .iterator(); }
@SuppressWarnings("unchecked")
void assertInnerSubscriberBefore(FluxZip.ZipCoordinator c) {
    // Inspect the first inner subscriber's pre-termination state: not terminated,
    // configured prefetch of 123 visible, nothing buffered, not cancelled.
    FluxZip.ZipInner inner = (FluxZip.ZipInner) c.inners().iterator().next();
    assertThat(inner.scan(Scannable.Attr.TERMINATED)).isFalse();
    assertThat(inner.scan(Scannable.Attr.PREFETCH)).isEqualTo(123);
    assertThat(inner.scan(Scannable.Attr.BUFFERED)).isEqualTo(0);
    assertThat(inner.scan(Scannable.Attr.CANCELLED)).isFalse();
}
public Stream<String> searchAllLogs(Pattern pattern, Function<Matcher, String> matchProcessor) throws IOException { final List<String> matches = new ArrayList<>(2); try (Stream<Path> logFilesStream = Files.list(log)) { final Iterator<Path> logFiles = logFilesStream.iterator(); while (logFiles.hasNext()) { final Path logFile = logFiles.next(); if (!logFile.getFileName().toString().endsWith(".log")) { // ignore logs for previous runs that have a number suffix continue; } try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(logFile.toFile()), StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { Matcher matcher = pattern.matcher(line); if (matcher.matches()) { matches.add(matchProcessor.apply(matcher)); } } } } } return matches.stream(); } }
// NOTE(review): incomplete excerpt — the method is cut off inside the while-loop,
// and `tonode` is used but never declared in the visible code (presumably derived
// per transfer in the missing lines). Verify against the full source before editing.
private Stream<TransferWithTime> getType0TransferWithTimes(GTFSFeed gtfsFeed) {
    // Consider only transfer records with transfer_type == 0.
    return gtfsFeed.transfers.entrySet()
            .parallelStream()
            .filter(e -> e.getValue().transfer_type == 0)
            .map(e -> {
                PointList points = new PointList(2, false);
                // Graph node registered for the transfer's origin stop.
                final int fromnode = gtfsStorage.getStationNodes().get(e.getValue().from_stop_id);
                // Routing starts at epoch millisecond 0 — presumably only the
                // elapsed duration matters here, not the absolute time; confirm.
                Iterator<Label> iterator = router.calcLabels(fromnode, tonode, Instant.ofEpochMilli(0), 0).iterator();
                Label solution = null;
                // Record a label that reaches the target node (loop truncated here).
                while (iterator.hasNext()) {
                    Label label = iterator.next();
                    if (tonode == label.adjNode) {
                        solution = label;
private Iterator<LocatedFileStatus> statusFromObjects(List<S3ObjectSummary> objects) { // NOTE: for encrypted objects, S3ObjectSummary.size() used below is NOT correct, // however, to get the correct size we'd need to make an additional request to get // user metadata, and in this case it doesn't matter. return objects.stream() .filter(object -> !object.getKey().endsWith(PATH_SEPARATOR)) .map(object -> new FileStatus( object.getSize(), false, 1, BLOCK_SIZE.toBytes(), object.getLastModified().getTime(), qualifiedPath(new Path(PATH_SEPARATOR + object.getKey())))) .map(this::createLocatedFileStatus) .iterator(); }
@SuppressWarnings("unchecked")
void assertInnerSubscriber(FluxZip.ZipCoordinator c) {
    // Inspect the first inner subscriber: not terminated, configured prefetch of
    // 123 visible, three inners total, and this inner already cancelled.
    FluxZip.ZipInner inner = (FluxZip.ZipInner) c.inners().iterator().next();
    assertThat(inner.scan(Scannable.Attr.TERMINATED)).isFalse();
    assertThat(inner.scan(Scannable.Attr.PREFETCH)).isEqualTo(123);
    assertThat(c.inners()).hasSize(3);
    assertThat(inner.scan(Scannable.Attr.CANCELLED)).isTrue();
}
// NOTE(review): garbled excerpt — it opens mid stream-pipeline (the `.range(...)`
// chain has no receiver), the braces do not balance, and the trailing
// `.flatMap(List::stream).iterator()` belongs to some enclosing pipeline not
// visible here. Recover the original before editing.
        .range(1, splits.size() - 3)
        // Candidate data sources: progressively longer prefixes of the delimited id.
        .mapToObj(dataSourceDelimiterOrder -> DELIMITER_JOINER.join(splits.subList(0, dataSourceDelimiterOrder)))
        // Skip the candidate whose length equals the already-tried probable data source.
        .filter(dataSource -> dataSource.length() != probableDataSource.length())
        .flatMap(dataSource -> iteratePossibleParsingsWithDataSource(dataSource, segmentId).stream())
        .iterator();
return Iterables.concat(probableParsings, otherPossibleParsings);
} else {
})
        .flatMap(List::stream)
        .iterator();