/** Returns a possibly-parallel {@code Stream} over this collection's elements. */
@Override
public Stream<E> parallelStream() {
    final Stream<E> sequential = stream();
    return sequential.parallel();
}
/**
 * Stops this component and its proxy.
 *
 * <p>Runs {@code super.stop()} and {@code proxy.stop()} via a parallel stream so the two
 * shutdowns can proceed concurrently; their completion order is not deterministic.
 * NOTE(review): assumes the two stop operations are independent of each other — confirm
 * neither relies on the other having completed first.
 */
@Override public void stop() { Stream.<Runnable>of(super::stop, proxy::stop).parallel().forEach(Runnable::run); } }
/** Delegates to {@link #stream()} and flips the resulting pipeline into parallel mode. */
@Override
public Stream<E> parallelStream() {
    return this.stream().parallel();
}
/**
 * 8. Using Java 8 Stream API in parallel.
 *
 * <p>Sums {@code key + value} over all map entries with a parallel stream.
 * Fix: the original accumulated into a shared {@code long[]} cell from a parallel
 * {@code forEach} — a data race producing nondeterministic (typically too-small) totals.
 * A {@code mapToLong(...).sum()} reduction is race-free and expresses the same intent.
 *
 * @return the sum of {@code key + value} over every entry of {@code map}
 * @throws IOException declared only for benchmark-signature compatibility
 */
@Benchmark
public long test8_UsingJava8StreamApiParallel() throws IOException {
    return map.entrySet().stream().parallel()
            .mapToLong(e -> e.getKey() + e.getValue())
            .sum();
}
/**
 * Stops the cluster, then shuts down this component and its proxy.
 *
 * <p>{@code stopCluster()} runs first; the remaining two stop calls are executed via a
 * parallel stream so they can proceed concurrently. Completion order of
 * {@code super.stop()} and {@code proxy.stop()} is not deterministic.
 * NOTE(review): assumes those two shutdowns are independent — confirm.
 */
@Override public void stop() { stopCluster(); Stream.<Runnable>of(super::stop, proxy::stop).parallel().forEach(Runnable::run); }
/**
 * Scans the given package names for classes carrying the given annotation.
 *
 * @param annotation the annotation name to scan for
 * @param packages   the package names to scan
 * @return a stream of matching classes
 */
default Stream<Class> scan(String annotation, Stream<String> packages) {
    return packages
            .parallel()
            .flatMap(packageName -> this.scan(annotation, packageName));
}
/**
 * Verifies every rule and combines the individual results.
 *
 * <p>Invokes {@code Rule::verify} on each rule via a parallel stream, collecting one
 * {@code CompletableFuture<Boolean>} per rule, then folds them with logical AND
 * starting from {@code Boolean.TRUE} through {@code CompletableFutureUtil.allOf}.
 * The unchecked suppression covers the generic-array creation via
 * {@code CompletableFuture[]::new}, which cannot be expressed type-safely in Java.
 *
 * @return a future that completes with {@code true} only if all rules verify
 */
@Override @SuppressWarnings("unchecked") public CompletableFuture<Boolean> verify() { final CompletableFuture<Boolean>[] futures; futures = rules.stream().parallel() .map( Rule::verify ) .toArray( CompletableFuture[]::new ); return CompletableFutureUtil.allOf(Boolean.TRUE, Boolean::logicalAnd, futures); } }
/**
 * Scans the given packages for classes carrying the given annotation.
 *
 * @param annotation the annotation to scan for
 * @param packages   the packages to scan
 * @return a stream of matching classes
 */
default Stream<Class> scan(String annotation, Package... packages) {
    return Stream.of(packages)
            .parallel()
            .flatMap(p -> scan(annotation, p.getName()));
}
/** Obtains the sequential stream for this collection and returns its parallel form. */
@Override
public Stream<E> parallelStream() {
    Stream<E> base = stream();
    return base.parallel();
}
/** Returns a parallel variant of this sequence by delegating to {@link #toStream()}. */
@Override
public Stream<T> parallel() {
    final Stream<T> underlying = toStream();
    return underlying.parallel();
}
/**
 * Benchmarks reading the whole input stream with the JDK 8 parallel {@code lines()}
 * pipeline, joining the lines with {@code "\n"}.
 *
 * <p>Fix: the original {@code InputStreamReader} used the platform-default charset,
 * making results platform-dependent; UTF-8 is now explicit (fully qualified to avoid a
 * new import). The reader is deliberately left unclosed so the underlying
 * {@code inputStream} survives for the surrounding {@code mark()}/{@code reset()} cycle.
 *
 * @return the stream contents joined with newlines
 * @throws IOException if stream handling fails
 */
@Benchmark
public String jdkJava8parallel() throws IOException {
    mark();
    String result = new BufferedReader(
            new InputStreamReader(inputStream, java.nio.charset.StandardCharsets.UTF_8))
            .lines()
            .parallel()
            .collect(Collectors.joining("\n"));
    reset();
    return result;
}
/**
 * Benchmark 5: reads the whole input stream with the JDK 8 parallel {@code lines()}
 * pipeline, joining the lines with {@code "\n"}.
 *
 * <p>Fix: the original {@code InputStreamReader} used the platform-default charset,
 * making results platform-dependent; UTF-8 is now explicit (fully qualified to avoid a
 * new import). The reader is deliberately left unclosed so the underlying
 * {@code inputStream} survives for the surrounding {@code mark()}/{@code reset()} cycle.
 *
 * @return the stream contents joined with newlines
 * @throws IOException if stream handling fails
 */
@Benchmark
public String test5_jdkJava8parallel() throws IOException {
    mark();
    String result = new BufferedReader(
            new InputStreamReader(inputStream, java.nio.charset.StandardCharsets.UTF_8))
            .lines()
            .parallel()
            .collect(Collectors.joining("\n"));
    reset();
    return result;
}
/**
 * Benchmarks grouping JDK words into anagram sets in parallel, then reporting every set
 * at or above {@code SIZE_THRESHOLD}, largest set first.
 *
 * <p>Fix: the original called {@code .parallel()} mid-pipeline on a stream obtained via
 * {@code parallelStream()}, which is already parallel — the call was a redundant no-op
 * and has been removed. Pipeline semantics are otherwise unchanged.
 */
@Benchmark
public void parallel_lazy_jdk() {
    Map<Alphagram, Set<String>> groupBy = this.jdkWords.parallelStream()
            .collect(Collectors.groupingBy(Alphagram::new, Collectors.<String>toSet()));
    groupBy.entrySet()
            .parallelStream()
            .map(Map.Entry::getValue)
            .filter(list -> list.size() >= SIZE_THRESHOLD)
            .sorted(Comparator.<Set<String>>comparingInt(Set::size).reversed())
            .map(list -> list.size() + ": " + list)
            .forEach(e -> Assert.assertFalse(e.isEmpty()));
}
/**
 * Benchmarks grouping GSC words into anagram sets in parallel, then reporting every set
 * at or above {@code SIZE_THRESHOLD}, largest set first.
 *
 * <p>Fix: the original called {@code .parallel()} mid-pipeline on a stream obtained via
 * {@code parallelStream()}, which is already parallel — the call was a redundant no-op
 * and has been removed. Pipeline semantics are otherwise unchanged.
 */
@Benchmark
public void parallel_lazy_streams_gsc() {
    Map<Alphagram, Set<String>> groupBy = this.gscWords.parallelStream()
            .collect(Collectors.groupingBy(Alphagram::new, Collectors.<String>toSet()));
    groupBy.entrySet()
            .parallelStream()
            .map(Map.Entry::getValue)
            .filter(list -> list.size() >= SIZE_THRESHOLD)
            .sorted(Comparator.<Set<String>>comparingInt(Set::size).reversed())
            .map(list -> list.size() + ": " + list)
            .forEach(e -> Assert.assertFalse(e.isEmpty()));
}
private void drainResponses(final Set<NodeResponse> responses, final NodeResponse exclude) { responses.stream() .parallel() // "parallelize" the draining of the responses, since we have multiple streams to consume .filter(response -> response != exclude) // don't include the explicitly excluded node .filter(response -> response.getStatus() != RequestReplicator.NODE_CONTINUE_STATUS_CODE) // don't include any continue responses because they contain no content .forEach(response -> drainResponse(response)); // drain all node responses that didn't get filtered out }
/**
 * Exercises {@code Streams.forEachPair} with two parallel 100k-element streams.
 *
 * <p>Asserts that each pair lines up positionally ({@code a.equals(String.valueOf(b))})
 * and that the callback fires exactly once per pair even when both inputs are parallel.
 * As the trailing comment notes, this verifies correctness only — not that execution
 * actually happened in parallel.
 */
public void testForEachPair_parallel() { Stream<String> streamA = IntStream.range(0, 100000).mapToObj(String::valueOf).parallel(); Stream<Integer> streamB = IntStream.range(0, 100000).mapToObj(i -> i).parallel(); AtomicInteger count = new AtomicInteger(0); Streams.forEachPair( streamA, streamB, (a, b) -> { count.incrementAndGet(); Truth.assertThat(a.equals(String.valueOf(b))).isTrue(); }); Truth.assertThat(count.get()).isEqualTo(100000); // of course, this test doesn't prove that anything actually happened in parallel... }
/** Switches the backing stream to parallel mode and wraps it back into this type. */
@Override
public Stream<T> parallel() {
    final Stream<T> parallelized = stream().parallel();
    return wrap(parallelized);
}
/**
 * Verifies that {@code Streams.concat} preserves encounter order even when the
 * concatenated stream is switched to parallel: empty inputs vanish and the remaining
 * elements appear exactly in order {@code a, b, c, d}.
 */
public void testConcat_refStream_parallel() { Truth.assertThat( Streams.concat(Stream.of("a"), Stream.of("b"), Stream.empty(), Stream.of("c", "d")) .parallel() .toArray()) .asList() .containsExactly("a", "b", "c", "d") .inOrder(); }
/**
 * Stress-tests {@code TestContextManager} under concurrent use.
 *
 * <p>Runs the full listener lifecycle (beforeTestClass / beforeTestMethod /
 * afterTestMethod / afterTestClass) for every declared test method on a parallel stream,
 * and repeats the whole exercise ~20 times to raise the odds of threads clobbering
 * shared mutable state in the TestContext. Afterwards it asserts the recorded methods
 * match expectations and that no TestContext attributes leaked.
 */
@Test public void invokeTestContextManagerFromConcurrentThreads() { TestContextManager tcm = new TestContextManager(TestCase.class); // Run the actual test several times in order to increase the chance of threads // stepping on each others' toes by overwriting the same mutable state in the // TestContext. IntStream.range(1, 20).forEach(i -> { actualMethods.clear(); // Execute TestExecutionListener in parallel, thereby simulating parallel // test method execution. stream(TestCase.class.getDeclaredMethods()).parallel().forEach(testMethod -> { try { tcm.beforeTestClass(); tcm.beforeTestMethod(testInstance, testMethod); // no need to invoke the actual test method tcm.afterTestMethod(testInstance, testMethod, null); tcm.afterTestClass(); } catch (Exception ex) { throw new RuntimeException(ex); } }); assertThat(actualMethods, equalTo(expectedMethods)); }); assertEquals(0, tcm.getTestContext().attributeNames().length); }
/**
 * Regenerates the expected query-plan resource files.
 *
 * <p>Initializes the plan-test harness, then for each query resource (processed in
 * parallel) writes the generated plan under {@code src/test/resources}, mirroring the
 * query's resource path. Checked {@code IOException}s inside the lambda are rethrown as
 * {@code UncheckedIOException}; the harness is torn down in {@code finally} either way.
 * NOTE(review): assumes {@code generateQueryPlan} is safe to invoke concurrently across
 * queries — confirm before relying on the parallel pipeline.
 */
public void generate() throws Exception { initPlanTest(); try { getQueryResourcePaths() .parallel() .forEach(queryResourcePath -> { try { Path queryPlanWritePath = Paths.get( getSourcePath().toString(), "src/test/resources", getQueryPlanResourcePath(queryResourcePath)); createParentDirs(queryPlanWritePath.toFile()); write(generateQueryPlan(read(queryResourcePath)).getBytes(UTF_8), queryPlanWritePath.toFile()); System.out.println("Generated expected plan for query: " + queryResourcePath); } catch (IOException e) { throw new UncheckedIOException(e); } }); } finally { destroyPlanTest(); } }