// Applies to each incoming RDD: pushes every element of every partition out
// through a message sender, wrapping non-Message payloads first.
// NOTE(review): fragment view — the enclosing anonymous class and the
// `messageSender` / `sender` fields are declared outside this chunk.
@Override public Void call(final JavaRDDLike rdd) {
  rdd.foreachPartition(new VoidFunction<Iterator<?>>() {
    @Override public void call(Iterator<?> results) throws Exception {
      // Lazily adopt and start the sender on first use — presumably once per
      // executor JVM after closure deserialization; TODO confirm lifecycle.
      if (messageSender == null) {
        messageSender = sender;
        messageSender.start();
      }
      while (results.hasNext()) {
        Object next = results.next();
        // Pass Message instances through unchanged; wrap any other payload.
        Message message = (next instanceof Message) ? (Message) next : MessageBuilder.withPayload(next).build();
        messageSender.send(message);
      }
    }
  });
  return null;
} });
/**
 * Materializes this node as a Spark RDD by applying this node's {@code fn}
 * partition-wise to the single parent's RDD.
 *
 * @param runtime the Spark runtime supplying the configuration and context
 * @return the transformed RDD for this collection
 */
private JavaRDDLike<?, ?> getJavaRDDLikeInternal(SparkRuntime runtime) {
  SparkCollection parent = (SparkCollection) getOnlyParent();
  JavaRDDLike<?, ?> input = parent.getJavaRDDLike(runtime);
  // Push the job configuration into the function before it ships to executors.
  fn.configure(runtime.getConfiguration());
  boolean pairInput = input instanceof JavaPairRDD;
  FlatMapIndexFn indexFn = new FlatMapIndexFn(fn, pairInput, runtime.getRuntimeContext());
  // Second argument `false`: do not claim the partitioner is preserved.
  return input.mapPartitionsWithIndex(indexFn, false);
} }
/**
 * Collects the contents of {@code rdd} to the driver and decodes them into
 * windowed values, memoizing the result so repeated calls reuse the first
 * collect instead of re-running the job.
 *
 * @param pcollection the collection whose coder and windowing strategy govern decoding
 * @return the decoded, cached windowed values
 */
Iterable<WindowedValue<T>> getValues(PCollection<T> pcollection) {
  if (windowedValues == null) {
    WindowFn<?, ?> windowFn = pcollection.getWindowingStrategy().getWindowFn();
    Coder<? extends BoundedWindow> windowCoder = windowFn.windowCoder();
    // Under GlobalWindows no per-element window metadata is needed, so the
    // value-only coder suffices; otherwise encode the full windowed value.
    final WindowedValue.WindowedValueCoder<T> valueCoder;
    if (windowFn instanceof GlobalWindows) {
      valueCoder = WindowedValue.ValueOnlyWindowedValueCoder.of(pcollection.getCoder());
    } else {
      valueCoder = WindowedValue.FullWindowedValueCoder.of(pcollection.getCoder(), windowCoder);
    }
    // Serialize on the executors, ship the bytes to the driver, decode locally.
    JavaRDDLike<byte[], ?> encoded = rdd.map(CoderHelpers.toByteFunction(valueCoder));
    List<byte[]> driverBytes = encoded.collect();
    windowedValues =
        driverBytes
            .stream()
            .map(raw -> CoderHelpers.fromByteArray(raw, valueCoder))
            .collect(Collectors.toList());
  }
  return windowedValues;
}
/**
 * Materializes this node as a Spark RDD: fetches the single parent's RDD,
 * configures {@code fn} with the runtime's configuration, and applies it
 * partition-wise via {@code FlatMapIndexFn}.
 *
 * @param runtime the Spark runtime supplying the configuration and context
 * @return the transformed RDD for this collection
 */
private JavaRDDLike<?, ?> getJavaRDDLikeInternal(SparkRuntime runtime) {
  // Exactly one parent is assumed here; presumably enforced by the planner — TODO confirm.
  JavaRDDLike<?, ?> parentRDD = ((SparkCollection) getOnlyParent()).getJavaRDDLike(runtime);
  fn.configure(runtime.getConfiguration());
  // The boolean flag tells FlatMapIndexFn whether the input is a pair RDD;
  // trailing `false` means the partitioner is not claimed to be preserved.
  return parentRDD.mapPartitionsWithIndex(
      new FlatMapIndexFn(fn, parentRDD instanceof JavaPairRDD, runtime.getRuntimeContext()),
      false);
} }
/**
 * Materializes this table node as a Spark pair RDD. When this node's function
 * is a {@code CombineFn} applied directly to a grouped table, the combiner is
 * registered with the runtime first (presumably so it can be applied during
 * the shuffle — TODO confirm).
 *
 * @param runtime the Spark runtime supplying the configuration and context
 * @return the transformed pair RDD for this table
 */
private JavaRDDLike<?, ?> getJavaRDDLikeInternal(SparkRuntime runtime) {
  boolean combineOnGroupedParent =
      combineFn instanceof CombineFn && getOnlyParent() instanceof PGroupedTableImpl;
  if (combineOnGroupedParent) {
    runtime.setCombineFn((CombineFn) combineFn);
  }
  JavaRDDLike<?, ?> source = ((SparkCollection) getOnlyParent()).getJavaRDDLike(runtime);
  // Hand the job configuration to the function before it ships to executors.
  fn.configure(runtime.getConfiguration());
  // Apply fn per-partition, then convert Crunch pairs into Spark Tuple2s so
  // downstream pair operations apply.
  return source
      .mapPartitionsWithIndex(
          new FlatMapIndexFn(fn, source instanceof JavaPairRDD, runtime.getRuntimeContext()),
          false)
      .mapPartitionsToPair(new CrunchPairTuple2());
} }
/**
 * Materializes this table node as a Spark pair RDD, registering the combiner
 * with the runtime first when the function is a {@code CombineFn} applied
 * directly to a grouped table.
 *
 * @param runtime the Spark runtime supplying the configuration and context
 * @return the transformed pair RDD for this table
 */
private JavaRDDLike<?, ?> getJavaRDDLikeInternal(SparkRuntime runtime) {
  // Register the combiner so the runtime can use it — presumably map-side
  // during the shuffle; TODO confirm against SparkRuntime.setCombineFn.
  if (combineFn instanceof CombineFn && getOnlyParent() instanceof PGroupedTableImpl) {
    runtime.setCombineFn((CombineFn) combineFn);
  }
  JavaRDDLike<?, ?> parentRDD = ((SparkCollection) getOnlyParent()).getJavaRDDLike(runtime);
  fn.configure(runtime.getConfiguration());
  // Apply fn per-partition, then convert the Crunch pairs to Spark Tuple2s.
  return parentRDD
      .mapPartitionsWithIndex(
          new FlatMapIndexFn(fn, parentRDD instanceof JavaPairRDD, runtime.getRuntimeContext()),
          false)
      .mapPartitionsToPair(new CrunchPairTuple2());
} }
// NOTE(review): fragment — the enclosing method and the close of this `if`
// lie outside the visible chunk.
Set<Target> targets = outputTargets.get(e.getKey());
// When one RDD feeds more than one target, cache the underlying Scala RDD so
// each output does not recompute the lineage from scratch.
if (targets.size() > 1) { rdd.rdd().cache();
// NOTE(review): fragment — the enclosing method and the close of this `if`
// lie outside the visible chunk.
Set<Target> targets = outputTargets.get(e.getKey());
// Cache the underlying Scala RDD when it is written to multiple targets, so
// the second and later writes reuse the computed partitions.
if (targets.size() > 1) { rdd.rdd().cache();