/** Folds {@code accum} into the accumulator held by this state cell. */
@Override
public void addAccum(AccumT accum) {
  // Receiving any accumulator means this state is no longer empty.
  isCleared = false;
  AccumT merged = combineFn.mergeAccumulators(Arrays.asList(this.accum, accum));
  this.accum = merged;
}
@Override public Object[] mergeAccumulators(Iterable<Object[]> accumulators) { Iterator<Object[]> iter = accumulators.iterator(); if (!iter.hasNext()) { return createAccumulator(); } else { // Reuses the first accumulator, and overwrites its values. // It is safe because {@code accum[i]} only depends on // the i-th component of each accumulator. Object[] accum = iter.next(); for (int i = 0; i < combineFnCount; ++i) { accum[i] = combineFns.get(i).mergeAccumulators(new ProjectionIterable(accumulators, i)); } return accum; } }
/**
 * Combines each shard, surrounds the resulting accumulators with fresh (empty)
 * accumulators at both ends, merges, and asserts the extracted output matches.
 * Verifies that merging empty accumulators does not change the result.
 */
private static <InputT, AccumT, OutputT> void checkCombineFnShardsWithEmptyAccumulators(
    CombineFn<InputT, AccumT, OutputT> fn,
    Iterable<? extends Iterable<InputT>> shards,
    Matcher<? super OutputT> matcher) {
  List<AccumT> accums = combineInputs(fn, shards);
  accums.add(0, fn.createAccumulator());
  accums.add(fn.createAccumulator());
  assertThat(fn.extractOutput(fn.mergeAccumulators(accums)), matcher);
}
/**
 * Merges the element's accumulators, extracts the final output, and emits it
 * under the same key. Fails fast unless the element is in exactly one window.
 */
@Override
public void processElement(WindowedValue<KV<K, Iterable<AccumT>>> element) throws Exception {
  int windowCount = element.getWindows().size();
  checkState(
      windowCount == 1,
      "Expected inputs to %s to be in exactly one window. Got %s",
      MergeAccumulatorsAndExtractOutputEvaluator.class.getSimpleName(),
      windowCount);
  try {
    // Bracket the input accumulators with fresh empty ones; merging an empty
    // accumulator should be a no-op for a well-behaved CombineFn.
    Iterable<AccumT> toMerge =
        Iterables.concat(
            Collections.singleton(combineFn.createAccumulator()),
            element.getValue().getValue(),
            Collections.singleton(combineFn.createAccumulator()));
    OutputT extracted = combineFn.extractOutput(combineFn.mergeAccumulators(toMerge));
    output.add(element.withValue(KV.of(element.getValue().getKey(), extracted)));
  } catch (Exception e) {
    // Surface user-code failures through Beam's standard wrapper.
    throw UserCodeException.wrap(e);
  }
}
/** Folds {@code accum} into the Flink-backed accumulator state for this namespace. */
@Override
public void addAccum(AccumT accum) {
  try {
    org.apache.flink.api.common.state.ValueState<AccumT> state =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);
    AccumT existing = state.value();
    // First write stores the accumulator as-is; otherwise merge with the stored one.
    state.update(
        existing == null
            ? accum
            : combineFn.mergeAccumulators(Lists.newArrayList(existing, accum)));
  } catch (Exception e) {
    throw new RuntimeException("Error adding to state.", e);
  }
}
/** Adds {@code accum} to the accumulator stored in Flink partitioned state. */
@Override
public void addAccum(AccumT accum) {
  try {
    org.apache.flink.api.common.state.ValueState<AccumT> state =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);
    AccumT stored = state.value();
    if (stored == null) {
      // Nothing stored yet: the incoming accumulator becomes the state.
      state.update(accum);
      return;
    }
    state.update(combineFn.mergeAccumulators(Lists.newArrayList(stored, accum)));
  } catch (Exception e) {
    throw new RuntimeException("Error adding to state.", e);
  }
}
/** Merges {@code accum} into this cell's accumulator kept in Flink state. */
@Override
public void addAccum(AccumT accum) {
  try {
    org.apache.flink.api.common.state.ValueState<AccumT> accumState =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);
    AccumT previous = accumState.value();
    if (previous != null) {
      AccumT combined = combineFn.mergeAccumulators(Lists.newArrayList(previous, accum));
      accumState.update(combined);
    } else {
      // No previous value: seed the state with the incoming accumulator.
      accumState.update(accum);
    }
  } catch (Exception e) {
    throw new RuntimeException("Error adding to state.", e);
  }
}
private static <InputT, AccumT, OutputT> void checkCombineFnShardsIncrementalMerging( CombineFn<InputT, AccumT, OutputT> fn, List<? extends Iterable<InputT>> shards, Matcher<? super OutputT> matcher) { AccumT accumulator = shards.isEmpty() ? fn.createAccumulator() : null; for (AccumT inputAccum : combineInputs(fn, shards)) { if (accumulator == null) { accumulator = inputAccum; } else { accumulator = fn.mergeAccumulators(Arrays.asList(accumulator, inputAccum)); } fn.extractOutput(accumulator); // Extract output to simulate multiple firings } assertThat(fn.extractOutput(accumulator), matcher); }
/** Delegates accumulator merging to {@code combineFn}. */
@Override
public Object mergeAccumulators(Iterable<Object> accumulators) {
  Object merged = combineFn.mergeAccumulators(accumulators);
  return merged;
}
/** Delegates accumulator merging to {@code combineFn}. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators) {
  AccumT merged = combineFn.mergeAccumulators(accumulators);
  return merged;
}
/** Delegates accumulator merging to {@code fn}. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators) {
  AccumT merged = fn.mergeAccumulators(accumulators);
  return merged;
}
/** Forwards the merge to the underlying {@code combineFn}. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators) {
  AccumT result = combineFn.mergeAccumulators(accumulators);
  return result;
}
/** Forwards the merge to the underlying {@code combineFn}. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators) {
  AccumT result = combineFn.mergeAccumulators(accumulators);
  return result;
}
/** Forwards the merge to the underlying {@code fn}. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators) {
  AccumT result = fn.mergeAccumulators(accumulators);
  return result;
}
/** Pure delegation: merging is handled entirely by {@code combineFn}. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators) {
  AccumT combined = combineFn.mergeAccumulators(accumulators);
  return combined;
}
/** Delegates to {@code combineFn}; the {@link Context} is not consulted. */
@Override
public AccumT mergeAccumulators(Iterable<AccumT> accumulators, Context c) {
  AccumT merged = combineFn.mergeAccumulators(accumulators);
  return merged;
}
/**
 * Extracts the current output by merging a fresh empty accumulator with the
 * stored one — presumably so extraction never operates on the stored
 * accumulator directly (NOTE(review): confirm against CombineFn contract).
 */
@Override
public OutputT read() {
  AccumT merged =
      combineFn.mergeAccumulators(Arrays.asList(combineFn.createAccumulator(), accum));
  return combineFn.extractOutput(merged);
}
/**
 * Delegates to {@code combineFn}; the pipeline options, side-input reader, and
 * windows are not consulted for plain (non-contextual) merges.
 */
@Override
public AccumT mergeAccumulators(
    Iterable<AccumT> accumulators,
    PipelineOptions options,
    SideInputReader sideInputReader,
    Collection<? extends BoundedWindow> windows) {
  AccumT merged = combineFn.mergeAccumulators(accumulators);
  return merged;
}
/**
 * Folds one wrapped value into {@code accumulator}: a wrapped raw input goes
 * through {@code addInput}, a wrapped accumulator through {@code mergeAccumulators}.
 */
@Override
public AccumT addInput(AccumT accumulator, InputOrAccum<InputT, AccumT> value) {
  return value.accum != null
      ? fn.mergeAccumulators(ImmutableList.of(accumulator, value.accum))
      : fn.addInput(accumulator, value.input);
}
/**
 * Combines each shard into an accumulator, merges them all in one call, and
 * asserts the extracted output satisfies {@code matcher}.
 */
private static <InputT, AccumT, OutputT> void checkCombineFnShardsSingleMerge(
    CombineFn<InputT, AccumT, OutputT> fn,
    Iterable<? extends Iterable<InputT>> shards,
    Matcher<? super OutputT> matcher) {
  AccumT combined = fn.mergeAccumulators(combineInputs(fn, shards));
  assertThat(fn.extractOutput(combined), matcher);
}