// Fold the number of blocks into the shared Integer accumulator.
// The cast is unchecked because accu.get() is not statically typed as AccumulatorV2<Integer, Integer>;
// presumably accu is always registered with an Integer accumulator — TODO confirm at the registration site.
@SuppressWarnings("unchecked") AccumulatorV2<Integer, Integer> intAccum = (AccumulatorV2<Integer, Integer>) accu.get(); intAccum.add(blocks.size());
/** Returns the current value of the wrapped Spark accumulator. */
@Override
public OUT value() {
  final OUT current = accumulatorV2.value();
  return current;
}
/** Delegates the zero-state check to the wrapped Spark accumulator. */
@Override
public boolean isZero() {
  final boolean zero = accumulatorV2.isZero();
  return zero;
}
// Collect this file slice's log files, newest base-instant/log-version first, and fold
// the counts into the shared accumulators before reporting totals.
// NOTE(review): the original text contained an orphaned ".collect(toList());" with no
// stream pipeline attached — it could not compile and has been removed.
List<HoodieLogFile> logFiles = s.getLogFiles()
    .sorted(HoodieLogFile.getBaseInstantAndLogVersionComparator().reversed())
    .collect(Collectors.toList());
totalLogFiles.add((long) logFiles.size());
totalFileSlices.add(1L);
log.info("Total of " + operations.size() + " compactions are retrieved");
log.info("Total number of latest files slices " + totalFileSlices.value());
log.info("Total number of log files " + totalLogFiles.value());
// NOTE(review): this logs totalFileSlices again under a different label — presumably a
// distinct counter was intended; confirm against the upstream source.
log.info("Total number of file slices " + totalFileSlices.value());
// Spark listener callback fired after every task ends: records the shuffle-read record
// count of tasks belonging to stage 1, keyed by task id, into stageOneShuffleReadTaskRecordsCountMap.
// The accumulators come back as a Scala Iterator, hence the explicit while/hasNext loop
// rather than a Java enhanced-for.
// NOTE(review): stageId() == 1 hard-codes the stage of interest — confirm against the job's DAG.
@Override public void onTaskEnd(SparkListenerTaskEnd taskEnd) { Iterator<AccumulatorV2<?, ?>> iterator = taskEnd.taskMetrics().accumulators().iterator(); while (iterator.hasNext()) { AccumulatorV2 accumulator = iterator.next(); if (taskEnd.stageId() == 1 && accumulator.isRegistered() && accumulator.name().isDefined() && accumulator.name().get().equals("internal.metrics.shuffle.read.recordsRead")) { stageOneShuffleReadTaskRecordsCountMap.put(taskEnd.taskInfo().taskId(), (Long) accumulator.value()); } } } });
// Returns the accumulator's registered name as a Java Optional, empty when unnamed.
// Scala's Option#getOrElse requires an AbstractFunction0 thunk when called from Java;
// the thunk yields null so Optional.ofNullable maps an unnamed accumulator to empty.
// NOTE(review): catching IllegalAccessError is unusual — presumably it guards against a
// Spark/Scala binary-compatibility issue when invoking name(); confirm why it is needed.
@Override public Optional<String> name() { try { return Optional.ofNullable(accumulatorV2.name().getOrElse(new AbstractFunction0<String>() { @Override public String apply() { return null; } })); } catch (IllegalAccessError e) { return Optional.empty(); } }
/**
 * Registers the wrapped accumulator with the Spark context, unless it already is.
 *
 * <p>A wrapped accumulator supplies its own name via {@code getWrappedName()}; otherwise the
 * name of this accumulator is used. When no name is available the accumulator is registered
 * anonymously.
 */
@Override
public void register() {
  if (accumulatorV2.isRegistered()) {
    return;
  }
  final String registrationName = (accumulatorV2 instanceof AccumulatorV2Wrapper)
      ? Cast.<AccumulatorV2Wrapper<IN, OUT>>as(accumulatorV2).getWrappedName().orElse(null)
      : name().orElse(null);
  if (registrationName == null) {
    SparkStreamingContext.INSTANCE.sparkContext().sc().register(accumulatorV2);
  } else {
    SparkStreamingContext.INSTANCE.sparkContext().sc().register(accumulatorV2, registrationName);
  }
}
/**
 * Merges another accumulator into this one.
 *
 * <p>Only other {@link SparkMAccumulator} instances can be merged; any other implementation
 * is rejected.
 *
 * @param other the accumulator to merge in
 * @throws IllegalArgumentException if {@code other} is not a {@code SparkMAccumulator}
 */
@Override
public void merge(@NonNull MAccumulator<IN, OUT> other) {
  if (other instanceof SparkMAccumulator) {
    accumulatorV2.merge(Cast.<SparkMAccumulator<IN, OUT>>as(other).accumulatorV2);
    // Bug fix: the original fell through and threw IllegalArgumentException even after a
    // successful merge; return here so only the unsupported-type path throws.
    return;
  }
  throw new IllegalArgumentException(getClass().getName() + " cannot merge with " + other.getClass().getName());
}
// Collect this file slice's log files, newest base-instant/log-version first, and fold
// the counts into the shared accumulators before reporting totals.
// NOTE(review): the original text contained an orphaned ".collect(toList());" with no
// stream pipeline attached — it could not compile and has been removed.
List<HoodieLogFile> logFiles = s.getLogFiles()
    .sorted(HoodieLogFile.getBaseInstantAndLogVersionComparator().reversed())
    .collect(Collectors.toList());
totalLogFiles.add((long) logFiles.size());
totalFileSlices.add(1L);
log.info("Total of " + operations.size() + " compactions are retrieved");
log.info("Total number of latest files slices " + totalFileSlices.value());
log.info("Total number of log files " + totalLogFiles.value());
// NOTE(review): this logs totalFileSlices again under a different label — presumably a
// distinct counter was intended; confirm against the upstream source.
log.info("Total number of file slices " + totalFileSlices.value());
// Fold the number of blocks into the shared Integer accumulator.
// The cast is unchecked because accu.get() is not statically typed as AccumulatorV2<Integer, Integer>;
// presumably accu is always registered with an Integer accumulator — TODO confirm at the registration site.
@SuppressWarnings("unchecked") AccumulatorV2<Integer, Integer> intAccum = (AccumulatorV2<Integer, Integer>) accu.get(); intAccum.add(blocks.size());
/**
 * Merges another accumulator by folding its current value into this one via {@code add}.
 *
 * @param other the accumulator whose map of named accumulators is merged into this one
 */
@Override
public void merge(AccumulatorV2<Map<String, SparkAccumulator>, Map<String, SparkAccumulator>> other) {
  Map<String, SparkAccumulator> incoming = other.value();
  this.add(incoming);
}
// Fold the number of blocks into the shared Integer accumulator.
// The cast is unchecked because accu.get() is not statically typed as AccumulatorV2<Integer, Integer>;
// presumably accu is always registered with an Integer accumulator — TODO confirm at the registration site.
@SuppressWarnings("unchecked") AccumulatorV2<Integer, Integer> intAccum = (AccumulatorV2<Integer, Integer>) accu.get(); intAccum.add(blocks.size());
/** Adds a single element into the wrapped Spark accumulator. */
@Override
public void add(IN in) {
  accumulatorV2.add(in);
}
/** Adds a double value into the wrapped Spark accumulator. */
@Override
public void add(double value) {
  accumulatorV2.add(value);
}
/** Adds a long value into the wrapped Spark accumulator. */
@Override
public void add(long value) {
  accumulatorV2.add(value);
}