/**
 * Recursively finds all operators under {@code root} (including {@code root} itself) that are of
 * class {@code clazz} or a sub-class of {@code clazz}, and puts them in {@code result}.
 *
 * @param result collection receiving all matching operators under {@code root}
 * @param root the root operator under which all operators will be examined; may be null (no-op)
 * @param clazz class to collect. Must NOT be null.
 * @throws IllegalArgumentException if {@code clazz} is null
 */
public static void collectOp(Collection<Operator<?>> result, Operator<?> root, Class<?> clazz) {
  Preconditions.checkArgument(clazz != null, "AssertionError: clazz should not be null");
  if (root == null) {
    return;
  }
  // isAssignableFrom: matches root's exact class and any sub-class of clazz.
  if (clazz.isAssignableFrom(root.getClass())) {
    result.add(root);
  }
  // NOTE(review): guard added — getChildOperators() appears to be nullable for leaf
  // operators in this operator tree; without the check the recursion would NPE. Confirm
  // against Operator's contract.
  if (root.getChildOperators() != null) {
    for (Operator<?> child : root.getChildOperators()) {
      collectOp(result, child, clazz);
    }
  }
}
/**
 * Walks every BaseWork in the given SparkWork, collects all SparkPartitionPruningSinkOperators
 * reachable from each work's root operators, and registers each sink in {@code idToDpps}
 * keyed by its unique id.
 *
 * @param sparkWork the SparkWork whose works are scanned for partition-pruning sinks
 */
private void collectDPPInfos(SparkWork sparkWork) {
  for (BaseWork work : sparkWork.getAllWork()) {
    // One 'seen' set per work: shared sub-trees rooted under multiple roots are
    // presumably traversed only once — TODO confirm against the 4-arg collectOp overload.
    Set<Operator<?>> seen = new HashSet<>();
    // Use Operator<?> instead of the raw type to avoid an unchecked/raw-type warning.
    for (Operator<?> root : work.getAllRootOperators()) {
      List<SparkPartitionPruningSinkOperator> sinks = new ArrayList<>();
      SparkUtilities.collectOp(root, SparkPartitionPruningSinkOperator.class, sinks, seen);
      for (SparkPartitionPruningSinkOperator sink : sinks) {
        idToDpps.put(sink.getUniqueId(), sink);
      }
    }
  }
}
/**
 * Recursively collects every operator under {@code root} (including {@code root} itself)
 * whose class is exactly {@code clazz}, adding each match to {@code result}.
 *
 * @param result collection receiving all operators under {@code root} of class {@code clazz}
 * @param root the root operator under which all operators will be examined; null is a no-op
 * @param clazz class to collect. Must NOT be null.
 * @throws IllegalArgumentException if {@code clazz} is null
 */
public static void collectOp(Collection<Operator<?>> result, Operator<?> root, Class<?> clazz) {
  Preconditions.checkArgument(clazz != null, "AssertionError: clazz should not be null");
  if (root != null) {
    // Exact class match only — sub-classes of clazz are intentionally not collected.
    if (root.getClass().equals(clazz)) {
      result.add(root);
    }
    for (Operator<?> childOp : root.getChildOperators()) {
      collectOp(result, childOp, clazz);
    }
  }
} }
// Collect every SparkPartitionPruningSinkOperator reachable under 'sel' into sinkSet,
// then re-parent 'sel' so its sole parent is the newly created branching operator.
SparkUtilities.collectOp(sinkSet, sel, SparkPartitionPruningSinkOperator.class); sel.setParentOperators(Utilities.makeList(newBranchingOp));
// Collect every SparkPartitionPruningSinkOperator reachable under 'root' into sinkSet.
SparkUtilities.collectOp(sinkSet, root, SparkPartitionPruningSinkOperator.class);