@Override
public String toString() {
    return getClass().getSimpleName() + " - " + getName();
}
@Override
public boolean preVisit(Operator<?> visitable) {
    if (!visitedOperators.add(visitable)) {
        return false;
    }
    this.maxDop = Math.max(this.maxDop, visitable.getParallelism());
    return true;
}
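// A usage sketch, not from the source: the enclosing class name "MaxDopVisitor",
// its public "maxDop" field, and its implementing the full Visitor interface
// (including postVisit) are assumptions. Plan implements Visitable, so accepting
// the visitor walks every operator once; the visitedOperators set guards against
// revisiting operators that are shared by several successors.
static int maxParallelismOf(org.apache.flink.api.common.Plan plan) {
    MaxDopVisitor visitor = new MaxDopVisitor();
    plan.accept(visitor);
    return visitor.maxDop;
}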
@Override
public boolean preVisit(Operator<?> visitable) {
    if (this.seen.add(visitable)) {
        // add to the map
        final String name = visitable.getName();
        List<Operator<?>> list = this.map.get(name);
        if (list == null) {
            list = new ArrayList<Operator<?>>(2);
            this.map.put(name, list);
        }
        list.add(visitable);

        // recurse into the step functions of bulk and delta iterations
        if (visitable instanceof BulkIterationBase) {
            ((BulkIterationBase) visitable).getNextPartialSolution().accept(this);
        } else if (visitable instanceof DeltaIterationBase) {
            ((DeltaIterationBase) visitable).getSolutionSetDelta().accept(this);
            ((DeltaIterationBase) visitable).getNextWorkset().accept(this);
        }
        return true;
    } else {
        return false;
    }
}
private JobVertex createDataSinkVertex(SinkPlanNode node) throws CompilerException {
    final OutputFormatVertex vertex = new OutputFormatVertex(node.getNodeName());
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());

    vertex.setResources(node.getMinResources(), node.getPreferredResources());
    vertex.setInvokableClass(DataSinkTask.class);
    vertex.setFormatDescription(getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

    // set user code
    config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
    config.setStubParameters(node.getProgramOperator().getParameters());

    return vertex;
}
if (n instanceof DataSinkNode) {
    type = "sink";
    contents = n.getOperator().toString();
} else if (n instanceof DataSourceNode) {
    type = "source";
    contents = n.getOperator().toString();
} else {
    // all other node types report the operator name
    contents = n.getOperator().getName();
}

if (inputNum == 0) {
    child1name += child1name.length() > 0 ? ", " : "";
    child1name += source.getOptimizerNode().getOperator().getName()
            + " (id: " + this.nodeIds.get(source) + ")";
} else if (inputNum == 1) {
    child2name += child2name.length() > 0 ? ", " : "";
    child2name += source.getOptimizerNode().getOperator().getName()
            + " (id: " + this.nodeIds.get(source) + ")";
}

if (n.getOperator().getCompilerHints() != null) {
    CompilerHints hints = n.getOperator().getCompilerHints();
    CompilerHints defaults = new CompilerHints();
    // ...
}
@SuppressWarnings("unchecked") public static <T, K> org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> appendKeyExtractor( org.apache.flink.api.common.operators.Operator<T> input, SelectorFunctionKeys<T, K> key) { if (input instanceof Union) { // if input is a union, we apply the key extractors recursively to all inputs org.apache.flink.api.common.operators.Operator<T> firstInput = ((Union) input).getFirstInput(); org.apache.flink.api.common.operators.Operator<T> secondInput = ((Union) input).getSecondInput(); org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> firstInputWithKey = appendKeyExtractor(firstInput, key); org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> secondInputWithKey = appendKeyExtractor(secondInput, key); return new Union(firstInputWithKey, secondInputWithKey, input.getName()); } TypeInformation<T> inputType = key.getInputType(); TypeInformation<Tuple2<K, T>> typeInfoWithKey = createTypeWithKey(key); KeyExtractingMapper<T, K> extractor = new KeyExtractingMapper(key.getKeyExtractor()); MapOperatorBase<T, Tuple2<K, T>, MapFunction<T, Tuple2<K, T>>> mapper = new MapOperatorBase<T, Tuple2<K, T>, MapFunction<T, Tuple2<K, T>>>( extractor, new UnaryOperatorInformation(inputType, typeInfoWithKey), "Key Extractor" ); mapper.setInput(input); mapper.setParallelism(input.getParallelism()); return mapper; }
@Override
public UnaryOperatorInformation<OUT, OUT> getOperatorInfo() {
    TypeInformation<OUT> previousOut = input.getOperatorInfo().getOutputType();
    return new UnaryOperatorInformation<OUT, OUT>(previousOut, previousOut);
}
protected void readUniqueFieldsAnnotation() {
    if (this.operator.getCompilerHints() != null) {
        Set<FieldSet> uniqueFieldSets = operator.getCompilerHints().getUniqueFields();
        if (uniqueFieldSets != null) {
            if (this.uniqueFields == null) {
                this.uniqueFields = new HashSet<FieldSet>();
            }
            this.uniqueFields.addAll(uniqueFieldSets);
        }
    }
}
public ResourceSpec getPreferredResources() {
    return this.template.getOperator().getPreferredResources();
}

public ResourceSpec getMinResources() {
    return this.template.getOperator().getMinResources();
}
private <I, O> org.apache.flink.api.common.operators.Operator<O> translateSingleInputOperator(
        SingleInputOperator<?, ?, ?> op) {

    @SuppressWarnings("unchecked")
    SingleInputOperator<I, O, ?> typedOp = (SingleInputOperator<I, O, ?>) op;

    @SuppressWarnings("unchecked")
    DataSet<I> typedInput = (DataSet<I>) op.getInput();

    Operator<I> input = translate(typedInput);

    org.apache.flink.api.common.operators.Operator<O> dataFlowOp = typedOp.translateToDataFlow(input);

    if (op instanceof UdfOperator<?>) {
        @SuppressWarnings("unchecked")
        SingleInputUdfOperator<I, O, ?> udfOp = (SingleInputUdfOperator<I, O, ?>) op;

        // set configuration parameters
        Configuration opParams = udfOp.getParameters();
        if (opParams != null) {
            dataFlowOp.getParameters().addAll(opParams);
        }

        if (dataFlowOp instanceof org.apache.flink.api.common.operators.SingleInputOperator) {
            org.apache.flink.api.common.operators.SingleInputOperator<?, O, ?> unaryOp =
                    (org.apache.flink.api.common.operators.SingleInputOperator<?, O, ?>) dataFlowOp;
            // set the semantic properties
            unaryOp.setSemanticProperties(udfOp.getSemanticProperties());
        }
    }

    return dataFlowOp;
}
private InputFormatVertex createDataSourceVertex(SourcePlanNode node) throws CompilerException {
    final InputFormatVertex vertex = new InputFormatVertex(node.getNodeName());
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());

    vertex.setResources(node.getMinResources(), node.getPreferredResources());
    vertex.setInvokableClass(DataSourceTask.class);
    vertex.setFormatDescription(getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

    // set user code
    config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
    config.setStubParameters(node.getProgramOperator().getParameters());

    config.setOutputSerializer(node.getSerializer());

    return vertex;
}
@SuppressWarnings("unchecked") public static <T, K1, K2> org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> appendKeyExtractor( org.apache.flink.api.common.operators.Operator<T> input, SelectorFunctionKeys<T, K1> key1, SelectorFunctionKeys<T, K2> key2) { if (input instanceof Union) { // if input is a union, we apply the key extractors recursively to all inputs org.apache.flink.api.common.operators.Operator<T> firstInput = ((Union) input).getFirstInput(); org.apache.flink.api.common.operators.Operator<T> secondInput = ((Union) input).getSecondInput(); org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> firstInputWithKey = appendKeyExtractor(firstInput, key1, key2); org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> secondInputWithKey = appendKeyExtractor(secondInput, key1, key2); return new Union(firstInputWithKey, secondInputWithKey, input.getName()); } TypeInformation<T> inputType = key1.getInputType(); TypeInformation<Tuple3<K1, K2, T>> typeInfoWithKey = createTypeWithKey(key1, key2); TwoKeyExtractingMapper<T, K1, K2> extractor = new TwoKeyExtractingMapper<>(key1.getKeyExtractor(), key2.getKeyExtractor()); MapOperatorBase<T, Tuple3<K1, K2, T>, MapFunction<T, Tuple3<K1, K2, T>>> mapper = new MapOperatorBase<T, Tuple3<K1, K2, T>, MapFunction<T, Tuple3<K1, K2, T>>>( extractor, new UnaryOperatorInformation<>(inputType, typeInfoWithKey), "Key Extractor" ); mapper.setInput(input); mapper.setParallelism(input.getParallelism()); return mapper; }
public Union(Operator<T> input1, Operator<T> input2, String unionLocationName) {
    this(new BinaryOperatorInformation<T, T, T>(
            input1.getOperatorInfo().getOutputType(),
            input1.getOperatorInfo().getOutputType(),
            input1.getOperatorInfo().getOutputType()),
        unionLocationName);
    setFirstInput(input1);
    setSecondInput(input2);
}
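// A usage sketch (the operands "source1" and "source2" are assumptions, not
// from the source): both inputs of a Union must produce the same type, which
// is why the constructor above reuses input1's output type for the first
// input, the second input, and the result.
//
//     Union<String> union = new Union<String>(source1, source2, "example union");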
if (getOperator() == null || getOperator().getCompilerHints() == null) {
    return;
}

CompilerHints hints = getOperator().getCompilerHints();

if (hints.getOutputSize() >= 0) {
    this.estimatedOutputSize = hints.getOutputSize();
}
private <I1, I2, O> org.apache.flink.api.common.operators.Operator<O> translateTwoInputOperator(
        TwoInputOperator<?, ?, ?, ?> op) {

    @SuppressWarnings("unchecked")
    TwoInputOperator<I1, I2, O, ?> typedOp = (TwoInputOperator<I1, I2, O, ?>) op;

    @SuppressWarnings("unchecked")
    DataSet<I1> typedInput1 = (DataSet<I1>) op.getInput1();

    @SuppressWarnings("unchecked")
    DataSet<I2> typedInput2 = (DataSet<I2>) op.getInput2();

    Operator<I1> input1 = translate(typedInput1);
    Operator<I2> input2 = translate(typedInput2);

    org.apache.flink.api.common.operators.Operator<O> dataFlowOp = typedOp.translateToDataFlow(input1, input2);

    if (op instanceof UdfOperator<?>) {
        @SuppressWarnings("unchecked")
        TwoInputUdfOperator<I1, I2, O, ?> udfOp = (TwoInputUdfOperator<I1, I2, O, ?>) op;

        // set configuration parameters
        Configuration opParams = udfOp.getParameters();
        if (opParams != null) {
            dataFlowOp.getParameters().addAll(opParams);
        }

        if (dataFlowOp instanceof org.apache.flink.api.common.operators.DualInputOperator) {
            org.apache.flink.api.common.operators.DualInputOperator<?, ?, O, ?> binaryOp =
                    (org.apache.flink.api.common.operators.DualInputOperator<?, ?, O, ?>) dataFlowOp;
            // set the semantic properties
            binaryOp.setSemanticProperties(udfOp.getSemanticProperties());
        }
    }

    return dataFlowOp;
}