/**
 * Applies {@code transform} to this input {@code KeyedPCollectionTuple} and returns its
 * {@code OutputT}, registering the application under {@code name}.
 *
 * <p>The name appears in the monitoring UI and in log output, and serves as the stable
 * identifier of this application node within the job graph.
 */
public <OutputT extends POutput> OutputT apply(
    String name, PTransform<KeyedPCollectionTuple<K>, OutputT> transform) {
  // Delegate to the pipeline, which records this application under the given name.
  OutputT output = Pipeline.applyTransform(name, this, transform);
  return output;
}
/**
 * Applies {@code t} to this {@link PBegin}, registering this particular application of the
 * transform under {@code name}.
 *
 * <p>The name is surfaced in the monitoring UI and in logs, and stably identifies this
 * application node in the job graph.
 */
public <OutputT extends POutput> OutputT apply(
    String name, PTransform<? super PBegin, OutputT> t) {
  OutputT output = Pipeline.applyTransform(name, this, t);
  return output;
}
/**
 * Like {@link #apply(String, PTransform)}, defaulting the application name to the one supplied
 * by the {@link PTransform} itself.
 */
public <OutputT extends POutput> OutputT apply(
    PTransform<KeyedPCollectionTuple<K>, OutputT> transform) {
  OutputT output = Pipeline.applyTransform(this, transform);
  return output;
}
/**
 * Applies the given {@link PTransform} to this input {@link PCollectionList} under the
 * application name {@code name}.
 *
 * <p>The name shows up in the monitoring UI and in logging output, and stably identifies this
 * application node within the job graph.
 *
 * @return the output of the applied {@link PTransform}
 */
public <OutputT extends POutput> OutputT apply(
    String name, PTransform<PCollectionList<T>, OutputT> t) {
  OutputT applied = Pipeline.applyTransform(name, this, t);
  return applied;
}
/**
 * Like {@link #apply(String, PTransform)}, except the application is named after the
 * {@link PTransform} itself.
 */
public <OutputT extends POutput> OutputT apply(PTransform<? super PBegin, OutputT> t) {
  OutputT output = Pipeline.applyTransform(this, t);
  return output;
}
/**
 * Like {@link #apply(String, PTransform)} but defaulting to the name of the {@link PTransform}.
 *
 * @return the output of the applied {@link PTransform}
 */
public <OutputT extends POutput> OutputT apply(PTransform<? super PCollection<T>, OutputT> t) {
  return Pipeline.applyTransform(this, t);
}
/**
 * Applies {@code t} to this input {@link PCollection}, registering this specific application of
 * the transform under {@code name}.
 *
 * <p>The name appears in the monitoring UI and in logs, and stably identifies this application
 * node in the job graph.
 *
 * @return the output of the applied {@link PTransform}
 */
public <OutputT extends POutput> OutputT apply(
    String name, PTransform<? super PCollection<T>, OutputT> t) {
  // Record the application under the caller-supplied name.
  OutputT output = Pipeline.applyTransform(name, this, t);
  return output;
}
/**
 * Like {@link #apply(String, PTransform)}, with the application name defaulting to the name of
 * the {@link PTransform}.
 *
 * @return the output of the applied {@link PTransform}
 */
public <OutputT extends POutput> OutputT apply(PTransform<? super PCollectionTuple, OutputT> t) {
  OutputT output = Pipeline.applyTransform(this, t);
  return output;
}
/**
 * Like {@link #apply(String, PTransform)}, except the application name defaults to the one
 * provided by the {@code PTransform}.
 */
public <OutputT extends POutput> OutputT apply(PTransform<PCollectionList<T>, OutputT> t) {
  OutputT output = Pipeline.applyTransform(this, t);
  return output;
}
/**
 * Applies {@code t} to this input {@link PCollectionTuple}, registering this specific
 * application of the transform under {@code name}.
 *
 * <p>The name appears in the monitoring UI and in logs, and stably identifies this application
 * node in the job graph.
 *
 * @return the output of the applied {@link PTransform}
 */
public <OutputT extends POutput> OutputT apply(
    String name, PTransform<? super PCollectionTuple, OutputT> t) {
  OutputT output = Pipeline.applyTransform(name, this, t);
  return output;
}
/**
 * Applies the given {@link PTransform} to this {@code PFeatureRows}, using {@code name} to
 * identify this specific application of the transform, and returns the resulting
 * {@code PFeatureRows}.
 */
public PFeatureRows apply(String name, PTransform<PFeatureRows, PFeatureRows> transform) {
  return Pipeline.applyTransform(name, this, transform);
}
}
/**
 * Validates the bounded source and executes it through the unbounded-read code path via
 * {@link UnboundedReadFromBoundedSource}, while keeping the resulting collection marked as
 * bounded.
 */
@Override
public final PCollection<T> expand(PBegin input) {
  source.validate();
  PCollection<T> read =
      Pipeline.applyTransform(input, new UnboundedReadFromBoundedSource<>(source));
  // The adapter reads through the unbounded API, but the data itself is still bounded.
  return read.setIsBoundedInternal(IsBounded.BOUNDED);
}
}
/**
 * Converts a {@link BeamRelNode} into a {@link PCollection} of {@link Row}s.
 *
 * <p>A {@link BeamRelNode} is a recursive structure; the {@code BeamQueryPlanner} visits it
 * with a DFS (Depth-First-Search) algorithm, memoizing each node's output in {@code cache} so
 * shared subtrees are translated only once.
 */
static PCollection<Row> toPCollection(
    Pipeline pipeline, BeamRelNode node, Map<Integer, PCollection<Row>> cache) {
  PCollection<Row> cached = cache.get(node.getId());
  if (cached != null) {
    // Already translated on an earlier visit; reuse it.
    return cached;
  }
  // Name the application after the node type and its id, e.g. "BeamCalcRel_3".
  String applicationName = node.getClass().getSimpleName() + "_" + node.getId();
  PCollectionList<Row> inputs =
      buildPCollectionList(node.getPCollectionInputs(), pipeline, cache);
  PCollection<Row> result =
      Pipeline.applyTransform(applicationName, inputs, node.buildPTransform());
  cache.put(node.getId(), result);
  return result;
}
/**
 * Expands the unbounded read: validates the source, reads values tagged with record ids, and
 * then either deduplicates on those ids (when the source requires it) or simply strips them.
 */
@Override
public final PCollection<T> expand(PBegin input) {
  source.validate();
  if (!source.requiresDeduping()) {
    // No dedup needed: drop the record ids and return the bare values.
    return Pipeline.applyTransform(input, new ReadWithIds<>(source))
        .apply("StripIds", ParDo.of(new ValueWithRecordId.StripIdsDoFn<>()));
  }
  return Pipeline.applyTransform(input, new ReadWithIds<>(source)).apply(new Deduplicate<>());
}
/**
 * Expands to an {@link Impulse} followed by a {@link ParDo} that decodes and emits this
 * transform's pre-encoded elements, restoring the original output coder on the result.
 *
 * @throws IllegalStateException if the elements cannot be encoded
 */
@Override
public final PCollection<T> expand(PBegin input) {
  try {
    PCollection<T> decoded =
        Pipeline.applyTransform(input, Impulse.create())
            .apply(
                ParDo.of(
                    DecodeAndEmitDoFn.fromIterable(
                        transform.getElements(), originalOutput.getCoder())));
    // Preserve the coder that was attached to the original output.
    decoded.setCoder(originalOutput.getCoder());
    return decoded;
  } catch (IOException e) {
    throw new IllegalStateException("Unable to encode elements.", e);
  }
}
/**
 * Applies {@code doFn} to the main collection, routing its tagged error output into the error
 * stream.
 *
 * <p>The DoFn's output is split across two tags: {@code MAIN_TAG} carries successfully
 * processed rows, {@code ERRORS_TAG} carries failures. Errors emitted here are flattened
 * together with the errors gathered so far.
 *
 * @param name the transform application name; also used to prefix the internal flatten step
 * @param doFn the feature DoFn to apply; its transform name is set to {@code name}
 * @return new PFeatureRows, which has any tagged errors and retries in DoFn added to the errors
 *     and retries gathered so far.
 */
public PFeatureRows applyDoFn(String name, BaseFeatureDoFn doFn) {
  MultiOutput<FeatureRowExtended, FeatureRowExtended> transform =
      ParDo.of(doFn.withTransformName(name)).withOutputTags(MAIN_TAG, TupleTagList.of(ERRORS_TAG));
  PCollectionTuple transformed = Pipeline.applyTransform(name, main, transform);
  // Both tagged outputs need their proto coder set explicitly after the multi-output ParDo.
  PCollection<FeatureRowExtended> outMain =
      transformed.get(MAIN_TAG).setCoder(ProtoCoder.of(FeatureRowExtended.class));
  // Merge this step's errors with the errors accumulated by previous steps.
  PCollection<FeatureRowExtended> outErrors =
      PCollectionList.of(
              transformed.get(ERRORS_TAG).setCoder(ProtoCoder.of(FeatureRowExtended.class)))
          .and(errors)
          .apply(name + "/Flatten errors", Flatten.pCollections())
          .setCoder(ProtoCoder.of(FeatureRowExtended.class));
  return new PFeatureRows(outMain, outErrors);
}
Pipeline.applyTransform( input, Create.of(timestampedElements).withCoder(TimestampedValueCoder.of(coder)));
Pipeline.applyTransform(outputs, Flatten.pCollections()); flattenedOutputs.apply(CreateDataflowView.forBatch(view)); return flattenedOutputs;