/**
 * No-op terminal transform: discards the input elements and simply ends this
 * branch of the pipeline.
 *
 * @param input the collection to terminate; its elements are never consumed
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<T> input) {
  return PDone.in(input.getPipeline());
}
}
/**
 * Terminal transform over {@link PubsubMessage}s: performs no work on the
 * elements and marks this branch of the pipeline as done.
 *
 * @param input the message collection (not consumed here)
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<PubsubMessage> input) {
  return PDone.in(input.getPipeline());
}
/**
 * Delegates all real work to the wrapped {@code inner} transform, then
 * terminates the branch. The inner transform's output (if any) is discarded.
 *
 * @param input the elements forwarded to {@code inner}
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<T> input) {
  // Forward to the wrapped transform; only its side effects matter here.
  input.apply(inner);
  return PDone.in(input.getPipeline());
}
/**
 * Terminal transform accepting any {@link PInput}: no elements are read and
 * the branch is simply concluded.
 *
 * @param input any pipeline input; only its pipeline reference is used
 * @return a {@link PDone} (typed as {@link POutput}) for the input's pipeline
 */
@Override
public POutput expand(PInput input) {
  return PDone.in(input.getPipeline());
}
/**
 * Applies the configured {@code dofn} to every {@link RedisMutation} (the
 * write itself is a side effect of that DoFn) and terminates the branch.
 *
 * @param input the mutations to process
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<RedisMutation> input) {
  // The DoFn performs the Redis write; no downstream output is produced.
  input.apply(ParDo.of(dofn));
  return PDone.in(input.getPipeline());
}
/**
 * Writes each element through a {@link GridFsWriteFn} configured from this
 * transform, then terminates the branch.
 *
 * @param input the elements to write to GridFS
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<T> input) {
  // The DoFn carries this transform's configuration into the workers.
  input.apply(ParDo.of(new GridFsWriteFn<>(this)));
  return PDone.in(input.getPipeline());
}
}
/**
 * Routes each {@link Row} through a {@link CollectorFn} bound to
 * {@code tableWithRows} (presumably accumulating rows per table — confirm in
 * {@code CollectorFn}), then concludes the branch.
 *
 * @param input the rows handed to the collector
 * @return a {@link PDone} (typed as {@link POutput}) for the input's pipeline
 */
@Override
public POutput buildIOWriter(PCollection<Row> input) {
  input.apply(ParDo.of(new CollectorFn(tableWithRows)));
  return PDone.in(input.getPipeline());
}
/**
 * Logs every {@link FeatureRowExtended} at the configured {@code level} via
 * {@link LoggerDoFn} and terminates the branch.
 *
 * @param input the elements to log
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<FeatureRowExtended> input) {
  // String concatenation applies String.valueOf implicitly, so the explicit
  // toString() call was redundant (and would NPE on a null level).
  input.apply("Log to " + level, ParDo.of(new LoggerDoFn(level)));
  return PDone.in(input.getPipeline());
}
}
/**
 * Hands every element to a {@link CollectorFn} keyed by this transform's name
 * (presumably accumulating elements for later inspection — confirm in
 * {@code CollectorFn}), then concludes the branch.
 *
 * @param input the elements passed to the collector
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<T> input) {
  input.apply(ParDo.of(new CollectorFn<T>(getName())));
  return PDone.in(input.getPipeline());
}
/**
 * Passes each element through an {@link Identity} DoFn under this transform's
 * name, then concludes the branch. The identity step's output is discarded.
 *
 * @param input the elements routed through the identity step
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<FeatureRowExtended> input) {
  // getName() labels both the step and the Identity fn for traceability.
  input.apply(getName(), ParDo.of(new Identity(getName())));
  return PDone.in(input.getPipeline());
}
}
@Override public PDone expand(PCollection<Integer> input) { // Apply an operation so that this is a composite transform. input.apply(Count.perElement()); return PDone.in(input.getPipeline()); } }
@Override public PDone expand(PCollection<Integer> input) { // Apply an operation so that this is a composite transform. input.apply(Count.perElement()); return PDone.in(input.getPipeline()); } }
/**
 * Validates that the transform was fully configured, then writes every
 * {@link SolrInputDocument} through a {@link WriteFn} and ends the branch.
 *
 * @param input the documents to send to Solr
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<SolrInputDocument> input) {
  // Fail fast at graph-construction time on missing configuration.
  checkState(getConnectionConfiguration() != null, "withConnectionConfiguration() is required");
  checkState(getCollection() != null, "to() is required");
  input.apply(ParDo.of(new WriteFn(this)));
  return PDone.in(input.getPipeline());
}
/**
 * Sends each {@link SendMessageRequest} to SQS via a {@link SqsWriteFn} built
 * from the pipeline's {@link AwsOptions}, then ends the branch.
 *
 * @param input the SQS send requests to dispatch
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<SendMessageRequest> input) {
  // Derive the SQS client configuration from the pipeline-level AWS options.
  SqsConfiguration sqsConfiguration =
      new SqsConfiguration(input.getPipeline().getOptions().as(AwsOptions.class));
  input.apply(ParDo.of(new SqsWriteFn(sqsConfiguration)));
  return PDone.in(input.getPipeline());
}
}
/**
 * Checks the JMS destination configuration — a connection factory plus exactly
 * one of queue or topic — then writes each input String via {@link WriterFn}
 * and ends the branch.
 *
 * @param input the message payloads to publish
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<String> input) {
  checkArgument(getConnectionFactory() != null, "withConnectionFactory() is required");
  // Exactly one destination must be set: at least one...
  checkArgument(
      getQueue() != null || getTopic() != null,
      "Either withQueue(queue) or withTopic(topic) is required");
  // ...and not both.
  checkArgument(
      getQueue() == null || getTopic() == null,
      "withQueue(queue) and withTopic(topic) are exclusive");
  input.apply(ParDo.of(new WriterFn(this)));
  return PDone.in(input.getPipeline());
}
/**
 * Assertion pipeline: re-windows the input into a single global grouping,
 * extracts the pane under test, runs the singleton checker over it, and feeds
 * the verdict into the concluding transform. Step order is significant.
 *
 * @param input grouped elements whose single pane is being asserted on
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<Iterable<T>> input) {
  input
      .apply("GroupGlobally", new GroupGlobally<>(rewindowingStrategy))
      .apply("GetPane", MapElements.via(paneExtractor))
      // The pane extractor's output coder is not inferable; set it explicitly.
      .setCoder(IterableCoder.of(input.getCoder()))
      .apply("RunChecks", ParDo.of(new SingletonCheckerDoFn<>(checkerFn, site)))
      .apply("VerifyAssertions", new DefaultConcludeTransform());
  return PDone.in(input.getPipeline());
}
}
/**
 * Drops the destination keys, then runs {@code FileIO.matchAll()} over the
 * remaining file patterns with {@code DISALLOW} so that any pattern matching
 * no files fails the pipeline. The matched metadata is discarded.
 *
 * @param input destination/file-pattern pairs; only the patterns are used
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<KV<DestinationT, String>> input) {
  input
      .apply(Values.create())
      .apply(FileIO.matchAll().withEmptyMatchTreatment(EmptyMatchTreatment.DISALLOW));
  return PDone.in(input.getPipeline());
}
}
/**
 * Assertion pipeline for grouped values: re-windows the input globally,
 * extracts the pane under test, runs the grouped-values checker over it, and
 * hands the result to the concluding transform. Step order is significant.
 *
 * @param input the elements being asserted on
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<T> input) {
  input
      .apply("GroupGlobally", new GroupGlobally<>(rewindowingStrategy))
      .apply("GetPane", MapElements.via(paneExtractor))
      // The pane extractor's output coder is not inferable; set it explicitly.
      .setCoder(IterableCoder.of(input.getCoder()))
      .apply("RunChecks", ParDo.of(new GroupedValuesCheckerDoFn<>(checkerFn, site)))
      .apply("VerifyAssertions", new DefaultConcludeTransform());
  return PDone.in(input.getPipeline());
}
}
/**
 * Converts each {@link IndexedRecord} to a BigQuery TableRow and writes it to
 * the table identified by the datastore/dataset settings, then ends the branch.
 *
 * @param in the records to write
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<IndexedRecord> in) {
  // Build the fully-qualified destination table reference from configuration.
  TableReference tableRef = new TableReference();
  tableRef.setProjectId(datastore.projectName.getValue());
  tableRef.setDatasetId(dataset.bqDataset.getValue());
  tableRef.setTableId(dataset.tableName.getValue());
  // Start from a plain writeTableRows sink and layer on the configured
  // table-handling and write-disposition behavior.
  BigQueryIO.Write write = BigQueryIO.writeTableRows().to(tableRef);
  write = setTableOperation(write);
  write = setWriteOperation(write);
  in.apply(ParDo.of(new IndexedRecordToTableRowFn())).apply(write);
  return PDone.in(in.getPipeline());
}
/**
 * Writes {@link IndexedRecord}s to BigQuery: records are mapped to TableRows
 * and sent to the table named by the datastore/dataset configuration.
 *
 * @param in the records to write
 * @return a {@link PDone} token for the input's pipeline
 */
@Override
public PDone expand(PCollection<IndexedRecord> in) {
  // Assemble the destination table reference from the component settings.
  TableReference destination = new TableReference();
  destination.setProjectId(datastore.projectName.getValue());
  destination.setDatasetId(dataset.bqDataset.getValue());
  destination.setTableId(dataset.tableName.getValue());
  // Apply the configured table and write dispositions on top of the base sink.
  BigQueryIO.Write sink = BigQueryIO.writeTableRows().to(destination);
  sink = setTableOperation(sink);
  sink = setWriteOperation(sink);
  in.apply(ParDo.of(new IndexedRecordToTableRowFn())).apply(sink);
  return PDone.in(in.getPipeline());
}