// Fragment: writes the word-count results to the output path configured in the
// pipeline options; withoutSharding() forces a single output file instead of
// one file per shard.
// NOTE(review): the two lines below are byte-identical — presumably an
// accidental duplication in this snippet collection; confirm against the
// original sample files.
readCountText.apply("WriteOutput", TextIO.write().to(pipelineOptions.getOutput()).withoutSharding());
readCountText.apply("WriteOutput", TextIO.write().to(pipelineOptions.getOutput()).withoutSharding());
// Fragment (starts mid-expression, its head is outside this view): resolves
// outputName as a file under some base ResourceId, writes a single unsharded
// file through a custom WritableByteChannelFactory (presumably for
// compression — TODO confirm), then captures the transform's DisplayData,
// which is typically done in tests to inspect the configured write.
.resolve(outputName, ResolveOptions.StandardResolveOptions.RESOLVE_FILE) .toString()) .withoutSharding() .withWritableByteChannelFactory(writableByteChannelFactory); DisplayData displayData = DisplayData.from(write);
// Fragment: writes the `result` PCollection to the configured output path as a
// single file (withoutSharding() disables the default multi-shard output).
// NOTE(review): the two lines below are byte-identical — likely duplicated
// when these snippets were extracted; verify against the source samples.
result.apply("WriteOutput", TextIO.write().to(pipelineOptions.getOutput()).withoutSharding());
result.apply("WriteOutput", TextIO.write().to(pipelineOptions.getOutput()).withoutSharding());
// Fragment: argument list of an apply() call whose receiver is outside this
// view. Names the step "Write Output Result" and writes one unsharded file at
// the configured output path with a "-result" suffix.
// NOTE(review): duplicated line — presumably an extraction artifact; confirm.
"Write Output Result", TextIO.write() .to(pipelineOptions.getOutput() + "-result") .withoutSharding());
"Write Output Result", TextIO.write() .to(pipelineOptions.getOutput() + "-result") .withoutSharding());
// Fragment: tail of a TextIO.write() chain (head outside this view) that
// records failures as a single CSV file with a "Filename,Error" header row.
// NOTE(review): duplicated line — presumably an extraction artifact; confirm.
.to(options.getOutputFailureFile()) .withHeader("Filename,Error") .withoutSharding());
.to(options.getOutputFailureFile()) .withHeader("Filename,Error") .withoutSharding());
// Fragment: tails of two separate map lambdas (their heads are outside this
// view), fused onto one line by the extraction. The first formats a row as
// (long, string, string) — presumably a Singers row — and writes the
// delimiter-joined lines to the singers file; the second formats
// (long, long, string) — presumably an Albums row — and writes to the albums
// file. Both writes are unsharded (single output file). TODO confirm the row
// schemas against the original sample.
return Joiner.on(DELIMITER).join(input.getLong(0), input.getString(1), input.getString(2)); })).apply(TextIO.write().to(options.getSingersFilename()).withoutSharding()); return Joiner.on(DELIMITER).join(input.getLong(0), input.getLong(1), input.getString(2)); })).apply(TextIO.write().to(options.getAlbumsFilename()).withoutSharding());
// Fragment: tail of an apply() chain (receiver outside this view) that writes
// the preceding PCollection to the configured output as one unsharded file.
.apply(TextIO.write().to(options.getOutput()).withoutSharding());
public static void main(String[] args) { Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); Pipeline p = Pipeline.create(options); String instanceId = options.getInstanceId(); String databaseId = options.getDatabaseId(); // [START spanner_dataflow_read] // Query for all the columns and rows in the specified Spanner table PCollection<Struct> records = p.apply( SpannerIO.read() .withInstanceId(instanceId) .withDatabaseId(databaseId) .withQuery("SELECT * FROM " + options.getTable())); // [END spanner_dataflow_read] PCollection<Long> tableEstimatedSize = records // Estimate the size of every row .apply(EstimateSize.create()) // Sum all the row sizes to get the total estimated size of the table .apply(Sum.longsGlobally()); // Write the total size to a file tableEstimatedSize .apply(ToString.elements()) .apply(TextIO.write().to(options.getOutput()).withoutSharding()); p.run().waitUntilFinish(); } }
public static void main(String[] args) { Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); Pipeline p = Pipeline.create(options); SpannerConfig spannerConfig = SpannerConfig.create() .withInstanceId(options.getInstanceId()) .withDatabaseId(options.getDatabaseId()); // [START spanner_dataflow_readall] PCollection<Struct> allRecords = p.apply(SpannerIO.read() .withSpannerConfig(spannerConfig) .withQuery("SELECT t.table_name FROM information_schema.tables AS t WHERE t" + ".table_catalog = '' AND t.table_schema = ''")).apply( MapElements.into(TypeDescriptor.of(ReadOperation.class)) .via((SerializableFunction<Struct, ReadOperation>) input -> { String tableName = input.getString(0); return ReadOperation.create().withQuery("SELECT * FROM " + tableName); })).apply(SpannerIO.readAll().withSpannerConfig(spannerConfig)); // [END spanner_dataflow_readall] PCollection<Long> dbEstimatedSize = allRecords.apply(EstimateSize.create()) .apply(Sum.longsGlobally()); dbEstimatedSize.apply(ToString.elements()).apply(TextIO.write().to(options.getOutput()) .withoutSharding()); p.run().waitUntilFinish(); }