// Refine search
public static void main(String[] args) throws IOException, IllegalArgumentException { // Create and set your PipelineOptions. PipelineOptionsFactory.register(SampleOptions.class); SampleOptions options = PipelineOptionsFactory.fromArgs(args).withValidation() .as(SampleOptions.class); runCsvToAvro(options); } }
public static void main(String[] args) throws IOException, IllegalArgumentException { // Create and set your PipelineOptions. PipelineOptionsFactory.register(SampleOptions.class); SampleOptions options = PipelineOptionsFactory.fromArgs(args).withValidation() .as(SampleOptions.class); runAvroToCsv(options); } }
public static void main(String[] args) { Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); Pipeline p = Pipeline.create(options); String instanceId = options.getInstanceId(); String databaseId = options.getDatabaseId(); // [START spanner_dataflow_read] // Query for all the columns and rows in the specified Spanner table PCollection<Struct> records = p.apply( SpannerIO.read() .withInstanceId(instanceId) .withDatabaseId(databaseId) .withQuery("SELECT * FROM " + options.getTable())); // [END spanner_dataflow_read] PCollection<Long> tableEstimatedSize = records // Estimate the size of every row .apply(EstimateSize.create()) // Sum all the row sizes to get the total estimated size of the table .apply(Sum.longsGlobally()); // Write the total size to a file tableEstimatedSize .apply(ToString.elements()) .apply(TextIO.write().to(options.getOutput()).withoutSharding()); p.run().waitUntilFinish(); } }
  /**
   * The main entry-point for pipeline execution. This method will start the pipeline but will not
   * wait for its execution to finish. If blocking execution is required, use the {@link
   * BulkDecompressor#run(Options)} method to start the pipeline and invoke {@code
   * result.waitUntilFinish()} on the {@link PipelineResult}.
   *
   * @param args The command-line args passed by the executor.
   */
  public static void main(String[] args) {
    // Parse and validate flags, then fire-and-forget the pipeline.
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    run(options);
  }
  /**
   * The main entry-point for pipeline execution. This method will start the pipeline but will not
   * wait for its execution to finish. If blocking execution is required, use the {@link
   * BulkCompressor#run(Options)} method to start the pipeline and invoke {@code
   * result.waitUntilFinish()} on the {@link PipelineResult}.
   *
   * @param args The command-line args passed by the executor.
   */
  public static void main(String[] args) {
    // Parse and validate flags, then fire-and-forget the pipeline.
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    run(options);
  }
public static void main(String[] args) { Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); Pipeline p = Pipeline.create(options); SpannerConfig spannerConfig = SpannerConfig.create() .withInstanceId(options.getInstanceId()) .withDatabaseId(options.getDatabaseId()); // [START spanner_dataflow_readall] PCollection<Struct> allRecords = p.apply(SpannerIO.read() .withSpannerConfig(spannerConfig) .withQuery("SELECT t.table_name FROM information_schema.tables AS t WHERE t" + ".table_catalog = '' AND t.table_schema = ''")).apply( MapElements.into(TypeDescriptor.of(ReadOperation.class)) .via((SerializableFunction<Struct, ReadOperation>) input -> { String tableName = input.getString(0); return ReadOperation.create().withQuery("SELECT * FROM " + tableName); })).apply(SpannerIO.readAll().withSpannerConfig(spannerConfig)); // [END spanner_dataflow_readall] PCollection<Long> dbEstimatedSize = allRecords.apply(EstimateSize.create()) .apply(Sum.longsGlobally()); dbEstimatedSize.apply(ToString.elements()).apply(TextIO.write().to(options.getOutput()) .withoutSharding()); p.run().waitUntilFinish(); }
/** * Sets up and starts streaming pipeline. * * @throws IOException if there is a problem setting up resources */ public static void main(String[] args) throws IOException { TrafficMaxLaneFlowOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().as(TrafficMaxLaneFlowOptions.class); options.setBigQuerySchema(FormatMaxesFn.getSchema()); runTrafficMaxLaneFlow(options); }
/** * Sets up and starts streaming pipeline. * * @throws IOException if there is a problem setting up resources */ public static void main(String[] args) throws IOException { TrafficRoutesOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().as(TrafficRoutesOptions.class); options.setBigQuerySchema(FormatStatsFn.getSchema()); runTrafficRoutes(options); }
public static void main(String[] args) { PipelineOptionsFactory.register(ImportOptions.class); ImportOptions opts = PipelineOptionsFactory .fromArgs(args).withValidation() .as(ImportOptions.class); Pipeline pipeline = buildPipeline(opts); PipelineResult result = pipeline.run(); if (opts.getWait()) { Utils.waitForPipelineToFinish(result); } }
@Test public void testUpdateSerializeExistingValue() throws Exception { TestOptions submitOptions = PipelineOptionsFactory.fromArgs("--string=baz", "--otherString=quux").as(TestOptions.class); String serializedOptions = MAPPER.writeValueAsString(submitOptions); String updatedOptions = ValueProviders.updateSerializedOptions(serializedOptions, ImmutableMap.of("string", "bar")); TestOptions runtime = MAPPER.readValue(updatedOptions, PipelineOptions.class).as(TestOptions.class); assertEquals("bar", runtime.getString()); assertEquals("quux", runtime.getOtherString()); }
@Test public void testLongValueProvider() { String[] args = new String[] {"--longValue=12345678762"}; String[] emptyArgs = new String[] {"--longValue="}; Objects options = PipelineOptionsFactory.fromArgs(args).as(Objects.class); assertEquals(Long.valueOf(12345678762L), options.getLongValue().get()); expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage(emptyStringErrorMessage()); PipelineOptionsFactory.fromArgs(emptyArgs).as(Objects.class); }