/**
 * Builds {@link PipelineOptions} from a key/value map by rendering each entry as a
 * {@code --key=value} command-line argument and parsing the result with validation.
 *
 * @param map option names mapped to their values
 * @return validated options whose application name is set to "BeamSql"
 */
public static PipelineOptions createPipelineOptions(Map<String, String> map) {
  final String[] args =
      map.entrySet().stream()
          .map(entry -> "--" + entry.getKey() + "=" + entry.getValue())
          .toArray(String[]::new);
  PipelineOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().create();
  options.as(ApplicationNameOptions.class).setAppName("BeamSql");
  return options;
}
/**
 * Creates a runner from freshly constructed default app {@link PipelineOptions}.
 *
 * @return The newly created runner.
 */
public static PipelineRunner<? extends PipelineResult> create() {
  PipelineOptions defaultOptions = PipelineOptionsFactory.create();
  return fromOptions(defaultOptions);
}
/**
 * Creates a {@link TestFlinkRunner} configured for either batch or streaming execution.
 *
 * @param streaming whether the runner should execute in streaming mode
 * @return a test runner built from the configured {@link FlinkPipelineOptions}
 */
public static TestFlinkRunner create(boolean streaming) {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(TestFlinkRunner.class);
  options.setStreaming(streaming);
  return TestFlinkRunner.fromOptions(options);
}
public static void main(String[] args) throws IOException, IllegalArgumentException { // Create and set your PipelineOptions. PipelineOptionsFactory.register(SampleOptions.class); SampleOptions options = PipelineOptionsFactory.fromArgs(args).withValidation() .as(SampleOptions.class); runCsvToAvro(options); } }
/** Checks that a registered, class-valued option survives serialization as display data. */
@Test
public void testDisplayDataDeserializationWithRegistration() throws Exception {
  // Registration makes HasClassOptions known to the factory before serialization.
  PipelineOptionsFactory.register(HasClassOptions.class);
  HasClassOptions original = PipelineOptionsFactory.as(HasClassOptions.class);
  original.setClassOption(ProxyInvocationHandlerTest.class);

  // Round-trip the options and confirm the class option is still present as display data.
  PipelineOptions roundTripped = serializeDeserialize(PipelineOptions.class, original);
  DisplayData displayData = DisplayData.from(roundTripped);
  assertThat(displayData, hasDisplayItem("classOption", ProxyInvocationHandlerTest.class));
}
/** Registers and loads the shared test options once for the whole test class. */
@BeforeClass
public static void setUp() {
  PipelineOptionsFactory.register(HIFITestOptions.class);
  HIFITestOptions testOptions = TestPipeline.testingPipelineOptions().as(HIFITestOptions.class);
  options = testOptions;
}
public static void main(String[] args) throws IOException, IllegalArgumentException { // Create and set your PipelineOptions. PipelineOptionsFactory.register(SampleOptions.class); SampleOptions options = PipelineOptionsFactory.fromArgs(args).withValidation() .as(SampleOptions.class); runAvroToCsv(options); } }
/** Registers and loads the shared test options once for the whole test class. */
@BeforeClass
public static void setUp() {
  PipelineOptionsFactory.register(HadoopFormatIOTestOptions.class);
  HadoopFormatIOTestOptions testOptions =
      TestPipeline.testingPipelineOptions().as(HadoopFormatIOTestOptions.class);
  options = testOptions;
}
// NOTE(review): this method continues beyond the visible chunk; only option parsing
// and pipeline construction are shown here.
public static void main(String[] args) {
  // Parse and validate command-line flags into the pipeline's custom Options.
  Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
  Pipeline p = Pipeline.create(options);
/**
 * Overloaded {@link PTransform} runner that runs with freshly created default app
 * {@link PipelineOptions}.
 */
public ResultT run(PTransform<PBegin, ?> pTransform) {
  PipelineOptions defaultOptions = PipelineOptionsFactory.create();
  return run(pTransform, defaultOptions);
}
}
/** Returns {@link GcsOptions} preconfigured with a {@link TestCredential}. */
private static GcsOptions gcsOptionsWithTestCredential() {
  GcsOptions options = PipelineOptionsFactory.as(GcsOptions.class);
  options.setGcpCredential(new TestCredential());
  return options;
}
@Before public void setup() throws Exception { PipelineOptionsFactory.register(V1TestOptions.class); options = TestPipeline.testingPipelineOptions().as(V1TestOptions.class); project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject(); ancestor = UUID.randomUUID().toString(); // Create entities and write them to datastore writeEntitiesToDatastore(options, project, ancestor, numEntities); }
// NOTE(review): this method continues beyond the visible chunk; only option parsing
// and pipeline construction are shown here.
public static void main(String[] args) {
  // Parse and validate command-line flags into the pipeline's custom Options.
  Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
  Pipeline p = Pipeline.create(options);
/** Initializes Mockito-annotated mocks and a fresh default options instance per test. */
@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  options = PipelineOptionsFactory.create();
}
/** Checks that a transform with no inputs is still translated via a source-stream lookup. */
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testTranslateWithEmptyCollection() {
  // Stub a translation context whose transform has no inputs at all.
  PCollection output = mock(PCollection.class);
  TranslationContext context = mock(TranslationContext.class);
  when(context.getInputs()).thenReturn(Collections.EMPTY_MAP);
  when(context.getOutput()).thenReturn(output);
  when(context.getPipelineOptions())
      .thenReturn(PipelineOptionsFactory.as(GearpumpPipelineOptions.class));

  translator.translate(transform, context);

  // The translator is expected to request a source stream wrapping an unbounded source.
  verify(context).getSourceStream(argThat(new UnboundedSourceWrapperMatcher()));
}
/**
 * Configures the BigQuery read-performance test options for the given record-size bucket.
 *
 * @param recordSize key into {@code numOfRecords} and suffix of the input table name
 * @param enableCustomBigquery whether to enable the custom BigQuery source/sink experiments
 */
private void setupTestEnvironment(String recordSize, boolean enableCustomBigquery) {
  PipelineOptionsFactory.register(BigQueryIOReadOptions.class);
  options = TestPipeline.testingPipelineOptions().as(BigQueryIOReadOptions.class);
  options.setNumRecords(numOfRecords.get(recordSize));
  options.setTempLocation(options.getTempRoot() + "/temp-it/");
  GcpOptions gcpOptions = TestPipeline.testingPipelineOptions().as(GcpOptions.class);
  project = gcpOptions.getProject();
  options.setInputTable(project + ":" + datasetId + "." + tablePrefix + recordSize);
  if (enableCustomBigquery) {
    options.setExperiments(
        ImmutableList.of("enable_custom_bigquery_sink", "enable_custom_bigquery_source"));
  }
}
public static void main(String[] args) { Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); Pipeline p = Pipeline.create(options); String instanceId = options.getInstanceId(); String databaseId = options.getDatabaseId(); // [START spanner_dataflow_read] // Query for all the columns and rows in the specified Spanner table PCollection<Struct> records = p.apply( SpannerIO.read() .withInstanceId(instanceId) .withDatabaseId(databaseId) .withQuery("SELECT * FROM " + options.getTable())); // [END spanner_dataflow_read] PCollection<Long> tableEstimatedSize = records // Estimate the size of every row .apply(EstimateSize.create()) // Sum all the row sizes to get the total estimated size of the table .apply(Sum.longsGlobally()); // Write the total size to a file tableEstimatedSize .apply(ToString.elements()) .apply(TextIO.write().to(options.getOutput()).withoutSharding()); p.run().waitUntilFinish(); } }
/** Verifies that every freshly created options instance receives a distinct options id. */
@Test
public void testPipelineOptionsIdIsUniquePerInstance() {
  Set<Long> ids = new HashSet<>();
  for (int attempt = 0; attempt < 1000; ++attempt) {
    long id = PipelineOptionsFactory.create().getOptionsId();
    if (ids.contains(id)) {
      fail(String.format("Generated duplicate id %s, existing generated ids %s", id, ids));
    }
    ids.add(id);
  }
}