/** Returns the cached Pub/Sub client, creating it on first access. */
private PubsubClient getClient() throws IOException {
  if (client != null) {
    return client;
  }
  // First call: build a JSON client from the Pub/Sub view of the options.
  client = PubsubJsonClient.FACTORY.newClient(null, null, options.as(PubsubOptions.class));
  return client;
}
/**
 * Returns a new {@link ValueProvider} that is inaccessible before {@link #run}, but will be
 * accessible while the pipeline runs.
 */
public <T> ValueProvider<T> newProvider(T runtimeValue) {
  // Stash the value under a unique key; the provider resolves it at runtime by that key.
  String key = UUID.randomUUID().toString();
  providerRuntimeValues.put(key, runtimeValue);
  return ValueProvider.NestedValueProvider.of(
      options.as(TestValueProviderOptions.class).getProviderRuntimeValues(),
      new GetFromRuntimeValues<T>(key));
}
/** Round-trips {@code options} through JSON and returns the result viewed as {@code kls}. */
private <T extends PipelineOptions> T serializeDeserialize(Class<T> kls, PipelineOptions options)
    throws Exception {
  String json = MAPPER.writeValueAsString(options);
  PipelineOptions roundTripped = MAPPER.readValue(json, PipelineOptions.class);
  return roundTripped.as(kls);
}
@Override
public Dataflow create(PipelineOptions options) {
  // Build a Dataflow API client from the Dataflow-specific view of the options.
  DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
  return DataflowTransport.newDataflowClient(dataflowOptions).build();
}
}
/**
 * Creates an instance of this rule.
 *
 * <p>Loads GCP configuration from {@link TestPipelineOptions}.
 */
public static TestBigQuery create(Schema tableSchema) {
  TestBigQueryOptions options =
      TestPipeline.testingPipelineOptions().as(TestBigQueryOptions.class);
  return new TestBigQuery(options, tableSchema);
}
/**
 * Creates an instance of this rule.
 *
 * <p>Loads GCP configuration from {@link TestPipelineOptions}.
 */
public static TestPubsub create() {
  return new TestPubsub(TestPipeline.testingPipelineOptions().as(TestPubsubOptions.class));
}
/**
 * Builds validated {@link PipelineOptions} from the given key/value map, rendering each entry as
 * a {@code --key=value} argument and fixing the app name to {@code "BeamSql"}.
 */
public static PipelineOptions createPipelineOptions(Map<String, String> map) {
  String[] args =
      map.entrySet().stream()
          .map(entry -> "--" + entry.getKey() + "=" + entry.getValue())
          .toArray(String[]::new);
  PipelineOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().create();
  options.as(ApplicationNameOptions.class).setAppName("BeamSql");
  return options;
}
@Override
public long getEstimatedSizeBytes(PipelineOptions options) throws Exception {
  // A dry run reports how many bytes BigQuery would process for this query.
  return dryRunQueryIfNeeded(options.as(BigQueryOptions.class))
      .getQuery()
      .getTotalBytesProcessed();
}
@Override public void setup( StreamTask<?, ?> containingTask, StreamConfig config, Output<StreamRecord<WindowedValue<OutputT>>> output) { // make sure that FileSystems is initialized correctly FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class); FileSystems.setDefaultPipelineOptions(options); super.setup(containingTask, config, output); }
/** Constructs a runner from the provided options. */
public static TestDataflowRunner fromOptions(PipelineOptions options) {
  TestDataflowPipelineOptions testOptions = options.as(TestDataflowPipelineOptions.class);
  // Derive a per-job temp location under the configured temp root.
  testOptions.setTempLocation(
      Joiner.on("/")
          .join(testOptions.getTempRoot(), testOptions.getJobName(), "output", "results"));
  return new TestDataflowRunner(
      testOptions, DataflowClient.create(options.as(DataflowPipelineOptions.class)));
}
@Override
public void setup(OperatorContext context) {
  // Enable per-tuple trace logging only when debug logging is configured in the options.
  this.traceTuples =
      ApexStreamTuple.Logging.isDebugEnabled(
          pipelineOptions.get().as(ApexPipelineOptions.class), this);
  try {
    // Open a reader on the source and advance to the first element; 'available'
    // records whether that first element exists.
    reader = source.createReader(this.pipelineOptions.get(), null);
    available = reader.start();
  } catch (IOException e) {
    // Operator setup cannot throw checked exceptions; wrap with the cause preserved.
    throw new RuntimeException(e);
  }
}
@Before
public void setUp() throws Exception {
  MockitoAnnotations.initMocks(this);

  // Fresh options with mocked GCS access and fixed GCP project/zone.
  options = PipelineOptionsFactory.create();
  options.as(GcsOptions.class).setGcsUtil(mockGcsUtil);
  GcpOptions gcpOptions = options.as(GcpOptions.class);
  gcpOptions.setProject("foo");
  gcpOptions.setZone("us-north1-a");

  // Stub the CRM client chain so project lookups resolve to the fake project.
  when(mockCrmClient.projects()).thenReturn(mockProjects);
  when(mockProjects.get(any(String.class))).thenReturn(mockGet);
  fakeProject = new Project().setProjectNumber(1L);
}
@Override
public PathValidator create(PipelineOptions options) {
  // Instantiate the configured validator class via its static fromOptions(PipelineOptions)
  // factory method.
  return InstanceBuilder.ofType(PathValidator.class)
      .fromClass(options.as(GcsOptions.class).getPathValidatorClass())
      .fromFactoryMethod("fromOptions")
      .withArg(PipelineOptions.class, options)
      .build();
}
}
@BeforeClass
public static void setUp() {
  // Register the custom options interface so the factory can parse and validate it,
  // then materialize the test options as that interface.
  PipelineOptionsFactory.register(HIFITestOptions.class);
  options = TestPipeline.testingPipelineOptions().as(HIFITestOptions.class);
}
@Test
public void testUpdateSerializeEmptyUpdate() throws Exception {
  // Serialize default options, apply an empty update, and verify nothing was set.
  String serialized = MAPPER.writeValueAsString(PipelineOptionsFactory.as(TestOptions.class));
  String updated = ValueProviders.updateSerializedOptions(serialized, ImmutableMap.of());
  TestOptions runtime = MAPPER.readValue(updated, PipelineOptions.class).as(TestOptions.class);
  assertNull(runtime.getString());
}
}
@Test
public void testMapStateUnsupportedInStreaming() throws Exception {
  PipelineOptions pipelineOptions = buildPipelineOptions();
  pipelineOptions.as(StreamingOptions.class).setStreaming(true);
  verifyMapStateUnsupported(pipelineOptions);
}
@Test
public void testMergingStatefulRejectedInStreaming() throws Exception {
  PipelineOptions pipelineOptions = buildPipelineOptions();
  pipelineOptions.as(StreamingOptions.class).setStreaming(true);
  verifyMergingStatefulParDoRejected(pipelineOptions);
}
@Test
public void testPropertyIsSetOnRegisteredPipelineOptionNotPartOfOriginalInterface() {
  // "--streaming" is not declared on PipelineOptions itself, but must still be
  // visible through the StreamingOptions view.
  PipelineOptions parsed = PipelineOptionsFactory.fromArgs("--streaming").create();
  assertTrue(parsed.as(StreamingOptions.class).isStreaming());
}
@Test
public void testNestedRecords() throws Exception {
  PipelineOptionsFactory.register(Options.class);
  TestPipelineOptions testOptions =
      TestPipeline.testingPipelineOptions().as(TestPipelineOptions.class);
  // Point the temp location at an integration-test subdirectory of the temp root.
  Options pipelineOptions = testOptions.as(Options.class);
  pipelineOptions.setTempLocation(testOptions.getTempRoot() + "/temp-it/");
  runPipeline(pipelineOptions);
}
@Test
public void testDontOverrideEndpointWithDefaultApi() {
  DataflowPipelineOptions options =
      PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  options.setGcpCredential(new TestCredential());

  // NOTE(review): the expected literals presumably mirror PROJECT_ID/REGION_ID/JOB_ID
  // ("someProject"/"thatRegion"/"1234") — confirm against the constant declarations.
  assertEquals(
      "gcloud dataflow jobs --project=someProject cancel --region=thatRegion 1234",
      MonitoringUtil.getGcloudCancelCommand(options, JOB_ID));
}