/**
 * Resolves the worker harness container image for this job.
 *
 * <p>If the configured image name does not contain the {@code "IMAGE"} placeholder it is used
 * verbatim. Otherwise the placeholder is substituted based on the job mode: {@code "java"} when
 * the {@code beam_fn_api} experiment is enabled, {@code "beam-java-streaming"} for streaming
 * jobs, and {@code "beam-java-batch"} for batch jobs.
 */
@VisibleForTesting
static String getContainerImageForJob(DataflowPipelineOptions options) {
  String imageTemplate = options.getWorkerHarnessContainerImage();
  if (!imageTemplate.contains("IMAGE")) {
    // No placeholder present: the user supplied a fully-qualified image name.
    return imageTemplate;
  }
  String substitution;
  if (hasExperiment(options, "beam_fn_api")) {
    substitution = "java";
  } else if (options.isStreaming()) {
    substitution = "beam-java-streaming";
  } else {
    substitution = "beam-java-batch";
  }
  return imageTemplate.replace("IMAGE", substitution);
}
/**
 * Helper to configure the Dataflow Job Environment based on the user's job options.
 *
 * <p>Returns a map carrying the environment major version (FnAPI vs. legacy, taken from {@link
 * DataflowRunnerInfo}) and the job type string derived from whether the {@code beam_fn_api}
 * experiment is set and whether the pipeline is streaming.
 */
private static Map<String, Object> getEnvironmentVersion(DataflowPipelineOptions options) {
  DataflowRunnerInfo runnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
  boolean fnApiEnabled = hasExperiment(options, "beam_fn_api");
  boolean streaming = options.isStreaming();
  // FnAPI jobs use a distinct environment major version and job-type namespace.
  String majorVersion =
      fnApiEnabled
          ? runnerInfo.getFnApiEnvironmentMajorVersion()
          : runnerInfo.getLegacyEnvironmentMajorVersion();
  String jobType;
  if (fnApiEnabled) {
    jobType = streaming ? "FNAPI_STREAMING" : "FNAPI_BATCH";
  } else {
    jobType = streaming ? "STREAMING" : "JAVA_BATCH_AUTOSCALING";
  }
  return ImmutableMap.of(
      PropertyNames.ENVIRONMENT_VERSION_MAJOR_KEY, majorVersion,
      PropertyNames.ENVIRONMENT_VERSION_JOB_TYPE_KEY, jobType);
}
if (hasExperiment(options, "upload_graph")) { DataflowPackage stagedGraph = options
private List<PTransformOverride> getOverrides(boolean streaming) { boolean fnApiEnabled = hasExperiment(options, "beam_fn_api"); ImmutableList.Builder<PTransformOverride> overridesBuilder = ImmutableList.builder(); new SplittableParDoOverrides.SplittableParDoOverrideFactory())); if (streaming) { if (!hasExperiment(options, "enable_custom_pubsub_source")) { overridesBuilder.add( PTransformOverride.of( new StreamingPubsubIOReadOverrideFactory())); if (!hasExperiment(options, "enable_custom_pubsub_sink")) { overridesBuilder.add( PTransformOverride.of(