/** Test interface. */
@Hidden
public interface HiddenOptions extends PipelineOptions {
  /** A simple string-valued option used for testing. */
  String getFoo();

  void setFoo(String value);
}
/**
 * The Cloud Debugger debuggee to associate with. This should not be set directly.
 *
 * <p>May be absent ({@code @Nullable}) when no debuggee has been assigned.
 */
@Description("The Cloud Debugger debuggee to associate with. This should not be set directly.")
@Hidden
@Nullable
Debuggee getDebuggee();
/**
 * Set to true if running a streaming pipeline. This will be automatically set to true if the
 * pipeline contains an Unbounded PCollection.
 *
 * <p>Hidden because it is normally derived from the pipeline rather than set by users.
 */
@Description(
    "Set to true if running a streaming pipeline. This will be automatically set to "
        + "true if the pipeline contains an Unbounded PCollection.")
@Hidden
boolean isStreaming();
/**
 * Provides a process wide unique ID for this {@link PipelineOptions} object, assigned at graph
 * construction time.
 */
@Hidden
// Default supplied by AtomicLongFactory — presumably a shared counter, which is what makes the
// ID process-wide unique; confirm against the factory implementation.
@Default.InstanceFactory(AtomicLongFactory.class)
long getOptionsId();
/**
 * GCS endpoint to use. If unspecified, uses the default endpoint.
 *
 * <p>{@code @JsonIgnore} keeps this value out of Jackson-serialized options.
 */
@JsonIgnore
@Hidden
@Description("The URL for the GCS API.")
String getGcsEndpoint();
/**
 * Additional configuration for the profiling agent. Not typically necessary.
 *
 * <p>Marked [INTERNAL] and hidden — not intended for end users.
 */
@Description(
    "[INTERNAL] Additional configuration for the profiling agent. Not typically necessary.")
@Hidden
DataflowProfilingAgentConfiguration getProfilingAgentConfiguration();
/**
 * Internal-only options for tweaking the behavior of the {@link DirectRunner} in ways that users
 * should never do.
 *
 * <p>Currently, the only use is to disable user-friendly overrides that prevent fully testing
 * certain composite transforms.
 */
@Internal
@Hidden
public interface DirectTestOptions extends PipelineOptions, ApplicationNameOptions {
  // NOTE(review): the description text ("automatically-run unit test") does not obviously match
  // the property name (runner-determined sharding) — confirm the wording is intentional.
  @Default.Boolean(true)
  @Description("Indicates whether this is an automatically-run unit test.")
  boolean isRunnerDeterminedSharding();

  void setRunnerDeterminedSharding(boolean goAheadAndDetermineSharding);
}
/** Pipeline options to tune DockerEnvironment. */ @Experimental @Hidden public interface ManualDockerEnvironmentOptions extends PipelineOptions { @Description("Retain dynamically created Docker container Environments.") @Default.Boolean(false) boolean getRetainDockerContainers(); void setRetainDockerContainers(boolean retainDockerContainers); /** Register the {@link ManualDockerEnvironmentOptions}. */ @AutoService(PipelineOptionsRegistrar.class) class Options implements PipelineOptionsRegistrar { @Override public Iterable<Class<? extends PipelineOptions>> getPipelineOptions() { return ImmutableList.of(ManualDockerEnvironmentOptions.class); } } }
/** The GcsUtil instance that should be used to communicate with Google Cloud Storage. */
@JsonIgnore // excluded from Jackson serialization of the options
@Description("The GcsUtil instance that should be used to communicate with Google Cloud Storage.")
@Default.InstanceFactory(GcsUtil.GcsUtilFactory.class) // built by the factory when not explicitly set
@Hidden
GcsUtil getGcsUtil();
/**
 * Apache Beam provides a number of experimental features that can be enabled with this flag. If
 * executing against a managed service, please contact the service owners before enabling any
 * experiments.
 */
@Experimental
@Hidden
public interface ExperimentalOptions extends PipelineOptions {
  @Description(
      "[Experimental] Apache Beam provides a number of experimental features that can "
          + "be enabled with this flag. If executing against a managed service, please contact the "
          + "service owners before enabling any experiments.")
  @Nullable
  List<String> getExperiments();

  void setExperiments(@Nullable List<String> value);

  /** Returns true iff the provided pipeline options has the specified experiment enabled. */
  static boolean hasExperiment(PipelineOptions options, String experiment) {
    // Guard-clause form: a null options object or a null experiments list means "not enabled".
    if (options != null) {
      List<String> enabled = options.as(ExperimentalOptions.class).getExperiments();
      if (enabled != null) {
        return enabled.contains(experiment);
      }
    }
    return false;
  }
}
"[Internal] Options that are used exclusively within the Dataflow worker harness. " + "These options have no effect at pipeline creation time.") @Hidden public interface DataflowWorkerHarnessOptions extends DataflowPipelineOptions {
/**
 * The ExecutorService instance to use to create threads, can be overridden to specify an
 * ExecutorService that is compatible with the users environment. If unset, the default is to
 * create an ExecutorService with an unbounded number of threads; this is compatible with Google
 * AppEngine.
 */
@JsonIgnore // runtime object; excluded from Jackson serialization of the options
@Description(
    "The ExecutorService instance to use to create multiple threads. Can be overridden "
        + "to specify an ExecutorService that is compatible with the users environment. If unset, "
        + "the default is to create an ExecutorService with an unbounded number of threads; this "
        + "is compatible with Google AppEngine.")
@Default.InstanceFactory(ExecutorServiceFactory.class)
@Hidden
ExecutorService getExecutorService();
/**
 * Docker container image that executes Dataflow worker harness, residing in Google Container
 * Registry.
 */
@Default.InstanceFactory(WorkerHarnessContainerImageFactory.class)
// Fix: the original concatenation was "…Google " + " Container Registry.", which rendered a
// double space ("Google  Container") in the user-visible description.
@Description(
    "Docker container image that executes Dataflow worker harness, residing in Google "
        + "Container Registry.")
@Hidden
String getWorkerHarnessContainerImage();
/** Root URL for use with the Google Cloud Pub/Sub API. */
@Description("Root URL for use with the Google Cloud Pub/Sub API")
@Default.String("https://pubsub.googleapis.com") // production Pub/Sub endpoint
@Hidden
String getPubsubRootUrl();
/**
 * Run the job as a specific service account, instead of the default GCE robot.
 *
 * <p>No default is declared here; behavior when unset is presumably "use the default GCE robot"
 * per the description — confirm with the service.
 */
@Hidden
@Experimental
@Description("Run the job as a specific service account, instead of the default GCE robot.")
String getServiceAccount();
/** Options for controlling profiling of pipeline execution. */
@Description("[Experimental] Used to configure profiling of the Dataflow pipeline")
@Experimental
@Hidden
public interface DataflowProfilingOptions extends PipelineOptions {

  /** GCS path under which to save profiles; profiling is enabled when this is non-empty. */
  @Description(
      "When set to a non-empty value, enables recording profiles and saving them to GCS.\n"
          + "Profiles will continue until the pipeline is stopped or updated without this option.\n")
  String getSaveProfilesToGcs();

  void setSaveProfilesToGcs(String gcsPath);

  /** Additional, internal-only configuration for the profiling agent. */
  @Description(
      "[INTERNAL] Additional configuration for the profiling agent. Not typically necessary.")
  @Hidden
  DataflowProfilingAgentConfiguration getProfilingAgentConfiguration();

  void setProfilingAgentConfiguration(DataflowProfilingAgentConfiguration configuration);

  /** Configuration for the profiling agent (free-form string-to-object map). */
  class DataflowProfilingAgentConfiguration extends HashMap<String, Object> {}
}
@Hidden public interface CloudDebuggerOptions extends PipelineOptions {
"[Internal] Options used to control execution of the Dataflow SDK for " + "debugging and testing purposes.") @Hidden public interface DataflowPipelineDebugOptions extends ExperimentalOptions, PipelineOptions {
/** * The Google Compute Engine <a * href="https://cloud.google.com/compute/docs/regions-zones/regions-zones">region</a> for * creating Dataflow jobs. * * <p>NOTE: The Cloud Dataflow now also supports the region flag. */ @Hidden @Experimental @Description( "The Google Compute Engine region for creating Dataflow jobs. See " + "https://cloud.google.com/compute/docs/regions-zones/regions-zones for a list of valid " + "options. Default is up to the Dataflow service.") @Default.String("us-central1") String getRegion();