if (portableOptions.getSdkWorkerParallelism() == null) { portableOptions.setSdkWorkerParallelism(serverConfig.getSdkWorkerParallelism());
@SuppressWarnings("FutureReturnValueIgnored") private void scheduleRelease(JobInfo jobInfo) { WrappedContext wrapper = getCache().get(jobInfo.jobId()); Preconditions.checkState( wrapper != null, "Releasing context for unknown job: " + jobInfo.jobId()); PipelineOptions pipelineOptions = PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions()); int environmentCacheTTLMillis = pipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis(); if (environmentCacheTTLMillis > 0) { // Do immediate cleanup if this class is not loaded on Flink parent classloader. if (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) { LOG.warn( "{} is not loaded on parent Flink classloader. " + "Falling back to synchronous environment release for job {}.", this.getClass(), jobInfo.jobId()); release(wrapper); } else { // Schedule task to clean the container later. // Ensure that this class is loaded in the parent Flink classloader. getExecutor() .schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS); } } else { // Do not release this asynchronously, as the releasing could fail due to the classloader not // being available anymore after the tasks have been removed from the execution engine. release(wrapper); } }
@Override public PipelineResult run(Pipeline pipeline) { TestPortablePipelineOptions testPortablePipelineOptions = options.as(TestPortablePipelineOptions.class); String jobServerHostPort; Object jobServerDriver; PortablePipelineOptions portableOptions = options.as(PortablePipelineOptions.class); portableOptions.setRunner(PortableRunner.class); portableOptions.setJobEndpoint(jobServerHostPort); PortableRunner runner = PortableRunner.fromOptions(portableOptions); PipelineResult result = runner.run(pipeline);
/**
 * Builds pipeline options configured for the portable runner, pointing the job endpoint at
 * {@code ENDPOINT_URL}.
 *
 * @return freshly created options, viewed as {@code PipelineOptions}
 */
private static PipelineOptions createPipelineOptions() {
  PortablePipelineOptions portableOptions =
      PipelineOptionsFactory.create().as(PortablePipelineOptions.class);
  portableOptions.setRunner(PortableRunner.class);
  portableOptions.setJobEndpoint(ENDPOINT_URL);
  return portableOptions;
}
}
/**
 * Creates a {@code JobInfo} whose serialized pipeline options carry the given SDK worker
 * parallelism.
 *
 * @param jobId identifier for the constructed job
 * @param parallelism value stored as {@code sdkWorkerParallelism} in the proto-encoded options
 */
private static JobInfo constructJobInfo(String jobId, long parallelism) {
  PortablePipelineOptions opts = PipelineOptionsFactory.as(PortablePipelineOptions.class);
  opts.setSdkWorkerParallelism(parallelism);
  // Round-trip the options through their proto form, as the consumer expects a Struct.
  return JobInfo.create(
      jobId, "job-name", "retrieval-token", PipelineOptionsTranslation.toProto(opts));
}
.getOptions() .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED); pipeline .getOptions()
/**
 * Returns the stage context for the given job, lazily creating one factory state per job id from
 * the job's serialized pipeline options.
 */
@Override
public FlinkExecutableStageContext get(JobInfo jobInfo) {
  JobFactoryState state =
      jobFactories.computeIfAbsent(
          jobInfo.jobId(),
          unused -> {
            PortablePipelineOptions options =
                PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())
                    .as(PortablePipelineOptions.class);
            // Default to a single SDK worker when no parallelism was set.
            Long parallelism = options.getSdkWorkerParallelism();
            int workerCount = parallelism == null ? 1 : parallelism.intValue();
            return new JobFactoryState(workerCount);
          });
  return state.getFactory().get(jobInfo);
}
}
/**
 * Creates a {@code JobInfo} whose serialized pipeline options carry the given SDK worker
 * parallelism.
 *
 * @param jobId identifier for the constructed job
 * @param parallelism value stored as {@code sdkWorkerParallelism} in the proto-encoded options
 */
private static JobInfo constructJobInfo(String jobId, long parallelism) {
  PortablePipelineOptions opts = PipelineOptionsFactory.as(PortablePipelineOptions.class);
  opts.setSdkWorkerParallelism(parallelism);
  // Round-trip the options through their proto form, as the consumer expects a Struct.
  return JobInfo.create(
      jobId, "job-name", "retrieval-token", PipelineOptionsTranslation.toProto(opts));
}
options .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED);
/**
 * Returns the stage context for the given job, lazily creating one factory state per job id from
 * the job's serialized pipeline options.
 */
@Override
public FlinkExecutableStageContext get(JobInfo jobInfo) {
  JobFactoryState state =
      jobFactories.computeIfAbsent(
          jobInfo.jobId(),
          unused -> {
            PortablePipelineOptions options =
                PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions())
                    .as(PortablePipelineOptions.class);
            // Default to a single SDK worker when no parallelism was set.
            Long parallelism = options.getSdkWorkerParallelism();
            int workerCount = parallelism == null ? 1 : parallelism.intValue();
            return new JobFactoryState(workerCount);
          });
  return state.getFactory().get(jobInfo);
}
}
if (portableOptions.getSdkWorkerParallelism() == null) { portableOptions.setSdkWorkerParallelism(serverConfig.getSdkWorkerParallelism());
options .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED); Pipeline p = Pipeline.create(options); p.apply(Impulse.create())
@SuppressWarnings("FutureReturnValueIgnored") private void scheduleRelease(JobInfo jobInfo) { WrappedContext wrapper = getCache().get(jobInfo.jobId()); Preconditions.checkState( wrapper != null, "Releasing context for unknown job: " + jobInfo.jobId()); PipelineOptions pipelineOptions = PipelineOptionsTranslation.fromProto(jobInfo.pipelineOptions()); int environmentCacheTTLMillis = pipelineOptions.as(PortablePipelineOptions.class).getEnvironmentCacheMillis(); if (environmentCacheTTLMillis > 0) { // Do immediate cleanup if this class is not loaded on Flink parent classloader. if (this.getClass().getClassLoader() != ExecutionEnvironment.class.getClassLoader()) { LOG.warn( "{} is not loaded on parent Flink classloader. " + "Falling back to synchronous environment release for job {}.", this.getClass(), jobInfo.jobId()); release(wrapper); } else { // Schedule task to clean the container later. // Ensure that this class is loaded in the parent Flink classloader. getExecutor() .schedule(() -> release(wrapper), environmentCacheTTLMillis, TimeUnit.MILLISECONDS); } } else { // Do not release this asynchronously, as the releasing could fail due to the classloader not // being available anymore after the tasks have been removed from the execution engine. release(wrapper); } }
options .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED);
options .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED); Pipeline p = Pipeline.create(options); p.apply("impulse", Impulse.create())
options .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED); Pipeline p = Pipeline.create(options); PCollection<KV<String, Iterable<Long>>> result =
options .as(PortablePipelineOptions.class) .setDefaultEnvironmentType(Environments.ENVIRONMENT_EMBEDDED); Pipeline p = Pipeline.create(options); PCollection<KV<String, String>> output =