/**
 * Returns the {@link WorkflowToken} of the enclosing Workflow run.
 *
 * @return the workflow token carried by the workflow program info
 * @throws IllegalStateException if this program was not started by a Workflow
 *         (i.e. {@code workflowProgramInfo} is {@code null})
 */
@Override
public WorkflowToken getWorkflowToken() {
  // Fail with a descriptive message instead of a bare NPE when there is no
  // enclosing Workflow; sibling contexts treat a null workflowProgramInfo the same way.
  if (workflowProgramInfo == null) {
    throw new IllegalStateException("No workflow information available. "
        + "The WorkflowToken is only available when the program is started by a Workflow.");
  }
  return workflowProgramInfo.getWorkflowToken();
}
/**
 * Returns the {@link WorkflowToken} of the enclosing Workflow run.
 *
 * @return the workflow token carried by the workflow program info
 * @throws IllegalStateException if this program was not started by a Workflow
 *         (i.e. {@code workflowProgramInfo} is {@code null})
 */
@Override
public WorkflowToken getWorkflowToken() {
  // Guard against a missing workflow context: raising an explicit
  // IllegalStateException is clearer than the NPE the unguarded call would throw.
  if (workflowProgramInfo == null) {
    throw new IllegalStateException("No workflow information available. "
        + "The WorkflowToken is only available when the program is started by a Workflow.");
  }
  return workflowProgramInfo.getWorkflowToken();
}
/**
 * Returns the WorkflowToken if the MapReduce program is executed as a part of the Workflow,
 * or {@code null} when there is no enclosing Workflow.
 */
@Override
@Nullable
public WorkflowToken getWorkflowToken() {
  // No workflow context means no token to expose.
  if (workflowProgramInfo == null) {
    return null;
  }
  return workflowProgramInfo.getWorkflowToken();
}
/**
 * Returns the WorkflowToken if the MapReduce program is executed as a part of the Workflow.
 * Yields {@code null} outside of a Workflow run.
 */
@Override
@Nullable
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo info = workflowProgramInfo;
  return (info != null) ? info.getWorkflowToken() : null;
}
/**
 * Returns the WorkflowToken if the MapReduce program is executed as a part of the Workflow,
 * or {@code null} when there is no enclosing Workflow.
 */
@Override
@Nullable
public BasicWorkflowToken getWorkflowToken() {
  // Absent workflow context -> absent token.
  if (workflowProgramInfo == null) {
    return null;
  }
  return workflowProgramInfo.getWorkflowToken();
}
/**
 * Returns the WorkflowToken if the MapReduce program is executed as a part of the Workflow.
 * Yields {@code null} outside of a Workflow run.
 */
@Override
@Nullable
public BasicWorkflowToken getWorkflowToken() {
  BasicWorkflowToken token = null;
  if (workflowProgramInfo != null) {
    token = workflowProgramInfo.getWorkflowToken();
  }
  return token;
}
/**
 * Returns the {@link WorkflowToken} when this Spark program runs inside a Workflow,
 * or {@code null} otherwise.
 */
@Nullable
@Override
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo info = sparkRuntimeContext.getWorkflowInfo();
  if (info == null) {
    // Not launched from a Workflow.
    return null;
  }
  return info.getWorkflowToken();
}
/**
 * Returns the {@link WorkflowToken} of the enclosing Workflow run.
 *
 * @throws IllegalStateException if no workflow information is available
 */
@Override
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo info = context.getWorkflowInfo();
  if (info != null) {
    return info.getWorkflowToken();
  }
  throw new IllegalStateException("No workflow information for Spark program that is started by Workflow.");
}
/**
 * Returns the {@link WorkflowToken} of the enclosing Workflow run.
 *
 * @throws IllegalStateException if no workflow information is available
 */
@Override
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo info = context.getWorkflowInfo();
  if (info == null) {
    // This context is only expected when started by a Workflow; surface a clear error.
    throw new IllegalStateException("No workflow information for Spark program that is started by Workflow.");
  }
  return info.getWorkflowToken();
}
/**
 * Returns the {@link WorkflowToken} when this Spark program runs inside a Workflow,
 * or {@code null} otherwise.
 */
@Nullable
@Override
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo info = sparkRuntimeContext.getWorkflowInfo();
  return (info == null) ? null : info.getWorkflowToken();
}
/**
 * Returns the {@link WorkflowToken} when this Spark program runs inside a Workflow,
 * or {@code null} otherwise.
 */
@Nullable
@Override
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo workflowInfo = sparkRuntimeContext.getWorkflowInfo();
  if (workflowInfo != null) {
    return workflowInfo.getWorkflowToken();
  }
  return null;
}
/**
 * Returns the {@link WorkflowToken} of the enclosing Workflow run.
 *
 * @throws IllegalStateException if no workflow information is available
 */
@Override
public WorkflowToken getWorkflowToken() {
  WorkflowProgramInfo workflowInfo = context.getWorkflowInfo();
  if (workflowInfo == null) {
    throw new IllegalStateException("No workflow information for Spark program that is started by Workflow.");
  }
  return workflowInfo.getWorkflowToken();
}
/**
 * Returns the {@link WorkflowProgramInfo} if it is running inside Workflow or {@code null} if not.
 */
@Nullable
WorkflowProgramInfo getWorkflowProgramInfo() {
  String serialized = hConf.get(HCONF_ATTR_WORKFLOW_INFO);
  if (serialized == null) {
    // No workflow info recorded in the configuration -> not launched from a Workflow.
    return null;
  }
  WorkflowProgramInfo info = GSON.fromJson(serialized, WorkflowProgramInfo.class);
  // Disallow further puts on the deserialized token — presumably it is read-only
  // on this side of the run; confirm against the Workflow driver.
  info.getWorkflowToken().disablePut();
  return info;
}
/**
 * Returns the {@link WorkflowProgramInfo} if it is running inside Workflow or {@code null} if not.
 */
@Nullable
WorkflowProgramInfo getWorkflowProgramInfo() {
  String json = hConf.get(HCONF_ATTR_WORKFLOW_INFO);
  if (json == null) {
    return null;
  }
  // Reconstruct the workflow info from its JSON form and make the token read-only.
  WorkflowProgramInfo result = GSON.fromJson(json, WorkflowProgramInfo.class);
  result.getWorkflowToken().disablePut();
  return result;
}
/**
 * Creates the driver service: builds the execution client, the credentials updater,
 * and — when running inside a Workflow — captures the workflow token.
 */
public SparkDriverService(URI baseURI, SparkRuntimeContext runtimeContext) {
  this.client = new SparkExecutionClient(baseURI, runtimeContext.getProgramRunId());
  this.credentialsUpdater = createCredentialsUpdater(runtimeContext.getConfiguration(), client);
  WorkflowProgramInfo info = runtimeContext.getWorkflowInfo();
  if (info == null) {
    // Not started by a Workflow -> no token to report.
    this.workflowToken = null;
  } else {
    this.workflowToken = info.getWorkflowToken();
  }
}
/**
 * Constructor. Wires up the execution client and credentials updater, and keeps the
 * workflow token when this Spark run is part of a Workflow ({@code null} otherwise).
 */
public SparkDriverService(URI baseURI, SparkRuntimeContext runtimeContext) {
  this.client = new SparkExecutionClient(baseURI, runtimeContext.getProgramRunId());
  this.credentialsUpdater = createCredentialsUpdater(runtimeContext.getConfiguration(), client);
  WorkflowProgramInfo workflowInfo = runtimeContext.getWorkflowInfo();
  this.workflowToken = (workflowInfo != null) ? workflowInfo.getWorkflowToken() : null;
}
/**
 * Creates the service. The workflow token is only present when the run was
 * launched by a Workflow; otherwise it is left {@code null}.
 */
public SparkDriverService(URI baseURI, SparkRuntimeContext runtimeContext) {
  WorkflowProgramInfo info = runtimeContext.getWorkflowInfo();
  this.workflowToken = info == null ? null : info.getWorkflowToken();
  this.client = new SparkExecutionClient(baseURI, runtimeContext.getProgramRunId());
  this.credentialsUpdater = createCredentialsUpdater(runtimeContext.getConfiguration(), client);
}
/**
 * Creates a submitter for running Spark in distributed mode.
 *
 * <p>Sets up the {@link SparkExecutionService} (passing along the workflow token when the
 * run is part of a Workflow) and reads the optional credentials-update interval from the
 * system arguments ({@code -1} when absent).
 */
public DistributedSparkSubmitter(Configuration hConf, LocationFactory locationFactory, String hostname,
                                 SparkRuntimeContext runtimeContext, @Nullable String schedulerQueueName) {
  this.hConf = hConf;
  this.schedulerQueueName = schedulerQueueName;

  ProgramRunId programRunId = runtimeContext.getProgram().getId().run(runtimeContext.getRunId().getId());
  WorkflowProgramInfo workflowInfo = runtimeContext.getWorkflowInfo();
  BasicWorkflowToken token = (workflowInfo == null) ? null : workflowInfo.getWorkflowToken();
  this.sparkExecutionService = new SparkExecutionService(locationFactory, hostname, programRunId, token);

  Arguments systemArgs = runtimeContext.getProgramOptions().getArguments();
  if (systemArgs.hasOption(SparkRuntimeContextConfig.CREDENTIALS_UPDATE_INTERVAL_MS)) {
    this.tokenRenewalInterval =
        Long.parseLong(systemArgs.getOption(SparkRuntimeContextConfig.CREDENTIALS_UPDATE_INTERVAL_MS));
  } else {
    // No update interval configured -> renewal disabled.
    this.tokenRenewalInterval = -1L;
  }
}
/**
 * Constructor. Builds the {@link SparkExecutionService} for this program run — including the
 * workflow token if the run is inside a Workflow — and captures the optional credentials-update
 * interval from the system arguments (defaults to {@code -1} when not set).
 */
public DistributedSparkSubmitter(Configuration hConf, LocationFactory locationFactory, String hostname,
                                 SparkRuntimeContext runtimeContext, @Nullable String schedulerQueueName) {
  this.hConf = hConf;
  this.schedulerQueueName = schedulerQueueName;

  ProgramRunId runId = runtimeContext.getProgram().getId().run(runtimeContext.getRunId().getId());
  WorkflowProgramInfo info = runtimeContext.getWorkflowInfo();
  BasicWorkflowToken workflowToken = null;
  if (info != null) {
    workflowToken = info.getWorkflowToken();
  }
  this.sparkExecutionService = new SparkExecutionService(locationFactory, hostname, runId, workflowToken);

  Arguments args = runtimeContext.getProgramOptions().getArguments();
  long interval = -1L;
  if (args.hasOption(SparkRuntimeContextConfig.CREDENTIALS_UPDATE_INTERVAL_MS)) {
    interval = Long.parseLong(args.getOption(SparkRuntimeContextConfig.CREDENTIALS_UPDATE_INTERVAL_MS));
  }
  this.tokenRenewalInterval = interval;
}
/**
 * Creates the distributed submitter. Wires the execution service (with the workflow token
 * when launched from a Workflow) and resolves the credentials-update interval, using
 * {@code -1} to mean "not configured".
 */
public DistributedSparkSubmitter(Configuration hConf, LocationFactory locationFactory, String hostname,
                                 SparkRuntimeContext runtimeContext, @Nullable String schedulerQueueName) {
  this.hConf = hConf;
  this.schedulerQueueName = schedulerQueueName;

  ProgramRunId programRunId = runtimeContext.getProgram().getId().run(runtimeContext.getRunId().getId());
  WorkflowProgramInfo workflowInfo = runtimeContext.getWorkflowInfo();
  this.sparkExecutionService = new SparkExecutionService(
      locationFactory, hostname, programRunId,
      workflowInfo == null ? null : workflowInfo.getWorkflowToken());

  Arguments systemArgs = runtimeContext.getProgramOptions().getArguments();
  this.tokenRenewalInterval =
      systemArgs.hasOption(SparkRuntimeContextConfig.CREDENTIALS_UPDATE_INTERVAL_MS)
          ? Long.parseLong(systemArgs.getOption(SparkRuntimeContextConfig.CREDENTIALS_UPDATE_INTERVAL_MS))
          : -1L;
}