/** Returns the identifier of the Cloud project this job belongs to. */
public String getProjectId() {
  String projectId = dataflowOptions.getProject();
  return projectId;
}
public static String getGcloudCancelCommand(DataflowPipelineOptions options, String jobId) { // If using a different Dataflow API than default, prefix command with an API override. String dataflowApiOverridePrefix = ""; String apiUrl = options.getDataflowClient().getBaseUrl(); if (!apiUrl.equals(Dataflow.DEFAULT_BASE_URL)) { dataflowApiOverridePrefix = String.format("%s=%s ", ENDPOINT_OVERRIDE_ENV_VAR, apiUrl); } // Assemble cancel command from optional prefix and project/job parameters. return String.format( "%s%s jobs --project=%s cancel --region=%s %s", dataflowApiOverridePrefix, GCLOUD_DATAFLOW_PREFIX, options.getProject(), options.getRegion(), jobId); }
/** Updates the Dataflow {@link Job} identified by {@code jobId} with the given {@code content}. */
public Job updateJob(@Nonnull String jobId, @Nonnull Job content) throws IOException {
  checkNotNull(jobId, "jobId");
  checkNotNull(content, "content");
  // Issue the regionalized jobs.update call and return the server's view of the job.
  return dataflow
      .projects()
      .locations()
      .jobs()
      .update(options.getProject(), options.getRegion(), jobId, content)
      .execute();
}
/**
 * Registers a Cloud Debugger debuggee for this pipeline when debugging is enabled.
 *
 * <p>No-op unless {@code enableCloudDebugger} is set; a pre-populated debuggee on the options is
 * rejected because registration owns that field.
 */
private void maybeRegisterDebuggee(DataflowPipelineOptions options, String uniquifier) {
  if (!options.getEnableCloudDebugger()) {
    return;
  }
  if (options.getDebuggee() != null) {
    throw new RuntimeException("Should not specify the debuggee");
  }
  CloudDebugger debuggerClient = DataflowTransport.newClouddebuggerClient(options).build();
  Debuggee registered = registerDebuggee(debuggerClient, uniquifier);
  options.setDebuggee(registered);
  System.out.println(debuggerMessage(options.getProject(), registered.getUniquifier()));
}
/**
 * Lists Dataflow {@link Job Jobs} in the project and region configured on the {@link
 * DataflowPipelineOptions}, starting from the given {@code pageToken} (or the first page when
 * {@code null}).
 */
public ListJobsResponse listJobs(@Nullable String pageToken) throws IOException {
  return dataflow
      .projects()
      .locations()
      .jobs()
      .list(options.getProject(), options.getRegion())
      .setPageToken(pageToken)
      .execute();
}
/** Creates the given Dataflow {@link Job} in the configured project and region. */
public Job createJob(@Nonnull Job job) throws IOException {
  checkNotNull(job, "job");
  // Issue the regionalized jobs.create call and return the created job as seen by the service.
  return dataflow
      .projects()
      .locations()
      .jobs()
      .create(options.getProject(), options.getRegion(), job)
      .execute();
}
/** Fetches the Dataflow {@link Job} identified by {@code jobId}. */
public Job getJob(@Nonnull String jobId) throws IOException {
  checkNotNull(jobId, "jobId");
  return dataflow
      .projects()
      .locations()
      .jobs()
      .get(options.getProject(), options.getRegion(), jobId)
      .execute();
}
/** Fetches the {@link JobMetrics} for the job identified by {@code jobId}. */
public JobMetrics getJobMetrics(@Nonnull String jobId) throws IOException {
  checkNotNull(jobId, "jobId");
  return dataflow
      .projects()
      .locations()
      .jobs()
      .getMetrics(options.getProject(), options.getRegion(), jobId)
      .execute();
}
/**
 * Lists messages for the job identified by {@code jobId}, starting from the given {@code
 * pageToken} (or the first page when {@code null}).
 */
public ListJobMessagesResponse listJobMessages(@Nonnull String jobId, @Nullable String pageToken)
    throws IOException {
  checkNotNull(jobId, "jobId");
  return dataflow
      .projects()
      .locations()
      .jobs()
      .messages()
      .list(options.getProject(), options.getRegion(), jobId)
      .setPageToken(pageToken)
      .execute();
}
/** Reports the status of the work item for {@code jobId}. */ @SuppressWarnings("unused") // used internally in the Cloud Dataflow execution environment. public ReportWorkItemStatusResponse reportWorkItemStatus( @Nonnull String jobId, @Nonnull ReportWorkItemStatusRequest request) throws IOException { checkNotNull(jobId, "jobId"); checkNotNull(request, "request"); Jobs.WorkItems.ReportStatus jobWorkItemsReportStatus = dataflow .projects() .locations() .jobs() .workItems() .reportStatus(options.getProject(), options.getRegion(), jobId, request); return jobWorkItemsReportStatus.execute(); } }
/** Leases the work item for {@code jobId}. */ @SuppressWarnings("unused") // used internally in the Cloud Dataflow execution environment. public LeaseWorkItemResponse leaseWorkItem( @Nonnull String jobId, @Nonnull LeaseWorkItemRequest request) throws IOException { checkNotNull(jobId, "jobId"); checkNotNull(request, "request"); Jobs.WorkItems.Lease jobWorkItemsLease = dataflow .projects() .locations() .jobs() .workItems() .lease(options.getProject(), options.getRegion(), jobId, request); return jobWorkItemsLease.execute(); }
String project = dataflowOptions.getProject(); if (project.matches("[0-9]*")) { throw new IllegalArgumentException(
/**
 * Registers this pipeline as a Cloud Debugger debuggee and returns the registered debuggee.
 *
 * <p>Throws a {@link RuntimeException} when the service reports an error status or the RPC
 * itself fails.
 */
private Debuggee registerDebuggee(CloudDebugger debuggerClient, String uniquifier) {
  Debuggee template =
      new Debuggee()
          .setProject(options.getProject())
          .setUniquifier(uniquifier)
          .setDescription(uniquifier)
          .setAgentVersion("google.com/cloud-dataflow-java/v1");
  RegisterDebuggeeRequest registerReq = new RegisterDebuggeeRequest();
  registerReq.setDebuggee(template);
  try {
    RegisterDebuggeeResponse registerResponse =
        debuggerClient.controller().debuggees().register(registerReq).execute();
    Debuggee result = registerResponse.getDebuggee();
    // Success iff the service attached no status, or a status that is not an error.
    if (result.getStatus() == null || !result.getStatus().getIsError()) {
      return result;
    }
    throw new RuntimeException(
        "Unable to register with the debugger: "
            + result.getStatus().getDescription().getFormat());
  } catch (IOException e) {
    throw new RuntimeException("Unable to register with the debugger: ", e);
  }
}
"To access the Dataflow monitoring console, please navigate to {}", MonitoringUtil.getJobMonitoringPageURL( options.getProject(), options.getRegion(), jobResult.getId())); System.out.println("Submitted job: " + jobResult.getId());
assertThat(optionsMap, hasEntry("project", (Object) options.getProject())); assertThat( optionsMap,