/** Converts this job reference into its low-level BigQuery API protobuf form. */
JobReference toPb() {
  JobReference ref = new JobReference();
  ref.setProjectId(getProject());
  ref.setJobId(getJob());
  ref.setLocation(getLocation());
  return ref;
}
/**
 * Creates a new JobReference with a unique jobId generated from {@code jobIdPrefix} plus a
 * randomly generated UUID String.
 *
 * <p>The prefix must match {@code BIGQUERY_JOB_ID_PATTERN} and the combined id must not exceed
 * {@code BIGQUERY_JOB_ID_MAX_LENGTH} characters; violations raise {@link
 * IllegalArgumentException}.
 */
public JobReference createJobReference(
    String projectId, String jobIdPrefix, @Nullable String location) {
  // Fail fast on null inputs with the same IllegalArgumentException contract as before.
  Preconditions.checkArgument(projectId != null, "projectId must not be null.");
  Preconditions.checkArgument(jobIdPrefix != null, "jobIdPrefix must not be null.");
  Preconditions.checkArgument(
      jobIdPrefix.matches(BIGQUERY_JOB_ID_PATTERN),
      "jobIdPrefix '%s' must match pattern '%s'",
      jobIdPrefix,
      BIGQUERY_JOB_ID_PATTERN);
  // Append a random UUID so concurrent callers sharing a prefix get distinct job ids.
  String fullJobId = jobIdPrefix + "-" + UUID.randomUUID();
  Preconditions.checkArgument(
      fullJobId.length() <= BIGQUERY_JOB_ID_MAX_LENGTH,
      "fullJobId '%s' has length '%s'; must be less than or equal to %s",
      fullJobId,
      fullJobId.length(),
      BIGQUERY_JOB_ID_MAX_LENGTH);
  return new JobReference().setProjectId(projectId).setJobId(fullJobId).setLocation(location);
}
/** Returns the BigQuery API {@link JobReference} equivalent of this reference. */
JobReference toPb() {
  return new JobReference()
      .setLocation(getLocation())
      .setProjectId(getProject())
      .setJobId(getJob());
}
/**
 * Runs a BigQuery extract job that exports {@code table} as AVRO files under {@code
 * extractDestinationDir}, waiting for completion.
 *
 * @return the paths of the extracted files
 * @throws IOException if the extract job does not finish with SUCCEEDED status
 * @throws InterruptedException if interrupted while polling the job
 */
private List<ResourceId> executeExtract(
    String jobId,
    TableReference table,
    JobService jobService,
    String executingProject,
    String extractDestinationDir,
    String bqLocation)
    throws InterruptedException, IOException {
  JobReference extractJobRef =
      new JobReference().setProjectId(executingProject).setLocation(bqLocation).setJobId(jobId);

  String destUri = BigQueryIO.getExtractDestinationUri(extractDestinationDir);
  JobConfigurationExtract extractConfig =
      new JobConfigurationExtract()
          .setSourceTable(table)
          .setDestinationFormat("AVRO")
          .setDestinationUris(ImmutableList.of(destUri));

  LOG.info("Starting BigQuery extract job: {}", jobId);
  jobService.startExtractJob(extractJobRef, extractConfig);

  // Block until the service reports a terminal state (or retries are exhausted).
  Job completedJob = jobService.pollJob(extractJobRef, JOB_POLL_MAX_RETRIES);
  if (BigQueryHelpers.parseStatus(completedJob) != Status.SUCCEEDED) {
    throw new IOException(
        String.format(
            "Extract job %s failed, status: %s.",
            completedJob.getJobReference().getJobId(),
            BigQueryHelpers.statusToPrettyString(completedJob.getStatus())));
  }
  LOG.info("BigQuery extract job completed: {}", jobId);

  return BigQueryIO.getExtractFilePaths(extractDestinationDir, completedJob);
}
.setProjectId(projectId) .setJobId(jobId.getJobId()) .setLocation(bqLocation); LOG.info( "Starting copy job for table {} using {}, job id iteration {}", .setProjectId(projectId) .setJobId(jobId.getJobId()) .setLocation(bqLocation); try { return jobService.pollJob(jobRef, BatchLoads.LOAD_JOB_POLL_MAX_RETRIES); .setProjectId(projectId) .setJobId(jobId.getJobId()) .setLocation(bqLocation); try { return jobService.getJob(jobRef);
new JobReference() .setProjectId(executingProject) .setLocation(bqLocation) .setJobId(queryJobId);
.setProjectId(projectId) .setJobId(jobId.getJobId()) .setLocation(bqLocation); LOG.info( "Loading {} files into {} using job {}, job id iteration {}", .setProjectId(projectId) .setJobId(jobId.getJobId()) .setLocation(bqLocation); try { return jobService.pollJob(jobRef, BatchLoads.LOAD_JOB_POLL_MAX_RETRIES); .setProjectId(projectId) .setJobId(jobId.getJobId()) .setLocation(bqLocation); try { return jobService.getJob(jobRef);
/**
 * Submits {@code queryConfig} as a dry-run job (no data is processed) and returns the
 * statistics BigQuery reports for it, retrying the insert call on failure.
 *
 * @throws IOException if the request cannot be completed after retries
 * @throws InterruptedException if interrupted while retrying
 */
@Override
public JobStatistics dryRunQuery(
    String projectId, JobConfigurationQuery queryConfig, String location)
    throws InterruptedException, IOException {
  JobReference jobRef = new JobReference().setLocation(location).setProjectId(projectId);
  // setDryRun(true) asks the service to validate and estimate the query without running it.
  JobConfiguration dryRunConfig = new JobConfiguration().setQuery(queryConfig).setDryRun(true);
  Job dryRunJob = new Job().setJobReference(jobRef).setConfiguration(dryRunConfig);

  String abortMessage =
      String.format(
          "Unable to dry run query: %s, aborting after %d retries.", queryConfig, MAX_RPC_RETRIES);
  Job insertedJob =
      executeWithRetries(
          client.jobs().insert(projectId, dryRunJob),
          abortMessage,
          Sleeper.DEFAULT,
          createDefaultBackoff(),
          ALWAYS_RETRY);
  return insertedJob.getStatistics();
}
new JobReference() .setProjectId("") .setLocation("") .setJobId(retryId.getJobId())); return null;