/**
 * Dry-runs the query at most once and caches the resulting {@link JobStatistics}.
 *
 * <p>Synchronized so concurrent callers never issue duplicate dry-run requests; the
 * {@code compareAndSet} keeps the cached value stable even so.
 *
 * @param bqOptions pipeline options supplying the project and job service
 * @return the cached (or freshly computed) dry-run statistics
 * @throws InterruptedException if the dry-run call is interrupted
 * @throws IOException if the dry-run request fails
 */
private synchronized JobStatistics dryRunQueryIfNeeded(BigQueryOptions bqOptions)
    throws InterruptedException, IOException {
  JobStatistics cached = dryRunJobStats.get();
  if (cached != null) {
    return cached;
  }
  JobStatistics freshStats =
      bqServices
          .getJobService(bqOptions)
          .dryRunQuery(bqOptions.getProject(), createBasicQueryConfig(), this.location);
  // Only the first writer wins; subsequent reads observe that single value.
  dryRunJobStats.compareAndSet(null, freshStats);
  return dryRunJobStats.get();
}
/**
 * Runs a BigQuery extract job that exports {@code table} to Avro files under
 * {@code extractDestinationDir}, blocking until the job finishes.
 *
 * @param jobId unique id for the extract job
 * @param table source table to export
 * @param jobService service used to start and poll the job
 * @param executingProject project the job runs in
 * @param extractDestinationDir GCS directory receiving the exported files
 * @param bqLocation BigQuery location of the job
 * @return the resource ids of the extracted files
 * @throws IOException if the extract job does not succeed
 * @throws InterruptedException if polling is interrupted
 */
private List<ResourceId> executeExtract(
    String jobId,
    TableReference table,
    JobService jobService,
    String executingProject,
    String extractDestinationDir,
    String bqLocation)
    throws InterruptedException, IOException {
  String destinationUri = BigQueryIO.getExtractDestinationUri(extractDestinationDir);
  JobConfigurationExtract extractConfig =
      new JobConfigurationExtract()
          .setSourceTable(table)
          .setDestinationFormat("AVRO")
          .setDestinationUris(ImmutableList.of(destinationUri));
  JobReference jobRef =
      new JobReference().setProjectId(executingProject).setLocation(bqLocation).setJobId(jobId);

  LOG.info("Starting BigQuery extract job: {}", jobId);
  jobService.startExtractJob(jobRef, extractConfig);
  Job completedJob = jobService.pollJob(jobRef, JOB_POLL_MAX_RETRIES);
  Status finalStatus = BigQueryHelpers.parseStatus(completedJob);
  if (finalStatus != Status.SUCCEEDED) {
    throw new IOException(
        String.format(
            "Extract job %s failed, status: %s.",
            completedJob.getJobReference().getJobId(),
            BigQueryHelpers.statusToPrettyString(completedJob.getStatus())));
  }
  LOG.info("BigQuery extract job completed: {}", jobId);
  return BigQueryIO.getExtractFilePaths(extractDestinationDir, completedJob);
}
jobId.getRetryIndex()); try { jobService.startLoadJob(jobRef, loadConfig); } catch (IOException | InterruptedException e) { LOG.warn("Load job {} failed with {}", jobRef, e.toString()); .setLocation(bqLocation); try { return jobService.pollJob(jobRef, BatchLoads.LOAD_JOB_POLL_MAX_RETRIES); } catch (InterruptedException e) { throw new RuntimeException(e); .setLocation(bqLocation); try { return jobService.getJob(jobRef); } catch (InterruptedException | IOException e) { throw new RuntimeException(e);
jobId.getRetryIndex()); try { jobService.startCopyJob(jobRef, copyConfig); } catch (IOException | InterruptedException e) { LOG.warn("Copy job {} failed with {}", jobRef, e); .setLocation(bqLocation); try { return jobService.pollJob(jobRef, BatchLoads.LOAD_JOB_POLL_MAX_RETRIES); } catch (InterruptedException e) { throw new RuntimeException(e); .setLocation(bqLocation); try { return jobService.getJob(jobRef); } catch (InterruptedException | IOException e) { throw new RuntimeException(e);
JobService jobService = getBigQueryServices().getJobService(bqOptions); try { jobService.dryRunQuery( bqOptions.getProject(), new JobConfigurationQuery()
jobService.startQueryJob(jobRef, queryConfig); Job job = jobService.pollJob(jobRef, JOB_POLL_MAX_RETRIES); if (BigQueryHelpers.parseStatus(job) != Status.SUCCEEDED) { throw new IOException(
/**
 * Deletes the temporary Avro files produced by this job's extract step, if any.
 *
 * <p>Looks up the extract job by its deterministic id; a missing job or an empty file
 * list simply means there is nothing to clean up.
 */
@Override
void cleanup(PassThroughThenCleanup.ContextContainer c) throws Exception {
  BigQueryOptions bqOptions = c.getPipelineOptions().as(BigQueryOptions.class);
  String jobUuid = c.getJobId();
  final String extractDestinationDir =
      resolveTempLocation(bqOptions.getTempLocation(), "BigQueryExtractTemp", jobUuid);
  final String executingProject = bqOptions.getProject();
  // NOTE(review): unlike the extract path, no location is set on this JobReference —
  // confirm getJob resolves the job correctly without it.
  JobReference jobRef =
      new JobReference()
          .setProjectId(executingProject)
          .setJobId(getExtractJobId(createJobIdToken(bqOptions.getJobName(), jobUuid)));

  Job extractJob = getBigQueryServices().getJobService(bqOptions).getJob(jobRef);
  if (extractJob == null) {
    return;
  }
  List<ResourceId> extractFiles = getExtractFilePaths(extractDestinationDir, extractJob);
  if (extractFiles == null || extractFiles.isEmpty()) {
    return;
  }
  // Files may have been consumed/removed already; ignore missing ones.
  FileSystems.delete(extractFiles, MoveOptions.StandardMoveOptions.IGNORE_MISSING_FILES);
}
};