.setLoad(loadConfigurationPb);
.setLoad(loadConfigurationPb);
@Test public void testIncomplete() { // https://github.com/googleapis/google-cloud-java/issues/2357 com.google.api.services.bigquery.model.Job job = new com.google.api.services.bigquery.model.Job() .setStatistics( new com.google.api.services.bigquery.model.JobStatistics() .setCreationTime(1234L) .setStartTime(5678L)); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setCopy(new com.google.api.services.bigquery.model.JobConfigurationTableCopy())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(CopyStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setLoad(new com.google.api.services.bigquery.model.JobConfigurationLoad())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(LoadStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setExtract(new com.google.api.services.bigquery.model.JobConfigurationExtract())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(ExtractStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setQuery(new com.google.api.services.bigquery.model.JobConfigurationQuery())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(QueryStatistics.class); }
/**
 * Names of the counters tracked for each operation.
 *
 * <p>Two naming patterns are used:
 *
 * <ul>
 *   <li>{@code METHOD_NAME} — number of successful invocations of method METHOD.
 *   <li>{@code METHOD_NAME_TIME} — total inclusive time spent in method METHOD.
 * </ul>
 */
public enum Counter {
  /** Number of bytes written. */
  BYTES_WRITTEN,
  /** Successful invocations of close(). */
  CLOSE_CALLS,
  /** Total inclusive time spent in close(). */
  CLOSE_TOTAL_TIME,
  /** Number of jobs inserted. */
  JOBS_INSERTED,
  /** Successful invocations of write(). */
  WRITE_CALLS,
  /** Total inclusive time spent in write(). */
  WRITE_TOTAL_TIME,
}
/**
 * {@inheritDoc}
 *
 * <p>Tries executing the RPC for at most {@code MAX_RPC_RETRIES} times until it succeeds.
 *
 * @throws IOException if it exceeds {@code MAX_RPC_RETRIES} attempts.
 */
@Override
public void startLoadJob(JobReference jobRef, JobConfigurationLoad loadConfig)
    throws InterruptedException, IOException {
  // Wrap the load configuration into a full Job pb and hand it to the shared
  // retrying submission helper.
  Job loadJob =
      new Job()
          .setJobReference(jobRef)
          .setConfiguration(new JobConfiguration().setLoad(loadConfig));
  startJob(loadJob, errorExtractor, client);
}
// Build the destination table schema by parsing a local "schema.json" file
// (a JSON array of field definitions) into an initially-empty field list.
TableSchema schema = new TableSchema();
schema.setFields(new ArrayList<TableFieldSchema>());
JacksonFactory JACKSON = new JacksonFactory();
// parseArrayAndClose closes the parser when done (hence the name).
JACKSON.createJsonParser(new FileInputStream("schema.json"))
    .parseArrayAndClose(schema.getFields(), TableFieldSchema.class, null);
schema.setFactory(JACKSON);
// Fully-qualified reference to the table the CSV data will be loaded into.
TableReference destTable = new TableReference();
destTable.setProjectId(projectId);
destTable.setDatasetId(datasetId);
destTable.setTableId(tableId);
// The local CSV file is uploaded as the media payload of the insert call.
FileContent content = new FileContent("application/octet-stream", new File(csv));
// Assemble the load-job request: schema, destination table, and load options.
Job job = new Job();
JobConfiguration config = new JobConfiguration();
JobConfigurationLoad configLoad = new JobConfigurationLoad();
configLoad.setSchema(schema);
configLoad.setDestinationTable(destTable);
configLoad.setEncoding("UTF-8");
configLoad.setCreateDisposition("CREATE_IF_NEEDED");
config.setLoad(configLoad);
job.setConfiguration(config);
// Submit the job and keep its server-assigned id for later status polling.
// NOTE(review): setProjectId after insert(projectId, ...) looks redundant — confirm.
Insert insert = bigquery.jobs().insert(projectId, job, content);
insert.setProjectId(projectId);
JobReference jobRef = insert.execute().getJobReference();
String jobId = jobRef.getJobId();
loadConfig.setDestinationTable(table); config.setLoad(loadConfig); job.setConfiguration(config); Insert insert = bigquery.jobs().insert(exporterConfig.getBigqueryProjectId(), job);
@Override public void startLoadJob(JobReference jobRef, JobConfigurationLoad loadConfig) throws IOException { synchronized (allJobs) { verifyUniqueJobId(jobRef.getJobId()); Job job = new Job(); job.setJobReference(jobRef); job.setConfiguration(new JobConfiguration().setLoad(loadConfig)); job.setKind(" bigquery#job"); job.setStatus(new JobStatus().setState("PENDING")); // Copy the files to a new location for import, as the temporary files will be deleted by // the caller. if (loadConfig.getSourceUris().size() > 0) { ImmutableList.Builder<ResourceId> sourceFiles = ImmutableList.builder(); ImmutableList.Builder<ResourceId> loadFiles = ImmutableList.builder(); for (String filename : loadConfig.getSourceUris()) { sourceFiles.add(FileSystems.matchNewResource(filename, false /* isDirectory */)); loadFiles.add( FileSystems.matchNewResource( filename + ThreadLocalRandom.current().nextInt(), false /* isDirectory */)); } FileSystems.copy(sourceFiles.build(), loadFiles.build()); filesForLoadJobs.put(jobRef.getProjectId(), jobRef.getJobId(), loadFiles.build()); } allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job)); } }
config.setLoad(loadConfig);
.setLoad(loadConfigurationPb);
.setLoad(loadConfigurationPb);
.setLoad(loadConfigurationPb);
loadConfigurationPb.setProjectionFields(projectionFields); return new com.google.api.services.bigquery.model.JobConfiguration() .setLoad(loadConfigurationPb);
.setLoad(loadConfigurationPb);
/** Translates this load configuration into its API protobuf counterpart. */
com.google.api.services.bigquery.model.JobConfiguration toPb() {
  JobConfigurationLoad loadPb = new JobConfigurationLoad();
  loadPb.setDestinationTable(destinationTable.toPb());
  if (createDisposition != null) {
    loadPb.setCreateDisposition(createDisposition.toString());
  }
  if (writeDisposition != null) {
    loadPb.setWriteDisposition(writeDisposition.toString());
  }
  // CSV parsing options are only populated when a CSV format is configured.
  CsvOptions csv = csvOptions();
  if (csv != null) {
    loadPb.setFieldDelimiter(csv.fieldDelimiter());
    loadPb.setAllowJaggedRows(csv.allowJaggedRows());
    loadPb.setAllowQuotedNewlines(csv.allowQuotedNewLines());
    loadPb.setEncoding(csv.encoding());
    loadPb.setQuote(csv.quote());
    loadPb.setSkipLeadingRows(csv.skipLeadingRows());
  }
  if (schema != null) {
    loadPb.setSchema(schema.toPb());
  }
  if (formatOptions != null) {
    loadPb.setSourceFormat(formatOptions.type());
  }
  // These fields may be null; the pb setters treat null as "unset".
  loadPb.setMaxBadRecords(maxBadRecords);
  loadPb.setIgnoreUnknownValues(ignoreUnknownValues);
  loadPb.setProjectionFields(projectionFields);
  return new com.google.api.services.bigquery.model.JobConfiguration().setLoad(loadPb);
}
/**
 * Builds the load-job configuration from the task parameters.
 *
 * <p>Only {@code source_uris} and {@code destination_table} are required; every other
 * parameter is optional and applied only when present.
 */
@Override
protected JobConfiguration jobConfiguration(String projectId) {
  JobConfigurationLoad loadConfig = new JobConfigurationLoad().setSourceUris(sourceUris(params));
  if (params.has("schema")) {
    loadConfig.setSchema(tableSchema(params));
  }

  // Resolve the destination table, optionally qualified by a default dataset.
  Optional<DatasetReference> defaultDataset =
      params.getOptional("dataset", String.class).transform(Bq::datasetReference);
  String destinationTable = params.get("destination_table", String.class);
  loadConfig.setDestinationTable(tableReference(projectId, defaultDataset, destinationTable));

  // Optional knobs. Guava's Optional.transform is eager, so each setter runs
  // immediately when the parameter is present; the returned Optional is discarded.
  params.getOptional("create_disposition", String.class).transform(loadConfig::setCreateDisposition);
  params.getOptional("write_disposition", String.class).transform(loadConfig::setWriteDisposition);
  params.getOptional("source_format", String.class).transform(loadConfig::setSourceFormat);
  params.getOptional("field_delimiter", String.class).transform(loadConfig::setFieldDelimiter);
  params.getOptional("skip_leading_rows", int.class).transform(loadConfig::setSkipLeadingRows);
  params.getOptional("encoding", String.class).transform(loadConfig::setEncoding);
  params.getOptional("quote", String.class).transform(loadConfig::setQuote);
  params.getOptional("max_bad_records", int.class).transform(loadConfig::setMaxBadRecords);
  params.getOptional("allow_quoted_newlines", boolean.class).transform(loadConfig::setAllowQuotedNewlines);
  params.getOptional("allow_jagged_rows", boolean.class).transform(loadConfig::setAllowJaggedRows);
  params.getOptional("ignore_unknown_values", boolean.class).transform(loadConfig::setIgnoreUnknownValues);
  params.getOptional("projection_fields", new TypeReference<List<String>>() {}).transform(loadConfig::setProjectionFields);
  params.getOptional("autodetect", boolean.class).transform(loadConfig::setAutodetect);
  params.getOptional("schema_update_options", new TypeReference<List<String>>() {}).transform(loadConfig::setSchemaUpdateOptions);

  return new JobConfiguration().setLoad(loadConfig);
}