Refine search
/** Example of running a query. */ // [TARGET query(QueryJobConfiguration, JobOption...)] public void runQuery() throws InterruptedException { // [START bigquery_query] // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build(); // Print the results. for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) { for (FieldValue val : row) { System.out.printf("%s,", val.toString()); } System.out.printf("\n"); } // [END bigquery_query] } }
/**
 * Asserts that {@code value} equals {@code expected} field by field, so a mismatch
 * pinpoints the offending accessor instead of only failing the top-level equals().
 */
private void compareQueryJobConfiguration(
    QueryJobConfiguration expected, QueryJobConfiguration value) {
  // Object-identity contract: equals, hashCode and toString must all agree.
  assertEquals(expected, value);
  assertEquals(expected.hashCode(), value.hashCode());
  assertEquals(expected.toString(), value.toString());
  // Individual configuration fields.
  assertEquals(expected.dryRun(), value.dryRun());
  assertEquals(expected.allowLargeResults(), value.allowLargeResults());
  assertEquals(expected.getCreateDisposition(), value.getCreateDisposition());
  assertEquals(expected.getDefaultDataset(), value.getDefaultDataset());
  assertEquals(expected.getDestinationTable(), value.getDestinationTable());
  assertEquals(expected.flattenResults(), value.flattenResults());
  assertEquals(expected.getPriority(), value.getPriority());
  assertEquals(expected.getQuery(), value.getQuery());
  assertEquals(expected.getTableDefinitions(), value.getTableDefinitions());
  assertEquals(expected.useQueryCache(), value.useQueryCache());
  assertEquals(expected.getUserDefinedFunctions(), value.getUserDefinedFunctions());
  assertEquals(expected.getWriteDisposition(), value.getWriteDisposition());
  assertEquals(expected.useLegacySql(), value.useLegacySql());
  assertEquals(expected.getMaximumBillingTier(), value.getMaximumBillingTier());
  assertEquals(expected.getSchemaUpdateOptions(), value.getSchemaUpdateOptions());
  assertEquals(
      expected.getDestinationEncryptionConfiguration(),
      value.getDestinationEncryptionConfiguration());
  assertEquals(expected.getTimePartitioning(), value.getTimePartitioning());
  assertEquals(expected.getClustering(), value.getClustering());
}
}
@Test
public void testToPbAndFromPb() {
  // Serialize once instead of rebuilding the proto for every assertion.
  com.google.api.services.bigquery.model.JobConfiguration configurationPb =
      QUERY_JOB_CONFIGURATION.toPb();
  // A query configuration must populate only the query section of the proto.
  assertNotNull(configurationPb.getQuery());
  assertNull(configurationPb.getExtract());
  assertNull(configurationPb.getCopy());
  assertNull(configurationPb.getLoad());
  // Round-tripping through the proto form must preserve every field.
  compareQueryJobConfiguration(
      QUERY_JOB_CONFIGURATION, QueryJobConfiguration.fromPb(configurationPb));
  // Same round-trip for a minimal, query-only configuration.
  QueryJobConfiguration job = QueryJobConfiguration.of(QUERY);
  compareQueryJobConfiguration(job, QueryJobConfiguration.fromPb(job.toPb()));
}
@Override
QueryJobConfiguration setProjectId(String projectId) {
  // Rebuild this configuration with the project id filled in where missing:
  // a destination table that already names a project keeps it; the default
  // dataset always picks up the new project.
  Builder builder = toBuilder();
  TableId destination = getDestinationTable();
  if (destination != null && Strings.isNullOrEmpty(destination.getProject())) {
    builder.setDestinationTable(destination.setProjectId(projectId));
  }
  DatasetId dataset = getDefaultDataset();
  if (dataset != null) {
    builder.setDefaultDataset(dataset.setProjectId(projectId));
  }
  return builder.build();
}
.build(); TableInfo tableInfo = TableInfo.of(tableId, viewDefinition); Table createdTable = bigquery.create(tableInfo); assertNotNull(createdTable); assertEquals(DATASET, createdTable.getTableId().getDataset()); assertEquals(tableName, createdTable.getTableId().getTable()); Table remoteTable = bigquery.getTable(DATASET, tableName); assertNotNull(remoteTable); assertEquals(createdTable.getTableId(), remoteTable.getTableId()); assertEquals(expectedSchema, remoteTable.getDefinition().getSchema()); QueryJobConfiguration config = QueryJobConfiguration.newBuilder("SELECT * FROM " + tableName) .setDefaultDataset(DatasetId.of(DATASET)) .setUseLegacySql(true) .build(); TableResult result = bigquery.query(config); int rowCount = 0; for (FieldValueList row : result.getValues()) {
/**
 * Runs {@code queryRequest} with allowLargeResults enabled, staging the output
 * in a freshly created temporary dataset/table that is deleted afterwards.
 */
private QueryResponse queryWithLarge(
    BigQuery bigquery, QueryRequest queryRequest, String projectId) {
  String tempDataset = genTempName("dataset");
  String tempTable = genTempName("table");
  bigquery.create(DatasetInfo.of(tempDataset));
  TableId destination = TableId.of(projectId, tempDataset, tempTable);
  QueryJobConfiguration configuration =
      QueryJobConfiguration.newBuilder(queryRequest.getQuery())
          .setAllowLargeResults(true)
          .setUseLegacySql(queryRequest.useLegacySql())
          .setDestinationTable(destination)
          .build();
  Job job = bigquery.create(JobInfo.of(configuration));
  // Poll until the job completes, then clean up the staging table.
  QueryResponse response = bigquery.getQueryResults(job.getJobId());
  response = loopQueryResponse(bigquery, response);
  bigquery.delete(destination);
  return response;
}
bigquery.create(DatasetInfo.newBuilder(datasetId).build()); bigquery.create(TableInfo.of(tableId, tableDefinition)); InsertAllRequest.newBuilder(tableId).addRow(firstRow).addRow(secondRow).build(); InsertAllResponse insertResponse = bigquery.insertAll(insertRequest); QueryJobConfiguration.newBuilder("SELECT * FROM my_dataset_id.my_table_id").build();
/**
 * Creates {@code datasetId.tableId} via a DDL query job (CREATE TABLE ... AS SELECT
 * of six random (year, token) rows) and blocks until the job finishes.
 */
private void generateTableWithDdl(String datasetId, String tableId) throws InterruptedException {
  String sql =
      String.format(
          "CREATE TABLE %s.%s "
              + "AS "
              + "SELECT "
              + "2000 + CAST(18 * RAND() as INT64) AS year, "
              + "IF(RAND() > 0.5,\"foo\",\"bar\") AS token "
              + "FROM "
              + "UNNEST(GENERATE_ARRAY(0,5,1)) AS r",
          datasetId, tableId);
  QueryJobConfiguration ddlConfig = QueryJobConfiguration.newBuilder(sql).build();
  Job ddlJob = bigquery.create(JobInfo.of(ddlConfig));
  ddlJob.waitFor();
}
/** Example of running a batch query. */
public void runBatchQuery() throws TimeoutException, InterruptedException {
  // [START bigquery_query_batch]
  // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
  QueryJobConfiguration queryConfig =
      QueryJobConfiguration.newBuilder(query)
          // Run at batch priority, which won't count toward concurrent rate
          // limit.
          .setPriority(QueryJobConfiguration.Priority.BATCH)
          .build();

  // Location must match that of the dataset(s) referenced in the query.
  JobId jobId = JobId.newBuilder().setRandomJob().setLocation("US").build();
  // Keep only the job name; the id is re-derived from it below to show how a
  // job can be looked up later from just its name and location.
  String jobIdString = jobId.getJob();

  // API request - starts the query.
  bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());

  // Check on the progress by getting the job's updated state. Once the state
  // is `DONE`, the results are ready.
  Job queryJob =
      bigquery.getJob(JobId.newBuilder().setJob(jobIdString).setLocation("US").build());
  System.out.printf(
      "Job %s in location %s currently in state: %s%n",
      queryJob.getJobId().getJob(),
      queryJob.getJobId().getLocation(),
      queryJob.getStatus().getState().toString());
  // [END bigquery_query_batch]
}
TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = QueryJobConfiguration.newBuilder(query) .setDefaultDataset(DatasetId.of(DATASET)) .setDestinationTable(destinationTable) .build(); Job remoteJob = bigquery.create(JobInfo.of(configuration)); remoteJob = remoteJob.waitFor(); assertNull(remoteJob.getStatus().getError()); assertTrue(bigquery.delete(DATASET, tableName)); Job queryJob = bigquery.getJob(remoteJob.getJobId()); JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); assertNotNull(statistics.getQueryPlan());
@Test public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { QueryJobConfiguration jobConfig = QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); QueryStatistics jobStatistics = QueryStatistics.newBuilder() expect(bigquery.getOptions()).andReturn(mockOptions); expect(mockOptions.getClock()).andReturn(CurrentMillisClock.getDefaultClock()).times(2); Job completedJob = expectedJob.toBuilder().setStatus(status).build(); .build(); expect(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .andReturn(completedQuery); expect(bigquery.getJob(JOB_INFO.getJobId())).andReturn(completedJob); expect(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .andReturn(completedQuery);
QueryJobConfiguration.newBuilder( "SELECT " + "CONCAT('https://stackoverflow.com/questions/', CAST(id as STRING)) as url, " .setUseLegacySql(false) .build(); Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); QueryResponse response = bigquery.getQueryResults(jobId);
/**
 * Returns a BigQuery query job configuration for the given query to be run. The job's id is
 * chosen by the service.
 *
 * @param query the query to run
 */
public static QueryJobConfiguration of(String query) {
  return newBuilder(query).build();
}
@Test
public void testSetProjectIdDoNotOverride() {
  // A destination table that already names a project must keep it; only the
  // default dataset picks up the project passed to setProjectId.
  QueryJobConfiguration withExplicitProject =
      QUERY_JOB_CONFIGURATION
          .toBuilder()
          .setDestinationTable(TABLE_ID.setProjectId(TEST_PROJECT_ID))
          .build();
  QueryJobConfiguration updated = withExplicitProject.setProjectId("update-only-on-dataset");
  assertEquals("update-only-on-dataset", updated.getDefaultDataset().getProject());
  assertEquals(TEST_PROJECT_ID, updated.getDestinationTable().getProject());
}
/**
 * Rejects operations that cannot run against a dry-run query configuration.
 *
 * @throws UnsupportedOperationException if {@code jobConfig} is a query configuration
 *     with dry run enabled
 */
static void checkNotDryRun(JobConfiguration jobConfig, String op) {
  // Only query configurations carry a dry-run flag; anything else passes.
  if (!(jobConfig instanceof QueryJobConfiguration)) {
    return;
  }
  QueryJobConfiguration config = (QueryJobConfiguration) jobConfig;
  // dryRun() may be null, which counts as false.
  if (Boolean.TRUE.equals(config.dryRun())) {
    String msg =
        "Operation \"%s\" does not work for dryrun queries, "
            + "since a dry run does not actually create a job. "
            + "To validate a query and obtain some processing statistics, consider calling "
            + "BigQuery.create(JobInfo).";
    throw new UnsupportedOperationException(String.format(msg, op));
  }
}
@Test
public void testSetProjectId() {
  // setProjectId propagates the project to both the default dataset and the
  // destination table.
  QueryJobConfiguration updated = QUERY_JOB_CONFIGURATION.setProjectId(TEST_PROJECT_ID);
  assertEquals(TEST_PROJECT_ID, updated.getDefaultDataset().getProject());
  assertEquals(TEST_PROJECT_ID, updated.getDestinationTable().getProject());
}
@SuppressWarnings("unchecked") static <T extends JobConfiguration> T fromPb( com.google.api.services.bigquery.model.JobConfiguration configurationPb) { if (configurationPb.getCopy() != null) { return (T) CopyJobConfiguration.fromPb(configurationPb); } else if (configurationPb.getExtract() != null) { return (T) ExtractJobConfiguration.fromPb(configurationPb); } else if (configurationPb.getLoad() != null) { return (T) LoadJobConfiguration.fromPb(configurationPb); } else if (configurationPb.getQuery() != null) { return (T) QueryJobConfiguration.fromPb(configurationPb); } else { // never reached throw new IllegalArgumentException("Job configuration is not supported"); } } }
@Test public void testToBuilder() { compareQueryJobConfiguration( QUERY_JOB_CONFIGURATION, QUERY_JOB_CONFIGURATION.toBuilder().build()); QueryJobConfiguration job = QUERY_JOB_CONFIGURATION.toBuilder().setQuery("New BigQuery SQL").build(); assertEquals("New BigQuery SQL", job.getQuery()); job = job.toBuilder().setQuery(QUERY).build(); compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION, job); }
@Override
QueryJobConfiguration setProjectId(String projectId) {
  // Rebuilds this configuration with the given project id applied to the
  // destination table and the default dataset, where those are set.
  Builder builder = toBuilder();
  if (destinationTable() != null) {
    // NOTE(review): this overwrites the destination table's project even when
    // one was set explicitly by the caller — confirm whether an already-set
    // project should be preserved instead.
    builder.destinationTable(destinationTable().setProjectId(projectId));
  }
  if (defaultDataset() != null) {
    builder.defaultDataset(defaultDataset().setProjectId(projectId));
  }
  return builder.build();
}