/**
 * Converts an API-level {@link TableReference} into a {@link TableId}.
 *
 * @param tableRef the reference whose project, dataset, and table ids are copied
 * @return a {@code TableId} holding the same three identifiers
 */
static TableId fromPb(TableReference tableRef) {
  String project = tableRef.getProjectId();
  String dataset = tableRef.getDatasetId();
  String table = tableRef.getTableId();
  return new TableId(project, dataset, table);
}
}
@Override public Table patch(Table table, Map<Option, ?> options) { try { // unset the type, as it is output only table.setType(null); TableReference reference = table.getTableReference(); return bigquery .tables() .patch(reference.getProjectId(), reference.getDatasetId(), reference.getTableId(), table) .setFields(Option.FIELDS.getString(options)) .execute(); } catch (IOException ex) { throw translate(ex); } }
/**
 * Returns a canonical string representation of the {@link TableReference}.
 *
 * <p>The format is {@code project:dataset.table} when a project id is present,
 * or {@code dataset.table} when it is not.
 */
public static String toTableSpec(TableReference ref) {
  String projectPrefix = ref.getProjectId() == null ? "" : ref.getProjectId() + ":";
  return projectPrefix + ref.getDatasetId() + "." + ref.getTableId();
}
/**
 * Fetches the table's metadata from BigQuery, retrying via {@code executeWithBackOff}.
 *
 * @param ref the table to look up
 * @throws IOException if the request ultimately fails
 * @throws InterruptedException if interrupted while backing off
 */
private Table getTable(TableReference ref) throws IOException, InterruptedException {
  String errorMessage =
      String.format(
          "Error opening BigQuery table %s of dataset %s.",
          ref.getTableId(), ref.getDatasetId());
  Bigquery.Tables.Get get =
      client.tables().get(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
  return executeWithBackOff(get, errorMessage);
}
/**
 * Builds a deferred operation that deletes the table named by {@code config}.
 *
 * @param config a textual JSON node naming the table to delete
 * @throws ConfigException if {@code config} is not a plain string node
 */
private BqOperation deleteTable(JsonNode config) {
  if (!config.isTextual()) {
    throw new ConfigException("Bad table reference: " + config);
  }
  return (bq, projectId) -> {
    TableReference ref = Bq.tableReference(projectId, defaultDataset, config.asText());
    bq.deleteTable(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
  };
}
/**
 * Converts an API-level {@link TableReference} into a {@link TableId}.
 *
 * @param tableRef the reference to convert
 * @return a {@code TableId} with the same project, dataset, and table ids
 */
static TableId fromPb(TableReference tableRef) {
  String projectId = tableRef.getProjectId();
  String datasetId = tableRef.getDatasetId();
  String tableId = tableRef.getTableId();
  return new TableId(projectId, datasetId, tableId);
}
}
/**
 * Configures the BigQuery input table for a job.
 *
 * <p>Delegates to the three-string overload after unpacking the reference.
 *
 * @param configuration the job configuration to update
 * @param tableReference the table to read input from
 * @throws IOException propagated from the delegate overload
 */
public static void setInputTable(Configuration configuration, TableReference tableReference)
    throws IOException {
  String projectId = tableReference.getProjectId();
  String datasetId = tableReference.getDatasetId();
  String tableId = tableReference.getTableId();
  setInputTable(configuration, projectId, datasetId, tableId);
}
/**
 * Converts an API {@link TableReference} proto to its {@link TableId} counterpart.
 *
 * @param tableRef the source reference
 * @return the equivalent {@code TableId}
 */
static TableId fromPb(TableReference tableRef) {
  String project = tableRef.getProjectId();
  String dataset = tableRef.getDatasetId();
  String table = tableRef.getTableId();
  return new TableId(project, dataset, table);
}
}
/**
 * Translates an API-level {@link TableReference} into a {@link TableId}.
 *
 * @param tableRef the reference whose identifiers are copied
 * @return a new {@code TableId} with matching project, dataset, and table ids
 */
static TableId fromPb(TableReference tableRef) {
  String projectId = tableRef.getProjectId();
  String datasetId = tableRef.getDatasetId();
  String tableId = tableRef.getTableId();
  return new TableId(projectId, datasetId, tableId);
}
}
/**
 * Returns the table spec string in {@code project:dataset.table} form.
 *
 * <p>Hoists the {@code getTableReference()} call, which the original repeated three
 * times, into a single local.
 */
public String tableSpec() {
  TableReference ref = table.getTableReference();
  return String.format(
      "%s:%s.%s", ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
}
/**
 * Sets the BigQuery access related fields in the JobConf for the input connector.
 *
 * @param config the job configuration.
 * @param fullyQualifiedInputTableId input-table id of the form
 *     [optional projectId]:[datasetId].[tableId]
 * @throws IOException propagated from the delegate overload
 */
public static void configureBigQueryInput(
    Configuration config, String fullyQualifiedInputTableId) throws IOException {
  TableReference parsedTable = BigQueryStrings.parseTableReference(fullyQualifiedInputTableId);
  String projectId = parsedTable.getProjectId();
  String datasetId = parsedTable.getDatasetId();
  String tableId = parsedTable.getTableId();
  configureBigQueryInput(config, projectId, datasetId, tableId);
}
/**
 * Empties a table by deleting it and recreating it from the same definition.
 *
 * @param projectId project in which to recreate the table
 * @param table the table definition; its reference names the table to delete
 * @throws IOException if either the delete or the create fails
 */
void emptyTable(String projectId, Table table) throws IOException {
  TableReference ref = table.getTableReference();
  deleteTable(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
  createTable(projectId, table);
}
/**
 * Lists all rows of the target table via the {@code tabledata.list} API.
 *
 * @param bq the BigQuery client to issue the request with
 * @throws RuntimeException wrapping any {@link IOException} from the request
 */
private List<TableRow> getTableRows(Bigquery bq) {
  String project = pipelineOptions.getProject();
  String dataset = pipelineOptions.getTargetDataset();
  String tableId = table.getTableReference().getTableId();
  try {
    return bq.tabledata().list(project, dataset, tableId).execute().getRows();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Fetches the target table's schema via the {@code tables.get} API.
 *
 * @param bq the BigQuery client to issue the request with
 * @throws RuntimeException wrapping any {@link IOException} from the request
 */
private TableSchema getSchema(Bigquery bq) {
  String project = pipelineOptions.getProject();
  String dataset = pipelineOptions.getTargetDataset();
  String tableId = table.getTableReference().getTableId();
  try {
    return bq.tables().get(project, dataset, tableId).execute().getSchema();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Cleans up after an export: drops the intermediate table when one was configured
 * for deletion, then hands the rest of the cleanup to the delegate.
 *
 * @throws IOException if the delete request or the delegate cleanup fails
 */
@Override
public void cleanupExport() throws IOException {
  if (deleteIntermediateTable) {
    String project = tableToExport.getProjectId();
    String dataset = tableToExport.getDatasetId();
    String tableId = tableToExport.getTableId();
    logger.atInfo().log(
        "Deleting input intermediate table: %s:%s.%s", project, dataset, tableId);
    Bigquery.Tables tables = bigQueryHelper.getRawBigquery().tables();
    tables.delete(project, dataset, tableId).execute();
  }
  delegate.cleanupExport();
}
/** Verifies that a fully qualified table spec parses into its three components. */
@Test
public void testTableParsing() {
  TableReference parsed = BigQueryHelpers.parseTableSpec("my-project:data_set.table_name");
  assertEquals("my-project", parsed.getProjectId());
  assertEquals("data_set", parsed.getDatasetId());
  assertEquals("table_name", parsed.getTableId());
}
/** Verifies that a table spec without a project id leaves the project null. */
@Test
public void testTableParsing_noProjectId() {
  TableReference parsed = BigQueryHelpers.parseTableSpec("data_set.table_name");
  assertEquals(null, parsed.getProjectId());
  assertEquals("data_set", parsed.getDatasetId());
  assertEquals("table_name", parsed.getTableId());
}
/**
 * Asserts that a table-based read matches the expected project/dataset/table,
 * has no query, and carries the expected validate flag.
 */
private void checkReadTableObjectWithValidate(
    BigQueryIO.Read read, String project, String dataset, String table, boolean validate) {
  TableReference actual = read.getTable();
  assertEquals(project, actual.getProjectId());
  assertEquals(dataset, actual.getDatasetId());
  assertEquals(table, actual.getTableId());
  assertNull(read.getQuery());
  assertEquals(validate, read.getValidate());
}
/** Verifies that writing to {@code dataset.table} leaves the project id null. */
@Test
public void testBuildWriteDefaultProject() {
  BigQueryIO.Write<TableRow> write = BigQueryIO.writeTableRows().to("somedataset.sometable");
  TableReference destination = write.getTable().get();
  assertEquals(null, destination.getProjectId());
  assertEquals("somedataset", destination.getDatasetId());
  assertEquals("sometable", destination.getTableId());
}
/** Verifies that a write built from a {@link TableReference} keeps all three ids. */
@Test
public void testBuildWriteWithTableReference() {
  TableReference table =
      new TableReference()
          .setProjectId("foo.com:project")
          .setDatasetId("somedataset")
          .setTableId("sometable");
  BigQueryIO.Write<TableRow> write = BigQueryIO.writeTableRows().to(table);
  TableReference destination = write.getTable().get();
  assertEquals("foo.com:project", destination.getProjectId());
  assertEquals("somedataset", destination.getDatasetId());
  assertEquals("sometable", destination.getTableId());
}