/**
 * Creates a BigQuery standard table definition given its schema.
 *
 * @param schema the schema of the table
 */
public static StandardTableDefinition of(Schema schema) {
  return newBuilder().setSchema(schema).build();
}
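// A minimal usage sketch of StandardTableDefinition.of(Schema), assuming the
// google-cloud-bigquery client library is on the classpath; the dataset, table,
// and field names below are hypothetical placeholders, not values from the library.
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;

public class CreateStandardTableExample {
  public static void main(String... args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Describe the table's columns, then wrap the schema in a standard (native) table definition.
    Schema schema =
        Schema.of(
            Field.of("name", LegacySQLTypeName.STRING),
            Field.of("age", LegacySQLTypeName.INTEGER));
    StandardTableDefinition definition = StandardTableDefinition.of(schema);
    // Create the table in an existing dataset.
    bigquery.create(TableInfo.of(TableId.of("my_dataset", "my_table"), definition));
  }
}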
@Test
public void testOf() {
  StandardTableDefinition definition = StandardTableDefinition.of(TABLE_SCHEMA);
  assertEquals(TableDefinition.Type.TABLE, definition.getType());
  assertEquals(TABLE_SCHEMA, definition.getSchema());
  assertNull(definition.getLocation());
  assertNull(definition.getNumBytes());
  assertNull(definition.getNumRows());
  assertNull(definition.getStreamingBuffer());
  assertNull(definition.getTimePartitioning());
  assertNull(definition.getClustering());
}
@Override
Table toPb() {
  Table tablePb = super.toPb();
  if (getNumRows() != null) {
    tablePb.setNumRows(BigInteger.valueOf(getNumRows()));
  }
  tablePb.setNumBytes(getNumBytes());
  tablePb.setLocation(getLocation());
  if (getStreamingBuffer() != null) {
    tablePb.setStreamingBuffer(getStreamingBuffer().toPb());
  }
  if (getTimePartitioning() != null) {
    tablePb.setTimePartitioning(getTimePartitioning().toPb());
  }
  if (getClustering() != null) {
    tablePb.setClustering(getClustering().toPb());
  }
  return tablePb;
}
private void compareStandardTableDefinition(
    StandardTableDefinition expected, StandardTableDefinition value) {
  assertEquals(expected, value);
  assertEquals(expected.getSchema(), value.getSchema());
  assertEquals(expected.getType(), value.getType());
  assertEquals(expected.getNumBytes(), value.getNumBytes());
  assertEquals(expected.getNumRows(), value.getNumRows());
  assertEquals(expected.getLocation(), value.getLocation());
  assertEquals(expected.getStreamingBuffer(), value.getStreamingBuffer());
  assertEquals(expected.getTimePartitioning(), value.getTimePartitioning());
  assertEquals(expected.getClustering(), value.getClustering());
  assertEquals(expected.hashCode(), value.hashCode());
}
}
@Override
TableInfo parse(String... args) throws Exception {
  if (args.length >= 3) {
    String dataset = args[0];
    String table = args[1];
    TableId tableId = TableId.of(dataset, table);
    return TableInfo.of(tableId, StandardTableDefinition.of(parseSchema(args, 2, args.length)));
  }
  throw new IllegalArgumentException("Missing required arguments.");
}
@Test
public void testCreateAndGetTableWithSelectedField() {
  String tableName = "test_create_and_get_selected_fields_table";
  TableId tableId = TableId.of(DATASET, tableName);
  StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA);
  Table createdTable =
      bigquery.create(
          TableInfo.newBuilder(tableId, tableDefinition)
              .setLabels(Collections.singletonMap("a", "b"))
              .build());
  assertNotNull(createdTable);
  assertEquals(DATASET, createdTable.getTableId().getDataset());
  assertEquals(tableName, createdTable.getTableId().getTable());
  Table remoteTable =
      bigquery.getTable(
          DATASET, tableName, TableOption.fields(TableField.CREATION_TIME, TableField.LABELS));
  assertNotNull(remoteTable);
  assertTrue(remoteTable.getDefinition() instanceof StandardTableDefinition);
  assertEquals(createdTable.getTableId(), remoteTable.getTableId());
  assertEquals(TableDefinition.Type.TABLE, remoteTable.getDefinition().getType());
  assertThat(remoteTable.getLabels()).containsExactly("a", "b");
  assertNotNull(remoteTable.getCreationTime());
  assertNull(remoteTable.getDefinition().getSchema());
  assertNull(remoteTable.getLastModifiedTime());
  assertNull(remoteTable.<StandardTableDefinition>getDefinition().getNumBytes());
  assertNull(remoteTable.<StandardTableDefinition>getDefinition().getNumRows());
  assertNull(remoteTable.<StandardTableDefinition>getDefinition().getTimePartitioning());
  assertNull(remoteTable.<StandardTableDefinition>getDefinition().getClustering());
  assertTrue(remoteTable.delete());
}
Clustering clustering =
    Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build();
StandardTableDefinition tableDefinition =
    StandardTableDefinition.newBuilder()
        .setSchema(TABLE_SCHEMA)
        .setTimePartitioning(partitioning)
        .setClustering(clustering)
        .build();

assertNotNull(remoteTable.getCreationTime());
assertNotNull(remoteTable.getLastModifiedTime());
assertNotNull(remoteTable.<StandardTableDefinition>getDefinition().getNumBytes());
assertNotNull(remoteTable.<StandardTableDefinition>getDefinition().getNumRows());
assertEquals(
    partitioning, remoteTable.<StandardTableDefinition>getDefinition().getTimePartitioning());
assertEquals(clustering, remoteTable.<StandardTableDefinition>getDefinition().getClustering());
assertTrue(remoteTable.delete());
@Test
public void testUpdateTableWithSelectedFields() {
  String tableName = "test_update_with_selected_fields_table";
  StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA);
  TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition);
  Table createdTable = bigquery.create(tableInfo);
  assertNotNull(createdTable);
  Table updatedTable =
      bigquery.update(
          tableInfo.toBuilder().setDescription("newDescr").build(),
          TableOption.fields(TableField.DESCRIPTION));
  assertTrue(updatedTable.getDefinition() instanceof StandardTableDefinition);
  assertEquals(DATASET, updatedTable.getTableId().getDataset());
  assertEquals(tableName, updatedTable.getTableId().getTable());
  assertEquals("newDescr", updatedTable.getDescription());
  assertNull(updatedTable.getDefinition().getSchema());
  assertNull(updatedTable.getLastModifiedTime());
  assertNull(updatedTable.<StandardTableDefinition>getDefinition().getNumBytes());
  assertNull(updatedTable.<StandardTableDefinition>getDefinition().getNumRows());
  assertTrue(createdTable.delete());
}
createFeatureField(featureSpec));
TableDefinition tableDefinition =
    StandardTableDefinition.newBuilder()
        .setSchema(schema)
        .setTimePartitioning(TimePartitioning.of(Type.DAY))
        .build();

fields.add(newField);
Schema newSchema = Schema.of(fields);
TableDefinition tableDefinition = StandardTableDefinition.of(newSchema);
TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
checkTableCreation(bigQuery.update(tableInfo), featureSpec);
TableId tableId = TableId.of(DATASET, tableName);
StandardTableDefinition tableDefinition =
    StandardTableDefinition.newBuilder()
        .setSchema(TABLE_SCHEMA)
        .setTimePartitioning(TimePartitioning.of(Type.DAY))
        .build();

assertThat(
        ((StandardTableDefinition) table.getDefinition())
            .getTimePartitioning()
            .getExpirationMs())
    .isNull();

.setDefinition(
    tableDefinition
        .toBuilder()
        .setTimePartitioning(TimePartitioning.of(Type.DAY, 42L))
        .build())

assertThat(
        ((StandardTableDefinition) table.getDefinition())
            .getTimePartitioning()
            .getExpirationMs())
    .isEqualTo(42L);

.setDefinition(
    tableDefinition
        .toBuilder()
        .setTimePartitioning(TimePartitioning.of(Type.DAY))
        .build())
@Test
public void testToAndFromPb() {
  assertTrue(TableDefinition.fromPb(TABLE_DEFINITION.toPb()) instanceof StandardTableDefinition);
  compareStandardTableDefinition(
      TABLE_DEFINITION, TableDefinition.<StandardTableDefinition>fromPb(TABLE_DEFINITION.toPb()));
  StandardTableDefinition definition = StandardTableDefinition.of(TABLE_SCHEMA);
  assertTrue(TableDefinition.fromPb(definition.toPb()) instanceof StandardTableDefinition);
  compareStandardTableDefinition(
      definition, TableDefinition.<StandardTableDefinition>fromPb(definition.toPb()));
}
/** Example of copying multiple tables to a destination. */
public void copyTables(String datasetId, String destinationTableId) throws InterruptedException {
  generateTableWithDdl(datasetId, "table1");
  generateTableWithDdl(datasetId, "table2");

  // [START bigquery_copy_table_multiple_source]
  TableId destinationTable = TableId.of(datasetId, destinationTableId);
  CopyJobConfiguration configuration =
      CopyJobConfiguration.newBuilder(
              destinationTable,
              Arrays.asList(TableId.of(datasetId, "table1"), TableId.of(datasetId, "table2")))
          .build();

  // Copy the tables.
  Job job = bigquery.create(JobInfo.of(configuration));
  job = job.waitFor();

  // Check the table.
  StandardTableDefinition table = bigquery.getTable(destinationTable).getDefinition();
  System.out.println("State: " + job.getStatus().getState());
  System.out.printf("Copied %d rows.\n", table.getNumRows());
  // [END bigquery_copy_table_multiple_source]
}
@SuppressWarnings("unchecked") static <T extends TableDefinition> T fromPb(Table tablePb) { switch (Type.valueOf(tablePb.getType()).toString()) { case "TABLE": return (T) StandardTableDefinition.fromPb(tablePb); case "VIEW": return (T) ViewDefinition.fromPb(tablePb); case "EXTERNAL": return (T) ExternalTableDefinition.fromPb(tablePb); case "MODEL": return (T) ModelTableDefinition.fromPb(tablePb); default: // never reached throw new IllegalArgumentException("Format " + tablePb.getType() + " is not supported"); } } }
@Test
public void testToBuilder() {
  compareStandardTableDefinition(TABLE_DEFINITION, TABLE_DEFINITION.toBuilder().build());
  StandardTableDefinition tableDefinition =
      TABLE_DEFINITION.toBuilder().setLocation("EU").build();
  assertEquals("EU", tableDefinition.getLocation());
  tableDefinition = tableDefinition.toBuilder().setLocation(LOCATION).build();
  compareStandardTableDefinition(TABLE_DEFINITION, tableDefinition);
}
public static void main(String... args) throws InterruptedException, TimeoutException {
  BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  TableId tableId = TableId.of("dataset", "table");
  Table table = bigquery.getTable(tableId);
  if (table == null) {
    System.out.println("Creating table " + tableId);
    Field integerField = Field.of("fieldName", LegacySQLTypeName.INTEGER);
    Schema schema = Schema.of(integerField);
    table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
  }
  System.out.println("Loading data into table " + tableId);
  Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path");
  loadJob = loadJob.waitFor();
  if (loadJob.getStatus().getError() != null) {
    System.out.println("Job completed with errors");
  } else {
    System.out.println("Job succeeded");
  }
}
}
/** Example of loading a parquet file from GCS to a table. */
public void loadTableGcsParquet(String datasetName) throws InterruptedException {
  // [START bigquery_load_table_gcs_parquet]
  String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet";
  TableId tableId = TableId.of(datasetName, "us_states");
  LoadJobConfiguration configuration =
      LoadJobConfiguration.builder(tableId, sourceUri)
          .setFormatOptions(FormatOptions.parquet())
          .build();

  // Load the table.
  Job loadJob = bigquery.create(JobInfo.of(configuration));
  loadJob = loadJob.waitFor();

  // Check the table.
  StandardTableDefinition destinationTable = bigquery.getTable(tableId).getDefinition();
  System.out.println("State: " + loadJob.getStatus().getState());
  System.out.printf("Loaded %d rows.\n", destinationTable.getNumRows());
  // [END bigquery_load_table_gcs_parquet]
}
@SuppressWarnings("unchecked") static <T extends TableDefinition> T fromPb(Table tablePb) { switch (Type.valueOf(tablePb.getType())) { case TABLE: return (T) StandardTableDefinition.fromPb(tablePb); case VIEW: return (T) ViewDefinition.fromPb(tablePb); case EXTERNAL: return (T) ExternalTableDefinition.fromPb(tablePb); default: // never reached throw new IllegalArgumentException("Format " + tablePb.getType() + " is not supported"); } } }