.withTriggeringFrequency(Duration.standardSeconds(30)) .withNumFileShards(2) .withMethod(BigQueryIO.Write.Method.FILE_LOADS) .withoutValidation()); p.run();
.withCreateDisposition(CreateDisposition.CREATE_NEVER) .withWriteDisposition(WriteDisposition.WRITE_APPEND) .withMethod(Method.FILE_LOADS); if (!Strings.isNullOrEmpty(bigQueryOptions.tempLocation)) { log.info(
.to("project-id:dataset-id.table-id") .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS) .withSchema( new TableSchema()
.withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED) .withWriteDisposition(WriteDisposition.WRITE_APPEND) .withMethod(Method.STREAMING_INSERTS) .withFailedInsertRetryPolicy(InsertRetryPolicy.retryTransientErrors()) .withCustomGcsTempLocation(options.getBigQueryLoadingTemporaryDirectory()));
.to(tableSpec) .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS) .withSchema( new TableSchema()
.to("project-id:dataset-id.table-id") .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS) .withSchema( new TableSchema()
.to("project-id:dataset-id.table-id") .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED) .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS) .withSchema( new TableSchema()
/**
 * Verifies that writing with an explicit {@link TimePartitioning} causes the created table to
 * carry both the supplied schema and the supplied partitioning spec.
 *
 * <p>Takes the write method as a parameter so callers can exercise both file-loads and
 * streaming-inserts paths against the fake BigQuery services.
 */
public void testTimePartitioning(BigQueryIO.Write.Method insertMethod) throws Exception {
  TableRow first = new TableRow().set("name", "a").set("number", "1");
  TableRow second = new TableRow().set("name", "b").set("number", "2");
  // Day-partitioned, with a 1-second partition expiration.
  TimePartitioning partitioning = new TimePartitioning().setType("DAY").setExpirationMs(1000L);
  TableSchema tableSchema =
      new TableSchema()
          .setFields(
              ImmutableList.of(new TableFieldSchema().setName("number").setType("INTEGER")));

  p.apply(Create.of(first, second))
      .apply(
          BigQueryIO.writeTableRows()
              .to("project-id:dataset-id.table-id")
              .withTestServices(fakeBqServices)
              .withMethod(insertMethod)
              .withSchema(tableSchema)
              .withTimePartitioning(partitioning)
              .withoutValidation());
  p.run();

  // The fake dataset service records the table metadata the write created; both the schema
  // and the partitioning spec must round-trip unchanged.
  Table created =
      fakeDatasetService.getTable(
          BigQueryHelpers.parseTableSpec("project-id:dataset-id.table-id"));
  assertEquals(tableSchema, created.getSchema());
  assertEquals(partitioning, created.getTimePartitioning());
}
/**
 * Verifies that streaming inserts can target a partition decorator directly
 * ({@code table-id$20171127}) — the pipeline must run without error.
 */
@Test
public void testWriteToTableDecorator() throws Exception {
  TableRow rowA = new TableRow().set("name", "a").set("number", "1");
  TableRow rowB = new TableRow().set("name", "b").set("number", "2");
  TableSchema numberSchema =
      new TableSchema()
          .setFields(
              ImmutableList.of(new TableFieldSchema().setName("number").setType("INTEGER")));

  // "$20171127" addresses a single day partition of the target table.
  p.apply(Create.of(rowA, rowB))
      .apply(
          BigQueryIO.writeTableRows()
              .to("project-id:dataset-id.table-id$20171127")
              .withTestServices(fakeBqServices)
              .withMethod(BigQueryIO.Write.Method.STREAMING_INSERTS)
              .withSchema(numberSchema)
              .withoutValidation());
  p.run();
}