@Test public void testRemoveTemporaryTables() throws Exception { FakeDatasetService datasetService = new FakeDatasetService(); String projectId = "project"; String datasetId = "dataset"; datasetService.createDataset(projectId, datasetId, "", "", null); List<TableReference> tableRefs = Lists.newArrayList( BigQueryHelpers.parseTableSpec( String.format("%s:%s.%s", projectId, datasetId, "table1")), BigQueryHelpers.parseTableSpec( String.format("%s:%s.%s", projectId, datasetId, "table2")), BigQueryHelpers.parseTableSpec( String.format("%s:%s.%s", projectId, datasetId, "table3"))); for (TableReference tableRef : tableRefs) { datasetService.createTable(new Table().setTableReference(tableRef)); } // Add one more table to delete that does not actually exist. tableRefs.add( BigQueryHelpers.parseTableSpec(String.format("%s:%s.%s", projectId, datasetId, "table4"))); WriteRename.removeTemporaryTables(datasetService, tableRefs); for (TableReference ref : tableRefs) { loggedWriteRename.verifyDebug("Deleting table " + toJsonString(ref)); checkState(datasetService.getTable(ref) == null, "Table " + ref + " was not deleted!"); } }
/**
 * Checks whether writing into {@code table} is permitted under the given dispositions,
 * applying any destructive preparation as a side effect.
 *
 * @param table the existing destination table, or {@code null} if it does not exist
 * @param createDisposition caller's create policy
 * @param writeDisposition caller's write policy
 * @return {@code false} when the dispositions forbid the write (missing table with
 *     CREATE_NEVER, or non-empty table with WRITE_EMPTY); {@code true} otherwise
 */
private boolean validateDispositions(
    Table table, CreateDisposition createDisposition, WriteDisposition writeDisposition)
    throws InterruptedException, IOException {
  if (table == null) {
    // No destination table: only CREATE_NEVER makes that a failure.
    return createDisposition != CreateDisposition.CREATE_NEVER;
  }
  if (writeDisposition == WriteDisposition.WRITE_TRUNCATE) {
    // Truncation is modeled by dropping the table; the caller recreates it.
    datasetService.deleteTable(table.getTableReference());
    return true;
  }
  if (writeDisposition == WriteDisposition.WRITE_EMPTY) {
    TableReference ref = table.getTableReference();
    // WRITE_EMPTY only succeeds when the destination currently holds no rows.
    return datasetService
        .getAllRows(ref.getProjectId(), ref.getDatasetId(), ref.getTableId())
        .isEmpty();
  }
  return true;
}
/**
 * Replaces the description of an existing table.
 *
 * @param tableReference fully-specified table to patch (project, dataset, table id)
 * @param tableDescription new description text; may be {@code null}
 * @return the updated {@link Table}
 */
@Override
public Table patchTableDescription(
    TableReference tableReference, @Nullable String tableDescription)
    throws IOException, InterruptedException {
  validateWholeTableReference(tableReference);
  synchronized (tables) {
    // Look up the stored table and mutate its description in place.
    Table table =
        getTableContainer(
                tableReference.getProjectId(),
                tableReference.getDatasetId(),
                tableReference.getTableId())
            .getTable();
    table.setDescription(tableDescription);
    return table;
  }
}
// Per-test fixture: resets shared fake state and provisions the dataset the tests write into.
@Before
public void setUp() throws IOException, InterruptedException {
  // Reset FakeDatasetService's global state (static setUp — presumably clears shared
  // table storage; confirm against FakeDatasetService).
  FakeDatasetService.setUp();
  // Forget tables BigQueryIO remembers having created in earlier tests.
  BigQueryIO.clearCreatedTables();
  fakeDatasetService.createDataset("project-id", "dataset-id", "", "", null);
}
.setDatasetId("dataset-id") .setTableId(String.format("%s_%05d_%05d", jobIdToken, i, j)); fakeDatasetService.createTable(new Table().setTableReference(tempTable)); rows.add(new TableRow().set("number", j * numTempTablesPerFinalTable + k)); fakeDatasetService.insertAll(tempTable, rows, null); expectedRowsPerTable.putAll(tableDestination, rows); String tableJson = toJsonString(tempTable); TableDestination tableDestination = entry.getKey(); TableReference tableReference = tableDestination.getTableReference(); Table table = checkNotNull(fakeDatasetService.getTable(tableReference)); assertEquals(tableReference.getTableId() + "_desc", tableDestination.getTableDescription()); fakeDatasetService.getAllRows( tableReference.getProjectId(), tableReference.getDatasetId(), TableReference tempTable = BigQueryHelpers.fromJsonString(tempTableJson, TableReference.class); assertEquals(null, fakeDatasetService.getTable(tempTable));
fakeDatasetService.createDataset("project", "data_set", "", "", null); fakeDatasetService.createTable( new Table() .setTableReference(table) new TableFieldSchema().setName("name").setType("STRING"), new TableFieldSchema().setName("number").setType("INTEGER"))))); fakeDatasetService.insertAll(table, expected, null);
.setTableId("sometable")); sometable.setNumBytes(1024L * 1024L); FakeDatasetService fakeDatasetService = new FakeDatasetService(); fakeDatasetService.createDataset("non-executing-project", "somedataset", "", "", null); fakeDatasetService.createTable(sometable); new TableRow().set("name", "b").set("number", 2L), new TableRow().set("name", "c").set("number", 3L)); fakeDatasetService.insertAll(sometable.getTableReference(), records, null);
.setErrors(ImmutableList.of(new ErrorProto().setReason("timeout"))); fakeDatasetService.failOnInsert( ImmutableMap.of( row1, ImmutableList.of(ephemeralError, ephemeralError), fakeDatasetService.getAllRows("project-id", "dataset-id", "table-id"), containsInAnyOrder(row1, row2, row3));
CreateDisposition createDisposition = CreateDisposition.valueOf(load.getCreateDisposition()); checkArgument("NEWLINE_DELIMITED_JSON".equals(load.getSourceFormat())); Table existingTable = datasetService.getTable(destination); if (!validateDispositions(existingTable, createDisposition, writeDisposition)) { return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto()); existingTable = existingTable.setTimePartitioning(load.getTimePartitioning()); datasetService.createTable(existingTable); rows.addAll(readRows(filename.toString())); datasetService.insertAll(destination, rows, null); FileSystems.delete(sourceFiles); return new JobStatus().setState("DONE");
/**
 * Simulates a BigQuery query job: decodes the rows that the fake services embedded in the
 * query string and materializes them into the job's destination table.
 *
 * @return a job status in state {@code DONE}
 */
private JobStatus runQueryJob(JobConfigurationQuery query)
    throws IOException, InterruptedException {
  TableReference destination = query.getDestinationTable();
  // The "query result" is carried inside the encoded query text itself.
  List<TableRow> resultRows = FakeBigQueryServices.rowsFromEncodedQuery(query.getQuery());
  datasetService.createTable(new Table().setTableReference(destination));
  datasetService.insertAll(destination, resultRows, null);
  return new JobStatus().setState("DONE");
}
/**
 * Simulates a BigQuery extract job: reads all rows of the source table and writes them to
 * each destination URI, recording per-destination file counts in the job statistics.
 *
 * @param job the job whose statistics are updated in place
 * @param extract the extract configuration (source table and destination URIs)
 * @return a job status in state {@code DONE}
 */
private JobStatus runExtractJob(Job job, JobConfigurationExtract extract)
    throws InterruptedException, IOException {
  TableReference source = extract.getSourceTable();
  List<TableRow> rows =
      datasetService.getAllRows(
          source.getProjectId(), source.getDatasetId(), source.getTableId());
  TableSchema schema = datasetService.getTable(source).getSchema();
  List<Long> fileCounts = Lists.newArrayList();
  for (String destinationUri : extract.getDestinationUris()) {
    // writeRows returns how many files were produced for this destination.
    fileCounts.add(writeRows(source.getTableId(), rows, schema, destinationUri));
  }
  job.setStatistics(
      new JobStatistics()
          .setExtract(new JobStatistics4().setDestinationUriFileCounts(fileCounts)));
  return new JobStatus().setState("DONE");
}
new TableRow().set("name", "e").set("number", 5L), new TableRow().set("name", "f").set("number", 6L)); fakeDatasetService.createDataset( tempTableReference.getProjectId(), tempTableReference.getDatasetId(), "", "", null); Table table = new TableFieldSchema().setName("name").setType("STRING"), new TableFieldSchema().setName("number").setType("INTEGER")))); fakeDatasetService.createTable(table);
boolean ignoreUnknownValues) throws IOException, InterruptedException { Map<TableRow, List<TableDataInsertAllResponse.InsertErrors>> insertErrors = getInsertErrors(); synchronized (tables) { if (insertIdList != null) { getTableContainer( ref.getProjectId(), ref.getDatasetId(),
/**
 * Returns whether the referenced table reports no stored bytes.
 *
 * <p>NOTE(review): assumes the table exists — if {@code getTable} returned {@code null}
 * this would throw NPE; confirm callers only pass existing tables.
 */
@Override
public boolean isTableEmpty(TableReference tableRef) throws IOException, InterruptedException {
  Long byteCount = getTable(tableRef).getNumBytes();
  // An unset byte count is treated the same as zero bytes: empty.
  return byteCount == null || byteCount == 0L;
}
/**
 * Creates a fake job service backed by an in-memory dataset service.
 *
 * @param numFailures number of failures this service is expected to simulate
 *     (presumably consumed elsewhere in this class when starting/polling jobs —
 *     confirm against the fields' usages)
 */
public FakeJobService(int numFailures) {
  this.datasetService = new FakeDatasetService();
  this.numFailuresExpected = numFailures;
}
fakeDatasetService.getAllRows("project-id", "dataset-id", "userid-" + entry.getKey()), containsInAnyOrder(Iterables.toArray(entry.getValue(), TableRow.class)));
/**
 * Returns all rows currently stored for the given table.
 *
 * <p>NOTE(review): this hands back whatever {@code getRows()} returns directly — if that
 * is the container's live list, caller mutations would affect stored state; confirm
 * whether {@code TableContainer.getRows()} returns a copy.
 *
 * @throws InterruptedException if the lookup is interrupted
 * @throws IOException if the table or dataset does not exist (per getTableContainer)
 */
public List<TableRow> getAllRows(String projectId, String datasetId, String tableId)
    throws InterruptedException, IOException {
  synchronized (tables) {
    return getTableContainer(projectId, datasetId, tableId).getRows();
  }
}
/**
 * Convenience overload of {@code insertAll}: wraps each row in the global window with an
 * on-time pane and delegates to the windowed variant with an always-retry policy.
 *
 * @param ref destination table
 * @param rowList rows to insert
 * @param insertIdList optional per-row insert ids, parallel to {@code rowList}
 * @return the value returned by the windowed {@code insertAll}
 */
public long insertAll(
    TableReference ref, List<TableRow> rowList, @Nullable List<String> insertIdList)
    throws IOException, InterruptedException {
  List<ValueInSingleWindow<TableRow>> windowed = Lists.newArrayList();
  for (TableRow tableRow : rowList) {
    // Every row is placed in the global window at its maximum timestamp.
    ValueInSingleWindow<TableRow> windowedRow =
        ValueInSingleWindow.of(
            tableRow,
            GlobalWindow.TIMESTAMP_MAX_VALUE,
            GlobalWindow.INSTANCE,
            PaneInfo.ON_TIME_AND_ONLY_FIRING);
    windowed.add(windowedRow);
  }
  return insertAll(
      ref, windowed, insertIdList, InsertRetryPolicy.alwaysRetry(), null, null, false, false);
}
WriteDisposition writeDisposition = WriteDisposition.valueOf(copy.getWriteDisposition()); CreateDisposition createDisposition = CreateDisposition.valueOf(copy.getCreateDisposition()); Table existingTable = datasetService.getTable(destination); if (!validateDispositions(existingTable, createDisposition, writeDisposition)) { return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto()); List<TableRow> allRows = Lists.newArrayList(); for (TableReference source : sources) { Table table = checkNotNull(datasetService.getTable(source)); if (!first) { if (!Objects.equals(partitioning, table.getTimePartitioning())) { first = false; allRows.addAll( datasetService.getAllRows( source.getProjectId(), source.getDatasetId(), source.getTableId())); datasetService.createTable( new Table() .setTableReference(destination) .setSchema(schema) .setTimePartitioning(partitioning)); datasetService.insertAll(destination, allRows, null); return new JobStatus().setState("DONE");
fakeDatasetService.createDataset("project", "data_set", "", "", null); fakeDatasetService.createTable( new Table() .setTableReference(table) new TableFieldSchema().setName("number").setType("INTEGER")))) .setStreamingBuffer(new Streamingbuffer().setEstimatedBytes(BigInteger.valueOf(10)))); fakeDatasetService.insertAll(table, data, null);