@Override
public boolean cancel(String projectId, String jobId, String location) {
  // Attempts to cancel the job; returns false when the job is not found,
  // rethrows any other service error as a BigQueryException.
  try {
    bigquery.jobs().cancel(projectId, jobId).setLocation(location).execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return false;
  }
}
@Override
public boolean deleteTable(String projectId, String datasetId, String tableId) {
  // Deletes the table; returns false when it does not exist,
  // rethrows any other service error as a BigQueryException.
  try {
    bigquery.tables().delete(projectId, datasetId, tableId).execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return false;
  }
}
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  // Inserts the dataset under its own project, limiting the response to the
  // caller-requested fields (null when FIELDS is absent from options).
  String projectId = dataset.getDatasetReference().getProjectId();
  try {
    return bigquery
        .datasets()
        .insert(projectId, dataset)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
// NOTE(review): fragment of a resumable-upload chunk write — the enclosing method
// signature is not visible here. Builds the PUT request for the byte range
// [destOffset, destOffset + length) and begins assembling the Content-Range value.
// ByteArrayContent media type is null; presumably the content type is irrelevant
// for intermediate chunks — TODO confirm against the upload protocol.
HttpRequest httpRequest = bigquery .getRequestFactory() .buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); httpRequest.setParser(bigquery.getObjectParser()); long limit = destOffset + length; StringBuilder range = new StringBuilder("bytes ");
@Override
public String open(Job loadJob) {
  // Initiates a resumable-upload session for the given load job and returns the
  // session URI taken from the Location header of the initiation response.
  //
  // @param loadJob the job whose configuration describes the load
  // @return the resumable session URI to which data chunks should be PUT
  // @throws BigQueryException (unchecked) wrapping any IOException from the service
  try {
    String builder = BASE_RESUMABLE_URI + options.getProjectId() + "/jobs";
    GenericUrl url = new GenericUrl(builder);
    url.set("uploadType", "resumable");
    JsonFactory jsonFactory = bigquery.getJsonFactory();
    HttpRequestFactory requestFactory = bigquery.getRequestFactory();
    HttpRequest httpRequest =
        requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob));
    // Fix: the resumable-upload protocol header is "X-Upload-Content-Type";
    // the previous "X-Upload-Content-Value" is not a recognized header and
    // was silently ignored by the service.
    httpRequest.getHeaders().set("X-Upload-Content-Type", "application/octet-stream");
    HttpResponse response = httpRequest.execute();
    return response.getHeaders().getLocation();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public TableDataInsertAllResponse insertAll(
    String projectId, String datasetId, String tableId, TableDataInsertAllRequest request) {
  // Streams the given rows into the table, translating transport failures
  // into the service's unchecked exception type.
  try {
    return bigquery.tabledata().insertAll(projectId, datasetId, tableId, request).execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
public void deleteDataset(String projectId, String datasetId) {
  // Best-effort teardown: deletes every table in the dataset, then the dataset
  // itself. Failures are logged and never propagated to the caller.
  try {
    TableList tables = bqClient.tables().list(projectId, datasetId).execute();
    // Fix: getTables() is null for an empty dataset; the old code relied on the
    // resulting NullPointerException being swallowed by the catch block.
    if (tables.getTables() != null) {
      for (Tables table : tables.getTables()) {
        this.deleteTable(projectId, datasetId, table.getTableReference().getTableId());
      }
    }
  } catch (Exception e) {
    // Keep best-effort semantics, but pass the throwable so the stack trace
    // is retained (previously only the message was logged).
    LOG.debug("Exceptions caught when listing all tables: " + e.getMessage(), e);
  }
  try {
    bqClient.datasets().delete(projectId, datasetId).execute();
    LOG.info("Successfully deleted dataset: " + datasetId);
  } catch (Exception e) {
    LOG.debug("Exceptions caught when deleting dataset: " + e.getMessage(), e);
  }
}
public List<Jobs> listAllJobs(HttpServletResponse resp, Bigquery bigquery) throws IOException {
  // Lists the jobs of the caller's first visible project.
  //
  // @return the job list, or null when the account has no visible projects
  // @throws IOException on any transport failure
  Bigquery.Projects.List projectRequest = bigquery.projects().list();
  ProjectList projectResponse = projectRequest.execute();
  // Fix: guard against accounts with no projects (consistent with listJob);
  // previously get(0) failed with an unchecked exception.
  if (projectResponse.getProjects() == null || projectResponse.getProjects().isEmpty()) {
    return null;
  }
  Bigquery.Jobs.List jobsRequest =
      bigquery.jobs().list(projectResponse.getProjects().get(0).getId());
  JobList jobsResponse = jobsRequest.execute();
  return jobsResponse.getJobs();
}
// NOTE(review): fragment of a resumable-upload chunk write — the enclosing method
// signature and the remainder of the try block are not visible here. Targets the
// session URI (uploadId) with a PUT of [toWriteOffset, toWriteOffset + length)
// from the local buffer; limit is the end of the destination byte range.
try { GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); long limit = destOffset + length;
@Override
public TableDataList listTableData(
    String projectId, String datasetId, String tableId, Map<Option, ?> options) {
  // Lists rows of the table, honoring MAX_RESULTS / PAGE_TOKEN / START_INDEX
  // options when present (each null when absent).
  Long startIndex = Option.START_INDEX.getLong(options);
  try {
    return bigquery
        .tabledata()
        .list(projectId, datasetId, tableId)
        .setMaxResults(Option.MAX_RESULTS.getLong(options))
        .setPageToken(Option.PAGE_TOKEN.getString(options))
        .setStartIndex(startIndex == null ? null : BigInteger.valueOf(startIndex))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
public Job listJob(HttpServletResponse resp, Bigquery bigquery, String jobId) throws IOException {
  // Fetches the job with the given id from the caller's first visible project;
  // returns null when the account has no projects.
  ProjectList projectResponse = bigquery.projects().list().execute();
  if (projectResponse.getTotalItems() == 0) {
    return null;
  }
  Projects project = projectResponse.getProjects().get(0);
  return bigquery.jobs().get(project.getId(), jobId).execute();
}
}
@Override
public String open(JobConfiguration configuration) {
  // Initiates a resumable-upload session for a load job built from the given
  // configuration and returns the session URI from the Location response header.
  //
  // @param configuration the load-job configuration to submit
  // @return the resumable session URI to which data chunks should be PUT
  // @throws BigQueryException (unchecked) wrapping any IOException from the service
  try {
    Job loadJob = new Job().setConfiguration(configuration);
    StringBuilder builder =
        new StringBuilder()
            .append(BASE_RESUMABLE_URI)
            .append(options.projectId())
            .append("/jobs");
    GenericUrl url = new GenericUrl(builder.toString());
    url.set("uploadType", "resumable");
    JsonFactory jsonFactory = bigquery.getJsonFactory();
    HttpRequestFactory requestFactory = bigquery.getRequestFactory();
    HttpRequest httpRequest =
        requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob));
    // Fix: the resumable-upload protocol header is "X-Upload-Content-Type";
    // the previous "X-Upload-Content-Value" is not a recognized header and
    // was silently ignored by the service.
    httpRequest.getHeaders().set("X-Upload-Content-Type", "application/octet-stream");
    HttpResponse response = httpRequest.execute();
    return response.getHeaders().getLocation();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
// NOTE(review): fragment of a resumable-upload chunk write (duplicate of an earlier
// snippet) — the enclosing method signature is not visible here. Builds the PUT
// request for [destOffset, destOffset + length) and begins the Content-Range value.
// ByteArrayContent media type is null; presumably the content type is irrelevant
// for intermediate chunks — TODO confirm against the upload protocol.
HttpRequest httpRequest = bigquery .getRequestFactory() .buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); httpRequest.setParser(bigquery.getObjectParser()); long limit = destOffset + length; StringBuilder range = new StringBuilder("bytes ");
// NOTE(review): fragment of a resumable-upload chunk write (duplicate of an earlier
// snippet) — the enclosing method signature and the rest of the try block are not
// visible here. Targets the session URI (uploadId) with a PUT of
// [toWriteOffset, toWriteOffset + length) from the local buffer.
try { GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); long limit = destOffset + length;
@Override
public Table getTable(
    String projectId, String datasetId, String tableId, Map<Option, ?> options) {
  // Fetches the table, limited to the requested fields; returns null (rather
  // than throwing) when the table does not exist.
  try {
    return bigquery
        .tables()
        .get(projectId, datasetId, tableId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return null;
  }
}
@Override
public Job getJob(String projectId, String jobId, String location, Map<Option, ?> options) {
  // Fetches the job in the given location, limited to the requested fields;
  // returns null (rather than throwing) when the job does not exist.
  try {
    return bigquery
        .jobs()
        .get(projectId, jobId)
        .setLocation(location)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return null;
  }
}
@Override
public Dataset getDataset(String projectId, String datasetId, Map<Option, ?> options) {
  // Fetches the dataset, limited to the requested fields; returns null (rather
  // than throwing) when the dataset does not exist.
  try {
    return bigquery
        .datasets()
        .get(projectId, datasetId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return null;
  }
}
public void insertDataToTable(
    String projectId, String datasetId, String tableName, List<Map<String, Object>> rows) {
  // Best-effort streaming insert of the given rows into the table: failures
  // are logged and never propagated to the caller.
  try {
    List<Rows> dataRows =
        rows.stream().map(row -> new Rows().setJson(row)).collect(Collectors.toList());
    this.bqClient
        .tabledata()
        .insertAll(
            projectId, datasetId, tableName, new TableDataInsertAllRequest().setRows(dataRows))
        .execute();
    LOG.info("Successfully inserted data into table : " + tableName);
  } catch (Exception e) {
    // Keep best-effort semantics, but pass the throwable so the stack trace
    // is retained (previously only the message was logged).
    LOG.debug("Exceptions caught when inserting data: " + e.getMessage(), e);
  }
}
}
@Override
public String open(JobConfiguration configuration) {
  // Initiates a resumable-upload session for a load job built from the given
  // configuration and returns the session URI from the Location response header.
  //
  // @param configuration the load-job configuration to submit
  // @return the resumable session URI to which data chunks should be PUT
  // @throws BigQueryException (unchecked) wrapping any IOException from the service
  try {
    Job loadJob = new Job().setConfiguration(configuration);
    StringBuilder builder =
        new StringBuilder()
            .append(BASE_RESUMABLE_URI)
            .append(options.projectId())
            .append("/jobs");
    GenericUrl url = new GenericUrl(builder.toString());
    url.set("uploadType", "resumable");
    JsonFactory jsonFactory = bigquery.getJsonFactory();
    HttpRequestFactory requestFactory = bigquery.getRequestFactory();
    HttpRequest httpRequest =
        requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob));
    // Fix: the resumable-upload protocol header is "X-Upload-Content-Type";
    // the previous "X-Upload-Content-Value" is not a recognized header and
    // was silently ignored by the service.
    httpRequest.getHeaders().set("X-Upload-Content-Type", "application/octet-stream");
    HttpResponse response = httpRequest.execute();
    return response.getHeaders().getLocation();
  } catch (IOException ex) {
    throw translate(ex);
  }
}