/**
 * Deletes the identified table.
 *
 * @return {@code true} if the table was deleted, {@code false} if the service reported it as
 *     not found
 * @throws BigQueryException if any other service error occurs
 */
@Override
public boolean deleteTable(String projectId, String datasetId, String tableId) {
  try {
    bigquery.tables().delete(projectId, datasetId, tableId).execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    // A missing table is reported to the caller as "nothing was deleted".
    return false;
  }
}
/**
 * Requests cancellation of the identified job.
 *
 * @return {@code true} if the cancel request was accepted, {@code false} if the job was not
 *     found
 * @throws BigQueryException if any other service error occurs
 */
@Override
public boolean cancel(String projectId, String jobId, String location) {
  try {
    bigquery.jobs().cancel(projectId, jobId).setLocation(location).execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    // An unknown job is reported to the caller as "nothing was cancelled".
    return false;
  }
}
/**
 * Fetches the identified table, restricting the returned fields to those requested via
 * {@code options}.
 *
 * @return the table, or {@code null} if the service reports it as not found
 * @throws BigQueryException if any other service error occurs
 */
@Override
public Table getTable(
    String projectId, String datasetId, String tableId, Map<Option, ?> options) {
  try {
    return bigquery
        .tables()
        .get(projectId, datasetId, tableId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    // An absent table is surfaced as null rather than an exception.
    return null;
  }
}
/**
 * Fetches the identified dataset, restricting the returned fields to those requested via
 * {@code options}.
 *
 * @return the dataset, or {@code null} if the service reports it as not found
 * @throws BigQueryException if any other service error occurs
 */
@Override
public Dataset getDataset(String projectId, String datasetId, Map<Option, ?> options) {
  try {
    return bigquery
        .datasets()
        .get(projectId, datasetId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    // An absent dataset is surfaced as null rather than an exception.
    return null;
  }
}
/**
 * Deletes the identified dataset, optionally deleting its contents as dictated by the
 * {@code DELETE_CONTENTS} option.
 *
 * @return {@code true} if the dataset was deleted, {@code false} if the service reported it as
 *     not found
 * @throws BigQueryException if any other service error occurs
 */
@Override
public boolean deleteDataset(String projectId, String datasetId, Map<Option, ?> options) {
  try {
    bigquery
        .datasets()
        .delete(projectId, datasetId)
        .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options))
        .execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    // A missing dataset is reported to the caller as "nothing was deleted".
    return false;
  }
}
/**
 * Fetches the identified job in the given location, restricting the returned fields to those
 * requested via {@code options}.
 *
 * @return the job, or {@code null} if the service reports it as not found
 * @throws BigQueryException if any other service error occurs
 */
@Override
public Job getJob(String projectId, String jobId, String location, Map<Option, ?> options) {
  try {
    return bigquery
        .jobs()
        .get(projectId, jobId)
        .setLocation(location)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    // An absent job is surfaced as null rather than an exception.
    return null;
  }
}
/**
 * Verifies that {@link BigQueryException} exposes the HTTP code, message and reason it was
 * constructed with. The previous version constructed a single instance with code 500 and then
 * asserted codes 502/503/504/400 and a non-null reason against that same instance, which could
 * never pass; a fresh exception is now built before each assertion group.
 */
@Test
public void testBigQueryException() {
  BigQueryException exception = new BigQueryException(500, "message");
  assertEquals(500, exception.getCode());
  assertEquals("message", exception.getMessage());
  assertNull(exception.getReason());

  exception = new BigQueryException(502, "message");
  assertEquals(502, exception.getCode());
  assertEquals("message", exception.getMessage());
  assertNull(exception.getReason());

  exception = new BigQueryException(503, "message");
  assertEquals(503, exception.getCode());
  assertEquals("message", exception.getMessage());
  assertNull(exception.getReason());

  exception = new BigQueryException(504, "message");
  assertEquals(504, exception.getCode());
  assertEquals("message", exception.getMessage());
  assertNull(exception.getReason());

  exception = new BigQueryException(400, "message");
  assertEquals(400, exception.getCode());
  assertEquals("message", exception.getMessage());
  assertNull(exception.getReason());

  // NOTE(review): this group expects a non-null reason, which presumably comes from the
  // error-carrying constructor — confirm the exact constructor signature in BigQueryException.
  exception = new BigQueryException(504, "message", new BigQueryError("reason", null, null));
  assertEquals(504, exception.getCode());
  assertEquals("message", exception.getMessage());
  assertEquals("reason", exception.getReason());

  // An IOException cause maps to UNKNOWN_CODE, no reason, and the cause's message.
  exception = new BigQueryException(new SocketTimeoutException("socketTimeoutMessage"));
  assertEquals(BigQueryException.UNKNOWN_CODE, exception.getCode());
  assertNull(exception.getReason());
  assertEquals("socketTimeoutMessage", exception.getMessage());
/**
 * Determines whether {@code exception} can be worked around by retrying the request with a
 * smaller batch size.
 *
 * @param exception the {@link BigQueryException} to check.
 * @return true if this error is an error that can be fixed by retrying with a smaller batch
 *     size, or false otherwise.
 */
private static boolean isBatchSizeError(BigQueryException exception) {
  if (exception.getCode() != BAD_REQUEST_CODE) {
    return false;
  }
  /*
   * A 400 with no error or reason represents a request that is more than 10MB. This is not
   * documented but is referenced slightly under "Error codes" here:
   * https://cloud.google.com/bigquery/quota-policy
   * (by decreasing the batch size we can eventually expect to end up with a request under 10MB)
   */
  boolean undocumentedOversizeRequest =
      exception.getError() == null && exception.getReason() == null;
  /*
   * This is the error that the documentation claims google will return if a request exceeds
   * 10MB. if this actually ever happens...
   * todo distinguish this from other invalids (like invalid table schema).
   */
  boolean documentedOversizeRequest = INVALID_REASON.equals(exception.getReason());
  return undocumentedOversizeRequest || documentedOversizeRequest;
}
// Classify a translated BigQuery error and decide whether to retry.
// NOTE(review): this fragment is truncated here — the bodies of the quota and rate-limit
// branches continue outside this view.
if (err.getCode() == INTERNAL_SERVICE_ERROR
    || err.getCode() == SERVICE_UNAVAILABLE
    || err.getCode() == BAD_GATEWAY) {
  // Transient backend failures (500/503/502) are logged and counted toward a retry.
  logger.warn("BQ backend error: {}, attempting retry", err.getCode());
  retryCount++;
} else if (err.getCode() == FORBIDDEN
    && err.getError() != null
    && QUOTA_EXCEEDED_REASON.equals(err.getReason())) {
  // NOTE(review): this 403/quota-exceeded branch is empty as shown — confirm whether its
  // handling was lost in this view or it is intentionally a no-op.
} else if (err.getCode() == FORBIDDEN
    && err.getError() != null
    && RATE_LIMIT_EXCEEDED_REASON.equals(err.getReason())) {
/**
 * Deletes the given table.
 *
 * @return {@code true} on successful deletion, {@code false} when the table does not exist
 * @throws BigQueryException for any other service failure
 */
@Override
public boolean deleteTable(String projectId, String datasetId, String tableId) {
  try {
    bigquery.tables().delete(projectId, datasetId, tableId).execute();
    return true;
  } catch (IOException ex) {
    BigQueryException serviceError = translate(ex);
    if (serviceError.getCode() == HTTP_NOT_FOUND) {
      // Nothing to delete: report as a no-op rather than failing.
      return false;
    }
    throw serviceError;
  }
}
/**
 * Requests cancellation of the given job.
 *
 * @return {@code true} when the cancel request was accepted, {@code false} when the job does
 *     not exist
 * @throws BigQueryException for any other service failure
 */
@Override
public boolean cancel(String projectId, String jobId, String location) {
  try {
    bigquery.jobs().cancel(projectId, jobId).setLocation(location).execute();
    return true;
  } catch (IOException ex) {
    BigQueryException serviceError = translate(ex);
    if (serviceError.getCode() == HTTP_NOT_FOUND) {
      // Unknown job: report as a no-op rather than failing.
      return false;
    }
    throw serviceError;
  }
}
/**
 * Deletes the given dataset; contents are removed too when the {@code DELETE_CONTENTS} option
 * says so.
 *
 * @return {@code true} on successful deletion, {@code false} when the dataset does not exist
 * @throws BigQueryException for any other service failure
 */
@Override
public boolean deleteDataset(String projectId, String datasetId, Map<Option, ?> options) {
  try {
    bigquery
        .datasets()
        .delete(projectId, datasetId)
        .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options))
        .execute();
    return true;
  } catch (IOException ex) {
    BigQueryException serviceError = translate(ex);
    if (serviceError.getCode() == HTTP_NOT_FOUND) {
      // Nothing to delete: report as a no-op rather than failing.
      return false;
    }
    throw serviceError;
  }
}
/**
 * Retrieves the given dataset, limiting the returned fields per {@code options}.
 *
 * @return the dataset, or {@code null} when it does not exist
 * @throws BigQueryException for any other service failure
 */
@Override
public Dataset getDataset(String projectId, String datasetId, Map<Option, ?> options) {
  try {
    return bigquery
        .datasets()
        .get(projectId, datasetId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException serviceError = translate(ex);
    if (serviceError.getCode() == HTTP_NOT_FOUND) {
      // Not-found is mapped to null for the caller.
      return null;
    }
    throw serviceError;
  }
}
/**
 * Retrieves the given table, limiting the returned fields per {@code options}.
 *
 * @return the table, or {@code null} when it does not exist
 * @throws BigQueryException for any other service failure
 */
@Override
public Table getTable(
    String projectId, String datasetId, String tableId, Map<Option, ?> options) {
  try {
    return bigquery
        .tables()
        .get(projectId, datasetId, tableId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException serviceError = translate(ex);
    if (serviceError.getCode() == HTTP_NOT_FOUND) {
      // Not-found is mapped to null for the caller.
      return null;
    }
    throw serviceError;
  }
}
/**
 * Retrieves the given job in the given location, limiting the returned fields per
 * {@code options}.
 *
 * @return the job, or {@code null} when it does not exist
 * @throws BigQueryException for any other service failure
 */
@Override
public Job getJob(String projectId, String jobId, String location, Map<Option, ?> options) {
  try {
    return bigquery
        .jobs()
        .get(projectId, jobId)
        .setLocation(location)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException serviceError = translate(ex);
    if (serviceError.getCode() == HTTP_NOT_FOUND) {
      // Not-found is mapped to null for the caller.
      return null;
    }
    throw serviceError;
  }
}