@Override
public boolean deleteDataset(String projectId, String datasetId, Map<Option, ?> options) {
  // Issues a datasets.delete call; DELETE_CONTENTS controls whether contained
  // tables are removed recursively.
  try {
    bigquery
        .datasets()
        .delete(projectId, datasetId)
        .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options))
        .execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    // An already-missing dataset is reported as "false" rather than an error.
    if (translated.getCode() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return false;
  }
}
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  // Inserts the dataset under the project named in its own DatasetReference.
  String project = dataset.getDatasetReference().getProjectId();
  try {
    return bigquery
        .datasets()
        .insert(project, dataset)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public Dataset getDataset(String projectId, String datasetId, Map<Option, ?> options) {
  // Fetches dataset metadata; a 404 is mapped to null instead of an exception.
  try {
    return bigquery
        .datasets()
        .get(projectId, datasetId)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException converted = translate(ex);
    if (converted.getCode() != HTTP_NOT_FOUND) {
      throw converted;
    }
    return null;
  }
}
@Override
public Dataset patch(Dataset dataset, Map<Option, ?> options) {
  // Partially updates the dataset identified by its own DatasetReference.
  DatasetReference ref = dataset.getDatasetReference();
  try {
    return bigquery
        .datasets()
        .patch(ref.getProjectId(), ref.getDatasetId(), dataset)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public Tuple<String, Iterable<Dataset>> listDatasets(String projectId, Map<Option, ?> options) {
  // Lists datasets in the given project, returning the next-page token together
  // with the page's datasets transformed via LIST_TO_DATASET.
  try {
    DatasetList datasetsList =
        bigquery
            .datasets()
            .list(projectId)
            .setAll(Option.ALL_DATASETS.getBoolean(options))
            .setMaxResults(Option.MAX_RESULTS.getLong(options))
            // Fix: the page token was previously set twice in the chain; once is enough.
            .setPageToken(Option.PAGE_TOKEN.getString(options))
            .execute();
    // getDatasets() returns null for an empty result page; substitute an empty
    // list so callers never observe null.
    Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
    return Tuple.of(
        datasetsList.getNextPageToken(),
        Iterables.transform(
            datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
            LIST_TO_DATASET));
  } catch (IOException ex) {
    throw translate(ex);
  }
}
private void deleteDataset(String datasetId) throws IOException, InterruptedException {
  // Deletes the temporary dataset, retrying with backoff; the message is shown
  // if every attempt fails.
  String failureMessage =
      String.format(
          "Error when trying to delete the temporary dataset %s in project %s. "
              + "Manual deletion may be required.",
          datasetId, projectId);
  executeWithBackOff(client.datasets().delete(projectId, datasetId), failureMessage);
}
public void createNewDataset(String projectId, String datasetId) {
  // Best-effort creation of an empty dataset; failures (e.g. dataset already
  // exists) are logged at debug level and swallowed on purpose.
  Dataset dataset =
      new Dataset().setDatasetReference(new DatasetReference().setDatasetId(datasetId));
  try {
    bqClient.datasets().insert(projectId, dataset).execute();
    LOG.info("Successfully created new dataset : " + datasetId);
  } catch (Exception e) {
    // Fix: pass the throwable to the logger so the stack trace is preserved
    // (getMessage() alone may even be null).
    LOG.debug("Exceptions caught when creating new dataset: " + e.getMessage(), e);
  }
}
boolean datasetExists(String projectId, String datasetId) throws IOException {
  // Probes for the dataset with a GET; a 404 status means "does not exist",
  // any other HTTP error propagates to the caller.
  try {
    client.datasets().get(projectId, datasetId).execute();
  } catch (GoogleJsonResponseException e) {
    if (e.getStatusCode() != HttpStatusCodes.STATUS_CODE_NOT_FOUND) {
      throw e;
    }
    return false;
  }
  return true;
}
public void deleteDataset(String projectId, String datasetId) {
  // Best-effort cleanup: delete every table in the dataset, then the dataset
  // itself. Failures in either phase are logged at debug level and swallowed.
  try {
    TableList tables = bqClient.tables().list(projectId, datasetId).execute();
    // Fix: getTables() is null when the dataset contains no tables; guard
    // against the NullPointerException the old for-loop would hit.
    if (tables.getTables() != null) {
      for (Tables table : tables.getTables()) {
        this.deleteTable(projectId, datasetId, table.getTableReference().getTableId());
      }
    }
  } catch (Exception e) {
    // Fix: include the throwable so the stack trace is not lost.
    LOG.debug("Exceptions caught when listing all tables: " + e.getMessage(), e);
  }
  try {
    bqClient.datasets().delete(projectId, datasetId).execute();
    LOG.info("Successfully deleted dataset: " + datasetId);
  } catch (Exception e) {
    LOG.debug("Exceptions caught when deleting dataset: " + e.getMessage(), e);
  }
}
private void createDataset(String datasetId, @Nullable String location)
    throws IOException, InterruptedException {
  // Builds the temporary dataset (optionally pinned to a location) and inserts
  // it with retries via executeWithBackOff.
  DatasetReference reference =
      new DatasetReference().setProjectId(projectId).setDatasetId(datasetId);
  Dataset dataset = new Dataset().setDatasetReference(reference);
  if (location != null) {
    dataset.setLocation(location);
  }
  String failureMessage =
      String.format(
          "Error when trying to create the temporary dataset %s in project %s.",
          datasetId, projectId);
  executeWithBackOff(client.datasets().insert(projectId, dataset), failureMessage);
}
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  // Inserts the dataset into the service's configured project.
  try {
    return bigquery
        .datasets()
        .insert(this.options.projectId(), dataset)
        .setFields(FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  // Inserts the dataset into the project this service is configured for;
  // IOExceptions are translated into BigQueryExceptions.
  String project = this.options.projectId();
  try {
    return bigquery.datasets().insert(project, dataset)
        .setFields(FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  // The target project comes from the dataset's own reference, not from service
  // configuration.
  try {
    String projectId = dataset.getDatasetReference().getProjectId();
    return bigquery
        .datasets()
        .insert(projectId, dataset)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public Dataset getDataset(String datasetId, Map<Option, ?> options) {
  // Fetches the dataset from the configured project; 404 becomes null.
  try {
    return bigquery
        .datasets()
        .get(this.options.projectId(), datasetId)
        .setFields(FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.code() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return null;
  }
}
@Override
public Dataset getDataset(String datasetId, Map<Option, ?> options) {
  // Looks up the dataset in the service's project. A NOT_FOUND response is an
  // expected outcome and yields null; every other failure is rethrown.
  try {
    Dataset result = bigquery.datasets()
        .get(this.options.projectId(), datasetId)
        .setFields(FIELDS.getString(options))
        .execute();
    return result;
  } catch (IOException ex) {
    BigQueryException wrapped = translate(ex);
    if (wrapped.code() == HTTP_NOT_FOUND) {
      return null;
    }
    throw wrapped;
  }
}
@Override
public Dataset patch(Dataset dataset, Map<Option, ?> options) {
  // Partial update: the dataset id comes from the payload's reference, the
  // project from service configuration.
  DatasetReference ref = dataset.getDatasetReference();
  try {
    return bigquery
        .datasets()
        .patch(this.options.projectId(), ref.getDatasetId(), dataset)
        .setFields(FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public Dataset patch(Dataset dataset, Map<Option, ?> options) {
  // Applies a partial update to the dataset named by its own reference, within
  // the service-configured project.
  try {
    String datasetId = dataset.getDatasetReference().getDatasetId();
    return bigquery.datasets()
        .patch(this.options.projectId(), datasetId, dataset)
        .setFields(FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
@Override
public boolean deleteDataset(String datasetId, Map<Option, ?> options) {
  // Deletes the dataset from the configured project; returns false if it was
  // already gone, true on success.
  try {
    bigquery.datasets()
        .delete(this.options.projectId(), datasetId)
        .setDeleteContents(DELETE_CONTENTS.getBoolean(options))
        .execute();
    return true;
  } catch (IOException ex) {
    BigQueryException translated = translate(ex);
    if (translated.code() != HTTP_NOT_FOUND) {
      throw translated;
    }
    return false;
  }
}
@Override
public boolean deleteDataset(String datasetId, Map<Option, ?> options) {
  // Removes the dataset (optionally with its contents, per DELETE_CONTENTS).
  // A NOT_FOUND error maps to false; other errors are rethrown translated.
  String project = this.options.projectId();
  try {
    bigquery
        .datasets()
        .delete(project, datasetId)
        .setDeleteContents(DELETE_CONTENTS.getBoolean(options))
        .execute();
    return true;
  } catch (IOException ex) {
    BigQueryException wrapped = translate(ex);
    if (wrapped.code() == HTTP_NOT_FOUND) {
      return false;
    }
    throw wrapped;
  }
}
@Override
public Dataset patch(Dataset dataset, Map<Option, ?> options) {
  // Patches the dataset addressed entirely by its own reference (project + id).
  try {
    DatasetReference target = dataset.getDatasetReference();
    return bigquery.datasets()
        .patch(target.getProjectId(), target.getDatasetId(), dataset)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}