/**
 * Lists the datasets of the given project.
 *
 * <p>Fix: the original chained {@code setPageToken} twice with the same value; the redundant
 * duplicate call has been removed.
 *
 * @param projectId the project whose datasets are listed
 * @param options listing options (ALL_DATASETS, MAX_RESULTS, PAGE_TOKEN)
 * @return a tuple of the next-page token and this page's datasets (empty iterable, never null)
 * @throws BigQueryException if the underlying RPC fails (translated from IOException)
 */
@Override
public Tuple<String, Iterable<Dataset>> listDatasets(String projectId, Map<Option, ?> options) {
  try {
    DatasetList datasetsList =
        bigquery
            .datasets()
            .list(projectId)
            .setAll(Option.ALL_DATASETS.getBoolean(options))
            .setMaxResults(Option.MAX_RESULTS.getLong(options))
            .setPageToken(Option.PAGE_TOKEN.getString(options))
            .execute();
    Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
    return Tuple.of(
        datasetsList.getNextPageToken(),
        Iterables.transform(
            // getDatasets() is null when the project has no datasets; normalize to empty.
            datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
            LIST_TO_DATASET));
  } catch (IOException ex) {
    throw translate(ex);
  }
}
do { try { client.datasets().insert(projectId, dataset).execute(); return; // SUCCEEDED } catch (GoogleJsonResponseException e) {
// Existence probe for the dataset that owns tableRef: a successful GET means the dataset is
// reachable; a missing dataset surfaces as an exception to the caller. NOTE(review): the
// returned Dataset is discarded — presumably only success/failure matters here; confirm.
service.datasets().get(tableRef.getProjectId(), tableRef.getDatasetId()).execute();
/**
 * Lists the datasets of the configured project.
 *
 * <p>Fix: removed a duplicated {@code setPageToken} call that set the same page token twice.
 *
 * @param options listing options (ALL_DATASETS, MAX_RESULTS, PAGE_TOKEN)
 * @return a tuple of the next-page token and this page's datasets (empty iterable, never null)
 * @throws BigQueryException if the underlying RPC fails (translated from IOException)
 */
@Override
public Tuple<String, Iterable<Dataset>> listDatasets(Map<Option, ?> options) {
  try {
    DatasetList datasetsList = bigquery.datasets()
        .list(this.options.projectId())
        .setAll(ALL_DATASETS.getBoolean(options))
        .setMaxResults(MAX_RESULTS.getLong(options))
        .setPageToken(PAGE_TOKEN.getString(options))
        .execute();
    Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
    return Tuple.of(datasetsList.getNextPageToken(),
        Iterables.transform(
            // getDatasets() is null when the project has no datasets; normalize to empty.
            datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
            new Function<DatasetList.Datasets, Dataset>() {
              @Override
              public Dataset apply(DatasetList.Datasets datasetPb) {
                // Copy the fields present on a list entry into a full Dataset object.
                return new Dataset()
                    .setDatasetReference(datasetPb.getDatasetReference())
                    .setFriendlyName(datasetPb.getFriendlyName())
                    .setId(datasetPb.getId())
                    .setKind(datasetPb.getKind());
              }
            }));
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/**
 * Lists the datasets of the configured project.
 *
 * <p>Fix: removed a duplicated {@code setPageToken} call that set the same page token twice.
 *
 * @param options listing options (ALL_DATASETS, MAX_RESULTS, PAGE_TOKEN)
 * @return a tuple of the next-page token and this page's datasets (empty iterable, never null)
 * @throws BigQueryException if the underlying RPC fails (translated from IOException)
 */
@Override
public Tuple<String, Iterable<Dataset>> listDatasets(Map<Option, ?> options) {
  try {
    DatasetList datasetsList = bigquery.datasets()
        .list(this.options.projectId())
        .setAll(ALL_DATASETS.getBoolean(options))
        .setMaxResults(MAX_RESULTS.getLong(options))
        .setPageToken(PAGE_TOKEN.getString(options))
        .execute();
    Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
    return Tuple.of(datasetsList.getNextPageToken(),
        Iterables.transform(
            // getDatasets() is null when the project has no datasets; normalize to empty.
            datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
            new Function<DatasetList.Datasets, Dataset>() {
              @Override
              public Dataset apply(DatasetList.Datasets datasetPb) {
                // Copy the fields present on a list entry into a full Dataset object.
                return new Dataset()
                    .setDatasetReference(datasetPb.getDatasetReference())
                    .setFriendlyName(datasetPb.getFriendlyName())
                    .setId(datasetPb.getId())
                    .setKind(datasetPb.getKind());
              }
            }));
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/** * Deletes the temporary dataset, including all of the work tables. * * @param context the job's context. * @throws IOException */ @Override public void cleanupJob(JobContext context) throws IOException { logger.atFine().log("cleanupJob(%s)", lazy(() -> HadoopToStringUtil.toString(context))); Bigquery.Datasets datasets = bigQueryHelper.getRawBigquery().datasets(); try { logger.atFine().log( "cleanupJob: Deleting dataset '%s' from project '%s'", tempTableRef.getDatasetId(), tempTableRef.getProjectId()); datasets.delete(tempTableRef.getProjectId(), tempTableRef.getDatasetId()) .setDeleteContents(true) .execute(); } catch (IOException e) { // Error is swallowed as job has completed successfully and the only failure is deleting // temporary data. // This matches the FileOutputCommitter pattern. logger.atWarning().withCause(e).log( "Could not delete dataset. Temporary data not cleaned up."); } }
/** * Creates the temporary dataset that will contain all of the task work tables. * * @param context the job's context. * @throws IOException on IO Error. */ @Override public void setupJob(JobContext context) throws IOException { logger.atFine().log("setupJob(%s)", lazy(() -> HadoopToStringUtil.toString(context))); // Create dataset. DatasetReference datasetReference = new DatasetReference(); datasetReference.setProjectId(tempTableRef.getProjectId()); datasetReference.setDatasetId(tempTableRef.getDatasetId()); Dataset tempDataset = new Dataset(); tempDataset.setDatasetReference(datasetReference); tempDataset.setLocation(getLocation(context)); // Insert dataset into Bigquery. Bigquery.Datasets datasets = bigQueryHelper.getRawBigquery().datasets(); // TODO(user): Maybe allow the dataset to exist already instead of throwing 409 here. logger.atFine().log( "Creating temporary dataset '%s' for project '%s'", tempTableRef.getDatasetId(), tempTableRef.getProjectId()); // NB: Even though this "insert" makes it look like we can specify a different projectId than // the one which owns the dataset, it actually has to match. datasets.insert(tempTableRef.getProjectId(), tempDataset).execute(); }
/**
 * Lists the datasets of the given project.
 *
 * <p>Fix: the original chained {@code setPageToken} twice with the same value; the redundant
 * duplicate call has been removed.
 *
 * @param projectId the project whose datasets are listed
 * @param options listing options (ALL_DATASETS, MAX_RESULTS, PAGE_TOKEN)
 * @return a tuple of the next-page token and this page's datasets (empty iterable, never null)
 * @throws BigQueryException if the underlying RPC fails (translated from IOException)
 */
@Override
public Tuple<String, Iterable<Dataset>> listDatasets(String projectId, Map<Option, ?> options) {
  try {
    DatasetList datasetsList =
        bigquery
            .datasets()
            .list(projectId)
            .setAll(Option.ALL_DATASETS.getBoolean(options))
            .setMaxResults(Option.MAX_RESULTS.getLong(options))
            .setPageToken(Option.PAGE_TOKEN.getString(options))
            .execute();
    Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
    return Tuple.of(
        datasetsList.getNextPageToken(),
        Iterables.transform(
            // getDatasets() is null when the project has no datasets; normalize to empty.
            datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
            LIST_TO_DATASET));
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/**
 * Display all BigQuery datasets associated with a project
 *
 * @param bigquery an authorized BigQuery client
 * @param projectId a string containing the current project ID
 * @throws IOException if the list request fails
 */
public static void listDatasets(Bigquery bigquery, String projectId) throws IOException {
  DatasetList datasetList = bigquery.datasets().list(projectId).execute();
  List<DatasetList.Datasets> datasets = datasetList.getDatasets();
  // A project with no datasets returns null rather than an empty list; print nothing then.
  if (datasets == null) {
    return;
  }
  System.out.println("Available datasets\n----------------");
  System.out.println(datasets.toString());
  for (DatasetList.Datasets dataset : datasets) {
    System.out.format("%s\n", dataset.getDatasetReference().getDatasetId());
  }
}
/**
 * {@inheritDoc}
 *
 * <p>Tries executing the RPC for at most {@code MAX_RPC_RETRIES} times until it succeeds.
 *
 * <p>Fix: the failure message said "Unable to delete table" although this deletes a dataset
 * (copy-paste error from the table-delete sibling).
 *
 * @throws IOException if it exceeds {@code MAX_RPC_RETRIES} attempts.
 */
@Override
public void deleteDataset(String projectId, String datasetId)
    throws IOException, InterruptedException {
  executeWithRetries(
      client.datasets().delete(projectId, datasetId),
      String.format(
          "Unable to delete dataset: %s, aborting after %d retries.",
          datasetId, MAX_RPC_RETRIES),
      Sleeper.DEFAULT,
      createDefaultBackoff(),
      ALWAYS_RETRY);
}
/**
 * {@inheritDoc}
 *
 * <p>Tries executing the RPC for at most {@code MAX_RPC_RETRIES} times until it succeeds.
 *
 * @throws IOException if it exceeds {@code MAX_RPC_RETRIES} attempts.
 */
@Override
public Dataset getDataset(String projectId, String datasetId)
    throws IOException, InterruptedException {
  // Build the failure message up front; it is only used if every attempt fails.
  String failureMessage =
      String.format(
          "Unable to get dataset: %s, aborting after %d retries.", datasetId, MAX_RPC_RETRIES);
  return executeWithRetries(
      client.datasets().get(projectId, datasetId),
      failureMessage,
      Sleeper.DEFAULT,
      createDefaultBackoff(),
      DONT_RETRY_NOT_FOUND);
}
/**
 * Deletes the given dataset.
 *
 * @param projectId project that owns the dataset
 * @param datasetId id of the dataset to delete
 * @param options delete options (DELETE_CONTENTS)
 * @return {@code true} if the dataset was deleted, {@code false} if it did not exist
 * @throws BigQueryException for any service failure other than 404
 */
@Override
public boolean deleteDataset(String projectId, String datasetId, Map<Option, ?> options) {
  try {
    Bigquery.Datasets.Delete delete = bigquery.datasets().delete(projectId, datasetId);
    delete.setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options));
    delete.execute();
    return true;
  } catch (IOException ex) {
    BigQueryException serviceException = translate(ex);
    if (serviceException.getCode() == HTTP_NOT_FOUND) {
      // Dataset was already gone; report "not deleted" rather than failing.
      return false;
    }
    throw serviceException;
  }
}
void deleteDataset(String projectId, String datasetId) throws IOException { if (datasetExists(projectId, datasetId)) { deleteTables(projectId, datasetId); try { client.datasets().delete(projectId, datasetId).execute(); } catch (GoogleJsonResponseException e) { if (e.getStatusCode() == HttpStatusCodes.STATUS_CODE_NOT_FOUND) { // Already deleted return; } throw e; } } }
/**
 * Creates the given dataset, treating "already exists" (HTTP 409) as success.
 *
 * <p>Fix: the debug message had two {@code {}} placeholders but was passed only one argument,
 * so the second placeholder was never filled; it now logs project and dataset ids explicitly.
 *
 * @param projectId project in which to create the dataset
 * @param dataset dataset definition to insert
 * @throws IOException on any failure other than a 409 conflict
 */
void createDataset(String projectId, Dataset dataset) throws IOException {
  try {
    client.datasets().insert(projectId, dataset).execute();
  } catch (GoogleJsonResponseException e) {
    if (e.getStatusCode() == HttpStatusCodes.STATUS_CODE_CONFLICT) {
      logger.debug(
          "Dataset already exists: {}:{}",
          projectId,
          dataset.getDatasetReference().getDatasetId());
    } else {
      throw e;
    }
  }
}
/**
 * Fetches a dataset, optionally restricting the returned fields.
 *
 * @param projectId project that owns the dataset
 * @param datasetId id of the dataset to fetch
 * @param options read options (FIELDS partial-response selector)
 * @return the dataset, or {@code null} when it does not exist
 * @throws BigQueryException for any service failure other than 404
 */
@Override
public Dataset getDataset(String projectId, String datasetId, Map<Option, ?> options) {
  try {
    Bigquery.Datasets.Get get = bigquery.datasets().get(projectId, datasetId);
    get.setFields(Option.FIELDS.getString(options));
    return get.execute();
  } catch (IOException ex) {
    BigQueryException serviceException = translate(ex);
    if (serviceException.getCode() == HTTP_NOT_FOUND) {
      // Absent dataset is reported as null rather than an exception.
      return null;
    }
    throw serviceException;
  }
}
/**
 * Creates a temporary dataset in the configured project, retrying with backoff.
 *
 * @param datasetId id of the dataset to create
 * @param location optional dataset location; left unset when {@code null}
 */
private void createDataset(String datasetId, @Nullable String location)
    throws IOException, InterruptedException {
  // Assemble the dataset definition: reference first, then optional location.
  DatasetReference reference = new DatasetReference();
  reference.setProjectId(projectId);
  reference.setDatasetId(datasetId);
  Dataset dataset = new Dataset();
  dataset.setDatasetReference(reference);
  if (location != null) {
    dataset.setLocation(location);
  }
  String errorMessage =
      String.format(
          "Error when trying to create the temporary dataset %s in project %s.",
          datasetId, projectId);
  executeWithBackOff(client.datasets().insert(projectId, dataset), errorMessage);
}
/**
 * Deletes a dataset in the configured project.
 *
 * @param datasetId id of the dataset to delete
 * @param options delete options (DELETE_CONTENTS)
 * @return {@code true} on successful deletion, {@code false} if the dataset was not found
 * @throws BigQueryException for any service failure other than 404
 */
@Override
public boolean deleteDataset(String datasetId, Map<Option, ?> options) {
  try {
    bigquery
        .datasets()
        .delete(this.options.projectId(), datasetId)
        .setDeleteContents(DELETE_CONTENTS.getBoolean(options))
        .execute();
    return true;
  } catch (IOException ex) {
    BigQueryException serviceException = translate(ex);
    if (serviceException.code() != HTTP_NOT_FOUND) {
      throw serviceException;
    }
    // 404: the dataset was already gone.
    return false;
  }
}
/**
 * Deletes a dataset from the project configured in {@code this.options}.
 *
 * @param datasetId id of the dataset to delete
 * @param options delete options (DELETE_CONTENTS)
 * @return {@code true} if the delete succeeded, {@code false} if the dataset did not exist
 * @throws BigQueryException for any service failure other than 404
 */
@Override
public boolean deleteDataset(String datasetId, Map<Option, ?> options) {
  try {
    Bigquery.Datasets.Delete request =
        bigquery.datasets().delete(this.options.projectId(), datasetId);
    request.setDeleteContents(DELETE_CONTENTS.getBoolean(options));
    request.execute();
    return true;
  } catch (IOException ex) {
    BigQueryException serviceException = translate(ex);
    if (serviceException.code() == HTTP_NOT_FOUND) {
      // Nothing to delete — signal "not found" instead of throwing.
      return false;
    }
    throw serviceException;
  }
}
/**
 * Best-effort deletion of a dataset: first deletes every table it contains, then the dataset
 * itself. All failures are logged and swallowed — callers never see an exception.
 *
 * <p>Fix: {@code TableList.getTables()} returns {@code null} for an empty dataset, which
 * previously triggered a NullPointerException that was logged as a spurious "listing"
 * failure; the null case is now skipped cleanly.
 *
 * @param projectId project that owns the dataset
 * @param datasetId id of the dataset to delete
 */
public void deleteDataset(String projectId, String datasetId) {
  try {
    TableList tables = bqClient.tables().list(projectId, datasetId).execute();
    if (tables.getTables() != null) {
      for (Tables table : tables.getTables()) {
        this.deleteTable(projectId, datasetId, table.getTableReference().getTableId());
      }
    }
  } catch (Exception e) {
    // Best-effort: table listing/deletion problems should not block the dataset delete.
    LOG.debug("Exceptions caught when listing all tables: " + e.getMessage());
  }
  try {
    bqClient.datasets().delete(projectId, datasetId).execute();
    LOG.info("Successfully deleted dataset: " + datasetId);
  } catch (Exception e) {
    LOG.debug("Exceptions caught when deleting dataset: " + e.getMessage());
  }
}