/**
 * Creates a new BigQuery dataset in the given project.
 *
 * <p>Failures are logged rather than propagated — presumably callers treat creation as
 * best-effort (e.g. the dataset may already exist); TODO confirm against callers.
 * Fix: the original logged failures at DEBUG with only {@code getMessage()}, which
 * silently discarded the stack trace and hid real errors; log at WARN with the full
 * throwable instead.
 *
 * @param projectId the project that will own the new dataset
 * @param datasetId the id of the dataset to create
 */
public void createNewDataset(String projectId, String datasetId) {
  try {
    bqClient
        .datasets()
        .insert(
            projectId,
            new Dataset().setDatasetReference(new DatasetReference().setDatasetId(datasetId)))
        .execute();
    LOG.info("Successfully created new dataset : " + datasetId);
  } catch (Exception e) {
    // WARN + throwable so the failure is visible and the stack trace is preserved.
    LOG.warn("Exceptions caught when creating new dataset: " + e.getMessage(), e);
  }
}
// NOTE(review): incomplete fragment — the backoff/retry handling in the catch body and the
// do/while loop terminator are outside this view; review in the full file before changing.
do {
  try {
    client.datasets().insert(projectId, dataset).execute();
    return; // SUCCEEDED
  } catch (GoogleJsonResponseException e) {
/**
 * Creates a dataset with the given id in {@code projectId}, retrying via
 * {@code executeWithBackOff} on transient failures.
 *
 * @param datasetId id of the dataset to create
 * @param location optional dataset location; left unset when {@code null}
 * @throws IOException on a non-retryable API error
 * @throws InterruptedException if interrupted while backing off
 */
private void createDataset(String datasetId, @Nullable String location)
    throws IOException, InterruptedException {
  DatasetReference datasetRef =
      new DatasetReference().setProjectId(projectId).setDatasetId(datasetId);
  Dataset datasetToCreate = new Dataset().setDatasetReference(datasetRef);
  if (location != null) {
    datasetToCreate.setLocation(location);
  }
  String errorContext =
      String.format(
          "Error when trying to create the temporary dataset %s in project %s.",
          datasetId, projectId);
  executeWithBackOff(client.datasets().insert(projectId, datasetToCreate), errorContext);
}
// NOTE(review): incomplete fragment — the statement that constructs newDataset begins
// outside this view; this is only the tail of that builder chain plus the insert call.
    .setDatasetReference(
        new DatasetReference().setProjectId(projectId).setDatasetId(datasetId));
datasetService.insert(projectId, newDataset).execute();
/** * Creates the temporary dataset that will contain all of the task work tables. * * @param context the job's context. * @throws IOException on IO Error. */ @Override public void setupJob(JobContext context) throws IOException { logger.atFine().log("setupJob(%s)", lazy(() -> HadoopToStringUtil.toString(context))); // Create dataset. DatasetReference datasetReference = new DatasetReference(); datasetReference.setProjectId(tempTableRef.getProjectId()); datasetReference.setDatasetId(tempTableRef.getDatasetId()); Dataset tempDataset = new Dataset(); tempDataset.setDatasetReference(datasetReference); tempDataset.setLocation(getLocation(context)); // Insert dataset into Bigquery. Bigquery.Datasets datasets = bigQueryHelper.getRawBigquery().datasets(); // TODO(user): Maybe allow the dataset to exist already instead of throwing 409 here. logger.atFine().log( "Creating temporary dataset '%s' for project '%s'", tempTableRef.getDatasetId(), tempTableRef.getProjectId()); // NB: Even though this "insert" makes it look like we can specify a different projectId than // the one which owns the dataset, it actually has to match. datasets.insert(tempTableRef.getProjectId(), tempDataset).execute(); }
/**
 * Creates the given dataset in {@code projectId}, treating an "already exists"
 * conflict (HTTP 409) as success.
 *
 * @param projectId project that will own the dataset
 * @param dataset the dataset resource to create
 * @throws IOException on any API error other than a 409 CONFLICT
 */
void createDataset(String projectId, Dataset dataset) throws IOException {
  try {
    client.datasets().insert(projectId, dataset)
        .execute();
  } catch (GoogleJsonResponseException e) {
    if (e.getStatusCode() == HttpStatusCodes.STATUS_CODE_CONFLICT) {
      // A 409 means the dataset already exists, which is fine for our purposes.
      // Fix: the original passed a single argument for two "{}" placeholders, so the
      // second placeholder was never filled in the log output.
      logger.debug(
          "Dataset already exists: {}:{}",
          projectId,
          dataset.getDatasetReference().getDatasetId());
    } else {
      throw e;
    }
  }
}
/**
 * Inserts the dataset into the configured project, returning only the fields
 * selected by the FIELDS request option.
 *
 * @param dataset the dataset resource to insert
 * @param options request options controlling the returned fields
 * @return the created dataset as returned by the BigQuery API
 */
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  // Resolve the partial-response field selector before issuing the request.
  String selectedFields = FIELDS.getString(options);
  try {
    return bigquery
        .datasets()
        .insert(this.options.projectId(), dataset)
        .setFields(selectedFields)
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/**
 * Inserts the dataset into the service's configured project.
 *
 * @param dataset the dataset resource to insert
 * @param options request options; the FIELDS option selects which fields are returned
 * @return the created dataset as returned by the BigQuery API
 */
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  try {
    // The target project comes from service configuration, not from the dataset itself.
    String configuredProject = this.options.projectId();
    return bigquery.datasets()
        .insert(configuredProject, dataset)
        .setFields(FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/**
 * Inserts the dataset into the project named by its own dataset reference.
 *
 * @param dataset the dataset resource to insert; its reference supplies the project id
 * @param options request options; the FIELDS option selects which fields are returned
 * @return the created dataset as returned by the BigQuery API
 */
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  try {
    // Unlike the config-driven variant, the owning project is read off the dataset.
    String targetProject = dataset.getDatasetReference().getProjectId();
    return bigquery.datasets()
        .insert(targetProject, dataset)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/**
 * Creates the dataset in the project identified by the dataset's own reference,
 * applying the FIELDS partial-response option to the returned resource.
 *
 * @param dataset the dataset resource to insert; its reference supplies the project id
 * @param options request options controlling the returned fields
 * @return the created dataset as returned by the BigQuery API
 */
@Override
public Dataset create(Dataset dataset, Map<Option, ?> options) {
  String fieldSelector = Option.FIELDS.getString(options);
  try {
    return bigquery
        .datasets()
        .insert(dataset.getDatasetReference().getProjectId(), dataset)
        .setFields(fieldSelector)
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}