/** Converts this dataset identity into its API protocol representation. */
DatasetReference toPb() {
  DatasetReference reference = new DatasetReference();
  reference.setProjectId(project);
  reference.setDatasetId(dataset);
  return reference;
}
@Test
public void testListToDataset() {
  // The same reference instance is used for the listing entry and the expected output.
  DatasetReference datasetRef =
      new DatasetReference().setProjectId("project-id").setDatasetId("dataset-id");
  DatasetList.Datasets listDataSet =
      new DatasetList.Datasets()
          .setDatasetReference(datasetRef)
          .setId("project-id:dataset-id")
          .setFriendlyName("friendly")
          .setKind("bigquery#dataset")
          .setLabels(Collections.singletonMap("foo", "bar"));

  Dataset converted = HttpBigQueryRpc.LIST_TO_DATASET.apply(listDataSet);

  // Every field of the listing entry must survive the conversion unchanged.
  assertThat(converted.getKind()).isEqualTo("bigquery#dataset");
  assertThat(converted.getId()).isEqualTo("project-id:dataset-id");
  assertThat(converted.getFriendlyName()).isEqualTo("friendly");
  assertThat(converted.getDatasetReference()).isEqualTo(datasetRef);
  assertThat(converted.getLabels()).containsExactly("foo", "bar");
}
}
// Build a dataset description and attempt to create it (best effort).
Dataset dataset = new Dataset();
DatasetReference datasetRef = new DatasetReference();
datasetRef.setProjectId(PROJECT_ID);
datasetRef.setDatasetId(DATASET_ID);
dataset.setDatasetReference(datasetRef);
try {
  bigquery.datasets().insert(PROJECT_ID, dataset).execute();
} catch (IOException ignored) {
  // Deliberately tolerated: creation is best-effort (e.g. a 409 when the dataset already
  // exists). Later operations against the dataset will surface any real problem.
}
/**
 * Parses {@code s} into a {@link DatasetReference}, falling back to {@code defaultProjectId}
 * when the string does not name a project explicitly.
 *
 * @throws IllegalArgumentException if {@code s} does not match the reference pattern
 */
static DatasetReference datasetReference(Optional<String> defaultProjectId, String s) {
  Matcher matcher = DATASET_REFERENCE_PATTERN.matcher(s);
  if (!matcher.matches()) {
    throw new IllegalArgumentException("Bad dataset reference: " + s);
  }
  // Prefer the project named in the string; otherwise use the default, which may be absent.
  String project = Optional.fromNullable(matcher.group("project")).or(defaultProjectId).orNull();
  return new DatasetReference().setProjectId(project).setDatasetId(matcher.group("dataset"));
}
}
/** Returns the API {@code DatasetReference} built from this object's {@code project} and {@code dataset} fields. */
DatasetReference toPb() { return new DatasetReference().setProjectId(project).setDatasetId(dataset); }
/** Builds the protocol representation of this dataset identity. */
DatasetReference toPb() {
  // Setter order is irrelevant; the two fields are independent.
  return new DatasetReference()
      .setDatasetId(dataset)
      .setProjectId(project);
}
/** Converts this id into its {@code DatasetReference} wire form (project + dataset). */
DatasetReference toPb() { return new DatasetReference().setProjectId(project).setDatasetId(dataset); }
/**
 * Creates dataset {@code datasetId} in {@code projectId}, tolerating failures.
 *
 * <p>Any exception (including "already exists") is caught and logged rather than propagated,
 * so callers cannot distinguish success from failure except via the log output.
 */
public void createNewDataset(String projectId, String datasetId) {
  try {
    bqClient
        .datasets()
        .insert(
            projectId,
            new Dataset()
                .setDatasetReference(
                    // Set the project explicitly so the reference is self-contained and
                    // consistent with the project the insert call targets.
                    new DatasetReference().setProjectId(projectId).setDatasetId(datasetId)))
        .execute();
    LOG.info("Successfully created new dataset : " + datasetId);
  } catch (Exception e) {
    // NOTE(review): the failure is swallowed and only the message is logged at debug level;
    // if silent failure is not intended, log the full exception at a higher level.
    LOG.debug("Exceptions caught when creating new dataset: " + e.getMessage());
  }
}
/** Materializes the API {@code Dataset} object described by {@code config}. */
private Dataset dataset(String defaultProjectId, DatasetConfig config) {
  DatasetReference reference =
      new DatasetReference()
          .setProjectId(config.project().or(defaultProjectId))
          .setDatasetId(config.id());
  Dataset dataset = new Dataset();
  dataset.setDatasetReference(reference);
  // Optional settings: absent config values map to null (left unset on the API object).
  dataset.setFriendlyName(config.friendly_name().orNull());
  dataset.setDefaultTableExpirationMs(
      config.default_table_expiration().transform(d -> d.getDuration().toMillis()).orNull());
  dataset.setLocation(config.location().orNull());
  dataset.setAccess(config.access().orNull());
  dataset.setLabels(config.labels().orNull());
  return dataset;
}
/**
 * Creates dataset {@code datasetId} in the configured project, optionally pinning it to
 * {@code location}, retrying the insert with backoff.
 */
private void createDataset(String datasetId, @Nullable String location)
    throws IOException, InterruptedException {
  Dataset dataset =
      new Dataset()
          .setDatasetReference(
              new DatasetReference().setProjectId(projectId).setDatasetId(datasetId));
  if (location != null) {
    dataset.setLocation(location);
  }
  executeWithBackOff(
      client.datasets().insert(projectId, dataset),
      String.format(
          "Error when trying to create the temporary dataset %s in project %s.",
          datasetId, projectId));
}
/**
 * Returns a minimal {@code Dataset} for the given ids, or throws not-found if nothing was
 * registered for that (project, dataset) pair.
 */
@Override
public Dataset getDataset(String projectId, String datasetId)
    throws IOException, InterruptedException {
  synchronized (tables) {
    Map<String, TableContainer> dataset = tables.get(projectId, datasetId);
    if (dataset == null) {
      // Message fixed: this lookup is for a dataset, not a table.
      throwNotFound(
          "Tried to get a dataset %s:%s, but no such dataset was set", projectId, datasetId);
    }
    return new Dataset()
        .setDatasetReference(
            new DatasetReference().setDatasetId(datasetId).setProjectId(projectId));
  }
}
// NOTE(review): fragment — the method signature precedes this view and the body continues
// past it; only the construction of the (project, dataset) reference is visible here.
throws IOException, InterruptedException { DatasetReference datasetRef = new DatasetReference().setProjectId(projectId).setDatasetId(datasetId);
/** * Creates the temporary dataset that will contain all of the task work tables. * * @param context the job's context. * @throws IOException on IO Error. */ @Override public void setupJob(JobContext context) throws IOException { logger.atFine().log("setupJob(%s)", lazy(() -> HadoopToStringUtil.toString(context))); // Create dataset. DatasetReference datasetReference = new DatasetReference(); datasetReference.setProjectId(tempTableRef.getProjectId()); datasetReference.setDatasetId(tempTableRef.getDatasetId()); Dataset tempDataset = new Dataset(); tempDataset.setDatasetReference(datasetReference); tempDataset.setLocation(getLocation(context)); // Insert dataset into Bigquery. Bigquery.Datasets datasets = bigQueryHelper.getRawBigquery().datasets(); // TODO(user): Maybe allow the dataset to exist already instead of throwing 409 here. logger.atFine().log( "Creating temporary dataset '%s' for project '%s'", tempTableRef.getDatasetId(), tempTableRef.getProjectId()); // NB: Even though this "insert" makes it look like we can specify a different projectId than // the one which owns the dataset, it actually has to match. datasets.insert(tempTableRef.getProjectId(), tempDataset).execute(); }
// NOTE(review): fragment — the assignment target (presumably "Dataset newDataset =") sits
// outside this view; the freshly described dataset is then inserted via the service client.
new Dataset() .setDatasetReference( new DatasetReference().setProjectId(projectId).setDatasetId(datasetId)); datasetService.insert(projectId, newDataset).execute();