/**
 * Opens a channel for streaming data into the table targeted by the given load job, resolving
 * the service's project id onto both the job id and the write configuration.
 */
@Override
public TableDataWriteChannel writer(
    JobId jobId, WriteChannelConfiguration writeChannelConfiguration) {
  String projectId = getOptions().getProjectId();
  JobId qualifiedJobId = jobId.setProjectId(projectId);
  WriteChannelConfiguration qualifiedConfiguration =
      writeChannelConfiguration.setProjectId(projectId);
  return new TableDataWriteChannel(getOptions(), qualifiedJobId, qualifiedConfiguration);
}
/**
 * Returns a copy of this job info with the given project id applied to the configuration and,
 * when one is set, to the job id as well.
 */
JobInfo setProjectId(String projectId) {
  Builder builder = toBuilder();
  if (jobId != null) {
    builder.setJobId(jobId.setProjectId(projectId));
  }
  builder.setConfiguration(configuration.setProjectId(projectId));
  return builder.build();
}
/** Example of updating a dataset by changing its description. */ // [TARGET update(DatasetInfo, DatasetOption...)] // [VARIABLE "my_dataset_name"] // [VARIABLE "some_new_description"] public Dataset updateDataset(String datasetName, String newDescription) { // [START bigquery_update_dataset_description] Dataset oldDataset = bigquery.getDataset(datasetName); DatasetInfo datasetInfo = oldDataset.toBuilder().setDescription(newDescription).build(); Dataset newDataset = bigquery.update(datasetInfo); // [END bigquery_update_dataset_description] return newDataset; }
@Override protected Restorable<?>[] restorableObjects() { BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("p2").build(); // avoid closing when you don't want partial writes upon failure @SuppressWarnings("resource") TableDataWriteChannel writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION, "upload-id"); return new Restorable<?>[] {writer}; } }
/** Example of updating a table by changing its description. */ public Table updateTableDescription(String datasetName, String tableName, String newDescription) { // [START bigquery_update_table_description] // String datasetName = "my_dataset_name"; // String tableName = "my_table_name"; // String newDescription = "new_description"; Table beforeTable = bigquery.getTable(datasetName, tableName); TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build(); Table afterTable = bigquery.update(tableInfo); // [END bigquery_update_table_description] return afterTable; }
/**
 * Returns a builder for an {@code InsertAllRequest} targeting the table identified by the given
 * dataset and table names, pre-populated with the rows to insert.
 */
public static Builder newBuilder(String datasetId, String tableId, Iterable<RowToInsert> rows) {
  TableId table = TableId.of(datasetId, tableId);
  return newBuilder(table, rows);
}
/** Round-trips each job-statistics variant through its protobuf form and verifies equality. */
@Test
public void testToPbAndFromPb() {
  compareStatistics(COPY_STATISTICS, CopyStatistics.fromPb(COPY_STATISTICS.toPb()));
  compareExtractStatistics(
      EXTRACT_STATISTICS, ExtractStatistics.fromPb(EXTRACT_STATISTICS.toPb()));
  compareLoadStatistics(LOAD_STATISTICS, LoadStatistics.fromPb(LOAD_STATISTICS.toPb()));
  compareLoadStatistics(
      LOAD_STATISTICS_INCOMPLETE, LoadStatistics.fromPb(LOAD_STATISTICS_INCOMPLETE.toPb()));
  compareQueryStatistics(QUERY_STATISTICS, QueryStatistics.fromPb(QUERY_STATISTICS.toPb()));
  compareQueryStatistics(
      QUERY_STATISTICS_INCOMPLETE, QueryStatistics.fromPb(QUERY_STATISTICS_INCOMPLETE.toPb()));
}
/**
 * Returns an {@code InsertAllRequest} for the table described by the given table info, populated
 * with the given rows.
 */
public static InsertAllRequest of(TableInfo tableInfo, RowToInsert... rows) {
  Builder builder = newBuilder(tableInfo.getTableId(), rows);
  return builder.build();
}
/** Lists the tables in the named dataset, resolved against this service's default project. */
@Override
public Page<Table> listTables(String datasetId, TableListOption... options) {
  DatasetId dataset = DatasetId.of(getOptions().getProjectId(), datasetId);
  return listTables(dataset, getOptions(), optionMap(options));
}
/**
 * Creates a query job for the given configuration and blocks until its results are available.
 * Dry-run configurations are rejected because they produce no result set.
 *
 * @throws InterruptedException if the current thread is interrupted while waiting
 * @throws JobException if the job completes unsuccessfully
 */
@Override
public TableResult query(QueryJobConfiguration configuration, JobOption... options)
    throws InterruptedException, JobException {
  Job.checkNotDryRun(configuration, "query");
  Job queryJob = create(JobInfo.of(configuration), options);
  return queryJob.getQueryResults();
}
/**
 * Returns an {@code InsertAllRequest} for the given destination table, populated with the given
 * rows.
 */
public static InsertAllRequest of(TableId tableId, Iterable<RowToInsert> rows) {
  Builder builder = newBuilder(tableId, rows);
  return builder.build();
}
/**
 * Returns a {@code JobInfo} for the given configuration. The configuration type selects the job
 * kind: {@link CopyJobConfiguration} copies an existing table, {@link ExtractJobConfiguration}
 * exports a table to Google Cloud Storage, {@link LoadJobConfiguration} loads data from Google
 * Cloud Storage into a table, and {@link QueryJobConfiguration} runs a query.
 */
public static JobInfo of(JobConfiguration configuration) {
  Builder builder = newBuilder(configuration);
  return builder.build();
}
// Converts a table protobuf message into a Table bound to this client's BigQuery service.
@Override public Table apply(com.google.api.services.bigquery.model.Table table) { return Table.fromPb(serviceOptions.getService(), table); } });
/** Re-creates a {@code Dataset} from its protobuf form, bound to the given service client. */
static Dataset fromPb(
    BigQuery bigquery, com.google.api.services.bigquery.model.Dataset datasetPb) {
  DatasetInfo.BuilderImpl infoBuilder = new DatasetInfo.BuilderImpl(datasetPb);
  return new Dataset(bigquery, infoBuilder);
}
}
/**
 * Returns an extract job configuration that exports the given source table to the given
 * destination URI.
 */
public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri) {
  Builder builder = newBuilder(sourceTable, destinationUri);
  return builder.build();
}
/**
 * Returns an {@code InsertAllRequest} for the table described by the given table info, populated
 * with the given rows.
 */
public static InsertAllRequest of(TableInfo tableInfo, Iterable<RowToInsert> rows) {
  Builder builder = newBuilder(tableInfo.getTableId(), rows);
  return builder.build();
}
/**
 * Creates a query job under the supplied job id and blocks until its results are available.
 * Dry-run configurations are rejected because they produce no result set.
 *
 * @throws InterruptedException if the current thread is interrupted while waiting
 * @throws JobException if the job completes unsuccessfully
 */
@Override
public TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options)
    throws InterruptedException, JobException {
  Job.checkNotDryRun(configuration, "query");
  Job queryJob = create(JobInfo.of(jobId, configuration), options);
  return queryJob.getQueryResults();
}
/**
 * Returns an {@code InsertAllRequest} for the table identified by the given dataset and table
 * names, populated with the given rows.
 */
public static InsertAllRequest of(String datasetId, String tableId, RowToInsert... rows) {
  Builder builder = newBuilder(datasetId, tableId, rows);
  return builder.build();
}