// Converts a TableReference into its table-spec string via toTableSpec.
// NOTE(review): the trailing '}' closes an enclosing function/class whose
// declaration lies outside this chunk — presumably a SerializableFunction
// implementation; confirm against the full file.
@Override public String apply(TableReference from) { return toTableSpec(from); } }
/**
 * Creates a destination for the given table reference.
 *
 * @param tableReference the destination table
 * @param tableDescription optional human-readable description of the table
 * @param jsonTimePartitioning optional time-partitioning spec, serialized as JSON
 */
public TableDestination(
    TableReference tableReference,
    @Nullable String tableDescription,
    @Nullable String jsonTimePartitioning) {
  // Normalize to the table-spec form and delegate to the String-based constructor.
  this(
      BigQueryHelpers.toTableSpec(tableReference),
      tableDescription,
      jsonTimePartitioning);
}
/**
 * Creates a destination for the given table reference.
 *
 * @param tableReference the destination table
 * @param tableDescription optional human-readable description of the table
 * @param timePartitioning optional time partitioning; serialized to JSON when present
 */
public TableDestination(
    TableReference tableReference,
    @Nullable String tableDescription,
    // FIX: the body explicitly guards against null, and the sibling constructor
    // annotates its partitioning parameter @Nullable — annotate for consistency.
    @Nullable TimePartitioning timePartitioning) {
  this(
      BigQueryHelpers.toTableSpec(tableReference),
      tableDescription,
      timePartitioning != null ? BigQueryHelpers.toJsonString(timePartitioning) : null);
}
// Parses a JSON-serialized TableReference and converts it to its table-spec string.
// NOTE(review): the trailing '}' closes an enclosing function/class whose
// declaration lies outside this chunk — confirm against the full file.
@Override public String apply(String from) { return toTableSpec(fromJsonString(from, TableReference.class)); } }
/**
 * Verifies that the given table either does not exist or contains no rows.
 *
 * @param datasetService service used to look up the table and check emptiness
 * @param tableRef the table to verify
 * @throws IllegalStateException if the table exists and is non-empty
 * @throws RuntimeException if emptiness could not be confirmed
 */
static void verifyTableNotExistOrEmpty(DatasetService datasetService, TableReference tableRef) {
  try {
    if (datasetService.getTable(tableRef) != null) {
      checkState(
          datasetService.isTableEmpty(tableRef),
          "BigQuery table is not empty: %s.",
          toTableSpec(tableRef));
    }
  } catch (InterruptedException e) {
    // Restore the thread's interrupt flag before surfacing the failure.
    Thread.currentThread().interrupt();
    throw new RuntimeException(
        "unable to confirm BigQuery table emptiness for table " + toTableSpec(tableRef), e);
  } catch (IOException e) {
    throw new RuntimeException(
        "unable to confirm BigQuery table emptiness for table " + toTableSpec(tableRef), e);
  }
}
/**
 * Verifies that the given table exists.
 *
 * @param datasetService service used to look up the table
 * @param table the table to verify
 * @throws IllegalArgumentException if the table is known not to exist
 * @throws RuntimeException if existence could not be confirmed
 */
static void verifyTablePresence(DatasetService datasetService, TableReference table) {
  try {
    datasetService.getTable(table);
  } catch (Exception e) {
    // FIX: restore the interrupt flag if the lookup was interrupted; the broad
    // catch previously swallowed it, unlike verifyTableNotExistOrEmpty.
    if (e instanceof InterruptedException) {
      Thread.currentThread().interrupt();
    }
    ApiErrorExtractor errorExtractor = new ApiErrorExtractor();
    if ((e instanceof IOException) && errorExtractor.itemNotFound((IOException) e)) {
      // A definitive "not found" from the service is a caller error.
      throw new IllegalArgumentException(
          String.format(RESOURCE_NOT_FOUND_ERROR, "table", toTableSpec(table)), e);
    } else if (e instanceof RuntimeException) {
      throw (RuntimeException) e;
    } else {
      throw new RuntimeException(
          String.format(
              UNABLE_TO_CONFIRM_PRESENCE_OF_RESOURCE_ERROR, "table", toTableSpec(table)),
          e);
    }
  }
}
/** Writes to the given table, specified as a {@link TableReference}. */
public Write<T> to(TableReference table) {
  // Render the reference as a table spec and delegate to the String overload.
  String tableSpec = BigQueryHelpers.toTableSpec(table);
  return to(StaticValueProvider.of(tableSpec));
}
/**
 * Verifies that the dataset containing the given table exists.
 *
 * @param datasetService service used to look up the dataset
 * @param table a table whose project and dataset identify the dataset to verify
 * @throws IllegalArgumentException if the dataset is known not to exist
 * @throws RuntimeException if existence could not be confirmed
 */
static void verifyDatasetPresence(DatasetService datasetService, TableReference table) {
  try {
    datasetService.getDataset(table.getProjectId(), table.getDatasetId());
  } catch (Exception e) {
    // FIX: restore the interrupt flag if the lookup was interrupted; the broad
    // catch previously swallowed it, unlike verifyTableNotExistOrEmpty.
    if (e instanceof InterruptedException) {
      Thread.currentThread().interrupt();
    }
    ApiErrorExtractor errorExtractor = new ApiErrorExtractor();
    if ((e instanceof IOException) && errorExtractor.itemNotFound((IOException) e)) {
      // A definitive "not found" from the service is a caller error.
      throw new IllegalArgumentException(
          String.format(RESOURCE_NOT_FOUND_ERROR, "dataset", toTableSpec(table)), e);
    } else if (e instanceof RuntimeException) {
      throw (RuntimeException) e;
    } else {
      throw new RuntimeException(
          String.format(
              UNABLE_TO_CONFIRM_PRESENCE_OF_RESOURCE_ERROR, "dataset", toTableSpec(table)),
          e);
    }
  }
}
/** See {@link Read#from(TableReference)}. */
public TypedRead<T> from(TableReference table) {
  // Render the reference as a table spec and delegate to the String overload.
  String tableSpec = BigQueryHelpers.toTableSpec(table);
  return from(StaticValueProvider.of(tableSpec));
}
/**
 * {@inheritDoc}
 *
 * <p>If a table with the same name already exists in the dataset, the function simply returns.
 * In such a case, the existing table doesn't necessarily have the same schema as specified by
 * the parameter.
 *
 * @throws IOException if other error than already existing table occurs.
 */
@Override
public void createTable(Table table) throws InterruptedException, IOException {
  LOG.info(
      "Trying to create BigQuery table: {}",
      BigQueryHelpers.toTableSpec(table.getTableReference()));
  // Retry creation with exponential backoff, bounded by the configured max elapsed time.
  BackOff createBackoff =
      new ExponentialBackOff.Builder()
          .setMaxElapsedTimeMillis(RETRY_CREATE_TABLE_DURATION_MILLIS)
          .build();
  tryCreateTable(table, createBackoff, Sleeper.DEFAULT);
}
// Serializes a BigQueryInsertError as three consecutive fields: the error as a
// JSON string, the offending row, then the destination table spec. The matching
// decode must read the fields back in this exact order.
@Override
public void encode(BigQueryInsertError value, OutputStream outStream) throws IOException {
  String errorJson = MAPPER.writeValueAsString(value.getError());
  StringUtf8Coder.of().encode(errorJson, outStream);
  TableRowJsonCoder.of().encode(value.getRow(), outStream);
  StringUtf8Coder.of().encode(BigQueryHelpers.toTableSpec(value.getTable()), outStream);
}
/**
 * Runs a BigQuery extract job for the table to be read and returns its schema
 * together with the temporary files the extract job produced.
 *
 * @param options pipeline options, viewed as {@link BigQueryOptions}
 * @throws IOException if the table to extract does not exist
 */
protected ExtractResult extractFiles(PipelineOptions options) throws Exception {
  BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
  TableReference tableToExtract = getTableToExtract(bqOptions);
  BigQueryServices.DatasetService datasetService = bqServices.getDatasetService(bqOptions);

  // The export job cannot be started for a missing table; fail fast with context.
  Table table = datasetService.getTable(tableToExtract);
  if (table == null) {
    throw new IOException(
        String.format(
            "Cannot start an export job since table %s does not exist",
            BigQueryHelpers.toTableSpec(tableToExtract)));
  }

  JobService jobService = bqServices.getJobService(bqOptions);
  String extractJobId = getExtractJobId(createJobIdToken(options.getJobName(), stepUuid));
  final String extractDestinationDir =
      resolveTempLocation(bqOptions.getTempLocation(), "BigQueryExtractTemp", stepUuid);
  // The extract job must run in the same location as the source dataset.
  String bqLocation =
      BigQueryHelpers.getDatasetLocation(
          datasetService, tableToExtract.getProjectId(), tableToExtract.getDatasetId());

  List<ResourceId> tempFiles =
      executeExtract(
          extractJobId,
          tableToExtract,
          jobService,
          bqOptions.getProject(),
          extractDestinationDir,
          bqLocation);
  return new ExtractResult(table.getSchema(), tempFiles);
}