/**
 * Fetches a page of query results for the given job via the BigQuery API.
 *
 * <p>Paging/timeout knobs are read from {@code options}; {@code location} is required for
 * jobs created in non-default locations. Any transport failure is rethrown via
 * {@code translate(ex)} as the service's unchecked exception type.
 */
@Override
public GetQueryResultsResponse getQueryResults(
    String projectId, String jobId, String location, Map<Option, ?> options) {
  // Resolve the optional request parameters up front instead of inline in the chain.
  Long maxResults = Option.MAX_RESULTS.getLong(options);
  String pageToken = Option.PAGE_TOKEN.getString(options);
  Long startIndex = Option.START_INDEX.getLong(options);
  Long timeoutMs = Option.TIMEOUT.getLong(options);
  try {
    return bigquery
        .jobs()
        .getQueryResults(projectId, jobId)
        .setLocation(location)
        .setMaxResults(maxResults)
        .setPageToken(pageToken)
        // A null start index leaves the field unset, matching the absent-option case.
        .setStartIndex(startIndex == null ? null : BigInteger.valueOf(startIndex))
        .setTimeoutMs(timeoutMs)
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
// NOTE(review): fragment of a jobs().list(projectId) request-builder chain — the enclosing
// method and the rest of the chain are not visible here; presumably continued with paging
// options and .execute(). Verify against the full method before changing.
bigquery .jobs() .list(projectId) .setAllUsers(Option.ALL_USERS.getBoolean(options)) .setFields(Option.FIELDS.getString(options))
// NOTE(review): isolated statement — builds (but does not execute) a job-insert request for
// the exporter's configured project; the surrounding method is not visible from here.
Insert insert = bigquery.jobs().insert(exporterConfig.getBigqueryProjectId(), job);
// NOTE(review): several disjoint fragments mashed together — a dry-run insert (statistics
// only, no data touched), a real insert, and a status poll, each wrapped in
// executeWithBackOff with a contextual error message. Enclosing method(s) not visible;
// do not edit without the full context.
.setDryRun(true)); JobStatistics jobStats = executeWithBackOff( client.jobs().insert(projectId, dryRunJob), String.format("Error when trying to dry run query %s.", queryConfig.toPrettyString())).getStatistics(); client.jobs().insert(projectId, job), String.format("Error when trying to execute the job for query %s.", queryConfig.toPrettyString())); client.jobs().get(projectId, jobId.getJobId()), String.format("Error when trying to get status of the job for query %s.", queryConfig.toPrettyString()));
// NOTE(review): fragment — builds a jobs().get request scoped to the job's location;
// no .execute() visible here, so the request is presumably executed by the (unseen) caller.
bigquery .jobs() .get(projectId, jobReference.getJobId()) .setLocation(jobReference.getLocation());
// NOTE(review): truncated jobs().list builder chain (duplicate of an earlier fragment);
// the remainder of the chain and enclosing method are outside this view.
bigquery .jobs() .list(projectId) .setAllUsers(Option.ALL_USERS.getBoolean(options)) .setFields(Option.FIELDS.getString(options))
// NOTE(review): opening of a try block listing jobs for the configured project — the chain,
// the catch clause, and the rest of the method are not visible here.
try { JobList jobsList = bigquery.jobs() .list(this.options.projectId()) .setAllUsers(ALL_USERS.getBoolean(options)) .setFields(FIELDS.getString(options))
// NOTE(review): same truncated try/list fragment as above; incomplete from this view —
// leave code untouched.
try { JobList jobsList = bigquery.jobs() .list(this.options.projectId()) .setAllUsers(ALL_USERS.getBoolean(options)) .setFields(FIELDS.getString(options))
// NOTE(review): two statements from an unseen method — inserts a job, then immediately
// requests its query results (the getQueryResults return value is discarded here;
// presumably only used for its blocking/validation side effect — confirm in full source).
Job insertedJob = bqClient.jobs().insert(projectId, job).execute(); bqClient .jobs() .getQueryResults(projectId, insertedJob.getJobReference().getJobId()) .execute();
// NOTE(review): single statement fetching a job by reference (project/id/location);
// result unused in this fragment — the enclosing method is not visible.
client .jobs() .get(jobRef.getProjectId(), jobRef.getJobId()) .setLocation(jobRef.getLocation()) .execute();
// NOTE(review): head of a do/while retry loop that inserts a job and logs success — the
// loop condition, catch clause, and remaining log arguments are truncated here.
do { try { client.jobs().insert(jobRef.getProjectId(), job).execute(); LOG.info( "Started BigQuery job: {}.\n{}",
/** * Polls the status of a BigQuery job, returns Job reference if "Done" * * @param bigquery an authorized BigQuery client * @param projectId a string containing the current project ID * @param jobId a reference to an inserted query Job * @return a reference to the completed Job * @throws IOException * @throws InterruptedException */ private static Job checkQueryResults(Bigquery bigquery, String projectId, JobReference jobId) throws IOException, InterruptedException { // Variables to keep track of total query time long startTime = System.currentTimeMillis(); long elapsedTime; while (true) { Job pollJob = bigquery.jobs().get(projectId, jobId.getJobId()).execute(); elapsedTime = System.currentTimeMillis() - startTime; System.out.format("Job status (%dms) %s: %s\n", elapsedTime, jobId.getJobId(), pollJob.getStatus().getState()); if (pollJob.getStatus().getState().equals("DONE")) { return pollJob; } // Pause execution for one second before polling job status again, to // reduce unnecessary calls to the BigQUery API and lower overall // application bandwidth. Thread.sleep(1000); } } // [END start_query]
@Nonnull public QueryResponse queryWithRetries(String query, String projectId, boolean typed) throws IOException, InterruptedException { Sleeper sleeper = Sleeper.DEFAULT; BackOff backoff = BackOffAdapter.toGcpBackOff(BACKOFF_FACTORY.backoff()); IOException lastException = null; QueryRequest bqQueryRequest = new QueryRequest().setQuery(query); do { if (lastException != null) { LOG.warn("Retrying query ({}) after exception", bqQueryRequest.getQuery(), lastException); } try { QueryResponse response = bqClient.jobs().query(projectId, bqQueryRequest).execute(); if (response != null) { return typed ? getTypedTableRows(response) : response; } else { lastException = new IOException("Expected valid response from query job, but received null."); } } catch (IOException e) { // ignore and retry lastException = e; } } while (BackOffUtils.next(sleeper, backoff)); throw new RuntimeException( String.format( "Unable to get BigQuery response after retrying %d times using query (%s)", MAX_QUERY_RETRIES, bqQueryRequest.getQuery()), lastException); }
/**
 * Looks up a BigQuery job by reference, retrying transient failures under the supplied
 * backoff policy.
 *
 * @param jobRef the job to fetch (project + job id)
 * @param sleeper used to pause between attempts
 * @param backoff governs how many attempts are made
 * @return the job, or {@code null} when the service reports it does not exist
 * @throws IOException once retries are exhausted, carrying the last underlying failure
 */
@VisibleForTesting
public Job getJob(JobReference jobRef, Sleeper sleeper, BackOff backoff)
    throws IOException, InterruptedException {
  String jobId = jobRef.getJobId();
  Exception lastException;
  do {
    try {
      return client.jobs().get(jobRef.getProjectId(), jobId).execute();
    } catch (GoogleJsonResponseException e) {
      // A definitive not-found is a terminal answer, not something to retry.
      if (errorExtractor.itemNotFound(e)) {
        LOG.info("No BigQuery job with job id {} found.", jobId);
        return null;
      }
      LOG.info(
          "Ignoring the error encountered while trying to query the BigQuery job {}", jobId, e);
      lastException = e;
    } catch (IOException e) {
      LOG.info(
          "Ignoring the error encountered while trying to query the BigQuery job {}", jobId, e);
      lastException = e;
    }
  } while (nextBackOff(sleeper, backoff));
  throw new IOException(
      String.format(
          "Unable to find BigQuery job: %s, aborting after %d retries.",
          jobRef, MAX_RPC_RETRIES),
      lastException);
}
}
// NOTE(review): fragments of a create-job method with duplicate-jobId recovery — a
// precondition message, the insert builder, and the fallback jobs().get after what is
// presumably a 409/duplicate error. The surrounding conditionals are truncated; leave
// as-is until the full method is in view.
"Require non-null JobReference and JobId inside; getJobReference() == '%s'", job.getJobReference()); Insert insert = service.jobs().insert(projectId, job); Job response = null; try { "Fetching existing job after catching exception for duplicate jobId '%s'", job.getJobReference().getJobId()); response = service.jobs().get(projectId, job.getJobReference().getJobId()).execute(); } else { throw new IOException(
/**
 * Fetches the results of a completed query job and prints them to stdout, one row per
 * line with each cell left-padded to 50 characters.
 *
 * @param bigquery an authorized BigQuery client
 * @param projectId a string containing the current project ID
 * @param completedJob the completed query Job whose results should be displayed
 * @throws IOException if the getQueryResults call fails
 */
private static void displayQueryResults(Bigquery bigquery, String projectId, Job completedJob)
    throws IOException {
  GetQueryResultsResponse queryResult =
      bigquery
          .jobs()
          .getQueryResults(projectId, completedJob.getJobReference().getJobId())
          .execute();
  List<TableRow> rows = queryResult.getRows();
  System.out.print("\nQuery Results:\n------------\n");
  // FIX: getRows() returns null for a zero-row result set; the original iterated it
  // unconditionally and would NPE. Print only when rows are present.
  if (rows != null) {
    for (TableRow row : rows) {
      for (TableCell field : row.getF()) {
        System.out.printf("%-50s", field.getV());
      }
      System.out.println();
    }
  }
}
// [END display_result]
/**
 * Creates and inserts a query job for {@code querySql} on the given project.
 *
 * @param bigquery an authorized BigQuery client
 * @param projectId a String containing the project ID
 * @param querySql the actual query string
 * @return a reference to the inserted query job
 * @throws IOException if the insert call fails
 */
public static JobReference startQuery(Bigquery bigquery, String projectId, String querySql)
    throws IOException {
  System.out.format("\nInserting Query Job: %s\n", querySql);
  // Assemble the job configuration: a plain query job with no extra options.
  JobConfigurationQuery queryConfig = new JobConfigurationQuery();
  queryConfig.setQuery(querySql);
  JobConfiguration config = new JobConfiguration();
  config.setQuery(queryConfig);
  Job job = new Job();
  job.setConfiguration(config);
  // FIX: the original also called insert.setProjectId(projectId), which is redundant —
  // jobs().insert(projectId, job) already binds the project on the request.
  JobReference jobId = bigquery.jobs().insert(projectId, job).execute().getJobReference();
  System.out.format("\nJob ID of Query Job is: %s\n", jobId.getJobId());
  return jobId;
}
/**
 * Retrieves query results for {@code jobId} in {@code location}, applying the paging and
 * timeout options present in {@code options}. IOExceptions from the transport are
 * converted by {@code translate(ex)} and rethrown.
 */
@Override
public GetQueryResultsResponse getQueryResults(
    String projectId, String jobId, String location, Map<Option, ?> options) {
  try {
    Bigquery.Jobs.GetQueryResults request =
        bigquery
            .jobs()
            .getQueryResults(projectId, jobId)
            .setLocation(location)
            .setMaxResults(Option.MAX_RESULTS.getLong(options))
            .setPageToken(Option.PAGE_TOKEN.getString(options))
            .setTimeoutMs(Option.TIMEOUT.getLong(options));
    // START_INDEX arrives as a Long but the API wants a BigInteger; null stays null.
    Long startIndex = Option.START_INDEX.getLong(options);
    if (startIndex != null) {
      request.setStartIndex(BigInteger.valueOf(startIndex));
    } else {
      request.setStartIndex(null);
    }
    return request.execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
/**
 * Dry-runs a query job (no data is read or written) and returns the statistics BigQuery
 * estimates for it, retrying the RPC under the default backoff policy.
 *
 * @param projectId project to run the dry run under
 * @param queryConfig the query configuration to estimate
 * @param location job location to stamp on the job reference
 * @return the estimated job statistics
 * @throws IOException if all retries fail
 * @throws InterruptedException if interrupted while backing off
 */
@Override
public JobStatistics dryRunQuery(
    String projectId, JobConfigurationQuery queryConfig, String location)
    throws InterruptedException, IOException {
  // Build a job marked dryRun=true so the service only validates and estimates it.
  JobReference jobRef = new JobReference().setLocation(location).setProjectId(projectId);
  JobConfiguration configuration =
      new JobConfiguration().setQuery(queryConfig).setDryRun(true);
  Job dryRunJob = new Job().setJobReference(jobRef).setConfiguration(configuration);
  String errorMessage =
      String.format(
          "Unable to dry run query: %s, aborting after %d retries.",
          queryConfig, MAX_RPC_RETRIES);
  Job result =
      executeWithRetries(
          client.jobs().insert(projectId, dryRunJob),
          errorMessage,
          Sleeper.DEFAULT,
          createDefaultBackoff(),
          ALWAYS_RETRY);
  return result.getStatistics();
}
/**
 * Fetches query results for {@code jobId} under the service's configured project.
 *
 * <p>Returns {@code null} when the service reports the job/results as not found
 * (HTTP 404); any other failure is rethrown as a {@code BigQueryException}.
 */
@Override
public GetQueryResultsResponse getQueryResults(String jobId, Map<Option, ?> options) {
  Long startIndex = START_INDEX.getLong(options);
  try {
    return bigquery.jobs().getQueryResults(this.options.projectId(), jobId)
        .setMaxResults(MAX_RESULTS.getLong(options))
        .setPageToken(PAGE_TOKEN.getString(options))
        .setStartIndex(startIndex == null ? null : BigInteger.valueOf(startIndex))
        .setTimeoutMs(TIMEOUT.getLong(options))
        .execute();
  } catch (IOException ex) {
    BigQueryException serviceException = translate(ex);
    if (serviceException.code() != HTTP_NOT_FOUND) {
      throw serviceException;
    }
    // Not-found maps to an absent result rather than an exception.
    return null;
  }
}