@Override
public String call() {
  // Opens a resumable upload session through the v2 RPC stub. The Job payload
  // carries the write-channel configuration and the target job reference,
  // both converted to their wire (proto) representations.
  // NOTE(review): this is the body of an anonymous Callable; the enclosing
  // class and the trailing closers below belong to code outside this view.
  return options
      .getBigQueryRpcV2()
      .open(
          new com.google.api.services.bigquery.model.Job()
              .setConfiguration(writeChannelConfiguration.toPb())
              .setJobReference(jobId.toPb()));
}
},
BuilderImpl(Job jobPb) {
  // Seed the builder from a wire-format Job. Scalar fields are copied
  // directly; nested messages are wrapped in their model types only when
  // present, since the proto accessors may legitimately return null.
  this.etag = jobPb.getEtag();
  this.generatedId = jobPb.getId();
  this.selfLink = jobPb.getSelfLink();
  this.userEmail = jobPb.getUserEmail();
  if (jobPb.getJobReference() != null) {
    this.jobId = JobId.fromPb(jobPb.getJobReference());
  }
  if (jobPb.getStatus() != null) {
    this.status = JobStatus.fromPb(jobPb.getStatus());
  }
  if (jobPb.getStatistics() != null) {
    // Statistics conversion needs the whole job (the statistics subtype is
    // inferred from the job's configuration), so the full proto is passed.
    this.statistics = JobStatistics.fromPb(jobPb);
  }
  if (jobPb.getConfiguration() != null) {
    this.configuration = JobConfiguration.fromPb(jobPb.getConfiguration());
  }
}
Job toPb() {
  // Serialize this job back into its API wire representation. Optional
  // nested members are only set when present so the proto stays sparse.
  Job model = new Job();
  model.setEtag(etag);
  model.setId(generatedId);
  model.setSelfLink(selfLink);
  model.setUserEmail(userEmail);
  if (jobId != null) {
    model.setJobReference(jobId.toPb());
  }
  if (status != null) {
    model.setStatus(status.toPb());
  }
  if (statistics != null) {
    model.setStatistics(statistics.toPb());
  }
  if (configuration != null) {
    model.setConfiguration(configuration.toPb());
  }
  return model;
}
@Override
public Job apply(JobList.Jobs jobPb) {
  // jobs.list returns state and errorResult at the top level of each list
  // entry; fold them into a JobStatus so the converted Job matches the
  // shape that jobs.get would return.
  JobStatus statusPb = jobPb.getStatus() != null ? jobPb.getStatus() : new JobStatus();
  if (statusPb.getState() == null) {
    statusPb.setState(jobPb.getState());
  }
  if (statusPb.getErrorResult() == null) {
    statusPb.setErrorResult(jobPb.getErrorResult());
  }
  return new Job()
      .setConfiguration(jobPb.getConfiguration())
      .setId(jobPb.getId())
      .setJobReference(jobPb.getJobReference())
      .setKind(jobPb.getKind())
      .setStatistics(jobPb.getStatistics())
      .setStatus(statusPb)
      .setUserEmail(jobPb.getUserEmail());
}
}));
JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() .setConfiguration(QUERY_JOB_CONFIGURATION_FOR_QUERY.toPb()) .setJobReference(queryJob.toPb()) .setId(JOB) .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); jobResponsePb.getConfiguration().getQuery().setDestinationTable(TABLE_ID.toPb()); GetQueryResultsResponse responsePb = new GetQueryResultsResponse()
@Override
public void startQueryJob(JobReference jobRef, JobConfigurationQuery query) {
  // Registers a fake query job in PENDING state, keyed by (projectId, jobId),
  // so later polls against this fake service can observe its lifecycle.
  synchronized (allJobs) {
    Job job = new Job();
    job.setJobReference(jobRef);
    job.setConfiguration(new JobConfiguration().setQuery(query));
    // Fixed: the kind string previously contained a stray leading space
    // (" bigquery#job"); the documented kind for a Jobs resource is
    // "bigquery#job".
    job.setKind("bigquery#job");
    job.setStatus(new JobStatus().setState("PENDING"));
    allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job));
  }
}
/**
 * Inserts an asynchronous query job for the given SQL into the project.
 *
 * @param bigquery an authorized BigQuery client
 * @param projectId the ID of the project to run the query in
 * @param querySql the SQL text of the query to execute
 * @return the job reference of the newly inserted query job
 * @throws IOException if the insert request fails
 */
public static JobReference startQuery(Bigquery bigquery, String projectId, String querySql)
    throws IOException {
  System.out.format("\nInserting Query Job: %s\n", querySql);
  // Build the job configuration fluently: Job -> JobConfiguration -> query.
  JobConfigurationQuery queryConfig = new JobConfigurationQuery().setQuery(querySql);
  Job job = new Job().setConfiguration(new JobConfiguration().setQuery(queryConfig));
  Insert insert = bigquery.jobs().insert(projectId, job);
  insert.setProjectId(projectId);
  JobReference jobRef = insert.execute().getJobReference();
  System.out.format("\nJob ID of Query Job is: %s\n", jobRef.getJobId());
  return jobRef;
}
checkState(queryConfig != null, "Unable to execute a query without a configured query"); Job dryRunJob = new Job() .setConfiguration(new JobConfiguration() .setQuery(queryConfig) .setDryRun(true)); client.jobs().insert(projectId, dryRunJob), String.format("Error when trying to dry run query %s.", queryConfig.toPrettyString())).getStatistics(); Job job = new Job(); JobConfiguration config = new JobConfiguration(); config.setQuery(queryConfig); job.setConfiguration(config); String.format("Error when trying to execute the job for query %s.", queryConfig.toPrettyString())); JobReference jobId = queryJob.getJobReference(); String.format("Error when trying to get status of the job for query %s.", queryConfig.toPrettyString())); JobStatus status = pollJob.getStatus(); if (status.getState().equals("DONE")) { return pollJob.getConfiguration().getQuery().getDestinationTable(); } else {
@Override public void beginExport() throws IOException { // Create job and configuration. JobConfigurationExtract extractConfig = new JobConfigurationExtract(); // Set source. extractConfig.setSourceTable(tableToExport.getTableReference()); // Set destination. extractConfig.setDestinationUris(getExportPaths()); extractConfig.set(DESTINATION_FORMAT_KEY, fileFormat.getFormatIdentifier()); JobConfiguration config = new JobConfiguration(); config.setExtract(extractConfig); JobReference jobReference = bigQueryHelper.createJobReference( projectId, "exporttocloudstorage", tableToExport.getLocation()); Job job = new Job(); job.setConfiguration(config); job.setJobReference(jobReference); // Insert and run job. try { Job response = bigQueryHelper.insertJobOrFetchDuplicate(projectId, job); logger.atFine().log("Got response '%s'", response); exportJobReference = response.getJobReference(); } catch (IOException e) { String error = String.format( "Error while exporting table %s", BigQueryStrings.toString(tableToExport.getTableReference())); throw new IOException(error, e); } }
.runOnce((TaskState state) -> { logger.info("Submitting BigQuery job: {}", canonicalJobId); Job job = new Job() .setJobReference(reference) .setConfiguration(config); JobStatus status = job.getStatus(); switch (status.getState()) { case "DONE": JobStatus status = completed.getStatus(); if (status.getErrorResult() != null) {
Job currentJob = new Job(); currentJob.setKind(" bigquery#job"); PendingJob pendingJob = new PendingJob( throw new RuntimeException("Failing to start."); currentJob.setJobReference( new JobReference() .setProjectId("") retryId -> { if (retryId.getRetryIndex() < 5) { currentJob.setStatus(new JobStatus().setErrorResult(new ErrorProto())); } else { currentJob.setStatus(new JobStatus().setErrorResult(null)); if (retryId.getJobId().equals(currentJob.getJobReference().getJobId())) { return currentJob; } else {
/** Builds a minimal job proto carrying only a query configuration, for test fixtures. */
private static com.google.api.services.bigquery.model.Job newJobPb() {
  com.google.api.services.bigquery.model.JobConfiguration configuration =
      new com.google.api.services.bigquery.model.JobConfiguration()
          .setQuery(new JobConfigurationQuery().setQuery("FOO"));
  com.google.api.services.bigquery.model.Job job =
      new com.google.api.services.bigquery.model.Job();
  job.setConfiguration(configuration);
  return job;
}
@Test public void testIncomplete() { // https://github.com/googleapis/google-cloud-java/issues/2357 com.google.api.services.bigquery.model.Job job = new com.google.api.services.bigquery.model.Job() .setStatistics( new com.google.api.services.bigquery.model.JobStatistics() .setCreationTime(1234L) .setStartTime(5678L)); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setCopy(new com.google.api.services.bigquery.model.JobConfigurationTableCopy())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(CopyStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setLoad(new com.google.api.services.bigquery.model.JobConfigurationLoad())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(LoadStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setExtract(new com.google.api.services.bigquery.model.JobConfigurationExtract())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(ExtractStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setQuery(new com.google.api.services.bigquery.model.JobConfigurationQuery())); assertThat(JobStatistics.fromPb(job)).isInstanceOf(QueryStatistics.class); }
@Override
public JobStatistics dryRunQuery(
    String projectId, JobConfigurationQuery queryConfig, String location)
    throws InterruptedException, IOException {
  // Insert a dry-run job: BigQuery validates the query and returns
  // statistics (e.g. bytes processed) without actually executing it.
  Job dryRunJob =
      new Job()
          .setJobReference(new JobReference().setLocation(location).setProjectId(projectId))
          .setConfiguration(new JobConfiguration().setQuery(queryConfig).setDryRun(true));
  Job inserted =
      executeWithRetries(
          client.jobs().insert(projectId, dryRunJob),
          String.format(
              "Unable to dry run query: %s, aborting after %d retries.",
              queryConfig, MAX_RPC_RETRIES),
          Sleeper.DEFAULT,
          createDefaultBackoff(),
          ALWAYS_RETRY);
  return inserted.getStatistics();
}
@Test
public void testGetJobSucceeds() throws Exception {
  // Stub the HTTP layer to return a well-formed job, then verify getJob
  // deserializes it and reads each part of the response exactly once.
  Job expected = new Job();
  expected.setStatus(new JobStatus());
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContent()).thenReturn(toStream(expected));

  JobReference jobRef = new JobReference().setProjectId("projectId").setJobId("jobId");
  BigQueryServicesImpl.JobServiceImpl jobService =
      new BigQueryServicesImpl.JobServiceImpl(bigquery);
  Job actual = jobService.getJob(jobRef, Sleeper.DEFAULT, BackOff.ZERO_BACKOFF);

  assertEquals(expected, actual);
  verify(response, times(1)).getStatusCode();
  verify(response, times(1)).getContent();
  verify(response, times(1)).getContentType();
}
/** Tests that {@link BigQueryServicesImpl.JobServiceImpl#startLoadJob} succeeds. */
@Test
public void testStartLoadJobSucceeds() throws IOException, InterruptedException {
  // Build the job to submit, keyed by a fully-specified reference.
  JobReference jobRef = new JobReference();
  jobRef.setJobId("jobId");
  jobRef.setProjectId("projectId");
  Job testJob = new Job();
  testJob.setJobReference(jobRef);

  // Stub the HTTP layer to accept the insert on the first attempt.
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContent()).thenReturn(toStream(testJob));

  Sleeper sleeper = new FastNanoClockAndSleeper();
  JobServiceImpl.startJob(
      testJob,
      new ApiErrorExtractor(),
      bigquery,
      sleeper,
      BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()));

  verify(response, times(1)).getStatusCode();
  verify(response, times(1)).getContent();
  verify(response, times(1)).getContentType();
  expectedLogs.verifyInfo(String.format("Started BigQuery job: %s", jobRef));
}
TableReference dryRunTable = new TableReference(); Job queryJob = new Job(); JobStatistics queryJobStats = new JobStatistics(); JobStatistics2 queryStats = new JobStatistics2(); queryStats.setReferencedTables(ImmutableList.of(dryRunTable)); queryJobStats.setQuery(queryStats); queryJob.setStatus(new JobStatus()).setStatistics(queryJobStats); Job extractJob = new Job(); JobStatistics extractJobStats = new JobStatistics(); JobStatistics4 extractStats = new JobStatistics4(); extractStats.setDestinationUriFileCounts(ImmutableList.of(1L)); extractJobStats.setExtract(extractStats); extractJob.setStatus(new JobStatus()).setStatistics(extractJobStats);
@Override
public Job create(Job job, Map<Option, ?> options) {
  // Resolve the project from the job's own reference when present, falling
  // back to the client's configured default project.
  String projectId;
  if (job.getJobReference() != null) {
    projectId = job.getJobReference().getProjectId();
  } else {
    projectId = this.options.getProjectId();
  }
  try {
    // Restrict the response to the requested fields, then issue the insert.
    return bigquery
        .jobs()
        .insert(projectId, job)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    // Surface transport failures as the service's unchecked exception type.
    throw translate(ex);
  }
}
/**
 * Classifies a job's terminal outcome from its status.
 *
 * <p>Returns {@code UNKNOWN} when the job (or its status) is absent,
 * {@code FAILED} when the status carries an error result or any entry in its
 * errors list, and {@code SUCCEEDED} otherwise.
 */
static Status parseStatus(@Nullable Job job) {
  if (job == null) {
    return Status.UNKNOWN;
  }
  JobStatus status = job.getStatus();
  if (status == null) {
    // Fixed: previously dereferenced status unconditionally, throwing an
    // NPE for a job whose status has not been populated.
    return Status.UNKNOWN;
  }
  // A top-level errorResult, or any entry in the errors list, marks failure.
  if (status.getErrorResult() != null
      || (status.getErrors() != null && !status.getErrors().isEmpty())) {
    return Status.FAILED;
  }
  return Status.SUCCEEDED;
}
if (!"FAILED".equals(job.job.getStatus().getState())) { if (numFailures < numFailuresExpected) { ++numFailures; job.job.getStatus().setState("RUNNING"); } else if (job.getJobCount == 2 * GET_JOBS_TRANSITION_INTERVAL + 1) { job.job.setStatus(runJob(job.job)); .getStatus() .setState("FAILED") .setErrorResult( .setMessage( String.format( "Job %s failed: %s", job.job.getConfiguration(), e.toString()))); List<ResourceId> sourceFiles = filesForLoadJobs.get(jobRef.getProjectId(), jobRef.getJobId());