static JobId fromPb(JobReference jobRef) {
  return newBuilder()
      .setProject(jobRef.getProjectId())
      .setJob(jobRef.getJobId())
      .setLocation(jobRef.getLocation())
      .build();
}
@Override
public Job create(Job job, Map<Option, ?> options) {
  try {
    String projectId =
        job.getJobReference() != null
            ? job.getJobReference().getProjectId()
            : this.options.getProjectId();
    return bigquery
        .jobs()
        .insert(projectId, job)
        .setFields(Option.FIELDS.getString(options))
        .execute();
  } catch (IOException ex) {
    throw translate(ex);
  }
}
static JobId fromPb(JobReference jobRef) {
  return new JobId(jobRef.getProjectId(), jobRef.getJobId());
}
@VisibleForTesting
public Job getJob(JobReference jobRef, Sleeper sleeper, BackOff backoff)
    throws IOException, InterruptedException {
  String jobId = jobRef.getJobId();
  Exception lastException;
  do {
    try {
      return client.jobs().get(jobRef.getProjectId(), jobId).execute();
    } catch (GoogleJsonResponseException e) {
      if (errorExtractor.itemNotFound(e)) {
        LOG.info("No BigQuery job with job id {} found.", jobId);
        return null;
      }
      LOG.info(
          "Ignoring the error encountered while trying to query the BigQuery job {}", jobId, e);
      lastException = e;
    } catch (IOException e) {
      LOG.info(
          "Ignoring the error encountered while trying to query the BigQuery job {}", jobId, e);
      lastException = e;
    }
  } while (nextBackOff(sleeper, backoff));
  throw new IOException(
      String.format(
          "Unable to find BigQuery job: %s, aborting after %d retries.", jobRef, MAX_RPC_RETRIES),
      lastException);
}
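The retry loop above relies on a nextBackOff helper that this excerpt does not show. A minimal sketch, assuming it simply wraps BackOffUtils.next from com.google.api.client.util (the helper's name comes from the snippet; this body is an assumption):

import com.google.api.client.util.BackOff;
import com.google.api.client.util.BackOffUtils;
import com.google.api.client.util.Sleeper;
import java.io.IOException;

// Sleeps for the next backoff interval; returns false once the backoff policy
// says retries are exhausted, which terminates the do/while loop above.
private static boolean nextBackOff(Sleeper sleeper, BackOff backoff) throws InterruptedException {
  try {
    return BackOffUtils.next(sleeper, backoff);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}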
do {
  try {
    client.jobs().insert(jobRef.getProjectId(), job).execute();
    LOG.info(
        "Started BigQuery job: {}.\n{}",
        jobRef,
        formatBqStatusCommand(jobRef.getProjectId(), jobRef.getJobId()));
    return; // SUCCEEDED
  } catch (IOException e) {
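This snippet (and the polling excerpt that follows) logs the output of formatBqStatusCommand, which is not defined anywhere in these excerpts. A minimal sketch, assuming it renders a bq CLI command the reader can paste into a shell to inspect the job (the exact flags are an assumption):

// Produces something like: bq show --format=prettyjson --project_id=my-project -j my-job-id
private static String formatBqStatusCommand(String projectId, String jobId) {
  return String.format("bq show --format=prettyjson --project_id=%s -j %s", projectId, jobId);
}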
Job job =
    client
        .jobs()
        .get(jobRef.getProjectId(), jobRef.getJobId())
        .setLocation(jobRef.getLocation())
        .execute();
JobStatus status = job.getStatus();
// ...
LOG.info(
    "Still waiting for BigQuery job {}, currently in status {}\n{}",
    jobRef.getJobId(),
    status,
    formatBqStatusCommand(jobRef.getProjectId(), jobRef.getJobId()));
} catch (IOException e) {
@Override
public void startLoadJob(JobReference jobRef, JobConfigurationLoad loadConfig)
    throws IOException {
  synchronized (allJobs) {
    verifyUniqueJobId(jobRef.getJobId());
    Job job = new Job();
    job.setJobReference(jobRef);
    job.setConfiguration(new JobConfiguration().setLoad(loadConfig));
    job.setKind("bigquery#job");
    job.setStatus(new JobStatus().setState("PENDING"));
    // Copy the files to a new location for import, as the temporary files will be deleted by
    // the caller.
    if (loadConfig.getSourceUris().size() > 0) {
      ImmutableList.Builder<ResourceId> sourceFiles = ImmutableList.builder();
      ImmutableList.Builder<ResourceId> loadFiles = ImmutableList.builder();
      for (String filename : loadConfig.getSourceUris()) {
        sourceFiles.add(FileSystems.matchNewResource(filename, false /* isDirectory */));
        loadFiles.add(
            FileSystems.matchNewResource(
                filename + ThreadLocalRandom.current().nextInt(), false /* isDirectory */));
      }
      FileSystems.copy(sourceFiles.build(), loadFiles.build());
      filesForLoadJobs.put(jobRef.getProjectId(), jobRef.getJobId(), loadFiles.build());
    }
    allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job));
  }
}
try {
  synchronized (allJobs) {
    JobInfo job = allJobs.get(jobRef.getProjectId(), jobRef.getJobId());
    if (job == null) {
      return null;
    }
    // ... (elided: the job runs here; on failure its status is marked FAILED with a
    // message built as String.format("Job %s failed: %s", job.job.getConfiguration(),
    // e.toString()))
    // Delete the source files that startLoadJob copied for this job.
    List<ResourceId> sourceFiles =
        filesForLoadJobs.get(jobRef.getProjectId(), jobRef.getJobId());
    FileSystems.delete(sourceFiles);
@Override
public void startQueryJob(JobReference jobRef, JobConfigurationQuery query) {
  synchronized (allJobs) {
    Job job = new Job();
    job.setJobReference(jobRef);
    job.setConfiguration(new JobConfiguration().setQuery(query));
    job.setKind("bigquery#job");
    job.setStatus(new JobStatus().setState("PENDING"));
    allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job));
  }
}
@Override
public void startExtractJob(JobReference jobRef, JobConfigurationExtract extractConfig)
    throws IOException {
  checkArgument(
      "AVRO".equals(extractConfig.getDestinationFormat()), "Only extract to AVRO is supported");
  synchronized (allJobs) {
    verifyUniqueJobId(jobRef.getJobId());
    ++numExtractJobCalls;
    Job job = new Job();
    job.setJobReference(jobRef);
    job.setConfiguration(new JobConfiguration().setExtract(extractConfig));
    job.setKind("bigquery#job");
    job.setStatus(new JobStatus().setState("PENDING"));
    allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job));
  }
}
@Override
public void startCopyJob(JobReference jobRef, JobConfigurationTableCopy copyConfig)
    throws IOException {
  synchronized (allJobs) {
    verifyUniqueJobId(jobRef.getJobId());
    Job job = new Job();
    job.setJobReference(jobRef);
    job.setConfiguration(new JobConfiguration().setCopy(copyConfig));
    job.setKind("bigquery#job");
    job.setStatus(new JobStatus().setState("PENDING"));
    allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job));
  }
}
TableSchema schema = load.getSchema();
checkArgument(schema != null, "No schema specified");
List<ResourceId> sourceFiles =
    filesForLoadJobs.get(jobRef.getProjectId(), jobRef.getJobId());
WriteDisposition writeDisposition = WriteDisposition.valueOf(load.getWriteDisposition());
CreateDisposition createDisposition = CreateDisposition.valueOf(load.getCreateDisposition());
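The two valueOf calls succeed only because the strings carried on the load configuration match the enum constant names exactly. A hedged usage sketch, assuming WriteDisposition and CreateDisposition are the enums already imported by this file:

JobConfigurationLoad load =
    new JobConfigurationLoad()
        .setWriteDisposition("WRITE_APPEND")       // must match an enum constant name
        .setCreateDisposition("CREATE_IF_NEEDED"); // likewise
WriteDisposition writeDisposition = WriteDisposition.valueOf(load.getWriteDisposition());
CreateDisposition createDisposition = CreateDisposition.valueOf(load.getCreateDisposition());
// Any other casing or value (e.g. "write_append") throws IllegalArgumentException.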