private void createAndSaveRefresh(final JobDetails details, final RefreshDecision decision) {
  final boolean isFull = decision.getAccelerationSettings().getMethod() == RefreshMethod.FULL;
  final long updateId = isFull ? -1L : getUpdateId(job.getJobId(), job.getData());
  final MaterializationMetrics metrics = ReflectionUtils.computeMetrics(job);
  final List<DataPartition> dataPartitions = ReflectionUtils.computeDataPartitions(job.getJobAttempt().getInfo());
  final List<String> refreshPath = ReflectionUtils.getRefreshPath(job.getJobId(), job.getData(), accelerationBasePath);
  final Refresh refresh = ReflectionUtils.createRefresh(reflection.getId(), refreshPath, decision.getSeriesId(),
      decision.getSeriesOrdinal(), updateId, details, metrics, dataPartitions);

  logger.trace("Refresh created: {}", refresh);
  materializationStore.save(refresh);

  logger.debug("materialization {} was written to {}", ReflectionUtils.getId(materialization),
      PathUtils.constructFullPath(refreshPath));
}
public PartialJobListItem(Job input) {
  final JobAttempt firstAttempt = input.getAttempts().get(0);
  final JobAttempt lastAttempt = input.getAttempts().get(input.getAttempts().size() - 1);

  this.id = input.getJobId().getId();
  this.state = lastAttempt.getState();
  this.failureInfo = JobDetailsUI.toJobFailureInfo(input.getJobAttempt().getInfo());
  this.cancellationInfo = JobDetailsUI.toJobCancellationInfo(input.getJobAttempt());
  this.user = firstAttempt.getInfo().getUser();
  this.startTime = firstAttempt.getInfo().getStartTime();
  this.endTime = lastAttempt.getInfo().getFinishTime();
  this.description = firstAttempt.getInfo().getDescription();
  this.accelerated = lastAttempt.getInfo().getAcceleration() != null;
  this.requestType = firstAttempt.getInfo().getRequestType();
  this.datasetVersion = firstAttempt.getInfo().getDatasetVersion();
  this.isComplete = isComplete(state);

  AccelerationDetails accelerationDetails = deserialize(lastAttempt.getAccelerationDetails());
  this.snowflakeAccelerated = this.accelerated && JobDetailsUI.wasSnowflakeAccelerated(accelerationDetails);
  this.spilled = lastAttempt.getInfo().getSpillJobDetails() != null;
}
  @Override
  public Job apply(Entry<JobId, JobResult> input) {
    return new Job(input.getKey(), input.getValue(), jobResultsStore);
  }
});
public JobUI(Job job) {
  this.jobId = job.getJobId();
  this.attempts = FluentIterable.from(job.getAttempts())
      .transform(new Function<JobAttempt, JobAttemptUI>() {
        @Override
        public JobAttemptUI apply(JobAttempt input) {
          return toUI(input);
        }
      }).toList();
  this.data = new JobDataWrapper(job.getData());
}
private void setupJobData() {
  final JobLoader jobLoader = isInternal
      ? new InternalJobLoader(exception, completionLatch, job.getJobId(), jobResultsStore, store)
      : new ExternalJobLoader(completionLatch, exception);
  final JobData result = jobResultsStore.cacheNewJob(job.getJobId(), new JobDataImpl(jobLoader, job.getJobId()));
  job.setData(result);
}
public static String getDownloadURL(Job job) {
  final JobInfo jobInfo = job.getJobAttempt().getInfo();
  if (jobInfo.getQueryType() == QueryType.UI_EXPORT) {
    return format("/job/%s/download", job.getJobId().getId());
  }
  return null;
}
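// A minimal sketch of exercising getDownloadURL() with Mockito deep stubs. The getter chain
// mirrors the accessors used throughout these snippets, but the test class name and job id are
// hypothetical, the Dremio types (Job, QueryType, JobResource) are assumed to be importable from
// the jobs module, and the real test setup may build a Job directly rather than mocking it.
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Assert;
import org.junit.Test;

public class TestGetDownloadURL {  // hypothetical test class
  @Test
  public void returnsDownloadPathOnlyForUiExportJobs() {
    // deep stubs let the getJobAttempt().getInfo() chain be stubbed without building real objects
    Job job = mock(Job.class, RETURNS_DEEP_STUBS);
    when(job.getJobAttempt().getInfo().getQueryType()).thenReturn(QueryType.UI_EXPORT);
    when(job.getJobId().getId()).thenReturn("1f2e3d");  // hypothetical job id

    Assert.assertEquals("/job/1f2e3d/download", JobResource.getDownloadURL(job));
  }
}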
public static MaterializationMetrics computeMetrics(Job job) {
  final int fetchSize = 1000;
  final JobData completeJobData = job.getData();
  // (elided in this excerpt: paging through completeJobData in batches of fetchSize to derive
  //  medianFileSize and numFiles for the materialization)
  return new MaterializationMetrics()  // assumed receiver for the setter chain below
      .setOriginalCost(job.getJobAttempt().getInfo().getOriginalCost())
      .setMedianFileSize(medianFileSize)
      .setNumFiles(numFiles);
}
public JobData(Job job, int offset, int limit) {
  this.delegate = job.getData().range(offset, limit);
  this.job = job;
}
@GET
@Path("/{id}/results")
public JobData getQueryResults(@PathParam("id") String id,
    @QueryParam("offset") @DefaultValue("0") Integer offset,
    @Valid @QueryParam("limit") @DefaultValue("100") Integer limit) {
  Preconditions.checkArgument(limit <= 500, "limit can not exceed 500 rows");
  try {
    Job job = jobs.getJob(new JobId(id), securityContext.getUserPrincipal().getName());
    if (job.getJobAttempt().getState() != JobState.COMPLETED) {
      throw new BadRequestException(String.format("Can not fetch details for a job that is in [%s] state.",
          job.getJobAttempt().getState()));
    }
    return new QueryJobResults(job, offset, limit).getData();
  } catch (JobNotFoundException e) {
    throw new NotFoundException(String.format("Could not find a job with id [%s]", id));
  }
}
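// A usage sketch of calling this endpoint with the standard JAX-RS client API. The "/job" prefix
// is inferred from getDownloadURL() above; the base URI, job id, and any authentication headers are
// placeholders that depend on the deployment, so treat this as an assumption-laden illustration
// rather than the project's own client code.
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

public class JobResultsClientSketch {  // hypothetical helper
  public static String fetchFirstPage(String jobId) {
    Client client = ClientBuilder.newClient();
    try {
      return client
          .target("http://localhost:9047/apiv2")  // placeholder base URI
          .path("job").path(jobId).path("results")
          .queryParam("offset", 0)
          .queryParam("limit", 100)                // the server rejects limit > 500
          .request(MediaType.APPLICATION_JSON)
          .get(String.class);
    } finally {
      client.close();
    }
  }
}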
ExternalJobResultListener(AttemptId attemptId, UserResponseHandler connection, Job job, BufferAllocator allocator) {
  super(attemptId, job, allocator, NoOpJobStatusListener.INSTANCE);
  this.connection = connection;
  this.externalId = JobsServiceUtil.getJobIdAsExternalId(job.getJobId());
}
private static QueryProfile getQueryProfile(final String query) throws JobNotFoundException {
  final Job job = getJobsService().submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(query, DEFAULT_USERNAME))
      .setQueryType(QueryType.UI_INTERNAL_RUN)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .setDatasetVersion(DatasetVersion.NONE)
      .build(), new NoOpJobStatusListener());
  job.getData().loadIfNecessary();
  return getJobsService().getProfile(job.getJobId(), 0);
}
public static JobDetailsUI of(Job job) {
  JobInfo jobInfo = job.getJobAttempt().getInfo();
  List<JobAttempt> attempts = job.getAttempts();
  AccelerationDetails accelerationDetails = deserialize(Util.last(attempts).getAccelerationDetails());
  return new JobDetailsUI(
      job.getJobId(),
      job.getJobAttempt().getDetails(),
      JobResource.getPaginationURL(job.getJobId()),
      attempts,
      JobResource.getDownloadURL(job),
      toJobFailureInfo(jobInfo),
      toJobCancellationInfo(Util.last(attempts)),
      job.getJobAttempt().getInfo().getDatasetVersion(),
      job.hasResults(),
      accelerationDetails,
      jobInfo.getSpillJobDetails());
}
// mark the first attempt as failed
job.getJobAttempt()
    .setAttemptId(AttemptIdUtils.toString(attemptId))
    .setState(JobState.FAILED);

// three failed re-attempts (the loop structure is reconstructed from the attempt-index checks below)
for (int i = 0; i < 3; i++) {
  attemptId = attemptId.nextAttempt();
  final JobAttempt jobAttempt = new JobAttempt()
      .setInfo(newJobInfo(job.getJobAttempt().getInfo(), 100L + 2 * i, 100L + 2 * i + 1, "failed"))
      .setAttemptId(AttemptIdUtils.toString(attemptId))
      .setState(JobState.FAILED)
      .setReason(i == 0 ? AttemptReason.OUT_OF_MEMORY : AttemptReason.SCHEMA_CHANGE)
      .setDetails(new JobDetails());
  job.addAttempt(jobAttempt);
}

// last attempt completes
attemptId = attemptId.nextAttempt();
final JobAttempt jobAttempt = new JobAttempt()
    .setInfo(newJobInfo(job.getJobAttempt().getInfo(), 106, 107, null))
    .setAttemptId(AttemptIdUtils.toString(attemptId))
    .setState(JobState.COMPLETED)
    .setReason(AttemptReason.SCHEMA_CHANGE)
    .setDetails(new JobDetails());
job.addAttempt(jobAttempt);

JobDetailsUI detailsUI = new JobDetailsUI(job.getJobId(), new JobDetails(),
    JobResource.getPaginationURL(job.getJobId()), job.getAttempts(), JobResource.getDownloadURL(job),
    null, null, null, false, null, null);

// verify every attempt: only the last one completed, and the reason reflects why the attempt was retried
for (int i = 0; i < 5; i++) {
  final AttemptDetailsUI attemptDetails = detailsUI.getAttemptDetails().get(i);
  final String reason = i == 0 ? "" : (i == 1 ? AttemptsUIHelper.OUT_OF_MEMORY_TEXT : AttemptsUIHelper.SCHEMA_CHANGE_TEXT);
  checkAttemptDetail(attemptDetails, job.getJobId(), i, i == 4 ? JobState.COMPLETED : JobState.FAILED, reason);
}
// the binding of the lookup result was dropped from the excerpt; Iterable<Job> is an assumed type
final Iterable<Job> jobsForDataset =
    jobsService.getJobsForDataset(datasetPath.toNamespaceKey(), version, query.getUsername(), MAX_JOBS_TO_SEARCH);
for (Job job : jobsForDataset) {
  if (job.getJobAttempt().getInfo().getQueryType() == queryType
      && query.getSql().equals(job.getJobAttempt().getInfo().getSql())
      && job.getJobAttempt().getState() == JobState.COMPLETED
      && job.hasResults()) {
    try {
      statusListener.jobCompleted();
      return new JobUI(jobsService.getJob(job.getJobId()));
    } catch (RuntimeException | JobNotFoundException e) {
      logger.debug("job {} not found for dataset {}", job.getJobId().getId(), messagePath, e);
      // keep looking; the remainder of this method is elided in the excerpt
    }
  }
}
@Override
public AttemptObserver newAttempt(AttemptId attemptId, AttemptReason reason) {
  // first attempt is already part of the job
  if (attemptId.getAttemptNum() > 0) {
    // create a new JobAttempt for the new attempt
    final JobInfo jobInfo = ProtostuffUtil.copy(job.getJobAttempt().getInfo())
        .setStartTime(System.currentTimeMillis()) // use different startTime for every attempt
        .setFailureInfo(null)
        .setDetailedFailureInfo(null)
        .setResultMetadataList(new ArrayList<ArrowFileMetadata>());

    final JobAttempt jobAttempt = new JobAttempt()
        .setInfo(jobInfo)
        .setReason(reason)
        .setEndpoint(identity)
        .setDetails(new JobDetails())
        .setState(ENQUEUED);

    job.addAttempt(jobAttempt);
  }
  job.getJobAttempt().setAttemptId(AttemptIdUtils.toString(attemptId));

  if (isInternal) {
    attemptObserver = new JobResultListener(attemptId, job, allocator, statusListener, listeners);
  } else {
    attemptObserver = new ExternalJobResultListener(attemptId, responseHandler, job, allocator);
  }

  return attemptObserver;
}
private JobResult toJobResult(Job job) {
  return new JobResult().setAttemptsList(job.getAttempts());
}
@Test
public void testDownloadCsv() throws Exception {
  Job job = datasetService.prepareDownload(dsg1DatasetPath, dsg1.getVersion(), DownloadFormat.CSV, -1,
      SampleDataPopulator.DEFAULT_USER_NAME);
  job.getData().loadIfNecessary();
  DownloadDataResponse downloadDataResponse = datasetService.downloadData(
      job.getJobAttempt().getInfo().getDownloadInfo(), SampleDataPopulator.DEFAULT_USER_NAME);
  validateAllRows(readDataCsv(downloadDataResponse.getInput()));
}
  @Override
  public JobUI answer(InvocationOnMock invocation) throws Throwable {
    String query = invocation.getArgumentAt(0, SqlQuery.class).getSql();
    Job job = mock(Job.class);
    JobData jobData = mock(JobData.class);
    when(job.getData()).thenReturn(jobData);

    if ("SELECT * FROM dataset".equals(query)) {
      when(jobData.getJobResultsTable()).thenReturn("jobResults.previewJob");
    } else if (query.contains("jobResults.previewJob")) {
      if (expFilter != null) {
        assertTrue(query, query.contains(expFilter));
      } else {
        assertFalse(query, query.contains("WHERE"));
      }

      JobDataFragment fragment = mock(JobDataFragment.class);
      when(jobData.truncate(1)).thenReturn(fragment);
      when(fragment.getSchema()).thenReturn(
          BatchSchema.newBuilder()
              .addField(new Field("dremio_selection_count", true, new ArrowType.Int(64, true), null))
              .build());
      when(fragment.extractValue("dremio_selection_count", 0)).thenReturn(expCount);
    }
    return new JobUI(job);
  }
}
public long getRowCount() {
  return job.getJobAttempt().getDetails().getOutputRecords();
}