/**
 * Looks up this job's info from the Spark status tracker.
 *
 * @return the {@code SparkJobInfo} for {@code jobId}, or {@code null} if the
 *     tracker no longer has a record of the job (trackers only retain recent jobs)
 */
private SparkJobInfo getJobInfo() {
  return sparkContext
      .statusTracker()
      .getJobInfo(jobId);
}
/**
 * Fetches the status-tracker record for this job.
 *
 * @return the tracker's {@code SparkJobInfo} for {@code jobId}; may be
 *     {@code null} when the tracker has already evicted the job
 */
private SparkJobInfo getJobInfo() {
  return sparkContext
      .statusTracker()
      .getJobInfo(jobId);
}
@Override
public SparkJobInfo call(JobContext jc) throws Exception {
  // Prefer the live status tracker; it is authoritative while the job is retained.
  SparkJobInfo tracked = jc.sc().statusTracker().getJobInfo(sparkJobId);
  if (tracked != null) {
    return tracked;
  }
  // Tracker has no record: fall back to the monitored future registered for this
  // client job, but only when exactly one action maps to it (otherwise ambiguous).
  List<JavaFutureAction<?>> actions = jc.getMonitoredJobs().get(clientJobId);
  if (actions != null && actions.size() == 1) {
    JavaFutureAction<?> action = actions.get(0);
    if (action.isDone()) {
      boolean succeeded;
      try {
        action.get();
        succeeded = true;
      } catch (Exception e) {
        LOG.error("Failed to run job " + sparkJobId, e);
        succeeded = false;
      }
      return getDefaultJobInfo(sparkJobId,
          succeeded ? JobExecutionStatus.SUCCEEDED : JobExecutionStatus.FAILED);
    }
  }
  // Neither the tracker nor a completed future can tell us anything definite.
  return getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
}
}
@Override
public SparkJobInfo call(JobContext jc) throws Exception {
  // First choice: the Spark status tracker's own view of the job.
  SparkJobInfo fromTracker = jc.sc().statusTracker().getJobInfo(sparkJobId);
  if (fromTracker != null) {
    return fromTracker;
  }
  // Second choice: derive a terminal status from the single monitored future,
  // if one exists for this client job and it has already finished.
  List<JavaFutureAction<?>> monitored = jc.getMonitoredJobs().get(clientJobId);
  if (monitored != null && monitored.size() == 1) {
    JavaFutureAction<?> future = monitored.get(0);
    if (future.isDone()) {
      boolean ok;
      try {
        future.get();
        ok = true;
      } catch (Exception e) {
        LOG.error("Failed to run job " + sparkJobId, e);
        ok = false;
      }
      return getDefaultJobInfo(sparkJobId,
          ok ? JobExecutionStatus.SUCCEEDED : JobExecutionStatus.FAILED);
    }
  }
  // No information available from either source.
  return getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
}
}
// NOTE(review): this fragment is truncated mid-method — the rest of this call(...)
// body (and its closing braces) is not visible in this chunk, so it does not compile
// as-is. It appears to begin the same tracker-then-monitored-jobs fallback as the
// SparkJobInfo variant above — TODO confirm against the full source before editing.
@Override public ArrayList<SparkStageInfo> call(JobContext jc) throws Exception { SparkJobInfo jobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId); if (jobInfo == null) { ArrayList<JavaFutureAction<?>> list = new ArrayList<>(jc.getMonitoredJobs().get(clientJobId));
// NOTE(review): orphaned statement fragment — the enclosing method's signature and
// closing braces are outside this chunk (the `if` here is unbalanced). It guards on
// the tracker returning a job with at least one stage id; presumably the missing
// body reads stage info for those ids — verify against the full source.
SparkJobInfo sparkJobInfo = jc.sc().statusTracker().getJobInfo(sparkJobId); if (sparkJobInfo != null && sparkJobInfo.stageIds() != null && sparkJobInfo.stageIds().length > 0) {
/**
 * Fetches job info for the given job id from the context's status tracker.
 *
 * @param jobID the Spark job id to look up
 * @param sparkContext context whose status tracker is queried
 * @return the non-null {@code SparkJobInfo} for {@code jobID}
 * @throws RuntimeException if the tracker has no record of the job
 */
private static SparkJobInfo getJobInfo(int jobID, JavaSparkContext sparkContext) {
  final SparkJobInfo info = sparkContext.statusTracker().getJobInfo(jobID);
  if (info == null) {
    throw new RuntimeException("No jobInfo available for jobID " + jobID);
  }
  return info;
}
/**
 * Queries the status tracker for this job's current info.
 *
 * @return the {@code SparkJobInfo} for {@code jobId}, or {@code null} if the
 *     tracker no longer retains the job
 */
private SparkJobInfo getJobInfo() {
  return sparkContext
      .statusTracker()
      .getJobInfo(jobId);
}
@Override
public SparkJobInfo call(JobContext jc) throws Exception {
  // Consult the status tracker first — it knows about recently retained jobs.
  SparkJobInfo trackerInfo = jc.sc().statusTracker().getJobInfo(sparkJobId);
  if (trackerInfo != null) {
    return trackerInfo;
  }
  // Otherwise fall back on the single monitored future for this client job,
  // mapping its completion outcome onto a synthetic terminal status.
  List<JavaFutureAction<?>> futures = jc.getMonitoredJobs().get(clientJobId);
  if (futures != null && futures.size() == 1) {
    JavaFutureAction<?> future = futures.get(0);
    if (future.isDone()) {
      boolean completedOk;
      try {
        future.get();
        completedOk = true;
      } catch (Exception e) {
        LOG.error("Failed to run job " + sparkJobId, e);
        completedOk = false;
      }
      return getDefaultJobInfo(sparkJobId,
          completedOk ? JobExecutionStatus.SUCCEEDED : JobExecutionStatus.FAILED);
    }
  }
  // Nothing definitive to report from either source.
  return getDefaultJobInfo(sparkJobId, JobExecutionStatus.UNKNOWN);
}
}