@Override
public void logPlanProgress(SessionState ss) throws IOException {
  ss.getHiveHistory().logPlanProgress(queryPlan);
}
private void releasePlan(QueryPlan plan) {
  // Plan may be null if Driver.close is called in another thread for the same Driver object.
  lDrvState.stateLock.lock();
  try {
    if (plan != null) {
      plan.setDone();
      if (SessionState.get() != null) {
        try {
          SessionState.get().getHiveHistory().logPlanProgress(plan);
        } catch (Exception e) {
          // Log and ignore
          LOG.warn("Could not log query plan progress", e);
        }
      }
    }
  } finally {
    lDrvState.stateLock.unlock();
  }
}
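The guard-and-swallow idiom in releasePlan (check SessionState.get() for null, log, ignore failures) recurs across the call sites below. A minimal sketch of a shared helper that would centralize it; the class and method names are hypothetical, not part of Hive:

import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical helper, not part of Hive. It centralizes the
// "if (SessionState.get() != null) log, swallow failures" idiom seen above.
public final class HistoryLogging {
  private static final Logger LOG = LoggerFactory.getLogger(HistoryLogging.class);

  private HistoryLogging() {
  }

  /** Logs plan progress if a session is attached to this thread; never throws. */
  public static void logPlanProgressSafely(QueryPlan plan) {
    SessionState ss = SessionState.get();
    if (ss == null || plan == null) {
      return;
    }
    try {
      ss.getHiveHistory().logPlanProgress(plan);
    } catch (Exception e) {
      // History logging is best-effort; never fail the query for it.
      LOG.warn("Could not log query plan progress", e);
    }
  }
}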
/**
 * This method is called in the Driver on every task. It updates counters and calls execute(),
 * which is overridden in each task.
 *
 * @return return value of execute()
 */
public int executeTask() {
  try {
    SessionState ss = SessionState.get();
    this.setStarted();
    if (ss != null) {
      ss.getHiveHistory().logPlanProgress(queryPlan);
    }
    int retval = execute(driverContext);
    this.setDone();
    if (ss != null) {
      ss.getHiveHistory().logPlanProgress(queryPlan);
    }
    return retval;
  } catch (IOException e) {
    throw new RuntimeException("Unexpected error: " + e.getMessage(), e);
  }
}
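For comparison, runSequential below invokes an overload that receives the HiveHistory explicitly instead of reading it from SessionState. A sketch of what that overload plausibly looks like, inferred from the no-arg version above and the call in runSequential; this is an assumption about its shape, not Hive's exact source:

// Sketch of the HiveHistory-parameter variant of Task.executeTask implied by the
// tsk.executeTask(ss == null ? null : ss.getHiveHistory()) call below.
// Assumed shape, not Hive's exact code; belongs in the same Task class as above.
public int executeTask(HiveHistory hiveHistory) {
  try {
    this.setStarted();
    if (hiveHistory != null) {
      hiveHistory.logPlanProgress(queryPlan);
    }
    int retval = execute(driverContext);
    this.setDone();
    if (hiveHistory != null) {
      hiveHistory.logPlanProgress(queryPlan);
    }
    return retval;
  } catch (IOException e) {
    throw new RuntimeException("Unexpected error: " + e.getMessage(), e);
  }
}

Passing the history in directly decouples the task from thread-local SessionState lookups, which matches how runSequential resolves the null case at the call site.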
private void addToHistory(Keys key, String value) {
  if (SessionState.get() != null) {
    SessionState.get().getHiveHistory().setQueryProperty(queryState.getQueryId(), key, value);
  }
}
/**
 * Launches a task, and sets its exit value in the result variable.
 */
public void runSequential() {
  int exitVal = -101;
  try {
    exitVal = tsk.executeTask(ss == null ? null : ss.getHiveHistory());
  } catch (Throwable t) {
    if (tsk.getException() == null) {
      tsk.setException(t);
    }
    LOG.error("Error in executeTask", t);
  }
  result.setExitVal(exitVal);
  if (tsk.getException() != null) {
    result.setTaskError(tsk.getException());
  }
}
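runSequential publishes the outcome through a result holder. A self-contained sketch of such a holder, mirroring the setExitVal and setTaskError calls above; the field names and the running flag are illustrative, not Hive's actual TaskResult:

// Illustrative result holder; not Hive's actual TaskResult class.
public class TaskResult {
  private volatile int exitVal = -1;       // sentinel until the task finishes
  private volatile boolean running = true;
  private volatile Throwable taskError;

  public void setExitVal(int exitVal) {
    this.exitVal = exitVal;
    this.running = false;                  // publishing the exit value ends the run
  }

  public void setTaskError(Throwable taskError) {
    this.taskError = taskError;
  }

  public int getExitVal() {
    return exitVal;
  }

  public boolean isRunning() {
    return running;
  }

  public Throwable getTaskError() {
    return taskError;
  }
}

The volatile fields matter because a driver thread polls the result while the task runs on another thread, as the Driver.close comment in releasePlan above suggests.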
private void addToHistory(SparkJobRef jobRef) {
  console.printInfo("Starting Spark Job = " + jobRef.getJobId());
  if (SessionState.get() != null) {
    // A variant of this call site obtains the id via SessionState.get().getQueryId() instead.
    SessionState.get().getHiveHistory()
        .setQueryProperty(queryState.getQueryId(), Keys.SPARK_JOB_ID, jobRef.getJobId());
  }
}
HiveHistory hiveHist = sessionState.getHiveHistory();
if (null != hiveHist) {
  hiveHist.closeStream();
}
String jobname, int jobs, DriverContext cxt) throws HiveException {
  if (SessionState.get() != null) {
    SessionState.get().getHiveHistory().startTask(queryId, tsk, tsk.getClass().getName());
  }
// Four related fragments from the same job-progress path:

numMap = mappers.length;
if (ss != null) {
  ss.getHiveHistory().setTaskProperty(queryId, getId(),
      Keys.TASK_NUM_MAPPERS, Integer.toString(numMap));
}

numReduce = reducers.length;
if (ss != null) {
  ss.getHiveHistory().setTaskProperty(queryId, getId(),
      Keys.TASK_NUM_REDUCERS, Integer.toString(numReduce));
}

SessionState ss = SessionState.get();
if (ss != null) {
  ss.getHiveHistory().setTaskCounters(queryId, getId(), ctrs);
  ss.getHiveHistory().setTaskProperty(queryId, getId(),
      Keys.TASK_HADOOP_PROGRESS, output);
  if (ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS)) {
    ss.getHiveHistory().progressTask(queryId, this.task);
    this.callBackObj.logPlanProgress(ss);
  }
}

SessionState ss = SessionState.get();
if (ss != null) {
  ss.getHiveHistory().setTaskCounters(queryId, getId(), ctrs);
}
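Every fragment above repeats the same ss != null guard before touching the history. A hypothetical helper (not part of Hive) that funnels the setTaskProperty call sites through one guard, using the HiveHistory.Keys signature seen above:

import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.session.SessionState;

// Hypothetical guard helper for the repeated null checks above; not part of Hive.
public final class TaskHistory {
  private TaskHistory() {
  }

  /** Records a task property if a session (and thus a history file) exists. */
  public static void setTaskProperty(String queryId, String taskId, Keys key, String value) {
    SessionState ss = SessionState.get();
    if (ss != null) {
      ss.getHiveHistory().setTaskProperty(queryId, taskId, key, value);
    }
  }
}

With this helper, the mapper fragment above reduces to TaskHistory.setTaskProperty(queryId, getId(), Keys.TASK_NUM_MAPPERS, Integer.toString(numMap)); and likewise for the reducer count and progress properties.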
SessionState.get().getHiveHistory().startQuery(queryStr, queryId);
// A variant call site resolves the id from configuration instead:
// SessionState.get().getHiveHistory().startQuery(queryStr, conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
SessionState.get().getHiveHistory().logPlanProgress(plan);
SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_NUM_TASKS, String.valueOf(jobs));
SessionState.get().getHiveHistory().setIdToTableMap(plan.getIdToTableNameMap());
SessionState.get().getHiveHistory().setTaskProperty(queryId, tsk.getId(), Keys.TASK_RET_CODE, String.valueOf(exitVal));
SessionState.get().getHiveHistory().endTask(queryId, tsk);
SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(0));
SessionState.get().getHiveHistory().printRowCount(queryId);
SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(12));
SessionState.get().getHiveHistory().endQuery(queryId);
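Read in order, these call sites outline the history lifecycle of a single query. A condensed sketch of that sequence, assuming a non-null SessionState and omitting the surrounding Driver control flow:

// Condensed lifecycle sketch assembled from the call sites above; the Driver's
// control flow and error handling are omitted, and a session is assumed attached.
HiveHistory history = SessionState.get().getHiveHistory();

history.startQuery(queryStr, queryId);                  // query begins
history.logPlanProgress(plan);                          // initial plan snapshot
history.setQueryProperty(queryId, Keys.QUERY_NUM_TASKS, String.valueOf(jobs));
history.setIdToTableMap(plan.getIdToTableNameMap());

// ... per task: startTask / progressTask, then on completion ...
history.setTaskProperty(queryId, tsk.getId(), Keys.TASK_RET_CODE, String.valueOf(exitVal));
history.endTask(queryId, tsk);

history.setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(0)); // 0 on success
history.printRowCount(queryId);
history.endQuery(queryId);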
/**
 * from StreamJob.java.
 */
public void jobInfo(RunningJob rj) {
  if (ShimLoader.getHadoopShims().isLocalMode(job)) {
    console.printInfo("Job running in-process (local Hadoop)");
  } else {
    if (SessionState.get() != null) {
      SessionState.get().getHiveHistory().setTaskProperty(queryId, getId(),
          Keys.TASK_HADOOP_ID, rj.getID().toString());
    }
    console.printInfo(getJobStartMsg(rj.getID()) + ", Tracking URL = " + rj.getTrackingURL());
    // An older variant of this method builds the kill command with HiveConf.ConfVars.HADOOPBIN.
    console.printInfo("Kill Command = " + HiveConf.getVar(job, ConfVars.MAPREDBIN)
        + " job -kill " + rj.getID());
  }
}
@Override
public void run(SessionState session, Set<ReadEntity> reads, Set<WriteEntity> writes,
    UserGroupInformation ugi) throws Exception {
  HiveHistory history = session.getHiveHistory();
  session.out.println(" PreExecute Query " + session.getCmd());
}
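The fragment above is the body of a pre-execution hook. A complete minimal sketch built on the same PreExecute interface the fragment implements; the class name is illustrative:

import java.util.Set;

import org.apache.hadoop.hive.ql.history.HiveHistory;
import org.apache.hadoop.hive.ql.hooks.PreExecute;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.security.UserGroupInformation;

// Illustrative hook class; the run() body mirrors the fragment above.
public class SessionHistoryPreHook implements PreExecute {

  @Override
  public void run(SessionState session, Set<ReadEntity> reads, Set<WriteEntity> writes,
      UserGroupInformation ugi) throws Exception {
    HiveHistory history = session.getHiveHistory();
    session.out.println(" PreExecute Query " + session.getCmd());
  }
}

A hook like this is registered by adding its fully qualified class name to the hive.exec.pre.hooks configuration property.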