public void addJobInfo(String id, String key, String value) {
    Map<String, String> info = Maps.newHashMap();
    info.put(key, value);
    addJobInfo(id, info);
}
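// Usage sketch (illustrative, not from Kylin): the single-key overload suits one-off
// properties, while the Map overload batches several updates into one call.
// ExecutableManager and ExecutableConstants come from the surrounding codebase;
// the job id and values below are hypothetical.
import java.util.Map;
import com.google.common.collect.Maps;

void recordJobProperties(ExecutableManager mgr, String jobId, String appId, String trackingUrl) {
    // one property via the convenience overload
    mgr.addJobInfo(jobId, ExecutableConstants.YARN_APP_ID, appId);

    // several properties in one call via the Map overload
    Map<String, String> info = Maps.newHashMap();
    info.put(ExecutableConstants.YARN_APP_URL, trackingUrl);
    mgr.addJobInfo(jobId, info);
}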
@Override
public void onLogEvent(String infoKey, Map<String, String> info) {
    // only care about three properties here
    if (ExecutableConstants.SPARK_JOB_ID.equals(infoKey)
            || ExecutableConstants.YARN_APP_ID.equals(infoKey)
            || ExecutableConstants.YARN_APP_URL.equals(infoKey)) {
        getManager().addJobInfo(getId(), info);
    }
}
});
public final void addExtraInfo(String key, String value) {
    getManager().addJobInfo(getId(), key, value);
}
protected void onExecuteError(Throwable exception, ExecutableContext executableContext) {
    if (!isDiscarded()) {
        getManager().addJobInfo(getId(), END_TIME, Long.toString(System.currentTimeMillis()));
        String output = null;
        if (exception != null) {
            final StringWriter out = new StringWriter();
            exception.printStackTrace(new PrintWriter(out));
            output = out.toString();
        }
        getManager().updateJobOutput(getId(), ExecutableState.ERROR, null, output);
    }
}
private ExecuteResult onResumed(String appId, ExecutableManager mgr) throws ExecuteException {
    Map<String, String> info = new HashMap<>();
    try {
        logger.info("spark_job_id:" + appId + " resumed");
        info.put(ExecutableConstants.SPARK_JOB_ID, appId);
        while (!isPaused() && !isDiscarded()) {
            String status = getAppState(appId);
            if ("FAILED".equals(status) || "KILLED".equals(status)) {
                mgr.updateJobOutput(getId(), ExecutableState.ERROR, null, appId + " has failed");
                return new ExecuteResult(ExecuteResult.State.FAILED, appId + " has failed");
            }
            if ("SUCCEEDED".equals(status)) {
                mgr.addJobInfo(getId(), info);
                return new ExecuteResult(ExecuteResult.State.SUCCEED, appId + " has finished");
            }
            Thread.sleep(5000);
        }
        killAppRetry(appId);
        if (isDiscarded()) {
            return new ExecuteResult(ExecuteResult.State.DISCARDED, appId + " is discarded");
        } else {
            return new ExecuteResult(ExecuteResult.State.STOPPED, appId + " is stopped");
        }
    } catch (Exception e) {
        logger.error("error running spark job:", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
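// getAppState(appId) is not shown in this listing. A minimal sketch of one plausible
// implementation, assuming the YARN client API is available on the classpath; the method
// name and YarnClient usage are assumptions, not Kylin's actual code.
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Hypothetical helper: resolve the final state of a YARN application by id.
// Returns strings such as "SUCCEEDED", "FAILED", "KILLED", matching the checks above.
static String getAppState(String appId) throws Exception {
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(new YarnConfiguration());
    yarnClient.start();
    try {
        ApplicationReport report = yarnClient.getApplicationReport(ApplicationId.fromString(appId));
        // FinalApplicationStatus stays UNDEFINED while the application is still running.
        return report.getFinalApplicationStatus().toString();
    } finally {
        yarnClient.stop();
    }
}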
protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
    String cmd = getParam("cmd");
    cmd = String.format(Locale.ROOT,
            "%s/bin/sqoop import -Dorg.apache.sqoop.splitter.allow_text_splitter=true "
                    + generateSqoopConfigArgString() + cmd,
            config.getSqoopHome());
    stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", cmd));
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
    String cmd = getParam("cmd");
    stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", cmd));
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
protected void createFlatHiveTable(KylinConfig config) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getInitStatement());
    hiveCmdBuilder.addStatement(getCreateTableStatement());
    final String cmd = hiveCmdBuilder.toString();
    stepLogger.log("Create and distribute table, cmd: ");
    stepLogger.log(cmd);
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    Map<String, String> info = stepLogger.getInfo();
    // get the flat Hive table size
    Matcher matcher = HDFS_LOCATION.matcher(cmd);
    if (matcher.find()) {
        String hiveFlatTableHdfsUrl = matcher.group(1);
        long size = getFileSize(hiveFlatTableHdfsUrl);
        info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, "" + size);
        logger.info("HDFS_Bytes_Written: " + size);
    }
    getManager().addJobInfo(getId(), info);
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
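// getFileSize(hiveFlatTableHdfsUrl) is not shown here. A minimal sketch of how it could
// be implemented with Hadoop's FileSystem API; the helper name and Configuration handling
// are assumptions, not Kylin's actual implementation.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper: total bytes under an HDFS path (file or directory).
static long getFileSize(String hdfsUrl) throws IOException {
    Path path = new Path(hdfsUrl);
    FileSystem fs = path.getFileSystem(new Configuration());
    // ContentSummary aggregates the size of every file under the path.
    return fs.getContentSummary(path).getLength();
}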
getManager().addJobInfo(getId(), ExecutableConstants.HDFS_BYTES_WRITTEN, "" + dataSize);
return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
// fragment: success path, then failure path (intervening code elided)
getManager().addJobInfo(getId(), joblogInfo);
return new ExecuteResult(ExecuteResult.State.SUCCEED, patternedLogger.getBufferedLog());
// ...
getManager().addJobInfo(getId(), extra);
return new ExecuteResult(ExecuteResult.State.ERROR, result != null ? result.getSecond() : "");
} catch (Exception e) {
setMapReduceWaitTime(waitTime);
mgr.addJobInfo(getId(), hadoopCmdOutput.getInfo());
status = newStatus;
if (status.isComplete()) {
    final Map<String, String> info = hadoopCmdOutput.getInfo();
    readCounters(hadoopCmdOutput, info);
    mgr.addJobInfo(getId(), info);
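// readCounters(hadoopCmdOutput, info) is not shown in this listing. A hedged sketch of
// how MapReduce counters could be copied into the info map, assuming the underlying
// mapreduce Job is available; the signature is simplified for illustration and is not
// Kylin's actual code.
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.Job;

// Hypothetical sketch: record the HDFS bytes written by a finished MR job.
static void readCounters(Job job, Map<String, String> info) throws IOException {
    Counters counters = job.getCounters();
    // the scheme string identifies the filesystem ("hdfs" is normalized internally)
    long bytesWritten = counters.findCounter("hdfs", FileSystemCounter.BYTES_WRITTEN).getValue();
    info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, String.valueOf(bytesWritten));
}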
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        logger.info("executing:" + getCmd());
        final PatternedLogger patternedLogger = new PatternedLogger(logger);
        final Pair<Integer, String> result = context.getConfig().getCliCommandExecutor().execute(getCmd(), patternedLogger);
        getManager().addJobInfo(getId(), patternedLogger.getInfo());
        return result.getFirst() == 0
                ? new ExecuteResult(ExecuteResult.State.SUCCEED, result.getSecond())
                : ExecuteResult.createFailed(new ShellException(result.getSecond()));
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return ExecuteResult.createError(e);
    }
}
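// PatternedLogger scans command output for known patterns and exposes the matches via
// getInfo(), which is how the addJobInfo calls above get populated. A minimal sketch of
// that idea; the class name and regex below are illustrative, not Kylin's implementation.
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class PatternMatchingLogger {
    // Hypothetical pattern: YARN client lines like "Submitted application application_1558000000000_0001"
    private static final Pattern YARN_APP = Pattern.compile("Submitted application (application_\\S+)");
    private final Map<String, String> info = new HashMap<>();

    // called once per line of command output
    void log(String line) {
        Matcher m = YARN_APP.matcher(line);
        if (m.find()) {
            info.put(ExecutableConstants.YARN_APP_ID, m.group(1));
        }
    }

    // everything matched so far, ready to hand to addJobInfo
    Map<String, String> getInfo() {
        return info;
    }
}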
protected void createFlatHiveTable(KylinConfig config) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getCmd());
    final String cmd = hiveCmdBuilder.toString();
    stepLogger.log("cmd: ");
    stepLogger.log(cmd);
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
private void redistributeTable(KylinConfig config, int numReducers) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getInitStatement());
    hiveCmdBuilder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
    hiveCmdBuilder.addStatement("set hive.merge.mapredfiles=false;\n");
    hiveCmdBuilder.addStatement(getRedistributeDataStatement());
    final String cmd = hiveCmdBuilder.toString();
    stepLogger.log("Redistribute table, cmd: ");
    stepLogger.log(cmd);
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to redistribute flat hive table");
    }
}