// Callable body of an anonymous task (class opens before this view): runs the
// spark command and returns (exitCode, output); on failure logs and returns
// a (-1, message) pair instead of propagating the exception.
@Override public Pair<Integer, String> call() throws Exception {
    Pair<Integer, String> result;
    try {
        result = exec.execute(cmd, patternedLogger);
    } catch (Exception e) {
        logger.error("error run spark job:", e);
        // surface the failure to the caller as a sentinel exit code
        result = new Pair<>(-1, e.getMessage());
    }
    return result;
} };
// Fragment (enclosing try-block opens before this view): runs the one-click
// migration command through a fresh executor, streaming output via
// PatternedLogger; IO failures are wrapped as InternalErrorException.
CliCommandExecutor exec = new CliCommandExecutor();
PatternedLogger patternedLogger = new PatternedLogger(logger);
exec.execute(cmd, patternedLogger);
} catch (IOException e) {
    throw new InternalErrorException("Failed to perform one-click migrating", e);
/**
 * Builds a command executor; when remote-command mode is enabled it is
 * configured to run over SSH against the Hadoop CLI host.
 *
 * @return a freshly constructed {@link CliCommandExecutor}
 * @throws IOException if the executor cannot be configured
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    final CliCommandExecutor executor = new CliCommandExecutor();
    final boolean runRemotely = getRunAsRemoteCommand();
    if (runRemotely) {
        executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliPort(),
                getRemoteHadoopCliUsername(), getRemoteHadoopCliPassword());
    }
    return executor;
}
/**
 * Copies a local file into {@code destDir}, delegating to the remote copy
 * path when a remote host is configured.
 *
 * @throws IOException if the copy fails
 */
public void copyFile(String localFile, String destDir) throws IOException {
    if (remoteHost != null) {
        copyRemote(localFile, destDir);
    } else {
        copyNative(localFile, destDir);
    }
}
// Fragment (anonymous-class body continues past this view): logs the command,
// then prepares a single-thread executor and a PatternedLogger whose listener
// reacts to matched log lines.
logger.info("cmd: " + cmd);
final ExecutorService executorService = Executors.newSingleThreadExecutor();
final CliCommandExecutor exec = new CliCommandExecutor();
final PatternedLogger patternedLogger = new PatternedLogger(logger, new PatternedLogger.ILogListener() {
    @Override
// Fragment (anonymous-class body continues past this view): same pattern as
// above — log the command, create a single-thread executor, and attach a
// listener-backed PatternedLogger for streamed command output.
logger.info("cmd: " + cmd);
final ExecutorService executorService = Executors.newSingleThreadExecutor();
final CliCommandExecutor exec = new CliCommandExecutor();
final PatternedLogger patternedLogger = new PatternedLogger(logger, new PatternedLogger.ILogListener() {
    @Override
/**
 * Copies {@code localFile} into {@code destDir}; uses the native filesystem
 * copy unless a remote host has been configured.
 *
 * @throws IOException if the copy fails
 */
public void copyFile(String localFile, String destDir) throws IOException {
    if (remoteHost == null) {
        copyNative(localFile, destDir);
        return;
    }
    copyRemote(localFile, destDir);
}
/**
 * Convenience overload: executes the command and echoes its output to
 * standard out through a {@code SoutLogger}.
 *
 * @return pair of (exit code, captured output)
 * @throws IOException if execution fails
 */
public Pair<Integer, String> execute(String command) throws IOException {
    final SoutLogger consoleLogger = new SoutLogger();
    return execute(command, consoleLogger);
}
/**
 * Verifies that {@code HiveCmdBuilder} in beeline mode emits a command that
 * writes the HQL statements to a temp file (via a heredoc) and passes that
 * file to beeline with {@code -f}.
 */
@Test
public void testBeeline() throws IOException {
    // System.lineSeparator() replaces the JDK-internal
    // sun.security.action.GetPropertyAction, which is encapsulated in
    // modern JDKs and fails to compile/run there.
    String lineSeparator = System.lineSeparator();
    System.setProperty("kylin.source.hive.client", "beeline");
    System.setProperty("kylin.source.hive.beeline-shell", "/spark-client/bin/beeline");
    System.setProperty("kylin.source.hive.beeline-params", "-u jdbc_url");

    HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.addStatement("USE default;");
    hiveCmdBuilder.addStatement("DROP TABLE `test`;");
    hiveCmdBuilder.addStatement("SHOW TABLES;");

    String cmd = hiveCmdBuilder.build();
    // The command ends with "-f <hqlFile>;exit $ret_code" — extract the file path.
    String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
    hqlFile = hqlFile.substring(0, hqlFile.length() - ";exit $ret_code".length());

    // Execute only the heredoc prefix (up to the second "EOL") that creates the HQL file.
    String createFileCmd = cmd.substring(0, cmd.indexOf("EOL\n", cmd.indexOf("EOL\n") + 1) + 3);
    CliCommandExecutor cliCommandExecutor = new CliCommandExecutor();
    cliCommandExecutor.execute(createFileCmd); // result intentionally unused; file content is asserted below

    String hqlStatement = FileUtils.readFileToString(new File(hqlFile), Charset.defaultCharset());
    assertEquals(
            "USE default;" + lineSeparator + "DROP TABLE `test`;" + lineSeparator + "SHOW TABLES;" + lineSeparator,
            hqlStatement);
    assertBeelineCmd(cmd);
    FileUtils.forceDelete(new File(hqlFile));
}
/**
 * Creates a shell command that will run on a remote host over SSH.
 *
 * @param executeCmd command line to run
 * @param out        sink that receives command output
 * @param host       remote host name
 * @param user       SSH user
 * @param password   SSH password
 * @param async      whether the command runs asynchronously
 */
protected ShellCmd(String executeCmd, ICommandOutput out, String host, String user, String password,
        boolean async) {
    this.isAsync = async;
    this.executeCommand = executeCmd;
    this.output = out;
    this.cliCommandExecutor = new CliCommandExecutor();
    this.cliCommandExecutor.setRunAtRemote(host, user, password);
}
/**
 * Copies a file to the destination directory. Remote configuration decides
 * whether the copy happens locally or over the network.
 *
 * @throws IOException if the copy fails
 */
public void copyFile(String localFile, String destDir) throws IOException {
    final boolean local = (remoteHost == null);
    if (local) {
        copyNative(localFile, destDir);
    } else {
        copyRemote(localFile, destDir);
    }
}
/**
 * Runs a shell command and writes its stdout to exportDir/destDir/filename.
 * Any failure is logged and swallowed so one bad command does not abort the
 * overall export.
 */
private void addShellOutput(String cmd, String destDir, String filename) {
    try {
        File targetDir = exportDir;
        if (!StringUtils.isEmpty(destDir)) {
            targetDir = new File(exportDir, destDir);
            FileUtils.forceMkdir(targetDir);
        }
        String output = cmdExecutor.execute(cmd).getSecond();
        FileUtils.writeStringToFile(new File(targetDir, filename), output, Charset.defaultCharset());
    } catch (Exception e) {
        logger.warn("Failed to run command: " + cmd + ".", e);
    }
}
}
/**
 * Builds a command executor; points it at the configured Hadoop CLI host
 * when remote-command mode is on.
 *
 * @return a fresh {@link CliCommandExecutor}
 * @throws IOException if the executor cannot be configured
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    CliCommandExecutor cmdExecutor = new CliCommandExecutor();
    if (getRunAsRemoteCommand()) {
        cmdExecutor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliUsername(),
                getRemoteHadoopCliPassword());
    }
    return cmdExecutor;
}
/**
 * Places {@code localFile} under {@code destDir}, choosing the native or
 * remote transfer path based on whether a remote host is set.
 *
 * @throws IOException if the transfer fails
 */
public void copyFile(String localFile, String destDir) throws IOException {
    if (null == remoteHost) {
        copyNative(localFile, destDir);
    } else {
        copyRemote(localFile, destDir);
    }
}
/**
 * Kills a YARN application by shelling out to {@code yarn application -kill}.
 *
 * @param appId the YARN application id to kill
 * @throws IOException          if the CLI command fails
 * @throws InterruptedException if the caller thread is interrupted
 */
private void killApp(String appId) throws IOException, InterruptedException {
    final String killCmd = String.format(Locale.ROOT, "yarn application -kill %s", appId);
    KylinConfig.getInstanceFromEnv().getCliCommandExecutor().execute(killCmd);
}
protected ShellCmd(String executeCmd, ICommandOutput out, String host, String user, String password, boolean async) { this.executeCommand = executeCmd; this.output = out; cliCommandExecutor = new CliCommandExecutor(); cliCommandExecutor.setRunAtRemote(host, user, password); this.isAsync = async; }
/**
 * Resolves the default MapReduce classpath by shelling out to
 * {@code mapred classpath}; the ':' separators are rewritten to ',' for
 * downstream consumers. Returns "" (after logging) if the command fails.
 */
private String getDefaultMapRedClasspath() {
    try {
        CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
        String rawOutput = executor.execute("mapred classpath").getSecond();
        return rawOutput.trim().replace(':', ',');
    } catch (IOException e) {
        logger.error("Failed to run: 'mapred classpath'.", e);
        return "";
    }
}
/**
 * Creates a command executor, switched to remote SSH execution when
 * remote-command mode is enabled in the configuration.
 *
 * @return a new {@link CliCommandExecutor}
 * @throws IOException if configuration fails
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    final CliCommandExecutor executor = new CliCommandExecutor();
    final boolean remote = getRunAsRemoteCommand();
    if (remote) {
        executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliUsername(),
                getRemoteHadoopCliPassword());
    }
    return executor;
}
/**
 * Checks the status of a YARN application via {@code yarn application -status}.
 *
 * @param applicationId the YARN application id
 * @return true when the app's State is RUNNING or its Final-State is SUCCEEDED
 * @throws IOException if the status command fails
 */
private boolean isYarnAppSucc(String applicationId) throws IOException {
    final String yarnCmd = "yarn application -status " + applicationId;
    final String cmdOutput = kylinConfig.getCliCommandExecutor().execute(yarnCmd).getSecond();

    // Parse "key : value" lines of the status report into a map.
    final Map<String, String> params = Maps.newHashMap();
    for (String line : StringUtil.split(cmdOutput, "\n")) {
        String[] kv = StringUtil.split(line, ":");
        if (kv.length >= 2) {
            params.put(kv[0].trim(), kv[1].trim());
        }
    }
    for (Map.Entry<String, String> entry : params.entrySet()) {
        logger.info(entry.getKey() + ":" + entry.getValue());
    }

    // "X".equals(map.get(k)) is null-safe, so the containsKey check is implied.
    if ("RUNNING".equals(params.get("State"))) {
        return true;
    }
    return "SUCCEEDED".equals(params.get("Final-State"));
}
/**
 * Builds a command executor. In local mode it is returned as-is; in remote
 * mode it is bound to the configured Hadoop CLI host, port, and credentials.
 *
 * @return a new {@link CliCommandExecutor}
 * @throws IOException if configuration fails
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    CliCommandExecutor executor = new CliCommandExecutor();
    if (!getRunAsRemoteCommand()) {
        return executor;
    }
    executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliPort(),
            getRemoteHadoopCliUsername(), getRemoteHadoopCliPassword());
    return executor;
}