protected void walToHFiles(List<String> dirPaths, List<String> tableList) throws IOException { Tool player = new WALPlayer(); // Player reads all files in arbitrary directory structure and creates // a Map task for each file. We use ';' as separator // because WAL file names contains ',' String dirs = StringUtils.join(dirPaths, ';'); String jobname = "Incremental_Backup-" + backupId ; Path bulkOutputPath = getBulkOutputDir(); conf.set(WALPlayer.BULK_OUTPUT_CONF_KEY, bulkOutputPath.toString()); conf.set(WALPlayer.INPUT_FILES_SEPARATOR_KEY, ";"); conf.setBoolean(WALPlayer.MULTI_TABLES_SUPPORT, true); conf.set(JOB_NAME_CONF_KEY, jobname); String[] playerArgs = { dirs, StringUtils.join(tableList, ",") }; try { player.setConf(conf); int result = player.run(playerArgs); if(result != 0) { throw new IOException("WAL Player failed"); } conf.unset(WALPlayer.INPUT_FILES_SEPARATOR_KEY); conf.unset(JOB_NAME_CONF_KEY); } catch (IOException e) { throw e; } catch (Exception ee) { throw new IOException("Can not convert from directory " + dirs + " (check Hadoop, HBase and WALPlayer M/R job logs) ", ee); } }
public static int runMRJob(Tool tool, String[] args) throws Exception { Configuration conf = tool.getConf(); if (conf == null) { conf = new Configuration(); } GenericOptionsParser parser = getParser(conf, args); //set the configuration back, so that Tool can configure itself tool.setConf(conf); //get the args w/o generic hadoop args String[] toolArgs = parser.getRemainingArgs(); return tool.run(toolArgs); }
result = player.run(playerArgs); if (succeeded(result)) {
/**
 * Dispatches to the validator named by the first argument; remaining
 * arguments are forwarded to the selected validator.
 */
@Override
public int run(String[] args) throws Exception {
  if (args.length == 0) {
    printUsage();
    return AbstractHBaseTool.EXIT_FAILURE;
  }
  final String command = args[0];
  final Tool delegate;
  switch (command) {
    case VALIDATE_CP_NAME:
      delegate = new CoprocessorValidator();
      break;
    case VALIDATE_DBE_NAME:
      delegate = new DataBlockEncodingValidator();
      break;
    case VALIDATE_HFILE:
      delegate = new HFileContentValidator();
      break;
    case "-h":
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
    default:
      System.err.println("Unknown command: " + command);
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
  }
  delegate.setConf(getConf());
  // Strip the command name before handing the arguments down.
  return delegate.run(Arrays.copyOfRange(args, 1, args.length));
}
return tool.run(toolArgs);
int result = player.run(playerArgs); if (!succeeded(result)) { throw new IOException("Can not merge backup images for " + dirs
int result = player.run(playerArgs); if (succeeded(result)) {
@Override public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException { int rc; // Creates the command-line parameters for distcp String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()}; try { Class clazzDistCp = Class.forName("org.apache.hadoop.tools.distcp2"); Constructor c = clazzDistCp.getConstructor(); c.setAccessible(true); Tool distcp = (Tool)c.newInstance(); distcp.setConf(conf); rc = distcp.run(params); } catch (ClassNotFoundException e) { throw new IOException("Cannot find DistCp class package: " + e.getMessage()); } catch (NoSuchMethodException e) { throw new IOException("Cannot get DistCp constructor: " + e.getMessage()); } catch (Exception e) { throw new IOException("Cannot execute DistCp process: " + e, e); } return (0 == rc) ? true : false; }
@Override public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException { int rc; // Creates the command-line parameters for distcp String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()}; try { Class clazzDistCp = Class.forName("org.apache.hadoop.tools.distcp2"); Constructor c = clazzDistCp.getConstructor(); c.setAccessible(true); Tool distcp = (Tool)c.newInstance(); distcp.setConf(conf); rc = distcp.run(params); } catch (ClassNotFoundException e) { throw new IOException("Cannot find DistCp class package: " + e.getMessage()); } catch (NoSuchMethodException e) { throw new IOException("Cannot get DistCp constructor: " + e.getMessage()); } catch (Exception e) { throw new IOException("Cannot execute DistCp process: " + e, e); } return (0 == rc) ? true : false; }
@Override public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException { int rc; // Creates the command-line parameters for distcp String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()}; try { Class clazzDistCp = Class.forName("org.apache.hadoop.tools.DistCp"); Constructor c = clazzDistCp.getConstructor(); c.setAccessible(true); Tool distcp = (Tool)c.newInstance(); distcp.setConf(conf); rc = distcp.run(params); } catch (ClassNotFoundException e) { throw new IOException("Cannot find DistCp class package: " + e.getMessage()); } catch (NoSuchMethodException e) { throw new IOException("Cannot get DistCp constructor: " + e.getMessage()); } catch (Exception e) { throw new IOException("Cannot execute DistCp process: " + e, e); } return (0 == rc); }
@Override public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException { int rc; // Creates the command-line parameters for distcp String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()}; try { Class clazzDistCp = Class.forName("org.apache.hadoop.tools.DistCp"); Constructor c = clazzDistCp.getConstructor(); c.setAccessible(true); Tool distcp = (Tool)c.newInstance(); distcp.setConf(conf); rc = distcp.run(params); } catch (ClassNotFoundException e) { throw new IOException("Cannot find DistCp class package: " + e.getMessage()); } catch (NoSuchMethodException e) { throw new IOException("Cannot get DistCp constructor: " + e.getMessage()); } catch (Exception e) { throw new IOException("Cannot execute DistCp process: " + e, e); } return (0 == rc); }
@Override public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException { int rc; // Creates the command-line parameters for distcp String[] params = {"-update", "-skipcrccheck", src.toString(), dst.toString()}; try { Class clazzDistCp = Class.forName("org.apache.hadoop.tools.DistCp"); Constructor c = clazzDistCp.getConstructor(); c.setAccessible(true); Tool distcp = (Tool)c.newInstance(); distcp.setConf(conf); rc = distcp.run(params); } catch (ClassNotFoundException e) { throw new IOException("Cannot find DistCp class package: " + e.getMessage()); } catch (NoSuchMethodException e) { throw new IOException("Cannot get DistCp constructor: " + e.getMessage()); } catch (Exception e) { throw new IOException("Cannot execute DistCp process: " + e, e); } return (0 == rc); }
/**
 * Instantiates the configured {@code tool} class reflectively and runs it with
 * the given arguments converted to strings.
 *
 * @param objects positional arguments; null entries are passed through as null
 * @throws Exception if the tool cannot be instantiated or fails while running
 */
protected void doExecute(List<Object> objects) throws Exception {
  org.apache.hadoop.conf.Configuration conf = getConfiguration();
  Tool run;
  try {
    // Prefer the no-arg constructor and inject the configuration afterwards.
    Constructor<?> cns = tool.getDeclaredConstructor();
    cns.setAccessible(true);
    run = (Tool) cns.newInstance();
    run.setConf(conf);
  } catch (NoSuchMethodException e) {
    // Fall back to a Configuration-taking constructor.
    Constructor<?> cns = tool.getDeclaredConstructor(org.apache.hadoop.conf.Configuration.class);
    cns.setAccessible(true);
    run = (Tool) cns.newInstance(conf);
  }
  String[] args = new String[objects.size()];
  for (int i = 0; i < args.length; i++) {
    args[i] = objects.get(i) != null ? objects.get(i).toString() : null;
  }
  // NOTE(review): the tool's exit code is discarded here — confirm callers do
  // not need it before widening this method's return type.
  run.run(args);
}
/** * Program entry. * @param args arguments * @return the exit code * @throws Exception if failed to execute command */ public static int exec(String... args) throws Exception { LOG.info("[OT-CLEAN-I00000] Start Hadoop FS cleaning tool"); long start = System.currentTimeMillis(); Tool tool = new Clean(); tool.setConf(new Configuration()); int exit = tool.run(args); // no generic options long end = System.currentTimeMillis(); LOG.info(MessageFormat.format( "[OT-CLEAN-I00999] Finish Hadoop FS cleaning tool (exit-code={0}, elapsed={1}ms)", exit, end - start)); return exit; }
/**
 * Validates the two positional arguments, optionally deletes the output path,
 * then delegates to {@link Freebase2RDFDriver}.
 *
 * @param args expected: {@code <input> <output>}
 * @return the driver's exit code, or -1 on bad usage
 */
@Override
public int run(String[] args) throws Exception {
  if (args.length != 2) {
    System.err.printf("Usage: %s [generic options] <input> <output>\n", getClass().getName());
    ToolRunner.printGenericCommandUsage(System.err);
    return -1;
  }
  Configuration configuration = getConf();
  boolean overrideOutput =
      configuration.getBoolean(OPTION_OVERRIDE_OUTPUT, OPTION_OVERRIDE_OUTPUT_DEFAULT);
  FileSystem fs = FileSystem.get(new Path(args[1]).toUri(), configuration);
  if (overrideOutput) {
    // Recursively remove any previous output so the driver can re-create it.
    fs.delete(new Path(args[1]), true);
  }
  Tool driver = new Freebase2RDFDriver(configuration);
  // Propagate the driver's exit code instead of unconditionally returning 0,
  // so failures are visible to ToolRunner and the calling shell.
  return driver.run(new String[] { args[0], args[1] });
}
public static int run(Configuration conf, Tool tool, String[] args) throws Exception{ if(conf == null) { conf = new Configuration(); } GenericOptionsParser parser = new GenericOptionsParser(conf, args); //set the configuration back, so that Tool can configure itself tool.setConf(conf); //get the args w/o generic hadoop args String[] toolArgs = parser.getRemainingArgs(); return tool.run(toolArgs);
public static int runMRJob(Tool tool, String[] args) throws Exception { Configuration conf = tool.getConf(); if (conf == null) { conf = new Configuration(); } GenericOptionsParser parser = getParser(conf, args); //set the configuration back, so that Tool can configure itself tool.setConf(conf); //get the args w/o generic hadoop args String[] toolArgs = parser.getRemainingArgs(); return tool.run(toolArgs); }
public static int run(Configuration conf, Tool tool, String[] args) throws Exception{ if(conf == null) { conf = new Configuration(); } GenericOptionsParser parser = getParser(conf, args); tool.setConf(conf); //get the args w/o generic hadoop args String[] toolArgs = parser.getRemainingArgs(); return tool.run(toolArgs); } private static synchronized GenericOptionsParser getParser(Configuration conf, String[] args) throws Exception { return new GenericOptionsParser(conf, args); }
/** * Runs the given <code>Tool</code> by {@link Tool#run(String[])}, after * parsing with the given generic arguments. Uses the given * <code>Configuration</code>, or builds one if null. * * Sets the <code>Tool</code>'s configuration with the possibly modified * version of the <code>conf</code>. * * @param conf <code>Configuration</code> for the <code>Tool</code>. * @param tool <code>Tool</code> to run. * @param args command-line arguments to the tool. * @return exit code of the {@link Tool#run(String[])} method. */ public static int run(Configuration conf, Tool tool, String[] args) throws Exception{ if(conf == null) { conf = new Configuration(); } GenericOptionsParser parser = new GenericOptionsParser(conf, args); //set the configuration back, so that Tool can configure itself tool.setConf(conf); //get the args w/o generic hadoop args String[] toolArgs = parser.getRemainingArgs(); return tool.run(toolArgs); }
/** Delegates execution to the wrapped callback tool using this tool's configuration. */
@Override
public int run(String[] args) throws Exception {
  callback.setConf(getConf());
  return callback.run(args);
}
}