/**
 * Main entry point for jar file.
 *
 * @param args The command line parameters.
 * @throws Throwable When the selection fails.
 */
public static void main(String[] args) throws Throwable {
  ProgramDriver driver = new ProgramDriver();
  // Register every runnable program; the first argument picks one of these.
  driver.addClass(ImportFromFile.NAME, ImportFromFile.class,
      "Import from file");
  driver.addClass(ImportFromFile2.NAME, ImportFromFile2.class,
      "Import from file (with dependencies)");
  driver.addClass(AnalyzeData.NAME, AnalyzeData.class,
      "Analyze imported JSON");
  driver.addClass(AnalyzeSnapshotData.NAME, AnalyzeSnapshotData.class,
      "Analyze imported JSON from snapshot");
  driver.addClass(ParseJson.NAME, ParseJson.class,
      "Parse JSON into columns");
  driver.addClass(ParseJson2.NAME, ParseJson2.class,
      "Parse JSON into columns (map only)");
  driver.addClass(ParseJsonMulti.NAME, ParseJsonMulti.class,
      "Parse JSON into multiple tables");
  // Dispatch to the selected program.
  driver.driver(args);
}
}
/**
 * API compatible with Hadoop 1.x: runs the selected program and kills the
 * JVM with status -1 when the run reports failure.
 *
 * @param argv program selector followed by that program's arguments
 * @throws Throwable when the selected program fails
 */
public void driver(String[] argv) throws Throwable {
  final int status = run(argv);
  if (status == -1) {
    System.exit(-1);
  }
}
System.out.println("An example program must be given as the" + " first argument."); printUsage(programs); return -1; if (pgm == null) { System.out.println("Unknown program '" + args[0] + "' chosen."); printUsage(programs); return -1;
/**
 * Jar entry point: registers every HBase MapReduce tool with a
 * {@link ProgramDriver}, then dispatches to the tool named by the first
 * command-line argument.
 *
 * @param args tool selector followed by the selected tool's own arguments
 * @throws Throwable when registration, reflective lookup, or the tool fails
 */
public static void main(String[] args) throws Throwable {
  ProgramDriver pgd = new ProgramDriver();
  pgd.addClass(RowCounter.NAME, RowCounter.class,
    "Count rows in HBase table.");
  pgd.addClass(CellCounter.NAME, CellCounter.class,
    "Count cells in HBase table.");
  pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
  pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
  pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");
  pgd.addClass(BulkLoadHFilesTool.NAME, BulkLoadHFilesTool.class,
    "Complete a bulk data load.");
  pgd.addClass(CopyTable.NAME, CopyTable.class,
    "Export a table from local cluster to peer cluster.");
  pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +
    " data from tables in two different clusters. It" +
    " doesn't work for incrementColumnValues'd cells since" +
    " timestamp is changed after appending to WAL.");
  pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");
  pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +
    " the specific snapshot to a given FileSystem.");
  // NOTE(review): 'driver' is invoked reflectively rather than called
  // directly -- presumably to stay binary-compatible across Hadoop versions
  // where the method's signature/return type differs; confirm before
  // simplifying this to a direct pgd.driver(args) call.
  ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).
    invoke(pgd, new Object[]{args});
}
}
/**
 * Command-line entry point: registers the Tez example programs on a
 * {@link ProgramDriver}, runs the one named by the first argument, and
 * exits the JVM with the run's status (-1 if registration or the run threw).
 *
 * @param argv example selector followed by that example's arguments
 */
public static void main(String argv[]) {
  ProgramDriver programDriver = new ProgramDriver();
  int status = -1;
  try {
    programDriver.addClass("wordcount", WordCount.class,
        "A native Tez wordcount program that counts the words in the input files.");
    programDriver.addClass("orderedwordcount", OrderedWordCount.class,
        "Word Count with words sorted on frequency");
    programDriver.addClass("simplesessionexample", SimpleSessionExample.class,
        "Example to run multiple dags in a session");
    programDriver.addClass("hashjoin", HashJoinExample.class,
        "Identify all occurences of lines in file1 which also occur in file2 using hash join");
    programDriver.addClass("sortmergejoin", SortMergeJoinExample.class,
        "Identify all occurences of lines in file1 which also occur in file2 using sort merge join");
    programDriver.addClass("joindatagen", JoinDataGen.class,
        "Generate data to run the joinexample");
    programDriver.addClass("joinvalidate", JoinValidate.class,
        "Validate data generated by joinexample and joindatagen");
    programDriver.addClass("cartesianproduct", CartesianProduct.class,
        "Cartesian product of two datasets");
    status = programDriver.run(argv);
  } catch (Throwable t) {
    // Best-effort reporting; the failure status is carried via System.exit.
    t.printStackTrace();
  }
  System.exit(status);
}
public static void main(String[] args) { ProgramDriver programDriver = new ProgramDriver(); int exitCode = -1; try { programDriver.addClass("wordcount-hbase", WordCountHBase.class, "A map/reduce program that counts the words in the input files."); programDriver.addClass("export-table", Export.class, "A map/reduce program that exports a table to a file."); //programDriver.addClass("cellcounter", CellCounter.class, "Count them cells!"); programDriver.driver(args); exitCode = programDriver.run(args); } catch (Throwable e) { e.printStackTrace(); } System.exit(exitCode); } }
/**
 * Jar entry point: registers the RowCounter program with a
 * {@link ProgramDriver} and dispatches to it reflectively.
 *
 * @param args program selector followed by the program's own arguments
 * @throws Throwable when registration, lookup, or the program itself fails
 */
public static void main(String[] args) throws Throwable {
  // BUG FIX: 'pgd' was used without ever being declared or initialized,
  // which cannot compile. Create the driver before registering programs.
  ProgramDriver pgd = new ProgramDriver();
  pgd.addClass(RowCounter.NAME, RowCounter.class, "Count rows in HBase table");
  // NOTE(review): reflective dispatch is kept as-is -- presumably for
  // binary compatibility across Hadoop versions where 'driver' changed
  // signature; confirm before replacing with a direct call.
  ProgramDriver.class.getMethod("driver", new Class[] { String[].class })
      .invoke(pgd, new Object[] { args });
}
}
public static void main(String[] args) throws Throwable { try { ProgramDriver programDriver = new ProgramDriver(); Properties mainClasses = new Properties(); InputStream propsStream = Thread.currentThread() programDriver.driver(args); programDriver.driver(argsList.toArray(new String[argsList.size()])); } catch (Throwable e) { log.error("TamingTextDriver failed with args: " + Arrays.toString(args) + '\n' + e.getMessage());
/**
 * No-arg constructor: delegates to the one-argument constructor with a
 * freshly created {@link ProgramDriver}.
 */
public MapredTestDriver() { this(new ProgramDriver()); }
/**
 * Verifies that {@code Driver.main} delegates to the injected
 * {@link ProgramDriver#driver} implementation.
 */
@Test
public void testDriverMainMethod() throws Throwable {
  ProgramDriver mockDriver = mock(ProgramDriver.class);
  Driver.setProgramDriver(mockDriver);
  Driver.main(new String[0]);
  verify(mockDriver).driver(Mockito.any());
}
}
public static void main(String argv[]){ int exitCode = -1; ProgramDriver pgd = new ProgramDriver(); try { pgd.addClass("wordcount", WordCount.class, "A map/reduce program that counts the words in the input files."); pgd.addClass("wordmean", WordMean.class, "A map/reduce program that counts the average length of the words in the input files."); pgd.addClass("wordmedian", WordMedian.class, "A map/reduce program that counts the median length of the words in the input files."); pgd.addClass("wordstandarddeviation", WordStandardDeviation.class, "A map/reduce program that counts the standard deviation of the length of the words in the input files."); pgd.addClass("aggregatewordcount", AggregateWordCount.class, "An Aggregate based map/reduce program that counts the words in the input files."); pgd.addClass("aggregatewordhist", AggregateWordHistogram.class, "An Aggregate based map/reduce program that computes the histogram of the words in the input files."); pgd.addClass("grep", Grep.class, "A map/reduce program that counts the matches of a regex in the input."); pgd.addClass("randomwriter", RandomWriter.class, "A map/reduce program that writes 10GB of random data per node."); pgd.addClass("randomtextwriter", RandomTextWriter.class, "A map/reduce program that writes 10GB of random textual data per node."); pgd.addClass("sort", Sort.class, "A map/reduce program that sorts the data written by the random writer."); pgd.addClass("pi", QuasiMonteCarlo.class, QuasiMonteCarlo.DESCRIPTION); pgd.addClass("bbp", BaileyBorweinPlouffe.class, BaileyBorweinPlouffe.DESCRIPTION); pgd.addClass("distbbp", DistBbp.class, DistBbp.DESCRIPTION); pgd.addClass("pentomino", DistributedPentomino.class, "A map/reduce tile laying program to find solutions to pentomino problems.");
/**
 * Jar entry point: registers the RowCounter program and dispatches to it
 * via reflection on {@code ProgramDriver.driver} (presumably to remain
 * compatible across Hadoop versions where that method's signature differs
 * -- TODO confirm).
 *
 * @param args program selector followed by the program's own arguments
 * @throws Throwable when registration, lookup, or the program itself fails
 */
public static void main(String[] args) throws Throwable {
  ProgramDriver driver = new ProgramDriver();
  driver.addClass(RowCounter.NAME, RowCounter.class,
      "Count rows in HBase table");
  ProgramDriver.class
      .getMethod("driver", new Class[] { String[].class })
      .invoke(driver, new Object[] { args });
}
}
/**
 * Jar entry point: registers the export/import table programs with a
 * {@link ProgramDriver}, runs the one selected by the first argument, and
 * exits with its status code (-1 on failure).
 *
 * @param args program selector followed by the selected program's arguments
 */
public static void main(String[] args) {
  ProgramDriver programDriver = new ProgramDriver();
  int exitCode = -1;
  try {
    programDriver.addClass("export-table", Export.class,
        "A map/reduce program that exports a table to a file.");
    programDriver.addClass("import-table", Import.class,
        "A map/reduce program that imports a table to a file.");
    // BUG FIX: the original called programDriver.driver(args) AND then
    // programDriver.run(args), which executes the selected program twice
    // (driver() delegates to run()). Run it exactly once.
    exitCode = programDriver.run(args);
  } catch (Throwable e) {
    e.printStackTrace();
  }
  System.exit(exitCode);
}
}
/**
 * Registers the core test programs on the supplied driver. Registration
 * failures are reported to stderr rather than propagated.
 *
 * @param pgd the driver that will own the registered test programs
 */
public CoreTestDriver(ProgramDriver pgd) {
  this.pgd = pgd;
  try {
    pgd.addClass("testsetfile", TestSetFile.class,
        "A test for flat files of binary key/value pairs.");
    pgd.addClass("testarrayfile", TestArrayFile.class,
        "A test for flat files of binary key/value pairs.");
    pgd.addClass("testrpc", TestRPC.class, "A test for rpc.");
    pgd.addClass("testipc", TestIPC.class, "A test for ipc.");
  } catch (Throwable t) {
    t.printStackTrace();
  }
}
ProgramDriver programDriver = new ProgramDriver(); for (Object key : mainClasses.keySet()) { String keyString = (String) key; programDriver.driver(args); return; programDriver.driver(argsList.toArray(new String[argsList.size()]));
/**
 * No-arg constructor: delegates to the one-argument constructor with a
 * freshly created {@link ProgramDriver}.
 */
public CoreTestDriver() { this(new ProgramDriver()); }
/**
 * Hands the arguments to the underlying program driver; any failure is
 * printed to stderr rather than rethrown.
 *
 * @param argv program selector followed by that program's arguments
 */
public void run(String argv[]) {
  try {
    pgd.driver(argv);
  } catch (Throwable t) {
    t.printStackTrace();
  }
}
public static void main(String[] args) throws Throwable { ProgramDriver pgd = new ProgramDriver(); pgd.addClass(RollbackUpdatesMrJob.NAME, RollbackUpdatesMrJob.class, "Rolls back updates"); // TODO: add UpdatesProcessingMrJob to the list pgd.driver(args); } }
public static void main(String args[]) { int exitCode = -1; ProgramDriver pgd = new ProgramDriver(); try { pgd.addClass( "MNISTConverter", MNISTConverter.class, "A utility program that converts MNIST training and label datasets " + "into HDFS sequence file."); pgd.addClass("MNISTEvaluator", MNISTEvaluator.class, "A utility program that evaluates trained model for the MNIST dataset"); pgd.addClass( "MultiLayerPerceptron", MultiLayerPerceptron.class, "An example program that trains a multilayer perceptron model from HDFS sequence file."); pgd.addClass("ExclusiveOrConverter", ExclusiveOrConverter.class, "A utility program that converts ExclusiveOR training and label datasets "); pgd.addClass( "ExclusiveOrRecurrentMultiLayerPerceptron", ExclusiveOrRecurrentMultiLayerPerceptron.class, "An example program that trains a recurrent multilayer perceptron model with exclusive or" + " from HDFS sequence file."); pgd.addClass( "MnistRecurrentMultiLayerPerceptron", MnistRecurrentMultiLayerPerceptron.class, "An example program that trains a recurrent multilayer perceptron model with MNIST" + " from HDFS sequence file."); exitCode = pgd.run(args);
/**
 * Jar entry point: registers every HBase MapReduce tool with a
 * {@link ProgramDriver} and dispatches to the one named by the first
 * argument. Dispatch goes through reflection on {@code ProgramDriver.driver}
 * (presumably for compatibility across Hadoop versions where that method's
 * signature differs -- TODO confirm).
 *
 * @param args tool selector followed by the selected tool's own arguments
 * @throws Throwable when registration, lookup, or the tool itself fails
 */
public static void main(String[] args) throws Throwable {
  ProgramDriver driver = new ProgramDriver();
  driver.addClass(RowCounter.NAME, RowCounter.class,
      "Count rows in HBase table");
  driver.addClass(CellCounter.NAME, CellCounter.class,
      "Count cells in HBase table");
  driver.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
  driver.addClass(Import.NAME, Import.class, "Import data written by Export.");
  driver.addClass(ImportTsv.NAME, ImportTsv.class,
      "Import data in TSV format.");
  driver.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,
      "Complete a bulk data load.");
  driver.addClass(CopyTable.NAME, CopyTable.class,
      "Export a table from local cluster to peer cluster");
  driver.addClass(VerifyReplication.NAME, VerifyReplication.class,
      "Compare"
          + " the data from tables in two different clusters. WARNING: It"
          + " doesn't work for incrementColumnValues'd cells since the"
          + " timestamp is changed after being appended to the log.");
  ProgramDriver.class
      .getMethod("driver", new Class[] { String[].class })
      .invoke(driver, new Object[] { args });
}
}