@Override
public int run(String[] args) throws Exception {
  // Delegate command-line parsing to MyArgumentParser, which fills `opts`.
  // A non-null return value means the parser already settled the outcome
  // (e.g. printed help or rejected bad arguments) and we exit with that code.
  Options opts = new Options();
  Integer parserExitCode = new MyArgumentParser().parseArgs(args, getConf(), opts);
  return parserExitCode != null ? parserExitCode : run(opts);
}
// NOTE(review): this method is a byte-for-byte duplicate of the identical
// run(String[]) override appearing immediately above in this extract. Two
// identical method definitions in one class would not compile, so this is
// presumably a duplication artifact of however this file was assembled —
// confirm against the original source and drop one copy.
// Behavior: parses CLI args into a fresh Options via MyArgumentParser; a
// non-null parser result is returned as the process exit code, otherwise
// control passes to run(Options).
@Override public int run(String[] args) throws Exception { Options opts = new Options(); Integer exitCode = new MyArgumentParser().parseArgs(args, getConf(), opts); if (exitCode != null) { return exitCode; } return run(opts); }
// NOTE(review): the line below is a fused, truncated interior of a larger
// driver method (no signature visible here; braces do not balance). Concrete
// signs of corruption: the IllegalStateException message string is cut off
// mid-concatenation and spliced directly onto an unrelated
// getConf().set("mapred.fairscheduler.pool", ...) call, and the trailing
// "// job failed" comment swallows everything after it on this one physical
// line (the mtree-merge loop text that follows is therefore dead comment
// text, not code). Do not edit in place — recover the original method from
// source control before changing anything here.
// What the visible statements appear to do (hedged — confirm against the
// real file): reject MR1 LocalJobRunner mode; push fair-scheduler pool,
// max-segments, log4j config, and user-classpath-first settings into the
// job Configuration; submit a randomizer job over the full input list and
// bail out with -1 if it fails.
if (getConf().getBoolean("isMR1", false) && "local".equals(getConf().get("mapred.job.tracker"))) { throw new IllegalStateException( "Running with LocalJobRunner (i.e. all of Hadoop inside a single JVM) is not supported " + getConf().set("mapred.fairscheduler.pool", options.fairSchedulerPool); getConf().setInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, options.maxSegments); getConf().setBoolean("mapred.used.genericoptionsparser", true); Utils.setLogConfigFile(options.log4jConfigFile, getConf()); addDistributedCacheFile(options.log4jConfigFile, getConf()); if (getConf().get(mapreduceJobUserClasspathFirst) == null) { getConf().setBoolean(mapreduceJobUserClasspathFirst, true); LOG.info("Using " + mapreduceJobUserClasspathFirst + "=" + getConf().get(mapreduceJobUserClasspathFirst)); job = Job.getInstance(getConf()); job.setJarByClass(getClass()); Job randomizerJob = randomizeManyInputFiles(getConf(), fullInputList, outputStep2Dir, numLinesPerRandomizerSplit); if (!waitForCompletion(randomizerJob, options.isVerbose)) { return -1; // job failed int mtreeMergeIteration = 1; while (reducers > options.shards) { // run a mtree merge iteration job = Job.getInstance(getConf()); job.setJarByClass(getClass());
// NOTE(review): near-duplicate of the fused fragment on the previous line
// (it omits the user-classpath-first stanza and adds a setJobName call for
// TreeMergeMapper at the end) — likely two overlapping excerpts of the SAME
// original method captured twice by the extraction. Same corruption pattern:
// truncated exception message spliced onto a Configuration mutation,
// unbalanced braces, and a mid-line "// job failed" comment that turns the
// rest of this physical line (the mtree-merge loop) into comment text.
// Do not edit in place; recover the original method from source control.
if (getConf().getBoolean("isMR1", false) && "local".equals(getConf().get("mapred.job.tracker"))) { throw new IllegalStateException( "Running with LocalJobRunner (i.e. all of Hadoop inside a single JVM) is not supported " + getConf().set("mapred.fairscheduler.pool", options.fairSchedulerPool); getConf().setInt(SolrOutputFormat.SOLR_RECORD_WRITER_MAX_SEGMENTS, options.maxSegments); getConf().setBoolean("mapred.used.genericoptionsparser", true); Utils.setLogConfigFile(options.log4jConfigFile, getConf()); addDistributedCacheFile(options.log4jConfigFile, getConf()); job = Job.getInstance(getConf()); job.setJarByClass(getClass()); Job randomizerJob = randomizeManyInputFiles(getConf(), fullInputList, outputStep2Dir, numLinesPerRandomizerSplit); if (!waitForCompletion(randomizerJob, options.isVerbose)) { return -1; // job failed int mtreeMergeIteration = 1; while (reducers > options.shards) { // run a mtree merge iteration job = Job.getInstance(getConf()); job.setJarByClass(getClass()); job.setJobName(getClass().getName() + "/" + Utils.getShortClassName(TreeMergeMapper.class));