/**
 * Configures the Hadoop job before submission.
 *
 * <p>Reads the query window ({@code metric}, {@code startTs}, {@code stopTs}, {@code tag})
 * from the runtime arguments, wires the "timeSeries" dataset as both input (restricted to
 * the computed splits) and output, and optionally enables per-record flushing in the
 * mapper/reducer when the {@code frequentFlushing} argument is present.
 *
 * <p>Also writes a marker row to {@code beforeSubmitTable} and bumps the
 * {@code beforeSubmit} metric — presumably so an external observer can verify this
 * hook ran (NOTE(review): confirm against the caller/test).
 *
 * @throws Exception if job configuration fails
 */
@Override
public void initialize() throws Exception {
  MapReduceContext context = getContext();
  // Count exactly once per invocation. The original incremented this metric a second
  // time near the end of the method, double-counting each submit; the duplicate
  // increment has been removed.
  metrics.count("beforeSubmit", 1);

  Job hadoopJob = context.getHadoopJob();
  AggregateMetricsByTag.configureJob(hadoopJob);

  // Query window for the timeseries scan, supplied via runtime arguments.
  // parseLong over Long.valueOf: the values are only passed onward, so primitives
  // avoid needless boxing. (Both throw NumberFormatException on bad/missing input.)
  String metricName = context.getRuntimeArguments().get("metric");
  long startTs = Long.parseLong(context.getRuntimeArguments().get("startTs"));
  long stopTs = Long.parseLong(context.getRuntimeArguments().get("stopTs"));
  String tag = context.getRuntimeArguments().get("tag");

  // 2 splits over the [startTs, stopTs] window for the given metric/tag.
  context.addInput(Input.ofDataset(
      "timeSeries",
      table.getInputSplits(2, Bytes.toBytes(metricName), startTs, stopTs, Bytes.toBytes(tag))));
  context.addOutput(Output.ofDataset("timeSeries"));

  // Marker row recording that this hook completed its setup.
  beforeSubmitTable.write(Bytes.toBytes("beforeSubmit"), Bytes.toBytes("beforeSubmit:done"));

  // Optional knob: flush datasets after every record in both mapper and reducer.
  String frequentFlushing = context.getRuntimeArguments().get("frequentFlushing");
  if (frequentFlushing != null) {
    hadoopJob.getConfiguration().setInt("c.mapper.flush.freq", 1);
    hadoopJob.getConfiguration().setInt("c.reducer.flush.freq", 1);
  }
}