/** {@inheritDoc} */
@Override
protected void cleanup(Context context) throws IOException {
  Preconditions.checkNotNull(mTableContext);
  try {
    // Give the bulk importer a chance to flush/finish before the context goes away.
    mBulkImporter.cleanup(mTableContext);
  } finally {
    // Always release the table context, even if the importer's cleanup throws,
    // so the underlying table resources are not leaked.
    mTableContext.close();
    mTableContext = null;
  }
  try {
    super.cleanup(context);
  } catch (InterruptedException ie) {
    // Restore the thread's interrupt status before translating to the
    // IOException this method is declared to throw.
    Thread.currentThread().interrupt();
    throw new IOException(ie);
  }
}
/** * Configures the MapReduce mapper for the job. * * @param job The Hadoop MR job. * @throws IOException If there is an error. */ protected void configureMapper(Job job) throws IOException { // Set the map class in the job configuration. final FijiMapper<?, ?, ?, ?> mapper = getMapper(); if (null == mapper) { throw new JobConfigurationException("Must specify a mapper"); } if (mapper instanceof Configurable) { ((Configurable) mapper).setConf(job.getConfiguration()); } job.setMapperClass(((Mapper<?, ?, ?, ?>) mapper).getClass()); // Set the map output key and map output value types in the job configuration. job.setMapOutputKeyClass(mapper.getOutputKeyClass()); job.setMapOutputValueClass(mapper.getOutputValueClass()); configureAvro(job, mapper); configureHTableInput(job, mapper); }
/** {@inheritDoc} */
@Override
protected void setup(Context context) throws IOException {
  try {
    super.setup(context);
  } catch (InterruptedException ie) {
    // Restore the thread's interrupt status before translating to the
    // IOException this method is declared to throw.
    Thread.currentThread().interrupt();
    throw new IOException(ie);
  }
  final Configuration conf = context.getConfiguration();
  // Instantiate the user's bulk importer from the job configuration and
  // hand it the table context it will write through.
  mBulkImporter = FijiBulkImporters.create(conf);
  mTableContext = FijiTableContextFactory.create(context);
  mBulkImporter.setup(mTableContext);
}
/** {@inheritDoc} */
@Override
protected void setup(Context context) throws IOException {
  try {
    super.setup(context);
  } catch (InterruptedException ie) {
    // Restore the thread's interrupt status before translating to the
    // IOException this method is declared to throw.
    Thread.currentThread().interrupt();
    throw new IOException(ie);
  }
  // When reading from an HBase table, log the region this task covers to
  // make per-task debugging easier.
  if (context.getInputSplit() instanceof TableSplit) {
    TableSplit taskSplit = (TableSplit) context.getInputSplit();
    LOG.info("Setting up map task on region [{} -- {}]",
        toHex(taskSplit.getStartRow()), toHex(taskSplit.getEndRow()));
  }
}
/** {@inheritDoc} */
@Override
protected void cleanup(Context context) throws IOException {
  // When reading from an HBase table, log the region this task covered to
  // make per-task debugging easier.
  if (context.getInputSplit() instanceof TableSplit) {
    TableSplit taskSplit = (TableSplit) context.getInputSplit();
    LOG.info("Cleaning up task on region [{} -- {}]",
        toHex(taskSplit.getStartRow()), toHex(taskSplit.getEndRow()));
  }
  try {
    super.cleanup(context);
  } catch (InterruptedException ie) {
    // Restore the thread's interrupt status before translating to the
    // IOException this method is declared to throw.
    Thread.currentThread().interrupt();
    throw new IOException(ie);
  }
}