/**
 * Sets the configuration. This is used to set the details for the table to
 * be scanned.
 *
 * @param configuration The configuration to set.
 * @see org.apache.hadoop.conf.Configurable#setConf(
 *   org.apache.hadoop.conf.Configuration)
 */
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "REC_CATCH_EXCEPTION",
    justification = "Intentional")
public void setConf(Configuration configuration) {
  this.conf = configuration;
  Scan scan = null;
  if (conf.get(SCAN) != null) {
    try {
      // A serialized scan was stored in the configuration; decode it.
      scan = TableMapReduceUtil.convertStringToScan(conf.get(SCAN));
    } catch (IOException e) {
      LOG.error("An error occurred.", e);
    }
  } else {
    try {
      // No serialized scan; assemble one from the individual scan-related keys.
      scan = createScanFromConfiguration(conf);
    } catch (Exception e) {
      LOG.error(StringUtils.stringifyException(e));
    }
  }
  setScan(scan);
}
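In the first branch above, the scan is expected to have been serialized into the configuration under the SCAN key ("hbase.mapreduce.scan"). A minimal sketch of how that string is typically produced on the job-submission side, assuming a plain HBase client setup (the table name and column family are placeholders):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanConfSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();

    // Build the scan that the job should run against the table.
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf")); // hypothetical column family

    // Serialize the scan under the SCAN key so that setConf() above takes
    // the convertStringToScan() branch instead of createScanFromConfiguration().
    conf.set(TableInputFormat.SCAN, TableMapReduceUtil.convertScanToString(scan));
    conf.set(TableInputFormat.INPUT_TABLE, "my_table"); // hypothetical table name
  }
}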
TableInputFormat tableInputFormat = getNewTableInputFormat(connection, tableName);
// setConf() runs first: per the setConf() implementation above, it may
// install a scan derived from the configuration, which the explicit
// setScan() call below then overrides.
tableInputFormat.setConf(connection.getConfiguration());
tableInputFormat.setScan(scan);
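getNewTableInputFormat(...) is a helper defined elsewhere in the calling code. As a hedged sketch of one plausible shape (an assumption, not the original source; the real helper may wire things differently, for example by injecting the shared Connection into the input format):

// Hypothetical helper; not taken from the original source.
private TableInputFormat getNewTableInputFormat(Connection connection, TableName tableName) {
  // Copy the connection's configuration so the shared instance is not mutated,
  // and record which table the input format should read.
  Configuration conf = new Configuration(connection.getConfiguration());
  conf.set(TableInputFormat.INPUT_TABLE, tableName.getNameAsString());

  TableInputFormat tableInputFormat = new TableInputFormat();
  tableInputFormat.setConf(conf);
  return tableInputFormat;
}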
@Override
public InputFormat getInputFormat() {
  TableInputFormat inputFormat = new HBaseTableIFBuilder()
      .withLimit(limit_)
      .withGt(gt_)
      .withGte(gte_)
      .withLt(lt_)
      .withLte(lte_)
      .withConf(m_conf)
      .build();
  inputFormat.setScan(scan);
  return inputFormat;
}
/**
 * Executes the HbaseSplit for a query against an HBase table.
 * <p>
 * Splits on row ID ranges, applies secondary indexes, and prunes columns,
 * among other optimizations.
 *
 * @param session Current session
 * @param split HbaseSplit
 * @param columnHandles List of HbaseColumnHandle
 * @return RecordReader<ImmutableBytesWritable, Result> for {@link org.apache.hadoop.mapreduce.RecordReader}
 */
public RecordReader<ImmutableBytesWritable, Result> execSplit(ConnectorSession session,
    HbaseSplit split, List<HbaseColumnHandle> columnHandles)
    throws IllegalAccessException, NoSuchFieldException, IOException, InterruptedException {
  TableName tableName = TableName.valueOf(split.getSchema(), split.getTable());
  Scan scan = TabletSplitMetadata.convertStringToScan(split.getSplitMetadata().getScan());
  buildScan(scan, session, columnHandles);

  TableInputFormat tableInputFormat = getNewTableInputFormat(connection, tableName);
  tableInputFormat.setScan(scan);

  RecordReader<ImmutableBytesWritable, Result> resultRecordReader =
      tableInputFormat.createRecordReader(
          new TableSplit(
              TableName.valueOf(split.getSplitMetadata().getTableName()),
              scan,
              split.getSplitMetadata().getStartRow(),
              split.getSplitMetadata().getEndRow(),
              split.getSplitMetadata().getRegionLocation(),
              split.getSplitMetadata().getLength()),
          null);
  resultRecordReader.initialize(null, null);
  return resultRecordReader;
}
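The returned reader is then driven through the standard org.apache.hadoop.mapreduce.RecordReader protocol. A minimal consumption sketch (assuming session, split, and columnHandles come from the surrounding connector code):

RecordReader<ImmutableBytesWritable, Result> reader = execSplit(session, split, columnHandles);
try {
  // Iterate the rows produced by the scan over this split.
  while (reader.nextKeyValue()) {
    ImmutableBytesWritable rowKey = reader.getCurrentKey();
    Result row = reader.getCurrentValue();
    // Decode the HBase Result into the engine's row representation here.
  }
} finally {
  reader.close();
}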