/**
 * Closes the wrapped record reader and releases its resources.
 */
public void close() {
  this.recordReaderImpl.close();
}
/**
 * Creates a new, empty key instance for use with {@code next}.
 *
 * @return a fresh {@link ImmutableBytesWritable} key
 * @see org.apache.hadoop.mapred.RecordReader#createKey()
 */
public ImmutableBytesWritable createKey() {
  return this.recordReaderImpl.createKey();
}
/**
 * Creates a new, empty value instance for use with {@code next}.
 *
 * @return a fresh {@link Result} (the previous javadoc said RowResult,
 *         which does not match the declared return type)
 * @see org.apache.hadoop.mapred.RecordReader#createValue()
 */ public Result createValue() { return this.recordReaderImpl.createValue(); }
/**
 * Builds the scanner. Kept out of the constructor so subclasses can extend setup.
 *
 * @throws IOException if the underlying scanner cannot be opened
 */
public void init() throws IOException {
  final byte[] firstRow = this.recordReaderImpl.getStartRow();
  this.recordReaderImpl.restart(firstRow);
}
/**
 * Builds the scanner. Kept out of the constructor so subclasses can extend setup.
 *
 * NOTE(review): unlike the sibling init() implementations in this file, this
 * variant calls restart() and reads startRow directly instead of delegating
 * through recordReaderImpl — presumably it belongs to the implementation class
 * itself rather than the wrapper; confirm before consolidating.
 *
 * @throws IOException if the scanner cannot be opened
 */ public void init() throws IOException { restart(startRow); }
public long getPos() { // This should be the ordinal tuple in the range; // not clear how to calculate... return this.recordReaderImpl.getPos(); }
/**
 * Sets the table to scan.
 *
 * @param htable the {@link Table} to scan (the previous javadoc linked
 *        HTableDescriptor, which does not match the parameter type)
 */ public void setHTable(Table htable) { this.recordReaderImpl.setHTable(htable); }
/**
 * Restricts the scan to the given columns.
 *
 * @param inputColumns the columns whose cells should appear in each {@link Result}
 */
public void setInputColumns(final byte [][] inputColumns) {
  this.recordReaderImpl.setInputColumns(inputColumns);
}
/**
 * Reads the next key/value pair from the scanner into the supplied holders.
 *
 * @param key holder for the row key
 * @param value holder to populate with the row's cells
 * @return true if a row was read; false when the split is exhausted
 * @throws IOException if the underlying scanner fails
 */
public boolean next(ImmutableBytesWritable key, Result value) throws IOException {
  return this.recordReaderImpl.next(key, value);
}
}
/**
 * Sets the upper bound of the split.
 *
 * @param endRow the last row in the split
 */
public void setEndRow(final byte [] endRow) {
  this.recordReaderImpl.setEndRow(endRow);
}
/**
 * Builds the scanner. Not done in the constructor so subclasses can extend
 * the setup.
 *
 * @throws IOException if the underlying scanner cannot be opened
 */
public void init() throws IOException {
  final byte[] startRow = this.recordReaderImpl.getStartRow();
  this.recordReaderImpl.restart(startRow);
}
/**
 * Restarts scanning from the given row by creating a new scanner; used to
 * recover from survivable exceptions.
 *
 * @param firstRow the row to resume scanning from
 * @throws IOException if a new scanner cannot be created
 */
public void restart(byte[] firstRow) throws IOException {
  this.recordReaderImpl.restart(firstRow);
}
public float getProgress() { // Depends on the total number of tuples and getPos return this.recordReaderImpl.getPos(); }
/**
 * Sets the table to scan.
 *
 * @param htable the {@link Table} to scan (the previous javadoc linked
 *        HTableDescriptor, which does not match the parameter type)
 */ public void setHTable(Table htable) { this.recordReaderImpl.setHTable(htable); }
/**
 * Limits the scan to the supplied set of columns.
 *
 * @param inputColumns the columns to be placed in each {@link Result}
 */
public void setInputColumns(final byte [][] inputColumns) {
  this.recordReaderImpl.setInputColumns(inputColumns);
}
/**
 * Reads the next key/value pair from the scanner into the supplied holders.
 *
 * @param key the row key holder (the previous javadoc said HStoreKey)
 * @param value the {@link Result} to populate (the previous javadoc said
 *        MapWritable, which does not match the parameter type)
 * @return true if there was more data
 * @throws IOException if the underlying scanner fails
 */ public boolean next(ImmutableBytesWritable key, Result value) throws IOException { return this.recordReaderImpl.next(key, value); } }
/**
 * Records the last row of the split.
 *
 * @param endRow the last row in the split
 */
public void setEndRow(final byte [] endRow) {
  this.recordReaderImpl.setEndRow(endRow);
}
/**
 * Builds the scanner, restarting the wrapped reader at its start row.
 * Not done in the constructor so subclasses can extend the setup.
 *
 * @throws IOException if the underlying scanner cannot be opened
 */
public void init() throws IOException {
  this.recordReaderImpl.restart(this.recordReaderImpl.getStartRow());
}
/**
 * @return the wrapped reader's position. Ideally this would be the ordinal of
 *         the current tuple within the split's row range, but there is no
 *         clear way to compute that, so the raw position is returned instead.
 */
public long getPos() { return this.recordReaderImpl.getPos(); }