@Override public V createValue() { return delegate.createValue(); }
public void setBaseAndInnerReader(
    final org.apache.hadoop.mapred.RecordReader<NullWritable, VectorizedRowBatch> baseReader) {
  this.baseReader = baseReader;
  this.innerReader = null;
  this.vectorizedRowBatchBase = baseReader.createValue();
}
@Override public V createValue() { return curReader.createValue(); }
@Override public V createValue() { return (V) recordReader.createValue(); }
public V createValue() { return (V) recordReader.createValue(); }
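// The one-line overrides above are all instances of the same delegating-wrapper
// pattern over org.apache.hadoop.mapred.RecordReader. A minimal self-contained
// sketch of that pattern follows; the class and field names are illustrative,
// not taken from any of the projects quoted here.
abstract class DelegatingRecordReader<K, V> implements org.apache.hadoop.mapred.RecordReader<K, V> {
  protected final org.apache.hadoop.mapred.RecordReader<K, V> delegate;

  protected DelegatingRecordReader(org.apache.hadoop.mapred.RecordReader<K, V> delegate) {
    this.delegate = delegate;
  }

  @Override
  public K createKey() {
    return delegate.createKey(); // the delegate decides the reusable key type
  }

  @Override
  public V createValue() {
    return delegate.createValue(); // the delegate decides the reusable value type
  }

  @Override
  public boolean next(K key, V value) throws java.io.IOException {
    return delegate.next(key, value);
  }

  @Override
  public long getPos() throws java.io.IOException {
    return delegate.getPos();
  }

  @Override
  public float getProgress() throws java.io.IOException {
    return delegate.getProgress();
  }

  @Override
  public void close() throws java.io.IOException {
    delegate.close();
  }
}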
public FooterAwareRecordReader(RecordReader<K, V> delegate, int footerCount, JobConf job)
    throws IOException {
  this.delegate = requireNonNull(delegate, "delegate is null");
  this.job = requireNonNull(job, "job is null");
  checkArgument(footerCount > 0, "footerCount is expected to be positive");
  footerBuffer.initializeBuffer(job, delegate, footerCount, delegate.createKey(), delegate.createValue());
}
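// Why the constructor above pre-fills a buffer: to drop the last footerCount
// records of a split, a reader must stay footerCount records ahead of its
// caller. A generic sketch of that idea, assuming nothing about the actual
// FooterBuffer helper; the deque of key/value pairs is illustrative.
static <K, V> Object[] nextSkippingFooter(
    org.apache.hadoop.mapred.RecordReader<K, V> delegate,
    java.util.ArrayDeque<Object[]> lookahead,
    int footerCount) throws java.io.IOException {
  // grow the lookahead window to footerCount + 1 records
  while (lookahead.size() <= footerCount) {
    K k = delegate.createKey();
    V v = delegate.createValue();
    if (!delegate.next(k, v)) {
      return null; // whatever is still buffered is the footer: drop it
    }
    lookahead.addLast(new Object[] {k, v});
  }
  // the oldest buffered record is guaranteed not to be part of the footer
  return lookahead.removeFirst();
}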
PassThruOffsetReader(RecordReader sourceReader) {
  this.sourceReader = sourceReader;
  key = sourceReader.createKey();
  value = (Writable) sourceReader.createValue();
}
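// A sketch of how a pass-through reader typically drives the key/value pair it
// created in the constructor above; the helper name and loop shape are an
// assumption about the caller, not the quoted class's implementation.
static void copyAll(org.apache.hadoop.mapred.RecordReader<Writable, Writable> reader)
    throws IOException {
  Writable key = reader.createKey();     // one reusable key instance
  Writable value = reader.createValue(); // one reusable value instance
  while (reader.next(key, value)) {
    // next() refills the same two objects on every iteration, which is why
    // createKey()/createValue() are called once up front rather than per record
  }
  reader.close();
}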
public LlapRowRecordReader(Configuration conf, Schema schema,
    RecordReader<NullWritable, ? extends Writable> reader) throws IOException {
  this.conf = conf;
  this.schema = schema;
  this.reader = reader;
  this.data = reader.createValue();
  try {
    this.serde = initSerDe(conf);
  } catch (SerDeException err) {
    throw new IOException(err);
  }
}
@Override public ResultWritable createValue() { return new ResultWritable(rr.createValue()); }
public void internalInit(Properties tableProperties, RecordReader<Object, Object> reader) {
  key = reader.createKey();
  value = reader.createValue();
}
@BeforeClass
@SuppressWarnings("unchecked")
public static void init() {
  recordReader = mock(RecordReader.class);
  when(recordReader.createValue()).thenReturn(new Object());
}
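// A hedged extension of the mock above: stubbing next() with Mockito's
// consecutive-return form makes the mock usable in a read loop. The method
// name and the two-record count are illustrative, not part of the original
// test. (Assumes the usual static imports: org.mockito.Mockito.*,
// org.mockito.ArgumentMatchers.any.)
@BeforeClass
@SuppressWarnings("unchecked")
public static void initWithRecords() throws IOException {
  recordReader = mock(RecordReader.class);
  when(recordReader.createKey()).thenReturn(new Object());
  when(recordReader.createValue()).thenReturn(new Object());
  when(recordReader.next(any(), any())).thenReturn(true, true, false); // two records, then EOF
}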
@Override
public void open(HadoopInputSplit split) throws IOException {
  // enforce sequential open() calls
  synchronized (OPEN_MUTEX) {
    this.recordReader = this.mapredInputFormat.getRecordReader(
        split.getHadoopInputSplit(), jobConf, new HadoopDummyReporter());
    if (this.recordReader instanceof Configurable) {
      ((Configurable) this.recordReader).setConf(jobConf);
    }
    key = this.recordReader.createKey();
    value = this.recordReader.createValue();
    this.fetched = false;
  }
}
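// Companion sketch for the open() above: the fetch step reuses the key/value
// instances created there and records whether a row was read. The hasNext
// field is assumed to pair with the fetched flag; treat the body as an
// approximation of Flink's HadoopInputFormatBase, not its exact source.
protected void fetchNext() throws IOException {
  hasNext = this.recordReader.next(key, value);
  fetched = true;
}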
public static List<ArrayWritable> read(Path parquetFile) throws IOException {
  List<ArrayWritable> records = new ArrayList<ArrayWritable>();
  RecordReader<NullWritable, ArrayWritable> reader = new MapredParquetInputFormat()
      .getRecordReader(
          new FileSplit(parquetFile, 0, fileLength(parquetFile), (String[]) null),
          new JobConf(), null);
  NullWritable alwaysNull = reader.createKey();
  ArrayWritable record = reader.createValue();
  while (reader.next(alwaysNull, record)) {
    records.add(record);
    record = reader.createValue(); // a new value so the last isn't clobbered
  }
  return records;
}
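// Hypothetical usage of the helper above; the file path is an illustrative
// placeholder, not a path from the original test.
List<ArrayWritable> rows = read(new Path("/tmp/example.parquet"));
for (ArrayWritable row : rows) {
  System.out.println(java.util.Arrays.toString(row.get())); // row.get() exposes the column Writables
}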
@Override
public void sourcePrepare(FlowProcess<JobConf> flowProcess,
    SourceCall<Object[], RecordReader> sourceCall) throws IOException {
  super.sourcePrepare(flowProcess, sourceCall);
  Object[] context = new Object[SRC_CTX_SIZE];
  context[SRC_CTX_KEY] = sourceCall.getInput().createKey();
  context[SRC_CTX_VALUE] = sourceCall.getInput().createValue();
  // as the tuple _might_ vary (some objects might be missing), we use a map rather than a collection
  Settings settings = loadSettings(flowProcess.getConfigCopy(), true);
  context[SRC_CTX_ALIASES] = CascadingUtils.alias(settings);
  context[SRC_CTX_OUTPUT_JSON] = settings.getOutputAsJson();
  sourceCall.setContext(context);
}
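// Hedged sketch of the read step that consumes the context assembled above.
// The SRC_CTX_* indices come from the snippet; everything else is an
// illustrative shape, not elasticsearch-hadoop's actual source() implementation.
@Override
public boolean source(FlowProcess<JobConf> flowProcess,
    SourceCall<Object[], RecordReader> sourceCall) throws IOException {
  Object[] context = sourceCall.getContext();
  Object key = context[SRC_CTX_KEY];
  Object value = context[SRC_CTX_VALUE];
  if (!sourceCall.getInput().next(key, value)) {
    return false; // no more records in this input split
  }
  // translate key/value into a Cascading Tuple here, honoring SRC_CTX_ALIASES
  // and SRC_CTX_OUTPUT_JSON, then add it to sourceCall.getIncomingEntry()
  return true;
}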