private void fetchNext() throws IOException {
  try {
    this.hasNext = this.recordReader.nextKeyValue();
  } catch (InterruptedException e) {
    throw new IOException("Could not fetch next KeyValue pair.", e);
  } finally {
    this.fetched = true;
  }
}
/** {@inheritDoc} */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  if (cancelled) {
    throw new HadoopTaskCancelledException("Task cancelled.");
  }
  return reader.nextKeyValue();
}
@Override
public boolean hasNext() {
  try {
    boolean retVal = curRecReader.nextKeyValue();
    if (retVal) {
      return true;
    }
    // If it's false, we are done and need to close the record reader.
    curRecReader.close();
    return false;
  } catch (IOException | InterruptedException e) {
    throw new RuntimeException(e);
  }
}
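// Note: the hasNext() above advances the underlying reader, so calling it twice in a
// row would silently skip a record. The fetchNext() helper at the top of this list
// avoids that by caching the look-ahead result. Below is a minimal sketch combining
// the two into a java.util.Iterator; the wrapper class name and the unchecked-exception
// choices are illustrative assumptions, not taken from any of the sources here.
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.apache.hadoop.mapreduce.RecordReader;

public class RecordReaderIterator<V> implements Iterator<V> {
  private final RecordReader<?, V> recordReader;
  private boolean fetched; // true while a look-ahead result is cached
  private boolean hasNext;

  public RecordReaderIterator(RecordReader<?, V> recordReader) {
    this.recordReader = recordReader;
  }

  // Same shape as the fetchNext() snippet above: advance once, cache the outcome.
  private void fetchNext() {
    try {
      this.hasNext = this.recordReader.nextKeyValue();
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IllegalStateException("Could not fetch next KeyValue pair.", e);
    } finally {
      this.fetched = true;
    }
  }

  @Override
  public boolean hasNext() {
    if (!this.fetched) {
      fetchNext(); // safe to call repeatedly; the result is cached until next()
    }
    return this.hasNext;
  }

  @Override
  public V next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    this.fetched = false; // consume the cached look-ahead
    try {
      return this.recordReader.getCurrentValue();
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new IllegalStateException(e);
    }
  }
}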
/**
 * {@inheritDoc}
 *
 * <p>This method will throw a {@link ClassCastException} if type {@code D} is not compatible
 * with type {@code K} when keys are supposed to be read, or if it is not compatible with
 * type {@code V} when values are supposed to be read.
 */
@Override
@SuppressWarnings("unchecked")
public D readRecord(@Deprecated D reuse) throws DataRecordException, IOException {
  try {
    if (this.recordReader.nextKeyValue()) {
      return this.readKeys
          ? (D) this.recordReader.getCurrentKey()
          : (D) this.recordReader.getCurrentValue();
    }
  } catch (InterruptedException ie) {
    throw new IOException(ie);
  }
  return null;
}
@Override
public boolean next(ImmutableBytesWritable rowKey, ResultWritable value) throws IOException {
  boolean next = false;
  try {
    next = recordReader.nextKeyValue();
    if (next) {
      rowKey.set(recordReader.getCurrentValue().getRow());
      value.setResult(recordReader.getCurrentValue());
    }
  } catch (InterruptedException e) {
    throw new IOException(e);
  }
  return next;
}
if (firstRecord) {
  // key & value are already read.
  firstRecord = false;
} else if (!realReader.nextKeyValue()) {
  eof = true; // strictly not required, just for consistency
  return false;
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  while (reader.nextKeyValue()) {
    // TODO titan05 integration -- the duplicate() call may be unnecessary
    final TinkerVertex maybeNullTinkerVertex =
        deser.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue());
    if (null != maybeNullTinkerVertex) {
      vertex = new VertexWritable(maybeNullTinkerVertex);
      //vertexQuery.filterRelationsOf(vertex); // TODO reimplement vertexquery filtering
      return true;
    }
  }
  return false;
}
public Tuple getNext() throws IOException {
  try {
    if (!reader.nextKeyValue()) {
      return null;
    }
@Override
public Tuple getNext() throws IOException {
  try {
    HCatRecord hr = (HCatRecord) (reader.nextKeyValue() ? reader.getCurrentValue() : null);
    Tuple t = PigHCatUtil.transformToTuple(hr, outputSchema);
    // TODO: we were discussing an iter interface, and also a LazyTuple;
    // change this when plans for that solidify.
    return t;
  } catch (ExecException e) {
    int errCode = 6018;
    String errMsg = "Error while reading input";
    throw new ExecException(errMsg, errCode, PigException.REMOTE_ENVIRONMENT, e);
  } catch (Exception eOther) {
    int errCode = 6018;
    String errMsg = "Error converting read value to tuple";
    throw new ExecException(errMsg, errCode, PigException.REMOTE_ENVIRONMENT, eOther);
  }
}
@Test
public void testRecordReader() throws Exception {
  List<String> paths = Lists.newArrayList("/path1", "/path2");
  GobblinWorkUnitsInputFormat.GobblinSplit split = new GobblinWorkUnitsInputFormat.GobblinSplit(paths);
  GobblinWorkUnitsInputFormat inputFormat = new GobblinWorkUnitsInputFormat();
  RecordReader<LongWritable, Text> recordReader =
      inputFormat.createRecordReader(split,
          new TaskAttemptContextImpl(new Configuration(), new TaskAttemptID("a", 1, TaskType.MAP, 1, 1)));

  recordReader.nextKeyValue();
  Assert.assertEquals(recordReader.getCurrentKey().get(), 0);
  Assert.assertEquals(recordReader.getCurrentValue().toString(), "/path1");
  recordReader.nextKeyValue();
  Assert.assertEquals(recordReader.getCurrentKey().get(), 1);
  Assert.assertEquals(recordReader.getCurrentValue().toString(), "/path2");
  Assert.assertFalse(recordReader.nextKeyValue());
}
if (realReader.nextKeyValue()) {
  firstRecord = true;
  valueObj = realReader.getCurrentValue();
}
RecordReader<LongWritable, BytesRefArrayWritable> rr =
    inputFormat.createRecordReader(splits.get(i), tac);
rr.initialize(splits.get(i), tac);
while (rr.nextKeyValue()) {
  readCount++;
}
assertTrue("Expected at least one record", recordReader.nextKeyValue()); key = recordReader.getCurrentKey(); value = recordReader.getCurrentValue(); assertTrue("Expected to read a second record", recordReader.nextKeyValue()); key = recordReader.getCurrentKey(); value = recordReader.getCurrentValue(); assertFalse("Expected only 2 records", recordReader.nextKeyValue());
assertTrue("Expected at least one record", recordReader.nextKeyValue()); key = recordReader.getCurrentKey(); value = recordReader.getCurrentValue(); assertTrue("Expected to read a second record", recordReader.nextKeyValue()); key = recordReader.getCurrentKey(); value = recordReader.getCurrentValue(); assertFalse("Expected only 2 records", recordReader.nextKeyValue());
while (rr.nextKeyValue()) {
  byte[] row = rr.getCurrentKey().get();
  verifyRowFromMap(rr.getCurrentKey(), rr.getCurrentValue());
}
@Override
public Edge next() {
  try {
    while (true) {
      if (this.edgeIterator.hasNext()) {
        return new HadoopEdge(this.edgeIterator.next(), this.graph);
      }
      if (this.readers.isEmpty()) {
        throw FastNoSuchElementException.instance();
      }
      if (this.readers.peek().nextKeyValue()) {
        this.edgeIterator = this.readers.peek().getCurrentValue().get().edges(Direction.OUT);
      } else {
        this.readers.remove().close();
      }
    }
  } catch (final Exception e) {
    throw new IllegalStateException(e.getMessage(), e);
  }
}
@Override
public Vertex next() {
  try {
    if (this.nextVertex != null) {
      final Vertex temp = this.nextVertex;
      this.nextVertex = null;
      return temp;
    } else {
      while (!this.readers.isEmpty()) {
        if (this.readers.peek().nextKeyValue()) {
          return new HadoopVertex(this.readers.peek().getCurrentValue().get(), this.graph);
        } else {
          this.readers.remove().close();
        }
      }
    }
    throw FastNoSuchElementException.instance();
  } catch (final Exception e) {
    throw new IllegalStateException(e.getMessage(), e);
  }
}
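// All of the snippets above rely on the same RecordReader contract: create the reader,
// initialize it, call nextKeyValue() to advance, read getCurrentKey()/getCurrentValue()
// only after it returns true, and close the reader once it returns false. A minimal
// end-to-end sketch of that loop, assuming a plain TextInputFormat over a placeholder
// local file (the path, split length, and TaskAttemptID are illustrative assumptions):
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class NextKeyValueLoop {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    TaskAttemptContextImpl context = new TaskAttemptContextImpl(conf, new TaskAttemptID());

    // One split covering the whole (placeholder) file; hosts may be null for local runs.
    Path input = new Path("/tmp/input.txt");
    InputSplit split = new FileSplit(input, 0, Long.MAX_VALUE, null);

    TextInputFormat inputFormat = new TextInputFormat();
    RecordReader<LongWritable, Text> reader = inputFormat.createRecordReader(split, context);
    try {
      reader.initialize(split, context);
      // getCurrentKey()/getCurrentValue() are only valid after nextKeyValue() returns true.
      while (reader.nextKeyValue()) {
        System.out.println(reader.getCurrentKey() + "\t" + reader.getCurrentValue());
      }
    } finally {
      reader.close();
    }
  }
}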