/** {@inheritDoc} */
@Override public IgniteInternalFuture<byte[]> readData(HadoopIgfsStreamDelegate delegate, long pos, int len,
    @Nullable byte[] outBuf, int outOff, int outLen) {
    IgfsInputStream stream = delegate.target();

    try {
        // Holds the overflow portion of the read (or the whole read when no caller buffer is given);
        // stays null when everything fits into outBuf.
        byte[] res = null;

        if (outBuf != null) {
            // Free space remaining in the caller-provided buffer from the write offset onwards.
            int outTailLen = outBuf.length - outOff;

            if (len <= outTailLen)
                // Entire request fits into the caller's buffer.
                stream.readFully(pos, outBuf, outOff, len);
            else {
                // Fill the caller's buffer first...
                stream.readFully(pos, outBuf, outOff, outTailLen);

                int remainderLen = len - outTailLen;

                res = new byte[remainderLen];

                // ...then continue from where that read stopped. The original re-read from 'pos',
                // duplicating the first outTailLen bytes and never delivering the tail of the range.
                stream.readFully(pos + outTailLen, res, 0, remainderLen);
            }
        }
        else {
            // No caller buffer: read the whole range into a fresh array.
            res = new byte[len];

            stream.readFully(pos, res, 0, len);
        }

        return new GridFinishedFuture<>(res);
    }
    catch (IllegalStateException | IOException e) {
        // Notify the registered stream listener (if any) before failing the future.
        HadoopIgfsStreamEventListener lsnr = lsnrs.get(delegate);

        if (lsnr != null)
            lsnr.onError(e.getMessage());

        // Failed future carrying the original cause.
        return new GridFinishedFuture<>(e);
    }
}
/**
 * Read some data from the given file with the given offset.
 *
 * @param path File path.
 * @param off Offset.
 * @param len Length.
 * @throws Exception If failed.
 */
private void read(IgfsPath path, int off, int len) throws Exception {
    // try-with-resources guarantees the stream is closed even when readFully throws;
    // the original called close() unconditionally after the read and leaked on failure.
    try (IgfsInputStream is = igfsPrimary.open(path)) {
        is.readFully(off, new byte[len]);
    }
}
// Read exactly chunk.length bytes from position 'pos' into a fresh buffer
// (presumably to compare against 'chunk' afterwards — context not visible here).
byte[] buf = new byte[chunk.length]; is.readFully(pos, buf);
// Record the byte count, then read that many bytes from the actual stream at (pos + seek).
// NOTE(review): 'read', 'seek', 'buf1' are defined outside this view — assumes buf1.length >= read.
i1 = read; actIn.readFully(pos + seek, buf1, 0, read);
// Full read from the start of the stream into 'buf'.
is.readFully(0, buf); // Will generate no events.
// Fill 'readBuf' completely, starting at position 'pos'.
in.readFully(pos, readBuf);
// Read two blocks' worth of data from the start of the file, then close the stream.
// NOTE(review): close() is not in a finally block — a failed read leaks the stream here.
is.readFully(0, new byte[blockSize * 2]); is.close();