/**
 * Positioned read that fills {@code buffer[bufferOffset .. bufferOffset+bufferLength)}
 * from {@code position}, recording throughput and cumulative read counters.
 *
 * @throws IOException if the underlying stream fails or hits EOF early
 */
@Override
public void readFully(long position, byte[] buffer, int bufferOffset, int bufferLength)
        throws IOException
{
    // Time only the delegated read so the reported rate reflects I/O alone.
    long startNanos = System.nanoTime();
    inputStream.readFully(position, buffer, bufferOffset, bufferLength);
    long elapsedNanos = System.nanoTime() - startNanos;

    stats.readDataBytesPerSecond(bufferLength, elapsedNanos);
    readTimeNanos += elapsedNanos;
    readBytes += bufferLength;
}
/**
 * Reads bytes from {@code fileStream} at {@code currentOffset} into {@code buffer},
 * filling the buffer's remaining space.
 *
 * @param buffer destination buffer; bytes are appended at its current position
 * @param currentOffset absolute file offset to read from
 * @return the number of bytes read into the buffer
 * @throws IOException if the underlying stream fails or hits EOF early
 */
public int read(ByteBuffer buffer, long currentOffset) throws IOException {
    // Size the staging array by remaining(), not capacity(): if the buffer has a
    // non-zero position, copying capacity() bytes would throw BufferOverflowException
    // on put(). For a freshly-allocated buffer the two are identical.
    int bytesToRead = buffer.remaining();
    byte[] staging = new byte[bytesToRead];
    this.fileStream.readFully(currentOffset, staging);
    buffer.put(staging);
    return bytesToRead;
} }
/**
 * Checks whether the archive starts with a LFH (local file header).
 * If it doesn't, it may be an empty archive.
 *
 * @return true when the first word of the archive equals the LFH signature
 * @throws IOException on seek/read failure
 */
private boolean startsWithLocalFileHeader() throws IOException {
    // Rewind to the very start and read one signature word into the shared buffer.
    archive.seek(0);
    archive.readFully(WORD_BUF);
    final boolean signatureMatches = Arrays.equals(WORD_BUF, ZipOutputStream.LFH_SIG);
    return signatureMatches;
}
private void readInternal(long position, byte[] buffer, int bufferOffset, int bufferLength) { try { inputStream.readFully(position, buffer, bufferOffset, bufferLength); } catch (PrestoException e) { // just in case there is a Presto wrapper or hook throw e; } catch (Exception e) { throw new PrestoException(HIVE_FILESYSTEM_ERROR, format("Error reading from %s at position %s", id, position), e); } }
@Override public void readFully(long position, byte[] buffer, int offset, int length) throws IOException { try { in.readFully(position, buffer, offset, length); } catch (FileNotFoundException e) { tryOpen().readFully(position, buffer, offset, length); } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt() tryOpen().readFully(position, buffer, offset, length); } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt() tryOpen().readFully(position, buffer, offset, length); } }
/**
 * Reads the entire contents of {@code path} into a byte array.
 *
 * @param fs filesystem to read from
 * @param path file to read
 * @return the complete file contents
 * @throws IOException if the file is too large for a byte array or the read fails
 */
private static byte[] readFile(final FileSystem fs, final Path path) throws IOException {
    // available() only reports bytes readable without blocking -- it is NOT the
    // file length. Size the buffer from the file status instead.
    final long length = fs.getFileStatus(path).getLen();
    if (length > Integer.MAX_VALUE) {
        throw new IOException("File " + path + " too large to buffer: " + length + " bytes");
    }
    final byte[] rawData = new byte[(int) length];
    // try-with-resources closes the stream even when readFully throws.
    try (FSDataInputStream in = fs.open(path)) {
        in.readFully(rawData);
    }
    return rawData;
}
/**
 * position readable again.
 *
 * Bounded positioned read: validates the arguments, rejects reads that would run
 * past this stream's slice [start, end), then delegates to the underlying stream
 * shifted by {@code start}.
 */
@Override
public void readFully(long pos, byte[] b, int offset, int length) throws IOException {
    validatePositionedReadArgs(pos, b, offset, length);
    // Zero-length reads are a no-op by contract.
    if (length == 0) {
        return;
    }
    final boolean wouldReadPastEnd = start + length + pos > end;
    if (wouldReadPastEnd) {
        throw new EOFException("Not enough bytes to read.");
    }
    underLyingStream.readFully(pos + start, b, offset, length);
}
/**
 * Searches for the "End of central dir record", parses
 * it and positions the stream at the first central directory
 * record.
 */
private void positionAtCentralDirectory32() throws IOException {
    // Skip to the "offset of central directory" field of the EOCD record,
    // read it, and seek there.
    skipBytes(CFD_LOCATOR_OFFSET);
    archive.readFully(WORD_BUF);
    final long centralDirectoryOffset = ZipLong.getValue(WORD_BUF);
    archive.seek(centralDirectoryOffset);
}
// Positioned read with throughput accounting; maps failures onto Presto error codes.
// NOTE: the catch order matters -- BlockMissingException must be tested before the
// generic IOException branch (it would otherwise be classified as a filesystem error).
@Override
protected void readInternal(long position, byte[] buffer, int bufferOffset, int bufferLength)
{
    try {
        // Time only the delegated read so the stats reflect raw I/O throughput.
        long readStart = System.nanoTime();
        inputStream.readFully(position, buffer, bufferOffset, bufferLength);
        stats.readDataBytesPerSecond(bufferLength, System.nanoTime() - readStart);
    }
    catch (PrestoException e) {
        // just in case there is a Presto wrapper or hook
        throw e;
    }
    catch (Exception e) {
        String message = format("Error reading from %s at position %s", this, position);
        // Matched by simple class name -- presumably to avoid a compile-time
        // dependency on the HDFS BlockMissingException class; TODO confirm.
        if (e.getClass().getSimpleName().equals("BlockMissingException")) {
            throw new PrestoException(HIVE_MISSING_DATA, message, e);
        }
        // Any other I/O failure is a filesystem error; everything else is unknown.
        if (e instanceof IOException) {
            throw new PrestoException(HIVE_FILESYSTEM_ERROR, message, e);
        }
        throw new PrestoException(HIVE_UNKNOWN_ERROR, message, e);
    }
}
}
/**
 * Loads length-prefixed keys from {@code inputPath} into {@code sampled}.
 * File layout: an int count, then {@code count} records of (int length, bytes).
 *
 * @throws IOException if the file cannot be opened or read
 */
public void addSampleFile(Path inputPath, JobConf job) throws IOException {
    FileSystem fs = inputPath.getFileSystem(job);
    FSDataInputStream input = fs.open(inputPath);
    try {
        final int keyCount = input.readInt();
        for (int keyIndex = 0; keyIndex < keyCount; keyIndex++) {
            final int keyLength = input.readInt();
            final byte[] keyBytes = new byte[keyLength];
            input.readFully(keyBytes);
            sampled.add(keyBytes);
        }
    }
    finally {
        // closeStream swallows close() failures, matching best-effort cleanup.
        IOUtils.closeStream(input);
    }
}
/**
 * Reads and deserializes a TableDescriptor from the file described by {@code status}.
 *
 * @param fs filesystem holding the descriptor file
 * @param status status of the descriptor file (supplies path and length)
 * @return the parsed descriptor
 * @throws IOException if the read fails or the content cannot be deserialized
 */
private static TableDescriptor readTableDescriptor(FileSystem fs, FileStatus status)
    throws IOException {
  int len = Ints.checkedCast(status.getLen());
  byte[] content = new byte[len];
  // try-with-resources guarantees the stream is closed even if readFully throws.
  try (FSDataInputStream fsDataInputStream = fs.open(status.getPath())) {
    fsDataInputStream.readFully(content);
  }
  try {
    return TableDescriptorBuilder.parseFrom(content);
  } catch (DeserializationException e) {
    throw new IOException("content=" + Bytes.toShort(content), e);
  }
}
/**
 * Appends sample keys read from {@code inputPath} to {@code sampled}. The file
 * starts with an int record count followed by (int length, bytes) pairs.
 *
 * @throws IOException if opening or reading the sample file fails
 */
public void addSampleFile(Path inputPath, JobConf job) throws IOException {
    FSDataInputStream input = inputPath.getFileSystem(job).open(inputPath);
    try {
        int remaining = input.readInt();
        while (remaining > 0) {
            byte[] key = new byte[input.readInt()];
            input.readFully(key);
            sampled.add(key);
            remaining--;
        }
    }
    finally {
        IOUtils.closeStream(input);
    }
}
/**
 * Reads the whole of {@code file} and returns its contents as a String.
 *
 * @param file file to read
 * @return the file contents decoded with the platform default charset
 *         (kept for backward compatibility -- NOTE(review): consider an
 *         explicit charset if callers can tolerate the change)
 * @throws IOException if the read fails
 */
private String readFully(Path file) throws IOException {
    // available() is not the file length; size the buffer from the file status.
    long length = fs.getFileStatus(file).getLen();
    byte[] b = new byte[(int) length];
    FSDataInputStream in = fs.open(file);
    try {
        in.readFully(b);
    } finally {
        // Close in finally so the stream is not leaked when readFully throws.
        in.close();
    }
    return new String(b);
}
/**
 * Corrupts a data file in place by growing or shrinking it by
 * {@code addRemoveBytes} bytes (Integer.MIN_VALUE truncates to empty):
 * copies up to the new length into a ".corrupt" sibling, then renames it
 * over the original.
 *
 * @throws Exception on any filesystem failure
 */
private void corruptDataFile(final String file, final Configuration conf,
    final int addRemoveBytes) throws Exception {
  Path bPath = new Path(file);
  Path cPath = new Path(bPath.getParent(), bPath.getName() + ".corrupt");
  FileSystem fs = bPath.getFileSystem(conf);
  FileStatus fileStatus = fs.getFileStatus(bPath);
  // Integer.MIN_VALUE is the sentinel for "replace with an empty file".
  int len = addRemoveBytes == Integer.MIN_VALUE ? 0 : (int) fileStatus.getLen() + addRemoveBytes;
  byte[] buffer = new byte[len];
  // try-with-resources closes both streams even if the copy fails midway.
  try (FSDataInputStream fdis = fs.open(bPath)) {
    // Read only min(original length, new length); extra bytes stay zero-filled.
    fdis.readFully(0, buffer, 0, (int) Math.min(fileStatus.getLen(), buffer.length));
  }
  try (FSDataOutputStream fdos = fs.create(cPath, true)) {
    fdos.write(buffer, 0, buffer.length);
  }
  fs.delete(bPath, false);
  fs.rename(cPath, bPath);
}
/**
 * Replaces {@code file} with a copy whose length differs by
 * {@code addRemoveBytes} bytes (Integer.MIN_VALUE means truncate to empty),
 * simulating on-disk corruption for tests.
 *
 * @throws Exception on any filesystem failure
 */
private void corruptDataFile(final String file, final Configuration conf,
    final int addRemoveBytes) throws Exception {
  Path bPath = new Path(file);
  Path cPath = new Path(bPath.getParent(), bPath.getName() + ".corrupt");
  FileSystem fs = bPath.getFileSystem(conf);
  FileStatus fileStatus = fs.getFileStatus(bPath);
  // Integer.MIN_VALUE is the sentinel for "replace with an empty file".
  int len = addRemoveBytes == Integer.MIN_VALUE ? 0 : (int) fileStatus.getLen() + addRemoveBytes;
  byte[] buffer = new byte[len];
  // try-with-resources prevents stream leaks when a read or write throws.
  try (FSDataInputStream fdis = fs.open(bPath)) {
    // Copy at most min(old length, new length); any added tail stays zeroed.
    fdis.readFully(0, buffer, 0, (int) Math.min(fileStatus.getLen(), buffer.length));
  }
  try (FSDataOutputStream fdos = fs.create(cPath, true)) {
    fdos.write(buffer, 0, buffer.length);
  }
  fs.delete(bPath, false);
  fs.rename(cPath, bPath);
}
/** {@inheritDoc} */
@Override
public synchronized void readFully(long pos, byte[] buf, int off, int len) throws IOException {
    // Bracket the delegated read with the read-timing hooks; readEnd() must run
    // even when the read throws.
    readStart();
    try {
        is.readFully(pos, buf, off, len);
    }
    finally {
        readEnd();
    }
    total += len;
    // Log the random read only when client logging is switched on.
    if (clientLog.isLogEnabled()) {
        clientLog.logRandomRead(logStreamId, pos, len);
    }
}
/** {@inheritDoc} */
@Override
public synchronized void readFully(long pos, byte[] buf) throws IOException {
    // Wrap the delegated full-buffer read in the timing hooks; the finally
    // block guarantees readEnd() fires on failure too.
    readStart();
    try {
        is.readFully(pos, buf);
    }
    finally {
        readEnd();
    }
    final int bytesRead = buf.length;
    total += bytesRead;
    if (clientLog.isLogEnabled()) {
        clientLog.logRandomRead(logStreamId, pos, bytesRead);
    }
}
/**
 * Loads a serialized Model from {@code f}: reads the whole file into memory,
 * deserializes the header and body, and publishes the model under its key
 * (when it has one).
 *
 * @param f path of the serialized model
 * @return the deserialized model
 * @throws IOException if the file cannot be read
 */
@Override
public Model load(Path f) throws IOException {
  // Size the buffer from the filesystem's content summary, then fill it.
  byte[] buf = MemoryManager.malloc1((int) _hfs.getContentSummary(f).getLength());
  // try-with-resources closes the stream even when readFully throws; opening
  // after the allocation also avoids leaking the stream if malloc1 fails.
  try (FSDataInputStream is = _hfs.open(f)) {
    is.readFully(buf);
  }
  AutoBuffer ab = ab4read(buf);
  Model m = loadHeader(ab);
  m.getModelSerializer().load(m, ab);
  if (m._key != null) {
    DKV.put(m._key, m);
  }
  return m;
}
// Writes a single 50 MB chunk through the fan-out output, flushes it, and
// verifies the reported flush length, the on-disk file length, and that the
// bytes read back are identical to what was written.
@Test
public void testWriteLargeChunk() throws IOException, InterruptedException, ExecutionException {
  Path f = new Path("/" + name.getMethodName());
  EventLoop eventLoop = EVENT_LOOP_GROUP.next();
  // 1 GB block size so the whole write stays in one block.
  FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
    false, (short) 3, 1024 * 1024 * 1024, eventLoop, CHANNEL_CLASS);
  byte[] b = new byte[50 * 1024 * 1024];
  ThreadLocalRandom.current().nextBytes(b);
  out.write(b);
  out.flush(false);
  // The second flush's future reports the total acked length so far.
  assertEquals(b.length, out.flush(false).get().longValue());
  out.close();
  assertEquals(b.length, FS.getFileStatus(f).getLen());
  byte[] actual = new byte[b.length];
  try (FSDataInputStream in = FS.open(f)) {
    in.readFully(actual);
  }
  assertArrayEquals(b, actual);
} }
// Overwrites an on-disk table descriptor with its raw serialized bytes, then
// verifies the descriptor reads back equal both through FSTableDescriptors and
// via a direct file read + parse.
@Test
public void testReadingOldHTDFromFS() throws IOException, DeserializationException {
  final String name = this.name.getMethodName();
  FileSystem fs = FileSystem.get(UTIL.getConfiguration());
  Path rootdir = UTIL.getDataTestDir(name);
  FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
  TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
  Path descriptorFile = fstd.updateTableDescriptor(htd);
  // Rewrite the descriptor file with the plain serialized form.
  try (FSDataOutputStream out = fs.create(descriptorFile, true)) {
    out.write(TableDescriptorBuilder.toByteArray(htd));
  }
  // A fresh FSTableDescriptors instance must read it back unchanged.
  FSTableDescriptors fstd2 = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
  TableDescriptor td2 = fstd2.get(htd.getTableName());
  assertEquals(htd, td2);
  FileStatus descriptorFile2 =
    FSTableDescriptors.getTableInfoPath(fs, fstd2.getTableDir(htd.getTableName()));
  // NOTE(review): buffer is pre-filled with the expected bytes and is the same
  // length as the file, so readFully overwrites it with the on-disk content.
  byte[] buffer = TableDescriptorBuilder.toByteArray(htd);
  try (FSDataInputStream in = fs.open(descriptorFile2.getPath())) {
    in.readFully(buffer);
  }
  TableDescriptor td3 = TableDescriptorBuilder.parseFrom(buffer);
  assertEquals(htd, td3);
}