/** * Constructor for FsckPackParser * * @param db * the object database which stores repository's data. * @param channel * readable channel of the pack file. */ public FsckPackParser(ObjectDatabase db, ReadableChannel channel) { super(db, Channels.newInputStream(channel)); this.channel = channel; setCheckObjectCollisions(false); this.crc = new CRC32(); this.blockSize = channel.blockSize() > 0 ? channel.blockSize() : 65536; }
/**
 * Reads one block of {@code blockSize} bytes from the pack channel.
 *
 * @param block
 *            zero-based block index; the channel is positioned at
 *            {@code block * blockSize} before reading.
 * @return a full {@code blockSize} array; a shorter copy when EOF was hit
 *         part-way through the block; or {@code null} when the start
 *         position is already at/past EOF.
 * @throws IOException
 *             the channel cannot be positioned or read.
 */
private byte[] readFromChannel(long block) throws IOException {
	channel.position(block * blockSize);
	ByteBuffer buf = ByteBuffer.allocate(blockSize);
	while (buf.hasRemaining()) {
		if (channel.read(buf) == -1) {
			// EOF before the block filled; buf.position() is the byte count.
			int n = buf.position();
			return n == 0 ? null : Arrays.copyOf(buf.array(), n);
		}
	}
	return buf.array();
}
ByteBuffer buf = newCopyBuffer(out, rc); if (ctx.getOptions().getStreamPackBufferSize() > 0) rc.setReadAheadBytes(ctx.getOptions().getStreamPackBufferSize()); long position = 12; long remaining = length - (12 + 20); position += n; remaining -= n; rc.position(position); packHeadSkipped = true; continue;
int sz = ctx.getOptions().getStreamPackBufferSize(); if (sz > 0) { rc.setReadAheadBytes(sz); rc.close();
/**
 * Fills {@code buf} from {@code rc} until the buffer is full or the channel
 * stops producing bytes (EOF or a zero-length read). Always issues at least
 * one read, matching the channel's contract.
 *
 * @param rc
 *            channel to read from.
 * @param buf
 *            destination buffer.
 * @return the buffer's final position, i.e. the total bytes it now holds.
 * @throws IOException
 *             the channel cannot be read.
 */
static int read(ReadableChannel rc, ByteBuffer buf) throws IOException {
	for (;;) {
		int n = rc.read(buf);
		if (n <= 0 || !buf.hasRemaining()) {
			return buf.position();
		}
	}
}
/**
 * Length of the pack in bytes, computed lazily on first request and cached
 * on the file descriptor thereafter.
 */
@Override
public long size() throws IOException {
	long len = file.length;
	if (len >= 0) {
		return len;
	}
	// Negative means "unknown": ask the underlying channel once, remember it.
	len = open().size();
	file.length = len;
	return len;
}
@Override public void close() { if (ch != null) { try { ch.close(); } catch (IOException e) { // Ignore read close failures. } finally { ch = null; } } } }
/**
 * Reads {@code cnt} bytes starting at {@code pos}, handing back a zero-copy
 * view of the cached block when the request is block-aligned and fits.
 */
@Override
public ByteBuffer read(long pos, int cnt) throws IOException {
	// Before the first load of a cold position, turn on read-ahead.
	if (ch == null && readAhead > 0 && notInCache(pos)) {
		open().setReadAheadBytes(readAhead);
	}
	DfsBlock b = cache.getOrLoad(file, pos, ctx, ch);
	if (b.start == pos && b.size() >= cnt) {
		// Aligned cache hit: expose the cached bytes without copying.
		return b.zeroCopyByteBuffer(cnt);
	}
	// Misaligned or short block: assemble the span into a fresh array.
	byte[] data = new byte[cnt];
	ByteBuffer out = ByteBuffer.wrap(data);
	out.position(ctx.copy(file, pos, data, 0, cnt));
	return out;
}
InputStream in = Channels.newInputStream(rc); int wantSize = 8192; int bs = rc.blockSize(); if (0 < bs && bs < wantSize) bs = (wantSize / bs) * bs; bs = wantSize; idx = PackIndex.read(new BufferedInputStream(in, bs)); ctx.stats.readIdxBytes += rc.position(); } finally { ctx.stats.readIdxMicros += elapsedMicros(start);
ByteBuffer buf = newCopyBuffer(out, rc); if (ctx.getOptions().getStreamPackBufferSize() > 0) rc.setReadAheadBytes(ctx.getOptions().getStreamPackBufferSize()); long position = 12; long remaining = length - (12 + 20); position += n; remaining -= n; rc.position(position); continue;
int sz = ctx.getOptions().getStreamPackBufferSize(); if (sz > 0) { rc.setReadAheadBytes(sz); rc.close();
/**
 * Reads from {@code rc} into {@code buf} until the buffer has no room left
 * or the channel returns EOF / zero bytes. At least one read is performed.
 *
 * @param rc
 *            source channel.
 * @param buf
 *            buffer receiving the bytes.
 * @return total bytes now held in the buffer (its position).
 * @throws IOException
 *             the channel cannot be read.
 */
static int read(ReadableChannel rc, ByteBuffer buf) throws IOException {
	int n = rc.read(buf);
	while (n > 0 && buf.hasRemaining()) {
		n = rc.read(buf);
	}
	return buf.position();
}
/**
 * Size of the pack in bytes. The first call queries the channel; the result
 * is cached in {@code file.length} so later calls avoid the channel.
 */
@Override
public long size() throws IOException {
	long sz = file.length;
	if (sz < 0) {
		// Unknown so far — resolve via the channel and cache the answer.
		sz = open().size();
		file.length = sz;
	}
	return sz;
}
@Override public void close() { if (ch != null) { try { ch.close(); } catch (IOException e) { // Ignore read close failures. } finally { ch = null; } } } }
/**
 * Returns {@code cnt} bytes of the pack beginning at {@code pos}. An
 * aligned request that fits a cached block is served zero-copy; otherwise
 * the bytes are copied into a newly allocated buffer.
 */
@Override
public ByteBuffer read(long pos, int cnt) throws IOException {
	// Cold position and read-ahead configured: enable it before loading.
	if (ch == null && readAhead > 0 && notInCache(pos)) {
		open().setReadAheadBytes(readAhead);
	}
	DfsBlock blk = cache.getOrLoad(file, pos, ctx, ch);
	if (blk.start == pos && blk.size() >= cnt) {
		// Fast path: no copy needed for an aligned, fully-contained span.
		return blk.zeroCopyByteBuffer(cnt);
	}
	// Slow path: gather the requested range into a private array.
	byte[] tmp = new byte[cnt];
	ByteBuffer result = ByteBuffer.wrap(tmp);
	result.position(ctx.copy(file, pos, tmp, 0, cnt));
	return result;
}