/** Enqueue a single chunk for prefetching; delegates to the collection form. */
void push(ChunkKey key) { push(Collections.singleton(key)); }
/** Enqueue a single chunk for prefetching; delegates to the collection form. */
void push(ChunkKey key) { push(Collections.singleton(key)); }
/**
 * Parse a batch of textual chunk keys and queue them for prefetching.
 *
 * @param list
 *            chunk keys in string form, e.g. as stored in a prefetch hint.
 */
private void push(List<String> list) {
	List<ChunkKey> parsed = new ArrayList<ChunkKey>(list.size());
	for (int i = 0; i < list.size(); i++)
		parsed.add(ChunkKey.fromString(list.get(i)));
	push(parsed);
}
/**
 * Parse a batch of textual chunk keys and queue them for prefetching.
 *
 * @param list
 *            chunk keys in string form, e.g. as stored in a prefetch hint.
 */
private void push(List<String> list) {
	List<ChunkKey> parsed = new ArrayList<ChunkKey>(list.size());
	for (int i = 0; i < list.size(); i++)
		parsed.add(ChunkKey.fromString(list.get(i)));
	push(parsed);
}
/**
 * Take a loaded chunk out of the ready set and mark it consumed.
 * <p>
 * NOTE(review): assumes {@code key} is present in {@code ready}; a miss
 * would NPE on the next line — confirm callers check readiness first.
 *
 * @param key
 *            chunk to consume.
 * @return the chunk, removed from the ready set.
 */
private PackChunk useReadyChunk(ChunkKey key) {
	PackChunk pc = ready.remove(key);
	status.put(pc.getChunkKey(), Status.DONE);
	bytesReady -= pc.getTotalSize();
	if (automaticallyPushHints) {
		// Consuming a chunk frees budget; queue its hints and refill.
		push(pc.getMeta());
		maybeStartGet();
	}
	return pc;
}
/**
 * Take a loaded chunk out of the ready set and mark it consumed.
 * <p>
 * NOTE(review): assumes {@code key} is present in {@code ready}; a miss
 * would NPE on the next line — confirm callers check readiness first.
 *
 * @param key
 *            chunk to consume.
 * @return the chunk, removed from the ready set.
 */
private PackChunk useReadyChunk(ChunkKey key) {
	PackChunk pc = ready.remove(key);
	status.put(pc.getChunkKey(), Status.DONE);
	bytesReady -= pc.getTotalSize();
	if (automaticallyPushHints) {
		// Consuming a chunk frees budget; queue its hints and refill.
		push(pc.getMeta());
		maybeStartGet();
	}
	return pc;
}
@Override public void walkAdviceBeginCommits(RevWalk rw, Collection<RevCommit> roots) throws IOException { endPrefetch(); // Don't assign the prefetcher right away. Delay until its // configured as push might invoke our own methods that may // try to call back into the active prefetcher. // Prefetcher p = prefetch(OBJ_COMMIT, readerOptions.getWalkCommitsPrefetchRatio()); p.push(this, roots); prefetcher = p; }
@Override public void walkAdviceBeginCommits(RevWalk rw, Collection<RevCommit> roots) throws IOException { endPrefetch(); // Don't assign the prefetcher right away. Delay until its // configured as push might invoke our own methods that may // try to call back into the active prefetcher. // Prefetcher p = prefetch(OBJ_COMMIT, readerOptions.getWalkCommitsPrefetchRatio()); p.push(this, roots); prefetcher = p; }
/**
 * Queue the prefetch hints recorded in a chunk's meta block.
 *
 * @param meta
 *            meta block of a recently used chunk; may be null, in which
 *            case nothing is queued.
 */
void push(ChunkMeta meta) {
	if (meta == null)
		return;

	final ChunkMeta.PrefetchHint hint;
	switch (objectType) {
	case OBJ_COMMIT:
		hint = meta.getCommitPrefetch();
		break;
	case OBJ_TREE:
		hint = meta.getTreePrefetch();
		break;
	default:
		// Hints exist only for commit and tree walks.
		return;
	}

	if (hint == null)
		return;
	synchronized (this) {
		if (!followEdgeHints || hint.getEdgeCount() <= 0)
			push(hint.getSequentialList());
		else
			push(hint.getEdgeList());
	}
}
/**
 * Group pending objects by their chunk, creating the prefetcher lazily.
 * <p>
 * The first new chunk encountered splits the reader's chunk budget
 * between the recent-chunk cache and the prefetch window.
 */
private void init() throws IOException {
	for (;;) {
		ObjectWithInfo<T> c = nextObjectWithInfo();
		if (c == null)
			break;
		ChunkKey chunkKey = c.chunkKey;
		Collection<ObjectWithInfo<T>> members = byChunk.get(chunkKey);
		if (members == null) {
			members = new ArrayList<ObjectWithInfo<T>>();
			byChunk.put(chunkKey, members);
			if (prefetcher == null) {
				// Carve the prefetch window out of the chunk limit and
				// shrink the recent-chunk cache by the same amount.
				int limit = reader.getOptions().getChunkLimit();
				int ratio = reader.getOptions().getOpenQueuePrefetchRatio();
				int prefetchLimit = (int) (limit * (ratio / 100.0));
				reader.getRecentChunks().setMaxBytes(limit - prefetchLimit);
				prefetcher = new Prefetcher(reader, 0, prefetchLimit);
			}
			prefetcher.push(chunkKey);
		}
		members.add(c);
	}
	chunkItr = byChunk.values().iterator();
}
// NOTE(review): brace below closes a scope that begins outside this view.
}
/**
 * Queue the prefetch hints recorded in a chunk's meta block.
 *
 * @param meta
 *            meta block of a recently used chunk; may be null, in which
 *            case nothing is queued.
 */
void push(ChunkMeta meta) {
	if (meta == null)
		return;

	final ChunkMeta.PrefetchHint hint;
	switch (objectType) {
	case OBJ_COMMIT:
		hint = meta.getCommitPrefetch();
		break;
	case OBJ_TREE:
		hint = meta.getTreePrefetch();
		break;
	default:
		// Hints exist only for commit and tree walks.
		return;
	}

	if (hint == null)
		return;
	synchronized (this) {
		if (!followEdgeHints || hint.getEdgeCount() <= 0)
			push(hint.getSequentialList());
		else
			push(hint.getEdgeList());
	}
}
/**
 * Group pending objects by their chunk, creating the prefetcher lazily.
 * <p>
 * The first new chunk encountered splits the reader's chunk budget
 * between the recent-chunk cache and the prefetch window.
 */
private void init() throws IOException {
	for (;;) {
		ObjectWithInfo<T> c = nextObjectWithInfo();
		if (c == null)
			break;
		ChunkKey chunkKey = c.chunkKey;
		Collection<ObjectWithInfo<T>> members = byChunk.get(chunkKey);
		if (members == null) {
			members = new ArrayList<ObjectWithInfo<T>>();
			byChunk.put(chunkKey, members);
			if (prefetcher == null) {
				// Carve the prefetch window out of the chunk limit and
				// shrink the recent-chunk cache by the same amount.
				int limit = reader.getOptions().getChunkLimit();
				int ratio = reader.getOptions().getOpenQueuePrefetchRatio();
				int prefetchLimit = (int) (limit * (ratio / 100.0));
				reader.getRecentChunks().setMaxBytes(limit - prefetchLimit);
				prefetcher = new Prefetcher(reader, 0, prefetchLimit);
			}
			prefetcher.push(chunkKey);
		}
		members.add(c);
	}
	chunkItr = byChunk.values().iterator();
}
// NOTE(review): brace below closes a scope that begins outside this view.
}
@Override public void walkAdviceBeginTrees(ObjectWalk ow, RevCommit min, RevCommit max) throws IOException { endPrefetch(); // Don't assign the prefetcher right away. Delay until its // configured as push might invoke our own methods that may // try to call back into the active prefetcher. // Prefetcher p = prefetch(OBJ_TREE, readerOptions.getWalkTreesPrefetchRatio()); p.push(this, min.getTree(), max.getTree()); prefetcher = p; }
@Override public void walkAdviceBeginTrees(ObjectWalk ow, RevCommit min, RevCommit max) throws IOException { endPrefetch(); // Don't assign the prefetcher right away. Delay until its // configured as push might invoke our own methods that may // try to call back into the active prefetcher. // Prefetcher p = prefetch(OBJ_TREE, readerOptions.getWalkTreesPrefetchRatio()); p.push(this, min.getTree(), max.getTree()); prefetcher = p; }
// NOTE(review): statement fragment without surrounding context — queues the
// chunks of `order` into the prefetcher; confirm `order` is built above.
prefetch.push(order);
void push(DhtReader ctx, Collection<RevCommit> roots) { // Approximate walk by using hints from the most recent commit. // Since the commits were recently parsed by the reader, we can // ask the reader for their chunk locations and most likely get // cache hits. int time = -1; PackChunk chunk = null; for (RevCommit cmit : roots) { if (time < cmit.getCommitTime()) { ChunkAndOffset p = ctx.getChunkGently(cmit); if (p != null && p.chunk.getMeta() != null) { time = cmit.getCommitTime(); chunk = p.chunk; } } } if (chunk != null) { synchronized (this) { status.put(chunk.getChunkKey(), Status.DONE); push(chunk.getMeta()); } } }
void push(DhtReader ctx, Collection<RevCommit> roots) { // Approximate walk by using hints from the most recent commit. // Since the commits were recently parsed by the reader, we can // ask the reader for their chunk locations and most likely get // cache hits. int time = -1; PackChunk chunk = null; for (RevCommit cmit : roots) { if (time < cmit.getCommitTime()) { ChunkAndOffset p = ctx.getChunkGently(cmit); if (p != null && p.chunk.getMeta() != null) { time = cmit.getCommitTime(); chunk = p.chunk; } } } if (chunk != null) { synchronized (this) { status.put(chunk.getChunkKey(), Status.DONE); push(chunk.getMeta()); } } }
void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx) throws IOException { if (keyList == null) init(); // Clear the recent chunks because all of the reader's // chunk limit should be made available for prefetch. int cacheLimit = ctx.getOptions().getChunkLimit(); ctx.getRecentChunks().setMaxBytes(0); try { Prefetcher p = new Prefetcher(ctx, 0, cacheLimit); p.push(Arrays.asList(keyList)); copyPack(out, p, validate); } finally { ctx.getRecentChunks().setMaxBytes(cacheLimit); } }
void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx) throws IOException { if (keyList == null) init(); // Clear the recent chunks because all of the reader's // chunk limit should be made available for prefetch. int cacheLimit = ctx.getOptions().getChunkLimit(); ctx.getRecentChunks().setMaxBytes(0); try { Prefetcher p = new Prefetcher(ctx, 0, cacheLimit); p.push(Arrays.asList(keyList)); copyPack(out, p, validate); } finally { ctx.getRecentChunks().setMaxBytes(cacheLimit); } }
// NOTE(review): statement fragment without surrounding context — queues the
// starting chunk, then starts a background load if a slot is free.
push(startKey); maybeStartGet();