/**
 * Opens a stream by forwarding to the wrapped loader returned by
 * {@code delegate()}.
 * NOTE(review): the enclosing (presumably delegating wrapper) class begins
 * before this chunk; only this method and the closing braces are visible.
 */
@Override public ObjectStream openStream() throws IOException { return delegate().openStream(); } }
/**
 * Hashes the content of a large object, classifying it as text or binary
 * first so the hash pass can be told which kind it is reading.
 *
 * @param obj
 *            loader for the object to hash; its stream is opened twice
 * @throws IOException
 *             the object content cannot be read
 * @throws TableFullException
 *             propagated from {@code hash(...)}
 */
private void hashLargeObject(ObjectLoader obj)
		throws IOException, TableFullException {
	// First pass: sniff leading bytes to decide binary vs. text.
	final boolean isText;
	try (ObjectStream probe = obj.openStream()) {
		isText = !RawText.isBinary(probe);
	}
	// Second pass: re-open from the start and feed everything to the hasher.
	try (ObjectStream content = obj.openStream()) {
		hash(content, content.getSize(), isText);
	}
}
/**
 * Reads a push certificate from the blob at the current walk position.
 *
 * @param tw
 *            walk positioned on the candidate entry; may be null
 * @return the parsed certificate, or null when there is no file entry
 * @throws IOException
 *             the blob cannot be read or parsed
 */
static PushCertificate read(TreeWalk tw) throws IOException {
	if (tw == null) {
		return null;
	}
	// Only regular-file entries can carry a certificate blob.
	if ((tw.getRawMode(0) & TYPE_FILE) != TYPE_FILE) {
		return null;
	}
	ObjectLoader loader = tw.getObjectReader().open(tw.getObjectId(0),
			OBJ_BLOB);
	try (InputStream in = loader.openStream();
			Reader r = new BufferedReader(
					new InputStreamReader(in, UTF_8))) {
		return PushCertificateParser.fromReader(r);
	}
}
/**
 * Loads and parses the attributes blob identified by {@code objectId}.
 *
 * @param reader
 *            reader used to access the blob
 * @return the parsed node, or null when it contains no rules
 * @throws IOException
 *             the blob cannot be read
 */
AttributesNode load(ObjectReader reader) throws IOException {
	AttributesNode node = new AttributesNode();
	ObjectLoader loader = reader.open(objectId);
	if (loader != null) {
		try (InputStream in = loader.openStream()) {
			node.parse(in);
		}
	}
	// An empty rule set is reported as "no node" to the caller.
	if (node.getRules().isEmpty()) {
		return null;
	}
	return node;
} }
/**
 * Loads and parses an attributes blob.
 *
 * @param reader
 *            reader used to access the blob
 * @param id
 *            id of the blob to parse
 * @return the parsed node, or {@code noAttributes()} when it has no rules
 * @throws IOException
 *             the blob cannot be read
 */
private static AttributesNode loadAttributes(ObjectReader reader,
		AnyObjectId id) throws IOException {
	AttributesNode node = new AttributesNode();
	try (InputStream in = reader.open(id, OBJ_BLOB).openStream()) {
		node.parse(in);
	}
	// Substitute the shared empty marker when nothing was parsed.
	if (node.getRules().isEmpty()) {
		return noAttributes();
	}
	return node;
}
/** {@inheritDoc} */
@Override
public ObjectStream openStream() throws MissingObjectException, IOException {
	WindowCursor wc = new WindowCursor(db);
	InputStream in;
	try {
		in = new PackInputStream(pack, objectOffset + headerLength, wc);
	} catch (IOException packGone) {
		// If the pack file cannot be pinned into the cursor, it
		// probably was repacked recently. Go find the object
		// again and open the stream from that location instead.
		//
		return wc.open(getObjectId(), type).openStream();
	}
	// Inflate the packed data through a buffered pipeline; the trailing
	// "//" markers presumably keep a formatter from rejoining the lines.
	in = new BufferedInputStream( //
			new InflaterInputStream( //
					in, //
					wc.inflater(), //
					8192), //
			8192);
	// NOTE(review): wc's inflater is handed to the stream here — its
	// release appears to be the stream consumer's concern; confirm the
	// Filter/close path elsewhere in this file.
	return new ObjectStream.Filter(type, size, in);
}
// NOTE(review): fragment — the enclosing method signature and the rest of
// this try block lie outside this chunk; code left unchanged.
IOException {
	if (isLarge()) {
		try (ObjectStream in = openStream()) {
			final long sz = in.getSize();
			byte[] tmp = new byte[8192];
/**
 * Opens a stream over object data that may not yet be in a cached pack.
 * NOTE(review): the DfsReader's lifetime is tied to the returned stream —
 * the anonymous Filter releases it on close(); the early-return path
 * closes it immediately in the finally block.
 */
@Override
public ObjectStream openStream() throws IOException {
	@SuppressWarnings("resource") // Explicitly closed below
	final DfsReader ctx = db.newReader();
	if (srcPack != packKey) {
		try {
			// Post DfsInserter.flush() use the normal code path.
			// The newly created pack is registered in the cache.
			return ctx.open(id, type).openStream();
		} finally {
			ctx.close();
		}
	}
	int bufsz = 8192;
	final Inflater inf = ctx.inflater();
	// Inflate directly from the insertion buffer (ReadBackStream at pos —
	// presumably re-reads the not-yet-flushed pack data; confirm).
	return new ObjectStream.Filter(type, size,
			new BufferedInputStream(new InflaterInputStream(
					new ReadBackStream(pos), inf, bufsz), bufsz)) {
		@Override
		public void close() throws IOException {
			ctx.close();
			super.close();
		}
	};
}
/**
 * Loosen objects in a pack file which are not also in the newly-created
 * pack files.
 *
 * @param inserter
 *            inserter used to write each object back as a loose object
 * @param reader
 *            reader used to load the packed object data
 * @param pack
 *            pack whose objects should be loosened
 * @param existing
 *            ids already processed; updated in place as objects are seen
 * @throws IOException
 *             an object cannot be read or re-inserted
 */
private void loosen(ObjectDirectoryInserter inserter, ObjectReader reader,
		PackFile pack, HashSet<ObjectId> existing) throws IOException {
	for (PackIndex.MutableEntry entry : pack) {
		ObjectId oid = entry.toObjectId();
		// HashSet.add reports whether the id was newly inserted, which
		// folds the former contains()+add() pair into a single lookup.
		if (!existing.add(oid)) {
			continue;
		}
		ObjectLoader loader = reader.open(oid);
		inserter.insert(loader.getType(), loader.getSize(),
				loader.openStream(),
				true /* create this object even though it's a duplicate */);
	}
}
// NOTE(review): fragment — a lone return statement; the enclosing method
// signature lies outside this chunk. Opens a stream for (id, type).
return source.open(id, type).openStream();
// NOTE(review): fragment — a lone return statement; the enclosing method
// signature lies outside this chunk. Opens a stream for (obj, type).
return ctx.open(obj, type).openStream();
// NOTE(review): fragment — the enclosing method and the remainder of this
// try block (including the truncated throw) lie outside this chunk.
final byte[] curBuffer = new byte[readBuffer.length];
long sz = info.size;
try (ObjectStream cur = readCurs.open(obj, info.type).openStream()) {
	// Guard: the recorded size must agree with the stream's own size.
	if (cur.getSize() != sz) {
		throw new IOException(MessageFormat.format(
// NOTE(review): example-style fragment; `new Repository(dir)` is a historic
// constructor form — cannot confirm the targeted library version from here.
File repoDir = new File("test-git");
// open the repository
Repository repository = new Repository(repoDir);
// find the HEAD
ObjectId lastCommitId = repository.resolve(Constants.HEAD);
// now we have to get the commit
// NOTE(review): RevWalk/TreeWalk are not released in this fragment —
// confirm the surrounding code closes them.
RevWalk revWalk = new RevWalk(repository);
RevCommit commit = revWalk.parseCommit(lastCommitId);
// and using commit's tree find the path
RevTree tree = commit.getTree();
TreeWalk treeWalk = new TreeWalk(repository);
treeWalk.addTree(tree);
treeWalk.setRecursive(true);
treeWalk.setFilter(PathFilter.create(path));
if (!treeWalk.next()) {
	return null;
}
ObjectId objectId = treeWalk.getObjectId(0);
ObjectLoader loader = repository.open(objectId);
// and then one can use either InputStream in = loader.openStream() // or loader.copyTo(out)
private static void runBuiltinFilterCommand(Repository repo, CheckoutMetadata checkoutMetadata, ObjectLoader ol, OutputStream channel) throws MissingObjectException, IOException { boolean isMandatory = repo.getConfig().getBoolean( ConfigConstants.CONFIG_FILTER_SECTION, ConfigConstants.CONFIG_SECTION_LFS, ConfigConstants.CONFIG_KEY_REQUIRED, false); FilterCommand command = null; try { command = FilterCommandRegistry.createFilterCommand( checkoutMetadata.smudgeFilterCommand, repo, ol.openStream(), channel); } catch (IOException e) { LOG.error(JGitText.get().failedToDetermineFilterDefinition, e); if (!isMandatory) { // In case an IOException occurred during creating of the // command then proceed as if there would not have been a // builtin filter (only if the filter is not mandatory). ol.copyTo(channel); } else { throw e; } } if (command != null) { while (command.run() != -1) { // loop as long as command.run() tells there is work to do } } }
/** {@inheritDoc} */
@Override
public Note merge(Note base, Note ours, Note theirs, ObjectReader reader,
		ObjectInserter inserter) throws IOException {
	// Trivial outcomes: one side absent, or both sides identical.
	if (ours == null) {
		return theirs;
	}
	if (theirs == null) {
		return ours;
	}
	if (ours.getData().equals(theirs.getData())) {
		return ours;
	}
	// Both sides differ: concatenate the two note blobs into a new blob.
	ObjectLoader loaderOurs = reader.open(ours.getData());
	ObjectLoader loaderTheirs = reader.open(theirs.getData());
	try (UnionInputStream union = new UnionInputStream(
			loaderOurs.openStream(), loaderTheirs.openStream())) {
		long totalSize = loaderOurs.getSize() + loaderTheirs.getSize();
		ObjectId noteData = inserter.insert(Constants.OBJ_BLOB, totalSize,
				union);
		return new Note(ours, noteData);
	}
} }
// NOTE(review): fragment — the surrounding method, the catch/finally of
// this try, and later uses of `result`/`rc` lie outside this chunk.
try {
	result = fs.execute(filterProcessBuilder, ol.openStream());
	rc = result.getRc();
	// rc == 0 presumably means the external filter succeeded — confirm
	// against the branch body outside this view.
	if (rc == 0) {
// NOTE(review): fragment — the read loop that fills `head` continues
// outside this chunk; code left unchanged.
try (InputStream stream = ldr.openStream()) {
	int off = 0;
	int left = head.length;
// NOTE(review): this chunk interleaves two separate call-site fragments
// (each passing loader.openStream(...) into a stream wrapper whose
// enclosing statements lie outside this view). Syntactically incomplete;
// left unchanged.
loader.openStream(), true, true /* abort if binary */)) {
	dcInLen = computeLength(dcIn);
loader.openStream(), true)) {
	byte[] autoCrLfHash = computeHash(dcIn, dcInLen);
	boolean changed = getEntryObjectId()
// NOTE(review): fragment — the leading return belongs to a branch whose
// condition lies outside this chunk, and the try block is truncated.
return getCachedBytes();
try (ObjectStream in = openStream()) {
	long sz = in.getSize();
	if (sizeLimit < sz)
/**
 * Opens the referenced object's content as an input stream, wrapping the
 * underlying object stream in an {@code InputWrapper}.
 * NOTE(review): InputWrapper is declared elsewhere — confirm whether it
 * takes ownership of (and closes) the wrapped stream.
 */
@NotNull @Override public InputStream inputStream(@NotNull GitObject<? extends ObjectId> objectId) throws IOException { return new InputWrapper(objectId.openObject().openStream()); }