/**
 * Compute a stable cache key identifying a repository.
 * <p>
 * DFS repositories are keyed by their description's repository name;
 * local repositories are keyed by the URI of their git directory.
 *
 * @param repo
 *            repository to compute a key for.
 * @return unique key for the repository.
 * @throws IllegalArgumentException
 *             the repository is neither a {@link DfsRepository} nor backed
 *             by a local directory, so no stable identity exists.
 */
private static String computeKey(Repository repo) {
	if (repo instanceof DfsRepository) {
		DfsRepository dfs = (DfsRepository) repo;
		return dfs.getDescription().getRepositoryName();
	}
	if (repo.getDirectory() != null) {
		return repo.getDirectory().toURI().toString();
	}
	// Fix: include a message so callers can diagnose which precondition
	// failed instead of receiving a bare IllegalArgumentException.
	throw new IllegalArgumentException(
			"repository has no directory and is not a DfsRepository"); //$NON-NLS-1$
}
}
/** {@inheritDoc} */ @Override public void create(boolean bare) throws IOException { if (exists()) throw new IOException(MessageFormat.format( JGitText.get().repositoryAlreadyExists, "")); //$NON-NLS-1$ String master = Constants.R_HEADS + Constants.MASTER; RefUpdate.Result result = updateRef(Constants.HEAD, true).link(master); if (result != RefUpdate.Result.NEW) throw new IOException(result.name()); }
/**
 * Rescan the DFS for pack files, refreshing the cached pack list.
 * <p>
 * Only one thread scans at a time (serialized on {@code packList});
 * a thread that finds the list already advanced past {@code original}
 * returns the newer list without rescanning.
 *
 * @param original
 *            the pack list the caller last observed.
 * @return the current pack list after scanning.
 * @throws IOException
 *             the scan of the underlying storage failed.
 */
PackList scanPacks(PackList original) throws IOException {
	PackList o, n;
	synchronized (packList) {
		do {
			o = packList.get();
			if (o != original) {
				// Another thread did the scan for us, while we
				// were blocked on the monitor above.
				//
				return o;
			}
			n = scanPacksImpl(o);
			if (n == o)
				// Nothing changed; avoid the CAS and the event below.
				return n;
		} while (!packList.compareAndSet(o, n));
	}
	// Fire outside the monitor, and only when the list actually changed.
	getRepository().fireEvent(new DfsPacksChangedEvent());
	return n;
}
/** {@inheritDoc} */
@Override
public void scanForRepoChanges() throws IOException {
	// Drop cached refs and cached object/pack state so subsequent reads
	// observe the current contents of the underlying DFS.
	getRefDatabase().refresh();
	getObjectDatabase().clearCache();
}
/**
 * Initialize DFS fsck.
 *
 * @param repository
 *            the dfs repository to check.
 */
public DfsFsck(DfsRepository repository) {
	// Cache the object database up front; all checks go through it.
	objdb = repository.getObjectDatabase();
	repo = repository;
}
/**
 * Check if the repository already exists.
 *
 * @return true if the repository exists; false if it is new.
 * @throws java.io.IOException
 *             the repository cannot be checked.
 */
public boolean exists() throws IOException {
	// Only DfsRefDatabase offers an existence probe; other ref database
	// implementations are assumed to already exist.
	if (!(getRefDatabase() instanceof DfsRefDatabase)) {
		return true;
	}
	return ((DfsRefDatabase) getRefDatabase()).exists();
}
/**
 * Exclude objects from the compacted pack.
 *
 * @param pack
 *            objects to not include.
 * @return {@code this}.
 * @throws java.io.IOException
 *             pack index cannot be loaded.
 */
public DfsPackCompactor exclude(DfsPackFile pack) throws IOException {
	// Open a reader just long enough to load the pack's index, then
	// delegate to the index-based exclude.
	PackIndex idx;
	DfsReader ctx = (DfsReader) repo.newObjectReader();
	try {
		idx = pack.getPackIndex(ctx);
	} finally {
		ctx.close();
	}
	return exclude(idx);
}
/**
 * Get configuration to write new reftables with.
 *
 * @return configuration to write new reftables with; built fresh from the
 *         repository's current config on every call.
 */
public ReftableConfig getReftableConfig() {
	return new ReftableConfig(getRepository().getConfig());
}
/**
 * Initialize a garbage collector.
 *
 * @param repository
 *            repository objects to be packed will be read from.
 */
public DfsGarbageCollector(DfsRepository repository) {
	repo = repository;
	objdb = repo.getObjectDatabase();
	refdb = repo.getRefDatabase();

	// Small initial capacity: a GC pass produces only a handful of packs.
	newPackDesc = new ArrayList<>(4);
	newPackObj = new ArrayList<>(4);
	newPackStats = new ArrayList<>(4);

	packConfig = new PackConfig(repo);
	packConfig.setIndexVersion(2);
}
/** {@inheritDoc} */
@Override
public BatchRefUpdate newBatchUpdate() {
	// Batch updates against reftables need the object database to write
	// new reftable files.
	return new ReftableBatchRefUpdate(this,
			getRepository().getObjectDatabase());
}
// NOTE(review): fragment — this block is cut off mid-method; the bodies of
// the null check and the loops continue outside the visible source.
pm.beginTask(JGitText.get().countingObjects, ProgressMonitor.UNKNOWN);
// Walk objects reachable from every ref in the repository.
try (ObjectWalk ow = new ObjectWalk(repo)) {
	for (Ref r : repo.getRefDatabase().getRefs()) {
		ObjectId objectId = r.getObjectId();
		// Unborn or symbolic-only refs have no object to count.
		if (objectId == null) {
/**
 * Exclude objects from the compacted pack.
 *
 * @param pack
 *            objects to not include.
 * @return {@code this}.
 * @throws java.io.IOException
 *             pack index cannot be loaded.
 */
public DfsPackCompactor exclude(DfsPackFile pack) throws IOException {
	// Load the pack's index under a short-lived reader, then exclude by
	// index.
	PackIndex idx;
	DfsReader ctx = (DfsReader) repo.newObjectReader();
	try {
		idx = pack.getPackIndex(ctx);
	} finally {
		ctx.close();
	}
	return exclude(idx);
}
/**
 * {@inheritDoc}
 * <p>
 * Disables block alignment and object indexing before applying the
 * repository configuration.
 */
@Override
public ReftableConfig getReftableConfig() {
	ReftableConfig config = new ReftableConfig();
	config.setIndexObjects(false);
	config.setAlignBlocks(false);
	// Repository settings override the defaults chosen above when set.
	config.fromConfig(getRepository().getConfig());
	return config;
}
/** {@inheritDoc} */
@Override
public void scanForRepoChanges() throws IOException {
	// Invalidate cached refs and cached object/pack data so the next
	// access re-reads the underlying DFS.
	getRefDatabase().refresh();
	getObjectDatabase().clearCache();
}
/**
 * {@inheritDoc}
 * <p>
 * A reftable-backed repository exists once at least one reftable has been
 * written.
 */
@Override
boolean exists() throws IOException {
	return getRepository().getObjectDatabase().getReftables().length > 0;
}
/** {@inheritDoc} */ @Override public synchronized String createNonce(Repository repo, long timestamp) throws IllegalStateException { String path; if (repo instanceof DfsRepository) { path = ((DfsRepository) repo).getDescription().getRepositoryName(); } else { File directory = repo.getDirectory(); if (directory != null) { path = directory.getPath(); } else { throw new IllegalStateException(); } } String input = path + ":" + String.valueOf(timestamp); //$NON-NLS-1$ byte[] rawHmac = mac.doFinal(input.getBytes(UTF_8)); return Long.toString(timestamp) + "-" + toHex(rawHmac); //$NON-NLS-1$ }
@Override public void create(boolean bare) throws IOException { if (exists()) throw new IOException(MessageFormat.format( JGitText.get().repositoryAlreadyExists, "")); //$NON-NLS-1$ String master = Constants.R_HEADS + Constants.MASTER; RefUpdate.Result result = updateRef(Constants.HEAD, true).link(master); if (result != RefUpdate.Result.NEW) throw new IOException(result.name()); }
/**
 * Commit a pack and index pair that was written to the DFS.
 * <p>
 * Committing the pack/index pair makes them visible to readers. The JGit
 * DFS code always writes the pack, then the index. This allows a simple
 * commit process to do nothing if readers always look for both files to
 * exist and the DFS performs atomic creation of the file (e.g. stream to a
 * temporary file and rename to target on close).
 * <p>
 * During pack compaction or GC the new pack file may be replacing other
 * older files. Implementations should remove those older files (if any) as
 * part of the commit of the new file.
 * <p>
 * This method is a trivial wrapper around
 * {@link #commitPackImpl(Collection, Collection)} that calls the
 * implementation and fires events.
 *
 * @param desc
 *            description of the new packs.
 * @param replaces
 *            if not null, list of packs to remove.
 * @throws java.io.IOException
 *             the packs cannot be committed. On failure a rollback must
 *             also be attempted by the caller.
 */
protected void commitPack(Collection<DfsPackDescription> desc,
		Collection<DfsPackDescription> replaces) throws IOException {
	commitPackImpl(desc, replaces);
	// Fire only after the commit succeeded; on IOException no event is
	// emitted and the caller must attempt rollback.
	getRepository().fireEvent(new DfsPacksChangedEvent());
}
/**
 * Check if the repository already exists.
 *
 * @return true if the repository exists; false if it is new.
 * @throws java.io.IOException
 *             the repository cannot be checked.
 */
public boolean exists() throws IOException {
	// Existence can only be probed through DfsRefDatabase; any other ref
	// database implementation is treated as already existing.
	if (!(getRefDatabase() instanceof DfsRefDatabase)) {
		return true;
	}
	return ((DfsRefDatabase) getRefDatabase()).exists();
}
/**
 * Exclude objects from the compacted pack.
 *
 * @param pack
 *            objects to not include.
 * @return {@code this}.
 * @throws IOException
 *             pack index cannot be loaded.
 */
public DfsPackCompactor exclude(DfsPackFile pack) throws IOException {
	// Read the pack's index with a temporary reader, closing it before
	// delegating to the index-based variant.
	PackIndex idx;
	DfsReader ctx = (DfsReader) repo.newObjectReader();
	try {
		idx = pack.getPackIndex(ctx);
	} finally {
		ctx.close();
	}
	return exclude(idx);
}