/**
 * Opens (or creates) a transactional MapDB file store for {@code f} and the
 * single String-to-String hash map named after the file.
 *
 * @param f the database file; its name is also used as the map name
 */
public MapDBHashMap(File f) {
    // Transactional file store that is closed automatically on JVM shutdown.
    this.db = DBMaker.fileDB(f)
            .closeOnJvmShutdown()
            .transactionEnable()
            .make();
    // String keys and values; created on first use, reopened afterwards.
    this.hashMap = db.hashMap(f.getName())
            .keySerializer(Serializer.STRING)
            .valueSerializer(Serializer.STRING)
            .createOrOpen();
}
/** * Copies all the contents of this this back-end to the target one. * * @param target the {@code MapDbPersistenceBackend} to copy the database contents to * * @throws UnsupportedOperationException if the current {@link DB} contains {@link Collection}s which are not {@link * Map}s */ @SuppressWarnings({"unchecked", "rawtypes"}) // Unchecked cast: 'Map' to 'Map<...>' public void copyTo(MapDbPersistenceBackend target) { for (Map.Entry<String, Object> entry : db.getAll().entrySet()) { Object collection = entry.getValue(); if (collection instanceof Map) { Map fromMap = (Map) collection; Map toMap = target.db.hashMap(entry.getKey()).createOrOpen(); toMap.putAll(fromMap); } else { throw new UnsupportedOperationException("Cannot copy MapDB backend: store type " + collection.getClass().getSimpleName() + " is not supported"); } } } }
/**
 * Opens (or creates) the persistent size cache for the given filter.
 *
 * @param filter  filter whose size cache is requested
 * @param cacheDb MapDB database holding the cache maps
 * @return map named {@code cache.filter.<filterName>.size} with String keys and Long values
 */
@NotNull
public static HTreeMap<String, Long> getCacheSize(@NotNull GitFilter filter, @NotNull DB cacheDb) {
    final String mapName = "cache.filter." + filter.getName() + ".size";
    return cacheDb.hashMap(mapName, Serializer.STRING, Serializer.LONG).createOrOpen();
}
@Override protected Map<Long, Persistable> getUpdateMap(String sessionID, String typeID, String workerID, boolean createIfRequired) { SessionTypeWorkerId id = new SessionTypeWorkerId(sessionID, typeID, workerID); if (updates.containsKey(id)) { return updates.get(id); } if (!createIfRequired) { return null; } String compositeKey = COMPOSITE_KEY_HEADER + sessionID + COMPOSITE_KEY_SEPARATOR + typeID + COMPOSITE_KEY_SEPARATOR + workerID; Map<Long, Persistable> updateMap; updateMapLock.lock(); try { //Try again, in case another thread created it before lock was acquired in this thread if (updates.containsKey(id)) { return updates.get(id); } updateMap = db.hashMap(compositeKey).keySerializer(Serializer.LONG) .valueSerializer(new PersistableSerializer<>()).createOrOpen(); updates.put(id, updateMap); } finally { updateMapLock.unlock(); } return updateMap; }
/**
 * Constructs a new {@code AbstractMapDbBackend} wrapping the provided {@code database}.
 * <p>
 * Creates (or opens) the three backing {@link org.mapdb.HTreeMap}s:
 * <ul>
 *   <li>{@code containers} — keyed by id, valued by a single-feature descriptor
 *       (per the serializer choices below);</li>
 *   <li>{@code instances} — keyed by id, valued by a class descriptor;</li>
 *   <li>{@code features/single} — keyed by a single-feature descriptor, valued with
 *       MapDB's generic {@code ELSA} serializer.</li>
 * </ul>
 *
 * @param database the {@link org.mapdb.DB} used to create and manage {@link org.mapdb.HTreeMap}s
 *
 * @see MapDbBackendFactory
 */
@SuppressWarnings("unchecked")
protected AbstractMapDbBackend(DB database) {
    checkNotNull(database, "database");
    this.database = database;
    this.containers = database.hashMap("containers")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forId()))
            .valueSerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forSingleFeature()))
            .createOrOpen();
    this.instances = database.hashMap("instances")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forId()))
            .valueSerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forClass()))
            .createOrOpen();
    this.singleFeatures = database.hashMap("features/single")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forSingleFeature()))
            .valueSerializer(Serializer.ELSA)
            .createOrOpen();
}
/**
 * Wires a git repository into the local context: registers it with the shared submodule
 * tracker, opens the MapDB-backed caches, initializes the svn metadata branch and derives
 * the repository UUID.
 *
 * @throws IOException if repository initialization or id loading fails
 */
public GitRepository(@NotNull LocalContext context, @NotNull Repository repository, @NotNull GitPusher pusher, @NotNull String branch, boolean renameDetection, @NotNull LockManagerFactory lockManagerFactory) throws IOException {
    this.context = context;
    final SharedContext shared = context.getShared();
    // Register this repository with the shared submodules tracker (created on first use).
    shared.getOrCreate(GitSubmodules.class, GitSubmodules::new).register(repository);
    this.repository = repository;
    // Persistent caches backed by the shared MapDB cache database.
    this.binaryCache = shared.getCacheDB().hashMap("cache.binary", Serializer.STRING, Serializer.BOOLEAN).createOrOpen();
    // The revision-cache name embeds repo name, rename-detection flag and cache version,
    // so maps with incompatible layouts never collide.
    this.revisionCache = context.getShared().getCacheDB().hashMap(String.format("cache-revision.%s.%s.v%s", context.getName(), renameDetection ? 1 : 0, revisionCacheVersion), objectIdSerializer, cacheRevisionSerializer).createOrOpen();
    this.pusher = pusher;
    this.renameDetection = renameDetection;
    this.lockManagerFactory = lockManagerFactory;
    this.gitFilters = GitFilterHelper.createFilters(context);
    // Initialize (or load) the svn metadata branch and resolve the configured git branch ref.
    final Ref svnBranchRef = LayoutHelper.initRepository(repository, branch);
    this.svnBranch = svnBranchRef.getName();
    this.gitBranch = Constants.R_HEADS + branch;
    // UUID is derived deterministically (name-based UUID) from repository id + branch,
    // so the same repo/branch pair always yields the same UUID.
    final String repositoryId = loadRepositoryId(repository, svnBranchRef);
    this.uuid = UUID.nameUUIDFromBytes((repositoryId + "\0" + gitBranch).getBytes(StandardCharsets.UTF_8)).toString();
    log.info("[{}]: registered branch: {}", context.getName(), gitBranch);
}
/** * Create the storage map * * @return */ @Override public Map<Integer, Long> createUpdatedMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } updated = db.hashMap("updated").keySerializer(Serializer.INTEGER).valueSerializer(Serializer.LONG) .createOrOpen(); return updated; }
/** * Create the storage map * * @return */ @Override public Map<Integer, Long> createUpdatedMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } updated = db.hashMap("updated").keySerializer(Serializer.INTEGER).valueSerializer(Serializer.LONG) .createOrOpen(); return updated; }
/** * Create the storage map * * @return */ @Override public Map<Integer, Long> createUpdatedMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } updated = db.hashMap("updated").keySerializer(Serializer.INTEGER).valueSerializer(Serializer.LONG) .createOrOpen(); return updated; }
/**
 * Creates a converter: validates and stores the LFS glob patterns, prepares the
 * temporary directory and opens the MapDB-backed metadata cache.
 *
 * @param cache    MapDB database used for the "meta" cache map
 * @param basePath base directory; {@code lfs/tmp} is created beneath it
 * @param globs    glob patterns selecting files to convert
 * @throws IOException             if the temp directory cannot be created
 * @throws InvalidPatternException if any glob is not a valid file-name pattern
 */
public GitConverter(@NotNull DB cache, @NotNull Path basePath, @NotNull String[] globs) throws IOException, InvalidPatternException {
    this.basePath = basePath;
    this.cache = cache;
    // Defensive copy, then sort the stored copy. The original code sorted the
    // caller-supplied array in place (after cloning), which clobbered the caller's
    // array while leaving the stored field unsorted — sort the clone instead.
    this.globs = globs.clone();
    this.matchers = convertGlobs(globs);
    Arrays.sort(this.globs);
    // Eagerly validate each pattern: FileNameMatcher throws InvalidPatternException.
    for (String glob : globs) {
        new FileNameMatcher(glob, '/');
    }
    tempPath = basePath.resolve("lfs/tmp");
    Files.createDirectories(tempPath);
    //noinspection unchecked
    cacheMeta = cache.<String, MetaData>hashMap("meta")
            .keySerializer(Serializer.STRING)
            .valueSerializer(new SerializerJava())
            .createOrOpen();
}
/**
 * Constructs a new {@code MapDbBackendIndices} wrapping the provided {@code database}.
 *
 * @param database the {@link org.mapdb.DB} used to create the used {@link org.mapdb.HTreeMap}s
 *                 and manage the database
 *
 * @see MapDbBackendFactory
 */
@SuppressWarnings("unchecked")
protected MapDbBackendIndices(DB database) {
    super(database);
    // Many-valued features: keys serialized by the factory's many-feature serializer,
    // values stored with MapDB's generic ELSA serializer.
    manyFeatures = database
            .hashMap("features/many")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forManyFeature()))
            .valueSerializer(Serializer.ELSA)
            .createOrOpen();
}
/**
 * Opens the five backing databases and their key-value stores. Each database holds a
 * single map named "map" with raw byte[] keys and values.
 */
protected Storage() {
    /*
     * Create or open the databases, optionally with crash/corruption resistance
     * ("transactionEnable") and mmap when running on a 64-bit system.
     */
    accountsDB = getDB(pathAccountsDB, false, true);
    blocksDB = getDB(pathBlocksDB, false, true);
    transactionsDB = getDB(pathTransactionsDB, false, true);
    contractCodeDB = getDB(pathContractCodeDB, false, true);
    contractStatesDB = getDB(pathContractStateDB, false, true);

    /* Create or open the key-value stores (byte[] -> byte[], one per database). */
    accountsMap = accountsDB.hashMap("map")
            .keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    blocksMap = blocksDB.hashMap("map")
            .keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    transactionsMap = transactionsDB.hashMap("map")
            .keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    contractCodeMap = contractCodeDB.hashMap("map")
            .keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    contractStatesMap = contractStatesDB.hashMap("map")
            .keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
}
/**
 * Opens (or creates) the named dataset map with Integer keys and raw byte[] values.
 *
 * @param dataset name of the MapDB hash map to open
 * @return the backing concurrent map
 */
private ConcurrentMap<Integer, byte[]> getMap(String dataset) {
    final ConcurrentMap<Integer, byte[]> map =
            db.hashMap(dataset, Serializer.INTEGER, Serializer.BYTE_ARRAY).createOrOpen();
    return map;
}
/**
 * Opens (or creates) the persistent MD5 cache for the given filter.
 *
 * @param filter  filter whose MD5 cache is requested
 * @param cacheDb MapDB database holding the cache maps
 * @return map named {@code cache.filter.<filterName>.md5} with String keys and values
 */
@NotNull
public static HTreeMap<String, String> getCacheMd5(@NotNull GitFilter filter, @NotNull DB cacheDb) {
    final String mapName = "cache.filter." + filter.getName() + ".md5";
    return cacheDb.hashMap(mapName, Serializer.STRING, Serializer.STRING).createOrOpen();
}
@Override public Map<Integer, SubscriberState> createMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } statusStorageMap = db.hashMap("statusStorageMap").keySerializer(Serializer.INTEGER) .valueSerializer(new StatusStorageSerializer()).createOrOpen(); return statusStorageMap; }
@Override public Map<Integer, SubscriberState> createMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } statusStorageMap = db.hashMap("statusStorageMap").keySerializer(Serializer.INTEGER) .valueSerializer(new StatusStorageSerializer()).createOrOpen(); return statusStorageMap; }
@Override public Map<Integer, SubscriberState> createMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } statusStorageMap = db.hashMap("statusStorageMap").keySerializer(Serializer.INTEGER) .valueSerializer(new StatusStorageSerializer()).createOrOpen(); return statusStorageMap; }
/**
 * Returns the dataset's backing map (Integer keys, byte[] values), creating it on
 * first access.
 *
 * @param dataset name of the MapDB hash map
 */
private ConcurrentMap<Integer, byte[]> getMap(String dataset) {
    return db
            .hashMap(dataset, Serializer.INTEGER, Serializer.BYTE_ARRAY)
            .createOrOpen();
}
/**
 * Opens (or creates) the mmap-backed {@code test.db} store and returns its "test" map.
 * The raw {@link ConcurrentMap} return type is kept to preserve the existing signature;
 * the map is created without explicit serializers.
 */
public static ConcurrentMap setup() {
    final DB db = DBMaker.fileDB("test.db")
            .fileMmapEnable()
            .fileMmapPreclearDisable()
            .allocateStartSize(124 * 1024 * 1024)  // preallocate 124 MiB
            .allocateIncrement(5 * 1024 * 1024)    // grow in 5 MiB steps
            .make();
    return db.hashMap("test").createOrOpen();
}
/**
 * Opens (or creates) the named map in the underlying database, using the generic
 * {@code JAVA} serializer for both keys and values.
 *
 * @param name name of the hash map to create or open
 * @return the named map
 */
@Override
public <K, V> Map<K, V> getMap(String name) {
    final Map<K, V> map = db.<K, V>hashMap(name, JAVA, JAVA).createOrOpen();
    return map;
}