.keySerializer(Serializer.STRING) .valueSerializer(Serializer.STRING) .expireAfterCreate(cacheTimeout, TimeUnit.HOURS) .createOrOpen();
private void loadEmbedding(File mapPath, int dimension) throws FileNotFoundException { vectors = db.hashMap("some_other_map", Serializer.STRING, Serializer.DOUBLE_ARRAY).create(); this.dimension = dimension;
.createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen(); .createOrOpen();
ConcurrentMap<String, byte[]> concurrentMap = (db != null) ? db.hashMap(viewName, Serializer.STRING, Serializer.BYTE_ARRAY) .createOrOpen() : null; String key = DigestUtils.sha1Hex(str + views); if (!overwrite && concurrentMap != null && concurrentMap.containsKey(key)) {
ConcurrentMap<String, byte[]> concurrentMap = (db != null) ? db.hashMap(viewName, Serializer.STRING, Serializer.BYTE_ARRAY) .createOrOpen() : null; String key = DigestUtils.sha1Hex(str + views); if (!overwrite && concurrentMap != null && concurrentMap.containsKey(key)) {
.keySerializer(new IdSerializer()) .valueSerializer(Serializer.JAVA) .createOrOpen(); .keySerializer(new IdSerializer()) .valueSerializer(Serializer.JAVA) .createOrOpen(); .keySerializer(new FeatureKeySerializer()) .valueSerializer(Serializer.JAVA) .createOrOpen(); .keySerializer(new MultivaluedFeatureKeySerializer()) .valueSerializer(Serializer.JAVA) .createOrOpen();
.expireMaxSize(10000) .expireAfterGet(5, TimeUnit.DAYS) .createOrOpen();
storageMetaData = db.hashMap("storageMetaData").keySerializer(new SessionTypeIdSerializer()) .valueSerializer(new PersistableSerializer<StorageMetaData>()).createOrOpen(); staticInfo = db.hashMap("staticInfo").keySerializer(new SessionTypeWorkerIdSerializer()) .valueSerializer(new PersistableSerializer<>()).createOrOpen(); classToInteger = db.hashMap("classToInteger").keySerializer(Serializer.STRING) .valueSerializer(Serializer.INTEGER).createOrOpen(); integerToClass = db.hashMap("integerToClass").keySerializer(Serializer.INTEGER) .valueSerializer(Serializer.STRING).createOrOpen(); Map<Long, Persistable> m = db.hashMap(s).keySerializer(Serializer.LONG) .valueSerializer(new PersistableSerializer<>()).open(); String[] arr = s.split(COMPOSITE_KEY_SEPARATOR); arr[0] = arr[0].substring(COMPOSITE_KEY_HEADER.length()); //Remove header...
@Override protected Map<Long, Persistable> getUpdateMap(String sessionID, String typeID, String workerID, boolean createIfRequired) { SessionTypeWorkerId id = new SessionTypeWorkerId(sessionID, typeID, workerID); if (updates.containsKey(id)) { return updates.get(id); } if (!createIfRequired) { return null; } String compositeKey = COMPOSITE_KEY_HEADER + sessionID + COMPOSITE_KEY_SEPARATOR + typeID + COMPOSITE_KEY_SEPARATOR + workerID; Map<Long, Persistable> updateMap; updateMapLock.lock(); try { //Try again, in case another thread created it before lock was acquired in this thread if (updates.containsKey(id)) { return updates.get(id); } updateMap = db.hashMap(compositeKey).keySerializer(Serializer.LONG) .valueSerializer(new PersistableSerializer<>()).createOrOpen(); updates.put(id, updateMap); } finally { updateMapLock.unlock(); } return updateMap; }
/**
 * Constructs a new {@code AbstractMapDbBackend} wrapping the provided {@code database}.
 *
 * <p>Opens (or creates) the three persistent {@link org.mapdb.HTreeMap}s this backend
 * relies on: {@code containers}, {@code instances} and {@code features/single}.
 *
 * @param database the {@link org.mapdb.DB} used to create and manage {@link org.mapdb.HTreeMap}s
 *
 * @see MapDbBackendFactory
 */
@SuppressWarnings("unchecked") // HTreeMap makers are raw-typed; the decorated serializers fix K/V
protected AbstractMapDbBackend(DB database) {
    checkNotNull(database, "database");

    this.database = database;

    // Id -> single feature: records each element's container.
    this.containers = database.hashMap("containers")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forId()))
            .valueSerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forSingleFeature()))
            .createOrOpen();

    // Id -> class: records each element's metaclass.
    this.instances = database.hashMap("instances")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forId()))
            .valueSerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forClass()))
            .createOrOpen();

    // Single feature -> value; ELSA is MapDB's generic object serializer.
    this.singleFeatures = database.hashMap("features/single")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forSingleFeature()))
            .valueSerializer(Serializer.ELSA)
            .createOrOpen();
}
/** * Copies all the contents of this this back-end to the target one. * * @param target the {@code MapDbPersistenceBackend} to copy the database contents to * * @throws UnsupportedOperationException if the current {@link DB} contains {@link Collection}s which are not {@link * Map}s */ @SuppressWarnings({"unchecked", "rawtypes"}) // Unchecked cast: 'Map' to 'Map<...>' public void copyTo(MapDbPersistenceBackend target) { for (Map.Entry<String, Object> entry : db.getAll().entrySet()) { Object collection = entry.getValue(); if (collection instanceof Map) { Map fromMap = (Map) collection; Map toMap = target.db.hashMap(entry.getKey()).createOrOpen(); toMap.putAll(fromMap); } else { throw new UnsupportedOperationException("Cannot copy MapDB backend: store type " + collection.getClass().getSimpleName() + " is not supported"); } } } }
/**
 * Builds a Subversion-facing view over a single Git branch of {@code repository}.
 *
 * <p>Registers the repository with the shared {@link GitSubmodules} registry, opens the
 * shared MapDB-backed caches, initializes the SVN branch layout and derives a stable
 * repository UUID from the repository id and branch name.
 *
 * @param context            per-repository local context (also provides the shared context)
 * @param repository         the underlying Git repository
 * @param pusher             strategy used to push converted commits
 * @param branch             Git branch name (without the {@code refs/heads/} prefix)
 * @param renameDetection    whether rename detection is enabled (also part of the cache key)
 * @param lockManagerFactory factory for path lock managers
 * @throws IOException if repository initialization or cache access fails
 */
public GitRepository(@NotNull LocalContext context, @NotNull Repository repository, @NotNull GitPusher pusher, @NotNull String branch, boolean renameDetection, @NotNull LockManagerFactory lockManagerFactory) throws IOException {
    this.context = context;
    final SharedContext shared = context.getShared();
    shared.getOrCreate(GitSubmodules.class, GitSubmodules::new).register(repository);
    this.repository = repository;
    // Object-id -> "is binary" cache, shared across repositories.
    this.binaryCache = shared.getCacheDB().hashMap("cache.binary", Serializer.STRING, Serializer.BOOLEAN).createOrOpen();
    // The map name encodes repository name, rename-detection flag and cache version,
    // so incompatible revision caches never collide in the shared DB.
    this.revisionCache = context.getShared().getCacheDB().hashMap(String.format("cache-revision.%s.%s.v%s", context.getName(), renameDetection ? 1 : 0, revisionCacheVersion), objectIdSerializer, cacheRevisionSerializer).createOrOpen();
    this.pusher = pusher;
    this.renameDetection = renameDetection;
    this.lockManagerFactory = lockManagerFactory;
    this.gitFilters = GitFilterHelper.createFilters(context);
    // Ensure the SVN metadata branch exists and remember both branch refs.
    final Ref svnBranchRef = LayoutHelper.initRepository(repository, branch);
    this.svnBranch = svnBranchRef.getName();
    this.gitBranch = Constants.R_HEADS + branch;
    // UUID is derived deterministically so clients see a stable repository identity.
    final String repositoryId = loadRepositoryId(repository, svnBranchRef);
    this.uuid = UUID.nameUUIDFromBytes((repositoryId + "\0" + gitBranch).getBytes(StandardCharsets.UTF_8)).toString();
    log.info("[{}]: registered branch: {}", context.getName(), gitBranch);
}
/**
 * Creates (or re-opens) the MapDB-backed storage map of update times.
 *
 * <p>Uses an in-memory DB when no storage file is configured, otherwise a file-backed
 * DB with write-ahead-log transactions enabled (slower, but crash-resistant).
 *
 * @return the "updated" map of {@code Integer} keys to {@code Long} values
 *         (presumably id -&gt; last-update timestamp — TODO confirm against callers)
 */
@Override
public Map<Integer, Long> createUpdatedMap() {
    if (storageFile == null) {
        //In-Memory Stats Storage
        db = DBMaker.memoryDB().make();
    } else {
        db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable()
                //Default to Write Ahead Log - lower performance, but has crash protection
                .make();
    }
    updated = db.hashMap("updated").keySerializer(Serializer.INTEGER).valueSerializer(Serializer.LONG)
            .createOrOpen();
    return updated;
}
/** * Create the storage map * * @return */ @Override public Map<Integer, Long> createUpdatedMap() { if (storageFile == null) { //In-Memory Stats Storage db = DBMaker.memoryDB().make(); } else { db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable() //Default to Write Ahead Log - lower performance, but has crash protection .make(); } updated = db.hashMap("updated").keySerializer(Serializer.INTEGER).valueSerializer(Serializer.LONG) .createOrOpen(); return updated; }
/**
 * Creates (or re-opens) the MapDB-backed storage map of update times.
 *
 * <p>Uses an in-memory DB when no storage file is configured, otherwise a file-backed
 * DB with write-ahead-log transactions enabled (slower, but crash-resistant).
 *
 * @return the "updated" map of {@code Integer} keys to {@code Long} values
 *         (presumably id -&gt; last-update timestamp — TODO confirm against callers)
 */
@Override
public Map<Integer, Long> createUpdatedMap() {
    if (storageFile == null) {
        //In-Memory Stats Storage
        db = DBMaker.memoryDB().make();
    } else {
        db = DBMaker.fileDB(storageFile).closeOnJvmShutdown().transactionEnable()
                //Default to Write Ahead Log - lower performance, but has crash protection
                .make();
    }
    updated = db.hashMap("updated").keySerializer(Serializer.INTEGER).valueSerializer(Serializer.LONG)
            .createOrOpen();
    return updated;
}
/**
 * Creates a converter that rewrites matching blobs to Git-LFS pointers.
 *
 * @param cache    MapDB database backing the metadata cache
 * @param basePath repository base path; {@code lfs/tmp} is created beneath it
 * @param globs    glob patterns selecting the files to convert
 * @throws IOException             if the temp directory cannot be created
 * @throws InvalidPatternException if any glob is not a valid pattern
 */
public GitConverter(@NotNull DB cache, @NotNull Path basePath, @NotNull String[] globs) throws IOException, InvalidPatternException {
    this.basePath = basePath;
    this.cache = cache;
    // Defensive copy taken BEFORE the sort below, so this.globs keeps the caller's order.
    this.globs = globs.clone();
    this.matchers = convertGlobs(globs);
    // NOTE(review): this sorts the caller-supplied array in place (the clone stored
    // above stays unsorted) — confirm no caller relies on its array's order.
    Arrays.sort(globs);
    // Constructing a FileNameMatcher per glob validates pattern syntax; the instances
    // are discarded — only a thrown InvalidPatternException matters here.
    for (String glob : globs) {
        new FileNameMatcher(glob, '/');
    }
    tempPath = basePath.resolve("lfs/tmp");
    Files.createDirectories(tempPath);
    // String -> MetaData cache persisted in the "meta" map.
    //noinspection unchecked
    cacheMeta = cache.<String, MetaData>hashMap("meta")
            .keySerializer(Serializer.STRING)
            .valueSerializer(new SerializerJava())
            .createOrOpen();
}
/**
 * Constructs a new {@code MapDbBackendIndices} wrapping the provided {@code database}.
 *
 * @param database the {@link org.mapdb.DB} used to creates the used {@link org.mapdb.HTreeMap}s and manage the
 * database
 *
 * @see MapDbBackendFactory
 */
@SuppressWarnings("unchecked")
protected MapDbBackendIndices(DB database) {
    super(database);
    // Many-valued feature -> value; keys go through the decorated many-feature
    // serializer, values through MapDB's generic ELSA serializer.
    this.manyFeatures = database.hashMap("features/many")
            .keySerializer(new SerializerDecorator<>(SERIALIZER_FACTORY.forManyFeature()))
            .valueSerializer(Serializer.ELSA)
            .createOrOpen();
}
/**
 * Opens (or creates) the five MapDB databases used by this node and, inside each,
 * the single byte[] -> byte[] key/value store named "map".
 */
protected Storage(){
    /* Create or open the databases, optionally with crash-corruption resistance
       ("transactionEnable") and mmap when on a 64-bit system: */
    accountsDB = getDB(pathAccountsDB,false,true);
    blocksDB = getDB(pathBlocksDB,false,true);
    transactionsDB = getDB(pathTransactionsDB,false,true);
    contractCodeDB = getDB(pathContractCodeDB,false,true);
    contractStatesDB = getDB(pathContractStateDB,false,true);

    /* Create or open the key/value stores: */
    accountsMap = accountsDB.hashMap("map").keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    blocksMap = blocksDB.hashMap("map").keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    transactionsMap = transactionsDB.hashMap("map").keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    contractCodeMap = contractCodeDB.hashMap("map").keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    contractStatesMap = contractStatesDB.hashMap("map").keySerializer(Serializer.BYTE_ARRAY).valueSerializer(Serializer.BYTE_ARRAY).createOrOpen();
    //testing
}
/**
 * Opens (or creates) the named dataset as an Integer -> byte[] map.
 *
 * @param dataset name of the MapDB hash map to open
 * @return the persistent map for {@code dataset}
 */
private ConcurrentMap<Integer, byte[]> getMap(String dataset) {
    // Explicit builder form of db.hashMap(name, keySer, valueSer).
    return db.hashMap(dataset)
            .keySerializer(Serializer.INTEGER)
            .valueSerializer(Serializer.BYTE_ARRAY)
            .createOrOpen();
}
/**
 * Opens (or creates) the MD5 cache map for the given filter.
 *
 * @param filter  filter whose name becomes part of the map name
 * @param cacheDb MapDB database holding the cache
 * @return the String -> String MD5 cache map
 */
@NotNull
public static HTreeMap<String, String> getCacheMd5(@NotNull GitFilter filter, @NotNull DB cacheDb) {
    final String mapName = "cache.filter." + filter.getName() + ".md5";
    // Explicit builder form of cacheDb.hashMap(name, keySer, valueSer).
    return cacheDb.hashMap(mapName)
            .keySerializer(Serializer.STRING)
            .valueSerializer(Serializer.STRING)
            .createOrOpen();
}