/**
 * Best-effort cache write: serializes {@code value} and stores it under {@code key}.
 * Failures are logged and swallowed (callers treat this cache as non-authoritative).
 */
@Override
public void put(String key, Object value) {
    byte[] data = serialize(value);
    try {
        // NOTE(review): key.getBytes() uses the platform default charset — confirm all
        // readers/writers of this cache agree on it (StandardCharsets.UTF_8 would be safer,
        // but switching would orphan keys written with the old encoding).
        db.put(key.getBytes(), data);
    } catch (Exception e) {
        // Parameterized logging: same rendered message, no eager string concatenation.
        LOG.error("Failed to put key into cache, {}", key, e);
    }
}
/** Re-inserts an evicted metadata entry into the backing RocksDB store. */
void handleEvictedMetadata(RocksDbKey key, RocksDbValue val) {
    byte[] rawKey = key.getRaw();
    byte[] rawValue = val.getRaw();
    try {
        store.db.put(rawKey, rawValue);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Serializes {@code key} and {@code value} and writes them to this map's column family.
 *
 * @throws IOException if the RocksDB write fails (the RocksDBException is wrapped as cause)
 */
@Override
public void put(K key, V value) throws IOException {
    byte[] keyBytes = serializer.serialize(key);
    byte[] valueBytes = serializer.serialize(value);
    try {
        rocksDb.put(columnFamily, keyBytes, valueBytes);
    } catch (RocksDBException e) {
        throw new IOException(String.format("Failed to put data, key=%s, value=%s", key, value), e);
    }
}
void updateInternal(byte[] key, SV valueToStore) { try { // write the new value to RocksDB backend.db.put(columnFamily, writeOptions, key, getValueBytes(valueToStore)); } catch (RocksDBException e) { throw new FlinkRuntimeException("Error while adding value to RocksDB", e); } } }
@Override public void putBlock(long id, BlockMeta meta) { try { // Overwrites the key if it already exists. mDb.put(mBlockMetaColumn, Longs.toByteArray(id), meta.toByteArray()); } catch (RocksDBException e) { throw new RuntimeException(e); } }
/**
 * Writes a user map entry: the composite key (current key + namespace + user key) maps to the
 * null-sensitively serialized user value.
 */
@Override
public void put(UK userKey, UV userValue) throws IOException, RocksDBException {
    final byte[] keyBytes =
        serializeCurrentKeyWithGroupAndNamespacePlusUserKey(userKey, userKeySerializer);
    final byte[] valueBytes = serializeValueNullSensitive(userValue, userValueSerializer);
    backend.db.put(columnFamily, writeOptions, keyBytes, valueBytes);
}
/** Records the edge (parentId, childName) -> childId in the edges column family. */
@Override
public void addChild(long parentId, String childName, Long childId) {
    final byte[] edgeKey = RocksUtils.toByteArray(parentId, childName);
    final byte[] childIdBytes = Longs.toByteArray(childId);
    try {
        mDb.put(mEdgesColumn, mDisableWAL, edgeKey, childIdBytes);
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Inserts a record into {@code table}, lazily creating its column family on first use.
 *
 * @return {@link Status#OK} on success, {@link Status#ERROR} on serialization or DB failure
 */
@Override
public Status insert(final String table, final String key, final Map<String, ByteIterator> values) {
    try {
        if (!COLUMN_FAMILIES.containsKey(table)) {
            createColumnFamily(table);
        }
        final ColumnFamilyHandle cf = COLUMN_FAMILIES.get(table).getHandle();
        final byte[] keyBytes = key.getBytes(UTF_8);
        final byte[] valueBytes = serializeValues(values);
        rocksDb.put(cf, keyBytes, valueBytes);
        return Status.OK;
    } catch (final RocksDBException | IOException e) {
        LOGGER.error(e.getMessage(), e);
        return Status.ERROR;
    }
}
/** Records that block {@code id} is stored at {@code location}, keyed by (id, workerId). */
@Override
public void addLocation(long id, BlockLocation location) {
    byte[] locationKey = RocksUtils.toByteArray(id, location.getWorkerId());
    byte[] locationBytes = location.toByteArray();
    try {
        mDb.put(mBlockLocationsColumn, locationKey, locationBytes);
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Writes {@code val} under {@code key}; a null {@code val} is treated as a delete.
 * Holds the reset read-lock for the duration of the operation.
 */
@Override
public void put(byte[] key, byte[] val) {
    resetDbLock.readLock().lock();
    try {
        if (logger.isTraceEnabled()) {
            logger.trace("~> RocksDbDataSource.put(): " + name + ", key: " + toHexString(key) + ", "
                    + (val == null ? "null" : val.length));
        }
        if (val == null) {
            db.delete(key);
        } else {
            db.put(key, val);
        }
        if (logger.isTraceEnabled()) {
            logger.trace("<~ RocksDbDataSource.put(): " + name + ", key: " + toHexString(key) + ", "
                    + (val == null ? "null" : val.length));
        }
    } catch (RocksDBException e) {
        logger.error("Failed to put into db '{}'", name, e);
        hintOnTooManyOpenFiles(e);
        throw new RuntimeException(e);
    } finally {
        resetDbLock.readLock().unlock();
    }
}
/** Persists {@code inode} in protobuf form, keyed by its inode id. */
@Override
public void writeInode(MutableInode<?> inode) {
    final byte[] idBytes = Longs.toByteArray(inode.getId());
    final byte[] protoBytes = inode.toProto().toByteArray();
    try {
        mDb.put(mInodesColumn, mDisableWAL, idBytes, protoBytes);
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
}
@Override public Status update(final String table, final String key, final Map<String, ByteIterator> values) { //TODO(AR) consider if this would be faster with merge operator try { if (!COLUMN_FAMILIES.containsKey(table)) { createColumnFamily(table); } final ColumnFamilyHandle cf = COLUMN_FAMILIES.get(table).getHandle(); final Map<String, ByteIterator> result = new HashMap<>(); final byte[] currentValues = rocksDb.get(cf, key.getBytes(UTF_8)); if(currentValues == null) { return Status.NOT_FOUND; } deserializeValues(currentValues, null, result); //update result.putAll(values); //store rocksDb.put(cf, key.getBytes(UTF_8), serializeValues(result)); return Status.OK; } catch(final RocksDBException | IOException e) { LOGGER.error(e.getMessage(), e); return Status.ERROR; } }
public void build(ILookupTable srcLookupTable) { File dbFolder = new File(dbPath); if (dbFolder.exists()) { logger.info("remove rocksdb folder:{} to rebuild table cache:{}", dbPath, tableDesc.getIdentity()); FileUtils.deleteQuietly(dbFolder); } else { logger.info("create new rocksdb folder:{} for table cache:{}", dbPath, tableDesc.getIdentity()); dbFolder.mkdirs(); } logger.info("start to build lookup table:{} to rocks db:{}", tableDesc.getIdentity(), dbPath); try (RocksDB rocksDB = RocksDB.open(options, dbPath)) { // todo use batch may improve write performance for (String[] row : srcLookupTable) { KV kv = encoder.encode(row); rocksDB.put(kv.getKey(), kv.getValue()); } } catch (RocksDBException e) { logger.error("error when put data to rocksDB", e); throw new RuntimeException("error when write data to rocks db", e); } logger.info("source table:{} has been written to rocks db:{}", tableDesc.getIdentity(), dbPath); } }
/**
 * Replaces the stored list with {@code values}: always clears first, then writes the
 * delimiter-joined serialization when the new list is non-empty.
 */
@Override
public void updateInternal(List<V> values) {
    Preconditions.checkNotNull(values, "List of values to add cannot be null.");
    clear();
    if (values.isEmpty()) {
        return;
    }
    try {
        byte[] stateKey = serializeCurrentKeyWithGroupAndNamespace();
        byte[] listBytes = serializeValueList(values, elementSerializer, DELIMITER);
        backend.db.put(columnFamily, writeOptions, stateKey, listBytes);
    } catch (IOException | RocksDBException e) {
        throw new FlinkRuntimeException("Error while updating data to RocksDB", e);
    }
}
/** Sets the state to {@code value}; a null value clears the state instead. */
@Override
public void update(V value) {
    if (value == null) {
        clear();
        return;
    }
    try {
        byte[] stateKey = serializeCurrentKeyWithGroupAndNamespace();
        byte[] valueBytes = serializeValue(value);
        backend.db.put(columnFamily, writeOptions, stateKey, valueBytes);
    } catch (Exception e) {
        throw new FlinkRuntimeException("Error while adding data to RocksDB", e);
    }
}
/**
 * Persists {@code value} as a JSON string under {@code key}.
 *
 * @throws FailStoreException if serialization or the DB write fails
 */
@Override
public void put(String key, Object value) throws FailStoreException {
    String valueString = JSON.toJSONString(value);
    WriteOptions writeOpts = new WriteOptions();
    try {
        writeOpts.setSync(true);
        writeOpts.setDisableWAL(true);
        // NOTE(review): setSync(true) together with setDisableWAL(true) is contradictory —
        // sync applies to the WAL, which is disabled here; confirm the intended durability.
        // StandardCharsets.UTF_8 replaces getBytes("UTF-8"): same bytes, no checked
        // UnsupportedEncodingException and no per-call charset lookup.
        db.put(writeOpts,
               key.getBytes(java.nio.charset.StandardCharsets.UTF_8),
               valueString.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    } catch (Exception e) {
        throw new FailStoreException(e);
    } finally {
        writeOpts.dispose();
    }
}
/**
 * Persists {@code value} as a JSON string under {@code key}.
 *
 * @throws FailStoreException if serialization or the DB write fails
 */
@Override
public void put(String key, Object value) throws FailStoreException {
    String valueString = JSON.toJSONString(value);
    WriteOptions writeOpts = new WriteOptions();
    try {
        writeOpts.setSync(true);
        writeOpts.setDisableWAL(true);
        // NOTE(review): setSync(true) together with setDisableWAL(true) is contradictory —
        // sync applies to the WAL, which is disabled here; confirm the intended durability.
        // StandardCharsets.UTF_8 replaces getBytes("UTF-8"): same bytes, no checked
        // UnsupportedEncodingException and no per-call charset lookup.
        db.put(writeOpts,
               key.getBytes(java.nio.charset.StandardCharsets.UTF_8),
               valueString.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    } catch (Exception e) {
        throw new FailStoreException(e);
    } finally {
        writeOpts.dispose();
    }
}
// Fragment of an enclosing method whose definition is not visible in this chunk:
// writes the serialized buffer under targetKey via the state backend's RocksDB handle.
backend.db.put(columnFamily, writeOptions, targetKey, dataOutputView.getCopyOfBuffer());
// Fragment of an enclosing method whose definition is not visible in this chunk:
// writes the serialized buffer under targetKey via the state backend's RocksDB handle.
backend.db.put(columnFamily, writeOptions, targetKey, dataOutputView.getCopyOfBuffer());