/**
 * Returns whether the map state holds an entry for the given user key.
 *
 * <p>An entry exists iff RocksDB stores any value under the composite
 * (key-group, key, namespace, user-key) key.
 *
 * @param userKey the user key to probe
 * @return {@code true} if a value is stored for the key
 * @throws IOException on key serialization failure
 * @throws RocksDBException on store access failure
 */
@Override
public boolean contains(UK userKey) throws IOException, RocksDBException {
    final byte[] compositeKey =
            serializeCurrentKeyWithGroupAndNamespacePlusUserKey(userKey, userKeySerializer);
    return backend.db.get(columnFamily, compositeKey) != null;
}
/**
 * Looks up the id of the child inode reached from {@code inodeId} via edge {@code name}.
 *
 * @param inodeId the parent inode id
 * @param name the child edge name
 * @return the child's inode id, or empty if no such edge exists
 */
@Override
public Optional<Long> getChildId(Long inodeId, String name) {
    final byte[] rawId;
    try {
        rawId = mDb.get(mEdgesColumn, RocksUtils.toByteArray(inodeId, name));
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
    return rawId == null ? Optional.empty() : Optional.of(Longs.fromByteArray(rawId));
}
/**
 * Returns the value mapped to the given user key, or {@code null} if absent.
 *
 * @param userKey the user key to look up
 * @return the deserialized user value, or {@code null} when no entry exists
 * @throws IOException on serialization/deserialization failure
 * @throws RocksDBException on store access failure
 */
@Override
public UV get(UK userKey) throws IOException, RocksDBException {
    final byte[] compositeKey =
            serializeCurrentKeyWithGroupAndNamespacePlusUserKey(userKey, userKeySerializer);
    final byte[] stored = backend.db.get(columnFamily, compositeKey);
    if (stored == null) {
        return null;
    }
    return deserializeUserValue(dataInputView, stored, userValueSerializer);
}
/**
 * Loads the block metadata stored under the given block id.
 *
 * @param id the block id
 * @return the parsed {@link BlockMeta}, or empty if the block is unknown
 */
@Override
public Optional<BlockMeta> getBlock(long id) {
    final byte[] serialized;
    try {
        serialized = mDb.get(mBlockMetaColumn, Longs.toByteArray(id));
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
    if (serialized == null) {
        return Optional.empty();
    }
    try {
        return Optional.of(BlockMeta.parseFrom(serialized));
    } catch (Exception e) {
        // A parse failure here means the stored bytes are corrupt or incompatible.
        throw new RuntimeException(e);
    }
}
/**
 * Fetches and deserializes the value stored for {@code key}.
 *
 * @param key the lookup key; a {@code null} key yields {@code null}
 * @return the deserialized value, or {@code null} when the key is null or absent
 * @throws IOException wrapping any underlying RocksDB failure
 */
@Override
public V get(K key) throws IOException {
    try {
        if (key == null) {
            return null;
        }
        final byte[] rawKey = serializer.serialize(key);
        final byte[] rawData = rocksDb.get(columnFamily, rawKey);
        if (rawData == null) {
            return null;
        }
        return (V) serializer.deserialize(rawData);
    } catch (RocksDBException e) {
        throw new IOException(String.format("Failed to get value by key-%s", key), e);
    }
}
/**
 * Best-effort read of the object stored under {@code key}.
 *
 * <p>Returns {@code null} on any failure: a missing key, a deserialization
 * error (in which case the corrupt entry is removed so future reads do not
 * keep failing), or a store-level error.
 *
 * <p>NOTE(review): {@code key.getBytes()} uses the platform default charset;
 * existing stored keys depend on that encoding, so switching to an explicit
 * UTF-8 would break lookups against already-written data — left as-is.
 *
 * @param key the lookup key
 * @return the deserialized object, or {@code null} when absent or unreadable
 */
@Override
public Object get(String key) {
    byte[] rawKey = key.getBytes();
    try {
        byte[] data = db.get(rawKey);
        if (data != null) {
            try {
                return deserialize(data);
            } catch (Exception e) {
                LOG.error("Failed to deserialize obj of " + key, e);
                // Drop the corrupt/incompatible entry rather than failing forever.
                db.remove(rawKey);
                return null;
            }
        }
    } catch (Exception e) {
        // Fix: the original swallowed this silently ("catch (Exception ignored) {}"),
        // hiding store failures entirely. Keep best-effort semantics (return null)
        // but surface the failure in the log.
        LOG.error("Failed to get value for " + key, e);
    }
    return null;
}
/**
 * Reads a raw value from the underlying RocksDB store.
 *
 * <p>Holds the reset read-lock for the duration of the call so the database
 * handle cannot be swapped out mid-read. Trace logging is emitted on entry
 * and exit when enabled.
 *
 * @param key the raw key bytes
 * @return the stored value bytes, or {@code null} when absent
 */
@Override
public byte[] get(byte[] key) {
    resetDbLock.readLock().lock();
    try {
        if (logger.isTraceEnabled()) {
            logger.trace("~> RocksDbDataSource.get(): " + name + ", key: " + toHexString(key));
        }
        final byte[] value = db.get(readOpts, key);
        if (logger.isTraceEnabled()) {
            logger.trace("<~ RocksDbDataSource.get(): " + name + ", key: " + toHexString(key) + ", " + (value == null ? "null" : value.length));
        }
        return value;
    } catch (RocksDBException e) {
        logger.error("Failed to get from db '{}'", name, e);
        hintOnTooManyOpenFiles(e);
        throw new RuntimeException(e);
    } finally {
        resetDbLock.readLock().unlock();
    }
}
/**
 * Loads the mutable inode stored under the given inode id.
 *
 * @param id the inode id
 * @return the inode rebuilt from its proto representation, or empty if unknown
 */
@Override
public Optional<MutableInode<?>> getMutable(long id) {
    final byte[] serialized;
    try {
        serialized = mDb.get(mInodesColumn, Longs.toByteArray(id));
    } catch (RocksDBException e) {
        throw new RuntimeException(e);
    }
    if (serialized == null) {
        return Optional.empty();
    }
    try {
        return Optional.of(MutableInode.fromProto(InodeMeta.Inode.parseFrom(serialized)));
    } catch (Exception e) {
        // Parse failure indicates corrupt or incompatible stored bytes.
        throw new RuntimeException(e);
    }
}
/**
 * Reads and deserializes the state value stored under the given raw key.
 *
 * @param key the serialized composite key
 * @return the deserialized state value, or {@code null} when no value is stored
 * @throws FlinkRuntimeException wrapping any I/O or RocksDB failure
 */
SV getInternal(byte[] key) {
    try {
        final byte[] stored = backend.db.get(columnFamily, key);
        if (stored == null) {
            return null;
        }
        dataInputView.setBuffer(stored);
        return valueSerializer.deserialize(dataInputView);
    } catch (IOException | RocksDBException e) {
        throw new FlinkRuntimeException("Error while retrieving data from RocksDB", e);
    }
}
/**
 * Reads the list stored under the current key/namespace and deserializes it.
 *
 * @return the deserialized list (semantics of a {@code null} stored value are
 *     delegated to {@code deserializeList})
 * @throws FlinkRuntimeException wrapping any RocksDB failure
 */
@Override
public List<V> getInternal() {
    try {
        final byte[] compositeKey = serializeCurrentKeyWithGroupAndNamespace();
        return deserializeList(backend.db.get(columnFamily, compositeKey));
    } catch (RocksDBException e) {
        throw new FlinkRuntimeException("Error while retrieving data from RocksDB", e);
    }
}
/**
 * Fetches the row stored under the given string key parts.
 *
 * @param key the key components, encoded with a length prefix
 * @return the decoded row, or {@code null} when no row exists for the key
 * @throws IllegalStateException wrapping any RocksDB failure
 */
@Override
public String[] getRow(Array<String> key) {
    final byte[] encodedKey = rowEncoder.encodeStringsWithLenPfx(key.data, false);
    final byte[] storedValue;
    try {
        storedValue = rocksDB.get(encodedKey);
    } catch (RocksDBException e) {
        throw new IllegalStateException("error when get key from rocksdb", e);
    }
    if (storedValue == null) {
        return null;
    }
    return rowEncoder.decode(new KV(encodedKey, storedValue));
}
/**
 * Populates {@code metric} from the value stored under {@code key}.
 *
 * @param key the metric's RocksDB key
 * @param metric the metric instance to fill in
 * @return {@code true} if a value was found and applied, {@code false} if absent
 * @throws MetricException on any failure reading or decoding the value; the
 *     failure meter (when configured) is marked before rethrowing
 */
boolean populateFromKey(RocksDbKey key, Metric metric) throws MetricException {
    try {
        final byte[] raw = db.get(key.getRaw());
        if (raw == null) {
            return false;
        }
        new RocksDbValue(raw).populateMetric(metric);
        return true;
    } catch (Exception e) {
        final String message = "Failed to populate metric";
        LOG.error(message, e);
        if (this.failureMeter != null) {
            this.failureMeter.mark();
        }
        throw new MetricException(message, e);
    }
}
/**
 * Reads the record stored under {@code key} in {@code table}, deserializing
 * the requested {@code fields} into {@code result}.
 *
 * <p>NOTE(review): the contains-then-create sequence on COLUMN_FAMILIES is not
 * atomic from this method's view; assumed safe via createColumnFamily — confirm.
 *
 * @return {@link Status#OK} on success, {@link Status#NOT_FOUND} when the key
 *     is absent, {@link Status#ERROR} on a store failure
 */
@Override
public Status read(final String table, final String key, final Set<String> fields,
                   final Map<String, ByteIterator> result) {
    try {
        if (!COLUMN_FAMILIES.containsKey(table)) {
            createColumnFamily(table);
        }
        final ColumnFamilyHandle cf = COLUMN_FAMILIES.get(table).getHandle();
        final byte[] storedValues = rocksDb.get(cf, key.getBytes(UTF_8));
        if (storedValues == null) {
            return Status.NOT_FOUND;
        }
        deserializeValues(storedValues, fields, result);
        return Status.OK;
    } catch (final RocksDBException e) {
        LOGGER.error(e.getMessage(), e);
        return Status.ERROR;
    }
}
@Override public Status update(final String table, final String key, final Map<String, ByteIterator> values) { //TODO(AR) consider if this would be faster with merge operator try { if (!COLUMN_FAMILIES.containsKey(table)) { createColumnFamily(table); } final ColumnFamilyHandle cf = COLUMN_FAMILIES.get(table).getHandle(); final Map<String, ByteIterator> result = new HashMap<>(); final byte[] currentValues = rocksDb.get(cf, key.getBytes(UTF_8)); if(currentValues == null) { return Status.NOT_FOUND; } deserializeValues(currentValues, null, result); //update result.putAll(values); //store rocksDb.put(cf, key.getBytes(UTF_8), serializeValues(result)); return Status.OK; } catch(final RocksDBException | IOException e) { LOGGER.error(e.getMessage(), e); return Status.ERROR; } }
/**
 * Returns the value stored under the current key and namespace, or the state's
 * configured default when no value is stored.
 *
 * @return the deserialized value, or the default value when absent
 * @throws FlinkRuntimeException wrapping any I/O or RocksDB failure
 */
@Override
public V value() {
    try {
        final byte[] stored =
                backend.db.get(columnFamily, serializeCurrentKeyWithGroupAndNamespace());
        if (stored == null) {
            return getDefaultValue();
        }
        dataInputView.setBuffer(stored);
        return valueSerializer.deserialize(dataInputView);
    } catch (IOException | RocksDBException e) {
        throw new FlinkRuntimeException("Error while retrieving data from RocksDB.", e);
    }
}
private String metadataIdToString(KeyType type, int id, Map<Integer, String> lookupCache) throws MetricException { String s = readOnlyStringMetadataCache.getMetadataString(id); if (s != null) { return s; } s = lookupCache.get(id); if (s != null) { return s; } // get from DB and add to lookup cache RocksDbKey key = new RocksDbKey(type, id); try { byte[] value = db.get(key.getRaw()); if (value == null) { throw new MetricException("Failed to find metadata string for id " + id + " of type " + type); } RocksDbValue rdbValue = new RocksDbValue(value); s = rdbValue.getMetdataString(); lookupCache.put(id, s); return s; } catch (RocksDBException e) { if (this.failureMeter != null) { this.failureMeter.mark(); } throw new MetricException("Failed to get from RocksDb", e); } }
@Override public void mergeNamespaces(N target, Collection<N> sources) { if (sources == null || sources.isEmpty()) { return; } try { // create the target full-binary-key setCurrentNamespace(target); final byte[] targetKey = serializeCurrentKeyWithGroupAndNamespace(); // merge the sources to the target for (N source : sources) { if (source != null) { setCurrentNamespace(source); final byte[] sourceKey = serializeCurrentKeyWithGroupAndNamespace(); byte[] valueBytes = backend.db.get(columnFamily, sourceKey); backend.db.delete(columnFamily, writeOptions, sourceKey); if (valueBytes != null) { backend.db.merge(columnFamily, writeOptions, targetKey, valueBytes); } } } } catch (Exception e) { throw new FlinkRuntimeException("Error while merging state in RocksDB", e); } }
@Override public byte[] getSerializedValue( final byte[] serializedKeyAndNamespace, final TypeSerializer<K> safeKeySerializer, final TypeSerializer<N> safeNamespaceSerializer, final TypeSerializer<V> safeValueSerializer) throws Exception { //TODO make KvStateSerializer key-group aware to save this round trip and key-group computation Tuple2<K, N> keyAndNamespace = KvStateSerializer.deserializeKeyAndNamespace( serializedKeyAndNamespace, safeKeySerializer, safeNamespaceSerializer); int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(keyAndNamespace.f0, backend.getNumberOfKeyGroups()); RocksDBSerializedCompositeKeyBuilder<K> keyBuilder = new RocksDBSerializedCompositeKeyBuilder<>( safeKeySerializer, backend.getKeyGroupPrefixBytes(), 32 ); keyBuilder.setKeyAndKeyGroup(keyAndNamespace.f0, keyGroup); byte[] key = keyBuilder.buildCompositeKeyNamespace(keyAndNamespace.f1, namespaceSerializer); return backend.db.get(columnFamily, key); }