/**
 * Appends all given values to the list state by merging one pre-serialized
 * blob into RocksDB under the current key/namespace.
 *
 * @param values the non-null list of values to append; an empty list is a no-op
 * @throws FlinkRuntimeException if serialization or the RocksDB merge fails
 */
@Override
public void addAll(List<V> values) {
    Preconditions.checkNotNull(values, "List of values to add cannot be null.");
    if (values.isEmpty()) {
        return; // nothing to merge
    }
    try {
        final byte[] rawKeyBytes = serializeCurrentKeyWithGroupAndNamespace();
        final byte[] rawValueBytes = serializeValueList(values, elementSerializer, DELIMITER);
        backend.db.merge(columnFamily, writeOptions, rawKeyBytes, rawValueBytes);
    } catch (IOException | RocksDBException e) {
        throw new FlinkRuntimeException("Error while updating data to RocksDB", e);
    }
}
/**
 * Appends a single value to the list state via a RocksDB merge operand.
 *
 * @param value the non-null value to append
 * @throws FlinkRuntimeException if serialization or the RocksDB merge fails
 */
@Override
public void add(V value) {
    Preconditions.checkNotNull(value, "You cannot add null to a ListState.");
    try {
        backend.db.merge(
                columnFamily,
                writeOptions,
                serializeCurrentKeyWithGroupAndNamespace(),
                serializeValue(value, elementSerializer));
    } catch (IOException | RocksDBException e) {
        // Catch the narrow checked types (consistent with addAll) instead of a
        // blanket Exception, so programming errors are not silently rewrapped.
        throw new FlinkRuntimeException("Error while adding data to RocksDB", e);
    }
}
/**
 * Merges the state of all {@code sources} namespaces into the {@code target}
 * namespace for the current key, using RocksDB merge operands.
 *
 * <p>Side effect: mutates the state's "current namespace" via
 * {@link #setCurrentNamespace} and does not restore it afterwards — callers
 * that rely on the current namespace must reset it themselves.
 *
 * @param target  namespace that receives the merged state
 * @param sources namespaces whose state is moved into {@code target}; null or
 *                empty is a no-op; null elements are skipped
 * @throws FlinkRuntimeException if any RocksDB read/delete/merge fails
 */
@Override
public void mergeNamespaces(N target, Collection<N> sources) {
    if (sources == null || sources.isEmpty()) {
        return;
    }
    try {
        // create the target full-binary-key
        setCurrentNamespace(target);
        final byte[] targetKey = serializeCurrentKeyWithGroupAndNamespace();
        // merge the sources to the target
        for (N source : sources) {
            if (source != null) {
                setCurrentNamespace(source);
                final byte[] sourceKey = serializeCurrentKeyWithGroupAndNamespace();
                // Move semantics: read the source bytes, delete the source entry,
                // then hand the raw bytes to the merge operator under the target key.
                byte[] valueBytes = backend.db.get(columnFamily, sourceKey);
                backend.db.delete(columnFamily, writeOptions, sourceKey);
                if (valueBytes != null) {
                    backend.db.merge(columnFamily, writeOptions, targetKey, valueBytes);
                }
            }
        }
    } catch (Exception e) {
        throw new FlinkRuntimeException("Error while merging state in RocksDB", e);
    }
}
/**
 * Merges a record into an existing key of the given table and commits
 * immediately, delegating to the database's configured merge operator.
 *
 * @param table name of the table (column family) to merge into
 * @param key   row key
 * @param value merge operand
 * @throws BackendException if the underlying RocksDB merge fails
 */
@Override
public void increase(String table, byte[] key, byte[] value) {
    try {
        rocksdb().merge(cf(table), key, value);
    } catch (RocksDBException e) {
        throw new BackendException(e);
    }
}
/**
 * Adds a merge operand for the given key/value pair in the default column
 * family. The stored value is combined with {@code value} by the database's
 * configured merge operator.
 *
 * @param key   key whose stored value the operand is merged into
 * @param value the merge operand
 * @throws RocksDBException thrown if error happens in underlying native library
 */
public void merge(final byte[] key, final byte[] value) throws RocksDBException {
    final int keyLength = key.length;
    final int valueLength = value.length;
    merge(nativeHandle_, key, 0, keyLength, value, 0, valueLength);
}
/**
 * Adds a merge operand for the given key/value pair in the specified column
 * family. The stored value is combined with {@code value} by the column
 * family's configured merge operator.
 *
 * @param columnFamilyHandle {@link ColumnFamilyHandle} instance
 * @param key                key whose stored value the operand is merged into
 * @param value              the merge operand
 * @throws RocksDBException thrown if error happens in underlying native library
 */
public void merge(final ColumnFamilyHandle columnFamilyHandle, final byte[] key,
        final byte[] value) throws RocksDBException {
    final int keyLength = key.length;
    final int valueLength = value.length;
    merge(nativeHandle_, key, 0, keyLength, value, 0, valueLength,
            columnFamilyHandle.nativeHandle_);
}
/**
 * Adds a merge operand for the given key/value pair under explicit write
 * options. The stored value is combined with {@code value} by the database's
 * configured merge operator.
 *
 * @param writeOpts {@link WriteOptions} for this write
 * @param key       key whose stored value the operand is merged into
 * @param value     the merge operand
 * @throws RocksDBException thrown if error happens in underlying native library
 */
public void merge(final WriteOptions writeOpts, final byte[] key,
        final byte[] value) throws RocksDBException {
    final int keyLength = key.length;
    final int valueLength = value.length;
    merge(nativeHandle_, writeOpts.nativeHandle_, key, 0, keyLength, value, 0, valueLength);
}
/**
 * Adds a merge operand for the given key/value pair in the specified column
 * family, under explicit write options. The stored value is combined with
 * {@code value} by the column family's configured merge operator.
 *
 * @param columnFamilyHandle {@link ColumnFamilyHandle} instance
 * @param writeOpts          {@link WriteOptions} for this write
 * @param key                key whose stored value the operand is merged into
 * @param value              the merge operand
 * @throws RocksDBException thrown if error happens in underlying native library
 */
public void merge(final ColumnFamilyHandle columnFamilyHandle,
        final WriteOptions writeOpts, final byte[] key,
        final byte[] value) throws RocksDBException {
    final int keyLength = key.length;
    final int valueLength = value.length;
    merge(nativeHandle_, writeOpts.nativeHandle_, key, 0, keyLength,
            value, 0, valueLength, columnFamilyHandle.nativeHandle_);
}
/**
 * Adds a merge operand for {@code key}, combining {@code val} with the
 * currently stored value via the database's configured merge operator.
 *
 * @param key the key to merge into; an empty key is rejected
 * @param val the merge operand
 * @return true if the merge was issued successfully, false otherwise
 */
public boolean merge(String key, String val) {
    if (key.isEmpty()) {
        return false;
    }
    try {
        // Encode with an explicit charset: the no-arg getBytes() uses the
        // platform default and can yield different bytes on different hosts.
        // (UnsupportedEncodingException is caught by the Exception handler.)
        db.merge(key.getBytes("UTF-8"), val.getBytes("UTF-8"));
        return true;
    } catch (Exception e) {
        // Single log call carrying the full throwable, instead of
        // printStackTrace() plus a message-only log that loses the stack trace.
        log.error("RocksDB merge failed for key: " + key, e);
    }
    return false;
}
/**
 * Merges a record into an existing key of the given table and commits
 * immediately; the combination is performed by the configured merge operator.
 *
 * @param table name of the table (column family) to merge into
 * @param key   row key
 * @param value merge operand
 * @throws BackendException if the underlying RocksDB merge fails
 */
@Override
public void increase(String table, byte[] key, byte[] value) {
    try {
        rocksdb().merge(cf(table), key, value);
    } catch (RocksDBException e) {
        throw new BackendException(e);
    }
}
/**
 * Adjusts the counter stored at {@code key} by {@code amount}, issuing a
 * RocksDB merge operand — through the pending write batch when one is open,
 * otherwise directly against the database.
 *
 * @param key    non-null counter key
 * @param amount delta encoded via {@code encodeCounter} and handed to the
 *               merge operator
 * @throws NullPointerException  if {@code key} is null
 * @throws IllegalStateException if this store has been closed
 * @throws RuntimeException      if the direct database merge fails
 */
@Override
public void adjustCounter(byte[] key, long amount) {
    // Explicit precondition instead of the obscure key.getClass() null-check
    // idiom; throws the same NullPointerException on a null key.
    Preconditions.checkNotNull(key);
    Preconditions.checkState(!this.closed, "closed");
    this.cursorTracker.poll();
    final byte[] value = this.encodeCounter(amount);
    if (this.writeBatch != null) {
        assert RocksDBUtil.isInitialized(this.writeBatch);
        // Batch writes are serialized on the batch itself.
        synchronized (this.writeBatch) {
            this.writeBatch.merge(key, value);
        }
    } else {
        assert RocksDBUtil.isInitialized(this.db);
        try {
            this.db.merge(key, value);
        } catch (RocksDBException e) {
            throw new RuntimeException("RocksDB error", e);
        }
    }
}
/**
 * Appends all given values to the list state with a single RocksDB merge of
 * a pre-merged serialized blob.
 *
 * @param values non-null values to append; an empty list is a no-op
 * @throws FlinkRuntimeException if serialization or the merge fails
 */
@Override
public void addAll(List<V> values) {
    Preconditions.checkNotNull(values, "List of values to add cannot be null.");
    if (values.isEmpty()) {
        return; // nothing to do
    }
    try {
        writeCurrentKeyWithGroupAndNamespace();
        final byte[] rawKeyBytes = dataOutputView.getCopyOfBuffer();
        final byte[] preMergedValues = getPreMergedValue(values, elementSerializer, dataOutputView);
        backend.db.merge(columnFamily, writeOptions, rawKeyBytes, preMergedValues);
    } catch (IOException | RocksDBException e) {
        throw new FlinkRuntimeException("Error while updating data to RocksDB", e);
    }
}
/**
 * Appends every value in {@code values} to this list state, serializing them
 * into one pre-merged blob and issuing a single RocksDB merge.
 *
 * @param values non-null values to append; an empty list does nothing
 * @throws FlinkRuntimeException if serialization or the merge fails
 */
@Override
public void addAll(List<V> values) {
    Preconditions.checkNotNull(values, "List of values to add cannot be null.");
    if (values.isEmpty()) {
        return;
    }
    try {
        writeCurrentKeyWithGroupAndNamespace();
        final byte[] serializedKey = dataOutputView.getCopyOfBuffer();
        final byte[] mergedValueBlob = getPreMergedValue(values, elementSerializer, dataOutputView);
        backend.db.merge(columnFamily, writeOptions, serializedKey, mergedValueBlob);
    } catch (IOException | RocksDBException e) {
        throw new FlinkRuntimeException("Error while updating data to RocksDB", e);
    }
}
/**
 * Appends {@code value} to the list state by serializing it and merging the
 * bytes into RocksDB under the current key/namespace.
 *
 * <p>Note: the same {@code keySerializationStream} buffer is reused for both
 * the key and the value serialization, with a reset in between.
 *
 * @param value the value to append
 * @throws IOException declared by the interface; failures are actually
 *                     wrapped in a RuntimeException
 */
@Override
public void add(V value) throws IOException {
    try {
        writeCurrentKeyWithGroupAndNamespace();
        final byte[] rawKey = keySerializationStream.toByteArray();
        keySerializationStream.reset();
        final DataOutputViewStreamWrapper valueOut =
                new DataOutputViewStreamWrapper(keySerializationStream);
        valueSerializer.serialize(value, valueOut);
        final byte[] rawValue = keySerializationStream.toByteArray();
        backend.db.merge(columnFamily, writeOptions, rawKey, rawValue);
    } catch (Exception e) {
        throw new RuntimeException("Error while adding data to RocksDB", e);
    }
}
// Hand the raw source bytes to RocksDB under the target key; the configured
// merge operator combines them with any value already stored there.
backend.db.merge(columnFamily, writeOptions, targetKey, valueBytes);
bk.putLong(10); for (int i = 0; i < 10000; i++) { db.merge(k, bk.array());
/**
 * Appends a single value to the list state via a RocksDB merge operand.
 *
 * @param value non-null value to append
 * @throws FlinkRuntimeException if serialization or the merge fails
 */
@Override
public void add(V value) {
    Preconditions.checkNotNull(value, "You cannot add null to a ListState.");
    try {
        writeCurrentKeyWithGroupAndNamespace();
        final byte[] rawKeyBytes = dataOutputView.getCopyOfBuffer();
        // Reuse the output view for the value after snapshotting the key.
        dataOutputView.clear();
        elementSerializer.serialize(value, dataOutputView);
        final byte[] rawValueBytes = dataOutputView.getCopyOfBuffer();
        backend.db.merge(columnFamily, writeOptions, rawKeyBytes, rawValueBytes);
    } catch (Exception e) {
        throw new FlinkRuntimeException("Error while adding data to RocksDB", e);
    }
}
/**
 * Adds one value to this list state by serializing it and issuing a RocksDB
 * merge under the current key/namespace.
 *
 * @param value non-null value to add
 * @throws FlinkRuntimeException if serialization or the merge fails
 */
@Override
public void add(V value) {
    Preconditions.checkNotNull(value, "You cannot add null to a ListState.");
    try {
        writeCurrentKeyWithGroupAndNamespace();
        final byte[] serializedKey = dataOutputView.getCopyOfBuffer();
        // The same view buffer serves the value once the key is snapshotted.
        dataOutputView.clear();
        elementSerializer.serialize(value, dataOutputView);
        final byte[] serializedValue = dataOutputView.getCopyOfBuffer();
        backend.db.merge(columnFamily, writeOptions, serializedKey, serializedValue);
    } catch (Exception e) {
        throw new FlinkRuntimeException("Error while adding data to RocksDB", e);
    }
}
@Override public void mergeNamespaces(N target, Collection<N> sources) { if (sources == null || sources.isEmpty()) { return; } // cache key and namespace final K key = backend.getCurrentKey(); final int keyGroup = backend.getCurrentKeyGroupIndex(); try { // create the target full-binary-key writeKeyWithGroupAndNamespace(keyGroup, key, target, dataOutputView); final byte[] targetKey = dataOutputView.getCopyOfBuffer(); // merge the sources to the target for (N source : sources) { if (source != null) { writeKeyWithGroupAndNamespace(keyGroup, key, source, dataOutputView); byte[] sourceKey = dataOutputView.getCopyOfBuffer(); byte[] valueBytes = backend.db.get(columnFamily, sourceKey); backend.db.delete(columnFamily, writeOptions, sourceKey); if (valueBytes != null) { backend.db.merge(columnFamily, writeOptions, targetKey, valueBytes); } } } } catch (Exception e) { throw new FlinkRuntimeException("Error while merging state in RocksDB", e); } }
@Override public void mergeNamespaces(N target, Collection<N> sources) { if (sources == null || sources.isEmpty()) { return; } // cache key and namespace final K key = backend.getCurrentKey(); final int keyGroup = backend.getCurrentKeyGroupIndex(); try { // create the target full-binary-key writeKeyWithGroupAndNamespace(keyGroup, key, target, dataOutputView); final byte[] targetKey = dataOutputView.getCopyOfBuffer(); // merge the sources to the target for (N source : sources) { if (source != null) { writeKeyWithGroupAndNamespace(keyGroup, key, source, dataOutputView); byte[] sourceKey = dataOutputView.getCopyOfBuffer(); byte[] valueBytes = backend.db.get(columnFamily, sourceKey); backend.db.delete(columnFamily, writeOptions, sourceKey); if (valueBytes != null) { backend.db.merge(columnFamily, writeOptions, targetKey, valueBytes); } } } } catch (Exception e) { throw new FlinkRuntimeException("Error while merging state in RocksDB", e); } }