/**
 * Deletes every document whose meta id matches the given regular expression,
 * via a N1QL {@code DELETE} against the current bucket.
 *
 * @param regex regular expression matched against {@code meta().id}
 * @return the number of mutations (deleted documents) reported by the query
 */
@Override
public long removeByPattern(String regex) throws IOException {
    final Statement deleteStatement =
            Delete.deleteFromCurrentBucket().where(regexpContains("meta().id", regex));
    final N1qlQueryResult queryResult = bucket.query(N1qlQuery.simple(deleteStatement));
    if (logger.isDebugEnabled()) {
        logger.debug("Deleted documents using regex {}, result={}", regex, queryResult);
    }
    return queryResult.info().mutationCount();
}
}
/**
 * Upserts the given record into the bucket and wraps the stored document
 * in a generic write response.
 *
 * @param record the document to write
 * @throws IOException if the upsert fails for any reason (the cause is preserved)
 */
@Override
public WriteResponse write(D record) throws IOException {
    try {
        final D stored = _bucket.upsert(record);
        return new GenericWriteResponse(stored);
    } catch (Exception e) {
        throw new IOException("Failed to write to Couchbase cluster", e);
    }
}
/**
 * Performs the {@link #read(String, String, Set, Map)} operation via Key/Value ("get").
 *
 * @param docId the document ID
 * @param fields the fields to be loaded
 * @param result the result map where the doc needs to be converted into
 * @return The result of the operation.
 */
private Status readKv(final String docId, final Set<String> fields,
        final Map<String, ByteIterator> result) throws Exception {
    final RawJsonDocument doc = bucket.get(docId, RawJsonDocument.class);
    if (doc == null) {
        return Status.NOT_FOUND;
    }
    decode(doc.content(), fields, result);
    return Status.OK;
}
@Override public <K, V> boolean replace(AtomicCacheEntry<K, V, Long> entry, Serializer<K> keySerializer, Serializer<V> valueSerializer) throws IOException { final Long revision = entry.getRevision().orElse(0L); final String docId = toDocumentId(entry.getKey(), keySerializer); final Document doc = toDocument(docId, entry.getValue(), valueSerializer, revision); try { if (revision < 0) { // If the document does not exist yet, try to create one. try { bucket.insert(doc); return true; } catch (DocumentAlreadyExistsException e) { return false; } } bucket.replace(doc); return true; } catch (DocumentDoesNotExistException|CASMismatchException e) { return false; } }
/** Resets the bucket after each test, via N1QL when the query service is available. */
@After
public void clear() {
    final boolean n1qlAvailable = getCouchbaseContainer().isIndex()
            && getCouchbaseContainer().isQuery()
            && getCouchbaseContainer().isPrimaryIndex();
    if (!n1qlAvailable) {
        // No query/index service configured: flush the whole bucket instead.
        getBucket().bucketManager().flush();
        return;
    }
    // Delete every document, waiting for the index to observe this statement's mutations.
    getBucket().query(N1qlQuery.simple(
            String.format("DELETE FROM `%s`", getBucket().name()),
            N1qlParams.build().consistency(ScanConsistency.STATEMENT_PLUS)));
}
/** Inserts a design document with one view and verifies it can be read back. */
@Test
public void shouldCreateView() {
    final View view = DefaultView.create(VIEW_NAME, VIEW_FUNCTION);
    final DesignDocument designDoc = DesignDocument.create(VIEW_NAME, Lists.newArrayList(view));
    getBucket().bucketManager().insertDesignDocument(designDoc);

    final DesignDocument fetched = getBucket().bucketManager().getDesignDocument(VIEW_NAME);
    Assert.assertEquals(1, fetched.views().size());
    final View storedView = fetched.views().get(0);
    Assert.assertEquals(VIEW_NAME, storedView.name());
}
}
/** Releases the bucket connection, if one was ever opened. */
@Override
public void disconnect() {
    if (bucket == null) {
        return;
    }
    bucket.close();
}
/**
 * Execute native query.
 *
 * @param n1qlQuery
 *            the n1ql query
 * @param em
 *            the entity manager
 * @return the list of result rows
 */
public List executeNativeQuery(String n1qlQuery, EntityMetadata em)
{
    N1qlQueryResult result = bucket
            .query(N1qlQuery.simple(n1qlQuery, N1qlParams.build().consistency(ScanConsistency.REQUEST_PLUS)));
    // Parameterized logging: the message is only assembled when DEBUG is enabled,
    // instead of paying for string concatenation on every call.
    LOGGER.debug("Executed query : {} on the {} Bucket", n1qlQuery, bucket.name());
    validateQueryResults(n1qlQuery, result);
    return result.allRows();
}
/**
 * Seeds the test bucket with a single artist document and verifies it is visible
 * to a consistent N1QL query.
 *
 * @param cluster the cluster to open {@code BUCKET_NAME} on
 * @return {@code true} when exactly one row matched the verification query
 */
public static boolean fillDB(CouchbaseCluster cluster) {
    Bucket couchbaseBucket = cluster.openBucket(BUCKET_NAME);
    try {
        couchbaseBucket.insert(JsonDocument.create("artist:vincent_van_gogh", VINCENT_VAN_GOGH));
        N1qlQueryResult queryResult = couchbaseBucket.query(N1qlQuery.simple(
                String.format(QUERY, BUCKET_NAME),
                N1qlParams.build().consistency(ScanConsistency.REQUEST_PLUS)));
        return queryResult.info().resultCount() == 1;
    } finally {
        // Fix: the original leaked the bucket when insert() or query() threw —
        // always release it, even on failure.
        couchbaseBucket.close();
    }
}
/**
 * Verifies that {@code apoc.couchbase.upsert} stores the document and echoes its
 * content back to the caller, then cleans up the inserted test document.
 */
@Test
@SuppressWarnings("unchecked")
public void testUpsertViaCall() {
    testCall(graphDB, "CALL apoc.couchbase.upsert({host}, {bucket}, 'testUpsertViaCall', {data})",
            map("host", HOST, "bucket", BUCKET_NAME, "data", VINCENT_VAN_GOGH.toString()), r -> {
                // The procedure result row is expected to expose the upserted JSON as a Map
                // under the "content" key.
                assertTrue(r.get("content") instanceof Map);
                Map<String, Object> content = (Map<String, Object>) r.get("content");
                assertTrue(content.get("notableWorks") instanceof List);
                List<String> notableWorks = (List<String>) content.get("notableWorks");
                // Delegates field-by-field comparison against the VINCENT_VAN_GOGH fixture.
                checkDocumentContent(
                        (String) content.get("firstName"),
                        (String) content.get("secondName"),
                        (String) content.get("lastName"),
                        notableWorks);
                // Clean up: remove the document and confirm it is gone.
                couchbaseBucket.remove("testUpsertViaCall");
                assertFalse(couchbaseBucket.exists("testUpsertViaCall"));
            });
}
/**
 * Persists an entity as a JSON document: inserts on first save, upserts on update.
 *
 * @param entityMetadata metadata describing the entity being persisted
 * @param entity the entity instance to store
 * @param id the entity identifier
 * @param rlHolders relation holders for the entity (unused here)
 */
@Override
protected void onPersist(EntityMetadata entityMetadata, Object entity, Object id,
        List<RelationHolder> rlHolders)
{
    JsonDocument doc = handler.getDocumentFromEntity(entityMetadata, entity, kunderaMetadata);
    if (!isUpdate)
    {
        bucket.insert(doc);
        // Parameterized logging: no concatenation cost when DEBUG is disabled.
        LOGGER.debug("Inserted document with ID : {} in the {} Bucket", doc.id(), bucket.name());
    }
    else
    {
        bucket.upsert(doc);
        LOGGER.debug("Updated document with ID : {} in the {} Bucket", doc.id(), bucket.name());
    }
}
// NOTE(review): this span appears to be a merge/extraction artifact, not compilable
// code — the same docId is fetched twice (once as RawJsonDocument, once as
// BinaryDocument, the second shadowing the first `document` local), both
// outputStreamCallback lambdas are left unclosed, and the CAS attribute reads
// `doc.cas()` although only `document` is declared here. Reconcile against the
// original processor source before relying on this block.
RawJsonDocument document = bucket.get(docId, RawJsonDocument.class);
if (document != null) {
    outputStreamCallback = out -> {
        BinaryDocument document = bucket.get(docId, BinaryDocument.class);
        if (document != null) {
            outputStreamCallback = out -> {
                updatedAttrs.put(CouchbaseAttributes.Bucket.key(), bucket.name());
                updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
                updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
// Fragment: resolves the configured replication policy, upserts the document with the
// requested durability, then records bucket name / doc id / CAS as attributes.
// NOTE(review): `doc`, `persistTo`, `context`, `docId` and `updatedAttrs` are defined
// outside this visible span — confirm their types against the enclosing method.
final ReplicateTo replicateTo = ReplicateTo.valueOf(context.getProperty(REPLICATE_TO).getValue());
final Bucket bucket = openBucket(context);
doc = bucket.upsert(doc, persistTo, replicateTo);
updatedAttrs.put(CouchbaseAttributes.Bucket.key(), bucket.name());
updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
/**
 * Removes the document stored under the serialized form of the given key.
 *
 * @return {@code true} when a document was removed, {@code false} when none existed
 */
@Override
public <K> boolean remove(K key, Serializer<K> serializer) throws IOException {
    final String docId = toDocumentId(key, serializer);
    try {
        bucket.remove(docId);
    } catch (DocumentDoesNotExistException e) {
        return false;
    }
    return true;
}
/**
 * Close bucket.
 *
 * @param bucket
 *            the bucket to close; a {@code null} bucket is silently ignored
 * @throws KunderaException
 *             when the bucket reports that it could not be closed
 */
public static void closeBucket(Bucket bucket)
{
    if (bucket == null)
    {
        return;
    }
    if (bucket.close())
    {
        LOGGER.debug("Bucket [" + bucket.name() + "] is closed!");
    }
    else
    {
        LOGGER.error("Not able to close bucket [" + bucket.name() + "].");
        throw new KunderaException("Not able to close bucket [" + bucket.name() + "].");
    }
}
/**
 * Stores the value only when no document exists yet for the serialized key.
 *
 * @return {@code true} if the value was stored, {@code false} if the key was already present
 */
@Override
public <K, V> boolean putIfAbsent(K key, V value, Serializer<K> keySerializer,
        Serializer<V> valueSerializer) throws IOException {
    final Document doc = toDocument(toDocumentId(key, keySerializer), value, valueSerializer);
    try {
        bucket.insert(doc);
    } catch (DocumentAlreadyExistsException e) {
        return false;
    }
    return true;
}
// Fragment: presumably registers the async views of the bucket and its repository
// with the binder, then closes every previously-registered bucket, logging any close
// failure at DEBUG and swallowing it (best-effort cleanup).
// NOTE(review): `bind`, `name`, `buckets`, `r`, and `log` come from outside this span.
AsyncBucket async = bucket.async();
bind.apply(AsyncBucket.class, name, async);
Repository repo = bucket.repository();
AsyncRepository asyncrepo = repo.async();
buckets.forEach(n -> Try.apply(() -> r.require(n, Bucket.class).close())
        .onFailure(x -> log.debug("bucket {} close operation resulted in exception", n, x))
        .orElse(false));