/**
 * Deletes every document whose meta().id matches the given regular
 * expression, using a single N1QL DELETE statement on the current bucket.
 *
 * @param regex the regular expression matched against document ids.
 * @return the number of documents removed (the mutation count).
 * @throws IOException declared by the interface contract.
 */
@Override
public long removeByPattern(String regex) throws IOException {
    Statement deleteStatement = Delete.deleteFromCurrentBucket()
        .where(regexpContains("meta().id", regex));
    final N1qlQueryResult queryResult = bucket.query(N1qlQuery.simple(deleteStatement));
    if (logger.isDebugEnabled()) {
        logger.debug("Deleted documents using regex {}, result={}", regex, queryResult);
    }
    return queryResult.info().mutationCount();
}
}
String scanSpecQuery = "SELECT " + joinFields(fields) + " FROM `" + bucketName + "` WHERE meta().id >= '$1' LIMIT $2"; N1qlQueryResult queryResult = bucket.query(N1qlQuery.parameterized( scanSpecQuery, JsonArray.from(formatId(table, startkey), recordcount),
/** Verifies that a N1QL INSERT followed by a keyed SELECT round-trips a document. */
@Test
public void shouldExecuteN1ql() {
    String insertStatement =
        "INSERT INTO " + TEST_BUCKET + " (KEY, VALUE) VALUES ('" + ID + "', " + DOCUMENT + ")";
    getBucket().query(N1qlQuery.simple(insertStatement));

    String selectStatement = "SELECT * FROM " + TEST_BUCKET + " USE KEYS '" + ID + "'";
    N1qlQueryResult queryResult = getBucket().query(N1qlQuery.simple(selectStatement));
    Assert.assertTrue(queryResult.parseSuccess());
    Assert.assertTrue(queryResult.finalSuccess());

    List<N1qlQueryRow> rows = queryResult.allRows();
    Assert.assertEquals(1, rows.size());
    Assert.assertEquals(DOCUMENT, rows.get(0).value().get(TEST_BUCKET).toString());
}
/**
 * Post-test cleanup: deletes all documents via N1QL when the query service
 * and a primary index are available; otherwise flushes the whole bucket.
 */
@After
public void clear() {
    boolean n1qlAvailable = getCouchbaseContainer().isIndex()
        && getCouchbaseContainer().isQuery()
        && getCouchbaseContainer().isPrimaryIndex();
    if (n1qlAvailable) {
        String deleteAll = String.format("DELETE FROM `%s`", getBucket().name());
        // STATEMENT_PLUS makes the delete see this test's own mutations.
        N1qlParams params = N1qlParams.build().consistency(ScanConsistency.STATEMENT_PLUS);
        getBucket().query(N1qlQuery.simple(deleteAll, params));
    } else {
        getBucket().bucketManager().flush();
    }
}
throws Exception { String readQuery = "SELECT " + joinFields(fields) + " FROM `" + bucketName + "` USE KEYS [$1]"; N1qlQueryResult queryResult = bucket.query(N1qlQuery.parameterized( readQuery, JsonArray.from(docId),
/**
 * Performs the {@link #delete(String, String)} operation via N1QL ("DELETE").
 *
 * If this option should be used, the "-p couchbase.kv=false" property must be set.
 *
 * @param docId the document ID.
 * @return The result of the operation.
 */
private Status deleteN1ql(final String docId) throws Exception {
    // USE KEYS targets the document directly by key, avoiding an index scan.
    String deleteQuery = "DELETE FROM `" + bucketName + "` USE KEYS [$1]";
    N1qlQueryResult queryResult = bucket.query(N1qlQuery.parameterized(
        deleteQuery,
        JsonArray.from(docId),
        N1qlParams.build().adhoc(adhoc).maxParallelism(maxParallelism)
    ));
    // Both the parse phase and the final execution phase must succeed.
    if (!queryResult.parseSuccess() || !queryResult.finalSuccess()) {
        throw new DBException("Error while parsing N1QL Result. Query: " + deleteQuery
            + ", Errors: " + queryResult.errors());
    }
    return Status.OK;
}
/**
 * Performs the {@link #upsert(String, String, Map)} operation via N1QL ("UPSERT").
 *
 * If this option should be used, the "-p couchbase.upsert=true -p couchbase.kv=false"
 * properties must be set.
 *
 * @param docId the document ID.
 * @param values the values to update the document with.
 * @return The result of the operation.
 */
private Status upsertN1ql(final String docId, final Map<String, ByteIterator> values)
    throws Exception {
    final String upsertQuery = "UPSERT INTO `" + bucketName + "`(KEY,VALUE) VALUES ($1,$2)";
    final JsonArray positionalArgs = JsonArray.from(docId, valuesToJsonObject(values));
    final N1qlParams params = N1qlParams.build().adhoc(adhoc).maxParallelism(maxParallelism);
    final N1qlQueryResult queryResult =
        bucket.query(N1qlQuery.parameterized(upsertQuery, positionalArgs, params));
    if (queryResult.parseSuccess() && queryResult.finalSuccess()) {
        return Status.OK;
    }
    throw new DBException("Error while parsing N1QL Result. Query: " + upsertQuery
        + ", Errors: " + queryResult.errors());
}
/**
 * Performs the {@link #insert(String, String, Map)} operation via N1QL ("INSERT").
 *
 * If this option should be used, the "-p couchbase.kv=false" property must be set.
 *
 * @param docId the document ID.
 * @param values the values to update the document with.
 * @return The result of the operation.
 */
private Status insertN1ql(final String docId, final Map<String, ByteIterator> values)
    throws Exception {
    final String insertQuery = "INSERT INTO `" + bucketName + "`(KEY,VALUE) VALUES ($1,$2)";
    final JsonArray positionalArgs = JsonArray.from(docId, valuesToJsonObject(values));
    final N1qlParams params = N1qlParams.build().adhoc(adhoc).maxParallelism(maxParallelism);
    final N1qlQueryResult queryResult =
        bucket.query(N1qlQuery.parameterized(insertQuery, positionalArgs, params));
    if (queryResult.parseSuccess() && queryResult.finalSuccess()) {
        return Status.OK;
    }
    throw new DBException("Error while parsing N1QL Result. Query: " + insertQuery
        + ", Errors: " + queryResult.errors());
}
/**
 * Performs the {@link #update(String, String, Map)} operation via N1QL ("UPDATE").
 *
 * If this option should be used, the "-p couchbase.kv=false" property must be set.
 *
 * @param docId the document ID.
 * @param values the values to update the document with.
 * @return The result of the operation.
 */
private Status updateN1ql(final String docId, final Map<String, ByteIterator> values)
    throws Exception {
    // The SET clause is built from the supplied fields; the key is bound as $1.
    final String assignments = encodeN1qlFields(values);
    final String updateQuery = "UPDATE `" + bucketName + "` USE KEYS [$1] SET " + assignments;
    final N1qlParams params = N1qlParams.build().adhoc(adhoc).maxParallelism(maxParallelism);
    final N1qlQueryResult queryResult =
        bucket.query(N1qlQuery.parameterized(updateQuery, JsonArray.from(docId), params));
    if (queryResult.parseSuccess() && queryResult.finalSuccess()) {
        return Status.OK;
    }
    throw new DBException("Error while parsing N1QL Result. Query: " + updateQuery
        + ", Errors: " + queryResult.errors());
}
public void createBucket(BucketSettings bucketSetting, UserSettings userSettings, boolean primaryIndex) { ClusterManager clusterManager = getCouchbaseCluster().clusterManager(clusterUsername, clusterPassword); // Insert Bucket BucketSettings bucketSettings = clusterManager.insertBucket(bucketSetting); try { // Insert Bucket user clusterManager.upsertUser(AuthDomain.LOCAL, bucketSetting.name(), userSettings); } catch (Exception e) { logger().warn("Unable to insert user '" + bucketSetting.name() + "', maybe you are using older version"); } if (index) { Bucket bucket = getCouchbaseCluster().openBucket(bucketSettings.name(), bucketSettings.password()); new CouchbaseQueryServiceWaitStrategy(bucket).waitUntilReady(this); if (primaryIndex) { bucket.query(Index.createPrimaryIndex().on(bucketSetting.name())); } } }
/**
 * Runs the given N1QL query against the open bucket.
 *
 * @param query the query to execute.
 * @return the row values of the result, or {@code null} when the query
 *         reported errors or returned no rows (callers must null-check).
 * @throws RuntimeException if the bucket has already been closed.
 */
private List<JsonObject> executeQuery(N1qlQuery query) {
    if (this.bucket.isClosed()) {
        throw new RuntimeException("bucket has been closed before performing the query");
    }
    N1qlQueryResult queryResult = this.bucket.query(query);
    // Guard clauses: preserve the original contract of returning null on
    // a missing result, any error, or an empty row set.
    if (queryResult == null
        || queryResult.info().errorCount() != 0
        || queryResult.info().resultCount() <= 0) {
        return null;
    }
    List<JsonObject> rows = new ArrayList<JsonObject>();
    for (N1qlQueryRow row : queryResult) {
        rows.add(row.value());
    }
    return rows;
}
}
/**
 * Execute native query.
 *
 * @param n1qlQuery the n1ql query string to run as-is.
 * @param em the entity manager metadata (unused here, kept for the API).
 * @return the raw result rows of the query.
 */
public List executeNativeQuery(String n1qlQuery, EntityMetadata em) {
    // REQUEST_PLUS guarantees the query observes all mutations made before it.
    N1qlParams params = N1qlParams.build().consistency(ScanConsistency.REQUEST_PLUS);
    N1qlQueryResult result = bucket.query(N1qlQuery.simple(n1qlQuery, params));
    LOGGER.debug("Executed query : " + n1qlQuery + " on the " + bucket.name() + " Bucket");
    validateQueryResults(n1qlQuery, result);
    return result.allRows();
}
/**
 * Execute query.
 *
 * @param stmt the statement to run.
 * @param em the entity metadata used to map rows back to entities.
 * @return the list of entities built from the result rows.
 */
public List executeQuery(Statement stmt, EntityMetadata em) {
    N1qlQuery query = N1qlQuery.simple(stmt, N1qlParams.build().consistency(ScanConsistency.REQUEST_PLUS));
    N1qlQueryResult list = bucket.query(query);
    LOGGER.debug("Executed query : " + query.toString() + " on the " + bucket.name() + " Bucket");
    validateQueryResults(stmt.toString(), list);
    // The metamodel and entity type are loop-invariant; resolve them once
    // instead of re-fetching them for every result row.
    MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata()
        .getMetamodel(em.getPersistenceUnit());
    EntityType entityType = metaModel.entity(em.getEntityClazz());
    List records = new ArrayList<>();
    for (N1qlQueryRow row : list) {
        // Rows may be wrapped under the schema (bucket) name; unwrap when present.
        JsonObject jsonObj = row.value().containsKey(em.getSchema())
            ? row.value().getObject(em.getSchema())
            : row.value();
        records.add(handler.getEntityFromDocument(em.getEntityClazz(), jsonObj, entityType));
    }
    return records;
}
@Override
public ViewResult doInBucket() {
    // Runs the captured view query through the client; presumably the
    // enclosing template supplies connection handling — confirm with caller.
    return client.query(query);
}
});
/**
 * Seeds the bucket with a single artist document and checks that it is
 * immediately visible to a N1QL query.
 *
 * @param cluster the cluster whose bucket is populated.
 * @return {@code true} when the query finds exactly the inserted document.
 */
public static boolean fillDB(CouchbaseCluster cluster) {
    Bucket bucket = cluster.openBucket(BUCKET_NAME);
    bucket.insert(JsonDocument.create("artist:vincent_van_gogh", VINCENT_VAN_GOGH));
    // REQUEST_PLUS ensures the just-inserted document is indexed before the read.
    String statement = String.format(QUERY, BUCKET_NAME);
    N1qlParams params = N1qlParams.build().consistency(ScanConsistency.REQUEST_PLUS);
    N1qlQueryResult queryResult = bucket.query(N1qlQuery.simple(statement, params));
    bucket.close();
    return queryResult.info().resultCount() == 1;
}
/**
 * Runs a parameterized N1QL statement and converts the rows to document entities.
 *
 * @param n1qlQuery the statement to execute; must not be null.
 * @param params the query parameters; must not be null.
 * @return the converted document entities.
 * @throws NullPointerException when either argument is null.
 */
@Override
public List<DocumentEntity> n1qlQuery(Statement n1qlQuery, JsonObject params) throws NullPointerException {
    requireNonNull(n1qlQuery, "n1qlQuery is required");
    requireNonNull(params, "params is required");
    N1qlQuery parameterized = N1qlQuery.parameterized(n1qlQuery, params);
    return convert(bucket.query(parameterized), database);
}
/**
 * Collects the ids of all documents emitted by the first view of the given
 * design document, forcing fresh (non-stale) view results.
 */
private static List<String> getAllDocumentIds(final DesignDocument designDocument, final Bucket bucket) {
    final String viewName = designDocument.views().get(0).name();
    // stale(...) is a fluent setter, so the chained form is equivalent.
    final ViewQuery query = ViewQuery.from(designDocument.name(), viewName).stale(Stale.FALSE);
    final ViewResult result = bucket.query(query);
    return StreamSupport.stream(result.spliterator(), true)
        .map(row -> row.id())
        .collect(Collectors.toList());
}
/**
 * Queries the given design/view pair and prints every row's id and value
 * to standard output.
 */
public void queryView(Bucket bucket, String design, String view) {
    ViewResult queryResult = bucket.query(ViewQuery.from(design, view));
    for (ViewRow row : queryResult.allRows()) {
        System.out.println("Result view:" + row.id() + ":" + row.value());
    }
}
/** {@inheritDoc} */
@Override
public Map<String, Property<?>> readAllProperties() {
    String bucketName = couchBaseConnection.getFf4jPropertyBucketName();
    N1qlQueryResult queryResult = getPropertyBucket().query(
        N1qlQuery.simple("SELECT * FROM " + bucketName));
    Map<String, Property<?>> allProperties = new HashMap<>();
    for (N1qlQueryRow row : queryResult.allRows()) {
        // Each row is keyed by the bucket name; parse its JSON payload.
        String json = row.value().get(bucketName).toString();
        Property<?> property = PropertyJsonParser.parseProperty(json);
        allProperties.put(property.getName(), property);
    }
    return allProperties;
}
/**
 * Creates a primary index on the given bucket so N1QL queries can run,
 * failing the environment build when index creation does not succeed.
 */
private void createPrimaryIndex(Bucket bucket, CouchbaseBuildDefinition buildDefinition) {
    String bucketName = buildDefinition.getBucketName();
    LOG.debug("Creating primary index in bucket '{}'", bucketName);
    N1qlQueryResult result = bucket.query(
        N1qlQuery.simple(Index.createPrimaryIndex().on(bucket.name())));
    if (!result.finalSuccess()) {
        LOG.error("Failed to create primary index: {}", result.errors());
        throw new EnvironmentBuilderException("Failed to create primary index for " + bucketName);
    }
    LOG.debug("Primary index created in bucket '{}'", bucketName);
}