public Map<String, Object> query(final String indexName, final String fieldKey, final String fieldValue) { if (fieldKey == null || fieldValue.length() == 0) return null; // prepare request BoolQueryBuilder query = QueryBuilders.boolQuery(); query.filter(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery(fieldKey, fieldValue))); SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(query) .setFrom(0) .setSize(1) .setTerminateAfter(1); // get response SearchResponse response = request.execute().actionGet(); // evaluate search result SearchHit[] hits = response.getHits().getHits(); if (hits.length == 0) return null; assert hits.length == 1; Map<String, Object> map = hits[0].getSource(); return map; }
/**
 * Finds workflows eligible for archiving: status COMPLETED or FAILED and not yet
 * flagged as archived. Results are ordered by endTime ascending and capped at
 * archiveSearchBatchSize.
 *
 * @param indexName      the workflow index to search
 * @param archiveTtlDays part of the interface contract; not consulted in this lookup
 * @return ids of the matching workflow documents
 */
@Override
public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) {
    QueryBuilder archivableQuery = QueryBuilders.boolQuery()
            .should(QueryBuilders.termQuery("status", "COMPLETED"))
            .should(QueryBuilders.termQuery("status", "FAILED"))
            .mustNot(QueryBuilders.existsQuery("archived"))
            .minimumShouldMatch(1);

    SearchRequestBuilder searchRequest = elasticSearchClient.prepareSearch(indexName)
            .setTypes("workflow")
            .setQuery(archivableQuery)
            .addSort("endTime", SortOrder.ASC)
            .setSize(archiveSearchBatchSize);

    SearchHits searchHits = searchRequest.execute().actionGet().getHits();
    logger.info("Archive search totalHits - {}", searchHits.getTotalHits());

    List<String> workflowIds = new LinkedList<>();
    for (SearchHit hit : searchHits.getHits()) {
        workflowIds.add(hit.getId());
    }
    return workflowIds;
}
/**
 * Delete documents using a query. Check what would be deleted first with a normal search query!
 * Elasticsearch once provided a native prepareDeleteByQuery method, but this was removed
 * in later versions. Instead, there is a plugin which iterates over search results,
 * see https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugins-delete-by-query.html
 * We simulate the same behaviour here without the need of that plugin.
 *
 * @param indexName name of the index to delete from
 * @param q the query selecting the documents to delete
 * @return delete document count
 */
public int deleteByQuery(String indexName, final QueryBuilder q) {
    // id -> document type, accumulated across all scroll pages
    Map<String, String> ids = new TreeMap<>();
    // FIXME: deprecated, "will be removed in 3.0, you should do a regular scroll instead, ordered by `_doc`"
    @SuppressWarnings("deprecation")
    // SCAN search type: the initial response carries only the scroll id, not hits;
    // the first pass through the loop body therefore adds nothing, which is expected.
    SearchResponse response = elasticsearchClient.prepareSearch(indexName).setSearchType(SearchType.SCAN)
            .setScroll(new TimeValue(60000)).setQuery(q).setSize(100).execute().actionGet();
    while (true) {
        // accumulate the ids here, don't delete them right now to prevent an interference of the delete with the
        // scroll
        for (SearchHit hit : response.getHits().getHits()) {
            ids.put(hit.getId(), hit.getType());
        }
        // NOTE(review): the scroll keep-alive here (600000 ms) differs from the initial
        // request (60000 ms) — presumably intentional headroom, but worth confirming.
        response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();
        // termination: an empty scroll page means the cursor is exhausted
        if (response.getHits().getHits().length == 0) break;
    }
    // delete everything in one bulk pass now that the scroll is finished
    return deleteBulk(indexName, ids);
}
/**
 * Runs the given aggregation against an index, matching all documents. Size is set
 * to 0 so that no hit sources are transferred — only the aggregation results.
 *
 * @param index name of the ES index
 * @param aggr  pre-configured aggregation to execute
 * @return the raw search response carrying the aggregation results
 */
private SearchResponse getAggregationResponse(String index, @SuppressWarnings("rawtypes") AggregationBuilder aggr) {
    SearchRequestBuilder aggregationRequest = this.elasticsearchClient.prepareSearch(index);
    aggregationRequest.setSearchType(SearchType.QUERY_THEN_FETCH);
    aggregationRequest.setQuery(QueryBuilders.matchAllQuery());
    aggregationRequest.setFrom(0);
    aggregationRequest.setSize(0);
    aggregationRequest.addAggregation(aggr);
    return aggregationRequest.execute().actionGet();
}
/**
 * Counts the documents in an index that match a query. Uses size 0 so that no hit
 * sources are fetched — only the total hit count travels over the wire.
 *
 * @param q         the query to match
 * @param indexName the index to count in
 * @return the count of all documents in the index which match the query
 */
private long count(final QueryBuilder q, final String indexName) {
    SearchRequestBuilder countRequest = elasticsearchClient.prepareSearch(indexName)
            .setQuery(q)
            .setSize(0);
    return countRequest.execute().actionGet().getHits().getTotalHits();
}
private Query(final String indexName, QueryBuilder queryBuilder, String order_field, int resultCount) { //TODO: sort data using order_field // prepare request SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(queryBuilder) .setFrom(0) .setSize(resultCount); request.clearRescorers(); // get response SearchResponse response = request.execute().actionGet(); hitCount = (int) response.getHits().getTotalHits(); // evaluate search result SearchHit[] hits = response.getHits().getHits(); this.result = new ArrayList<Map<String, Object>>(hitCount); for (SearchHit hit: hits) { Map<String, Object> map = hit.getSource(); this.result.add(map); } }
/**
 * Looks up RUNNING workflows whose updateTime lies strictly between
 * (now - lastModifiedHoursAgoFrom) and (now - lastModifiedHoursAgoTo). For the window
 * to be non-empty the caller must pass lastModifiedHoursAgoFrom > lastModifiedHoursAgoTo.
 * At most 5000 ids are returned, ordered by updateTime ascending.
 *
 * @return ids of the matching workflow documents
 */
@Override
public List<String> searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, int lastModifiedHoursAgoTo) {
    final DateTime now = new DateTime();
    QueryBuilder runningInWindow = QueryBuilders.boolQuery()
            .must(QueryBuilders.rangeQuery("updateTime").gt(now.minusHours(lastModifiedHoursAgoFrom)))
            .must(QueryBuilders.rangeQuery("updateTime").lt(now.minusHours(lastModifiedHoursAgoTo)))
            .must(QueryBuilders.termQuery("status", "RUNNING"));

    SearchResponse response = elasticSearchClient.prepareSearch(indexName)
            .setTypes("workflow")
            .setQuery(runningInWindow)
            .setSize(5000)
            .addSort("updateTime", SortOrder.ASC)
            .execute().actionGet();

    List<String> workflowIds = new LinkedList<>();
    for (SearchHit hit : response.getHits()) {
        workflowIds.add(hit.getId());
    }
    return workflowIds;
}
// NOTE(review): this span appears to be two unrelated fragments spliced together —
// the if-block opened below is never closed, the search request uses variables
// (query, resultCount, sort_field, default_sort_type, result) that are not declared
// here, and several braces/parentheses are missing. Restore the surrounding method
// before relying on this code.

// Build a "suggest" query that ORs several fuzzy/phrase matching strategies for q.
BoolQueryBuilder suggest = QueryBuilders.boolQuery();
if (q != null && q.length() > 0) {
    suggest.should(QueryBuilders.fuzzyQuery(fieldName, q).fuzziness(Fuzziness.fromEdits(2)));
    suggest.should(QueryBuilders.moreLikeThisQuery(fieldName).like(q));
    suggest.should(QueryBuilders.matchPhrasePrefixQuery(fieldName, q));
    // Only add a wildcard clause when the input actually contains wildcard characters.
    if (q.indexOf('*') >= 0 || q.indexOf('?') >= 0) suggest.should(QueryBuilders.wildcardQuery(fieldName, q));
    // NOTE(review): this sets `query`, not the `suggest` builder constructed above — confirm intent.
    SearchRequestBuilder request = this.elasticsearchClient.prepareSearch(indexName)
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(query)
            .setFrom(0)
            .setSize(resultCount)
            .addSort(
                    SortBuilders.fieldSort(sort_field)
                            .unmappedType(default_sort_type)
    // NOTE(review): the addSort(...) call above is not closed; the statements below
    // are syntactically orphaned as written.
    SearchResponse response = request.execute().actionGet();
    SearchHits rhits = response.getHits();
    SearchHit[] hits = rhits.getHits();
    // Copy each hit's source map into the result container.
    for (SearchHit hit: hits) {
        Map<String, Object> map = hit.getSource();
        result.add(map);
    // NOTE(review): the loop brace is never closed and this call sits outside any
    // visible scope — presumably records the total hit count on the result object.
    result.setHits(rhits.getTotalHits());
// NOTE(review): incomplete fragment — `fsb` is used but never declared (the loop over
// `orders` that should build the field sort is missing), and the method lacks its
// closing braces and return statement. Restore the missing code before use.

/**
 * Executes an IndexQuery: match-all with a post filter derived from the query
 * condition, optional sorting, paging, and id-only results.
 */
@Override
public List<String> query(IndexQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException {
    SearchRequestBuilder srb = client.prepareSearch(indexName);
    srb.setTypes(query.getStore());
    // Filter rather than query: scoring is irrelevant, only membership matters.
    srb.setQuery(QueryBuilders.matchAllQuery());
    srb.setPostFilter(getFilter(query.getCondition(),informations.get(query.getStore())));
    if (!query.getOrder().isEmpty()) {
        List<IndexQuery.OrderEntry> orders = query.getOrder();
        // NOTE(review): `fsb` and `datatype` are undeclared here — the per-order loop
        // that constructs the FieldSortBuilder appears to have been lost.
        fsb.unmappedType(convertToEsDataType(datatype));
        srb.addSort(fsb);
    srb.setFrom(0);
    // Page size: the query's own limit, or the configured maximum as a safety cap.
    if (query.hasLimit()) srb.setSize(query.getLimit());
    else srb.setSize(maxResultsSize);
    srb.setNoFields();
    SearchResponse response = srb.execute().actionGet();
    log.debug("Executed query [{}] in {} ms", query.getCondition(), response.getTookInMillis());
    SearchHits hits = response.getHits();
    // Warn when the uncapped result set was truncated by the safety cap.
    if (!query.hasLimit() && hits.totalHits() >= maxResultsSize)
        log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query);
    List<String> result = new ArrayList<String>(hits.hits().length);
    for (SearchHit hit : hits) {
        result.add(hit.id());
@Override public Iterable<RawQuery.Result<String>> query(RawQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { SearchRequestBuilder srb = client.prepareSearch(indexName); srb.setTypes(query.getStore()); srb.setQuery(QueryBuilders.queryStringQuery(query.getQuery())); srb.setFrom(query.getOffset()); if (query.hasLimit()) srb.setSize(query.getLimit()); else srb.setSize(maxResultsSize); srb.setNoFields(); //srb.setExplain(true); SearchResponse response = srb.execute().actionGet(); log.debug("Executed query [{}] in {} ms", query.getQuery(), response.getTookInMillis()); SearchHits hits = response.getHits(); if (!query.hasLimit() && hits.totalHits() >= maxResultsSize) log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query); List<RawQuery.Result<String>> result = new ArrayList<RawQuery.Result<String>>(hits.hits().length); for (SearchHit hit : hits) { result.add(new RawQuery.Result<String>(hit.id(),hit.getScore())); } return result; }
/**
 * Counts documents in an index, optionally restricted to a trailing time window.
 *
 * @param index               the index to count in
 * @param histogram_timefield the date field used for the time-window restriction
 * @param millis              window length in milliseconds; values &lt;= 0 count everything
 * @return number of matching documents, or 0 if the search fails
 */
public long count(final String index, final String histogram_timefield, final long millis) {
    try {
        // <= 0 means "no window": a constant-score match-all; otherwise restrict the
        // time field to [now - millis, ...).
        QueryBuilder query = millis <= 0
                ? QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery())
                : QueryBuilders.rangeQuery(histogram_timefield).from(new Date(System.currentTimeMillis() - millis));
        SearchResponse response = elasticsearchClient.prepareSearch(index)
                .setSize(0) // count only; no hit sources
                .setQuery(query)
                .execute()
                .actionGet();
        return response.getHits().getTotalHits();
    } catch (Exception e) {
        // FIX: narrowed from Throwable — catching Throwable also swallowed Errors
        // (OutOfMemoryError etc.) that must not be masked as a zero count.
        DAO.severe(e);
        return 0;
    }
}
// NOTE(review): incomplete fragment — `expression`, `sortBuilder`, `logIndexPrefix`,
// `LOG_DOC_TYPE` and `objectMapper` are defined outside this span, and the lambda at
// the end is cut off (no catch block or closing braces). Restore the surrounding
// method before use.

// Combine the structured filter from the expression with a match-all query string.
QueryBuilder queryBuilder = expression.getFilterBuilder();
BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder);
QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*");
BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery);
// Search every log index matching the prefix, restricted to the log document type.
final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(logIndexPrefix + "*")
        .setQuery(fq)
        .setTypes(LOG_DOC_TYPE)
        .addSort(sortBuilder);
SearchResponse response = srb.execute().actionGet();
// Deserialize each hit's source JSON into a TaskExecLog.
return Arrays.stream(response.getHits().getHits())
        .map(hit -> {
            String source = hit.getSourceAsString();
            try {
                return objectMapper.readValue(source, TaskExecLog.class);
/** * Get all the indexed documents (no paginated results). Results are not sorted. */ public List<SearchHit> getDocuments(IndexType indexType) { SearchRequestBuilder req = SHARED_NODE.client().prepareSearch(indexType.getIndex()).setTypes(indexType.getType()).setQuery(matchAllQuery()); EsUtils.optimizeScrollRequest(req); req.setScroll(new TimeValue(60000)) .setSize(100); SearchResponse response = req.get(); List<SearchHit> result = newArrayList(); while (true) { Iterables.addAll(result, response.getHits()); response = SHARED_NODE.client().prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); // Break condition: No hits are returned if (response.getHits().getHits().length == 0) { break; } } return result; }
/**
 * Queues update requests on the given bulk builder for the document(s) identified by
 * the primary-key value. When the mapping defines an _id the update targets it
 * directly; otherwise the matching ES document ids are resolved first via a term
 * search on the mapped pk field (up to MAX_BATCH_SIZE matches).
 */
public void append4Update(BulkRequestBuilder bulkRequestBuilder, ESMapping mapping, Object pkVal,
                          Map<String, Object> esFieldData) {
    if (mapping.get_id() == null) {
        // No _id mapping: look up the ES document ids through the pk field.
        SearchResponse matches = transportClient.prepareSearch(mapping.get_index())
            .setTypes(mapping.get_type())
            .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
            .setSize(MAX_BATCH_SIZE)
            .get();
        for (SearchHit match : matches.getHits()) {
            bulkRequestBuilder
                .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), match.getId())
                    .setDoc(esFieldData));
        }
    } else {
        // The pk value doubles as the document _id.
        bulkRequestBuilder
            .add(transportClient.prepareUpdate(mapping.get_index(), mapping.get_type(), pkVal.toString())
                .setDoc(esFieldData));
    }
}
public Map<String, Object> query(final String indexName, final String typeName, final QueryBuilder query) { SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName); if (typeName != null) request.setTypes(typeName); request .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(query) .setFrom(0) .setSize(1).setTerminateAfter(1); // get response SearchResponse response = request.execute().actionGet(); // evaluate search result //long totalHitCount = response.getHits().getTotalHits(); SearchHit[] hits = response.getHits().getHits(); if (hits.length == 0) return null; assert hits.length == 1; Map<String, Object> map = hits[0].getSourceAsMap(); if (!map.containsKey("id")) map.put("id", hits[0].getId()); if (!map.containsKey("type")) map.put("type", hits[0].getType()); return map; }
/**
 * Runs a combined structured + free-text search and returns one page of matching
 * document ids together with the total hit count.
 *
 * @param indexName       index to search
 * @param structuredQuery optional structured expression; empty means match-all
 * @param start           offset of the first result
 * @param size            page size
 * @param sortOptions     optional sort specifications applied in order
 * @param freeTextQuery   query-string applied alongside the structured filter
 * @param docType         document type to restrict to
 * @return the page of ids plus the total hit count
 * @throws ApplicationException with Code.BACKEND_ERROR when the structured query cannot be parsed
 */
private SearchResult<String> search(String indexName, String structuredQuery, int start, int size,
                                    List<String> sortOptions, String freeTextQuery, String docType) {
    try {
        // Default to match-all; narrow it when a structured expression was supplied.
        QueryBuilder structured = QueryBuilders.matchAllQuery();
        if (StringUtils.isNotEmpty(structuredQuery)) {
            structured = Expression.fromString(structuredQuery).getFilterBuilder();
        }
        BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(structured);
        QueryStringQueryBuilder freeText = QueryBuilders.queryStringQuery(freeTextQuery);
        BoolQueryBuilder combined = QueryBuilders.boolQuery().must(freeText).must(filterQuery);

        final SearchRequestBuilder searchRequest = elasticSearchClient.prepareSearch(indexName)
                .setQuery(combined)
                .setTypes(docType)
                .storedFields("_id") // ids only; no document sources
                .setFrom(start)
                .setSize(size);
        if (sortOptions != null) {
            for (String sortOption : sortOptions) {
                addSortOptionToSearchRequest(searchRequest, sortOption);
            }
        }

        SearchResponse response = searchRequest.get();
        LinkedList<String> ids = new LinkedList<>();
        for (SearchHit hit : response.getHits()) {
            ids.add(hit.getId());
        }
        return new SearchResult<String>(response.getHits().getTotalHits(), ids);
    } catch (ParserException e) {
        throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e);
    }
}
/**
 * Asserts that the user is (or is not, per {@code isMember}) indexed as a member of
 * the given organization: membership means exactly one document matches both the
 * organization uuid and the user uuid.
 */
private void assertMemberInIndex(String organizationUuid, UserDto user, boolean isMember) {
    SearchRequestBuilder membershipSearch = es.client().prepareSearch(UserIndexDefinition.INDEX_TYPE_USER)
        .setQuery(boolQuery()
            .must(termQuery(FIELD_ORGANIZATION_UUIDS, organizationUuid))
            .must(termQuery(FIELD_UUID, user.getUuid())));
    SearchHit[] hits = membershipSearch.get().getHits().getHits();
    if (isMember) {
        assertThat(hits).hasSize(1);
    } else {
        assertThat(hits).isEmpty();
    }
}
/**
 * Issues a bulk delete for the given object and, when its type is a configured
 * parent type, also deletes every child document found via a has_parent lookup.
 */
private void deleteBulkRequest(String objectId, String index, String type, String routing, String parent) {
    if (logger.isTraceEnabled()) {
        logger.trace("bulkDeleteRequest - objectId: {} - index: {} - type: {} - routing: {} - parent: {}",
                objectId, index, type, routing, parent);
    }
    if (definition.getParentTypes() != null && definition.getParentTypes().contains(type)) {
        // Children reference this document as their parent; queue their deletion too.
        QueryBuilder childrenOf = QueryBuilders.hasParentQuery(type,
                QueryBuilders.termQuery(MongoDBRiver.MONGODB_ID_FIELD, objectId));
        SearchResponse children = esClient.prepareSearch(index)
                .setQuery(childrenOf)
                .setRouting(routing)
                .addField(MongoDBRiver.MONGODB_ID_FIELD)
                .execute().actionGet();
        for (SearchHit child : children.getHits().getHits()) {
            getBulkProcessor(index, child.getType()).deleteBulkRequest(child.getId(), routing, objectId);
        }
    }
    getBulkProcessor(index, type).deleteBulkRequest(objectId, routing, parent);
}
// NOTE(review): incomplete fragment — the method signature preceding this parameter
// list and the closing braces are missing; `entry`, `fields`, `table`, `indexKey`,
// `recordcount` and `client` are declared outside this view. Restore the surrounding
// method before use.
Vector<HashMap<String, ByteIterator>> result) {
    try {
        // Range scan: all documents whose _id is >= startkey, up to recordcount hits.
        final RangeQueryBuilder rangeQuery = rangeQuery("_id").gte(startkey);
        final SearchResponse response = client.prepareSearch(indexKey)
                .setTypes(table)
                .setQuery(rangeQuery)
                .setSize(recordcount)
                .execute()
                .actionGet();
        for (SearchHit hit : response.getHits()) {
            entry = new HashMap<>(fields.size());
            for (String field : fields) {
                // Copy only the requested fields out of the hit source.
                entry.put(field, new StringByteIterator((String) hit.getSource().get(field)));