/**
 * Count the documents in the search index which match the given query.
 *
 * @param q the query to match documents against
 * @param indexName the name of the index to count in
 * @return the number of documents in the index matching the query
 */
private long count(final QueryBuilder q, final String indexName) {
    // size 0: we only need the total hit count, not the hits themselves
    final SearchResponse countResponse = elasticsearchClient
            .prepareSearch(indexName)
            .setQuery(q)
            .setSize(0)
            .execute()
            .actionGet();
    return countResponse.getHits().getTotalHits();
}
/**
 * Execute the search and block for the response, wrapping any failure in an
 * IllegalStateException that carries this request's description and the cause.
 * When trace logging is enabled, the elapsed time is recorded via the profiler.
 */
@Override
public SearchResponse get() {
    final Profiler profiler = Profiler.createIfTrace(EsClient.LOGGER).start();
    try {
        return super.execute().actionGet();
    } catch (Exception e) {
        // preserve the cause and identify the failing request in the message
        throw new IllegalStateException(String.format("Fail to execute %s", toString()), e);
    } finally {
        // only pay for toString() when tracing is actually on
        if (profiler.isTraceEnabled()) {
            profiler.stopTrace(toString());
        }
    }
}
/**
 * Count the documents in the given index whose {@code provider_hash} field
 * matches the supplied value.
 *
 * @param index the name of the index to search
 * @param provider_hash the provider hash value to match
 * @return the number of matching documents, or 0 if the search fails
 */
public long countLocal(final String index, final String provider_hash) {
    try {
        SearchResponse response = elasticsearchClient.prepareSearch(index)
                .setSize(0) // only the total count is needed, not the hits
                .setQuery(QueryBuilders.matchQuery("provider_hash", provider_hash))
                .execute()
                .actionGet();
        return response.getHits().getTotalHits();
    } catch (Exception e) {
        // best-effort: log and report zero rather than propagate search failures.
        // Narrowed from Throwable so JVM Errors (e.g. OutOfMemoryError) are not swallowed.
        DAO.severe(e);
        return 0;
    }
}
/**
 * Count documents in the given index, optionally restricted to a recent time
 * window on the given time field.
 *
 * @param index the name of the index to search
 * @param histogram_timefield the date field used for the time-window restriction
 * @param millis window length in milliseconds; if &lt;= 0 all documents are counted,
 *               otherwise only documents with the time field within the last
 *               {@code millis} milliseconds
 * @return the number of matching documents, or 0 if the search fails
 */
public long count(final String index, final String histogram_timefield, final long millis) {
    try {
        SearchResponse response = elasticsearchClient.prepareSearch(index)
                .setSize(0) // only the total count is needed, not the hits
                .setQuery(millis <= 0
                        ? QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery())
                        : QueryBuilders.rangeQuery(histogram_timefield)
                                .from(new Date(System.currentTimeMillis() - millis)))
                .execute()
                .actionGet();
        return response.getHits().getTotalHits();
    } catch (Exception e) {
        // best-effort: log and report zero rather than propagate search failures.
        // Narrowed from Throwable so JVM Errors (e.g. OutOfMemoryError) are not swallowed.
        DAO.severe(e);
        return 0;
    }
}
try { final RangeQueryBuilder rangeQuery = rangeQuery("_id").gte(startkey); final SearchResponse response = client.prepareSearch(indexKey) .setTypes(table) .setQuery(rangeQuery) .setSize(recordcount) .execute() .actionGet(); for (SearchHit hit : response.getHits()) { entry = new HashMap<>(fields.size()); for (String field : fields) {
/**
 * Run a pre-configured aggregation against one index and return the raw response.
 *
 * @param index Name of ES index
 * @param aggr Pre-configured AggregationBuilder object
 * @return the search response carrying the aggregation result (no hits requested)
 */
private SearchResponse getAggregationResponse(String index, @SuppressWarnings("rawtypes") AggregationBuilder aggr) {
    SearchRequestBuilder request = this.elasticsearchClient.prepareSearch(index)
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(QueryBuilders.matchAllQuery()) // match everything; only the aggregation matters
            .setFrom(0)
            .setSize(0) // suppress hits entirely
            .addAggregation(aggr);
    return request.execute().actionGet();
}
private Query(final String indexName, QueryBuilder queryBuilder, String order_field, int resultCount) { //TODO: sort data using order_field // prepare request SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(queryBuilder) .setFrom(0) .setSize(resultCount); request.clearRescorers(); // get response SearchResponse response = request.execute().actionGet(); hitCount = (int) response.getHits().getTotalHits(); // evaluate search result SearchHit[] hits = response.getHits().getHits(); this.result = new ArrayList<Map<String, Object>>(hitCount); for (SearchHit hit: hits) { Map<String, Object> map = hit.getSource(); this.result.add(map); } }
/**
 * Find workflows eligible for archiving: status COMPLETED or FAILED and not
 * yet flagged as archived, oldest endTime first, limited to one batch.
 *
 * @param indexName the workflow index to search
 * @param archiveTtlDays not used in this query — presumably applied by the caller; TODO confirm
 * @return ids of up to archiveSearchBatchSize archivable workflows
 */
@Override
public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) {
    // terminal status (at least one of COMPLETED/FAILED) and no "archived" field yet
    QueryBuilder query = QueryBuilders.boolQuery()
            .should(QueryBuilders.termQuery("status", "COMPLETED"))
            .should(QueryBuilders.termQuery("status", "FAILED"))
            .mustNot(QueryBuilders.existsQuery("archived"))
            .minimumShouldMatch(1);

    SearchResponse response = elasticSearchClient.prepareSearch(indexName)
            .setTypes("workflow")
            .setQuery(query)
            .addSort("endTime", SortOrder.ASC)
            .setSize(archiveSearchBatchSize)
            .execute()
            .actionGet();

    SearchHits hits = response.getHits();
    logger.info("Archive search totalHits - {}", hits.getTotalHits());
    return Arrays.stream(hits.getHits())
            .map(hit -> hit.getId())
            .collect(Collectors.toCollection(LinkedList::new));
}
@Override public Iterable<RawQuery.Result<String>> query(RawQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { SearchRequestBuilder srb = client.prepareSearch(indexName); srb.setTypes(query.getStore()); srb.setQuery(QueryBuilders.queryStringQuery(query.getQuery())); srb.setFrom(query.getOffset()); if (query.hasLimit()) srb.setSize(query.getLimit()); else srb.setSize(maxResultsSize); srb.setNoFields(); //srb.setExplain(true); SearchResponse response = srb.execute().actionGet(); log.debug("Executed query [{}] in {} ms", query.getQuery(), response.getTookInMillis()); SearchHits hits = response.getHits(); if (!query.hasLimit() && hits.totalHits() >= maxResultsSize) log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query); List<RawQuery.Result<String>> result = new ArrayList<RawQuery.Result<String>>(hits.hits().length); for (SearchHit hit : hits) { result.add(new RawQuery.Result<String>(hit.id(),hit.getScore())); } return result; }
/**
 * Find RUNNING workflows whose updateTime lies inside the window
 * [now - lastModifiedHoursAgoFrom, now - lastModifiedHoursAgoTo], ordered by
 * updateTime ascending.
 *
 * @param lastModifiedHoursAgoFrom lower bound of the window, in hours before now
 * @param lastModifiedHoursAgoTo upper bound of the window, in hours before now
 * @return the matching workflow ids (at most 5000)
 */
@Override
public List<String> searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, int lastModifiedHoursAgoTo) {
    DateTime now = new DateTime();
    QueryBuilder query = QueryBuilders.boolQuery()
            .must(QueryBuilders.rangeQuery("updateTime").gt(now.minusHours(lastModifiedHoursAgoFrom)))
            .must(QueryBuilders.rangeQuery("updateTime").lt(now.minusHours(lastModifiedHoursAgoTo)))
            .must(QueryBuilders.termQuery("status", "RUNNING"));

    SearchResponse response = elasticSearchClient.prepareSearch(indexName)
            .setTypes("workflow")
            .setQuery(query)
            .setSize(5000) // hard upper bound on returned workflow ids
            .addSort("updateTime", SortOrder.ASC)
            .execute()
            .actionGet();

    return StreamSupport.stream(response.getHits().spliterator(), false)
            .map(hit -> hit.getId())
            .collect(Collectors.toCollection(LinkedList::new));
}
final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(logIndexPrefix + "*") .setQuery(fq) .setTypes(LOG_DOC_TYPE) .addSort(sortBuilder); SearchResponse response = srb.execute().actionGet(); return Arrays.stream(response.getHits().getHits()) .map(hit -> { String source = hit.getSourceAsString();
public Map<String, Object> query(final String indexName, final String fieldKey, final String fieldValue) { if (fieldKey == null || fieldValue.length() == 0) return null; // prepare request BoolQueryBuilder query = QueryBuilders.boolQuery(); query.filter(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery(fieldKey, fieldValue))); SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(query) .setFrom(0) .setSize(1) .setTerminateAfter(1); // get response SearchResponse response = request.execute().actionGet(); // evaluate search result SearchHit[] hits = response.getHits().getHits(); if (hits.length == 0) return null; assert hits.length == 1; Map<String, Object> map = hits[0].getSource(); return map; }
/**
 * Delete documents using a query. Check what would be deleted first with a normal search query!
 * Elasticsearch once provided a native prepareDeleteByQuery method, but this was removed
 * in later versions. Instead, there is a plugin which iterates over search results,
 * see https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugins-delete-by-query.html
 * We simulate the same behaviour here without the need of that plugin.
 *
 * @param indexName the name of the index to delete from
 * @param q the query selecting the documents to delete
 * @return delete document count
 */
public int deleteByQuery(String indexName, final QueryBuilder q) {
    // document id -> document type, collected over the whole scroll before any deletion
    Map<String, String> ids = new TreeMap<>();
    // FIXME: deprecated, "will be removed in 3.0, you should do a regular scroll instead, ordered by `_doc`"
    @SuppressWarnings("deprecation")
    // open a SCAN scroll over the matching documents, 100 per page, 60s keep-alive
    SearchResponse response = elasticsearchClient.prepareSearch(indexName).setSearchType(SearchType.SCAN)
            .setScroll(new TimeValue(60000)).setQuery(q).setSize(100).execute().actionGet();
    while (true) {
        // accumulate the ids here, don't delete them right now to prevent an interference of the delete with the
        // scroll
        for (SearchHit hit : response.getHits().getHits()) {
            ids.put(hit.getId(), hit.getType());
        }
        // fetch the next scroll page (600s keep-alive while we drain the cursor)
        response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();
        // termination: an empty page means the scroll is exhausted
        if (response.getHits().getHits().length == 0) break;
    }
    // perform the actual deletion in one bulk request now the scroll is finished
    return deleteBulk(indexName, ids);
}
public List<Map<String, Object>> queryWithConstraints(final String indexName, final String fieldName, final String fieldValue, final Map<String, String> constraints, boolean latest) throws IOException { SearchRequestBuilder request = this.elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setFrom(0); BoolQueryBuilder bFilter = QueryBuilders.boolQuery(); bFilter.filter(QueryBuilders.constantScoreQuery(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery(fieldName, fieldValue)))); for (Object o : constraints.entrySet()) { @SuppressWarnings("rawtypes") Map.Entry entry = (Map.Entry) o; bFilter.filter(QueryBuilders.constantScoreQuery(QueryBuilders.termQuery((String) entry.getKey(), ((String) entry.getValue()).toLowerCase()))); } request.setQuery(bFilter); // get response SearchResponse response = request.execute().actionGet(); // evaluate search result ArrayList<Map<String, Object>> result = new ArrayList<Map<String, Object>>(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit: hits) { Map<String, Object> map = hit.getSource(); result.add(map); } return result; }
public LinkedHashMap<String, Long> fullDateHistogram(final String indexName, int timezoneOffset, String histogram_timefield) { // prepare request SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery())) .setFrom(0) .setSize(0); request.clearRescorers(); request.addAggregation(AggregationBuilders.dateHistogram(histogram_timefield).field(histogram_timefield).timeZone("UTC").minDocCount(1).interval(DateHistogramInterval.DAY)); // get response SearchResponse response = request.execute().actionGet(); // evaluate date histogram: InternalHistogram<InternalHistogram.Bucket> dateCounts = response.getAggregations().get(histogram_timefield); LinkedHashMap<String, Long> list = new LinkedHashMap<>(); for (InternalHistogram.Bucket bucket : dateCounts.getBuckets()) { Calendar cal = Calendar.getInstance(DateParser.UTCtimeZone); org.joda.time.DateTime k = (org.joda.time.DateTime) bucket.getKey(); cal.setTime(k.toDate()); cal.add(Calendar.MINUTE, -timezoneOffset); long docCount = bucket.getDocCount(); list.put(DateParser.dayDateFormat.format(cal.getTime()), docCount); } return list; }
/**
 * Look up all event executions recorded for the given event name, ordered by
 * creation time ascending.
 *
 * @param event the event name to filter on
 * @return the matching event executions
 * @throws ApplicationException with Code.BACKEND_ERROR when the search fails
 */
@Override
public List<EventExecution> getEventExecutions(String event) {
    try {
        // NOTE(review): the filter expression is built by string concatenation; an
        // event name containing a quote would break it — confirm inputs are trusted.
        Expression expression = Expression.fromString("event='" + event + "'");
        BoolQueryBuilder eventFilter = QueryBuilders.boolQuery().must(expression.getFilterBuilder());

        // combine a match-everything string query with the event filter
        BoolQueryBuilder query = QueryBuilders.boolQuery()
                .must(QueryBuilders.queryStringQuery("*"))
                .must(eventFilter);

        final SearchRequestBuilder search = elasticSearchClient.prepareSearch(logIndexPrefix + "*")
                .setQuery(query)
                .setTypes(EVENT_DOC_TYPE)
                .addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC));
        return mapEventExecutionsResponse(search.execute().actionGet());
    } catch (Exception e) {
        logger.error("Failed to get executions for event: {}", event, e);
        throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e);
    }
}
@Override public List<String> query(IndexQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { SearchRequestBuilder srb = client.prepareSearch(indexName); srb.setTypes(query.getStore()); srb.setQuery(QueryBuilders.matchAllQuery()); srb.setPostFilter(getFilter(query.getCondition(),informations.get(query.getStore()))); if (!query.getOrder().isEmpty()) { if (query.hasLimit()) srb.setSize(query.getLimit()); else srb.setSize(maxResultsSize); srb.setNoFields(); SearchResponse response = srb.execute().actionGet(); log.debug("Executed query [{}] in {} ms", query.getCondition(), response.getTookInMillis()); SearchHits hits = response.getHits(); if (!query.hasLimit() && hits.totalHits() >= maxResultsSize) log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query);
/**
 * Look up all messages recorded for the given queue name, ordered by creation
 * time ascending.
 *
 * @param queue the queue name to filter on
 * @return the matching messages
 * @throws ApplicationException with Code.BACKEND_ERROR when the search fails
 */
@Override
public List<Message> getMessages(String queue) {
    try {
        // NOTE(review): the filter expression is built by string concatenation; a
        // queue name containing a quote would break it — confirm inputs are trusted.
        Expression expression = Expression.fromString("queue='" + queue + "'");
        BoolQueryBuilder queueFilter = QueryBuilders.boolQuery().must(expression.getFilterBuilder());

        // combine a match-everything string query with the queue filter
        BoolQueryBuilder query = QueryBuilders.boolQuery()
                .must(QueryBuilders.queryStringQuery("*"))
                .must(queueFilter);

        final SearchRequestBuilder search = elasticSearchClient.prepareSearch(logIndexPrefix + "*")
                .setQuery(query)
                .setTypes(MSG_DOC_TYPE)
                .addSort(SortBuilders.fieldSort("created").order(SortOrder.ASC));
        return mapGetMessagesResponse(search.execute().actionGet());
    } catch (Exception e) {
        logger.error("Failed to get messages for queue: {}", queue, e);
        throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e);
    }
}
SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(queryBuilder) .setFrom(0) .setSize(resultCount); request.clearRescorers(); if (resultCount > 0) { SearchResponse response = request.execute().actionGet(); hitCount = (int) response.getHits().getTotalHits(); SearchHit[] hits = response.getHits().getHits(); this.result = new ArrayList<Map<String, Object>>(hitCount); for (SearchHit hit: hits) {