/**
 * Lists distinct values of the given field across the rule and active-rule indices.
 * When {@code query} is non-null, buckets are limited to terms containing it
 * (the query text is regex-escaped before being wrapped in ".*…*").
 *
 * @param fields field to aggregate on
 * @param query  optional substring filter on the term values
 * @param size   maximum number of terms to return
 * @return the bucket keys of the terms aggregation
 */
public List<String> terms(String fields, @Nullable String query, int size) {
  TermsAggregationBuilder agg = AggregationBuilders.terms(AGGREGATION_NAME)
    .field(fields)
    .size(size)
    .minDocCount(1); // skip terms with no matching documents
  if (query != null) {
    // Substring match: escape user input so regex metacharacters are literal.
    agg.includeExclude(new IncludeExclude(".*" + escapeSpecialRegexChars(query) + ".*", null));
  }
  // size 0: we only need the aggregation, not the hits themselves.
  SearchResponse esResponse = client
    .prepareSearch(INDEX_TYPE_RULE, INDEX_TYPE_ACTIVE_RULE)
    .setQuery(matchAllQuery())
    .setSize(0)
    .addAggregation(agg)
    .get();
  return EsUtils.termsKeys(esResponse.getAggregations().get(AGGREGATION_NAME));
}
// NOTE(review): incomplete fragment collapsed onto one line — the enclosing method,
// the `request` declaration and the loop/branch closings are not visible here.
// It configures a QUERY_THEN_FETCH search, adds either a UTC date histogram
// (when `field` is the histogram time field) or a terms aggregation, then walks
// hits and aggregation buckets. CAUTION: in this collapsed form the inline
// `// this has special handling below` comment swallows all code after it on the
// line; restore original line breaks before compiling.
.setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(queryBuilder) .setFrom(0) .setSize(resultCount); request.clearRescorers(); if (field.equals(histogram_timefield)) { addTimeHistogram = true; request.addAggregation(AggregationBuilders.dateHistogram(histogram_timefield).field(histogram_timefield).timeZone("UTC").minDocCount(0).interval(dateHistogrammInterval)); } else { request.addAggregation(AggregationBuilders.terms(field).field(field).minDocCount(1).size(aggregationLimit)); SearchResponse response = request.execute().actionGet(); hitCount = (int) response.getHits().getTotalHits(); SearchHit[] hits = response.getHits().getHits(); this.result = new ArrayList<Map<String, Object>>(hitCount); for (SearchHit hit: hits) { for (String field: aggregationFields) { if (field.equals(histogram_timefield)) continue; // this has special handling below Terms fieldCounts = response.getAggregations().get(field); List<Bucket> buckets = fieldCounts.getBuckets(); InternalHistogram<InternalHistogram.Bucket> dateCounts = response.getAggregations().get(histogram_timefield); ArrayList<Map.Entry<String, AtomicLong>> list = new ArrayList<>(); for (InternalHistogram.Bucket bucket : dateCounts.getBuckets()) {
// NOTE(review): garbled/elided fragment — lines were dropped during extraction:
// the filter aggregation's bool query appears unfinished, `branchBucket` is used
// without a visible declaration, and parentheses do not balance. Intent (to be
// confirmed against the full file): per project UUID, aggregate unresolved,
// non-hotspot issues assigned to `assigneeUuid` by branch, counting issues and
// taking the max functional creation date per branch.
.setQuery( boolQuery() .mustNot(existsQuery(FIELD_ISSUE_RESOLUTION)) .filter(termQuery(FIELD_ISSUE_ASSIGNEE_UUID, assigneeUuid)) .mustNot(termQuery(FIELD_ISSUE_TYPE, RuleType.SECURITY_HOTSPOT.name()))) .setSize(0); IntStream.range(0, projectUuids.size()).forEach(i -> { String projectUuid = projectUuids.get(i); long from = froms.get(i); request .addAggregation(AggregationBuilders .filter(projectUuid, boolQuery() .subAggregation( AggregationBuilders.terms("branchUuid").field(FIELD_ISSUE_BRANCH_UUID) .subAggregation( AggregationBuilders.count(COUNT).field(FIELD_ISSUE_KEY)) .subAggregation( AggregationBuilders.max("maxFuncCreatedAt").field(FIELD_ISSUE_FUNC_CREATED_AT)))); long lastIssueDate = (long) ((InternalMax) branchBucket.getAggregations().get("maxFuncCreatedAt")).getValue(); return Stream.of(new ProjectStatistics(branchBucket.getKeyAsString(), count, lastIssueDate)); }))
/**
 * Registers the rule-status facet when the caller requested it in the search options.
 * The facet is a terms aggregation on the status field, limited to every status
 * except REMOVED, wrapped in the sticky-facet filter and a global aggregation.
 *
 * @param options            search options; consulted for the requested facets
 * @param aggregations       mutable map the facet aggregation is added to
 * @param stickyFacetBuilder provides the sticky filter excluding this facet's own field
 */
private static void addStatusFacetIfNeeded(SearchOptions options, Map<String, AggregationBuilder> aggregations, StickyFacetBuilder stickyFacetBuilder) {
  if (!options.getFacets().contains(FACET_STATUSES)) {
    return; // facet not requested — nothing to add
  }
  BoolQueryBuilder facetFilter = stickyFacetBuilder.getStickyFacetFilter(FIELD_RULE_STATUS);
  // Terms on the status field, explicitly excluding REMOVED and sized to the
  // full set of remaining statuses so no bucket is ever truncated.
  AggregationBuilder statusTerms = AggregationBuilders
    .terms(FACET_STATUSES)
    .field(FIELD_RULE_STATUS)
    .includeExclude(new IncludeExclude(Joiner.on('|').join(ALL_STATUSES_EXCEPT_REMOVED), RuleStatus.REMOVED.toString()))
    .size(ALL_STATUSES_EXCEPT_REMOVED.size());
  AggregationBuilder statuses = AggregationBuilders
    .filter(FACET_STATUSES + "_filter", facetFilter)
    .subAggregation(statusTerms);
  // global(): compute the facet over the whole index, independent of the main query.
  aggregations.put(FACET_STATUSES, AggregationBuilders.global(FACET_STATUSES).subAggregation(statuses));
}
// NOTE(review): two spliced fragments — the stray `});` and the truncated tail show
// lines were elided between them. First part: a match-all-filtered terms aggregation
// on a plain field (default size 50 when `size` <= 0). Second part: the same shape
// but the terms are produced by an inline painless script and filtered to documents
// where `field` exists. Confirm boundaries against the full file.
return AggregationBuilders.filter(AGG_FILTER, QueryBuilders.matchAllQuery()) .subAggregation(AggregationBuilders.terms(AGG_TERMS) .field(field) .size(size > 0 ? size : 50) .order(termsOrder)); final BoolQueryBuilder filterQuery = QueryBuilders.boolQuery(); filterQuery.must(QueryBuilders.existsQuery(field)); }); return AggregationBuilders.filter(AGG_FILTER, filterQuery) .subAggregation(AggregationBuilders.terms(AGG_TERMS) .script(new Script(ScriptType.INLINE, "painless", scriptStringBuilder.toString(), Collections.emptyMap())) .size(size > 0 ? size : 50)
/**
 * Builds the per-class aggregation for a classifier: documents are grouped by the
 * classifier field, and each class bucket carries the average and the sum of the
 * corresponding "<classifier>_probability" field (doc counts come with the buckets).
 *
 * @param classifierName name of the classifier field to group by
 * @return terms aggregation "by_class" with "avg_probability" and "sum_probability" sub-aggregations
 */
private static TermsBuilder getClassifierAggregationBuilder(String classifierName) {
  final String probabilityField = classifierName + "_probability";
  TermsBuilder byClass = AggregationBuilders.terms("by_class").field(classifierName);
  byClass.subAggregation(AggregationBuilders.avg("avg_probability").field(probabilityField));
  byClass.subAggregation(AggregationBuilders.sum("sum_probability").field(probabilityField));
  return byClass;
}
// NOTE(review): truncated fragment — the method opens here but the tail is garbled
// (`statistics`, `metric`, `value`, `response` have no visible declarations and a
// stray `});` remains), so the response-processing half is missing. It builds three
// aggregations: project count per language, ncloc per language (nested distribution),
// and total ncloc (nested measures filtered on the ncloc key).
// NOTE(review): the FIELD_NCLOC_LANGUAGE_DISTRIBUTION + "_terms" terms aggregation has
// no visible .field(...) — looks like an elided line; confirm against the full file.
public ProjectMeasuresStatistics searchTelemetryStatistics() { SearchRequestBuilder request = client .prepareSearch(INDEX_TYPE_PROJECT_MEASURES) .setFetchSource(false) .setSize(0); BoolQueryBuilder esFilter = boolQuery(); request.setQuery(esFilter); request.addAggregation(AggregationBuilders.terms(FIELD_LANGUAGES) .field(FIELD_LANGUAGES) .size(MAX_PAGE_SIZE) .minDocCount(1) .order(Terms.Order.count(false))); request.addAggregation(AggregationBuilders.nested(FIELD_NCLOC_LANGUAGE_DISTRIBUTION, FIELD_NCLOC_LANGUAGE_DISTRIBUTION) .subAggregation(AggregationBuilders.terms(FIELD_NCLOC_LANGUAGE_DISTRIBUTION + "_terms") .subAggregation(sum(FIELD_DISTRIB_NCLOC).field(FIELD_DISTRIB_NCLOC)))); request.addAggregation(AggregationBuilders.nested(NCLOC_KEY, FIELD_MEASURES) .subAggregation(AggregationBuilders.filter(NCLOC_KEY + "_filter", termQuery(FIELD_MEASURES_KEY, NCLOC_KEY)) .subAggregation(sum(NCLOC_KEY + "_filter_sum").field(FIELD_MEASURES_VALUE)))); statistics.setSum(metric, value); }); statistics.setProjectCountByLanguage(termsToMap(response.getAggregations().get(FIELD_LANGUAGES)));
/**
 * Computes, for each given branch of a project, the number of unresolved issues
 * per issue type on non-main branches.
 *
 * @param projectUuid routing key so the query hits only the project's shard
 * @param branchUuids branches to report on; an empty list short-circuits to no results
 * @return one {@link BranchStatistics} per branch bucket returned by ES
 */
public List<BranchStatistics> searchBranchStatistics(String projectUuid, List<String> branchUuids) {
  if (branchUuids.isEmpty()) {
    return Collections.emptyList(); // avoid an empty terms filter
  }
  // Unresolved issues on the requested non-main branches.
  QueryBuilder issueQuery = boolQuery()
    .must(termsQuery(FIELD_ISSUE_BRANCH_UUID, branchUuids))
    .mustNot(existsQuery(FIELD_ISSUE_RESOLUTION))
    .must(termQuery(FIELD_ISSUE_IS_MAIN_BRANCH, Boolean.toString(false)));
  // size 0: only the branch/type aggregation tree matters, not the hits.
  SearchResponse response = client.prepareSearch(INDEX_TYPE_ISSUE)
    .setRouting(projectUuid)
    .setQuery(issueQuery)
    .setSize(0)
    .addAggregation(AggregationBuilders.terms("branchUuids")
      .field(FIELD_ISSUE_BRANCH_UUID)
      .size(branchUuids.size()) // one bucket per requested branch, never truncated
      .subAggregation(AggregationBuilders.terms("types")
        .field(FIELD_ISSUE_TYPE)))
    .get();
  StringTerms branchTerms = response.getAggregations().get("branchUuids");
  return branchTerms.getBuckets().stream()
    .map(branchBucket -> new BranchStatistics(
      branchBucket.getKeyAsString(),
      ((StringTerms) branchBucket.getAggregations().get("types")).getBuckets()
        .stream()
        .collect(uniqueIndex(StringTerms.Bucket::getKeyAsString, InternalTerms.Bucket::getDocCount))))
    .collect(MoreCollectors.toList(branchUuids.size()));
}
public Collection<org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket> termAggregation(String field){ SearchResponse response = client.prepareSearch(indexName) //no return for matches .setSize(0) .setQuery(QueryBuilders.matchAllQuery()) //return all terms .addAggregation(terms("agg").field(field).size(Integer.MAX_VALUE)) .execute().actionGet(); org.elasticsearch.search.aggregations.bucket.terms.Terms terms = response.getAggregations().get("agg"); Collection<org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket> buckets = terms.getBuckets(); return buckets; }
// NOTE(review): truncated fragment — `request`, `queryBuilder`, `aggregationFields`,
// `aggregationLimit` and the loop's closing brace are outside this view.
// NOTE(review): setSearchType is called twice on the same builder; the second call
// (DFS_QUERY_THEN_FETCH) overrides the first, so the QUERY_THEN_FETCH call is dead —
// one of the two should be removed in the full file.
if (typeName != null) request.setTypes(typeName); request .setExplain(explain) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(queryBuilder) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) // DFS_QUERY_THEN_FETCH is slower but provides stability of search results request.clearRescorers(); for (WebMapping field: aggregationFields) { request.addAggregation(AggregationBuilders.terms(field.getMapping().name()).field(field.getMapping().name()).minDocCount(1).size(aggregationLimit)); SearchResponse response = request.execute().actionGet(); SearchHits searchHits = response.getHits(); hitCount = (int) searchHits.getTotalHits(); Terms fieldCounts = response.getAggregations().get(field.getMapping().name()); List<? extends Bucket> buckets = fieldCounts.getBuckets();
public LinkedHashMap<String, Long> fullDateHistogram(final String indexName, int timezoneOffset, String histogram_timefield) { // prepare request SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery())) .setFrom(0) .setSize(0); request.clearRescorers(); request.addAggregation(AggregationBuilders.dateHistogram(histogram_timefield).field(histogram_timefield).timeZone("UTC").minDocCount(1).interval(DateHistogramInterval.DAY)); // get response SearchResponse response = request.execute().actionGet(); // evaluate date histogram: InternalHistogram<InternalHistogram.Bucket> dateCounts = response.getAggregations().get(histogram_timefield); LinkedHashMap<String, Long> list = new LinkedHashMap<>(); for (InternalHistogram.Bucket bucket : dateCounts.getBuckets()) { Calendar cal = Calendar.getInstance(DateParser.UTCtimeZone); org.joda.time.DateTime k = (org.joda.time.DateTime) bucket.getKey(); cal.setTime(k.toDate()); cal.add(Calendar.MINUTE, -timezoneOffset); long docCount = bucket.getDocCount(); list.put(DateParser.dayDateFormat.format(cal.getTime()), docCount); } return list; }
// NOTE(review): truncated fragment — `es`, `storeId`, `storeName`, `startDate`,
// `endDate`, `script` and `entry` are declared outside this view, and the bucket
// loop producing `entry` is missing. Original comments were Chinese; translated
// below. Two inconsistencies to confirm against the full file: the group-by
// comment says "at most 500 product codes" but size(2000) is used, and the
// comment on the `amount` sum repeats "sales quantity" although it reads "amount".
// The println string literal contains Chinese runtime text and is left untouched.
SearchResponse sr = transportClient.prepareSearch(es.getIndex()).setTypes(es.getType()) // index/type to query .setQuery(QueryBuilders.boolQuery() .must(QueryBuilders.termQuery("store_id", storeId)) // add each condition only when present; string fields need the .keyword suffix .must(QueryBuilders.termQuery("store_name.keyword", storeName)) .must(QueryBuilders.rangeQuery("pay_date.keyword").gte(startDate).lte(endDate)) ).addAggregation( AggregationBuilders.terms("by_product_code").field("product_code.keyword").size(2000) // group by product code, at most 500 product codes; for SKU just change the field name .subAggregation(AggregationBuilders.sum("quantity").field("quantity")) // per-group sum of sales quantity .subAggregation(AggregationBuilders.sum("amount").field("amount")) // per-group sum of amount paid; add further sums here in the same way .subAggregation(PipelineAggregatorBuilders.bucketSelector("sales_bucket_filter",script,"quantity"))// keep only buckets exceeding the given threshold .order(BucketOrder.aggregation("amount", false))) // sort the groups Terms terms = sr.getAggregations().get("by_product_code"); // fetch the product-code terms aggregation for iteration System.out.println("【 " + entry.getKey() + " 】订单数 : " + entry.getDocCount() ); Sum sum0 = entry.getAggregations().get("quantity"); // get the sales-quantity sum Sum sum1 = entry.getAggregations().get("amount"); // get the sales-quantity sum
// NOTE(review): truncated fragment — `idsFilterBuilder`, `ids`, `index` and
// `labelField` are declared outside this view. It restricts the search to the
// given document ids, fetches no hits/sources/scores, and reads back every
// distinct value of the label field (size Integer.MAX_VALUE = all terms).
// Minor smell visible here: setFetchSource(false) is called twice on the chain.
idsFilterBuilder.addIds(ids); SearchResponse response = index.getClient().prepareSearch(index.getIndexName()).setSize(0). setTrackScores(false). setFetchSource(false).setExplain(false).setFetchSource(false). setQuery(QueryBuilders. boolQuery() .must(idsFilterBuilder) .should(QueryBuilders.matchAllQuery())) .addAggregation(terms("agg").field(labelField).size(Integer.MAX_VALUE)) .execute().actionGet(); Terms terms = response.getAggregations().get("agg"); Collection<Terms.Bucket> buckets = terms.getBuckets();
/**
 * Finds the smallest creation date among issues matching the query and filters.
 *
 * @param filters named filters; null values are ignored, the rest are AND-ed
 * @param esQuery base query the filters are applied on top of
 * @return the minimum creation timestamp, or empty when no document matched
 *         (ES reports the min of an empty set as +/-Infinity)
 */
private OptionalLong getMinCreatedAt(Map<String, QueryBuilder> filters, QueryBuilder esQuery) {
  String facetNameAndField = CREATED_AT.getFieldName();
  SearchRequestBuilder esRequest = client
    .prepareSearch(INDEX_TYPE_ISSUE)
    .setSize(0); // aggregation only, no hits
  // AND together every non-null filter.
  BoolQueryBuilder esFilter = boolQuery();
  for (QueryBuilder filter : filters.values()) {
    if (filter != null) {
      esFilter.must(filter);
    }
  }
  // Only wrap the query in a filtered bool when there is something to filter by.
  esRequest.setQuery(esFilter.hasClauses()
    ? QueryBuilders.boolQuery().must(esQuery).filter(esFilter)
    : esQuery);
  esRequest.addAggregation(AggregationBuilders.min(facetNameAndField).field(facetNameAndField));
  Min minValue = esRequest.get().getAggregations().get(facetNameAndField);
  double actualValue = minValue.getValue();
  // Infinite means the aggregation saw no documents.
  return Double.isInfinite(actualValue) ? OptionalLong.empty() : OptionalLong.of((long) actualValue);
}
// NOTE(review): truncated/garbled fragment — the method signature's start, the
// `srb` declaration the orphaned `.setTypes(...)` chain belongs to, and the closing
// of the `if` are all outside this view. Intent: a multi-match query whose results
// are collapsed per GROUP_FIELD via a terms aggregation ordered by a max-_score
// script sub-aggregation, taking the top hit per group plus a cardinality count.
List<String> additionalFields, List<String> filters) throws SearchEngineException { MultiMatchQueryBuilder qb = QueryBuilders.multiMatchQuery(term, DEFAULT_FIELDS) .minimumShouldMatch("2<25%"); if (additionalFields != null && additionalFields.size() > 0) { TopHitsBuilder topHitsBuilder = AggregationBuilders.topHits(HITS_AGGREGATION) .setFrom(0) .setSize(1); AggregationBuilder termsAgg = AggregationBuilders.terms(HITS_AGGREGATION) .field(GROUP_FIELD) .order(Terms.Order.aggregation(SCORE_AGGREGATION, false)) .size(start + rows) .subAggregation( AggregationBuilders.max(SCORE_AGGREGATION) .script(new Script("_score", ScriptService.ScriptType.INLINE, "expression", null))) .subAggregation(topHitsBuilder); .setTypes(getDocumentType()) .setQuery(qb) .setSize(0) .addAggregation(termsAgg) .addAggregation(AggregationBuilders.cardinality(COUNT_AGGREGATION).field(GROUP_FIELD)); LOGGER.debug("ES Query: {}", srb.toString());
// NOTE(review): truncated fragment — the receiver of the `.setQuery(...)` chain
// (presumably `request`) and its declaration are outside this view, as is the
// `to`/upper bound of the date filter. It builds an access-log style dashboard
// query: method/status/content-type term counts, response-time histogram and
// extended stats, top/flop URIs by hit count, and a per-interval date histogram
// of requests broken down by method.
RangeQueryBuilder dateFilter = QueryBuilders.rangeQuery("timestamp").includeLower(true) .includeUpper(true); dateFilter.from(from.toEpochMilli()); .setQuery(QueryBuilders.boolQuery().filter(dateFilter)) .setSize(0) .addAggregation(AggregationBuilders.terms("methods").field("method")) .addAggregation( AggregationBuilders.histogram("response_time_histogram").field("timeTaken").interval(100)) .addAggregation(AggregationBuilders.extendedStats("response_time_stats").field("timeTaken")) .addAggregation(AggregationBuilders.terms("response_status_stats").field("response.status")) .addAggregation(AggregationBuilders.terms("response_content_type_stats") .field("response.headers.Content-Type")) .addAggregation(AggregationBuilders.terms("top_uris").field("request.uri") .order(Terms.Order.aggregation("_count", false)).size(10)) .addAggregation(AggregationBuilders.terms("flop_uris").field("request.uri") .order(Terms.Order.aggregation("_count", true)).size(10)) .addAggregation(AggregationBuilders.dateHistogram("request_histogram").field("timestamp") .interval(new DateHistogramInterval(precision)) .subAggregation(AggregationBuilders.terms("methods").field("request.method"))); SearchResponse response = request.execute().actionGet();
// NOTE(review): garbled fragment — request building (adding a terms aggregation to
// a SearchSourceBuilder) is spliced together with response handling (copying hits
// and terms buckets into ES* wrapper objects); several loop/if bodies are cut off.
// Restore against the full file before editing.
ssb.aggregation(AggregationBuilders.terms(aggregation.getName()).field(aggregation.getField())); if (response.getHits() != null) { for (SearchHit hit : response.getHits()) { ESSearchHit esSearchHit = new ESSearchHit(); if (!hit.getFields().isEmpty()) { esSearchResponse.setTotalHits(response.getHits().getTotalHits().value); if (response.getAggregations() != null) { for (String name : response.getAggregations().asMap().keySet()) { Terms termsAgg = response.getAggregations().get(name); ESTermsAggregation aggregation = new ESTermsAggregation(name, null); for (Terms.Bucket bucket : termsAgg.getBuckets()) {
/**
 * Warms field data for the parent and/or child indices by running a terms
 * aggregation over each requested field. Both searches are started
 * asynchronously so they warm in parallel; the method then blocks until
 * every started search completes. A null field skips that index entirely.
 *
 * @param parentField field to warm on the parent index, or null to skip
 * @param childField  field to warm on the child index, or null to skip
 */
public void warmFieldData(String parentField, String childField) {
  ListenableActionFuture<SearchResponse> parentSearch =
    parentField == null
      ? null
      : client.prepareSearch(PARENT_INDEX)
          .setQuery(matchAllQuery())
          .addAggregation(terms("parentfield").field(parentField))
          .execute();
  ListenableActionFuture<SearchResponse> childSearch =
    childField == null
      ? null
      : client.prepareSearch(CHILD_INDEX)
          .setQuery(matchAllQuery())
          .addAggregation(terms("childfield").field(childField))
          .execute();
  // Wait only on the searches that were actually fired.
  if (parentSearch != null) {
    parentSearch.actionGet();
  }
  if (childSearch != null) {
    childSearch.actionGet();
  }
}
// NOTE(review): truncated fragment — the `try` has no visible catch/finally,
// `requestBuilder`/`esClient` come from outside, and the code between the inner
// try and the aggregation loop is elided. Flow: probe min/max timestamps with a
// stats aggregation, derive a histogram interval from the time range and slot
// budget, then run the real query with a dateHistogram and copy bucket counts
// into `histogram`.
// NOTE(review): `histogram` is initialized to null and later dereferenced via
// histogram.getEntries() — the assignment must happen in the elided code; verify
// in the full file that it cannot reach the loop still null.
EventsCountHistogram histogram = null; if (maxHistogramIntervalSlots > 0) { final StatsBuilder timeRangeAgg = AggregationBuilders.stats("timeRange").field(Event.FIELD_TIMESTAMP); final SearchRequestBuilder timeRangeQuery = adaptRequestBuilder(esClient, getBaseRequestBuilder(esClient).setSize(0).addAggregation(timeRangeAgg)); try { final Aggregations aggregations = timeRangeQuery.execute().actionGet().getAggregations(); if (aggregations != null) { final Stats timeRangeStats = aggregations.get("timeRange"); final long timeRange = (long) (timeRangeStats.getMax() - timeRangeStats.getMin()); logger.debug("Time range query: {}", timeRangeQuery); final DateHistogramInterval interval = getInterval(timeRange, maxHistogramIntervalSlots, histogram); requestBuilder.addAggregation(AggregationBuilders.dateHistogram("eventsCount") .interval(interval).field(Event.FIELD_TIMESTAMP).order(Order.KEY_ASC)); final SearchResponse response = requestBuilder.execute().actionGet(); final List<EventPersistence.AspectEvent> events = new ArrayList<>(); for (final SearchHit h : response.getHits().getHits()) { try { final AspectEventImpl event = jsonMapper.readValue(h.getSourceAsString(), AspectEventImpl.class); if (response.getAggregations() != null) { for (final Bucket e : ((Histogram) response.getAggregations().get("eventsCount")).getBuckets()) { final DateTime key = (DateTime) e.getKey(); histogram.getEntries().add(new HistogramEntry(key.getMillis(), e.getDocCount()));
// NOTE(review): truncated fragment — `endMillis`, `request`, `client`, `indices`
// and the closing of the nested-query bool are outside this view (parentheses do
// not balance here). Intent: over the lookback window, find spans whose nested
// annotations match the requested service, then collect trace ids grouped by a
// terms aggregation ordered by each trace's minimum timestamp (newest first,
// since the min sub-aggregation is ordered descending).
long beginMillis = endMillis - request.lookback; BoolQueryBuilder filter = boolQuery() .must(rangeQuery("timestamp_millis") .gte(beginMillis) .lte(endMillis)); filter.must(boolQuery() .should(nestedQuery( "annotations", termQuery("annotations.endpoint.serviceName", request.serviceName))) client.collectBucketKeys(indices, boolQuery().must(matchAllQuery()).filter(filter), AggregationBuilders.terms("traceId_agg") .field("traceId") .subAggregation(AggregationBuilders.min("timestamps_agg") .field("timestamp_millis")) .order(Order.aggregation("timestamps_agg", false))