/**
 * Folds a date-histogram aggregation into the facet map keyed by day.
 * When a bucket carries an effort sub-aggregation, its rounded sum becomes
 * the bucket value; otherwise the raw document count is used.
 */
private void processDateHistogram(Histogram aggregation) {
    LinkedHashMap<String, Long> facet = getOrCreateFacet(aggregation.getName());
    for (Histogram.Bucket bucket : aggregation.getBuckets()) {
        String day = dateTimeToDate(bucket.getKeyAsString(), timeZone);
        boolean hasEffort = bucket.getAggregations().getAsMap().containsKey(FACET_MODE_EFFORT);
        long bucketValue;
        if (hasEffort) {
            Sum effortSum = bucket.getAggregations().get(FACET_MODE_EFFORT);
            bucketValue = Math.round(effortSum.getValue());
        } else {
            bucketValue = bucket.getDocCount();
        }
        facet.put(day, bucketValue);
    }
}
/**
 * Converts a histogram aggregation into a HistogramResult facet.
 * Buckets that carry the internal extended-stats sub-aggregation contribute
 * sum/avg/min/max; buckets without it fall back to zeroed statistics.
 */
private void processHistogramAggregation(Histogram agg) {
    List<IntervalUnit> intervals = new ArrayList<>();
    for (Histogram.Bucket bucket : agg.getBuckets()) {
        long keyMillis = ((DateTime) bucket.getKey()).getMillis();
        long count = bucket.getDocCount();
        ExtendedStats stats = bucket.getAggregations().get(AbstractFacetRequest.INTERNAL_STATS);
        IntervalUnit unit = (stats == null)
            ? new IntervalUnit(keyMillis, count, count, 0, 0, 0, 0)
            : new IntervalUnit(keyMillis, count, count, stats.getSum(), stats.getAvg(), stats.getMin(), stats.getMax());
        intervals.add(unit);
    }
    addFacet(new HistogramResult(agg.getName(), intervals));
}
}
/**
 * Builds the date-histogram aggregation for this facet request.
 * A calendar-aware interval ({@code timeUnit}) takes precedence over the
 * raw numeric interval, and an extended-stats sub-aggregation over the same
 * field is always attached so per-bucket sum/avg/min/max are available.
 *
 * @return the configured date-histogram aggregation builder
 * @throws IllegalArgumentException if name/field are missing or interval is not positive
 */
public AbstractAggregationBuilder getFacet() {
    // Fixed: assertion messages previously read "can't be a null" and "greater them zero".
    Assert.notNull(getName(), "Facet name can't be null !!!");
    Assert.isTrue(!StringUtils.isEmpty(field), "Please select field on which to build the facet !!!");
    Assert.isTrue(interval > 0, "Please provide interval as positive value greater than zero !!!");
    DateHistogramAggregationBuilder dateHistogramBuilder = AggregationBuilders.dateHistogram(getName());
    dateHistogramBuilder.field(field);
    if (timeUnit != null) {
        // Calendar interval (e.g. day/week) wins over the plain millisecond interval.
        dateHistogramBuilder.dateHistogramInterval(timeUnit);
    } else {
        dateHistogramBuilder.interval(interval);
    }
    dateHistogramBuilder.subAggregation(AggregationBuilders.extendedStats(INTERNAL_STATS).field(field));
    return dateHistogramBuilder;
}
}
// Builds a date-histogram aggregation from SQL-style key/value parameters; a value
// containing "doc[" is treated as an inline script source instead of a plain option.
// NOTE(review): TIME_FARMAT looks like a typo of TIME_FORMAT — confirm and rename the
// constant at its declaration site if so (it is referenced here, not defined here).
// NOTE(review): the "time_zone" case parses the value with ZoneOffset.of(...), which only
// accepts offset forms like "+02:00"; region ids such as "Europe/Paris" would throw
// DateTimeException — confirm whether region ids must be supported.
// Fragment is truncated mid-switch: the "extended_bounds" case continues beyond this chunk.
DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); String value = null; for (KVValue kv : field.getParams()) { if(kv.value.toString().contains("doc[")) { String script = kv.value + "; return " + kv.key; dateHistogram.script(new Script(script)); } else { value = kv.value.toString(); switch (kv.key.toLowerCase()) { case "interval": dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); break; case "field": dateHistogram.field(value); break; case "format": dateHistogram.format(value); break; case "time_zone": dateHistogram.timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone(ZoneOffset.of(value)))); break; case "min_doc_count": dateHistogram.minDocCount(Long.parseLong(value)); break; case "order": dateHistogram.order("desc".equalsIgnoreCase(value) ? BucketOrder.key(false) : BucketOrder.key(true)); break; case "extended_bounds":
// Fragment: tail of a date-histogram builder chain (the builder's creation lies outside
// this chunk). Buckets the CREATED_AT field at the system's default raw timezone offset;
// minDocCount(0) plus extendedBounds forces empty buckets across the whole requested
// window. NOTE(review): the bounds use (startTime + 1) when start is exclusive and
// (endTime - 1) unconditionally — presumably both ends are millisecond-inclusive; verify
// against the callers that compute startTime/endTime.
.field(CREATED_AT.getFieldName()) .dateHistogramInterval(bucketSize) .minDocCount(0L) .format(DateUtils.DATETIME_FORMAT) .timeZone(DateTimeZone.forOffsetMillis(system.getDefaultTimeZone().getRawOffset())) .extendedBounds(new ExtendedBounds(startInclusive ? startTime : (startTime + 1), endTime - 1L)); addEffortAggregationIfNeeded(query, dateHistogram); return Optional.of(dateHistogram);
public LinkedHashMap<String, Long> fullDateHistogram(final String indexName, int timezoneOffset, String histogram_timefield) { // prepare request SearchRequestBuilder request = elasticsearchClient.prepareSearch(indexName) .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery())) .setFrom(0) .setSize(0); request.clearRescorers(); request.addAggregation(AggregationBuilders.dateHistogram(histogram_timefield).field(histogram_timefield).timeZone("UTC").minDocCount(1).interval(DateHistogramInterval.DAY)); // get response SearchResponse response = request.execute().actionGet(); // evaluate date histogram: InternalHistogram<InternalHistogram.Bucket> dateCounts = response.getAggregations().get(histogram_timefield); LinkedHashMap<String, Long> list = new LinkedHashMap<>(); for (InternalHistogram.Bucket bucket : dateCounts.getBuckets()) { Calendar cal = Calendar.getInstance(DateParser.UTCtimeZone); org.joda.time.DateTime k = (org.joda.time.DateTime) bucket.getKey(); cal.setTime(k.toDate()); cal.add(Calendar.MINUTE, -timezoneOffset); long docCount = bucket.getDocCount(); list.put(DateParser.dayDateFormat.format(cal.getTime()), docCount); } return list; }
// Fragment: tail of a date-histogram builder chain over the message timestamp field
// (builder creation lies outside this chunk). Each bucket is split by a terms
// sub-aggregation on `field` (optionally stacked) plus a "missing" sub-aggregation
// counting documents without that field.
.field(Message.FIELD_TIMESTAMP) .dateHistogramInterval(interval.toESInterval()) .subAggregation(createTermsBuilder(field, stackedFields, size, termsOrder)) .subAggregation(AggregationBuilders.missing("missing").field(field));
@Override
public InternalAggregation buildEmptyAggregation() {
    // Empty-bucket metadata is only needed when minDocCount == 0, i.e. when
    // empty buckets will be materialized on reduce.
    EmptyBucketInfo emptyBucketInfo = minDocCount == 0
        ? new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations())
        : null;
    return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo,
        formatter, keyed, pipelineAggregators(), metaData());
}
// Configures a numeric histogram from SQL-style key/value parameters; a value containing
// "doc[" is treated as an inline script source instead of a plain option.
// BUG(review): in the "alias" case, the first `break;` exits the switch, so the
// following `histogram.order(order); break;` is unreachable dead code — it most likely
// belongs under a separate "order" case that was lost; needs a fix at the origin file.
// Fragment is truncated at `default:` (the default branch continues beyond this chunk).
if(kv.value.toString().contains("doc[")) { String script = kv.value + "; return " + kv.key; histogram.script(new Script(script)); } else { value = kv.value.toString(); switch (kv.key.toLowerCase()) { case "interval": histogram.interval(Long.parseLong(value)); break; case "field": histogram.field(value); break; case "min_doc_count": histogram.minDocCount(Long.parseLong(value)); break; case "extended_bounds": String[] bounds = value.split(":"); if (bounds.length == 2) histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); break; case "alias": break; histogram.order(order); break; default:
/**
 * Runs a date-histogram query over message timestamps for the given search
 * query/filter/time range and wraps the aggregation into a result object.
 * Returns an empty result when no index is affected by the time range.
 */
public HistogramResult histogram(String query, DateHistogramInterval interval, String filter, TimeRange range) {
    final DateHistogramAggregationBuilder histogram = AggregationBuilders.dateHistogram(AGG_HISTOGRAM)
        .field(Message.FIELD_TIMESTAMP)
        .dateHistogramInterval(interval.toESInterval());
    final SearchSourceBuilder source = filteredSearchRequest(query, filter, range).aggregation(histogram);
    final Set<String> indices = determineAffectedIndices(range, filter);
    // Short-circuit: nothing to search, so skip the round-trip entirely.
    if (indices.isEmpty()) {
        return DateHistogramResult.empty(query, source.toString(), interval);
    }
    final Search.Builder searchBuilder = new Search.Builder(source.toString())
        .addType(IndexMapping.TYPE_MESSAGE)
        .addIndex(indices)
        .ignoreUnavailable(true)
        .allowNoIndices(true);
    final io.searchbox.core.SearchResult searchResult =
        wrapInMultiSearch(searchBuilder.build(), () -> "Unable to retrieve histogram");
    recordEsMetrics(searchResult, range);
    final HistogramAggregation aggregation = searchResult.getAggregations().getHistogramAggregation(AGG_HISTOGRAM);
    return new DateHistogramResult(aggregation, query, source.toString(), interval,
        tookMsFromSearchResult(searchResult));
}
@Override
public InternalAggregation buildEmptyAggregation() {
    InternalDateHistogram.EmptyBucketInfo emptyBucketInfo = null;
    if (minDocCount == 0) {
        // Empty buckets will be injected on reduce, so precompute their shape here.
        emptyBucketInfo = new InternalDateHistogram.EmptyBucketInfo(rounding, buildEmptySubAggregations(), extendedBounds);
    }
    return new InternalDateHistogram(name, Collections.emptyList(), order, minDocCount, offset,
        emptyBucketInfo, formatter, keyed, pipelineAggregators(), metaData());
}
@Override
public InternalAggregation buildEmptyAggregation() {
    // Bucket info carries the rounding ladder and the currently selected rounding index.
    final InternalAutoDateHistogram.BucketInfo bucketInfo =
        new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx, buildEmptySubAggregations());
    return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, bucketInfo,
        formatter, pipelineAggregators(), metaData(), 1);
}
@Override
public InternalDateHistogram create(List<Bucket> buckets) {
    // Rebuild the aggregation around the supplied buckets, reusing every other field as-is.
    return new InternalDateHistogram(name, buckets, order, minDocCount, offset, emptyBucketInfo,
        format, keyed, pipelineAggregators(), metaData);
}
/**
 * Repeatedly coarsens the rounding (and merges buckets accordingly) until the
 * bucket count fits within the target, or the coarsest rounding is reached.
 */
private BucketReduceResult mergeBucketsIfNeeded(List<Bucket> reducedBuckets, int reduceRoundingIdx,
        RoundingInfo reduceRoundingInfo, ReduceContext reduceContext) {
    List<Bucket> buckets = reducedBuckets;
    int roundingIdx = reduceRoundingIdx;
    RoundingInfo roundingInfo = reduceRoundingInfo;
    while (buckets.size() > (targetBuckets * roundingInfo.getMaximumInnerInterval())
            && roundingIdx < bucketInfo.roundingInfos.length - 1) {
        roundingIdx++;
        roundingInfo = bucketInfo.roundingInfos[roundingIdx];
        buckets = mergeBuckets(buckets, roundingInfo.rounding, reduceContext);
    }
    return new BucketReduceResult(buckets, roundingInfo, roundingIdx, 1);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent,
        boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators,
        Map<String, Object> metaData) throws IOException {
    // Single-bucket collection can use the concrete aggregator directly;
    // otherwise defer to the generic multi-bucket wrapper.
    if (collectsFromSingleBucket) {
        return createAggregator(valuesSource, parent, pipelineAggregators, metaData);
    }
    return asMultiBucketAggregator(this, context, parent);
}
// Builds the aggregator used when the target field is unmapped in the searched index:
// a null ValuesSource yields an aggregator that produces empty results instead of failing.
@Override protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { return createAggregator(null, parent, pipelineAggregators, metaData); } }
// Unmapped-field fallback: delegates with a null ValuesSource so the aggregation
// returns empty buckets rather than erroring on indices lacking the field.
@Override protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { return createAggregator(null, parent, pipelineAggregators, metaData); } }
// Unmapped-field fallback (same pattern as the sibling factories): a null ValuesSource
// produces an aggregator that emits empty results for indices without this field.
@Override protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { return createAggregator(null, parent, pipelineAggregators, metaData); } }
// Fragment: truncated on both ends — `response` is used but declared outside this chunk,
// and the final map-entry expression is cut off. Adds either a UTC date histogram
// (when the field is the timestamp field; minDocCount(0) keeps empty days) or a terms
// aggregation, then walks the histogram buckets shifting keys by the caller's timezone
// offset in minutes and formatting per the interval granularity (day vs minute).
if (field.equals(histogram_timefield)) { addTimeHistogram = true; request.addAggregation(AggregationBuilders.dateHistogram(histogram_timefield).field(histogram_timefield).timeZone("UTC").minDocCount(0).interval(dateHistogrammInterval)); } else { request.addAggregation(AggregationBuilders.terms(field).field(field).minDocCount(1).size(aggregationLimit)); InternalHistogram<InternalHistogram.Bucket> dateCounts = response.getAggregations().get(histogram_timefield); ArrayList<Map.Entry<String, AtomicLong>> list = new ArrayList<>(); for (InternalHistogram.Bucket bucket : dateCounts.getBuckets()) { Calendar cal = Calendar.getInstance(DateParser.UTCtimeZone); org.joda.time.DateTime k = (org.joda.time.DateTime) bucket.getKey(); cal.setTime(k.toDate()); cal.add(Calendar.MINUTE, -timezoneOffset); long docCount = bucket.getDocCount(); Map.Entry<String, AtomicLong> entry = new AbstractMap.SimpleEntry<>( (dateHistogrammInterval == DateHistogramInterval.DAY ? DateParser.dayDateFormat : DateParser.minuteDateFormat).format(cal.getTime()),
// Fragment: truncated — this is the tail of a method signature, and the `else` here has
// no matching `if` within this chunk. Builds a date histogram over message timestamps
// and attaches per-bucket sub-aggregations on `field`: full stats in one branch,
// value-count plus cardinality in the other (cardinality presumably gated by the
// `includeCardinality` flag declared in the missing signature — verify upstream).
boolean includeCardinality) { final DateHistogramAggregationBuilder dateHistogramBuilder = AggregationBuilders.dateHistogram(AGG_HISTOGRAM) .field(Message.FIELD_TIMESTAMP) .dateHistogramInterval(interval.toESInterval()); dateHistogramBuilder.subAggregation(AggregationBuilders.stats(AGG_STATS).field(field)); } else { dateHistogramBuilder.subAggregation(AggregationBuilders.count(AGG_VALUE_COUNT).field(field)); dateHistogramBuilder.subAggregation(AggregationBuilders.cardinality(AGG_CARDINALITY).field(field));