/**
 * Runs a time-series aggregation over an in-memory query result.
 *
 * @param result entities to aggregate; each element is cast to {@code TaggedLogAPIEntity}
 * @param query  compiled query supplying group-by fields, aggregate functions/fields,
 *               start/end time and interval (minutes)
 * @return time-series metric map when the entity is a generic metric, otherwise the flat aggregate result
 * @throws Exception if accumulation or result extraction fails
 */
private <E> Map timeseriesAggregate(List<E> result, CompiledQuery query) throws Exception {
    final TimeSeriesAggregator tsAggregator = new TimeSeriesAggregator(
            query.getGroupByFields(),
            query.getAggregateFunctionTypes(),
            query.getAggregateFields(),
            query.getStartTime(),
            query.getEndTime(),
            query.getIntervalMin());
    for (E item : result) {
        tsAggregator.accumulate((TaggedLogAPIEntity) item);
    }
    // Generic metrics are rendered as time-series points; everything else as a flat result.
    return this.jdbcEntityDefinition.isGenericMetric()
            ? tsAggregator.getMetric()
            : tsAggregator.result();
}
/**
 * Convert raw GroupbyKeyValue list into time-series data points hash map.
 *
 * @param result <code>List&lt;GroupbyKeyValue&gt;</code> produced by the aggregation pass
 * @return Map&lt;List&lt;String&gt;, List&lt;double[]&gt;&gt; keyed by group-by values
 * @throws Exception if key/value conversion fails
 */
private Map<List<String>, List<double[]>> convertToTimeSeriesDataPoints(List<GroupbyKeyValue> result) throws Exception {
    final Map<List<String>, List<Double>> flattened = this.keyValuesToMap(result);
    // Expand the flat aggregate map into fixed-length per-interval point arrays.
    return TimeSeriesAggregator.toMetric(flattened, this.pointsNum, this.aggFuncNum);
}
}
public void accumulate(TaggedLogAPIEntity entity) throws Exception{ List<String> groupbyFieldValues = createGroup(entity); // TODO: make sure timestamp be in range of this.startTime to this.endTime in outer side // guard the time range to avoid to accumulate entities whose timestamp is bigger than endTime if(entity.getTimestamp() >= this.endTime || entity.getTimestamp() < this.startTime){ if(LOG.isDebugEnabled()) LOG.debug("Ignore in-coming entity whose timestamp > endTime or < startTime, timestamp: " + entity.getTimestamp() + ", startTime:" + startTime + ", endTime:" + endTime); this.ignoredEntityCounter ++; return; } // time series bucket index long located =(entity.getTimestamp() - startTime)/intervalms; groupbyFieldValues.add(String.valueOf(located)); List<Double> preAggregatedValues = createPreAggregatedValues(entity); bucket.addDatapoint(groupbyFieldValues, preAggregatedValues); }
// NOTE(review): this fragment is truncated — the TimeSeriesAggregator constructor call is cut off
// before its time-range/interval arguments, and `obj`/`sortAgg`/`top` are declared outside this view.
// Presumably it decompacts generic-metric rows, aggregates them, and either returns the raw metric
// entries or a post-sorted list when sort options exist — verify against the full file.
reader = new GenericMetricEntityDecompactionStreamReader(this.prefix, searchCondition); TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(groupbyFields, aggregateCondition.getAggregateFunctionTypes(), aggregateFields, obj.addAll(tsAgg.getMetric().entrySet()); return (List<T>) obj; } else { // has sort options return (List<T>) TimeSeriesPostFlatAggregateSort.sort(sortAgg.result(), tsAgg.getMetric(), this.sortOptions, top);
// Interior of a larger read method: builds a time-series aggregator from byte-encoded function
// types, streams scan results through it asynchronously, then collects the grouped key/values.
// NOTE(review): startTime/endTime/intervalMin/scan are declared outside this fragment — confirm units
// (intervalMin is passed straight to the aggregator) against the constructor's contract.
final TimeSeriesAggregator aggregator = new TimeSeriesAggregator(groupbyFields, AggregateFunctionType.fromBytesList(aggregateFuncTypes), aggregatedFields, startTime, endTime, intervalMin); InternalReadReport report = this.asyncStreamRead(entityDefinition, scan, aggregator); List<GroupbyKeyValue> keyValues = aggregator.getGroupbyKeyValues();
// Interior fragment of a serial (parallel <= 0) aggregation path. intervalmin*60*1000 converts the
// interval from minutes to milliseconds for the aggregator. NOTE(review): `obj`, `sortAgg`, `result`
// and `top` are declared outside this view; the else-branch's post-flat sort depends on them — verify
// in the full file. Braces here are unbalanced because the fragment is cut mid-method.
LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(groupbyFields, comp.aggregateFunctionTypes(), aggregateFields, condition.getStartTime(), condition.getEndTime(), intervalmin*60*1000); if(parallel <= 0){ obj.addAll(tsAgg.getMetric().entrySet()); if(comp.sortOptions() == null){ result.setObj(obj); }else{ // has sort options result.setObj(TimeSeriesPostFlatAggregateSort.sort(sortAgg.result(), tsAgg.getMetric(), comp.sortOptions(), top));
// Interior of a timed HBase read: logs the scan (debug only), records a start timestamp
// (presumably for the InternalReadReport's elapsed-time accounting — confirm in the full method),
// then streams results through a TimeSeriesAggregator and collects the grouped key/values.
if(LOG.isDebugEnabled()) LOG.debug("SCAN: "+scan.toJSON()); long _start = System.currentTimeMillis(); final TimeSeriesAggregator aggregator = new TimeSeriesAggregator(groupbyFields,AggregateFunctionType.fromBytesList(aggregateFuncTypes),aggregatedFields,startTime,endTime,intervalMin); InternalReadReport report = this.asyncStreamRead(entityDefinition, scan,aggregator); List<GroupbyKeyValue> keyValues = aggregator.getGroupbyKeyValues();
/**
 * Aggregates an in-memory result list into time-series buckets defined by the compiled query.
 *
 * @param result list of entities; every element must be a {@code TaggedLogAPIEntity}
 * @param query  compiled query providing group-by fields, aggregate function types/fields,
 *               time window and interval in minutes
 * @return {@code getMetric()} output for generic-metric entities, {@code result()} otherwise
 * @throws Exception propagated from accumulation or result materialization
 */
private <E> Map timeseriesAggregate(List<E> result, CompiledQuery query) throws Exception {
    final TimeSeriesAggregator agg = new TimeSeriesAggregator(
            query.getGroupByFields(), query.getAggregateFunctionTypes(), query.getAggregateFields(),
            query.getStartTime(), query.getEndTime(), query.getIntervalMin());
    for (E element : result) {
        agg.accumulate((TaggedLogAPIEntity) element);
    }
    if (this.jdbcEntityDefinition.isGenericMetric()) {
        // Generic metrics expose a per-interval points map.
        return agg.getMetric();
    }
    return agg.result();
}
// Interior fragment, variant of the serial aggregation path where the condition's start/end times
// are human-readable date strings parsed via DateTimeUtil into epoch millis; intervalmin*60*1000
// converts minutes to milliseconds. NOTE(review): `obj`, `sortAgg`, `result`, `top` live outside
// this view and braces are unbalanced because the fragment is cut mid-method — verify in full file.
LOG.info("Output: " + StringUtils.join(condition.getOutputFields(), ", ")); TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(groupbyFields, comp.aggregateFunctionTypes(), aggregateFields, DateTimeUtil.humanDateToDate(condition.getStartTime()).getTime(), DateTimeUtil.humanDateToDate(condition.getEndTime()).getTime(), intervalmin*60*1000); if(parallel <= 0){ obj.addAll(tsAgg.getMetric().entrySet()); if(comp.sortOptions() == null){ result.setObj(obj); }else{ // has sort options result.setObj(TimeSeriesPostFlatAggregateSort.sort(sortAgg.result(), tsAgg.getMetric(), comp.sortOptions(), top));
/**
 * Convert raw GroupbyKeyValue list into time-series data points hash map.
 *
 * @param result <code>List&lt;GroupbyKeyValue&gt;</code> from the aggregation phase
 * @return Map&lt;List&lt;String&gt;, List&lt;double[]&gt;&gt; — one point array per aggregate function
 * @throws Exception if the raw key/value pairs cannot be converted
 */
private Map<List<String>, List<double[]>> convertToTimeSeriesDataPoints(List<GroupbyKeyValue> result) throws Exception {
    // First flatten to group -> aggregate values, then spread over the interval grid.
    final Map<List<String>, List<Double>> grouped = this.keyValuesToMap(result);
    return TimeSeriesAggregator.toMetric(grouped, this.pointsNum, this.aggFuncNum);
}
}
// Point count = (endTime-1-startTime)/intervalms + 1, i.e. the number of interval buckets covering
// the half-open window [startTime, endTime) — the "-1" makes the end boundary exclusive so an entity
// stamped exactly at endTime does not open a new bucket. NOTE(review): assumes endTime > startTime
// and intervalms > 0; enclosing method signature is outside this view.
return toMetric(result,(int)((endTime-1-startTime)/intervalms + 1),this.numFunctions);