/**
 * Sums every data point of the given time series into a single total.
 *
 * @param metricTimeSery the series whose values are aggregated
 * @return the sum of all values in the series; 0 when the series has no points
 */
protected long aggregateMetricValue(MetricTimeSeries metricTimeSery) {
  long total = 0L;
  for (TimeValue point : metricTimeSery.getTimeValues()) {
    total += point.getValue();
  }
  return total;
}
}
/**
 * Linearly interpolates the value at timestamp {@code ts} between the two known
 * data points {@code start} and {@code end}.
 *
 * @param start the data point before {@code ts}
 * @param end the data point after {@code ts}
 * @param ts the timestamp to interpolate at; assumed to lie between the two points
 *           and start/end timestamps assumed distinct (caller must guarantee — a
 *           zero {@code totalX} would divide by zero)
 * @return the interpolated value at {@code ts}
 */
@Override
protected long limitedInterpolate(TimeValue start, TimeValue end, long ts) {
  long deltaX = ts - start.getTimestamp();
  long totalX = end.getTimestamp() - start.getTimestamp();
  long totalY = end.getValue() - start.getValue();
  // Fix: the previous (int) cast silently truncated the interpolated delta when it
  // exceeded the int range; keep the whole computation in long arithmetic.
  long deltaY = totalY * deltaX / totalX;
  return start.getValue() + deltaY;
}
}
/**
 * Converts internal time-series points into the MetricQueryResult wire representation.
 *
 * @param points the internal data points to convert
 * @return an array with one converted entry per input point, in input order
 */
private MetricQueryResult.TimeValue[] decorate(List<TimeValue> points) {
  MetricQueryResult.TimeValue[] converted = new MetricQueryResult.TimeValue[points.size()];
  for (int i = 0; i < points.size(); i++) {
    TimeValue point = points.get(i);
    converted[i] = new MetricQueryResult.TimeValue(point.getTimestamp(), point.getValue());
  }
  return converted;
}
/**
 * Wraps each internal {@code TimeValue} into its query-result counterpart.
 *
 * @param points internal data points, in the order to be returned
 * @return converted points as a MetricQueryResult.TimeValue array
 */
private MetricQueryResult.TimeValue[] decorate(List<TimeValue> points) {
  MetricQueryResult.TimeValue[] result = new MetricQueryResult.TimeValue[points.size()];
  int index = 0;
  for (TimeValue source : points) {
    result[index] = new MetricQueryResult.TimeValue(source.getTimestamp(), source.getValue());
    index++;
  }
  return result;
}
private long getSingleValueFromTotals(MetricDataQuery query) { try { Collection<MetricTimeSeries> result = metricStore.query(query); if (result.isEmpty()) { return 0; } // since it is totals query and not groupBy specified, we know there's one time series List<TimeValue> timeValues = result.iterator().next().getTimeValues(); if (timeValues.isEmpty()) { return 0; } // since it is totals, we know there's one value only return timeValues.get(0).getValue(); } catch (Exception e) { throw Throwables.propagate(e); } } }
private long getTotals(MetricDataQuery query) throws Exception { // query must have resolution set to Integer.MAX_VALUE (i.e. "totals") Collection<MetricTimeSeries> result = metricStore.query(query); if (result.size() == 0) { return 0; } // since there's no group by condition, it'll return single time series always MetricTimeSeries timeSeries = result.iterator().next(); if (timeSeries.getTimeValues().isEmpty()) { return 0; } // since it is totals, it will have only one TimeValue or none return timeSeries.getTimeValues().get(0).getValue(); }
private long getSingleValueFromTotals(MetricDataQuery query) { try { Collection<MetricTimeSeries> result = metricStore.query(query); if (result.isEmpty()) { return 0; } // since it is totals query and not groupBy specified, we know there's one time series List<TimeValue> timeValues = result.iterator().next().getTimeValues(); if (timeValues.isEmpty()) { return 0; } // since it is totals, we know there's one value only return timeValues.get(0).getValue(); } catch (Exception e) { throw Throwables.propagate(e); } } }
private Map<String, Long> getTotalsWithSingleGroupByTag(MetricDataQuery query) throws Exception { // query must have resolution set to Integer.MAX_VALUE (i.e. "totals") Collection<MetricTimeSeries> result = metricStore.query(query); Map<String, Long> map = Maps.newHashMap(); for (MetricTimeSeries timeSeries : result) { // we know there's only ony group by tag String groupByTagValue = timeSeries.getTagValues().values().iterator().next(); // since it is totals, it will have only one TimeValue map.put(groupByTagValue, timeSeries.getTimeValues().get(0).getValue()); } return map; } }
// Copies each TimeValue into a fresh instance so the produced iterator does not hand
// out references to the underlying series' objects.
// NOTE(review): this is the body of an anonymous Function inside a transform(...)
// expression whose enclosing call sits outside this view ("}).iterator();" tail).
@Override public TimeValue apply(TimeValue input) { return new TimeValue(input.getTimestamp(), input.getValue()); } }).iterator();
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters, Map<String, Long> result) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.<String>of()); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); // initialize elements to zero for (String counterName : metricsToCounters.values()) { result.put(counterName, 0L); } for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } }
private void getAggregates(Map<String, String> tags, Map<String, String> metricsToCounters, Map<String, Long> result) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.<String>of()); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); // initialize elements to zero for (String counterName : metricsToCounters.values()) { result.put(counterName, 0L); } for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); result.put(metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } }
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID)); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID); allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } } }
private void queryGroupedAggregates(Map<String, String> tags, Table<String, String, Long> allTaskMetrics, Map<String, String> metricsToCounters) { Map<String, AggregationFunction> metrics = Maps.newHashMap(); // all map-reduce metrics are gauges for (String metric : metricsToCounters.keySet()) { metrics.put(metric, AggregationFunction.LATEST); } MetricDataQuery metricDataQuery = new MetricDataQuery(0, Integer.MAX_VALUE, Integer.MAX_VALUE, metrics, tags, ImmutableList.of(Constants.Metrics.Tag.INSTANCE_ID)); Collection<MetricTimeSeries> query = metricStore.query(metricDataQuery); for (MetricTimeSeries metricTimeSeries : query) { List<TimeValue> timeValues = metricTimeSeries.getTimeValues(); TimeValue timeValue = Iterables.getOnlyElement(timeValues); String taskId = metricTimeSeries.getTagValues().get(Constants.Metrics.Tag.INSTANCE_ID); allTaskMetrics.put(taskId, metricsToCounters.get(metricTimeSeries.getMetricName()), timeValue.getValue()); } } }
/**
 * Collects all metrics emitted by a single Spark program run, summing each metric
 * over the run's full lifetime.
 *
 * @param sparkProgram the Spark program whose run is inspected
 * @param runId the run to collect metrics for
 * @return map from metric name to its summed value for the run
 */
private Map<String, Long> getSparkDetails(ProgramId sparkProgram, String runId) {
  Map<String, String> context = new HashMap<>();
  context.put(Constants.Metrics.Tag.NAMESPACE, sparkProgram.getNamespace());
  context.put(Constants.Metrics.Tag.APP, sparkProgram.getApplication());
  context.put(Constants.Metrics.Tag.SPARK, sparkProgram.getProgram());
  context.put(Constants.Metrics.Tag.RUN_ID, runId);

  // discover which metric names exist for this run's tag combination
  List<TagValue> tags = new ArrayList<>();
  for (Map.Entry<String, String> entry : context.entrySet()) {
    tags.add(new TagValue(entry.getKey(), entry.getValue()));
  }
  MetricSearchQuery metricSearchQuery = new MetricSearchQuery(0, 0, Integer.MAX_VALUE, tags);
  Collection<String> metricNames = metricStore.findMetricNames(metricSearchQuery);

  Map<String, Long> overallResult = new HashMap<>();
  for (String metricName : metricNames) {
    Collection<MetricTimeSeries> resultPerQuery = metricStore.query(
      new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName, AggregationFunction.SUM,
                          context, new ArrayList<String>()));
    for (MetricTimeSeries metricTimeSeries : resultPerQuery) {
      // guard against a series with no data points instead of failing on get(0)
      if (metricTimeSeries.getTimeValues().isEmpty()) {
        continue;
      }
      overallResult.put(metricTimeSeries.getMetricName(),
                        metricTimeSeries.getTimeValues().get(0).getValue());
    }
  }
  return overallResult;
}
/**
 * Returns the summed total of {@code metricName} for the given program run and
 * profile, or 0 when no data has been recorded.
 *
 * @param metricStore store to query
 * @param programRunId the program run whose metric is requested
 * @param profileId the profile the run executed under
 * @param metricName the metric to sum
 * @return the summed value, or 0 when absent
 */
private long getMetric(MetricStore metricStore, ProgramRunId programRunId, ProfileId profileId,
                       String metricName) {
  Map<String, String> tags = getMetricsTags(programRunId, profileId);
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> result = metricStore.query(query);
  if (result.isEmpty()) {
    return 0;
  }
  List<TimeValue> dataPoints = result.iterator().next().getTimeValues();
  return dataPoints.isEmpty() ? 0 : dataPoints.get(0).getValue();
}
}
protected long getProfileTotalMetric(String metricName) { Map<String, String> tags = new HashMap<>(); tags.put(Constants.Metrics.Tag.PROFILE, Profile.NATIVE_NAME); tags.put(Constants.Metrics.Tag.PROFILE_SCOPE, EntityScope.SYSTEM.name()); MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, "system." + metricName, AggregationFunction.SUM, tags, Collections.emptyList()); Collection<MetricTimeSeries> results = metricStore.query(query); if (results.isEmpty()) { return 0; } // since it is totals query and not groupBy specified, we know there's one time series List<TimeValue> timeValues = results.iterator().next().getTimeValues(); if (timeValues.isEmpty()) { return 0; } // since it is totals, we know there's one value only return timeValues.get(0).getValue(); }
// Polls the metric store for the total dataset operation count emitted by
// CharCountProgram within SparkAppUsingObjectStore. Returns 0 while no data has
// arrived; once data exists, asserts exactly one series with exactly one totals
// value and returns that value.
// NOTE(review): this is the body of an anonymous Callable passed to a wait/retry
// helper whose remaining arguments (timeout 10s, poll 50ms) follow the closing
// brace — the enclosing call sits outside this view.
@Override public Long call() throws Exception { Collection<MetricTimeSeries> metrics = getMetricsManager().query(new MetricDataQuery( 0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE, "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM, ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(), Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(), Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(), Constants.Metrics.Tag.DATASET, "totals"), Collections.<String>emptyList())); if (metrics.isEmpty()) { return 0L; } Assert.assertEquals(1, metrics.size()); MetricTimeSeries ts = metrics.iterator().next(); Assert.assertEquals(1, ts.getTimeValues().size()); return ts.getTimeValues().get(0).getValue(); } }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
/**
 * Returns the summed total of {@code metricName} for the given program run under the
 * given profile, or 0 when no data exists.
 *
 * @param metricStore store to query
 * @param programRunId the program run whose metric is requested
 * @param profileId the profile the run executed under
 * @param metricName the metric to sum
 * @return the summed value, or 0 when absent
 */
private long getMetric(MetricStore metricStore, ProgramRunId programRunId, ProfileId profileId,
                       String metricName) {
  Map<String, String> tags = ImmutableMap.<String, String>builder()
    .put(Constants.Metrics.Tag.PROFILE_SCOPE, profileId.getScope().name())
    .put(Constants.Metrics.Tag.PROFILE, profileId.getProfile())
    .put(Constants.Metrics.Tag.NAMESPACE, programRunId.getNamespace())
    .put(Constants.Metrics.Tag.PROGRAM_TYPE, programRunId.getType().getPrettyName())
    .put(Constants.Metrics.Tag.APP, programRunId.getApplication())
    .put(Constants.Metrics.Tag.PROGRAM, programRunId.getProgram())
    .build();
  MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, metricName,
                                              AggregationFunction.SUM, tags, new ArrayList<>());
  Collection<MetricTimeSeries> queried = metricStore.query(query);
  if (queried.isEmpty()) {
    return 0;
  }
  List<TimeValue> dataPoints = queried.iterator().next().getTimeValues();
  return dataPoints.isEmpty() ? 0 : dataPoints.get(0).getValue();
}
}
// Produces the next point of the (possibly interpolated) output series:
// - if the underlying iterator's next point falls exactly on currentTs, consume it
//   and use its value;
// - otherwise, when an interpolator is configured and we are strictly between two
//   known points (peekBefore() != null), synthesize a value at currentTs;
// - afterwards currentTs either jumps to the next real data point (no interpolator)
//   or advances by the fixed resolution so gaps get filled in.
// NOTE(review): when neither branch applies, a zero-valued point is emitted at
// currentTs — presumably intentional gap handling; confirm with callers.
@Override protected TimeValue computeNext() { long currentTsValue = 0; // no more data points in the timeseries if (!timeseries.hasNext()) { return endOfData(); } // move the iterator to the next point in this timeseries if this is an actual data point and not interpolated. if (timeseries.peek().getTimestamp() == currentTs) { currentTsValue += timeseries.peek().getValue(); timeseries.next(); } else if (interpolator != null && timeseries.peekBefore() != null) { // don't interpolate unless we're in between data points currentTsValue += interpolator.interpolate(timeseries.peekBefore(), timeseries.peek(), currentTs); } TimeValue output = new TimeValue(currentTs, currentTsValue); if (timeseries.hasNext()) { // increment the currentTs by resolution to get the next data point. currentTs = (interpolator == null) ? timeseries.peek().getTimestamp() : currentTs + resolution; } return output; }