/**
 * Folds a single sample into the current generation's bucket and hands the
 * same bucket instance back to the caller.
 */
@Override
public Bucket update(Bucket current, Sample x) {
    current.put(x);
    return current;
}
/**
 * Merges another bucket into this one, first reconciling the two buckets'
 * time stamps to decide which of them carries the more recent data.
 */
void merge(Bucket other) {
    final boolean otherIsNewer = resolveTimeStamps(other);
    merge(other, otherIsNewer);
}
MetricSnapshot convert() { for (Map.Entry<Identifier, UntypedMetric> entry : snapshot.entrySet()) { Identifier ident = entry.getKey(); getMap(ident.getLocation()).put(ident.getName(), convert(entry.getValue())); } Map<MetricDimensions, MetricSet> data = new HashMap<>(); for (Map.Entry<Point, Map<String, MetricValue>> entry : perPointData.entrySet()) { MetricDimensions key = convert(entry.getKey()); MetricSet newval = new MetricSet(entry.getValue()); MetricSet old = data.get(key); if (old != null) { // should not happen, this is bad // TODO: consider merging the two MetricSet instances log.warning("losing MetricSet when converting for: "+entry.getKey()); } else { data.put(key, newval); } } return new MetricSnapshot(snapshot.getFromMillis(), snapshot.getToMillis(), TimeUnit.MILLISECONDS, data); }
/**
 * Pads the presentation bucket with the freshest cached data points for the
 * given metric, walking the cache from newest to oldest until pointsToKeep
 * points are present or the cache is exhausted. Cached points older than
 * MAX_AGE_MILLIS are skipped, and points already present in the bucket are
 * never overwritten.
 *
 * @param metric name of the metric to pad
 * @param toPresent bucket being prepared for presentation
 * @param currentDataPoints number of points already present for this metric
 */
private void padMetric(String metric, Bucket toPresent, int currentDataPoints) {
    final LinkedHashMap<Point, TimeStampedMetric> cachedPoints = getCachedMetric(metric);
    int toAdd = pointsToKeep - currentDataPoints;
    // Copy the entry set into a typed list so the insertion-ordered map can
    // be traversed from newest (last inserted) to oldest. This replaces the
    // raw-typed new Entry[0] array and the @SuppressWarnings it required.
    final List<Entry<Point, TimeStampedMetric>> cachedEntries = new ArrayList<>(cachedPoints.entrySet());
    final long nowMillis = System.currentTimeMillis();
    for (int i = cachedEntries.size() - 1; i >= 0 && toAdd > 0; --i) {
        Entry<Point, TimeStampedMetric> leastOld = cachedEntries.get(i);
        if (leastOld.getValue().millis + MAX_AGE_MILLIS < nowMillis) {
            continue; // too stale to present
        }
        final Identifier id = new Identifier(metric, leastOld.getKey());
        if (!toPresent.hasIdentifier(id)) {
            toPresent.put(id, leastOld.getValue().metric.pruneData());
            --toAdd;
        }
    }
}
/**
 * Collects every (point, value) pair stored under the given metric name.
 *
 * @param metricName name of the metric to look up
 * @return all matching point/value pairs, empty if the metric is unknown
 */
public Collection<Map.Entry<Point, UntypedMetric>> getValuesForMetric(@NonNull String metricName) {
    final List<Map.Entry<Point, UntypedMetric>> matches = new ArrayList<>();
    for (Map.Entry<Identifier, UntypedMetric> candidate : values.entrySet()) {
        if (!metricName.equals(candidate.getKey().getName())) {
            continue;
        }
        matches.add(locationValuePair(candidate));
    }
    return matches;
}
void merge(Bucket other, boolean otherIsNewer) { LazySet<String> malformedMetrics = LazySet.newHashSet(); for (Map.Entry<Identifier, UntypedMetric> entry : other.values.entrySet()) { String metricName = entry.getKey().getName(); try { if (!malformedMetrics.contains(metricName)) { get(entry.getKey(), entry.getValue()).merge(entry.getValue(), otherIsNewer); } } catch (IllegalArgumentException e) { log.log(LogLevel.WARNING, "Problems merging metric " + metricName + ", possibly ignoring data."); // avoid spamming the log if there are a lot of mismatching // threads malformedMetrics.add(metricName); } } }
/**
 * Ensures every metric in the presentation bucket has up to pointsToKeep
 * data points, topping up sparse metrics from the persistent cache and
 * adding cached metrics that are missing from the bucket entirely.
 */
private void padPresentation(Bucket toPresent) {
    final Map<String, List<Entry<Point, UntypedMetric>>> currentMetricNames = toPresent.getValuesByMetricName();
    for (Map.Entry<String, List<Entry<Point, UntypedMetric>>> metric : currentMetricNames.entrySet()) {
        final int present = metric.getValue().size();
        if (present < pointsToKeep) {
            padMetric(metric.getKey(), toPresent, present);
        }
    }
    // cached metrics with no presence at all in the bucket get padded from zero
    final Set<String> keysMissingFromPresentation = new HashSet<>(persistentData.keySet());
    keysMissingFromPresentation.removeAll(currentMetricNames.keySet());
    for (String cachedMetric : keysMissingFromPresentation) {
        padMetric(cachedMetric, toPresent, 0);
    }
}
public Bucket getUpdatedSnapshot() throws InterruptedException { updater.gotData.await(10, TimeUnit.SECONDS); Bucket s = receiver.getSnapshot(); long startedWaitingForSnapshot = System.currentTimeMillis(); // just waiting for the correct snapshot being constructed (yes, this is // necessary) while (s == null || s.entrySet().size() == 0) { if (System.currentTimeMillis() - startedWaitingForSnapshot > (10L * 1000L)) { throw new RuntimeException("Test timed out."); } Thread.sleep(10); s = receiver.getSnapshot(); } return s; }
/**
 * Supplies a fresh, empty bucket for the next metrics generation; the
 * previous generation's bucket is never reused.
 */
@Override
public Bucket createGenerationInstance(Bucket previous) {
    return new Bucket();
}
/**
 * Groups all stored values by metric name.
 *
 * @return map from metric name to the (point, value) pairs recorded for it
 */
public Map<String, List<Map.Entry<Point, UntypedMetric>>> getValuesByMetricName() {
    Map<String, List<Map.Entry<Point, UntypedMetric>>> result = new HashMap<>();
    for (Map.Entry<Identifier, UntypedMetric> entry : values.entrySet()) {
        // computeIfAbsent replaces the containsKey/get/put dance:
        // a single map lookup instead of up to three
        result.computeIfAbsent(entry.getKey().getName(), name -> new ArrayList<>())
                .add(locationValuePair(entry));
    }
    return result;
}
/**
 * Records one sample: gauges store the measured magnitude directly, while
 * counters accumulate it.
 *
 * @throws IllegalArgumentException for metric types other than gauge/counter
 */
void put(Sample x) {
    final UntypedMetric target = get(x);
    final Measurement measurement = x.getMeasurement();
    switch (x.getMetricType()) {
        case GAUGE:
            target.put(measurement.getMagnitude());
            break;
        case COUNTER:
            target.add(measurement.getMagnitude());
            break;
        default:
            throw new IllegalArgumentException("Unsupported metric type: " + x.getMetricType());
    }
}
private void updatePersistentData(Bucket toDelete) { if (toDelete == null) { return; } long millis = toDelete.gotTimeStamps ? toDelete.toMillis : System.currentTimeMillis(); for (Map.Entry<String, List<Entry<Point, UntypedMetric>>> metric : toDelete.getValuesByMetricName().entrySet()) { LinkedHashMap<Point, TimeStampedMetric> cachedPoints = getCachedMetric(metric.getKey()); for (Entry<Point, UntypedMetric> newestInterval : metric.getValue()) { // overwriting an existing entry does not update the order // in the map cachedPoints.remove(newestInterval.getKey()); TimeStampedMetric toInsert = new TimeStampedMetric(millis, newestInterval.getValue()); cachedPoints.put(newestInterval.getKey(), toInsert); } } }
/**
 * Writes the percentile distribution of every metric that has a histogram
 * to the given stream, each wrapped in start/end marker lines. Emits a
 * placeholder comment when no histograms are available.
 */
void outputHistograms(PrintStream output) {
    boolean anyHistogram = false;
    for (Map.Entry<Identifier, UntypedMetric> entry : snapshot.entrySet()) {
        final DoubleHistogram histogram = entry.getValue().getHistogram();
        if (histogram == null) {
            continue;
        }
        anyHistogram = true;
        final String metricIdentifier = getIdentifierString(entry.getKey());
        output.println("# start of metric " + metricIdentifier);
        histogram.outputPercentileDistribution(output, 4, 1.0d, true);
        output.println("# end of metric " + metricIdentifier);
    }
    if (!anyHistogram) {
        output.println("# No histograms currently available.");
    }
}
/**
 * Rotates the ring buffer: merges all freshly fetched buckets into a new
 * bucket covering [fromMillis, now], stores it in the next buffer slot, and
 * returns the bucket evicted from that slot (may be null while the buffer
 * is still filling up).
 */
private Bucket updateBuffer() {
    List<Bucket> buckets = metricsCollection.fetch();
    final long toMillis = System.currentTimeMillis();
    // Math.floorMod keeps the index non-negative even if the int generation
    // counter ever wraps past Integer.MAX_VALUE, where the % operator would
    // yield a negative index and throw ArrayIndexOutOfBoundsException.
    final int bucketIndex = Math.floorMod(generation++, buffer.length);
    Bucket bucketToDelete = buffer[bucketIndex];
    Bucket latest = new Bucket(fromMillis, toMillis);
    for (Bucket b : buckets) {
        latest.merge(b, true);
    }
    buffer[bucketIndex] = latest;
    this.fromMillis = toMillis;
    return bucketToDelete;
}