private static String measurementName(Meter.Id id, Measurement measurement) { final StringBuilder buf = new StringBuilder(); // Append name. buf.append(id.getName()); // Append statistic. buf.append('#'); buf.append(measurement.getStatistic().getTagValueRepresentation()); // Append tags if there are any. final Iterator<Tag> tagsIterator = id.getTags().iterator(); if (tagsIterator.hasNext()) { buf.append('{'); tagsIterator.forEachRemaining(tag -> buf.append(tag.getKey()).append('=') .append(tag.getValue()).append(',')); buf.setCharAt(buf.length() - 1, '}'); } return buf.toString(); }
@Test
public void metricsActivatedHasDistinctSchedulerIdTags() {
    // Registration order matters: the second identically-configured
    // parallel("A", 4) scheduler is expected to get a "#1" suffix so its
    // SCHEDULER_ID tag stays distinct from the first one.
    Schedulers.newParallel("A", 4);
    Schedulers.newParallel("A", 4);
    Schedulers.newParallel("A", 3);
    Schedulers.newSingle("B");
    // NOTE(review): createWorker() presumably forces the elastic scheduler's
    // lazy initialization so its meters get registered — confirm.
    Schedulers.newElastic("C").createWorker();
    // Every scheduler created above must appear exactly once in the distinct
    // set of SCHEDULER_ID tag values across all registered meters.
    assertThat(simpleMeterRegistry.getMeters()
            .stream()
            .map(m -> m.getId().getTag(SchedulerMetricDecorator.TAG_SCHEDULER_ID))
            .distinct())
            .containsOnly(
                    "parallel(4,\"A\")",
                    "parallel(4,\"A\")#1",
                    "parallel(3,\"A\")",
                    "single(\"B\")",
                    "elastic(\"C\")"
            );
}
if (m.getDomain() != null && !id.getName().startsWith(m.getDomain().getPrefix())) { String tagValue = id.getTag(m.getLabel()); return m.getValue().equals(tagValue); })); if (m.getDomain() != null && !id.getName().startsWith(m.getDomain().getPrefix())) { String tagValue = id.getTag(m.getLabel()); return pattern.matcher(tagValue).matches(); }));
@Override public Meter.Id map(final Meter.Id id) { String name = id.getName(); final List<Tag> newTags = new ArrayList<>(id.getTags()); newTags.add(Tag.of(TAG_HONO, "hono")); if (MicrometerBasedMetrics.METER_CONNECTIONS_AUTHENTICATED.equals(id.getName()) || MicrometerBasedMetrics.METER_CONNECTIONS_UNAUTHENTICATED.equals(id.getName())) { } else if (MicrometerBasedMetrics.METER_MESSAGES_UNDELIVERABLE.equals(id.getName())) { return new Meter.Id(name, newTags, id.getBaseUnit(), id.getDescription(), id.getType());
io.micrometer.core.instrument.Meter.Type type = id.getType(); StreamSupport.stream(id.getTags().spliterator(), false) .map(tag -> MeterTag.newBuilder().setKey(tag.getKey()).setValue(tag.getValue()).build()) .collect(Collectors.toList()); .build(); MeterId.Builder idBuilder = MeterId.newBuilder(); idBuilder.setName(id.getName()); idBuilder.addAllTag(meterTags); idBuilder.setType(convert(type)); if (id.getDescription() != null) { idBuilder.setDescription(id.getDescription());
@Override public Meter.Id map(final Meter.Id id) { String name = id.getName(); final List<Tag> newTags = new ArrayList<>(id.getTags()); newTags.add(Tag.of(TAG_HONO, "hono")); return new Meter.Id(name, newTags, id.getBaseUnit(), id.getDescription(), id.getType());
List<Tag> tags = id.getTags();
/**
 * Collects JVM garbage-collector related metrics from the meter registry.
 *
 * @return a map with: one sub-map per {@code jvm.gc.pause} timer (count, max,
 *         totalTime, mean in ms, plus one entry per percentile), flat entries
 *         for other {@code jvm.gc} gauges/counters keyed by meter name, and
 *         the aggregated {@code classesLoaded}/{@code classesUnloaded} totals.
 */
private Map<String, Object> garbageCollectorMetrics() {
    Map<String, Object> resultsGarbageCollector = new HashMap<>();

    // GC pause timers: one stats sub-map per timer name.
    Collection<Timer> timers = Search.in(this.meterRegistry).name(s -> s.contains("jvm.gc.pause")).timers();
    timers.forEach(timer -> {
        String key = timer.getId().getName();
        Map<String, Number> gcPauseResults = new HashMap<>();
        gcPauseResults.put("count", timer.count());
        gcPauseResults.put("max", timer.max(TimeUnit.MILLISECONDS));
        gcPauseResults.put("totalTime", timer.totalTime(TimeUnit.MILLISECONDS));
        gcPauseResults.put("mean", timer.mean(TimeUnit.MILLISECONDS));
        for (ValueAtPercentile percentile : timer.takeSnapshot().percentileValues()) {
            gcPauseResults.put(String.valueOf(percentile.percentile()), percentile.value(TimeUnit.MILLISECONDS));
        }
        // putIfAbsent keeps the first timer's stats if two timers share a name.
        resultsGarbageCollector.putIfAbsent(key, gcPauseResults);
    });

    // Other jvm.gc meters (excluding the pause timers) are reported flat, keyed by meter name.
    Collection<Gauge> gauges = Search.in(this.meterRegistry)
            .name(s -> s.contains("jvm.gc") && !s.contains("jvm.gc.pause")).gauges();
    gauges.forEach(gauge -> resultsGarbageCollector.put(gauge.getId().getName(), gauge.value()));
    Collection<Counter> counters = Search.in(this.meterRegistry)
            .name(s -> s.contains("jvm.gc") && !s.contains("jvm.gc.pause")).counters();
    counters.forEach(counter -> resultsGarbageCollector.put(counter.getId().getName(), counter.count()));

    // Class-loading totals, summed across all matching meters
    // (primitive streams replace the boxed reduce((x, y) -> x + y).orElse(0d) idiom;
    // sum() of an empty stream is likewise 0.0).
    double classesLoaded = Search.in(this.meterRegistry).name(s -> s.contains("jvm.classes.loaded")).gauges()
            .stream().mapToDouble(Gauge::value).sum();
    resultsGarbageCollector.put("classesLoaded", classesLoaded);
    double classesUnloaded = Search.in(this.meterRegistry).name(s -> s.contains("jvm.classes.unloaded"))
            .functionCounters().stream().mapToDouble(FunctionCounter::count).sum();
    resultsGarbageCollector.put("classesUnloaded", classesUnloaded);

    return resultsGarbageCollector;
}
/**
 * Creates the Stackdriver metric descriptor for the given meter id once per
 * meter name; subsequent calls for an already-verified name are no-ops.
 *
 * @param client    the Stackdriver client used to create the descriptor
 * @param id        the meter id the descriptor is derived from
 * @param valueType the Stackdriver value type of the time series
 * @param statistic optional statistic suffix used when deriving the metric type
 */
private void createMetricDescriptorIfNecessary(MetricServiceClient client, Meter.Id id,
        MetricDescriptor.ValueType valueType, @Nullable String statistic) {
    // Already created (or confirmed) for this meter name — nothing to do.
    if (verifiedDescriptors.contains(id.getName())) {
        return;
    }

    MetricDescriptor descriptor = MetricDescriptor.newBuilder()
            .setType(metricType(id, statistic))
            .setDescription(id.getDescription() == null ? "" : id.getDescription())
            .setMetricKind(MetricDescriptor.MetricKind.GAUGE)
            .setValueType(valueType)
            .build();

    ProjectName name = ProjectName.of(config.projectId());
    CreateMetricDescriptorRequest request = CreateMetricDescriptorRequest.newBuilder()
            .setName(name.toString())
            .setMetricDescriptor(descriptor)
            .build();

    if (logger.isTraceEnabled()) {
        logger.trace("creating metric descriptor:\n{}", request);
    }

    try {
        client.createMetricDescriptor(request);
        // Only mark as verified after a successful create, so failures retry.
        verifiedDescriptors.add(id.getName());
    } catch (ApiException e) {
        // Fix: the previous code logged e.getCause().getMessage(), which throws
        // NPE when the ApiException has no cause and loses the stack trace.
        // Pass the exception itself so SLF4J records the full chain.
        logger.warn("failed to create metric descriptor in stackdriver for meter " + id, e);
    }
}
/**
 * Collects HikariCP connection-pool metrics, grouped by the last segment of
 * each meter name (e.g. "hikaricp.connections.acquire" -> "acquire").
 *
 * @return per-segment sub-maps with timer stats (count/max/totalTime/mean in
 *         ms plus percentiles) and gauge values.
 */
private Map<String, Map<String, Number>> databaseMetrics() {
    Map<String, Map<String, Number>> resultsDatabase = new HashMap<>();

    // Hikari timers: count/max/totalTime/mean plus one entry per percentile.
    for (Timer timer : Search.in(this.meterRegistry).name(s -> s.contains("hikari")).timers()) {
        String fullName = timer.getId().getName();
        String key = fullName.substring(fullName.lastIndexOf('.') + 1);
        Map<String, Number> stats = resultsDatabase.computeIfAbsent(key, k -> new HashMap<>());
        stats.put("count", timer.count());
        stats.put("max", timer.max(TimeUnit.MILLISECONDS));
        stats.put("totalTime", timer.totalTime(TimeUnit.MILLISECONDS));
        stats.put("mean", timer.mean(TimeUnit.MILLISECONDS));
        for (ValueAtPercentile percentile : timer.takeSnapshot().percentileValues()) {
            stats.put(String.valueOf(percentile.percentile()), percentile.value(TimeUnit.MILLISECONDS));
        }
    }

    // Hikari gauges: current value keyed under "value".
    for (Gauge gauge : Search.in(this.meterRegistry).name(s -> s.contains("hikari")).gauges()) {
        String fullName = gauge.getId().getName();
        String key = fullName.substring(fullName.lastIndexOf('.') + 1);
        resultsDatabase.computeIfAbsent(key, k -> new HashMap<>()).put("value", gauge.value());
    }

    return resultsDatabase;
}
/**
 * Aggregates {@code http.server.requests} timers per URI and HTTP method.
 *
 * @return a map keyed by request URI; each value maps a method (GET/POST/PUT/
 *         DELETE) that actually saw traffic to its count/max/mean stats (ms).
 */
private Map<String, Map> serviceMetrics() {
    Collection<String> crudOperation = Arrays.asList("GET", "POST", "PUT", "DELETE");

    // Collect the distinct URIs seen on the request timers. Fix: skip timers
    // without a "uri" tag — getTag returns null there, and the null previously
    // leaked into the tags(...) query below.
    Set<String> uris = new HashSet<>();
    for (Timer timer : this.meterRegistry.find("http.server.requests").timers()) {
        String uri = timer.getId().getTag("uri");
        if (uri != null) {
            uris.add(uri);
        }
    }

    Map<String, Map> resultsHttpPerUri = new HashMap<>();
    uris.forEach(uri -> {
        Map<String, Map> resultsPerUri = new HashMap<>();
        crudOperation.forEach(operation -> {
            Collection<Timer> httpTimers = this.meterRegistry.find("http.server.requests")
                    .tags("uri", uri, "method", operation).timers();
            // Primitive streams replace the boxed reduce(...) idiom; identical totals.
            long count = httpTimers.stream().mapToLong(Timer::count).sum();
            // Only report method/URI combinations that actually saw requests.
            if (count != 0) {
                double max = httpTimers.stream().mapToDouble(t -> t.max(TimeUnit.MILLISECONDS)).max().orElse(0);
                double totalTime = httpTimers.stream().mapToDouble(t -> t.totalTime(TimeUnit.MILLISECONDS)).sum();
                Map<String, Number> resultsPerUriPerCrudOperation = new HashMap<>();
                resultsPerUriPerCrudOperation.put("count", count);
                resultsPerUriPerCrudOperation.put("max", max);
                resultsPerUriPerCrudOperation.put("mean", totalTime / count);
                resultsPerUri.put(operation, resultsPerUriPerCrudOperation);
            }
        });
        resultsHttpPerUri.put(uri, resultsPerUri);
    });
    return resultsHttpPerUri;
}
/**
 * Aggregates {@code http.server.requests} timers per HTTP status code plus an
 * overall request count.
 *
 * @return a map with "percode" (status code -> count/max/mean stats in ms) and
 *         "all" (total request count).
 */
private Map<String, Map> httpRequestsMetrics() {
    // Distinct status codes observed on the request timers.
    Set<String> statusCode = new HashSet<>();
    for (Timer timer : this.meterRegistry.find("http.server.requests").timers()) {
        statusCode.add(timer.getId().getTag("status"));
    }

    Map<String, Map<String, Number>> resultsHttpPerCode = new HashMap<>();
    for (String code : statusCode) {
        Collection<Timer> codeTimers = this.meterRegistry.find("http.server.requests").tag("status", code).timers();
        long count = codeTimers.stream().map(Timer::count).reduce((x, y) -> x + y).orElse(0L);
        double max = codeTimers.stream().map(t -> t.max(TimeUnit.MILLISECONDS)).reduce((x, y) -> x > y ? x : y).orElse((double) 0);
        double totalTime = codeTimers.stream().map(t -> t.totalTime(TimeUnit.MILLISECONDS)).reduce((x, y) -> (x + y)).orElse((double) 0);
        Map<String, Number> perCode = new HashMap<>();
        perCode.put("count", count);
        perCode.put("max", max);
        // Guard against division by zero for codes with no recorded requests.
        perCode.put("mean", count != 0 ? totalTime / count : 0);
        resultsHttpPerCode.put(code, perCode);
    }

    Map<String, Map> resultsHttp = new HashMap<>();
    resultsHttp.put("percode", resultsHttpPerCode);

    // Overall request count across every status code.
    long countAllrequests = this.meterRegistry.find("http.server.requests").timers()
            .stream().map(Timer::count).reduce((x, y) -> x + y).orElse(0L);
    Map<String, Number> resultsHTTPAll = new HashMap<>();
    resultsHTTPAll.put("count", countAllrequests);
    resultsHttp.put("all", resultsHTTPAll);
    return resultsHttp;
}
/**
 * Converts a Micrometer meter into its protobuf representation, copying the
 * id (name, tags, type, optional description/base unit) and every measurement.
 */
private Meter convert(io.micrometer.core.instrument.Meter meter) {
    io.micrometer.core.instrument.Meter.Id id = meter.getId();

    MeterId.Builder idBuilder = MeterId.newBuilder()
            .setName(id.getName())
            .setType(convert(id.getType()));
    // Tags are copied in iteration order so the repeated proto field matches.
    id.getTags().forEach(tag ->
            idBuilder.addTag(MeterTag.newBuilder().setKey(tag.getKey()).setValue(tag.getValue())));
    // Description and base unit are optional on the Micrometer id.
    String description = id.getDescription();
    if (description != null) {
        idBuilder.setDescription(description);
    }
    String baseUnit = id.getBaseUnit();
    if (baseUnit != null) {
        idBuilder.setBaseUnit(baseUnit);
    }

    Meter.Builder meterBuilder = Meter.newBuilder().setId(idBuilder);
    for (Measurement measurement : meter.measure()) {
        meterBuilder.addMeasure(MeterMeasurement.newBuilder()
                .setValue(measurement.getValue())
                .setStatistic(convert(measurement.getStatistic())));
    }
    return meterBuilder.build();
}
/**
 * Collects JVM memory gauges grouped by memory-pool id (the "id" tag).
 *
 * @return per-pool sub-maps holding "used", "max" and "committed" values.
 */
private Map<String, Map<String, Number>> jvmMemoryMetrics() {
    Map<String, Map<String, Number>> resultsJvm = new HashMap<>();

    Collection<Gauge> gauges = Search.in(this.meterRegistry).name(s -> s.contains("jvm.memory.used")).gauges();
    gauges.forEach(gauge -> {
        String key = gauge.getId().getTag("id");
        resultsJvm.computeIfAbsent(key, k -> new HashMap<>()).put("used", gauge.value());
    });

    // Fix: the "max" and "committed" loops previously called
    // resultsJvm.get(key).put(...), which throws NPE when a pool reports
    // max/committed but no matching "used" gauge was found above.
    gauges = Search.in(this.meterRegistry).name(s -> s.contains("jvm.memory.max")).gauges();
    gauges.forEach(gauge -> {
        String key = gauge.getId().getTag("id");
        resultsJvm.computeIfAbsent(key, k -> new HashMap<>()).put("max", gauge.value());
    });

    gauges = Search.in(this.meterRegistry).name(s -> s.contains("jvm.memory.committed")).gauges();
    gauges.forEach(gauge -> {
        String key = gauge.getId().getTag("id");
        resultsJvm.computeIfAbsent(key, k -> new HashMap<>()).put("committed", gauge.value());
    });

    return resultsJvm;
}
/**
 * Collects cache metrics grouped by the cache "name" tag.
 *
 * @return per-cache sub-maps keyed by meter name (counters optionally suffixed
 *         with their "result" tag) holding counter/gauge values.
 */
private Map<String, Map<String, Number>> cacheMetrics() {
    Map<String, Map<String, Number>> resultsCache = new HashMap<>();

    // Non-Hibernate cache counters; the entry key is the meter name, with the
    // "result" tag appended when present (e.g. ".hit"/".miss").
    for (FunctionCounter counter : Search.in(this.meterRegistry)
            .name(s -> s.contains("cache") && !s.contains("hibernate")).functionCounters()) {
        String name = counter.getId().getTag("name");
        StringBuilder key = new StringBuilder(counter.getId().getName());
        String result = counter.getId().getTag("result");
        if (result != null) {
            key.append('.').append(result);
        }
        resultsCache.computeIfAbsent(name, n -> new HashMap<>()).put(key.toString(), counter.count());
    }

    // Cache gauges keyed by plain meter name under the same grouping.
    for (Gauge gauge : Search.in(this.meterRegistry).name(s -> s.contains("cache")).gauges()) {
        String name = gauge.getId().getTag("name");
        resultsCache.computeIfAbsent(name, n -> new HashMap<>()).put(gauge.getId().getName(), gauge.value());
    }

    return resultsCache;
}
/**
 * Builds a dotted name from the id's "group", "service" and "method" tag
 * values (in that order) followed by the meter name; returns "" when none of
 * the three tags is present.
 *
 * NOTE(review): when "group" is absent but "service"/"method" are present, the
 * result keeps a leading '.' — that quirk of the original is preserved here on
 * purpose; confirm it is intended.
 */
String generatePrometheusFriendlyName(Meter.Id id) {
    StringBuilder name = new StringBuilder();
    findTagByKey(id, "group").ifPresent(tag -> name.append(tag.getValue()));
    findTagByKey(id, "service").ifPresent(tag -> name.append('.').append(tag.getValue()));
    findTagByKey(id, "method").ifPresent(tag -> name.append('.').append(tag.getValue()));
    if (name.length() == 0) {
        return "";
    }
    return name.append('.').append(id.getName()).toString();
}
// Creates the Spectator-backed distribution summary for this registry and
// registers derived percentile/SLA gauges for it. The HistogramGauges.register
// call has registry side effects, so the construction order here matters.
@SuppressWarnings("ConstantConditions")
@Override
protected io.micrometer.core.instrument.DistributionSummary newDistributionSummary(Meter.Id id,
        DistributionStatisticConfig distributionStatisticConfig, double scale) {
    com.netflix.spectator.api.DistributionSummary internalSummary;

    if (distributionStatisticConfig.isPercentileHistogram()) {
        // This doesn't report the normal count/totalTime/max stats, so we treat it as additive
        internalSummary = PercentileDistributionSummary.get(registry, spectatorId(id));
    } else {
        internalSummary = registry.distributionSummary(spectatorId(id));
    }

    // Wrap the Spectator summary in the Micrometer-facing adapter.
    SpectatorDistributionSummary summary = new SpectatorDistributionSummary(id, internalSummary,
            clock, distributionStatisticConfig, scale);

    // Expose percentile values as "<name>{percentile=...}" gauges and SLA
    // bucket counts as "<name>{sla=...}" gauges on this registry.
    HistogramGauges.register(summary, this,
            percentile -> id.getName(),
            percentile -> Tags.concat(id.getTagsAsIterable(), "percentile",
                    DoubleFormat.decimalOrNan(percentile.percentile())),
            ValueAtPercentile::value,
            bucket -> id.getName(),
            bucket -> Tags.concat(id.getTagsAsIterable(), "sla",
                    DoubleFormat.decimalOrWhole(bucket.bucket())));

    return summary;
}
@GetMapping("/metrics") public ObjectNode fetchMetricsFromMicrometer() { ObjectNode metrics = factory.objectNode(); // Iterate over the meters and get the Zipkin Custom meters for constructing the Metrics endpoint for (Meter meter : meterRegistry.getMeters()) { String name = meter.getId().getName(); if (!name.startsWith("zipkin_collector")) continue; String transport = meter.getId().getTag("transport"); if (transport == null) continue; switch (meter.getId().getType()) { case COUNTER: metrics.put("counter." + name + "." + transport, meterRegistry.get(name).counter().count()); continue; case GAUGE: metrics.put("gauge." + name + "." + transport, meterRegistry.get(name).gauge().value()); } } return metrics; }
/**
 * Gets the object to use for mapping a Micrometer meter name and tags
 * to a hierarchical Graphite-compliant meter name.
 *
 * @param defaultMapper The default mapper to use for non-Hono specific
 *                      meters.
 * @return The mapper.
 */
protected HierarchicalNameMapper legacyGraphiteFormatMapper(
        final GraphiteHierarchicalNameMapper defaultMapper) {
    return (id, convention) -> {
        // Meters without the Hono marker tag keep the default Graphite mapping.
        if (id.getTag(TAG_HONO) == null) {
            return defaultMapper.toHierarchicalName(id, convention);
        }
        // Hono meters: the first tag group is inserted before the meter name,
        // the second group is appended after it.
        final String[] prefixTags = {
                MetricsTags.TAG_HOST, TAG_METER_TYPE, TAG_HONO, MetricsTags.TAG_PROTOCOL };
        final String[] suffixTags = {
                MetricsTags.TAG_TYPE, MetricsTags.TAG_TENANT, TAG_SUB_NAME, TAG_TYPE_SUFFIX };
        return amendWithTags(id.getConventionName(convention), id, prefixTags, suffixTags);
    };
}
/**
 * Builds a SignalFx data point for one value of a meter: the value is encoded
 * as a double or long datum by runtime type, the metric name is derived from
 * the meter id (plus optional stat suffix) via the naming convention, and the
 * convention tags are attached as dimensions.
 */
private SignalFxProtocolBuffers.DataPoint.Builder addDatapoint(Meter meter,
        SignalFxProtocolBuffers.MetricType metricType, @Nullable String statSuffix, Number value) {
    // Encode the payload according to the value's runtime type.
    SignalFxProtocolBuffers.Datum.Builder datumBuilder = SignalFxProtocolBuffers.Datum.newBuilder();
    if (value instanceof Double) {
        datumBuilder.setDoubleValue((Double) value);
    } else {
        datumBuilder.setIntValue((Long) value);
    }
    SignalFxProtocolBuffers.Datum datum = datumBuilder.build();

    // Fold the optional stat suffix ("count", "max", ...) into the raw name
    // before applying the registry's naming convention.
    String rawName = statSuffix == null
            ? meter.getId().getName()
            : meter.getId().getName() + "." + statSuffix;
    String metricName = config().namingConvention()
            .name(rawName, meter.getId().getType(), meter.getId().getBaseUnit());

    SignalFxProtocolBuffers.DataPoint.Builder dataPointBuilder =
            SignalFxProtocolBuffers.DataPoint.newBuilder()
                    .setMetric(metricName)
                    .setMetricType(metricType)
                    .setValue(datum);
    for (Tag tag : getConventionTags(meter.getId())) {
        dataPointBuilder.addDimensions(SignalFxProtocolBuffers.Dimension.newBuilder()
                .setKey(tag.getKey())
                .setValue(tag.getValue())
                .build());
    }
    return dataPointBuilder;
}