/**
 * Constructs a new Micrometer metrics listener that adds metrics to the given registry with the
 * given list of tags.
 *
 * @param meterRegistry the registry to which to add metrics
 * @param tagKeysAndValues an optional list of tag keys/values to attach to metrics; must be an
 * even number of strings representing alternating key/value pairs — {@link Tags#of(String...)}
 * rejects an odd-length array
 */
public MicrometerApnsClientMetricsListener(final MeterRegistry meterRegistry, final String... tagKeysAndValues) {
    // Delegate to the Tags-based constructor; Tags.of pairs up the varargs as key/value tags.
    this(meterRegistry, Tags.of(tagKeysAndValues));
}
/**
 * Creates metrics for the given {@link DataSource}.
 *
 * @param dataSource the data source to instrument; must not be {@code null}
 * @param metadataProvider the provider used to resolve pool metadata; must not be {@code null}
 * @param name the value of the {@code name} tag attached to every metric; must not be {@code null}
 * @param tags additional tags to attach to every metric
 */
public DataSourcePoolMetrics(DataSource dataSource, DataSourcePoolMetadataProvider metadataProvider, String name, Iterable<Tag> tags) {
    Assert.notNull(dataSource, "DataSource must not be null");
    Assert.notNull(metadataProvider, "MetadataProvider must not be null");
    // Validate up front for a clear message; Tags.concat would otherwise fail later
    // on a null name with a far less helpful error. Consistent with the checks above.
    Assert.notNull(name, "Name must not be null");
    this.dataSource = dataSource;
    // Cache metadata lookups so each gauge read does not re-resolve pool metadata.
    this.metadataProvider = new CachingDataSourcePoolMetadataProvider(metadataProvider);
    this.tags = Tags.concat(tags, "name", name);
}
/**
 * Resolves the metric name and tags for this exchange (message headers override the
 * endpoint's defaults), delegates the actual metric work to {@code doProcess}, and
 * always strips the metric control headers from the message afterwards.
 */
@Override
public void process(Exchange exchange) {
    Message message = exchange.getIn();
    // Header-supplied name wins over the endpoint's simple-evaluated default.
    String endpointMetricsName = simple(exchange, getEndpoint().getMetricsName(), String.class);
    String metricsName = getStringHeader(message, HEADER_METRIC_NAME, endpointMetricsName);
    // Merge endpoint tags with header tags, evaluating every key and value as a
    // simple expression against the current exchange.
    Tags evaluatedTags = Tags.empty();
    for (Tag tag : Tags.concat(getEndpoint().getTags(), getTagHeader(message, HEADER_METRIC_TAGS, Tags.empty()))) {
        String key = simple(exchange, tag.getKey(), String.class);
        String value = simple(exchange, tag.getValue(), String.class);
        evaluatedTags = evaluatedTags.and(Tag.of(key, value));
    }
    Iterable<Tag> metricsTags = evaluatedTags
            .and(Tags.of(CAMEL_CONTEXT_TAG, getEndpoint().getCamelContext().getName()));
    try {
        doProcess(exchange, metricsName, metricsTags);
    } catch (Exception e) {
        exchange.setException(e);
    } finally {
        // Control headers must never leak downstream, even on failure.
        clearMetricsHeaders(message);
    }
}
/**
 * Extracts and removes the {@code tags} parameter and parses it into tags.
 * The expected format is a comma-separated list of {@code key=value} pairs,
 * with optional whitespace around the separators.
 *
 * @param parameters the endpoint parameters; the {@code tags} entry is consumed
 * @return the parsed tags, or {@link Tags#empty()} when no tags were supplied
 */
Iterable<Tag> getMetricsTag(Map<String, Object> parameters) {
    String rawTags = getAndRemoveParameter(parameters, "tags", String.class, "");
    if (rawTags == null || rawTags.isEmpty()) {
        return Tags.empty();
    }
    Tags parsed = Tags.empty();
    // Each comma-separated entry is a "key=value" pair; Tags.of pairs the split halves.
    for (String pair : rawTags.split("\\s*,\\s*")) {
        parsed = parsed.and(Tags.of(pair.split("\\s*=\\s*")));
    }
    return parsed;
}
/**
 * Finds the first {@link MeterBinder} for the given cache by asking each registered
 * {@link CacheMeterBinderProvider} that can handle the cache's type.
 *
 * @param cache the cache to instrument
 * @param tags base tags; cache-specific tags from {@code getAdditionalTags} are appended
 * @return the first non-null binder produced by a matching provider, or {@code null}
 * if no provider supplied one
 */
@SuppressWarnings({ "unchecked" })
private MeterBinder getMeterBinder(Cache cache, Tags tags) {
    Tags cacheTags = tags.and(getAdditionalTags(cache));
    // LambdaSafe filters the providers to those whose generic type matches this cache,
    // invoking each safely (ClassCastExceptions from mismatches are logged, not thrown).
    return LambdaSafe
            .callbacks(CacheMeterBinderProvider.class, this.binderProviders, cache)
            .withLogger(CacheMetricsRegistrar.class)
            .invokeAnd((binderProvider) -> binderProvider.getMeterBinder(cache, cacheTags))
            .filter(Objects::nonNull).findFirst().orElse(null);
}
/**
 * Registers free/total/usable disk-space gauges (in bytes) for every file store of the
 * default file system, each tagged with the store's path.
 *
 * @param registry the registry to bind the gauges to
 */
@Override
public void bindTo(MeterRegistry registry) {
    for (FileStore fileStore : FileSystems.getDefault().getFileStores()) {
        LOG.debug("Add gauge metric for {}", fileStore.name());
        // Tags.of("path", ...) is the direct form of Tags.concat(Tags.empty(), "path", ...).
        Iterable<Tag> tagsWithPath = Tags.of("path", fileStore.toString());
        // exceptionToNonWrapper adapts the throwing FileStore accessors into plain
        // double-valued functions for Gauge.builder.
        Gauge.builder("disk.free", fileStore, exceptionToNonWrapper(FileStore::getUnallocatedSpace))
                .tags(tagsWithPath)
                .description("Unallocated space for file store")
                .baseUnit("bytes")
                // Strong reference keeps the FileStore alive so the gauge never goes NaN
                // due to garbage collection of its state object.
                .strongReference(true)
                .register(registry);
        Gauge.builder("disk.total", fileStore, exceptionToNonWrapper(FileStore::getTotalSpace))
                .tags(tagsWithPath)
                .description("Total space for file store")
                .baseUnit("bytes")
                .strongReference(true)
                .register(registry);
        Gauge.builder("disk.usable", fileStore, exceptionToNonWrapper(FileStore::getUsableSpace))
                .tags(tagsWithPath)
                .description("Usable space for file store")
                .baseUnit("bytes")
                .strongReference(true)
                .register(registry);
    }
}
/**
 * Creates a module that reports in the given time unit, includes every metric
 * (the name filter accepts all names), and attaches no extra tags.
 *
 * @param timeUnit the unit used when reporting timed metrics
 */
public MicrometerModule(TimeUnit timeUnit) {
    this(timeUnit, name -> true, Tags.empty());
}
/**
 * Returns the latency holder for the given partition, lazily creating it and
 * registering a millisecond time gauge over it on first access.
 *
 * @param partition the Kafka partition number
 * @return the {@link AtomicLong} backing that partition's latency gauge
 */
private AtomicLong getPartitionLatencyHolder(int partition) {
    // computeIfAbsent guarantees the gauge is registered exactly once per partition.
    return partitionLatencies.computeIfAbsent(partition, key -> {
        AtomicLong holder = new AtomicLong();
        Tags partitionTags = roadStreamTags.and("partition", Integer.toString(key));
        registry.more().timeGauge(OFFRAMP + LATENCY, partitionTags, holder, MILLISECONDS, AtomicLong::doubleValue);
        return holder;
    });
}
/**
 * Supplies a {@link TomcatMetrics} binder for the configured manager, with no extra tags.
 */
@Override
public TomcatMetrics get() {
    return new TomcatMetrics(manager, Tags.empty());
}
}
/**
 * Returns a {@link MeterIdPrefixFunction} that returns a newly created {@link MeterIdPrefix}
 * which has the specified labels added.
 *
 * @param keyValues alternating tag key/value pairs; must not be {@code null}
 */
default MeterIdPrefixFunction withTags(String... keyValues) {
    requireNonNull(keyValues, "keyValues");
    // Pair up the varargs into tags, then delegate to the Tags-based overload.
    final Tags tags = Tags.of(keyValues);
    return withTags(tags);
}
/**
 * Times the given supplier under the offramp timer, tagged with the stream tags
 * plus the supplied timer tag, and returns the supplier's result.
 *
 * @param timerTag the tag identifying what is being timed
 * @param supplier the work to time
 * @return the value produced by {@code supplier}
 */
public <T> T record(TimerTag timerTag, Supplier<T> supplier) {
    final Tags timerTags = roadStreamTags.and(timerTag.tag);
    return registry.timer(OFFRAMP_TIMER, timerTags).record(supplier);
}
/**
 * Registers a message-count function counter for every configured message source.
 *
 * @param registry the registry to add the counters to
 */
private void addSourceMetrics(MeterRegistry registry) {
    for (String source : configurer.getSourceNames()) {
        MessageSourceMetrics sourceMetrics = configurer.getSourceMetrics(source);
        Iterable<Tag> tagsWithSource = Tags.concat(tags, "source", source);
        FunctionCounter.builder("spring.integration.source.messages", sourceMetrics, MessageSourceMetrics::getMessageCount)
                .tags(tagsWithSource)
                // Fixed copy-paste from the handler metrics: this counter tracks messages
                // received from a source, not handler calls.
                .description("The number of successful source calls")
                .register(registry);
    }
}
/**
 * Formats an event payload for the given meter id and attributes with no extra tags.
 * Convenience overload delegating to the Tags-accepting variant.
 */
private String event(Meter.Id id, Attribute... attributes) {
    return event(id, Tags.empty(), attributes);
}
/**
 * Benchmarks {@link Tags#of(String...)} with five key/value pairs under 16 threads.
 * Returns the result so JMH's implicit blackhole consumes it; a void benchmark that
 * discards the value lets the JIT dead-code-eliminate the call and measure nothing.
 */
@Threads(16)
@Benchmark
public Tags of() {
    return Tags.of("key", "value", "key2", "value2", "key3", "value3", "key4", "value4", "key5", "value5");
}
/**
 * Times the given runnable under the offramp timer, tagged with the stream tags
 * plus the supplied timer tag.
 *
 * @param timerTag the tag identifying what is being timed
 * @param runnable the work to time
 */
public void record(TimerTag timerTag, Runnable runnable) {
    final Tags timerTags = roadStreamTags.and(timerTag.tag);
    registry.timer(OFFRAMP_TIMER, timerTags).record(runnable);
}
/**
 * Creates a Spectator-backed distribution summary for the given id, choosing a
 * percentile-approximating summary when percentile histograms are requested, and
 * registers derived percentile/SLA gauges alongside it.
 *
 * @param id the meter id
 * @param distributionStatisticConfig percentile/SLA configuration for the summary
 * @param scale factor applied to recorded amounts
 * @return the wrapped summary
 */
@SuppressWarnings("ConstantConditions")
@Override
protected io.micrometer.core.instrument.DistributionSummary newDistributionSummary(Meter.Id id, DistributionStatisticConfig distributionStatisticConfig, double scale) {
    com.netflix.spectator.api.DistributionSummary internalSummary;
    if (distributionStatisticConfig.isPercentileHistogram()) {
        // This doesn't report the normal count/totalTime/max stats, so we treat it as additive
        internalSummary = PercentileDistributionSummary.get(registry, spectatorId(id));
    } else {
        internalSummary = registry.distributionSummary(spectatorId(id));
    }
    SpectatorDistributionSummary summary = new SpectatorDistributionSummary(id, internalSummary, clock, distributionStatisticConfig, scale);
    // Expose configured percentiles and SLA buckets as sibling gauges under the same
    // meter name, distinguished by "percentile"/"sla" tags.
    HistogramGauges.register(summary, this, percentile -> id.getName(), percentile -> Tags.concat(id.getTagsAsIterable(), "percentile", DoubleFormat.decimalOrNan(percentile.percentile())), ValueAtPercentile::value, bucket -> id.getName(), bucket -> Tags.concat(id.getTagsAsIterable(), "sla", DoubleFormat.decimalOrWhole(bucket.bucket())));
    return summary;
}
/**
 * Creates a scheduled thread pool for the given profile and wraps it so that task
 * timings are published to the meter registry under the profile's id.
 *
 * @param profile the thread-pool profile to build from
 * @param threadFactory the factory used to create the pool's threads
 * @return a metrics-instrumented scheduled executor service
 */
@Override
public ScheduledExecutorService newScheduledThreadPool(ThreadPoolProfile profile, ThreadFactory threadFactory) {
    final ScheduledExecutorService delegate = threadPoolFactory.newScheduledThreadPool(profile, threadFactory);
    final String metricsName = name(profile.getId());
    return new TimedScheduledExecutorService(meterRegistry, delegate, metricsName, Tags.empty());
}