/**
 * Instantiates {@link com.codahale.metrics.Metric}s. Invoked exactly once from {@link #startUp()};
 * subclasses override this to register their own metrics in addition to the base set.
 */
protected void createMetrics() {
  // Register the messages-read counter under the well-known runtime metric name.
  messagesRead = metricContext.counter(RuntimeMetrics.GOBBLIN_KAFKA_HIGH_LEVEL_CONSUMER_MESSAGES_READ);
}
/**
 * Looks up (creating on first use) a {@link Counter} registered under the dotted name composed
 * from the given prefix and suffixes.
 *
 * @param prefix the metric name prefix
 * @param suffixes additional name components appended after the prefix
 * @return the {@link Counter} registered under the composed name
 */
public Counter getCounter(String prefix, String... suffixes) {
  String composedName = MetricRegistry.name(prefix, suffixes);
  return metricContext.counter(composedName);
}
/**
 * Creates a {@link Counter} for every value of the enumClass.
 * Use {@link #inc(Enum, long)} to increment the counter associated with an enum value.
 *
 * @param metricContext context in which the {@link Counter}s will be registered
 * @param enumClass enum whose constants define the counter names; one counter is created per value
 * @param instrumentedClass class whose name is prefixed onto each metric name
 */
public void initialize(final MetricContext metricContext, final Class<E> enumClass, final Class<?> instrumentedClass) {
  Builder<E, Counter> builder = ImmutableMap.builder();
  // Iterate the constants array directly; wrapping it in Arrays.asList added an allocation for no benefit.
  for (E e : enumClass.getEnumConstants()) {
    builder.put(e, metricContext.counter(MetricRegistry.name(instrumentedClass, e.name())));
  }
  counters = builder.build();
}
/**
 * Constructs a writer backed by the given object store client.
 *
 * @param client client used to execute object store operations
 * @param state job {@link State} forwarded to the parent writer
 */
public ObjectStoreWriter(ObjectStoreClient client, State state) {
  super(state);
  objectStoreClient = client;
  // Track how many operations this writer has executed.
  operationsExecuted = getMetricContext().counter(OPERATIONS_EXECUTED_COUNTER);
}
/**
 * Registers this monitor's metrics in addition to those created by the parent class.
 */
@Override
protected void createMetrics() {
  super.createMetrics();
  // Count SLA events rejected by this monitor.
  this.rejectedEvents = this.getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_SLAEVENT_REJECTEDEVENTS);
}
/**
 * Verifies that tags supplied to the reporter builder are rendered into the reported output.
 */
@Test
public void testTags() throws IOException {
  MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName()).build();
  Counter counter = metricContext.counter("com.linkedin.example.counter");
  Map<String, String> tags = new HashMap<>();
  tags.put("testKey", "testValue");
  tags.put("key2", "value2");
  OutputStreamReporter reporter = OutputStreamReporter.Factory.newBuilder().withTags(tags).outputTo(this.stream).build(new Properties());
  counter.inc();
  reporter.report();
  // Both builder-supplied tags must appear somewhere in the emitted text.
  Assert.assertTrue(this.stream.toString().contains("key2=value2"));
  Assert.assertTrue(this.stream.toString().contains("testKey=testValue"));
  String[] lines = this.stream.toString().split("\n");
  Map<String, Set<String>> expected = new HashMap<>();
  // NOTE(review): 'expected' is passed to expectMetrics() while still empty; the counter entry is
  // only added afterwards and never re-checked. Confirm whether the put below should precede this call.
  expectMetrics(expected, lines);
  Set<String> counterSubMetrics = new HashSet<>();
  counterSubMetrics.add("count");
  expected.put("com.linkedin.example.counter", counterSubMetrics);
  reporter.close();
}
/**
 * Builds a {@code MetricsUpdater} that registers the requested number of counters, meters,
 * histograms and timers on a metric context nested {@code depth} levels below the base context
 * (the root context when {@code baseContext} is null).
 */
@Builder
private MetricsUpdater(int depth, int counters, int meters, int histograms, int timers, MetricContext baseContext) {
  this.depth = depth;
  this.random = new Random();

  // Descend 'depth' levels by chaining randomly named child contexts.
  MetricContext ctx = (baseContext == null) ? RootMetricContext.get() : baseContext;
  for (int level = depth; level > 0; level--) {
    ctx = ctx.childBuilder(UUID.randomUUID().toString()).build();
  }
  this.context = ctx;

  this.counters = Lists.newArrayList();
  for (int idx = 0; idx < counters; idx++) {
    this.counters.add(this.context.counter("gobblin.performance.test.counter" + idx));
  }
  this.meters = Lists.newArrayList();
  for (int idx = 0; idx < meters; idx++) {
    this.meters.add(this.context.meter("gobblin.performance.test.meter" + idx));
  }
  this.histograms = Lists.newArrayList();
  for (int idx = 0; idx < histograms; idx++) {
    this.histograms.add(this.context.histogram("gobblin.performance.test.histogram" + idx));
  }
  this.timers = Lists.newArrayList();
  for (int idx = 0; idx < timers; idx++) {
    this.timers.add(this.context.timer("gobblin.performance.test.timer" + idx));
  }
}
/**
 * Verifies that tags attached to the {@link MetricContext} itself are rendered into the reported output.
 */
@Test
public void testTagsFromContext() throws IOException {
  Tag<?> tag1 = new Tag<>("tag1", "value1");
  MetricContext context = MetricContext.builder("context").addTag(tag1).build();
  Counter counter = context.counter("com.linkedin.example.counter");
  OutputStreamReporter reporter = OutputStreamReporter.Factory.newBuilder().outputTo(this.stream).build(new Properties());
  counter.inc();
  reporter.report();
  // The context tag must appear in the emitted text.
  Assert.assertTrue(this.stream.toString().contains("tag1=value1"));
  String[] lines = this.stream.toString().split("\n");
  Map<String, Set<String>> expected = new HashMap<>();
  // NOTE(review): 'expected' is passed to expectMetrics() while still empty; the counter entry is
  // only added afterwards and never re-checked. Confirm whether the put below should precede this call.
  expectMetrics(expected, lines);
  Set<String> counterSubMetrics = new HashSet<>();
  counterSubMetrics.add("count");
  expected.put("com.linkedin.example.counter", counterSubMetrics);
  reporter.close();
}
// Register one metric of each basic type on the context under fully-qualified example names.
Counter counter = metricContext.counter("com.linkedin.example.counter");
Meter meter = metricContext.meter("com.linkedin.example.meter");
Histogram histogram = metricContext.histogram("com.linkedin.example.histogram");
// Build a context named after this test, then register one metric of each basic type on it.
MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName() + ".testKafkaReporter").build();
Counter counter = metricContext.counter("com.linkedin.example.counter");
Meter meter = metricContext.meter("com.linkedin.example.meter");
Histogram histogram = metricContext.histogram("com.linkedin.example.histogram");
@Test public void testWithTags() throws IOException { try ( MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName() + ".testGraphiteReporter") .addTag(new Tag<String>("taskId", "task_testjob_123")) .addTag(new Tag<String>("forkBranchName", "fork_1")).build(); InfluxDBReporter influxDBReporter = InfluxDBReporter.Factory.newBuilder() .withInfluxDBPusher(influxDBPusher) .withMetricContextName(CONTEXT_NAME) .build(new Properties());) { Counter counter = metricContext.counter(MetricRegistry.name(METRIC_PREFIX, COUNTER)); counter.inc(5l); influxDBReporter.report(new TreeMap<String, Gauge>(), metricContext.getCounters(), new TreeMap<String, Histogram>(), new TreeMap<String, Meter>(), new TreeMap<String, Timer>(), metricContext.getTagMap()); //InfluxDB converts all values to float64 internally Assert.assertEquals(getMetricValue("task_testjob_123.fork_1." + METRIC_PREFIX, COUNTER, Measurements.COUNT), Float.toString(5f)); } }
/**
 * Verifies that a report produced from a tagged {@link MetricContext} carries the context's tags.
 */
@Test
public void kafkaReporterContextTest() throws IOException {
  Tag<?> contextTag = new Tag<>("tag1", "value1");
  MetricContext context = MetricContext.builder("context").addTag(contextTag).build();
  Counter counter = context.counter("com.linkedin.example.counter");
  MockKafkaPusher pusher = new MockKafkaPusher();
  KafkaReporter kafkaReporter = getBuilderFromContext(pusher).build("localhost:0000", "topic", new Properties());

  counter.inc();
  kafkaReporter.report(context);

  // Give the reporter a moment to flush; restore the interrupt flag if we are interrupted.
  try {
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }

  MetricReport metricReport = nextReport(pusher.messageIterator());
  Assert.assertEquals(3, metricReport.getTags().size());
  Assert.assertTrue(metricReport.getTags().containsKey(contextTag.getKey()));
  Assert.assertEquals(metricReport.getTags().get(contextTag.getKey()), contextTag.getValue().toString());
}
/**
 * Verifies that tags on the {@link MetricContext} are prepended to metric names reported to Graphite.
 */
@Test
public void testWithTags() throws IOException {
  try (
      MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName() + ".testGraphiteReporter")
          .addTag(new Tag<String>("taskId", "task_testjob_123"))
          .addTag(new Tag<String>("forkBranchName", "fork_1")).build();
      GraphiteReporter graphiteReporter = GraphiteReporter.Factory.newBuilder()
          .withGraphitePusher(graphitePusher)
          .withMetricContextName(CONTEXT_NAME)
          .build(new Properties());) {
    Counter counter = metricContext.counter(MetricRegistry.name(METRIC_PREFIX, COUNTER));
    // Uppercase 'L' literals: a lowercase 'l' is easily misread as the digit '1'.
    counter.inc(5L);
    graphiteReporter.report(new TreeMap<String, Gauge>(), metricContext.getCounters(),
        new TreeMap<String, Histogram>(), new TreeMap<String, Meter>(), new TreeMap<String, Timer>(),
        metricContext.getTagMap());
    Assert.assertEquals(getMetricValue("task_testjob_123.fork_1." + METRIC_PREFIX, COUNTER, Measurements.COUNT),
        Long.toString(5L));
  }
}
/**
 * Verifies that tags supplied directly to the reporter builder are carried on the emitted report.
 */
@Test
public void kafkaReporterTagsTest() throws IOException {
  MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName() + ".kafkaReporterTagsTest").build();
  Counter counter = metricContext.counter("com.linkedin.example.counter");
  Tag<?> stringTag = new Tag<>("tag1", "value1");
  Tag<?> intTag = new Tag<>("tag2", 2);
  MockKafkaPusher pusher = new MockKafkaPusher();
  KafkaReporter kafkaReporter =
      getBuilder(pusher).withTags(Lists.newArrayList(stringTag, intTag)).build("localhost:0000", "topic", new Properties());

  counter.inc();
  kafkaReporter.report(metricContext);

  // Give the reporter a moment to flush; restore the interrupt flag if we are interrupted.
  try {
    Thread.sleep(1000);
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
  }

  MetricReport metricReport = nextReport(pusher.messageIterator());
  Assert.assertEquals(4, metricReport.getTags().size());
  Assert.assertTrue(metricReport.getTags().containsKey(stringTag.getKey()));
  Assert.assertEquals(metricReport.getTags().get(stringTag.getKey()), stringTag.getValue().toString());
  Assert.assertTrue(metricReport.getTags().containsKey(intTag.getKey()));
  Assert.assertEquals(metricReport.getTags().get(intTag.getKey()), intTag.getValue().toString());
}
// Register one metric of each basic type under names composed from the shared test prefix.
Counter counter = metricContext.counter(MetricRegistry.name(METRIC_PREFIX, COUNTER));
Meter meter = metricContext.meter(MetricRegistry.name(METRIC_PREFIX, METER));
Histogram histogram = metricContext.histogram(MetricRegistry.name(METRIC_PREFIX, HISTOGRAM));
// Register one metric of each basic type under names composed from the shared test prefix.
Counter counter = metricContext.counter(MetricRegistry.name(METRIC_PREFIX, COUNTER));
Meter meter = metricContext.meter(MetricRegistry.name(METRIC_PREFIX, METER));
Histogram histogram = metricContext.histogram(MetricRegistry.name(METRIC_PREFIX, HISTOGRAM));
/**
 * Instantiates {@link com.codahale.metrics.Metric}s. Invoked exactly once from {@link #startUp()};
 * subclasses override this to register their own metrics in addition to the base set.
 */
protected void createMetrics() {
  // Register the messages-read counter under the well-known runtime metric name.
  messagesRead = metricContext.counter(RuntimeMetrics.GOBBLIN_KAFKA_HIGH_LEVEL_CONSUMER_MESSAGES_READ);
}
/**
 * Constructs a writer backed by the given object store client.
 *
 * @param client client used to execute object store operations
 * @param state job {@link State} forwarded to the parent writer
 */
public ObjectStoreWriter(ObjectStoreClient client, State state) {
  super(state);
  objectStoreClient = client;
  // Track how many operations this writer has executed.
  operationsExecuted = getMetricContext().counter(OPERATIONS_EXECUTED_COUNTER);
}
/**
 * Registers this monitor's metrics in addition to those created by the parent class.
 */
@Override
protected void createMetrics() {
  super.createMetrics();
  // Count SLA events rejected by this monitor.
  this.rejectedEvents = this.getMetricContext().counter(RuntimeMetrics.GOBBLIN_JOB_MONITOR_SLAEVENT_REJECTEDEVENTS);
}