/** Factory hook: produces a brand-new Counter with an initial count of zero. */
@Override
public Counter mkInstance() {
    final Counter counter = new Counter();
    return counter;
}
/** Supplies the fresh Counter instance backing a newly created metric. */
@Override
public Counter newMetric() {
    final Counter metric = new Counter();
    return metric;
}
/**
 * Creates the Counter instance for this metric type.
 *
 * @return a newly allocated Counter, starting at zero
 */
@Override
public Counter newMetric() {
    return new Counter();
}
/**
 * Initializes the per-window counter map: one zeroed Counter is created
 * for every configured window size (in seconds).
 */
public AsmCounter() {
    super();
    for (final int windowSize : windowSeconds) {
        counterMap.put(windowSize, new Counter());
    }
}
// Cache-loader callback: lazily creates a Counter for a previously unseen key
// and registers it with the metric registry before handing it back to the cache.
// NOTE(review): the trailing '}' closes an enclosing anonymous class whose
// declaration lies outside this view.
@Override public Counter load(String key) { Counter counter = new Counter(); metricRegistry.register(key, counter); return counter; } }
// Cache-loader callback: lazily creates and registry-registers a Counter the
// first time a key is requested, so lookups after the miss reuse one instance.
// NOTE(review): the trailing '});' closes an enclosing anonymous class and its
// surrounding call, both declared outside this view.
@Override public Counter load(String key) { Counter counter = new Counter(); metricRegistry.register(key, counter); return counter; } });
Counter c = new Counter() { public int count() { super.count(); return super.count(); } } c.count(); // now count 2
Counter c = new Counter() { public int count() { int lastCount = 0; for (int i = super.count(); --i >= 0; ) { lastCount = super.count(); } return lastCount; } } c.count(); // Now double count
@Test public void testCounterReporting() { final Counter counter = new Counter(); TreeMap<String, Counter> counters = new TreeMap<>(); counters.put("my_counter", counter); // Add the metrics objects to the internal "queues" by hand metrics2Reporter.setDropwizardCounters(counters); // Set some values counter.inc(5L); MetricsCollector collector = mock(MetricsCollector.class); MetricsRecordBuilder recordBuilder = mock(MetricsRecordBuilder.class); Mockito.when(collector.addRecord(recordName)).thenReturn(recordBuilder); metrics2Reporter.getMetrics(collector, true); verify(recordBuilder).addCounter(Interns.info("my_counter", ""), 5L); verifyRecordBuilderUnits(recordBuilder); // Should not be the same instance we gave before. Our map should have gotten swapped out. assertTrue("Should not be the same map instance after collection", counters != metrics2Reporter.getDropwizardCounters()); }
// Tracks the number of processed records; pre-seeded with 10.
Counter recordsProcessedCounter = new Counter();
recordsProcessedCounter.inc(10L); // was 10l — uppercase L suffix avoids confusion with digit 1
// Tracks the number of processed records (fully qualified to disambiguate
// from other Counter types in scope); pre-seeded with 10.
com.codahale.metrics.Counter recordsProcessedCounter = new com.codahale.metrics.Counter();
recordsProcessedCounter.inc(10L); // was 10l — uppercase L suffix avoids confusion with digit 1
/**
 * Reconciles the per-worker ideal-state topic-partition counters with the
 * supplied snapshot: counters for listed workers are adjusted to the new
 * value, and counters for workers absent from the snapshot are zeroed.
 *
 * Iterates entrySet() instead of keySet()+get() to avoid a redundant lookup
 * per worker (behavior unchanged).
 *
 * @param topicPartitionMapForIdealState worker name -> topic-partition count
 */
private synchronized void updatePerWorkerISMetrics(
    Map<String, Integer> topicPartitionMapForIdealState) {
  for (Map.Entry<String, Integer> entry : topicPartitionMapForIdealState.entrySet()) {
    String worker = entry.getKey();
    if (!_idealStatePerWorkerTopicPartitionCounter.containsKey(worker)) {
      Counter workCounter = new Counter();
      try {
        HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
            getIdealStatePerWorkMetricName(worker), workCounter);
      } catch (Exception e) {
        LOGGER.error("Error registering metrics!", e);
      }
      // Best-effort: keep the counter locally even if registration failed.
      _idealStatePerWorkerTopicPartitionCounter.put(worker, workCounter);
    }
    Counter counter = _idealStatePerWorkerTopicPartitionCounter.get(worker);
    // Counter only supports inc/dec, so "set to value" is done via a delta.
    counter.inc(entry.getValue() - counter.getCount());
  }
  // Zero out counters for workers no longer present in the ideal state.
  for (Map.Entry<String, Counter> tracked
      : _idealStatePerWorkerTopicPartitionCounter.entrySet()) {
    if (!topicPartitionMapForIdealState.containsKey(tracked.getKey())) {
      Counter counter = tracked.getValue();
      counter.dec(counter.getCount());
    }
  }
}
/**
 * Reconciles the per-worker external-view topic-partition counters with the
 * supplied snapshot: counters for listed workers are adjusted to the new
 * value, and counters for workers absent from the snapshot are zeroed.
 *
 * Iterates entrySet() instead of keySet()+get() to avoid a redundant lookup
 * per worker (behavior unchanged).
 *
 * @param topicPartitionMapForExternalView worker name -> topic-partition count
 */
private synchronized void updatePerWorkerEVMetrics(
    Map<String, Integer> topicPartitionMapForExternalView) {
  for (Map.Entry<String, Integer> entry : topicPartitionMapForExternalView.entrySet()) {
    String worker = entry.getKey();
    if (!_externalViewPerWorkerTopicPartitionCounter.containsKey(worker)) {
      Counter workCounter = new Counter();
      try {
        HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
            getExternalViewPerWorkMetricName(worker), workCounter);
      } catch (Exception e) {
        LOGGER.error("Error registering metrics!", e);
      }
      // Best-effort: keep the counter locally even if registration failed.
      _externalViewPerWorkerTopicPartitionCounter.put(worker, workCounter);
    }
    Counter counter = _externalViewPerWorkerTopicPartitionCounter.get(worker);
    // Counter only supports inc/dec, so "set to value" is done via a delta.
    counter.inc(entry.getValue() - counter.getCount());
  }
  // Zero out counters for workers no longer present in the external view.
  for (Map.Entry<String, Counter> tracked
      : _externalViewPerWorkerTopicPartitionCounter.entrySet()) {
    if (!topicPartitionMapForExternalView.containsKey(tracked.getKey())) {
      Counter counter = tracked.getValue();
      counter.dec(counter.getCount());
    }
  }
}
/**
 * Updates the topic-validation gauges: the three aggregate counters are set
 * (via inc-by-delta) to the supplied totals, a per-topic counter is lazily
 * created and registered for each newly mismatched topic, and per-topic
 * counters are set to the new mismatch count or zeroed when the topic is no
 * longer mismatched.
 *
 * Iterates entrySet() instead of keySet()+get() to avoid redundant lookups
 * (behavior unchanged).
 *
 * @param numMissingTopics               total topics missing downstream
 * @param numMismatchedTopics            total topics with mismatches
 * @param numMismatchedTopicPartitions   total mismatched topic partitions
 * @param misMatchedPartitionNumberTopics topic -> mismatched partition count
 */
private synchronized void updateMetrics(int numMissingTopics, int numMismatchedTopics,
    int numMismatchedTopicPartitions, Map<String, Integer> misMatchedPartitionNumberTopics) {
  // Counter only supports inc/dec, so "set to value" is done via a delta.
  _numMissingTopics.inc(numMissingTopics - _numMissingTopics.getCount());
  _numMismatchedTopics.inc(numMismatchedTopics - _numMismatchedTopics.getCount());
  _numMismatchedTopicPartitions
      .inc(numMismatchedTopicPartitions - _numMismatchedTopicPartitions.getCount());
  for (String topic : misMatchedPartitionNumberTopics.keySet()) {
    if (!_mismatchedTopicPartitionsCounter.containsKey(topic)) {
      Counter topicPartitionCounter = new Counter();
      try {
        HelixKafkaMirrorMakerMetricsReporter.get().getRegistry().register(
            getMismatchedTopicMetricName(topic), topicPartitionCounter);
      } catch (Exception e) {
        LOGGER.error("Error registering metrics!", e);
      }
      // Best-effort: keep the counter locally even if registration failed.
      _mismatchedTopicPartitionsCounter.put(topic, topicPartitionCounter);
    }
  }
  for (Map.Entry<String, Counter> tracked : _mismatchedTopicPartitionsCounter.entrySet()) {
    String topic = tracked.getKey();
    Counter counter = tracked.getValue();
    if (!misMatchedPartitionNumberTopics.containsKey(topic)) {
      counter.dec(counter.getCount()); // topic no longer mismatched: zero it
    } else {
      counter.inc(misMatchedPartitionNumberTopics.get(topic) - counter.getCount());
    }
  }
}
// Fetching the reporter again must return the same singleton instance as the
// first get(); identity comparison (==) is intentional here.
// NOTE(review): firstInitInstance is assigned earlier, outside this view.
// The trailing Counter/Meter/Timer locals set up metrics for subsequent checks.
Object secondInitInstance = HelixKafkaMirrorMakerMetricsReporter.get(); Assert.assertTrue(firstInitInstance == secondInitInstance); Counter testCounter0 = new Counter(); Meter testMeter0 = new Meter(); Timer testTimer0 = new Timer();
// Verifies the human-readable description generated for a codahale Counter:
// it must embed the metric name and the concrete metric class.
// NOTE(review): the final '}' closes an enclosing test class whose declaration
// lies outside this view.
@Test public void generateFullMetricDescription() { assertThat(DropWizardUtils.generateFullMetricDescription("Counter", new Counter())) .isEqualTo("Collected from codahale (metric=Counter, type=com.codahale.metrics.Counter)"); } }
/**
 * An entirely empty config ("{}") must parse to the shared MetricFilter.ALL
 * instance, which accepts every metric.
 */
@Test
public void testCompletelyEmpty() throws Exception {
    String emptyConfigJson = "{}";
    Config parsedConfig = ConfigFactory.parseString(emptyConfigJson);
    MetricFilter metricFilter = PatternMetricFilter.parse(new Configs(), parsedConfig);
    // Empty pattern config collapses to the canonical match-all filter.
    assertSame(metricFilter, MetricFilter.ALL);
    assertTrue(metricFilter.matches("foo", new Counter()));
}