/**
 * Runs every incoming metric through the configured transformer and hands
 * the transformed list to the downstream observer.
 *
 * @param metrics metrics to transform and forward
 */
@Override
public void update(List<Metric> metrics) {
  final List<Metric> transformed = new ArrayList<>(metrics.size());
  for (Metric metric : metrics) {
    transformed.add(transformer.apply(metric));
  }
  observer.update(transformed);
}
/**
 * Sleeps for the configured delay and then forwards the metrics to the
 * wrapped observer. If the sleep is interrupted the delay is cut short,
 * the metrics are still forwarded, and the thread's interrupt status is
 * restored so the caller can observe the interruption.
 *
 * @param metrics metrics to pass downstream after the delay
 */
public void updateImpl(List<Metric> metrics) {
  try {
    Thread.sleep(delay);
  } catch (InterruptedException e) {
    LOGGER.warn("sleep interrupted", e);
    // Restore the interrupt status rather than swallowing it, so an
    // enclosing executor/loop can see the interruption and shut down.
    Thread.currentThread().interrupt();
  }
  wrappedObserver.update(metrics);
}
}
/**
 * {@inheritDoc}
 *
 * <p>Polls metrics once and fans them out to every registered observer.
 * Failures are isolated per observer and per poll so one bad observer (or
 * a failing poller) cannot kill the scheduled task.
 */
@Override
public void run() {
  try {
    List<Metric> metrics = poller.poll(filter, reset);
    for (MetricObserver o : observers) {
      try {
        o.update(metrics);
      } catch (Throwable t) {
        // Parameterized logging: the message is only formatted when the
        // WARN level is enabled, avoiding eager string concatenation.
        LOGGER.warn("failed to send metrics to {}", o.getName(), t);
      }
    }
  } catch (Throwable t) {
    LOGGER.warn("failed to poll metrics", t);
  }
}
}
/**
 * Takes the next queued update and forwards it to the wrapped observer,
 * unless it is older than the configured expiration window, in which case
 * it is counted as expired and dropped.
 */
private void processUpdate() {
  TimestampedUpdate update;
  try {
    update = updateQueue.take();
    long cutoff = System.currentTimeMillis() - expireTime;
    if (update.getTimestamp() < cutoff) {
      // Too stale to be useful downstream; count it and drop it.
      expiredUpdateCount.increment();
      return;
    }
    wrappedObserver.update(update.getMetrics());
  } catch (InterruptedException ie) {
    // The interruption comes from take() on the queue (the original
    // message incorrectly said "adding to" the queue).
    LOGGER.warn("interrupted while taking from queue, update dropped");
    incrementFailedCount();
    // Restore the interrupt status so the worker loop can exit cleanly.
    Thread.currentThread().interrupt();
  } catch (Throwable t) {
    LOGGER.warn("update failed for downstream queue", t);
    incrementFailedCount();
  }
}
/**
 * {@inheritDoc}
 */
public void update(List<Metric> metrics) {
  Preconditions.checkNotNull(metrics, "metrics");
  LOGGER.debug("received {} metrics", metrics.size());
  final List<Metric> newMetrics = new ArrayList<>(metrics.size());
  for (Metric m : metrics) {
    if (isCounter(m)) {
      final MonitorConfig rateConfig = toRateConfig(m.getConfig());
      final CounterValue prev = cache.get(rateConfig);
      if (prev != null) {
        // Previously seen counter: emit the rate computed from the delta
        // since the last observation.
        final double rate = prev.computeRate(m);
        newMetrics.add(new Metric(rateConfig, m.getTimestamp(), rate));
      } else {
        // First sample for this counter: remember it so the next update
        // can compute a delta-based rate.
        CounterValue current = new CounterValue(m);
        cache.put(rateConfig, current);
        if (intervalMillis > 0L) {
          // An interval is configured, so estimate a rate for the first
          // sample using the full counter value over that interval
          // (otherwise the first sample produces no output).
          final double delta = m.getNumberValue().doubleValue();
          final double rate = current.computeRate(intervalMillis, delta);
          newMetrics.add(new Metric(rateConfig, m.getTimestamp(), rate));
        }
      }
    } else {
      // Non-counter metrics pass through unchanged.
      newMetrics.add(m);
    }
  }
  LOGGER.debug("writing {} metrics to downstream observer", newMetrics.size());
  observer.update(newMetrics);
}
@Test public void testResultingType() throws Exception { MemoryMetricObserver mmo = new MemoryMetricObserver("m", 1); MetricObserver transform = new CounterToRateMetricTransform(mmo, 120, TimeUnit.SECONDS); Map<String, String> metrics; // Make time look like the future to avoid expirations long baseTime = System.currentTimeMillis() + 100000L; // First sample transform.update(mkList(baseTime + 0, 0)); metrics = mkTypeMap(mmo.getObservations()); assertEquals(metrics.size(), 2); assertEquals(metrics.get("m3"), null); assertEquals(metrics.get("m2"), "GAUGE"); assertEquals(metrics.get("m1"), null); transform.update(mkList(baseTime + 5000, 5)); metrics = mkTypeMap(mmo.getObservations()); assertEquals(metrics.size(), 3); assertEquals(metrics.get("m3"), "RATE"); assertEquals(metrics.get("m2"), "GAUGE"); assertEquals(metrics.get("m1"), null); }
@Test
public void testUpdate() throws Exception {
  File dir = createTempDir();
  try {
    MetricObserver fmo = new FileMetricObserver("test", dir);
    // Three updates, spaced out so each lands in its own file.
    for (int i = 1; i <= 3; i++) {
      if (i > 1) {
        Thread.sleep(250);
      }
      fmo.update(mkList(i));
    }
    File[] files = dir.listFiles();
    assert files != null;
    assertEquals(files.length, 3);
    for (File f : files) {
      checkFile(f, false);
    }
  } finally {
    deleteRecursively(dir);
  }
}
@Test
public void testFirstSample() throws Exception {
  MemoryMetricObserver mmo = new MemoryMetricObserver("m", 1);
  MetricObserver transform = new CounterToRateMetricTransform(mmo, 120, 5, TimeUnit.SECONDS);
  Map<String, Double> metrics;

  // Make time look like the future to avoid expirations
  long baseTime = System.currentTimeMillis() + 100000L;

  // First sample: rate estimated from counter value 10 over the configured
  // 5 second interval (10 / 5 = 2.0). NOTE(review): the original comment
  // claimed "delta of 5 in 5 seconds", which describes the second sample.
  transform.update(mkList(baseTime + 0, 10));
  metrics = mkMap(mmo.getObservations());
  assertEquals(metrics.size(), 3);
  assertEquals(metrics.get("m3"), 2.0, 0.00001);

  // Second sample: delta of 5 in 5 seconds (15 - 10) / 5 = 1.0
  transform.update(mkList(baseTime + 5000, 15));
  metrics = mkMap(mmo.getObservations());
  assertEquals(metrics.size(), 3);
  assertEquals(metrics.get("m3"), 1.0, 0.00001);
}
}
/**
 * {@inheritDoc}
 */
@Override
public void update(List<Metric> metrics) {
  Preconditions.checkNotNull(metrics, "metrics");
  final List<Metric> newMetrics = new ArrayList<>(metrics.size());
  for (Metric m : metrics) {
    // Snap the timestamp back to the start of its step interval.
    long offset = m.getTimestamp() % stepMillis;
    long stepBoundary = m.getTimestamp() - offset;
    String dsType = getDataSourceType(m);
    if (isGauge(dsType) || isNormalized(dsType)) {
      // Gauges and already-normalized values keep their value; only the
      // timestamp is moved to the step boundary.
      Metric atStepBoundary = new Metric(m.getConfig(), stepBoundary, m.getValue());
      newMetrics.add(atStepBoundary); // gauges are not normalized
    } else if (isRate(dsType)) {
      // Rates are run through normalize(); a null result means there is
      // nothing to report for this step.
      Metric normalized = normalize(m, stepBoundary);
      if (normalized != null) {
        newMetrics.add(normalized);
      }
    } else if (!isInformational(dsType)) {
      // unknown type - use a safe fallback
      newMetrics.add(m); // we cannot normalize this
    }
    // Informational metrics are intentionally dropped.
  }
  observer.update(newMetrics);
}
@Test
public void testUpdateCompressed() throws Exception {
  File dir = createTempDir();
  try {
    MetricObserver fmo = new FileMetricObserver("test", dir, true);
    // Three updates, spaced out so each lands in its own (compressed) file.
    for (int i = 1; i <= 3; i++) {
      if (i > 1) {
        Thread.sleep(250);
      }
      fmo.update(mkList(i));
    }
    File[] files = dir.listFiles();
    assert files != null;
    assertEquals(files.length, 3);
    for (File f : files) {
      checkFile(f, true);
    }
  } finally {
    deleteRecursively(dir);
  }
}
}
/**
 * Feeds each input metric through a NormalizationTransform one at a time and
 * checks the latest observation against the expected time/value pair at the
 * same position.
 */
void assertMetrics(long step, long heartbeat, List<Metric> input, List<TimeVal> expected) {
  ManualClock clock = new ManualClock(0);
  MemoryMetricObserver mmo = new MemoryMetricObserver("m", 1);
  MetricObserver transform =
      new NormalizationTransform(mmo, step, heartbeat, TimeUnit.MILLISECONDS, clock);
  for (int i = 0; i < input.size(); i++) {
    transform.update(UnmodifiableList.of(input.get(i)));
    Metric result = mmo.getObservations().get(0).get(0);
    assertEquals(TimeVal.from(result), expected.get(i));
  }
}
// NOTE(review): this is the interior of a test method whose header is outside
// this view; code left byte-identical, comments only.

// First sample: presumably the counter has no prior value, so only two
// metrics appear — TODO confirm against the mkList/mkMap helpers.
transform.update(mkList(baseTime + 0, 0));
metrics = mkMap(mmo.getObservations());
assertEquals(metrics.size(), 2);

// Counter moves 0 -> 5 over 5 seconds; a third (rate) metric now appears.
transform.update(mkList(baseTime + 5000, 5));
metrics = mkMap(mmo.getObservations());
assertEquals(metrics.size), 3);
transform.update(mkList(baseTime + 10000, 20));
metrics = mkMap(mmo.getObservations());
assertEquals(metrics.size(), 3);
transform.update(mkList(baseTime + 15000, 20));
metrics = mkMap(mmo.getObservations());
assertEquals(metrics.size(), 3);
// Counter decreases (20 -> 19), e.g. after a reset; metric count unchanged.
transform.update(mkList(baseTime + 20000, 19));
metrics = mkMap(mmo.getObservations());
assertEquals(metrics.size(), 3);
/**
 * Applies the configured transformer to every metric in the list and
 * forwards the transformed metrics to the wrapped observer.
 */
@Override
public void update(List<Metric> metrics) {
  List<Metric> transformed = metrics.stream()
      .map(transformer::apply).collect(Collectors.toList());
  observer.update(transformed);
}
/**
 * {@inheritDoc}
 *
 * <p>Polls metrics once and fans them out to every registered observer.
 * Failures are isolated per observer and per poll so one bad observer (or
 * a failing poller) cannot kill the scheduled task.
 */
@Override
public void run() {
  try {
    List<Metric> metrics = poller.poll(filter, reset);
    for (MetricObserver o : observers) {
      try {
        o.update(metrics);
      } catch (Throwable t) {
        // Parameterized logging: the message is only formatted when the
        // WARN level is enabled, avoiding eager string concatenation.
        LOGGER.warn("failed to send metrics to {}", o.getName(), t);
      }
    }
  } catch (Throwable t) {
    LOGGER.warn("failed to poll metrics", t);
  }
}
}
/**
 * Takes the next queued update and forwards it to the wrapped observer,
 * unless it is older than the configured expiration window, in which case
 * it is counted as expired and dropped.
 */
private void processUpdate() {
  TimestampedUpdate update;
  try {
    update = updateQueue.take();
    long cutoff = System.currentTimeMillis() - expireTime;
    if (update.getTimestamp() < cutoff) {
      // Too stale to be useful downstream; count it and drop it.
      expiredUpdateCount.increment();
      return;
    }
    wrappedObserver.update(update.getMetrics());
  } catch (InterruptedException ie) {
    // The interruption comes from take() on the queue (the original
    // message incorrectly said "adding to" the queue).
    LOGGER.warn("interrupted while taking from queue, update dropped");
    incrementFailedCount();
    // Restore the interrupt status so the worker loop can exit cleanly.
    Thread.currentThread().interrupt();
  } catch (Throwable t) {
    LOGGER.warn("update failed for downstream queue", t);
    incrementFailedCount();
  }
}
/**
 * {@inheritDoc}
 *
 * <p>Converts counter metrics into rate metrics using the previously cached
 * sample; non-counter metrics are forwarded untouched.
 */
public void update(List<Metric> metrics) {
  Preconditions.checkNotNull(metrics, "metrics");
  LOGGER.debug("received {} metrics", metrics.size());
  final List<Metric> converted = new ArrayList<>(metrics.size());
  for (Metric metric : metrics) {
    if (!isCounter(metric)) {
      // Pass-through for anything that is not a counter.
      converted.add(metric);
      continue;
    }
    final MonitorConfig rateConfig = toRateConfig(metric.getConfig());
    final CounterValue lastSeen = cache.get(rateConfig);
    if (lastSeen != null) {
      // Rate from the delta since the last observation.
      final double rate = lastSeen.computeRate(metric);
      converted.add(new Metric(rateConfig, metric.getTimestamp(), rate));
    } else {
      // First observation of this counter: cache it for next time.
      final CounterValue firstSeen = new CounterValue(metric);
      cache.put(rateConfig, firstSeen);
      if (intervalMillis > 0L) {
        // With a configured interval, estimate a first-sample rate from
        // the full counter value over that interval.
        final double delta = metric.getNumberValue().doubleValue();
        final double rate = firstSeen.computeRate(intervalMillis, delta);
        converted.add(new Metric(rateConfig, metric.getTimestamp(), rate));
      }
    }
  }
  LOGGER.debug("writing {} metrics to downstream observer", converted.size());
  observer.update(converted);
}
/** * {@inheritDoc} */ @Override public void update(List<Metric> metrics) { Preconditions.checkNotNull(metrics, "metrics"); final List<Metric> newMetrics = new ArrayList<>(metrics.size()); for (Metric m : metrics) { long offset = m.getTimestamp() % stepMillis; long stepBoundary = m.getTimestamp() - offset; String dsType = getDataSourceType(m); if (isGauge(dsType) || isNormalized(dsType)) { Metric atStepBoundary = new Metric(m.getConfig(), stepBoundary, m.getValue()); newMetrics.add(atStepBoundary); // gauges are not normalized } else if (isRate(dsType)) { Metric normalized = normalize(m, stepBoundary); if (normalized != null) { newMetrics.add(normalized); } } else if (!isInformational(dsType)) { // unknown type - use a safe fallback newMetrics.add(m); // we cannot normalize this } } observer.update(newMetrics); }