/**
 * Publishes the current split backlog (bytes and message count) to the split gauges.
 * Negative readings mean the source could not determine the backlog, so they are
 * normalized to {@link UnboundedReader#BACKLOG_UNKNOWN} before being reported.
 */
private void reportBacklog() {
  final long backlogBytes = getSplitBacklogBytes();
  backlogBytesOfSplit.set(backlogBytes < 0 ? UnboundedReader.BACKLOG_UNKNOWN : backlogBytes);

  final long backlogMessages = getSplitBacklogMessageCount();
  backlogElementsOfSplit.set(
      backlogMessages < 0 ? UnboundedReader.BACKLOG_UNKNOWN : backlogMessages);
}
/**
 * Reports this split's backlog to the byte and element gauges.
 * Any negative value from the underlying reader is treated as "unknown" and
 * replaced with {@link UnboundedReader#BACKLOG_UNKNOWN}.
 */
private void reportBacklog() {
  long bytesBehind = getSplitBacklogBytes();
  if (bytesBehind < 0) {
    bytesBehind = UnboundedReader.BACKLOG_UNKNOWN;
  }
  long messagesBehind = getSplitBacklogMessageCount();
  if (messagesBehind < 0) {
    messagesBehind = UnboundedReader.BACKLOG_UNKNOWN;
  }
  backlogBytesOfSplit.set(bytesBehind);
  backlogElementsOfSplit.set(messagesBehind);
}
/**
 * Publishes final read metrics after the fetch loop finishes.
 *
 * <p>Logs the total row count and elapsed time, accumulates {@code writeElapsedMs},
 * flushes the rows not yet reported by the periodic counter (the remainder modulo
 * {@code countReportEvery}), and updates the throughput gauges.
 *
 * @param elapsedMs total wall-clock time of the read, in milliseconds
 */
public void exposeMetrics(long elapsedMs) {
  logger.info(
      String.format("jdbcavroio : Read %d rows, took %5.2f seconds", rowCount, elapsedMs / 1000.0));
  this.writeElapsedMs.inc(elapsedMs);
  if (rowCount <= 0) {
    // Nothing was read; rate gauges would divide by zero, so skip them.
    return;
  }
  this.recordCount.inc(this.rowCount % countReportEvery);
  this.msPerMillionRows.set(1000000L * elapsedMs / rowCount);
  if (elapsedMs != 0) {
    this.rowsPerMinute.set((60 * 1000L) * rowCount / elapsedMs);
  }
}
/**
 * Increment and report counters to Beam SDK and logs.
 * To avoid slowing down the writes, counts are reported every x 1000s of rows.
 * This exposes the job progress.
 */
public void incrementRecordCount() {
  this.rowCount++;
  if ((this.rowCount % countReportEvery) == 0) {
    this.recordCount.inc(countReportEvery);
    long elapsedMs = System.currentTimeMillis() - this.writeIterateStartTime;
    // Guard: if the first countReportEvery rows arrive within the same millisecond
    // as writeIterateStartTime, elapsedMs is 0 and the rate computations below
    // would throw ArithmeticException. Skip rate reporting until time has passed.
    if (elapsedMs > 0) {
      // rowCount > 0 here (it was just incremented), so this division is safe.
      long msPerMillionRows = 1000000L * elapsedMs / rowCount;
      long rowsPerMinute = (60 * 1000L) * rowCount / elapsedMs;
      this.msPerMillionRows.set(msPerMillionRows);
      this.rowsPerMinute.set(rowsPerMinute);
      if ((this.rowCount % logEvery) == 0) {
        logger.info(String.format(
            "jdbcavroio : Fetched # %08d rows at %08d rows per minute and %08d ms per M rows",
            rowCount, rowsPerMinute, msPerMillionRows));
      }
    }
  }
}
@Override
public void run() {
  try {
    // Snapshot the cumulative throttling time for this instance and publish it
    // in seconds via the shared gauge.
    long throttledNanos =
        ResourceLimiterStats.getInstance(instanceName).getCumulativeThrottlingTimeNanos();
    BufferedMutatorDoFn.cumulativeThrottlingSeconds.set(
        TimeUnit.NANOSECONDS.toSeconds(throttledNanos));
  } catch (Exception e) {
    // Best effort: a failure to export stats must never propagate out of the task.
    STATS_LOG.warn("Something bad happened in export stats", e);
  }
} };
@Override
public void run() {
  try {
    // Convert the instance's cumulative throttling time from nanoseconds to
    // seconds and publish it on the static gauge.
    final ResourceLimiterStats stats = ResourceLimiterStats.getInstance(instanceName);
    BufferedMutatorDoFn.cumulativeThrottlingSeconds.set(
        TimeUnit.NANOSECONDS.toSeconds(stats.getCumulativeThrottlingTimeNanos()));
  } catch (Exception e) {
    // Swallow deliberately: stats export is best-effort and must not kill the task.
    STATS_LOG.warn("Something bad happened in export stats", e);
  }
} };
@Override
public void set(long value) {
  // Silently drop the update when no metrics container is active on this thread;
  // the delegating gauge only records inside a metrics context.
  MetricsContainer container = MetricsEnvironment.getCurrentContainer();
  if (container == null) {
    return;
  }
  container.getGauge(name).set(value);
}
@Test public void testGauge() { FlinkMetricContainer.FlinkGauge flinkGauge = new FlinkMetricContainer.FlinkGauge(GaugeResult.empty()); when(metricGroup.gauge(eq("namespace.name"), anyObject())).thenReturn(flinkGauge); FlinkMetricContainer container = new FlinkMetricContainer(runtimeContext); MetricsContainer step = container.getMetricsContainer("step"); MetricName metricName = MetricName.named("namespace", "name"); Gauge gauge = step.getGauge(metricName); assertThat(flinkGauge.getValue(), is(GaugeResult.empty())); // first set will install the mocked gauge container.updateMetrics("step"); gauge.set(1); gauge.set(42); container.updateMetrics("step"); assertThat(flinkGauge.getValue().getValue(), is(42L)); }
@SuppressWarnings("unused")
@ProcessElement
public void processElement(ProcessContext c) {
  Integer element = c.element();

  // Exercise each metric type once per element.
  Distribution inputDistribution = Metrics.distribution(MetricsTest.class, "input");
  Gauge myGauge = Metrics.gauge(MetricsTest.class, "my-gauge");
  count.inc();
  inputDistribution.update(element);
  myGauge.set(12L);

  // Emit to both the main output and the additional tagged output.
  c.output(element);
  c.output(output2, element);
} })
final long readDurationMillis = metadata.getReadDurationMillis(); if (readDurationMillis > maxReadDuration) { gauge.set(readDurationMillis);