/** Adds {@code delta} to the custom counter registered under {@code metric}. */
@Override
public void incrementCustom(String metric, long delta) {
  counters
      .getGroup(HadoopContextScanMetrics.CUSTOM_COUNTER_GROUP)
      .findCounter(metric)
      .increment(delta);
}
/**
 * Reads the Kafka-message counters produced by {@code EtlRecordReader} and delegates to the
 * threshold check for each skip category ("schema not found" and "other").
 *
 * @param counters the completed job's counters
 */
private void checkIfTooManySkippedMsg(Counters counters) {
  final double schemaNotFoundThreshold = Double.parseDouble(
      props.getProperty(ETL_MAX_PERCENT_SKIPPED_SCHEMANOTFOUND,
          ETL_MAX_PERCENT_SKIPPED_SCHEMANOTFOUND_DEFAULT));
  final double otherThreshold = Double.parseDouble(
      props.getProperty(ETL_MAX_PERCENT_SKIPPED_OTHER, ETL_MAX_PERCENT_SKIPPED_OTHER_DEFAULT));

  long skippedSchemaNotFound = 0;
  long skippedOther = 0;
  long decodeSuccessful = 0;
  final String kafkaMsgGroupName = EtlRecordReader.KAFKA_MSG.class.getName();
  for (String groupName : counters.getGroupNames()) {
    // Only the KAFKA_MSG counter group carries the decode/skip statistics.
    if (!groupName.equals(kafkaMsgGroupName)) {
      continue;
    }
    for (Counter counter : counters.getGroup(groupName)) {
      String displayName = counter.getDisplayName();
      if (displayName.equals(EtlRecordReader.KAFKA_MSG.DECODE_SUCCESSFUL.toString())) {
        decodeSuccessful = counter.getValue();
      } else if (displayName.equals(EtlRecordReader.KAFKA_MSG.SKIPPED_SCHEMA_NOT_FOUND.toString())) {
        skippedSchemaNotFound = counter.getValue();
      } else if (displayName.equals(EtlRecordReader.KAFKA_MSG.SKIPPED_OTHER.toString())) {
        skippedOther = counter.getValue();
      }
    }
  }

  checkIfTooManySkippedMsg(schemaNotFoundThreshold, skippedSchemaNotFound, decodeSuccessful,
      "schema not found");
  checkIfTooManySkippedMsg(otherThreshold, skippedOther, decodeSuccessful, "other");
}
/** Returns the current value of the custom counter registered under {@code metric}. */
@Override
public long getCustom(String metric) {
  return counters
      .getGroup(HadoopContextScanMetrics.CUSTOM_COUNTER_GROUP)
      .findCounter(metric)
      .getValue();
}
// NOTE(review): fragment of a larger counter-logging loop — the enclosing method and the
// inner loop body are outside this view, so only the visible statements are annotated:
// resolve the group, log its display name, then iterate its counters (body not shown).
CounterGroup group = counters.getGroup(groupName); log.info("Group: " + group.getDisplayName()); for (Counter counter : group) {
/**
 * Looks up the custom counter named {@code metric} and reports its current value.
 *
 * @param metric name of the counter inside the custom counter group
 * @return the counter's current value
 */
@Override
public long getCustom(String metric) {
  return counters.getGroup(HadoopContextScanMetrics.CUSTOM_COUNTER_GROUP)
      .findCounter(metric)
      .getValue();
}
/**
 * Bumps the custom counter named {@code metric} by {@code delta}.
 *
 * @param metric name of the counter inside the custom counter group
 * @param delta amount to add (may be negative)
 */
@Override
public void incrementCustom(String metric, long delta) {
  counters.getGroup(HadoopContextScanMetrics.CUSTOM_COUNTER_GROUP)
      .findCounter(metric)
      .increment(delta);
}
/** Increments the standard counter named after {@code metric} by one. */
@Override
public void increment(Metric metric) {
  counters
      .getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .increment(1L);
}
}
/**
 * Adds one to the standard counter whose name matches {@code metric.name()}.
 *
 * @param metric the scan metric to bump
 */
@Override
public void increment(Metric metric) {
  counters.getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .increment(1L);
}
}
/** Returns the current value of the standard counter named after {@code metric}. */
@Override
public long get(Metric metric) {
  return counters
      .getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .getValue();
}
/**
 * Reads the standard counter whose name matches {@code metric.name()}.
 *
 * @param metric the scan metric to read
 * @return the counter's current value
 */
@Override
public long get(Metric metric) {
  return counters.getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .getValue();
}
/** * Create a {@link org.apache.gobblin.metrics.GobblinMetrics} instance for this job run from the Hadoop counters. */ @VisibleForTesting void countersToMetrics(GobblinMetrics metrics) throws IOException { Optional<Counters> counters = Optional.fromNullable(this.job.getCounters()); if (counters.isPresent()) { // Write job-level counters CounterGroup jobCounterGroup = counters.get().getGroup(MetricGroup.JOB.name()); for (Counter jobCounter : jobCounterGroup) { metrics.getCounter(jobCounter.getName()).inc(jobCounter.getValue()); } // Write task-level counters CounterGroup taskCounterGroup = counters.get().getGroup(MetricGroup.TASK.name()); for (Counter taskCounter : taskCounterGroup) { metrics.getCounter(taskCounter.getName()).inc(taskCounter.getValue()); } } }
/**
 * On verification failure, logs the region location of every row flagged in the
 * "undef" (undefined) and "unref" (unreferred) counter groups so failures can be
 * correlated with specific regions.
 *
 * @param counters counters from the failed verification job
 * @throws IOException if the cluster connection or region lookup fails
 */
@Override
protected void handleFailure(Counters counters) throws IOException {
  try (Connection conn = ConnectionFactory.createConnection(job.getConfiguration())) {
    TableName tableName = TableName.valueOf(COMMON_TABLE_NAME);
    // Fix: acquire a single RegionLocator and close it deterministically. The previous
    // code called conn.getRegionLocator(...) on every lookup and never closed the
    // returned Closeable locator.
    try (RegionLocator locator = conn.getRegionLocator(tableName)) {
      for (Counter counter : counters.getGroup("undef")) {
        String keyString = counter.getName();
        HRegionLocation loc = locator.getRegionLocation(Bytes.toBytes(keyString), true);
        LOG.error("undefined row " + keyString + ", " + loc);
      }
      for (Counter counter : counters.getGroup("unref")) {
        String keyString = counter.getName();
        HRegionLocation loc = locator.getRegionLocation(Bytes.toBytes(keyString), true);
        LOG.error("unreferred row " + keyString + ", " + loc);
      }
    }
  }
}
}
/**
 * On verification failure, logs the region location of every row recorded in the
 * "undef" (undefined) and "unref" (unreferred) counter groups.
 *
 * @param counters counters from the failed verification job
 * @throws IOException if the cluster connection or region lookup fails
 */
protected void handleFailure(Counters counters) throws IOException {
  Configuration conf = job.getConfiguration();
  TableName tableName = getTableName(conf);
  try (Connection conn = ConnectionFactory.createConnection(conf);
      RegionLocator rl = conn.getRegionLocator(tableName)) {
    logRowLocations(rl, counters, "undef", "undefined");
    logRowLocations(rl, counters, "unref", "unreferred");
  }
}

/** Logs one ERROR line per counter name in the given group, resolving its region. */
private void logRowLocations(RegionLocator rl, Counters counters, String group, String label)
    throws IOException {
  for (Counter counter : counters.getGroup(group)) {
    String keyString = counter.getName();
    HRegionLocation loc = rl.getRegionLocation(Bytes.toBytes(keyString), true);
    LOG.error(label + " row " + keyString + ", " + loc);
  }
}
}
// Sanity-check that the job actually read local file bytes.
// NOTE(review): the "FileSystemCounters" group name is the legacy (pre-YARN) spelling —
// TODO confirm it matches the Hadoop version this test runs against.
assertTrue(job.getCounters().getGroup("FileSystemCounters") .findCounter("FILE_BYTES_READ").getValue() > 0);
/**
 * Collects the names of all counters registered under the given counter group.
 *
 * @param counters the Hadoop counters to inspect
 * @param group the name of the counter group to read
 * @return a mutable set of the counter names found in that group
 */
@Override
protected Set<String> getCountersFor(Counters counters, String group) {
  Set<String> names = new HashSet<>();
  for (Counter counter : counters.getGroup(group)) {
    names.add(counter.getName());
  }
  return names;
}
/**
 * Copies the per-level entry counts produced by {@code job1}'s "Entries per level"
 * counter group into the configuration as {@code "Entries per level.level<l>"} longs,
 * for every level in the configured [min, max] range.
 *
 * @param job1 the completed job whose counters are read
 * @param conf the configuration to populate
 * @throws IOException if the job counters cannot be retrieved
 */
protected void setupEntriesPerLevel(final Job job1, final Configuration conf) throws IOException {
  // NOTE(review): job1.getCounters() is invoked on every iteration; if this is a remote
  // call it could be hoisted out of the loop — left in place pending confirmation of the
  // imports available in this file.
  for (int l = kdeCommandLineOptions.getMinLevel(); l <= kdeCommandLineOptions.getMaxLevel(); l++) {
    conf.setLong(
        "Entries per level.level" + l,
        job1.getCounters().getGroup("Entries per level").findCounter(
            // Fix: drop the redundant Long.valueOf(l) boxing — "level " + l produces
            // the identical string.
            "level " + l).getValue());
  }
}
/** Bumps the standard counter identified by {@code metric.name()} by one. */
@Override
public void increment(Metric metric) {
  counters
      .getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .increment(1L);
}
}
/**
 * Increments by one the standard counter whose name is {@code metric.name()}.
 *
 * @param metric the scan metric to bump
 */
@Override
public void increment(Metric metric) {
  counters.getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .increment(1L);
}
}
/** Adds 1 to the standard-group counter named after the given metric. */
@Override
public void increment(Metric metric) {
  counters
      .getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .increment(1L);
}
}
/**
 * Records one occurrence of {@code metric} in the standard counter group.
 *
 * @param metric the scan metric whose counter is incremented
 */
@Override
public void increment(Metric metric) {
  counters.getGroup(HadoopContextScanMetrics.STANDARD_COUNTER_GROUP)
      .findCounter(metric.name())
      .increment(1L);
}
}