/**
 * Get a {@link Timer} with the given name prefix and suffixes.
 *
 * @param prefix the given name prefix
 * @param suffixes the given name suffixes
 * @return a {@link Timer} with the given name prefix and suffixes
 */
public Timer getTimer(String prefix, String... suffixes) {
  return this.metricContext.timer(MetricRegistry.name(prefix, suffixes));
}
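// A minimal usage sketch, assuming the getTimer helper above is exposed by an
// instrumented class (here called "metrics"); the timer name parts and the timed
// work are illustrative assumptions, not taken from the source.
Timer copyTimer = metrics.getTimer("gobblin.example", "copy", "time");  // hypothetical name parts
try (Timer.Context context = copyTimer.time()) {
  // Timer.Context is Closeable, so the elapsed time is recorded when the block exits,
  // mirroring the try-with-resources pattern used throughout the snippets below.
  doWork();  // hypothetical method whose duration is being measured
}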
  @Override
  public Timer.Context apply(@Nonnull MetricContext input) {
    return input.timer(name).time();
  }
});
/**
 * Generates metrics for the instrumentation of this class.
 */
protected void regenerateMetrics() {
  if (isInstrumentationEnabled()) {
    this.inputMeter = Optional.of(this.metricContext.meter(MetricNames.ForkOperatorMetrics.RECORDS_IN_METER));
    this.outputForks = Optional.of(this.metricContext.meter(MetricNames.ForkOperatorMetrics.FORKS_OUT_METER));
    this.forkOperatorTimer = Optional.<Timer>of(this.metricContext.timer(MetricNames.ForkOperatorMetrics.FORK_TIMER));
  } else {
    this.inputMeter = Optional.absent();
    this.outputForks = Optional.absent();
    this.forkOperatorTimer = Optional.absent();
  }
}
  @Override
  public Collection<HiveSpec> call() throws Exception {
    try (Timer.Context context = metricContext.timer(HIVE_SPEC_COMPUTATION_TIMER).time()) {
      return policy.getHiveSpecs(new Path(path));
    }
  }
});
/**
 * Generates metrics for the instrumentation of this class.
 */
protected void regenerateMetrics() {
  if (isInstrumentationEnabled()) {
    this.readRecordsMeter = Optional.of(this.metricContext.meter(MetricNames.ExtractorMetrics.RECORDS_READ_METER));
    this.dataRecordExceptionsMeter = Optional.of(this.metricContext.meter(MetricNames.ExtractorMetrics.RECORDS_FAILED_METER));
    this.extractorTimer = Optional.<Timer>of(this.metricContext.timer(MetricNames.ExtractorMetrics.EXTRACT_TIMER));
  } else {
    this.readRecordsMeter = Optional.absent();
    this.dataRecordExceptionsMeter = Optional.absent();
    this.extractorTimer = Optional.absent();
  }
}
/**
 * Generates metrics for the instrumentation of this class.
 */
protected void regenerateMetrics() {
  if (isInstrumentationEnabled()) {
    this.recordsInMeter = Optional.of(this.metricContext.meter(MetricNames.ConverterMetrics.RECORDS_IN_METER));
    this.recordsOutMeter = Optional.of(this.metricContext.meter(MetricNames.ConverterMetrics.RECORDS_OUT_METER));
    this.recordsExceptionMeter = Optional.of(
        this.metricContext.meter(MetricNames.ConverterMetrics.RECORDS_FAILED_METER));
    this.converterTimer = Optional.<Timer>of(this.metricContext.timer(MetricNames.ConverterMetrics.CONVERT_TIMER));
  }
}
private void addSchemaProperties(Path path, HiveRegistrationUnit hiveUnit, Schema schema) throws IOException {
  Path schemaFile = new Path(path, this.schemaFileName);
  if (this.useSchemaFile) {
    hiveUnit.setSerDeProp(SCHEMA_URL, schemaFile.toString());
  } else {
    try (Timer.Context context = metricContext.timer(HIVE_SPEC_SCHEMA_WRITING_TIMER).time()) {
      addSchemaFromAvroFile(schema, schemaFile, hiveUnit);
    }
  }
}
@Override
public boolean existsTable(String dbName, String tableName) throws IOException {
  if (this.optimizedChecks && this.tableAndDbExistenceCache.getIfPresent(dbName + ":" + tableName) != null) {
    return true;
  }
  try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    try (Timer.Context context = this.metricContext.timer(TABLE_EXISTS).time()) {
      return client.get().tableExists(dbName, tableName);
    }
  } catch (TException e) {
    throw new IOException(String.format("Unable to check existence of table %s in db %s", tableName, dbName), e);
  }
}
/**
 * End the previous stage and record the time spent in that stage.
 */
public void endStage() {
  if (this.currentStage != null) {
    long time = System.currentTimeMillis() - this.currentStageStart;
    this.timings.add(new Stage(this.currentStage, time));
    if (reportAsMetrics && submitter.getMetricContext().isPresent()) {
      String timerName = submitter.getNamespace() + "." + name + "." + this.currentStage;
      submitter.getMetricContext().get().timer(timerName).update(time, TimeUnit.MILLISECONDS);
    }
  }
  this.currentStage = null;
}
@Override
public boolean existsPartition(String dbName, String tableName, List<Column> partitionKeys,
    List<String> partitionValues) throws IOException {
  try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    try (Timer.Context context = this.metricContext.timer(GET_HIVE_PARTITION).time()) {
      client.get().getPartition(dbName, tableName, partitionValues);
    }
    return true;
  } catch (NoSuchObjectException e) {
    return false;
  } catch (TException e) {
    throw new IOException(String.format("Unable to check existence of partition %s in table %s in db %s",
        partitionValues, tableName, dbName), e);
  }
}
@Override
public Optional<HiveTable> getTable(String dbName, String tableName) throws IOException {
  try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    Table hiveTable;
    try (Timer.Context context = this.metricContext.timer(GET_HIVE_TABLE).time()) {
      hiveTable = client.get().getTable(dbName, tableName);
    }
    return Optional.of(HiveMetaStoreUtils.getHiveTable(hiveTable));
  } catch (NoSuchObjectException e) {
    return Optional.<HiveTable>absent();
  } catch (TException e) {
    throw new IOException("Unable to get table " + tableName + " in db " + dbName, e);
  }
}
public ThrottleWriter(DataWriter<D> writer, State state) {
  Preconditions.checkNotNull(writer, "DataWriter is required.");
  Preconditions.checkNotNull(state, "State is required.");

  this.state = state;
  this.writer = writer;
  this.type = ThrottleType.valueOf(state.getProp(WRITER_THROTTLE_TYPE_KEY));

  // Compute the rate limit once and reuse it for both logging and the limiter.
  int rateLimit = computeRateLimit(state);
  LOG.info("Rate limit for each writer: " + rateLimit + " " + type);
  this.limiter = new RateBasedLimiter(rateLimit);

  if (GobblinMetrics.isEnabled(state)) {
    throttledTimer = Optional.<Timer>of(Instrumented.getMetricContext(state, getClass()).timer(WRITES_THROTTLED_TIMER));
  } else {
    throttledTimer = Optional.absent();
  }
}
@Override
public Optional<HivePartition> getPartition(String dbName, String tableName, List<Column> partitionKeys,
    List<String> partitionValues) throws IOException {
  try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    Partition hivePartition;
    try (Timer.Context context = this.metricContext.timer(GET_HIVE_PARTITION).time()) {
      hivePartition = client.get().getPartition(dbName, tableName, partitionValues);
    }
    return Optional.of(HiveMetaStoreUtils.getHivePartition(hivePartition));
  } catch (NoSuchObjectException e) {
    return Optional.<HivePartition>absent();
  } catch (TException e) {
    throw new IOException(
        "Unable to get partition " + partitionValues + " from table " + tableName + " in db " + dbName, e);
  }
}
@VisibleForTesting
UnsampledReport createUnsampledReports(UnsampledReport request) throws IOException {
  long startTimeInMillis = System.currentTimeMillis();
  try {
    UnsampledReport requestedReport = requestUnsampledReport(request);
    UnsampledReport createdReport = pollForCompletion(wuState, gaService, requestedReport);
    createdReport.setEndDate(requestedReport.getEndDate());
    return createdReport;
  } finally {
    long delta = System.currentTimeMillis() - startTimeInMillis;
    if (GobblinMetrics.isEnabled(wuState)) {
      Timer timer = Instrumented.getMetricContext(wuState, getClass()).timer(GA_UNSAMPLED_REPORT_CREATION_TIMER);
      Instrumented.updateTimer(Optional.of(timer), delta, TimeUnit.MILLISECONDS);
    }
  }
}
/** The user needs to provide Eventhub properties. */
public EventhubDataWriter(Properties properties) {
  PasswordManager manager = PasswordManager.getInstance(properties);
  namespaceName = properties.getProperty(BatchedEventhubDataWriter.EVH_NAMESPACE);
  eventHubName = properties.getProperty(BatchedEventhubDataWriter.EVH_HUBNAME);
  sasKeyName = properties.getProperty(BatchedEventhubDataWriter.EVH_SAS_KEYNAME);
  String encodedSasKey = properties.getProperty(BatchedEventhubDataWriter.EVH_SAS_KEYVALUE);
  sasKey = manager.readPassword(encodedSasKey);
  targetURI = "https://" + namespaceName + ".servicebus.windows.net/" + eventHubName + "/messages";
  httpclient = HttpClients.createDefault();

  metricContext = Instrumented.getMetricContext(new State(properties), EventhubDataWriter.class);
  recordsAttempted = this.metricContext.meter(EventhubMetricNames.EventhubDataWriterMetrics.RECORDS_ATTEMPTED_METER);
  recordsSuccess = this.metricContext.meter(EventhubMetricNames.EventhubDataWriterMetrics.RECORDS_SUCCESS_METER);
  recordsFailed = this.metricContext.meter(EventhubMetricNames.EventhubDataWriterMetrics.RECORDS_FAILED_METER);
  bytesWritten = this.metricContext.meter(EventhubMetricNames.EventhubDataWriterMetrics.BYTES_WRITTEN_METER);
  writeTimer = this.metricContext.timer(EventhubMetricNames.EventhubDataWriterMetrics.WRITE_TIMER);
}
public ThrottledHttpClient(SharedResourcesBroker<GobblinScopeTypes> broker, String limiterKey) {
  this.broker = broker;
  try {
    this.limiter = broker.getSharedResource(new SharedLimiterFactory<>(), new HttpLimiterKey(limiterKey));
    this.metricContext = broker.getSharedResource(new MetricContextFactory<>(), new MetricContextKey());
    this.sendTimer = this.metricContext.timer(limiterKey);
  } catch (NotConfiguredException e) {
    log.error("Limiter cannot be initialized due to exception " + ExceptionUtils.getFullStackTrace(e));
    throw new RuntimeException(e);
  }
}
/**
 * Generates metrics for the instrumentation of this class.
 */
protected void regenerateMetrics() {
  // Set up the metrics that are enabled regardless of instrumentation
  this.recordsIn = this.metricContext.meter(MetricNames.DataWriterMetrics.RECORDS_IN_METER);
  this.recordsAttempted = this.metricContext.meter(MetricNames.DataWriterMetrics.RECORDS_ATTEMPTED_METER);
  this.recordsSuccess = this.metricContext.meter(MetricNames.DataWriterMetrics.SUCCESSFUL_WRITES_METER);
  this.recordsFailed = this.metricContext.meter(MetricNames.DataWriterMetrics.FAILED_WRITES_METER);
  this.bytesWritten = this.metricContext.meter(MetricNames.DataWriterMetrics.BYTES_WRITTEN_METER);

  if (isInstrumentationEnabled()) {
    this.dataWriterTimer = Optional.<Timer>of(this.metricContext.timer(MetricNames.DataWriterMetrics.WRITE_TIMER));
  } else {
    this.dataWriterTimer = Optional.absent();
  }
}
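// A minimal sketch of how an Optional-wrapped timer such as dataWriterTimer above is
// typically updated, following the Instrumented.updateTimer pattern shown in the
// createUnsampledReports snippet; the surrounding write method and writeImpl are assumptions.
long startTimeInMillis = System.currentTimeMillis();
try {
  writeImpl(record);  // hypothetical actual write being measured
} finally {
  long delta = System.currentTimeMillis() - startTimeInMillis;
  // updateTimer takes the Optional directly, so the absent (uninstrumented) case needs no special handling here.
  Instrumented.updateTimer(this.dataWriterTimer, delta, TimeUnit.MILLISECONDS);
}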
@Override
protected void registerPath(HiveSpec spec) throws IOException {
  try (Timer.Context context = this.metricContext.timer(PATH_REGISTER_TIMER).time();
      AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    Table table = HiveMetaStoreUtils.getTable(spec.getTable());
    createDbIfNotExists(client.get(), table.getDbName());
    createOrAlterTable(client.get(), table, spec);

    Optional<HivePartition> partition = spec.getPartition();
    if (partition.isPresent()) {
      addOrAlterPartition(client.get(), table, partition.get());
    }
    HiveMetaStoreEventHelper.submitSuccessfulPathRegistration(eventSubmitter, spec);
  } catch (TException e) {
    HiveMetaStoreEventHelper.submitFailedPathRegistration(eventSubmitter, spec, e);
    throw new IOException(e);
  }
}
@Override
public void alterPartition(HiveTable table, HivePartition partition) throws IOException {
  try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    try (Timer.Context context = this.metricContext.timer(ALTER_PARTITION).time()) {
      client.get().alter_partition(table.getDbName(), table.getTableName(),
          getPartitionWithCreateTimeNow(HiveMetaStoreUtils.getPartition(partition)));
    }
    HiveMetaStoreEventHelper.submitSuccessfulPartitionAlter(eventSubmitter, table, partition);
  } catch (TException e) {
    HiveMetaStoreEventHelper.submitFailedPartitionAlter(eventSubmitter, table, partition, e);
    throw new IOException(String.format("Unable to alter partition %s in table %s in db %s", partition.getValues(),
        table.getTableName(), table.getDbName()), e);
  }
}
@Test
public void testMetrics() throws Exception {
  ThrottlingGuiceServletConfig guiceServletConfig = new ThrottlingGuiceServletConfig();
  guiceServletConfig.initialize(ConfigFactory.empty());
  Injector injector = guiceServletConfig.getInjector();

  LimiterServerResource limiterServer = injector.getInstance(LimiterServerResource.class);

  PermitRequest request = new PermitRequest();
  request.setPermits(10);
  request.setResource("myResource");

  limiterServer.getSync(new ComplexResourceKey<>(request, new EmptyRecord()));
  limiterServer.getSync(new ComplexResourceKey<>(request, new EmptyRecord()));
  limiterServer.getSync(new ComplexResourceKey<>(request, new EmptyRecord()));

  MetricContext metricContext = limiterServer.metricContext;
  Timer timer = metricContext.timer(LimiterServerResource.REQUEST_TIMER_NAME);
  Assert.assertEquals(timer.getCount(), 3);
}