private void sendEmailOnShutdown(Optional<String> report) {
  final String subject = String.format("Gobblin AWS cluster %s completed", this.clusterName);

  final StringBuilder messageBuilder =
      new StringBuilder("Gobblin AWS cluster was shutdown at: " + new Date());
  if (report.isPresent()) {
    messageBuilder.append(' ').append(report.get());
  }

  try {
    EmailUtils.sendEmail(ConfigUtils.configToState(this.config), subject, messageBuilder.toString());
  } catch (EmailException ee) {
    LOGGER.error("Failed to send email notification on shutdown", ee);
  }
}
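Every snippet in this collection funnels a TypeSafe Config into a Gobblin State through ConfigUtils.configToState. A minimal sketch of what that conversion buys, assuming the standard State accessors (the key name here is illustrative, not from the snippets):

Config config = ConfigFactory.parseString("metrics.enabled=true");
State state = ConfigUtils.configToState(config);
// Every Config entry is now reachable through State's property API
boolean metricsEnabled = state.getPropAsBoolean("metrics.enabled"); // true

This is why State-driven utilities such as EmailUtils and PasswordManager can be fed directly from Config-based components.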
private Optional<ContainerMetrics> buildContainerMetrics(Config config, Properties properties,
    String applicationName, String workerId) {
  if (GobblinMetrics.isEnabled(properties)) {
    return Optional.of(ContainerMetrics.get(ConfigUtils.configToState(config), applicationName, workerId));
  } else {
    return Optional.absent();
  }
}
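Because this returns a Guava Optional rather than null, callers must check presence before starting metric reporting. A hypothetical caller, mirroring the presence check the start() method further down uses (variable names are illustrative):

Optional<ContainerMetrics> containerMetrics =
    buildContainerMetrics(config, properties, applicationName, workerId);
if (containerMetrics.isPresent()) {
  containerMetrics.get()
      .startMetricReportingWithFileSuffix(ConfigUtils.configToState(config), workerId);
}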
public Avro2OrcStaleDatasetCleaner(String jobId, Properties props) throws IOException {
  super(jobId, log);
  props.setProperty(HiveDatasetFinder.HIVE_DATASET_CONFIG_PREFIX_KEY, HIVE_DATASET_CONFIG_AVRO_PREFIX);
  this.graceTimeInMillis = TimeUnit.DAYS.toMillis(Long.parseLong(props
      .getProperty(HIVE_PARTITION_DELETION_GRACE_TIME_IN_DAYS, DEFAULT_HIVE_PARTITION_DELETION_GRACE_TIME_IN_DAYS)));
  Config config = ConfigFactory.parseProperties(props);
  this.fs = FileSystem.newInstance(new Configuration());
  this.metricContext = Instrumented.getMetricContext(ConfigUtils.configToState(config), ValidationJob.class);
  this.eventSubmitter = new EventSubmitter.Builder(this.metricContext, EventConstants.CONVERSION_NAMESPACE).build();
  this.datasetFinder = new ConvertibleHiveDatasetFinder(this.fs, props, this.eventSubmitter);
}
this.metricContext = Instrumented.getMetricContext(ConfigUtils.configToState(config), ValidationJob.class);
this.eventSubmitter = new EventSubmitter.Builder(this.metricContext, EventConstants.CONVERSION_NAMESPACE).build();
this.updateProvider = UpdateProviderFactory.create(props);
public Orchestrator(Config config, Optional<TopologyCatalog> topologyCatalog, Optional<Logger> log,
    boolean instrumentationEnabled) {
  _log = log.isPresent() ? log.get() : LoggerFactory.getLogger(getClass());
  if (instrumentationEnabled) {
    this.metricContext = Instrumented.getMetricContext(ConfigUtils.configToState(config),
        IdentityFlowToJobSpecCompiler.class);
    this.flowOrchestrationSuccessFulMeter =
        Optional.of(this.metricContext.meter(ServiceMetricNames.FLOW_ORCHESTRATION_SUCCESSFUL_METER));
    this.flowOrchestrationFailedMeter =
        Optional.of(this.metricContext.meter(ServiceMetricNames.FLOW_ORCHESTRATION_FAILED_METER));
    this.flowOrchestrationTimer =
        Optional.<Timer>of(this.metricContext.timer(ServiceMetricNames.FLOW_ORCHESTRATION_TIMER));
  } else {
    this.metricContext = null;
    this.flowOrchestrationSuccessFulMeter = Optional.absent();
    this.flowOrchestrationFailedMeter = Optional.absent();
    this.flowOrchestrationTimer = Optional.absent();
  }

  this.aliasResolver = new ClassAliasResolver<>(SpecCompiler.class);
  this.topologyCatalog = topologyCatalog;
  try {
    String specCompilerClassName = ServiceConfigKeys.DEFAULT_GOBBLIN_SERVICE_FLOWCOMPILER_CLASS;
    if (config.hasPath(ServiceConfigKeys.GOBBLIN_SERVICE_FLOWCOMPILER_CLASS_KEY)) {
      specCompilerClassName = config.getString(ServiceConfigKeys.GOBBLIN_SERVICE_FLOWCOMPILER_CLASS_KEY);
    }
    _log.info("Using specCompiler class name/alias " + specCompilerClassName);

    this.specCompiler = (SpecCompiler) ConstructorUtils.invokeConstructor(
        Class.forName(this.aliasResolver.resolve(specCompilerClassName)), config);
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
}
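The compiler lookup at the end follows a common Gobblin pattern: resolve a short alias (or a fully qualified class name) with ClassAliasResolver, then reflectively invoke a constructor that takes the Config. A condensed sketch of just that pattern; the "identity" alias value is purely illustrative, not taken from the snippet:

ClassAliasResolver<SpecCompiler> resolver = new ClassAliasResolver<>(SpecCompiler.class);
// resolve() maps an alias to a fully qualified class name (a FQCN passes through unchanged)
SpecCompiler compiler = (SpecCompiler) ConstructorUtils.invokeConstructor(
    Class.forName(resolver.resolve("identity")), config);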
/**
 * Start this {@link GobblinTaskRunner} instance.
 */
public void start() {
  LOGGER.info(String.format("Starting %s in container %s", this.helixInstanceName, this.taskRunnerId));

  // Add a shutdown hook so the task scheduler gets properly shut down
  addShutdownHook();

  connectHelixManager();

  // Start metric reporting
  if (this.containerMetrics.isPresent()) {
    this.containerMetrics.get()
        .startMetricReportingWithFileSuffix(ConfigUtils.configToState(this.config), this.taskRunnerId);
  }

  this.serviceManager.startAsync();
  this.serviceManager.awaitStopped();
}
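The two Guava ServiceManager calls at the end start every registered service and then block the calling thread until they all stop. A hook like the one addShutdownHook() presumably installs could look like this sketch (purely illustrative, with an assumed five-second stop budget):

Runtime.getRuntime().addShutdownHook(new Thread(() -> {
  // Ask all managed services to stop, bounding how long we wait for them
  serviceManager.stopAsync();
  try {
    serviceManager.awaitStopped(5, TimeUnit.SECONDS);
  } catch (TimeoutException te) {
    LOGGER.error("Timed out waiting for the service manager to stop", te);
  }
}));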
public IdentityFlowToJobSpecCompiler(Config config, Optional<Logger> log, boolean instrumentationEnabled) {
  this.log = log.isPresent() ? log.get() : LoggerFactory.getLogger(getClass());
  if (instrumentationEnabled) {
    this.metricContext = Instrumented.getMetricContext(ConfigUtils.configToState(config),
        IdentityFlowToJobSpecCompiler.class);
    this.flowCompilationSuccessFulMeter =
        Optional.of(this.metricContext.meter(ServiceMetricNames.FLOW_COMPILATION_SUCCESSFUL_METER));
    this.flowCompilationFailedMeter =
        Optional.of(this.metricContext.meter(ServiceMetricNames.FLOW_COMPILATION_FAILED_METER));
@Override
public AWSCredentials getCredentials() {
  String accessKey = null;
  if (config.hasPath(GobblinAWSConfigurationKeys.SERVICE_ACCESS_KEY)) {
    accessKey = config.getString(GobblinAWSConfigurationKeys.SERVICE_ACCESS_KEY);
  }

  String secretKey = null;
  if (config.hasPath(GobblinAWSConfigurationKeys.SERVICE_SECRET_KEY)) {
    secretKey = PasswordManager.getInstance(ConfigUtils.configToState(config))
        .readPassword(config.getString(GobblinAWSConfigurationKeys.SERVICE_SECRET_KEY));
  }

  accessKey = StringUtils.trim(accessKey);
  secretKey = StringUtils.trim(secretKey);
  if (StringUtils.isNullOrEmpty(accessKey) || StringUtils.isNullOrEmpty(secretKey)) {
    throw new AmazonClientException(String.format("Unable to load AWS credentials from config (%s and %s)",
        GobblinAWSConfigurationKeys.SERVICE_ACCESS_KEY, GobblinAWSConfigurationKeys.SERVICE_SECRET_KEY));
  }

  return new BasicAWSCredentials(accessKey, secretKey);
}
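getCredentials() is the core method of the AWS SDK v1 AWSCredentialsProvider interface, so an instance of the enclosing class can be handed straight to any SDK client builder. A hypothetical wiring, with the provider class name assumed for illustration only:

// ConfigBasedCredentialsProvider is a placeholder name for the enclosing class
AWSCredentialsProvider provider = new ConfigBasedCredentialsProvider(config);
AmazonS3 s3 = AmazonS3ClientBuilder.standard()
    .withCredentials(provider)
    .build();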
    this.eventBus, this.stateStores.taskStateStore, outputTaskStateDir);

if (Task.getExecutionModel(ConfigUtils.configToState(jobConfig)).equals(ExecutionModel.STREAMING)) {
public KafkaDataWriter(KafkaProducer producer, Config config) {
  super(ConfigUtils.configToState(config));

  recordsProduced = getMetricContext().meter(KafkaWriterMetricNames.RECORDS_PRODUCED_METER);
  recordsWritten = getMetricContext().meter(KafkaWriterMetricNames.RECORDS_SUCCESS_METER);
  recordsFailed = getMetricContext().meter(KafkaWriterMetricNames.RECORDS_FAILED_METER);
  bytesWritten = new AtomicInteger(-1);

  this.topic = config.getString(KafkaWriterConfigurationKeys.KAFKA_TOPIC);
  this.commitTimeoutInNanos = ConfigUtils.getLong(config, KafkaWriterConfigurationKeys.COMMIT_TIMEOUT_MILLIS_CONFIG,
      KafkaWriterConfigurationKeys.COMMIT_TIMEOUT_MILLIS_DEFAULT) * MILLIS_TO_NANOS;
  this.commitStepWaitTimeMillis = ConfigUtils.getLong(config, KafkaWriterConfigurationKeys.COMMIT_STEP_WAIT_TIME_CONFIG,
      KafkaWriterConfigurationKeys.COMMIT_STEP_WAIT_TIME_DEFAULT);
  this.failureAllowance = ConfigUtils.getDouble(config, KafkaWriterConfigurationKeys.FAILURE_ALLOWANCE_PCT_CONFIG,
      KafkaWriterConfigurationKeys.FAILURE_ALLOWANCE_PCT_DEFAULT) / 100.0;
  this.producer = producer;
  this.producerCallback = new Callback() {
    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
      if (null == exception) {
        recordsWritten.mark();
      } else {
        log.debug("record failed to write", exception);
        recordsFailed.mark();
      }
    }
  };
}
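The constructor only wires up the callback; a write path would hand it to KafkaProducer.send so the success and failure meters fire asynchronously when the broker acknowledges (or rejects) each record. A minimal sketch; the method signature is assumed, not taken from the snippet:

public void write(byte[] record) {
  recordsProduced.mark();
  // producerCallback marks recordsWritten or recordsFailed once the send completes
  this.producer.send(new ProducerRecord<>(this.topic, record), this.producerCallback);
}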
try {
  EmailUtils.sendJobFailureAlertEmail(state.getJobSpec().toShortString(),
      getEmailBody(state, previousStatus, newStatus), 1,
      ConfigUtils.configToState(state.getJobSpec().getConfig()));
} catch (EmailException ee) {
  LOGGER.error("Failed to send job failure alert email for job " + state.getJobSpec().toShortString(), ee);
}

try {
  EmailUtils.sendJobCompletionEmail(state.getJobSpec().toShortString(),
      getEmailBody(state, previousStatus, newStatus), newStatus.toString(),
      ConfigUtils.configToState(state.getJobSpec().getConfig()));
} catch (EmailException ee) {
  LOGGER.error("Failed to send job completion notification email for job " + state.getJobSpec().toShortString(), ee);
}