/**
 * Converts a {@link Config} into a {@link Properties} instance, retaining only the
 * entries whose key begins with the given prefix.
 *
 * @param config the {@link Config} to convert
 * @param prefix only properties whose name starts with this prefix are kept
 * @return the resulting {@link Properties}
 */
public static Properties configToProperties(Config config, String prefix) {
  return configToProperties(config, Optional.of(prefix));
}
/**
 * Converts an entire {@link Config} into a {@link Properties} instance with no
 * key filtering applied.
 *
 * @param config the {@link Config} to convert
 * @return the resulting {@link Properties}
 */
public static Properties configToProperties(Config config) {
  return configToProperties(config, Optional.<String>absent());
}
/**
 * Returns the backing {@link Config} as {@link Properties}, converting lazily and
 * caching the result so the conversion runs at most once.
 * NOTE(review): no synchronization — concurrent first calls may each convert; confirm
 * this object is used from a single thread.
 */
public Properties getConfigAsProperties() {
  if (this.configAsProperties.isPresent()) {
    return this.configAsProperties.get();
  }
  this.configAsProperties = Optional.of(ConfigUtils.configToProperties(this.config.get()));
  return this.configAsProperties.get();
}
/**
 * Lazily materializes and memoizes the {@link Properties} view of the underlying
 * {@link Config}; subsequent calls return the cached value.
 * NOTE(review): not thread-safe — verify single-threaded access.
 */
public Properties getConfigAsProperties() {
  if (!this.configAsProperties.isPresent()) {
    Properties converted = ConfigUtils.configToProperties(this.config.get());
    this.configAsProperties = Optional.of(converted);
  }
  return this.configAsProperties.get();
}
/**
 * Exposes the wrapped {@link Config} as {@link Properties}. The conversion is deferred
 * until first use and the result is cached in {@code configAsProperties}.
 * NOTE(review): unsynchronized lazy init — confirm callers are single-threaded.
 */
public Properties getConfigAsProperties() {
  if (this.configAsProperties.isPresent()) {
    return this.configAsProperties.get();
  }
  Properties props = ConfigUtils.configToProperties(this.config.get());
  this.configAsProperties = Optional.of(props);
  return props;
}
/**
 * Builds the service job scheduler on top of the base scheduler, wiring in the flow
 * catalog, Helix manager and orchestrator, and starting with an empty map of
 * scheduled flow specs.
 */
public GobblinServiceJobScheduler(String serviceName, Config config, Optional<HelixManager> helixManager,
    Optional<FlowCatalog> flowCatalog, Optional<TopologyCatalog> topologyCatalog, Orchestrator orchestrator,
    SchedulerService schedulerService, Optional<Logger> log) throws Exception {
  super(ConfigUtils.configToProperties(config), schedulerService);

  // Use the supplied logger when present; otherwise fall back to a class-scoped one.
  if (log.isPresent()) {
    _log = log.get();
  } else {
    _log = LoggerFactory.getLogger(getClass());
  }
  this.serviceName = serviceName;
  this.helixManager = helixManager;
  this.flowCatalog = flowCatalog;
  this.orchestrator = orchestrator;
  this.scheduledFlowSpecs = Maps.newHashMap();
}
/**
 * Builds a Kafka {@link ConsumerConfig} from the given {@link Config}, supplying the
 * default consumer group id when none is configured.
 */
protected ConsumerConfig createConsumerConfig(Config config) {
  Properties consumerProps = ConfigUtils.configToProperties(config);
  // Kafka consumers require a group id; fall back to the default when unset.
  consumerProps.putIfAbsent(GROUP_ID_KEY, DEFAULT_GROUP_ID);
  return new ConsumerConfig(consumerProps);
}
/**
 * Wraps a {@link Config} in a {@link State} by converting it to {@link Properties}
 * first.
 *
 * @param config the {@link Config} to convert
 * @return a {@link State} backed by the converted properties
 */
public static State configToState(Config config) {
  return new State(configToProperties(config));
}
/**
 * Creates a path-based partition filter from the configured regex, or returns
 * {@code null} when no regex is configured (meaning: do not filter).
 */
@Override
public HivePartitionExtendedFilter createFilter(Config config) {
  Properties props = ConfigUtils.configToProperties(config);
  String pathRegex = props.getProperty(PathBasedHivePartitionFilterFactory.HIVE_PARTITION_PATH_FILTER_REGEX);
  if (pathRegex == null) {
    return null;
  }
  return new PathBasedPartitionFilter(pathRegex);
}
}
/**
 * Creates a version writer backed by a {@link KafkaAvroSchemaRegistry} built from the
 * given {@link Config}, with no override for the schema name or schema.
 *
 * @param config source of the schema-registry connection properties
 * @throws IOException if the delegated constructor fails to initialize the registry
 */
public SchemaRegistryVersionWriter(Config config)
    throws IOException {
  this(new KafkaAvroSchemaRegistry(ConfigUtils.configToProperties(config)), Optional.<String>absent(),
      Optional.<Schema>absent());
}
/**
 * Builds the container metrics when metrics are enabled in the configuration;
 * otherwise returns {@link Optional#absent()}.
 */
private Optional<ContainerMetrics> buildContainerMetrics() {
  Properties metricsProps = ConfigUtils.configToProperties(this.config);
  if (!GobblinMetrics.isEnabled(metricsProps)) {
    return Optional.absent();
  }
  return Optional.of(
      ContainerMetrics.get(ConfigUtils.configToState(this.config), this.applicationName, this.taskRunnerId));
}
/**
 * Builds an identifier for the state-store data source from its JDBC driver, URL and
 * (decrypted) user, joined with {@code "::"}.
 *
 * @param config configuration holding the state-store connection keys
 * @return a {@link String} identifying the data source
 */
public static String getDataSourceId(Config config) {
  PasswordManager passwordManager = PasswordManager.getInstance(ConfigUtils.configToProperties(config));
  String driver = ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_DB_JDBC_DRIVER_KEY,
      ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER);
  String url = config.getString(ConfigurationKeys.STATE_STORE_DB_URL_KEY);
  // The user value may be encrypted; readPassword resolves it to plaintext.
  String user = passwordManager.readPassword(config.getString(ConfigurationKeys.STATE_STORE_DB_USER_KEY));
  return driver + "::" + url + "::" + user;
}
private GobblinMetrics buildGobblinMetrics() { // Create tags list ImmutableList.Builder<Tag<?>> tags = new ImmutableList.Builder<>(); tags.add(new Tag<>(GobblinClusterMetricTagNames.APPLICATION_ID, this.applicationId)); tags.add(new Tag<>(GobblinClusterMetricTagNames.APPLICATION_NAME, this.applicationName)); // Intialize Gobblin metrics and start reporters GobblinMetrics gobblinMetrics = GobblinMetrics.get(this.applicationId, null, tags.build()); gobblinMetrics.startMetricReporting(ConfigUtils.configToProperties(config)); return gobblinMetrics; }
/**
 * Instantiates the task executor and task-state tracker from the cluster config and
 * registers both with a new {@link ServiceManager}.
 */
private void getServices() {
  final Properties clusterProps = ConfigUtils.configToProperties(this.clusterConfig);
  this.taskExecutor = new TaskExecutor(clusterProps);
  this.taskStateTracker = new GobblinHelixTaskStateTracker(clusterProps);
  this.serviceManager = new ServiceManager(Lists.newArrayList(this.taskExecutor, this.taskStateTracker));
}
private Properties generateJobProperties(Config baseConfig, String jobNameSuffix, String jobIdSuffix) { Properties properties = ConfigUtils.configToProperties(baseConfig); String jobName = properties.getProperty(ConfigurationKeys.JOB_NAME_KEY) + jobNameSuffix; properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName); properties.setProperty(ConfigurationKeys.JOB_ID_KEY, "job_" + jobName + jobIdSuffix); properties.setProperty(ConfigurationKeys.WRITER_FILE_PATH, jobName); // expiry time should be more than the time needed for the job to complete // otherwise JobContext will become null. This is how Helix work flow works. properties.setProperty(GobblinClusterConfigurationKeys.HELIX_WORKFLOW_EXPIRY_TIME_SECONDS, "5"); return properties; }
/**
 * Called once on {@link #startUp()} to create the metric context and its metrics.
 */
@VisibleForTesting
protected void buildMetricsContextAndMetrics() {
  org.apache.gobblin.configuration.State metricState =
      new org.apache.gobblin.configuration.State(ConfigUtils.configToProperties(config));
  this.metricContext = Instrumented.getMetricContext(metricState, this.getClass(), getTagsForMetrics());
  createMetrics();
}
/** Verifies that prefix filtering keeps matching keys and drops the rest. */
@Test
public void testConfigToPropertiesWithPrefix() {
  Config testConfig = ConfigFactory.parseMap(
      ImmutableMap.<String, Object>of("a.key1", 1, "b.key2", "sTring", "a.key3", true));
  Properties props = ConfigUtils.configToProperties(testConfig, "a.");
  // Values are stringified; the non-matching "b." key must be absent.
  Assert.assertEquals(props.getProperty("a.key1"), "1");
  Assert.assertEquals(props.getProperty("a.key3"), "true");
  Assert.assertNull(props.getProperty("b.key2"));
}
/** Verifies that an unfiltered conversion stringifies every value. */
@Test
public void testConfigToProperties() {
  Config testConfig = ConfigFactory.parseMap(
      ImmutableMap.<String, Object>of("key1", 1, "key2", "sTring", "key3", true));
  Properties props = ConfigUtils.configToProperties(testConfig);
  Assert.assertEquals(props.getProperty("key1"), "1");
  Assert.assertEquals(props.getProperty("key2"), "sTring");
  Assert.assertEquals(props.getProperty("key3"), "true");
}
TaskRunnerSuiteThreadModel(TaskRunnerSuiteBase.Builder builder) { super(builder); // initialize task related metrics this.taskExecutor = new TaskExecutor(ConfigUtils.configToProperties(builder.getConfig())); this.taskExecutionMetrics = new GobblinTaskRunnerMetrics.TaskExecutionMetrics(taskExecutor, metricContext); this.taskFactory = generateTaskFactory(taskExecutor, builder); this.jobFactory = new GobblinHelixJobFactory(builder, this.metricContext); }
/**
 * Resolve the job spec using classpath templates as well as any templates available in
 * the input {@link JobCatalog}.
 *
 * @param other the job spec to resolve
 * @param catalog catalog providing additional templates
 * @throws SpecNotFoundException if a referenced template cannot be found
 * @throws JobTemplate.TemplateException if template resolution fails
 */
public ResolvedJobSpec(JobSpec other, JobCatalog catalog)
    throws SpecNotFoundException, JobTemplate.TemplateException {
  // Resolve once and delegate: the original called resolveConfig() twice, doing the
  // full template resolution redundantly.
  this(other, resolveConfig(other, catalog));
}

private ResolvedJobSpec(JobSpec other, Config resolvedConfig) {
  super(other.getUri(), other.getVersion(), other.getDescription(), resolvedConfig,
      ConfigUtils.configToProperties(resolvedConfig), other.getTemplateURI(), other.getMetadata());
  this.originalJobSpec = other;
}