/**
 * Builds a {@link Config} combining the metrics configuration with the shared Kafka
 * configuration; metrics entries win over Kafka entries when keys collide.
 *
 * @param props source properties to extract both configuration scopes from
 * @return the metrics config with the shared Kafka config as fallback
 */
public static Config getKafkaAndMetricsConfigFromProperties(Properties props) {
  Config metricsConfig =
      ConfigUtils.propertiesToConfig(props, Optional.of(ConfigurationKeys.METRICS_CONFIGURATIONS_PREFIX));
  Config sharedKafkaConfig =
      ConfigUtils.propertiesToConfig(props, Optional.of(ConfigurationKeys.SHARED_KAFKA_CONFIG_PREFIX));
  return metricsConfig.withFallback(sharedKafkaConfig);
}
/**
 * Constructs a service node with the given name and configuration properties.
 * The node's security flag is read from {@code ServiceConfigKeys.NODE_SECURITY_KEY},
 * defaulting to {@code ServiceConfigKeys.DEFAULT_NODE_SECURITY} when unset.
 *
 * @param nodeName non-null name of this node
 * @param props non-null properties backing this node's configuration
 * @throws NullPointerException if {@code nodeName} or {@code props} is null
 */
public BaseServiceNodeImpl(String nodeName, Properties props) {
  Preconditions.checkNotNull(nodeName);
  // Fix: props was previously unchecked, so a null would only surface as an
  // unhelpful NPE inside getProperty below; fail fast with a clear check instead.
  Preconditions.checkNotNull(props);
  this.nodeName = nodeName;
  isNodeSecure = Boolean.parseBoolean(
      props.getProperty(ServiceConfigKeys.NODE_SECURITY_KEY, ServiceConfigKeys.DEFAULT_NODE_SECURITY));
  nodeProps = ConfigUtils.propertiesToConfig(props);
}
/**
 * Custom deserialization hook: restores the serialized fields and rebuilds the
 * derived {@link Config} from the deserialized {@link Properties}.
 *
 * <p>NOTE: the read order below must stay in lock-step with the corresponding
 * writeObject — do not reorder these reads.
 */
private void readObject(java.io.ObjectInputStream stream) throws IOException, ClassNotFoundException {
  uri = (URI) stream.readObject();
  version = (String) stream.readObject();
  description = (String) stream.readObject();
  URI template = (URI) stream.readObject();
  templateURI = Optional.fromNullable(template);
  configAsProperties = (Properties) stream.readObject();
  // config is a derived field, never serialized directly: reconstruct it here.
  config = ConfigUtils.propertiesToConfig(configAsProperties);
}
}
/**
 * Builds and returns a {@link GraphiteReporter} configured from the
 * metrics-prefixed subset of the given properties.
 *
 * @param props metrics properties
 * @return a new GraphiteReporter
 * @throws IOException if the reporter cannot be created
 */
public GraphiteReporter build(Properties props) throws IOException {
  Optional<String> metricsPrefix = Optional.of(ConfigurationKeys.METRICS_CONFIGURATIONS_PREFIX);
  return new GraphiteReporter(this, ConfigUtils.propertiesToConfig(props, metricsPrefix));
}
}
/**
 * Builds an {@link OutputStreamReporter} configured from the
 * metrics-prefixed subset of the given properties.
 *
 * @param props metrics properties
 * @return a new {@link OutputStreamReporter}
 */
public OutputStreamReporter build(Properties props) {
  Optional<String> metricsPrefix = Optional.of(ConfigurationKeys.METRICS_CONFIGURATIONS_PREFIX);
  return new OutputStreamReporter(this, ConfigUtils.propertiesToConfig(props, metricsPrefix));
}
}
/**
 * Builds and returns an {@link InfluxDBReporter} configured from the
 * metrics-prefixed subset of the given properties.
 *
 * @param props metrics properties
 * @return a new InfluxDBReporter
 * @throws IOException if the reporter cannot be created
 */
public InfluxDBReporter build(Properties props) throws IOException {
  Optional<String> metricsPrefix = Optional.of(ConfigurationKeys.METRICS_CONFIGURATIONS_PREFIX);
  return new InfluxDBReporter(this, ConfigUtils.propertiesToConfig(props, metricsPrefix));
}
}
/**
 * Creates a converter that loads job states from a dataset state store built from
 * the given properties, optionally overriding the state-store root directory.
 *
 * @param props job/system properties used to build the state store
 * @param storeUrl optional state-store root dir; overrides the properties when non-blank
 * @param keepConfig whether job configuration should be kept in the JSON output
 * @throws IOException if the state store cannot be built
 */
public JobStateToJsonConverter(Properties props, String storeUrl, boolean keepConfig) throws IOException {
  Configuration conf = new Configuration();
  // NOTE(review): 'conf' is not referenced again in this constructor — presumably
  // putPropertiesIntoConfiguration is kept for a side effect, or this is dead code; confirm.
  JobConfigurationUtils.putPropertiesIntoConfiguration(props, conf);
  // An explicit store URL overrides whatever root dir the properties carry.
  // NOTE(review): this mutates the caller-supplied Properties object.
  if (StringUtils.isNotBlank(storeUrl)) {
    props.setProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, storeUrl);
  }
  this.keepConfig = keepConfig;
  // Raw StateStore cast: the generic type of buildDatasetStateStore is erased here.
  this.jobStateStore = (StateStore) DatasetStateStore.buildDatasetStateStore(ConfigUtils.propertiesToConfig(props));
}
/**
 * Verifies that propertiesToConfig with a prefix keeps only the keys under that
 * prefix and drops everything else.
 */
@Test
public void testPropertiesToConfigWithPrefix() {
  Properties props = new Properties();
  props.setProperty("k1.kk1", "v1");
  props.setProperty("k1.kk2", "v2");
  props.setProperty("k2.kk", "v3");

  Config config = ConfigUtils.propertiesToConfig(props, Optional.of("k1"));

  Assert.assertEquals(config.getString("k1.kk1"), "v1");
  Assert.assertEquals(config.getString("k1.kk2"), "v2");
  Assert.assertFalse(config.hasPath("k2.kk"), "Should not contain key k2.kk");
}
/**
 * Azkaban entry point that wraps a {@link GobblinYarnAppLauncher}.
 *
 * @param jobId Azkaban job id, passed through to the parent launcher
 * @param props Azkaban job properties, converted to the typesafe Config the launcher expects
 * @throws IOException if the underlying launcher cannot be created
 */
public AzkabanGobblinYarnAppLauncher(String jobId, Properties props) throws IOException {
  super(jobId, LOGGER);
  Config appConfig = ConfigUtils.propertiesToConfig(props);
  this.gobblinYarnAppLauncher = new GobblinYarnAppLauncher(appConfig, new YarnConfiguration());
}
/**
 * Constructs a dataset over state-store entries whose versions are found by timestamp
 * and selected by a configurable selection policy.
 *
 * @param key the dataset key
 * @param entries the state-store entry managers making up this dataset
 * @param props configuration properties; may carry a retention-scoped section
 */
public TimeBasedDatasetStoreDataset(Key key, List<DatasetStateStoreEntryManager> entries, Properties props) {
  super(key, entries);
  this.versionFinder = new TimestampedDatasetStateStoreVersionFinder();

  Config allConfig = ConfigUtils.propertiesToConfig(props);
  // strip the retention config namespace since the selection policy looks for configuration without the namespace
  Config retentionScoped =
      ConfigUtils.getConfigOrEmpty(allConfig, ConfigurableCleanableDataset.RETENTION_CONFIGURATION_KEY);
  Config retentionWithDefaults = retentionScoped.withFallback(allConfig);

  String policyClassName = ConfigUtils.getString(
      retentionWithDefaults, SELECTION_POLICY_CLASS_KEY, DEFAULT_SELECTION_POLICY_CLASS);
  this.versionSelectionPolicy = createSelectionPolicy(policyClassName, retentionWithDefaults, props);
}
/**
 * Round-trip test: converting Properties to a Config and back must be lossless.
 * Specifically exercises prefixed paths and keys with numeric trailing parts.
 */
@Test
public void testPropertiesToConfigAndBack() {
  Properties original = new Properties();
  original.setProperty("writer.staging.dir", "foobar");
  original.setProperty("writer.staging.dir.0", "foobar-0");

  Properties roundTripped =
      ConfigUtils.configToProperties(ConfigUtils.propertiesToConfig(original));

  Assert.assertEquals(roundTripped, original);
}
/**
 * Verifies that after consumption is reset, a fresh consumer replays all
 * previously published spec events.
 */
@Test (dependsOnMethods = "testDeleteSpec")
public void testResetConsumption() throws Exception {
  SimpleKafkaSpecConsumer consumer = _closer.register(
      new SimpleKafkaSpecConsumer(ConfigUtils.propertiesToConfig(_properties)));
  List<Pair<SpecExecutor.Verb, Spec>> replayedSpecs = consumer.changedSpecs().get();
  Assert.assertTrue(replayedSpecs.size() == 3, "Consumption was reset, we should see all events");
}
/**
 * Verifies that a configured project-name prefix is applied when building the
 * Azkaban project name.
 */
@Test
public void testProjectNameWithConfig() throws Exception {
  String expectedProjectName = "randomPrefix_http___localhost_8000_context";
  Properties properties = new Properties();
  properties.setProperty("gobblin.service.azkaban.project.namePrefix", "randomPrefix");
  // Collections.emptyMap() instead of the raw-typed Collections.EMPTY_MAP constant.
  JobSpec jobSpec = new JobSpec(new URI("http://localhost:8000/context"), "0.0", "test job spec",
      ConfigUtils.propertiesToConfig(properties), properties, Optional.absent(), Collections.emptyMap());
  AzkabanProjectConfig azkabanProjectConfig = new AzkabanProjectConfig(jobSpec);
  String actualProjectName = azkabanProjectConfig.getAzkabanProjectName();
  Assert.assertEquals(actualProjectName, expectedProjectName);
}
/**
 * Verifies that the Azkaban project zip file name is the (prefixed) project name
 * with a {@code .zip} extension.
 */
@Test
public void testProjectZipFileName() throws Exception {
  String expectedZipFileName = "randomPrefix_http___localhost_8000_context.zip";
  Properties properties = new Properties();
  properties.setProperty("gobblin.service.azkaban.project.namePrefix", "randomPrefix");
  // Collections.emptyMap() instead of the raw-typed Collections.EMPTY_MAP constant.
  JobSpec jobSpec = new JobSpec(new URI("http://localhost:8000/context"), "0.0", "test job spec",
      ConfigUtils.propertiesToConfig(properties), properties, Optional.absent(), Collections.emptyMap());
  AzkabanProjectConfig azkabanProjectConfig = new AzkabanProjectConfig(jobSpec);
  String actualZipFileName = azkabanProjectConfig.getAzkabanProjectZipFilename();
  Assert.assertEquals(actualZipFileName, expectedZipFileName);
}
/**
 * Verifies that overly long project names are truncated/hashed to a bounded,
 * deterministic Azkaban project name.
 */
@Test
public void testProjectNameWithReallyLongName() throws Exception {
  String expectedProjectName = "randomPrefixWithReallyLongName_http___localhost_8000__55490420";
  Properties properties = new Properties();
  properties.setProperty("gobblin.service.azkaban.project.namePrefix", "randomPrefixWithReallyLongName");
  // Collections.emptyMap() instead of the raw-typed Collections.EMPTY_MAP constant.
  JobSpec jobSpec = new JobSpec(new URI("http://localhost:8000/context/that-keeps-expanding-and-explanding"),
      "0.0", "test job spec", ConfigUtils.propertiesToConfig(properties), properties, Optional.absent(),
      Collections.emptyMap());
  AzkabanProjectConfig azkabanProjectConfig = new AzkabanProjectConfig(jobSpec);
  String actualProjectName = azkabanProjectConfig.getAzkabanProjectName();
  Assert.assertEquals(actualProjectName, expectedProjectName);
}
/**
 * Verifies that the zip file name for an overly long project name matches the
 * truncated/hashed project name plus a {@code .zip} extension.
 */
@Test
public void testProjectZipFileNameForLongName() throws Exception {
  String expectedZipFileName = "randomPrefixWithReallyLongName_http___localhost_8000__55490420.zip";
  Properties properties = new Properties();
  properties.setProperty("gobblin.service.azkaban.project.namePrefix", "randomPrefixWithReallyLongName");
  // Collections.emptyMap() instead of the raw-typed Collections.EMPTY_MAP constant.
  JobSpec jobSpec = new JobSpec(new URI("http://localhost:8000/context/that-keeps-expanding-and-explanding"),
      "0.0", "test job spec", ConfigUtils.propertiesToConfig(properties), properties, Optional.absent(),
      Collections.emptyMap());
  AzkabanProjectConfig azkabanProjectConfig = new AzkabanProjectConfig(jobSpec);
  String actualZipFileName = azkabanProjectConfig.getAzkabanProjectZipFilename();
  Assert.assertEquals(actualZipFileName, expectedZipFileName);
}
}
/**
 * Verifies the default project-name prefix ("GobblinService") is used when no
 * namePrefix property is configured.
 */
@Test
public void testProjectNameDefault() throws Exception {
  String expectedProjectName = "GobblinService__uri";
  Properties properties = new Properties();
  // Collections.emptyMap() instead of the raw-typed Collections.EMPTY_MAP constant.
  JobSpec jobSpec = new JobSpec(new URI("uri"), "0.0", "test job spec",
      ConfigUtils.propertiesToConfig(properties), properties, Optional.absent(), Collections.emptyMap());
  AzkabanProjectConfig azkabanProjectConfig = new AzkabanProjectConfig(jobSpec);
  String actualProjectName = azkabanProjectConfig.getAzkabanProjectName();
  Assert.assertEquals(actualProjectName, expectedProjectName);
}
/** Builds a minimal {@link JobSpec} for the given URI with an empty config. */
private JobSpec initJobSpec(String specUri) {
  Properties emptyProps = new Properties();
  return JobSpec.builder(specUri)
      .withConfig(ConfigUtils.propertiesToConfig(emptyProps))
      .withVersion("1")
      .withDescription("Spec Description")
      .build();
}
/** Builds a minimal {@link JobSpec} for the given URI with an empty config. */
private JobSpec initJobSpec(String specUri) {
  Properties emptyProps = new Properties();
  return JobSpec.builder(specUri)
      .withConfig(ConfigUtils.propertiesToConfig(emptyProps))
      .withVersion("1")
      .withDescription("Spec Description")
      .build();
}
@Override public DataWriter<D> build() throws IOException { validate(); return AsyncWriterManager.builder() .config(ConfigUtils.propertiesToConfig(getState().getProperties())) .asyncDataWriter(new AsyncHttpWriter(this)) .maxOutstandingWrites(maxOutstandingWrites) .retriesEnabled(false) // retries are done in HttpBatchDispatcher .commitTimeoutMillis(10000L) .failureAllowanceRatio(0).build(); } }