/**
 * Returns the key set of this configuration view. Without a prefix this is the
 * backing configuration's key set; with a prefix, only backing keys that start
 * with the prefix are returned, with the prefix stripped off.
 */
@Override
public Set<String> keySet() {
    if (this.prefix == null) {
        return this.backingConfig.keySet();
    }
    final Set<String> strippedKeys = new HashSet<>();
    final int cut = this.prefix.length();
    for (final String fullKey : this.backingConfig.keySet()) {
        if (fullKey.startsWith(this.prefix)) {
            strippedKeys.add(fullKey.substring(cut));
        }
    }
    return strippedKeys;
}
/**
 * Builds a fresh Hadoop configuration from all Flink config entries whose keys
 * start with one of {@code flinkConfigPrefixes}. The matching prefix is replaced
 * by {@code hadoopConfigPrefix} and the value is passed through
 * {@code fixHadoopConfig} before being stored.
 */
private org.apache.hadoop.conf.Configuration loadHadoopConfigFromFlink() {
    final org.apache.hadoop.conf.Configuration result = new org.apache.hadoop.conf.Configuration();
    for (final String flinkKey : flinkConfig.keySet()) {
        for (final String candidatePrefix : flinkConfigPrefixes) {
            if (!flinkKey.startsWith(candidatePrefix)) {
                continue;
            }
            final String hadoopKey = hadoopConfigPrefix + flinkKey.substring(candidatePrefix.length());
            final String hadoopValue = fixHadoopConfig(flinkKey, flinkConfig.getString(flinkKey, null));
            result.set(hadoopKey, hadoopValue);
            LOG.debug("Adding Flink config entry for {} as {} to Hadoop config", flinkKey, hadoopKey);
        }
    }
    return result;
}
/**
 * Builds a Hadoop configuration from the Flink configuration.
 *
 * <p>Copies every Flink entry whose key starts with one of
 * {@code FLINK_CONFIG_PREFIXES} into a new Hadoop configuration under the same
 * key. Keys listed in {@code CONFIG_KEYS_TO_SHADE} are rewritten with
 * {@code FLINK_SHADING_PREFIX} prepended to the value.
 *
 * @return a new Hadoop configuration; empty if {@code flinkConfig} is {@code null}
 */
@VisibleForTesting
org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    if (flinkConfig == null) {
        // nothing to copy; hand back a plain (default) Hadoop configuration
        return conf;
    }
    // read all configuration with prefix 'FLINK_CONFIG_PREFIXES'
    for (String key : flinkConfig.keySet()) {
        for (String prefix : FLINK_CONFIG_PREFIXES) {
            if (key.startsWith(prefix)) {
                String value = flinkConfig.getString(key, null);
                conf.set(key, value);
                // shaded keys get the relocation prefix prepended to the value;
                // this second set(...) intentionally overwrites the plain value above
                if (CONFIG_KEYS_TO_SHADE.contains(key)) {
                    conf.set(key, FLINK_SHADING_PREFIX + value);
                }
                // log conf.get(key) (not `value`) so the possibly-shaded value is shown
                LOG.debug("Adding Flink config entry for {} as {} to Hadoop config", key, conf.get(key));
            }
        }
    }
    return conf;
}
}
for (String key : flinkConfig.keySet()) { if (key.startsWith(CONFIG_PREFIX)) { String value = flinkConfig.getString(key, null);
finalConf = new org.apache.hadoop.conf.Configuration(hadoopConf); for (String key : extraUserConf.keySet()) { finalConf.set(key, extraUserConf.getString(key, null));
assertEquals(6, conf.keySet().size());
/**
 * Verifies that a {@link DelegatingConfiguration} constructed with a {@code null}
 * prefix exposes exactly the key set of its backing configuration.
 */
@Test
public void testDelegationConfigurationWithNullPrefix() {
    Configuration backingConf = new Configuration();
    backingConf.setValueInternal("test-key", "value");

    DelegatingConfiguration configuration = new DelegatingConfiguration(backingConf, null);
    Set<String> keySet = configuration.keySet();

    // JUnit convention: expected value first, actual second — otherwise a failure
    // message would report the values the wrong way around
    assertEquals(backingConf.keySet(), keySet);
}
/**
 * Verifies that {@code Configuration.removeConfig} removes an entry by its
 * primary key, removes an entry reachable only through a deprecated key, and
 * returns {@code false} when neither the key nor any deprecated key is present.
 */
@Test
public void testRemove() {
    Configuration cfg = new Configuration();
    cfg.setInteger("a", 1);
    cfg.setInteger("b", 2);

    ConfigOption<Integer> validOption = ConfigOptions
        .key("a")
        .defaultValue(-1);

    // "b" is only reachable through a deprecated key of this option
    ConfigOption<Integer> deprecatedOption = ConfigOptions
        .key("c")
        .defaultValue(-1)
        .withDeprecatedKeys("d", "b");

    // neither the primary key nor any deprecated key exists in the configuration
    ConfigOption<Integer> unexistedOption = ConfigOptions
        .key("e")
        .defaultValue(-1)
        .withDeprecatedKeys("f", "g", "j");

    // JUnit convention: the expected value comes before the actual value
    assertEquals("Wrong expectation about size", 2, cfg.keySet().size());
    assertTrue("Expected 'validOption' is removed", cfg.removeConfig(validOption));
    assertEquals("Wrong expectation about size", 1, cfg.keySet().size());
    assertTrue("Expected 'existedOption' is removed", cfg.removeConfig(deprecatedOption));
    assertEquals("Wrong expectation about size", 0, cfg.keySet().size());
    assertFalse("Expected 'unexistedOption' is not removed", cfg.removeConfig(unexistedOption));
}
}
/**
 * Returns the set of keys visible through this delegating view.
 *
 * <p>With a {@code null} prefix the backing configuration's key set is returned
 * directly; otherwise only keys starting with the prefix are included, each with
 * the prefix removed.
 */
@Override
public Set<String> keySet() {
    if (this.prefix == null) {
        // no prefixing active — expose the backing key set as-is
        return this.backingConfig.keySet();
    }
    final HashSet<String> set = new HashSet<>();
    // hoisted so the length is not recomputed per key
    int prefixLen = this.prefix.length();
    for (String key : this.backingConfig.keySet()) {
        if (key.startsWith(prefix)) {
            // strip the prefix so callers see the "local" key names
            set.add(key.substring(prefixLen));
        }
    }
    return set;
}
/**
 * Key set of this view: the backing configuration's keys when no prefix is set,
 * otherwise the prefixed subset of backing keys with the prefix removed.
 */
@Override
public Set<String> keySet() {
    if (this.prefix == null) {
        return this.backingConfig.keySet();
    }
    final HashSet<String> result = new HashSet<>();
    for (final String candidate : this.backingConfig.keySet()) {
        if (candidate.startsWith(this.prefix)) {
            result.add(candidate.substring(this.prefix.length()));
        }
    }
    return result;
}
/**
 * Copies every entry of a Flink configuration into a new Hadoop configuration
 * under the same key.
 *
 * @param flinkConfig the Flink configuration to convert, may be {@code null}
 * @return the converted Hadoop configuration, or {@code null} when the input is {@code null}
 */
public static Configuration toHadoopConfig(org.apache.flink.configuration.Configuration flinkConfig) {
    if (flinkConfig == null) {
        return null;
    }
    final Configuration hadoopConfig = new Configuration();
    flinkConfig.keySet().forEach(key -> hadoopConfig.set(key, flinkConfig.getString(key, null)));
    return hadoopConfig;
}
/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Entries are emitted as {@code key: value} lines. The file is written in
 * UTF-8 explicitly rather than relying on {@code FileWriter}'s platform-default
 * charset, so the output is identical across environments.
 *
 * @param cfg The Flink config
 * @param file The File to write to (overwritten if it already exists)
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
    try (PrintWriter out = new PrintWriter(new java.io.OutputStreamWriter(
            new java.io.FileOutputStream(file), java.nio.charset.StandardCharsets.UTF_8))) {
        for (String key : cfg.keySet()) {
            // keys from keySet() are present, so null is never actually returned here
            String value = cfg.getString(key, null);
            out.print(key);
            out.print(": ");
            out.println(value);
        }
    }
}
/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Entries are emitted as {@code key: value} lines. The file is written in
 * UTF-8 explicitly rather than relying on {@code FileWriter}'s platform-default
 * charset, so the output is identical across environments.
 *
 * @param cfg The Flink config
 * @param file The File to write to (overwritten if it already exists)
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
    try (PrintWriter out = new PrintWriter(new java.io.OutputStreamWriter(
            new java.io.FileOutputStream(file), java.nio.charset.StandardCharsets.UTF_8))) {
        for (String key : cfg.keySet()) {
            // keys from keySet() are present, so null is never actually returned here
            String value = cfg.getString(key, null);
            out.print(key);
            out.print(": ");
            out.println(value);
        }
    }
}
/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Entries are emitted as {@code key: value} lines. The file is written in
 * UTF-8 explicitly rather than relying on {@code FileWriter}'s platform-default
 * charset, so the output is identical across environments.
 *
 * @param cfg The Flink config
 * @param file The File to write to (overwritten if it already exists)
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
    try (PrintWriter out = new PrintWriter(new java.io.OutputStreamWriter(
            new java.io.FileOutputStream(file), java.nio.charset.StandardCharsets.UTF_8))) {
        for (String key : cfg.keySet()) {
            // keys from keySet() are present, so null is never actually returned here
            String value = cfg.getString(key, null);
            out.print(key);
            out.print(": ");
            out.println(value);
        }
    }
}
/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Entries are emitted as {@code key: value} lines. The file is written in
 * UTF-8 explicitly rather than relying on {@code FileWriter}'s platform-default
 * charset, so the output is identical across environments.
 *
 * @param cfg The Flink config
 * @param file The File to write to (overwritten if it already exists)
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
    try (PrintWriter out = new PrintWriter(new java.io.OutputStreamWriter(
            new java.io.FileOutputStream(file), java.nio.charset.StandardCharsets.UTF_8))) {
        for (String key : cfg.keySet()) {
            // keys from keySet() are present, so null is never actually returned here
            String value = cfg.getString(key, null);
            out.print(key);
            out.print(": ");
            out.println(value);
        }
    }
}
/**
 * Copies all entries of {@code parameters} into {@code config} as stub
 * parameters, namespacing each key with the operator ID ("{operatorID}.{key}").
 */
public static void setStubParameters(TaskConfig config, OperatorID operatorID, Configuration parameters) {
    // the operator-ID prefix is loop-invariant, so build it once
    final String keyPrefix = operatorID + ".";
    for (final String parameterKey : parameters.keySet()) {
        config.setStubParameter(keyPrefix + parameterKey, parameters.getString(parameterKey, null));
    }
}
}
public static ClusterConfigurationInfo from(Configuration config) { ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(config.keySet().size()); for (String key : config.keySet()) { String value = config.getString(key, null); // Mask key values which contain sensitive information if (value != null && GlobalConfiguration.isSensitive(key)) { value = GlobalConfiguration.HIDDEN_CONTENT; } clusterConfig.add(new ClusterConfigurationInfoEntry(key, value)); } return clusterConfig; }
/**
 * Builds a {@link ClusterConfigurationInfo} from the given configuration.
 *
 * <p>Values of keys that {@code GlobalConfiguration.isSensitive} reports as
 * sensitive are replaced with {@code GlobalConfiguration.HIDDEN_CONTENT}
 * before being added, so secrets never leave this method.
 *
 * @param config the cluster configuration to expose
 * @return the populated info object, one entry per configuration key
 */
public static ClusterConfigurationInfo from(Configuration config) {
    // presized with the number of keys to avoid list growth
    ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(config.keySet().size());
    for (String key : config.keySet()) {
        String value = config.getString(key, null);
        // Mask key values which contain sensitive information
        if (value != null && GlobalConfiguration.isSensitive(key)) {
            value = GlobalConfiguration.HIDDEN_CONTENT;
        }
        clusterConfig.add(new ClusterConfigurationInfoEntry(key, value));
    }
    return clusterConfig;
}
public static ClusterConfigurationInfo from(Configuration config) { ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(config.keySet().size()); for (String key : config.keySet()) { String value = config.getString(key, null); // Mask key values which contain sensitive information if (value != null && GlobalConfiguration.isSensitive(key)) { value = GlobalConfiguration.HIDDEN_CONTENT; } clusterConfig.add(new ClusterConfigurationInfoEntry(key, value)); } return clusterConfig; }