/**
 * Converts the given SparkConf to a plain map, dropping the entries that are
 * internal to this integration (extra listeners, BeakerX id, app name).
 */
private Map<String, Object> toMap(SparkConf sparkConf) {
  Map<String, Object> result = new HashMap<>();
  for (Tuple2<String, String> entry : sparkConf.getAll()) {
    String key = entry._1;
    // Skip the bookkeeping keys that must not leak into the user-visible map.
    boolean internal =
        key.equals(SPARK_EXTRA_LISTENERS) || key.equals(BEAKERX_ID) || key.equals(SPARK_APP_NAME);
    if (!internal) {
      addToMap(result, entry);
    }
  }
  return result;
}
/**
 * Returns only the "advanced" Spark configuration entries as a key/value map.
 */
public Map<String, String> getAdvanceSettings() {
  Map<String, String> advanced = new HashMap<>();
  for (Tuple2<String, String> entry : getSparkConf().getAll()) {
    if (isAdvancedSettings(entry._1)) {
      advanced.put(entry._1, entry._2);
    }
  }
  return advanced;
}
@Override public Properties asProperties() { Properties props = new Properties(); if (cfg != null) { String sparkPrefix = "spark."; for (Tuple2<String, String> tuple : cfg.getAll()) { // spark. are special so save them without the prefix as well // since its unlikely the other implementations will be aware of this convention String key = tuple._1; props.setProperty(key, tuple._2); if (key.startsWith(sparkPrefix)) { String simpleKey = key.substring(sparkPrefix.length()); // double check to not override a property defined directly in the config if (!props.containsKey(simpleKey)) { props.setProperty(simpleKey, tuple._2); } } } } return props; } }
@Override public Properties asProperties() { Properties props = new Properties(); if (cfg != null) { String sparkPrefix = "spark."; for (Tuple2<String, String> tuple : cfg.getAll()) { // spark. are special so save them without the prefix as well // since its unlikely the other implementations will be aware of this convention String key = tuple._1; props.setProperty(key, tuple._2); if (key.startsWith(sparkPrefix)) { String simpleKey = key.substring(sparkPrefix.length()); // double check to not override a property defined directly in the config if (!props.containsKey(simpleKey)) { props.setProperty(simpleKey, tuple._2); } } } } return props; } }
// Copy every Spark configuration entry into the map sent to the remote
// driver, logging each pair at debug level.
// NOTE(review): fragment — the loop's closing braces lie outside this chunk.
for (Tuple2<String, String> e : conf.getAll()) { mapConf.put(e._1(), e._2()); LOG.debug("Remote Spark Driver configured with: " + e._1() + "=" + e._2());
// Snapshot the full Spark configuration and print a header before dumping
// each entry to stdout (debugging aid).
// NOTE(review): fragment — the loop body continues outside this chunk.
Tuple2<String, String>[] sparkConfPairs = sparkConf.getAll(); System.out.println("--- sparkConf ---"); for (int i = 0; i < sparkConfPairs.length; i++) {
/**
 * Mirrors every Spark setting into a commons-configuration BaseConfiguration.
 */
private static Configuration makeApacheConfiguration(final SparkConf sparkConfiguration) {
  final BaseConfiguration apacheConfiguration = new BaseConfiguration();
  // Keep list-like values intact: do not split them on the default delimiter.
  apacheConfiguration.setDelimiterParsingDisabled(true);
  for (final Tuple2<String, String> entry : sparkConfiguration.getAll()) {
    final String key = entry._1();
    final String value = entry._2();
    apacheConfiguration.setProperty(key, value);
  }
  return apacheConfiguration;
}
/**
 * Records every entry of the given SparkConf in the local options map, then
 * delegates to the parent builder.
 */
@Override
public Builder config(final SparkConf conf) {
  for (final Tuple2<String, String> pair : conf.getAll()) {
    final String key = pair._1;
    final String value = pair._2;
    this.options.put(key, value);
  }
  return (Builder) super.config(conf);
}
@Override public Properties asProperties() { Properties props = new Properties(); if (cfg != null) { String sparkPrefix = "spark."; for (Tuple2<String, String> tuple : cfg.getAll()) { // spark. are special so save them without the prefix as well // since its unlikely the other implementations will be aware of this convention String key = tuple._1; props.setProperty(key, tuple._2); if (key.startsWith(sparkPrefix)) { String simpleKey = key.substring(sparkPrefix.length()); // double check to not override a property defined directly in the config if (!props.containsKey(simpleKey)) { props.setProperty(simpleKey, tuple._2); } } } } return props; } }
@Override public Properties asProperties() { Properties props = new Properties(); if (cfg != null) { String sparkPrefix = "spark."; for (Tuple2<String, String> tuple : cfg.getAll()) { // spark. are special so save them without the prefix as well // since its unlikely the other implementations will be aware of this convention String key = tuple._1; props.setProperty(key, tuple._2); if (key.startsWith(sparkPrefix)) { String simpleKey = key.substring(sparkPrefix.length()); // double check to not override a property defined directly in the config if (!props.containsKey(simpleKey)) { props.setProperty(simpleKey, tuple._2); } } } } return props; } }
@Override public Properties asProperties() { Properties props = new Properties(); if (cfg != null) { String sparkPrefix = "spark."; for (Tuple2<String, String> tuple : cfg.getAll()) { // spark. are special so save them without the prefix as well // since its unlikely the other implementations will be aware of this convention String key = tuple._1; props.setProperty(key, tuple._2); if (key.startsWith(sparkPrefix)) { String simpleKey = key.substring(sparkPrefix.length()); // double check to not override a property defined directly in the config if (!props.containsKey(simpleKey)) { props.setProperty(simpleKey, tuple._2); } } } } return props; } }
/**
 * Adds the given port offset to every Spark property whose key starts with
 * "spark." and ends with ".port", writing the shifted value back into the
 * Spark conf in place.
 *
 * @param conf spark conf to update
 * @param portOffset offset added to each configured port value
 */
private void addSparkPropertiesPortOffset(SparkConf conf, int portOffset) {
  // getAll() returns a snapshot array, so mutating conf inside the loop is safe.
  for (Tuple2<String, String> prop : conf.getAll()) {
    String key = prop._1().trim();
    if (key.startsWith("spark.") && key.endsWith(".port")) {
      try {
        // Fix: the original Integer.parseInt(prop._2()) threw
        // NumberFormatException on padded or non-numeric port values and
        // aborted the whole offset pass; trim and skip bad entries instead.
        int port = Integer.parseInt(prop._2().trim());
        conf.set(key, Integer.toString(port + portOffset));
      } catch (NumberFormatException ignored) {
        // Value is not a plain integer (e.g. unset or templated); leave as-is.
      }
    }
  }
}
// Materialize the full Spark configuration into a Properties instance
// (presumably later written into the conf dir / zip — TODO confirm; the rest
// of this method lies outside the visible chunk).
String confDirPath, final File zipFile) { final Properties properties = new Properties(); for (Tuple2<String, String> tuple : sparkConf.getAll()) { properties.put(tuple._1(), tuple._2());
// Materialize the full Spark configuration into a Properties instance
// (presumably later written into the conf dir / zip — TODO confirm; the rest
// of this method lies outside the visible chunk).
String confDirPath, final File zipFile) { final Properties properties = new Properties(); for (Tuple2<String, String> tuple : sparkConf.getAll()) { properties.put(tuple._1(), tuple._2());
// Materialize the full Spark configuration into a Properties instance
// (presumably later written into the conf dir / zip — TODO confirm; the rest
// of this method lies outside the visible chunk).
String confDirPath, final File zipFile) { final Properties properties = new Properties(); for (Tuple2<String, String> tuple : sparkConf.getAll()) { properties.put(tuple._1(), tuple._2());
/**
 * Mirrors every Spark setting into a commons-configuration BaseConfiguration.
 */
private static Configuration makeApacheConfiguration(final SparkConf sparkConfiguration) {
  final BaseConfiguration apacheConfiguration = new BaseConfiguration();
  // Keep list-like values intact: do not split them on the default delimiter.
  apacheConfiguration.setDelimiterParsingDisabled(true);
  for (final Tuple2<String, String> entry : sparkConfiguration.getAll()) {
    final String key = entry._1();
    final String value = entry._2();
    apacheConfiguration.setProperty(key, value);
  }
  return apacheConfiguration;
}
// Copy every Spark configuration entry into the map sent to the remote
// driver, logging each pair at debug level.
// NOTE(review): fragment — the loop's closing braces lie outside this chunk.
for (Tuple2<String, String> e : conf.getAll()) { mapConf.put(e._1(), e._2()); LOG.debug("Remote Driver configured with: " + e._1() + "=" + e._2());
// Copy every Spark configuration entry into the configs map.
// NOTE(review): fragment — enclosing method and closing braces not visible.
for (Tuple2<String, String> tuple : sparkConf.getAll()) { configs.put(tuple._1(), tuple._2());
// Copy every Spark configuration entry into the configs map.
// NOTE(review): fragment — enclosing method and closing braces not visible.
for (Tuple2<String, String> tuple : sparkConf.getAll()) { configs.put(tuple._1(), tuple._2());
// Copy every Spark configuration entry into the configs map.
// NOTE(review): fragment — enclosing method and closing braces not visible.
for (Tuple2<String, String> tuple : sparkConf.getAll()) { configs.put(tuple._1(), tuple._2());