/**
 * Returns the Spark configuration directory: the value of the
 * {@code SPARK_CONF_DIR} environment variable when set, otherwise
 * the "conf" directory under the Spark home.
 */
private String getConfDir() {
  String fromEnv = getenv("SPARK_CONF_DIR");
  if (fromEnv != null) {
    return fromEnv;
  }
  return join(File.separator, getSparkHome(), "conf");
}
/**
 * Returns the application's effective configuration: entries explicitly set in
 * {@code conf} take precedence, and any keys missing from it are backfilled
 * from the properties file. The result is computed lazily and cached.
 *
 * @throws IOException if the properties file cannot be read.
 */
Map<String, String> getEffectiveConfig() throws IOException {
  if (effectiveConfig == null) {
    Map<String, String> merged = new HashMap<>(conf);
    Properties fileProps = loadPropertiesFile();
    for (String name : fileProps.stringPropertyNames()) {
      // Explicit settings win over file defaults.
      if (!merged.containsKey(name)) {
        merged.put(name, fileProps.getProperty(name));
      }
    }
    effectiveConfig = merged;
  }
  return effectiveConfig;
}
// NOTE(review): truncated fragment of a classpath-building method — the
// Arrays.asList(...) project list and several if-bodies are cut mid-expression
// (e.g. the loop variable `project` appears with no enclosing loop), so this
// span cannot compile as-is. Recover the complete method from upstream before
// editing. Intent, as far as the visible code shows: add the app classpath and
// conf dir; when SPARK_PREPEND_CLASSES or SPARK_TESTING=1, prepend per-project
// build output dirs; then add the jars dir glob and HADOOP/YARN conf dirs and
// SPARK_DIST_CLASSPATH from the environment.
String sparkHome = getSparkHome(); addToClassPath(cp, appClassPath); addToClassPath(cp, getConfDir()); boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); List<String> projects = Arrays.asList( "common/network-common", addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome)); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, join(File.separator, jarsDir, "*")); addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR")); addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH"));
// NOTE(review): truncated fragment of a java-command-building method — the
// InputStreamReader constructor call is cut mid-expression (its arguments and
// the closing of the try-with-resources are missing), so this span cannot
// compile as-is. Recover the full method from upstream before editing.
// Visible intent: read extra JVM options line-by-line from <confDir>/java-opts
// when that file exists, then append the built classpath joined with the
// platform path separator.
File javaOpts = new File(join(File.separator, getConfDir(), "java-opts")); if (javaOpts.isFile()) { try (BufferedReader br = new BufferedReader(new InputStreamReader( String line; while ((line = br.readLine()) != null) { addOptionString(cmd, line); cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath))); return cmd;
/**
 * Prepares the Spark launch command via the supplied command builder.
 * When {@code printLaunchCommand} is true, the fully-assembled command is
 * echoed to stderr for debugging before being returned.
 *
 * @param builder the command builder to delegate to.
 * @param env child-process environment entries the builder may populate.
 * @param printLaunchCommand whether to print the command to stderr.
 * @return the command tokens to launch.
 * @throws IOException if the builder fails to read its configuration.
 */
private static List<String> buildCommand(
    AbstractCommandBuilder builder,
    Map<String, String> env,
    boolean printLaunchCommand) throws IOException, IllegalArgumentException {
  List<String> command = builder.buildCommand(env);
  if (printLaunchCommand) {
    System.err.println("Spark Command: " + join(" ", command));
    System.err.println("========================================");
  }
  return command;
}
/**
 * Loads the application's configuration file, if one exists: either the
 * user-specified properties file, or the default properties file under the
 * Spark configuration directory. All property values are trimmed of
 * surrounding whitespace. Returns an empty Properties when no file is found.
 *
 * @throws IOException if the file exists but cannot be read.
 * @throws IllegalArgumentException if an explicitly-specified file is missing.
 */
private Properties loadPropertiesFile() throws IOException {
  File source;
  if (propertiesFile != null) {
    source = new File(propertiesFile);
    // An explicitly requested file must exist; the default may be absent.
    checkArgument(source.isFile(), "Invalid properties file '%s'.", propertiesFile);
  } else {
    source = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
  }

  Properties props = new Properties();
  if (source.isFile()) {
    // Properties files are read as UTF-8, not the platform default charset.
    try (InputStreamReader reader = new InputStreamReader(
        new FileInputStream(source), StandardCharsets.UTF_8)) {
      props.load(reader);
      for (Map.Entry<Object, Object> entry : props.entrySet()) {
        entry.setValue(entry.getValue().toString().trim());
      }
    }
  }
  return props;
}
// NOTE(review): truncated fragment of a classpath-building method (older
// variant — it also adds the deprecated SPARK_CLASSPATH env var, unlike the
// sibling fragments in this file). The Arrays.asList(...) project list and
// several if-bodies are cut mid-expression (`project` has no enclosing loop),
// so this span cannot compile as-is; recover the complete method from
// upstream before editing.
String sparkHome = getSparkHome(); addToClassPath(cp, getenv("SPARK_CLASSPATH")); addToClassPath(cp, appClassPath); addToClassPath(cp, getConfDir()); boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); List<String> projects = Arrays.asList( "common/network-common", addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome)); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, join(File.separator, jarsDir, "*")); addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR"));
// NOTE(review): truncated fragment of a java-command-building method — the
// InputStreamReader constructor is cut mid-expression (arguments and the
// try-with-resources closing are missing), so this span cannot compile as-is.
// Recover the full method from upstream before editing. Visible intent: read
// extra JVM options line-by-line from <confDir>/java-opts when present, then
// append the built classpath joined with the platform path separator.
File javaOpts = new File(join(File.separator, getConfDir(), "java-opts")); if (javaOpts.isFile()) { try (BufferedReader br = new BufferedReader(new InputStreamReader( String line; while ((line = br.readLine()) != null) { addOptionString(cmd, line); cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath))); return cmd;
/**
 * Builds the Spark launch command with the given builder. If
 * {@code printLaunchCommand} is set, the resulting command line is printed to
 * stderr (followed by a separator) so users can inspect what will be run.
 *
 * @param builder the command builder producing the launch command.
 * @param env environment entries to set for the child process.
 * @param printLaunchCommand whether to echo the command to stderr.
 * @return the assembled command tokens.
 * @throws IOException if the builder cannot read its configuration.
 */
private static List<String> buildCommand(
    AbstractCommandBuilder builder,
    Map<String, String> env,
    boolean printLaunchCommand) throws IOException, IllegalArgumentException {
  final List<String> cmd = builder.buildCommand(env);
  if (!printLaunchCommand) {
    return cmd;
  }
  System.err.println("Spark Command: " + join(" ", cmd));
  System.err.println("========================================");
  return cmd;
}
/**
 * Loads the configuration file for the application, if it exists: either the
 * user-specified properties file, or the spark-defaults file under the Spark
 * configuration directory. Values are whitespace-trimmed; an absent default
 * file yields an empty Properties object.
 *
 * @throws IOException if the file exists but cannot be read.
 * @throws IllegalArgumentException if a user-specified file does not exist.
 */
private Properties loadPropertiesFile() throws IOException {
  File candidate = (propertiesFile != null)
      ? new File(propertiesFile)
      : new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
  if (propertiesFile != null) {
    // Only a user-supplied path is required to exist.
    checkArgument(candidate.isFile(), "Invalid properties file '%s'.", propertiesFile);
  }

  Properties result = new Properties();
  if (candidate.isFile()) {
    // Read as UTF-8 regardless of the platform default charset.
    try (InputStreamReader in = new InputStreamReader(
        new FileInputStream(candidate), StandardCharsets.UTF_8)) {
      result.load(in);
      for (Map.Entry<Object, Object> e : result.entrySet()) {
        e.setValue(e.getValue().toString().trim());
      }
    }
  }
  return result;
}
// NOTE(review): truncated fragment of a classpath-building method (this
// variant also adds mllib/target/jars/* and lists "common/kvstore" first).
// The Arrays.asList(...) project list and several if-bodies are cut
// mid-expression (`project` appears with no enclosing loop), so this span
// cannot compile as-is; recover the complete method from upstream before
// editing.
String sparkHome = getSparkHome(); addToClassPath(cp, appClassPath); addToClassPath(cp, getConfDir()); boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); List<String> projects = Arrays.asList( "common/kvstore", addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome)); addToClassPath(cp, String.format("%s/mllib/target/jars/*", sparkHome)); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, join(File.separator, jarsDir, "*")); addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR"));
// NOTE(review): truncated fragment of a java-command-building method. Unlike
// the sibling fragments (L4/L8 style), this variant constructs the
// BufferedReader WITHOUT try-with-resources — if the full method matches this
// shape, the reader is leaked on exception; prefer the try-with-resources
// form used elsewhere in this file. The InputStreamReader constructor is cut
// mid-expression, so this span cannot compile as-is; recover the complete
// method from upstream before editing.
File javaOpts = new File(join(File.separator, getConfDir(), "java-opts")); if (javaOpts.isFile()) { BufferedReader br = new BufferedReader(new InputStreamReader( String line; while ((line = br.readLine()) != null) { addOptionString(cmd, line); cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath))); return cmd;
/**
 * Resolves the configuration directory, preferring the SPARK_CONF_DIR
 * environment variable and falling back to "<spark home>/conf".
 */
private String getConfDir() {
  String override = getenv("SPARK_CONF_DIR");
  return (override == null) ? join(File.separator, getSparkHome(), "conf") : override;
}
/**
 * Returns the Spark home directory, failing fast with an actionable message
 * when it has not been configured.
 *
 * @throws IllegalStateException if Spark home is not set.
 */
String getSparkHome() {
  String home = getenv(ENV_SPARK_HOME);
  checkState(home != null,
    "Spark home not found; set it explicitly or use the SPARK_HOME environment variable.");
  return home;
}
/**
 * Returns the effective configuration map, lazily built on first access:
 * a copy of {@code conf} with any missing keys filled in from the
 * properties file (explicit settings always win).
 *
 * @throws IOException if the properties file cannot be read.
 */
Map<String, String> getEffectiveConfig() throws IOException {
  if (effectiveConfig != null) {
    return effectiveConfig;
  }
  effectiveConfig = new HashMap<>(conf);
  Properties fileDefaults = loadPropertiesFile();
  for (String key : fileDefaults.stringPropertyNames()) {
    // File entries only fill gaps; they never override explicit settings.
    if (!effectiveConfig.containsKey(key)) {
      effectiveConfig.put(key, fileDefaults.getProperty(key));
    }
  }
  return effectiveConfig;
}
// NOTE(review): truncated fragment of buildCommand(...) — only the first
// statements survive (the second stderr separator line, the closing braces,
// and the return are missing), so this span cannot compile as-is. The intact
// versions of this method appear elsewhere in this file; reconcile against
// one of those.
List<String> cmd = builder.buildCommand(env); if (printLaunchCommand) { System.err.println("Spark Command: " + join(" ", cmd));
// NOTE(review): truncated interior fragment of loadPropertiesFile() — it
// starts mid-if (no method header or surrounding declarations) and ends
// before the file is actually loaded, so this span cannot compile as-is.
// The intact versions of this method appear elsewhere in this file;
// reconcile against one of those.
checkArgument(propsFile.isFile(), "Invalid properties file '%s'.", propertiesFile); } else { propsFile = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
// NOTE(review): truncated fragment of a classpath-building method (same
// variant as the "common/kvstore" fragment above, minus the trailing
// SPARK_DIST_CLASSPATH line). The Arrays.asList(...) project list and
// several if-bodies are cut mid-expression (`project` has no enclosing
// loop), so this span cannot compile as-is; recover the complete method
// from upstream before editing.
String sparkHome = getSparkHome(); addToClassPath(cp, appClassPath); addToClassPath(cp, getConfDir()); boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); List<String> projects = Arrays.asList( "common/kvstore", addToClassPath(cp, String.format("%s/%s/target/scala-%s/classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome, project, scala)); addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome)); addToClassPath(cp, String.format("%s/mllib/target/jars/*", sparkHome)); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, join(File.separator, jarsDir, "*")); addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR"));