/**
 * Returns the Spark home directory taken from the environment.
 *
 * <p>Fails via {@code checkState} when the variable is unset, telling the user to set
 * SPARK_HOME.
 *
 * @return the value of the SPARK_HOME environment variable.
 */
String getSparkHome() {
  String home = getenv(ENV_SPARK_HOME);
  checkState(home != null,
    "Spark home not found; set it explicitly or use the SPARK_HOME environment variable.");
  return home;
}
/**
 * Returns the Spark home directory taken from the environment.
 *
 * <p>Fails via {@code checkState} when the variable is unset, telling the user to set
 * SPARK_HOME.
 *
 * @return the value of the SPARK_HOME environment variable.
 */
String getSparkHome() {
  String home = getenv(ENV_SPARK_HOME);
  checkState(home != null,
    "Spark home not found; set it explicitly or use the SPARK_HOME environment variable.");
  return home;
}
/**
 * Determines which Scala version the current Spark build targets.
 *
 * <p>An explicit SPARK_SCALA_VERSION environment variable takes precedence. Otherwise the
 * launcher build output directories under the Spark home are probed. Fails via
 * {@code checkState} when builds for both 2.10 and 2.11 exist, or when neither exists.
 *
 * @return "2.10" or "2.11" (or the value of SPARK_SCALA_VERSION when set).
 */
String getScalaVersion() {
  String fromEnv = getenv("SPARK_SCALA_VERSION");
  if (fromEnv != null) {
    return fromEnv;
  }
  String home = getSparkHome();
  File build210 = new File(home, "launcher/target/scala-2.10");
  File build211 = new File(home, "launcher/target/scala-2.11");
  // Ambiguous builds are an error: the user must clean one or choose explicitly.
  checkState(!build210.isDirectory() || !build211.isDirectory(),
    "Presence of build for both scala versions (2.10 and 2.11) detected.\n" +
    "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
  if (build210.isDirectory()) {
    return "2.10";
  }
  checkState(build211.isDirectory(), "Cannot find any build directories.");
  return "2.11";
}
/**
 * Determines which Scala version the current Spark build targets.
 *
 * <p>An explicit SPARK_SCALA_VERSION environment variable takes precedence. Otherwise the
 * launcher build output directories under the Spark home are probed. Fails via
 * {@code checkState} when builds for both 2.10 and 2.11 exist, or when neither exists.
 *
 * @return "2.10" or "2.11" (or the value of SPARK_SCALA_VERSION when set).
 */
String getScalaVersion() {
  String fromEnv = getenv("SPARK_SCALA_VERSION");
  if (fromEnv != null) {
    return fromEnv;
  }
  String home = getSparkHome();
  File build210 = new File(home, "launcher/target/scala-2.10");
  File build211 = new File(home, "launcher/target/scala-2.11");
  // Ambiguous builds are an error: the user must clean one or choose explicitly.
  checkState(!build210.isDirectory() || !build211.isDirectory(),
    "Presence of build for both scala versions (2.10 and 2.11) detected.\n" +
    "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
  if (build210.isDirectory()) {
    return "2.10";
  }
  checkState(build211.isDirectory(), "Cannot find any build directories.");
  return "2.11";
}
/**
 * Determines which Scala version the current Spark build targets.
 *
 * <p>An explicit SPARK_SCALA_VERSION environment variable takes precedence. Otherwise the
 * launcher build output directories under the Spark home are probed. Fails via
 * {@code checkState} when builds for both 2.12 and 2.11 exist, or when neither exists.
 *
 * @return "2.12" or "2.11" (or the value of SPARK_SCALA_VERSION when set).
 */
String getScalaVersion() {
  String fromEnv = getenv("SPARK_SCALA_VERSION");
  if (fromEnv != null) {
    return fromEnv;
  }
  String home = getSparkHome();
  File build212 = new File(home, "launcher/target/scala-2.12");
  File build211 = new File(home, "launcher/target/scala-2.11");
  // Ambiguous builds are an error: the user must clean one or choose explicitly.
  checkState(!build212.isDirectory() || !build211.isDirectory(),
    "Presence of build for multiple Scala versions detected.\n" +
    "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
  if (build212.isDirectory()) {
    return "2.12";
  }
  checkState(build211.isDirectory(), "Cannot find any build directories.");
  return "2.11";
}
/**
 * Determines which Scala version the current Spark build targets.
 *
 * <p>An explicit SPARK_SCALA_VERSION environment variable takes precedence. Otherwise the
 * launcher build output directories under the Spark home are probed. Fails via
 * {@code checkState} when builds for both 2.12 and 2.11 exist, or when neither exists.
 *
 * @return "2.12" or "2.11" (or the value of SPARK_SCALA_VERSION when set).
 */
String getScalaVersion() {
  String fromEnv = getenv("SPARK_SCALA_VERSION");
  if (fromEnv != null) {
    return fromEnv;
  }
  String home = getSparkHome();
  File build212 = new File(home, "launcher/target/scala-2.12");
  File build211 = new File(home, "launcher/target/scala-2.11");
  // Ambiguous builds are an error: the user must clean one or choose explicitly.
  checkState(!build212.isDirectory() || !build211.isDirectory(),
    "Presence of build for multiple Scala versions detected.\n" +
    "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
  if (build212.isDirectory()) {
    return "2.12";
  }
  checkState(build211.isDirectory(), "Cannot find any build directories.");
  return "2.11";
}
/**
 * Returns the Spark configuration directory: SPARK_CONF_DIR when set in the environment,
 * otherwise the "conf" directory under the Spark home.
 */
private String getConfDir() {
  String fromEnv = getenv("SPARK_CONF_DIR");
  if (fromEnv != null) {
    return fromEnv;
  }
  return join(File.separator, getSparkHome(), "conf");
}
/**
 * Returns the Spark configuration directory: SPARK_CONF_DIR when set in the environment,
 * otherwise the "conf" directory under the Spark home.
 */
private String getConfDir() {
  String fromEnv = getenv("SPARK_CONF_DIR");
  if (fromEnv != null) {
    return fromEnv;
  }
  return join(File.separator, getSparkHome(), "conf");
}
/**
 * Returns the Spark configuration directory: SPARK_CONF_DIR when set in the environment,
 * otherwise the "conf" directory under the Spark home.
 */
private String getConfDir() {
  String fromEnv = getenv("SPARK_CONF_DIR");
  if (fromEnv != null) {
    return fromEnv;
  }
  return join(File.separator, getSparkHome(), "conf");
}
/**
 * Returns the Spark configuration directory: SPARK_CONF_DIR when set in the environment,
 * otherwise the "conf" directory under the Spark home.
 */
private String getConfDir() {
  String fromEnv = getenv("SPARK_CONF_DIR");
  if (fromEnv != null) {
    return fromEnv;
  }
  return join(File.separator, getSparkHome(), "conf");
}
// NOTE(review): mid-method fragment — the enclosing method signature and closing braces are
// outside this chunk, so the code is left byte-identical and only annotated.
// Reads SPARK_PREPEND_CLASSES / SPARK_TESTING to decide whether to add Spark build output to the
// classpath; locates a jars directory, and (per the visible braces) adds the Hadoop/YARN conf
// dirs and SPARK_DIST_CLASSPATH before returning a fresh ArrayList copy of the entries.
// NOTE(review): the local `scala` is assigned but getScalaVersion() is invoked again for
// findJarsDir — presumably a redundant call / unused variable; confirm against the full method
// before cleaning up.
boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR")); addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH")); return new ArrayList<>(cp);
// NOTE(review): mid-method fragment (older variant) — the enclosing method signature and closing
// braces are outside this chunk, so the code is left byte-identical and only annotated.
// Differs from the sibling fragments: it first appends SPARK_CLASSPATH and the app-supplied
// classpath, and returns `cp` directly rather than a defensive ArrayList copy.
// NOTE(review): the local `scala` is assigned but getScalaVersion() is invoked again for
// findJarsDir — presumably a redundant call / unused variable; confirm against the full method
// before cleaning up.
addToClassPath(cp, getenv("SPARK_CLASSPATH")); addToClassPath(cp, appClassPath); boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR")); addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH")); return cp;
// NOTE(review): mid-method fragment — the enclosing method signature and closing braces are
// outside this chunk, so the code is left byte-identical and only annotated.
// Reads SPARK_PREPEND_CLASSES / SPARK_TESTING to decide whether to add Spark build output to the
// classpath; locates a jars directory, and (per the visible braces) adds the Hadoop/YARN conf
// dirs and SPARK_DIST_CLASSPATH before returning a fresh ArrayList copy of the entries.
// NOTE(review): the local `scala` is assigned but getScalaVersion() is invoked again for
// findJarsDir — presumably a redundant call / unused variable; confirm against the full method
// before cleaning up.
boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR")); addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH")); return new ArrayList<>(cp);
// NOTE(review): mid-method fragment — the enclosing method signature and closing braces are
// outside this chunk, so the code is left byte-identical and only annotated.
// Reads SPARK_PREPEND_CLASSES / SPARK_TESTING to decide whether to add Spark build output to the
// classpath; locates a jars directory, and (per the visible braces) adds the Hadoop/YARN conf
// dirs and SPARK_DIST_CLASSPATH before returning a fresh ArrayList copy of the entries.
// NOTE(review): the local `scala` is assigned but getScalaVersion() is invoked again for
// findJarsDir — presumably a redundant call / unused variable; confirm against the full method
// before cleaning up.
boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES")); boolean isTesting = "1".equals(getenv("SPARK_TESTING")); if (prependClasses || isTesting) { String scala = getScalaVersion(); boolean isTestingSql = "1".equals(getenv("SPARK_SQL_TESTING")); String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting && !isTestingSql); if (jarsDir != null) { addToClassPath(cp, getenv("HADOOP_CONF_DIR")); addToClassPath(cp, getenv("YARN_CONF_DIR")); addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH")); return new ArrayList<>(cp);