// Package-private constructor: backs this launcher's configuration with a fresh
// SparkSubmitCommandBuilder, which the setter methods populate.
AbstractLauncher() { this.builder = new SparkSubmitCommandBuilder(); }
// Verifies that redirectToLog(loggerName) both flips the launcher's internal
// redirectToLog flag and records the logger name in the builder's effective
// config under SparkLauncher.CHILD_PROCESS_LOGGER_NAME.
@Test public void testRedirectToLog() throws Exception { launcher.redirectToLog("fakeLogger"); assertTrue(launcher.redirectToLog); assertTrue(launcher.builder.getEffectiveConfig() .containsKey(SparkLauncher.CHILD_PROCESS_LOGGER_NAME)); }
/**
 * Builds the command line for the configured application, dispatching to the
 * pyspark / sparkR shell builders for the interactive shells and to the generic
 * spark-submit builder otherwise.
 *
 * @param env Environment map that shell builders may populate with extra variables.
 * @return The command line as a list of arguments.
 */
@Override
public List<String> buildCommand(Map<String, String> env)
    throws IOException, IllegalArgumentException {
  // Special commands (e.g. version checks) always go through spark-submit,
  // even when the app resource names one of the shells.
  if (!isSpecialCommand) {
    if (PYSPARK_SHELL.equals(appResource)) {
      return buildPySparkShellCommand(env);
    }
    if (SPARKR_SHELL.equals(appResource)) {
      return buildSparkRCommand(env);
    }
  }
  return buildSparkSubmitCommand(env);
}
// NOTE(review): this block was garbled — the thrift-server ternary's result was
// discarded (statement with no effect) while `tsMemory` was used but never
// declared, and the `if` block's closing brace was missing. Restored the
// assignment and closed the daemon-opts block so SPARK_SUBMIT_OPTS is always
// applied, not only for the thrift server.
Map<String, String> config = getEffectiveConfig();
boolean isClientMode = isClientMode(config);
String extraClassPath = isClientMode ? config.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH) : null;

List<String> cmd = buildJavaCommand(extraClassPath);
// The Thrift server runs as a daemon, so it also honors the daemon JVM options.
if (isThriftServer(mainClass)) {
  addOptionString(cmd, System.getenv("SPARK_DAEMON_JAVA_OPTS"));
}
addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));

// Driver memory precedence: SPARK_DAEMON_MEMORY (thrift server only), then
// spark.driver.memory, then SPARK_DRIVER_MEMORY, then SPARK_MEM, then default.
String tsMemory = isThriftServer(mainClass) ? System.getenv("SPARK_DAEMON_MEMORY") : null;
String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY),
  System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM);
cmd.add("-Xmx" + memory);
addOptionString(cmd, driverExtraJavaOptions);
mergeEnvPathList(env, getLibPathEnvName(), config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
cmd.addAll(buildSparkSubmitArgs());
return cmd;
/**
 * Quotes each spark-submit argument, joins them with single spaces, and stores
 * the result in {@code env} under {@code submitArgsEnvVariable}. Also merges
 * the driver extra library path into the platform library-path env variable.
 *
 * @param env Environment map to populate.
 * @param submitArgsEnvVariable Name of the variable receiving the joined args.
 */
private void constructEnvVarArgs(
    Map<String, String> env,
    String submitArgsEnvVariable) throws IOException {
  mergeEnvPathList(env, getLibPathEnvName(),
    getEffectiveConfig().get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));

  StringBuilder joined = new StringBuilder();
  String separator = "";
  for (String arg : buildSparkSubmitArgs()) {
    joined.append(separator).append(quoteForCommandString(arg));
    separator = " ";
  }
  env.put(submitArgsEnvVariable, joined.toString());
}
/**
 * Starts a Spark application.
 *
 * @see AbstractLauncher#startApplication(SparkAppHandle.Listener...)
 * @param listeners Listeners to add to the handle before the app is launched.
 * @return A handle for the launched application.
 */
@Override
public SparkAppHandle startApplication(SparkAppHandle.Listener... listeners) throws IOException {
  if (builder.isClientMode(builder.getEffectiveConfig())) {
    LOG.warning("It's not recommended to run client-mode applications using InProcessLauncher.");
  }

  Method sparkSubmitMain = findSparkSubmit();
  LauncherServer server = LauncherServer.getOrCreateServer();
  InProcessAppHandle appHandle = new InProcessAppHandle(server);
  for (SparkAppHandle.Listener listener : listeners) {
    appHandle.addListener(listener);
  }

  // Register the handle with the launcher server and expose the connection
  // details to the child through the launcher protocol configuration.
  String secret = server.registerHandle(appHandle);
  setConf(LauncherProtocol.CONF_LAUNCHER_PORT, String.valueOf(server.getPort()));
  setConf(LauncherProtocol.CONF_LAUNCHER_SECRET, secret);

  List<String> submitArgs = builder.buildSparkSubmitArgs();
  String[] argv = submitArgs.toArray(new String[submitArgs.size()]);
  String appName = CommandBuilderUtils.firstNonEmpty(builder.appName, builder.mainClass,
    "<unknown>");
  appHandle.start(appName, sparkSubmitMain, argv);
  return appHandle;
}
// A builder that was never given an app resource must be rejected when the
// spark-submit arguments are built.
@Test(expected = IllegalArgumentException.class)
public void testMissingAppResource() {
  SparkSubmitCommandBuilder emptyBuilder = new SparkSubmitCommandBuilder();
  emptyBuilder.buildSparkSubmitArgs();
}
// Builds the child-process command line: the spark-submit launcher resolved by
// findSparkSubmit(), followed by the builder's spark-submit arguments.
// NOTE(review): method is truncated in this view — the ProcessBuilder
// construction and return lie outside the visible range.
private ProcessBuilder createBuilder() throws IOException { List<String> cmd = new ArrayList<>(); cmd.add(findSparkSubmit()); cmd.addAll(builder.buildSparkSubmitArgs());
// Builds the child-process command line: <sparkHome>/bin/spark-submit (with a
// .cmd suffix on Windows), followed by the builder's spark-submit arguments.
// NOTE(review): method is truncated in this view — the ProcessBuilder
// construction and return lie outside the visible range.
private ProcessBuilder createBuilder() { List<String> cmd = new ArrayList<>(); String script = isWindows() ? "spark-submit.cmd" : "spark-submit"; cmd.add(join(File.separator, builder.getSparkHome(), "bin", script)); cmd.addAll(builder.buildSparkSubmitArgs());
/**
 * Set a custom properties file with Spark configuration for the application.
 *
 * @param path Path to custom properties file to use; rejected via
 *             {@code checkNotNull} when null.
 * @return This launcher, for call chaining.
 */
public SparkLauncher setPropertiesFile(String path) { checkNotNull(path, "path"); builder.setPropertiesFile(path); return this; }
private List<String> buildSparkRCommand(Map<String, String> env) throws IOException { if (!appArgs.isEmpty() && appArgs.get(0).endsWith(".R")) { System.err.println( "Running R applications through 'sparkR' is not supported as of Spark 2.0.\n" + "Use ./bin/spark-submit <R file>"); System.exit(-1); } // When launching the SparkR shell, store the spark-submit arguments in the SPARKR_SUBMIT_ARGS // env variable. appResource = SPARKR_SHELL_RESOURCE; constructEnvVarArgs(env, "SPARKR_SUBMIT_ARGS"); // Set shell.R as R_PROFILE_USER to load the SparkR package when the shell comes up. String sparkHome = System.getenv("SPARK_HOME"); env.put("R_PROFILE_USER", join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R")); List<String> args = new ArrayList<>(); args.add(firstNonEmpty(conf.get(SparkLauncher.SPARKR_R_SHELL), System.getenv("SPARKR_DRIVER_R"), "R")); return args; }
// Configure a dummy properties file plus explicit driver memory / classpath
// overrides, then build the spark-submit command for inspection.
// NOTE(review): fragment of a test method; the surrounding setup and
// assertions are outside the visible range.
launcher.setPropertiesFile(dummyPropsFile.getAbsolutePath()); launcher.conf.put(SparkLauncher.DRIVER_MEMORY, "1g"); launcher.conf.put(SparkLauncher.DRIVER_EXTRA_CLASSPATH, "/driver"); List<String> cmd = launcher.buildCommand(env);
// NOTE(review): this block was garbled — the thrift-server ternary's result was
// discarded (statement with no effect) while `tsMemory` was used but never
// declared, and the `if` block's closing brace was missing. Restored the
// assignment and closed the daemon-opts block so SPARK_SUBMIT_OPTS is always
// applied, not only for the thrift server.
Map<String, String> config = getEffectiveConfig();
boolean isClientMode = isClientMode(config);
String extraClassPath = isClientMode ? config.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH) : null;

List<String> cmd = buildJavaCommand(extraClassPath);
// The Thrift server runs as a daemon, so it also honors the daemon JVM options.
if (isThriftServer(mainClass)) {
  addOptionString(cmd, System.getenv("SPARK_DAEMON_JAVA_OPTS"));
}
addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));

// Driver memory precedence: SPARK_DAEMON_MEMORY (thrift server only), then
// spark.driver.memory, then SPARK_DRIVER_MEMORY, then SPARK_MEM, then default.
String tsMemory = isThriftServer(mainClass) ? System.getenv("SPARK_DAEMON_MEMORY") : null;
String memory = firstNonEmpty(tsMemory, config.get(SparkLauncher.DRIVER_MEMORY),
  System.getenv("SPARK_DRIVER_MEMORY"), System.getenv("SPARK_MEM"), DEFAULT_MEM);
cmd.add("-Xmx" + memory);
addOptionString(cmd, driverExtraJavaOptions);
mergeEnvPathList(env, getLibPathEnvName(), config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
cmd.addAll(buildSparkSubmitArgs());
return cmd;
/**
 * Builds the quoted, space-separated spark-submit argument string and stores it
 * in {@code env} under {@code submitArgsEnvVariable}. Also merges the driver
 * extra library path into the platform library-path env variable.
 *
 * @param env Environment map to populate.
 * @param submitArgsEnvVariable Name of the variable receiving the joined args.
 */
private void constructEnvVarArgs(
    Map<String, String> env,
    String submitArgsEnvVariable) throws IOException {
  mergeEnvPathList(env, getLibPathEnvName(),
    getEffectiveConfig().get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));

  List<String> args = buildSparkSubmitArgs();
  StringBuilder joined = new StringBuilder();
  for (int i = 0; i < args.size(); i++) {
    if (i > 0) {
      joined.append(" ");
    }
    joined.append(quoteForCommandString(args.get(i)));
  }
  env.put(submitArgsEnvVariable, joined.toString());
}
/**
 * Starts a Spark application.
 *
 * @see AbstractLauncher#startApplication(SparkAppHandle.Listener...)
 * @param listeners Listeners to add to the handle before the app is launched.
 * @return A handle for the launched application.
 */
@Override
public SparkAppHandle startApplication(SparkAppHandle.Listener... listeners) throws IOException {
  if (builder.isClientMode(builder.getEffectiveConfig())) {
    LOG.warning("It's not recommended to run client-mode applications using InProcessLauncher.");
  }

  Method submitEntryPoint = findSparkSubmit();
  LauncherServer launcherServer = LauncherServer.getOrCreateServer();
  InProcessAppHandle appHandle = new InProcessAppHandle(launcherServer);
  for (SparkAppHandle.Listener listener : listeners) {
    appHandle.addListener(listener);
  }

  // Register with the launcher server and hand the connection details to the
  // child through the launcher protocol configuration.
  String handleSecret = launcherServer.registerHandle(appHandle);
  setConf(LauncherProtocol.CONF_LAUNCHER_PORT, String.valueOf(launcherServer.getPort()));
  setConf(LauncherProtocol.CONF_LAUNCHER_SECRET, handleSecret);

  List<String> submitArgs = builder.buildSparkSubmitArgs();
  String[] argv = submitArgs.toArray(new String[submitArgs.size()]);
  String appName = CommandBuilderUtils.firstNonEmpty(builder.appName, builder.mainClass,
    "<unknown>");
  appHandle.start(appName, submitEntryPoint, argv);
  return appHandle;
}
// Building spark-submit args without ever setting an app resource must fail.
@Test(expected = IllegalArgumentException.class)
public void testMissingAppResource() {
  SparkSubmitCommandBuilder unconfigured = new SparkSubmitCommandBuilder();
  unconfigured.buildSparkSubmitArgs();
}
// Builds the child-process command line: the spark-submit launcher resolved by
// findSparkSubmit(), followed by the builder's spark-submit arguments.
// NOTE(review): method is truncated in this view — the ProcessBuilder
// construction and return lie outside the visible range.
private ProcessBuilder createBuilder() throws IOException { List<String> cmd = new ArrayList<>(); cmd.add(findSparkSubmit()); cmd.addAll(builder.buildSparkSubmitArgs());
// Builds the child-process command line: <sparkHome>/bin/spark-submit (with a
// .cmd suffix on Windows), followed by the builder's spark-submit arguments.
// NOTE(review): method is truncated in this view — the ProcessBuilder
// construction and return lie outside the visible range.
private ProcessBuilder createBuilder() { List<String> cmd = new ArrayList<>(); String script = isWindows() ? "spark-submit.cmd" : "spark-submit"; cmd.add(join(File.separator, builder.getSparkHome(), "bin", script)); cmd.addAll(builder.buildSparkSubmitArgs());
/**
 * Set a custom properties file with Spark configuration for the application.
 *
 * @param path Path to custom properties file to use; rejected via
 *             {@code checkNotNull} when null.
 * @return This launcher, for call chaining.
 */
public SparkLauncher setPropertiesFile(String path) { checkNotNull(path, "path"); builder.setPropertiesFile(path); return this; }
private List<String> buildSparkRCommand(Map<String, String> env) throws IOException { if (!appArgs.isEmpty() && appArgs.get(0).endsWith(".R")) { System.err.println( "Running R applications through 'sparkR' is not supported as of Spark 2.0.\n" + "Use ./bin/spark-submit <R file>"); System.exit(-1); } // When launching the SparkR shell, store the spark-submit arguments in the SPARKR_SUBMIT_ARGS // env variable. appResource = SPARKR_SHELL_RESOURCE; constructEnvVarArgs(env, "SPARKR_SUBMIT_ARGS"); // Set shell.R as R_PROFILE_USER to load the SparkR package when the shell comes up. String sparkHome = System.getenv("SPARK_HOME"); env.put("R_PROFILE_USER", join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R")); List<String> args = new ArrayList<>(); args.add(firstNonEmpty(conf.get(SparkLauncher.SPARKR_R_SHELL), System.getenv("SPARKR_DRIVER_R"), "R")); return args; }