@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=childproc") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), TF); new OutputRedirector(app.getErrorStream(), TF); assertEquals(0, app.waitFor()); }
@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=console") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .redirectError() .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF); assertEquals(0, app.waitFor()); }
@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=console") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .redirectError() .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF); assertEquals(0, app.waitFor()); }
/**
 * Attaches the spawned child process to this handle and begins mirroring its output
 * into the logger named {@code loggerName}.
 */
void setChildProc(Process childProc, String loggerName) {
  // Start draining the child's stdout on a thread from the shared redirector factory,
  // then record both the process and its redirector on the handle. The two field
  // assignments are independent of redirector construction order.
  OutputRedirector out =
    new OutputRedirector(childProc.getInputStream(), loggerName, SparkLauncher.REDIRECTOR_FACTORY);
  this.childProc = childProc;
  this.redirector = out;
}
/**
 * Attaches the spawned child process to this handle and begins mirroring its output
 * into the logger named {@code loggerName}.
 *
 * NOTE(review): only getInputStream() (stdout) is drained here — presumably the caller
 * merged stderr into stdout before launching; confirm against the launch path.
 */
void setChildProc(Process childProc, String loggerName) {
  this.childProc = childProc;
  // Drain the child's output on a thread from the shared factory so the child does not
  // block on a full pipe buffer.
  this.redirector = new OutputRedirector(childProc.getInputStream(), loggerName,
    SparkLauncher.REDIRECTOR_FACTORY);
}
void setChildProc(Process childProc, String loggerName, InputStream logStream) { this.childProc = childProc; if (logStream != null) { this.redirector = new OutputRedirector(logStream, loggerName, SparkLauncher.REDIRECTOR_FACTORY, this); } else { // If there is no log redirection, spawn a thread that will wait for the child process // to finish. SparkLauncher.REDIRECTOR_FACTORY.newThread(this::monitorChild).start(); } }
/**
 * Attaches the spawned child process to this handle.
 *
 * @param childProc The child process backing this handle.
 * @param loggerName Name of the logger receiving redirected output; only consulted when
 *                   {@code logStream} is non-null.
 * @param logStream Stream carrying the child's output to mirror into the logger, or null
 *                  when the caller manages the child's output itself.
 */
void setChildProc(Process childProc, String loggerName, InputStream logStream) {
  this.childProc = childProc;
  if (logStream != null) {
    // The redirector receives `this`, presumably so the handle is notified when the
    // stream (and hence the child) finishes — confirm in OutputRedirector.
    this.redirector = new OutputRedirector(logStream, loggerName,
      SparkLauncher.REDIRECTOR_FACTORY, this);
  } else {
    // If there is no log redirection, spawn a thread that will wait for the child process
    // to finish.
    SparkLauncher.REDIRECTOR_FACTORY.newThread(this::monitorChild).start();
  }
}
/**
 * Launches a sub-process that will start the configured Spark application.
 * <p>
 * The {@link #startApplication(SparkAppHandle.Listener...)} method is preferred when launching
 * Spark, since it provides better control of the child application.
 *
 * @return A process handle for the Spark app.
 */
public Process launch() throws IOException {
  Process childProc = createBuilder().start();
  if (redirectToLog) {
    // Mirror the child's stdout into the user-configured logger; the redirector's thread
    // keeps the pipe drained. NOTE(review): stderr is not drained here — presumably it is
    // merged into stdout by createBuilder(); confirm.
    String loggerName = builder.getEffectiveConfig().get(CHILD_PROCESS_LOGGER_NAME);
    new OutputRedirector(childProc.getInputStream(), loggerName, REDIRECTOR_FACTORY);
  }
  return childProc;
}
/**
 * Launches a sub-process that will start the configured Spark application.
 * <p>
 * The {@link #startApplication(SparkAppHandle.Listener...)} method is preferred when launching
 * Spark, since it provides better control of the child application.
 *
 * @return A process handle for the Spark app.
 */
public Process launch() throws IOException {
  ProcessBuilder pb = createBuilder();
  Process child = pb.start();
  if (redirectToLog) {
    // Mirror the child's stdout into the logger configured by the user; the redirector's
    // thread keeps the pipe drained so the child cannot block on a full buffer.
    String logName = builder.getEffectiveConfig().get(CHILD_PROCESS_LOGGER_NAME);
    new OutputRedirector(child.getInputStream(), logName, REDIRECTOR_FACTORY);
  }
  return child;
}
/**
 * Launches a sub-process that will start the configured Spark application.
 * <p>
 * The {@link #startApplication(SparkAppHandle.Listener...)} method is preferred when launching
 * Spark, since it provides better control of the child application.
 *
 * @return A process handle for the Spark app.
 */
public Process launch() throws IOException {
  ProcessBuilder pb = createBuilder();

  // A stream goes to the app logger only when the user has not installed an explicit sink
  // for it; stderr additionally counts as handled when it is merged into stdout.
  boolean outputToLog = outputStream == null;
  boolean errorToLog = !redirectErrorStream && errorStream == null;

  String loggerName = getLoggerName();
  if (loggerName != null && outputToLog && errorToLog) {
    // Both streams are destined for the logger: merge them so one redirector drains both.
    pb.redirectErrorStream(true);
  }

  Process childProc = pb.start();
  if (loggerName != null) {
    // Redirect whichever stream is left for the logger: stdout when it has no explicit
    // sink, otherwise stderr. Draining it prevents the child from blocking on a full pipe.
    InputStream logStream = outputToLog ? childProc.getInputStream() : childProc.getErrorStream();
    new OutputRedirector(logStream, loggerName, REDIRECTOR_FACTORY);
  }

  return childProc;
}
/**
 * Launches a sub-process that will start the configured Spark application.
 * <p>
 * The {@link #startApplication(SparkAppHandle.Listener...)} method is preferred when launching
 * Spark, since it provides better control of the child application.
 *
 * @return A process handle for the Spark app.
 */
public Process launch() throws IOException {
  ProcessBuilder pb = createBuilder();

  // A stream goes to the app logger only when the user has not installed an explicit sink
  // for it; stderr additionally counts as handled when it is merged into stdout.
  boolean outputToLog = outputStream == null;
  boolean errorToLog = !redirectErrorStream && errorStream == null;

  String loggerName = getLoggerName();
  if (loggerName != null && outputToLog && errorToLog) {
    // Both streams are destined for the logger: merge them so one redirector drains both.
    pb.redirectErrorStream(true);
  }

  Process childProc = pb.start();
  if (loggerName != null) {
    // Redirect whichever stream is left for the logger: stdout when it has no explicit
    // sink, otherwise stderr. Draining it prevents the child from blocking on a full pipe.
    InputStream logStream = outputToLog ? childProc.getInputStream() : childProc.getErrorStream();
    new OutputRedirector(logStream, loggerName, REDIRECTOR_FACTORY);
  }

  return childProc;
}