/** Points the launcher under test at the Spark installation named by the {@code spark.test.home} system property. */
@Before
public void configureLauncher() {
  String sparkHome = System.getProperty("spark.test.home");
  launcher = new SparkLauncher().setSparkHome(sparkHome);
}
// Launch SparkPi on YARN as a child spark-submit process, mirror its stdout/stderr
// on dedicated reader threads, and block until the process exits.
Process sparkProcess = new SparkLauncher()
    .setSparkHome("C:\\spark-1.4.1-bin-hadoop2.6")
    .setAppResource("C:\\spark-1.4.1-bin-hadoop2.6\\lib\\spark-examples-1.4.1-hadoop2.6.0.jar")
    .setMainClass("org.apache.spark.examples.SparkPi")
    .setMaster("yarn-cluster")
    .launch();

// Drain stdout so the child cannot block on a full pipe buffer.
InputStreamReaderRunnable stdoutReader = new InputStreamReaderRunnable(sparkProcess.getInputStream(), "input");
Thread stdoutThread = new Thread(stdoutReader, "LogStreamReader input");
stdoutThread.start();

// Likewise for stderr.
InputStreamReaderRunnable stderrReader = new InputStreamReaderRunnable(sparkProcess.getErrorStream(), "error");
Thread stderrThread = new Thread(stderrReader, "LogStreamReader error");
stderrThread.start();

System.out.println("Waiting for finish...");
int exitCode = sparkProcess.waitFor();
System.out.println("Finished! Exit code:" + exitCode);
// NOTE(review): incomplete snippet — the builder chain has no terminating call or
// semicolon here; presumably it continues (e.g. with .launch()) outside this fragment.
SparkLauncher spark = new SparkLauncher() .setVerbose(true) .setJavaHome(javaHome)
// NOTE(review): duplicate of the fragment above and equally incomplete — the builder
// chain is unterminated; the rest of the configuration is outside this snippet.
SparkLauncher spark = new SparkLauncher() .setVerbose(true) .setJavaHome(javaHome)
// Starts a Spark application in-process via the SparkAppHandle API. Not compilable
// as-is: the "..." placeholders (surrounding code and the setConf arguments) must be
// filled in by the caller with real values.
import org.apache.spark.launcher.SparkAppHandle; import org.apache.spark.launcher.SparkLauncher; ... public void startApacheSparkApplication(){ SparkAppHandle handler = new SparkLauncher() .setAppResource("pathToYourSparkApp.jar") .setMainClass("your.package.main.Class") .setMaster("local") .setConf(...) .startApplication(); // <-- and start spark job app } ...
// Configure the submission first, then start the app through the in-process handle API.
SparkLauncher launcher = new SparkLauncher()
    .setSparkHome(SPARK_HOME)
    .setJavaHome(JAVA_HOME)
    .setAppResource(SPARK_JOB_JAR_PATH)
    .setMainClass(SPARK_JOB_MAIN_CLASS)
    .addAppArgs("arg1", "arg2")
    .setMaster("yarn-cluster")
    .setConf("spark.dynamicAllocation.enabled", "true");
SparkAppHandle handle = launcher.startApplication();
import org.apache.spark.launcher.SparkLauncher

/** Minimal driver that submits a Spark application to YARN and blocks until it exits. */
object Launcher extends App {
  val launcher = new SparkLauncher()
    .setSparkHome("...")
    .setAppResource("..path to your jar...")
    .setMainClass("..your app...")
    .setMaster("yarn")
  val spark = launcher.launch()
  spark.waitFor()
}
// NOTE(review): incomplete snippet — the builder chain setting driver/executor extra
// classpath is unterminated; the closing call (e.g. .launch()) is outside this fragment.
Process spark = new SparkLauncher() .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, "/path/to/jar/file.jar") .setConf(SparkLauncher.EXECUTOR_EXTRA_CLASSPATH, "/path/to/jar/file.jar")
import org.apache.spark.launcher.SparkAppHandle; import org.apache.spark.launcher.SparkLauncher; public class MyLauncher { public static void main(String[] args) throws Exception { SparkAppHandle handle = new SparkLauncher() .setAppResource("/my/app.jar") .setMainClass("my.spark.app.Main") .setMaster("local") .setConf(SparkLauncher.DRIVER_MEMORY, "2g") .startApplication(); // Use handle API to monitor / control application. } }
// NOTE(review): incomplete snippet — shows adding local jars to the submission but the
// builder chain is unterminated; the rest of the chain lies outside this fragment.
Process spark = new SparkLauncher() .addJar("/path/to/local/jar/file1.jar") .addJar("/path/to/local/jar/file2.jar")
// NOTE(review): incomplete snippet — classpath wildcard variant of the fragment above;
// the builder chain is unterminated and continues outside this fragment.
Process spark = new SparkLauncher() .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, "/path/to/jar/*") .setConf(SparkLauncher.EXECUTOR_EXTRA_CLASSPATH, "/path/to/jar/*")
// Build the yarn-cluster submission, spawn spark-submit as a child process, and wait
// for it to terminate.
SparkLauncher launcher = new SparkLauncher()
    .setSparkHome(SPARK_HOME)
    .setJavaHome(JAVA_HOME)
    .setAppResource(SPARK_JOB_JAR_PATH)
    .setMainClass(SPARK_JOB_MAIN_CLASS)
    .addAppArgs("arg1", "arg2")
    .setMaster("yarn-cluster")
    .setConf("spark.dynamicAllocation.enabled", "true");
Process sparkLauncherProcess = launcher.launch();
sparkLauncherProcess.waitFor();
import org.apache.spark.launcher.SparkLauncher;

/** Submits /my/app.jar to a local master via spark-submit and waits for it to finish. */
public class MyLauncher {
  public static void main(String[] args) throws Exception {
    SparkLauncher launcher = new SparkLauncher()
        .setAppResource("/my/app.jar")
        .setMainClass("my.spark.app.Main")
        .setMaster("local")
        .setConf(SparkLauncher.DRIVER_MEMORY, "2g");
    Process spark = launcher.launch();
    spark.waitFor();
  }
}
// Start the app through the handle API and observe the state transition: the handle
// starts out UNKNOWN and only later reflects the driver's actual state.
SparkLauncher launcher = new SparkLauncher()
    .setAppName("test word count")
    .setAppResource("file://c:/temp/my.jar")
    .setMainClass("my.spark.app.Main")
    .setMaster("spark://master:7077");
SparkAppHandle handle = launcher.startApplication();
handle.getState(); // immediately returns UNKNOWN
Thread.sleep(1000); // wait a little bit...
handle.getState(); // the state may have changed to CONNECTED or others
import org.apache.spark.launcher.SparkLauncher;

/** Launches a local Spark driver as a child process and blocks until it exits. */
public class MyLauncher {
  public static void main(String[] args) throws Exception {
    SparkLauncher launcher = new SparkLauncher();
    launcher.setAppResource("/my/app.jar");
    launcher.setMainClass("my.spark.app.Main");
    launcher.setMaster("local");
    launcher.setConf(SparkLauncher.DRIVER_MEMORY, "2g");
    launcher.launch().waitFor();
  }
}
/**
 * Builds a SparkLauncher that resubmits the jar containing this class, wiring
 * {@code SparkMain} as the entry point and adding every jar found in the sibling
 * {@code lib} directory.
 *
 * TODO: Need to fix a bunch of hardcoded stuff here eg: history server, spark distro
 *
 * @param propertiesFile path to a Spark properties file handed to the launcher
 * @return a configured (not yet started) SparkLauncher
 * @throws URISyntaxException if the code-source location cannot be converted to a URI
 */
public static SparkLauncher initLauncher(String propertiesFile) throws URISyntaxException {
  String currentJar = new File(SparkUtil.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath())
      .getAbsolutePath();
  SparkLauncher sparkLauncher = new SparkLauncher()
      .setAppResource(currentJar)
      .setMainClass(SparkMain.class.getName())
      .setPropertiesFile(propertiesFile);
  File libDirectory = new File(new File(currentJar).getParent(), "lib");
  // BUGFIX: File.list() returns null when "lib" is missing or not a directory;
  // the original iterated over it unconditionally and threw NullPointerException.
  String[] libraries = libDirectory.list();
  if (libraries != null) {
    for (String library : libraries) {
      sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath());
    }
  }
  return sparkLauncher;
}
/** Configuring stderr redirection twice must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void testRedirectErrorTwiceFails() throws Exception {
  File errFile = Files.createTempFile("stderr", "txt").toFile();
  errFile.deleteOnExit();
  SparkLauncher launcher = new SparkLauncher();
  launcher.redirectError();
  launcher.redirectError(errFile).launch().waitFor();
}
/** Mixing redirectToLog with an explicit file/stream redirect must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testBadLogRedirect() throws Exception {
  File outFile = Files.createTempFile("stdout", "txt").toFile();
  outFile.deleteOnExit();
  SparkLauncher launcher = new SparkLauncher();
  launcher.redirectError();
  launcher.redirectOutput(outFile);
  launcher.redirectToLog("foo").launch().waitFor();
}
/** When the same redirect is configured twice, the last call wins. */
@Test
public void testRedirectLastWins() throws Exception {
  SparkLauncher launcher = new SparkLauncher();
  launcher.redirectError(ProcessBuilder.Redirect.PIPE)
      .redirectError(ProcessBuilder.Redirect.INHERIT);
  // BUGFIX: JUnit's assertEquals takes (expected, actual); the original passed them
  // reversed, which yields misleading failure messages.
  assertEquals(ProcessBuilder.Redirect.Type.INHERIT, launcher.errorStream.type());

  launcher.redirectOutput(ProcessBuilder.Redirect.PIPE)
      .redirectOutput(ProcessBuilder.Redirect.INHERIT);
  assertEquals(ProcessBuilder.Redirect.Type.INHERIT, launcher.outputStream.type());
}
/** A single redirect call records the requested redirect type on the launcher. */
@Test
public void testRedirectsSimple() throws Exception {
  SparkLauncher launcher = new SparkLauncher();
  launcher.redirectError(ProcessBuilder.Redirect.PIPE);
  assertNotNull(launcher.errorStream);
  // BUGFIX: JUnit's assertEquals takes (expected, actual); the original passed them
  // reversed, which yields misleading failure messages.
  assertEquals(ProcessBuilder.Redirect.Type.PIPE, launcher.errorStream.type());

  launcher.redirectOutput(ProcessBuilder.Redirect.PIPE);
  assertNotNull(launcher.outputStream);
  assertEquals(ProcessBuilder.Redirect.Type.PIPE, launcher.outputStream.type());
}