@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=childproc") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), TF); new OutputRedirector(app.getErrorStream(), TF); assertEquals(0, app.waitFor()); }
// NOTE(review): this is a fragment of an argument-parsing loop; the enclosing
// method header and several closing braces lie outside this view, so the braces
// below do not balance on their own. Left byte-identical, comments only.
// Try to match the current arg against options that take a value.
String name = findCliOption(arg, opts);
if (name != null) {
  if (value == null) {
    // Value was not attached with "="; consume the next argument as the value.
    value = args.get(idx);
  if (!handle(name, value)) {
    break;
// Not a valued option; try the no-value switches next.
name = findCliOption(arg, switches);
if (name != null) {
  if (!handle(name, null)) {
    break;
// Unknown argument: let the subclass decide whether parsing stops here.
if (!handleUnknown(arg)) {
  break;
idx++;
// Everything from the point where parsing stopped is passed through verbatim.
handleExtraArgs(args.subList(idx, args.size()));
@Test
public void testEqualSeparatedOption() {
  // An option whose value is attached with '=' must be split into
  // (canonical name, value) before being handed to handle().
  String combined = parser.MASTER + "=" + parser.MASTER;
  parser.parse(Arrays.asList(combined));
  verify(parser).handle(eq(parser.MASTER), eq(parser.MASTER));
  verify(parser).handleExtraArgs(eq(Collections.emptyList()));
}
@Test
public void testExtraOptions() {
  // The second "--master" token is consumed as the value of the first; "foo"
  // is unknown, and everything after it ("bar") goes to handleExtraArgs().
  List<String> cmdLine = Arrays.asList(parser.MASTER, parser.MASTER, "foo", "bar");
  parser.parse(cmdLine);
  verify(parser).handle(eq(parser.MASTER), eq(parser.MASTER));
  verify(parser).handleUnknown(eq("foo"));
  verify(parser).handleExtraArgs(eq(Arrays.asList("bar")));
}
/**
 * Adds a no-value argument to the Spark invocation. If the argument is known, this method
 * validates whether the argument is indeed a no-value argument, and throws an exception
 * otherwise.
 * <p>
 * Use this method with caution. It is possible to create an invalid Spark command by passing
 * unknown arguments to this method, since those are allowed for forward compatibility.
 *
 * @since 1.5.0
 * @param arg Argument to add.
 * @return This launcher.
 */
public SparkLauncher addSparkArg(String arg) {
  // Validation only: a recognized option that requires a value makes the
  // validator throw; unknown options pass for forward compatibility.
  new ArgumentValidator(false).parse(Arrays.asList(arg));
  builder.sparkArgs.add(arg);
  return this;
}
// NOTE(review): this is a fragment of an argument-parsing loop; the enclosing
// method header and several closing braces lie outside this view, so the braces
// below do not balance on their own. Left byte-identical, comments only.
// Try to match the current arg against options that take a value.
String name = findCliOption(arg, opts);
if (name != null) {
  if (value == null) {
    // Value was not attached with "="; consume the next argument as the value.
    value = args.get(idx);
  if (!handle(name, value)) {
    break;
// Not a valued option; try the no-value switches next.
name = findCliOption(arg, switches);
if (name != null) {
  if (!handle(name, null)) {
    break;
// Unknown argument: let the subclass decide whether parsing stops here.
if (!handleUnknown(arg)) {
  break;
idx++;
// Everything from the point where parsing stopped is passed through verbatim.
handleExtraArgs(args.subList(idx, args.size()));
@Test
public void testEqualSeparatedOption() {
  // An option whose value is attached with '=' must be split into
  // (canonical name, value) before being handed to handle().
  String combined = parser.MASTER + "=" + parser.MASTER;
  parser.parse(Arrays.asList(combined));
  verify(parser).handle(eq(parser.MASTER), eq(parser.MASTER));
  verify(parser).handleExtraArgs(eq(Collections.emptyList()));
}
@Test
public void testExtraOptions() {
  // The second "--master" token is consumed as the value of the first; "foo"
  // is unknown, and everything after it ("bar") goes to handleExtraArgs().
  List<String> cmdLine = Arrays.asList(parser.MASTER, parser.MASTER, "foo", "bar");
  parser.parse(cmdLine);
  verify(parser).handle(eq(parser.MASTER), eq(parser.MASTER));
  verify(parser).handleUnknown(eq("foo"));
  verify(parser).handleExtraArgs(eq(Arrays.asList("bar")));
}
/**
 * Adds a no-value argument to the Spark invocation. If the argument is known, this method
 * validates whether the argument is indeed a no-value argument, and throws an exception
 * otherwise.
 * <p>
 * Use this method with caution. It is possible to create an invalid Spark command by passing
 * unknown arguments to this method, since those are allowed for forward compatibility.
 *
 * @since 1.5.0
 * @param arg Argument to add.
 * @return This launcher.
 */
public SparkLauncher addSparkArg(String arg) {
  // Validation only: a recognized option that requires a value makes the
  // validator throw; unknown options pass for forward compatibility.
  new ArgumentValidator(false).parse(Arrays.asList(arg));
  builder.sparkArgs.add(arg);
  return this;
}
// NOTE(review): this is a fragment of an argument-parsing loop; the enclosing
// method header and several closing braces lie outside this view, so the braces
// below do not balance on their own. Left byte-identical, comments only.
// Try to match the current arg against options that take a value.
String name = findCliOption(arg, opts);
if (name != null) {
  if (value == null) {
    // Value was not attached with "="; consume the next argument as the value.
    value = args.get(idx);
  if (!handle(name, value)) {
    break;
// Not a valued option; try the no-value switches next.
name = findCliOption(arg, switches);
if (name != null) {
  if (!handle(name, null)) {
    break;
// Unknown argument: let the subclass decide whether parsing stops here.
if (!handleUnknown(arg)) {
  break;
idx++;
// Everything from the point where parsing stopped is passed through verbatim.
handleExtraArgs(args.subList(idx, args.size()));
@Test
public void testAllOptions() {
  // Cumulative number of handle() invocations across both loops; the Mockito
  // times(...) verifications below depend on this running total.
  int totalHandled = 0;

  // Every alias of every valued option must be reported under its canonical
  // (first) name, with the following argument as its value.
  for (String[] aliases : parser.opts) {
    for (String alias : aliases) {
      String expectedValue = alias + "-value";
      parser.parse(Arrays.asList(alias, expectedValue));
      totalHandled++;
      verify(parser).handle(eq(aliases[0]), eq(expectedValue));
      verify(parser, times(totalHandled)).handle(anyString(), anyString());
      verify(parser, times(totalHandled)).handleExtraArgs(eq(Collections.emptyList()));
    }
  }

  // Every alias of every no-value switch must be reported under its canonical
  // name with a null value.
  for (String[] aliases : parser.switches) {
    int perSwitchCalls = 0;
    for (String alias : aliases) {
      parser.parse(Arrays.asList(alias));
      totalHandled++;
      perSwitchCalls++;
      verify(parser, times(perSwitchCalls)).handle(eq(aliases[0]), same(null));
      verify(parser, times(totalHandled)).handle(anyString(), any(String.class));
      verify(parser, times(totalHandled)).handleExtraArgs(eq(Collections.emptyList()));
    }
  }
}
@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=console") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .redirectError() .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF); assertEquals(0, app.waitFor()); }
/**
 * Adds a no-value argument to the Spark invocation. If the argument is known, this method
 * validates whether the argument is indeed a no-value argument, and throws an exception
 * otherwise.
 * <p>
 * Use this method with caution. It is possible to create an invalid Spark command by passing
 * unknown arguments to this method, since those are allowed for forward compatibility.
 *
 * @since 1.5.0
 * @param arg Argument to add.
 * @return This launcher.
 */
public T addSparkArg(String arg) {
  // Validation only: a recognized option that requires a value makes the
  // validator throw; unknown options pass for forward compatibility.
  new ArgumentValidator(false).parse(Arrays.asList(arg));
  builder.userArgs.add(arg);
  return self();
}
// NOTE(review): this is a fragment of an argument-parsing loop; the enclosing
// method header and several closing braces lie outside this view, so the braces
// below do not balance on their own. Left byte-identical, comments only.
// Try to match the current arg against options that take a value.
String name = findCliOption(arg, opts);
if (name != null) {
  if (value == null) {
    // Value was not attached with "="; consume the next argument as the value.
    value = args.get(idx);
  if (!handle(name, value)) {
    break;
// Not a valued option; try the no-value switches next.
name = findCliOption(arg, switches);
if (name != null) {
  if (!handle(name, null)) {
    break;
// Unknown argument: let the subclass decide whether parsing stops here.
if (!handleUnknown(arg)) {
  break;
idx++;
// Everything from the point where parsing stopped is passed through verbatim.
handleExtraArgs(args.subList(idx, args.size()));
@Test
public void testAllOptions() {
  // Cumulative number of handle() invocations across both loops; the Mockito
  // times(...) verifications below depend on this running total.
  int totalHandled = 0;

  // Every alias of every valued option must be reported under its canonical
  // (first) name, with the following argument as its value.
  for (String[] aliases : parser.opts) {
    for (String alias : aliases) {
      String expectedValue = alias + "-value";
      parser.parse(Arrays.asList(alias, expectedValue));
      totalHandled++;
      verify(parser).handle(eq(aliases[0]), eq(expectedValue));
      verify(parser, times(totalHandled)).handle(anyString(), anyString());
      verify(parser, times(totalHandled)).handleExtraArgs(eq(Collections.emptyList()));
    }
  }

  // Every alias of every no-value switch must be reported under its canonical
  // name with a null value.
  for (String[] aliases : parser.switches) {
    int perSwitchCalls = 0;
    for (String alias : aliases) {
      parser.parse(Arrays.asList(alias));
      totalHandled++;
      perSwitchCalls++;
      verify(parser, times(perSwitchCalls)).handle(eq(aliases[0]), same(null));
      verify(parser, times(totalHandled)).handle(anyString(), any(String.class));
      verify(parser, times(totalHandled)).handleExtraArgs(eq(Collections.emptyList()));
    }
  }
}
@Test public void testChildProcLauncher() throws Exception { // This test is failed on Windows due to the failure of initiating executors // by the path length limitation. See SPARK-18718. assumeTrue(!Utils.isWindows()); SparkSubmitOptionParser opts = new SparkSubmitOptionParser(); Map<String, String> env = new HashMap<>(); env.put("SPARK_PRINT_LAUNCH_COMMAND", "1"); launcher .setMaster("local") .setAppResource(SparkLauncher.NO_RESOURCE) .addSparkArg(opts.CONF, String.format("%s=-Dfoo=ShouldBeOverriddenBelow", SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)) .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Dfoo=bar -Dtest.appender=console") .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, System.getProperty("java.class.path")) .addSparkArg(opts.CLASS, "ShouldBeOverriddenBelow") .setMainClass(SparkLauncherTestApp.class.getName()) .redirectError() .addAppArgs("proc"); final Process app = launcher.launch(); new OutputRedirector(app.getInputStream(), getClass().getName() + ".child", TF); assertEquals(0, app.waitFor()); }
/**
 * Adds a no-value argument to the Spark invocation. If the argument is known, this method
 * validates whether the argument is indeed a no-value argument, and throws an exception
 * otherwise.
 * <p>
 * Use this method with caution. It is possible to create an invalid Spark command by passing
 * unknown arguments to this method, since those are allowed for forward compatibility.
 *
 * @since 1.5.0
 * @param arg Argument to add.
 * @return This launcher.
 */
public T addSparkArg(String arg) {
  // Validation only: a recognized option that requires a value makes the
  // validator throw; unknown options pass for forward compatibility.
  new ArgumentValidator(false).parse(Arrays.asList(arg));
  builder.userArgs.add(arg);
  return self();
}
@Test public void testSparkArgumentHandling() throws Exception { SparkSubmitOptionParser opts = new SparkSubmitOptionParser();
// Parsing an option that requires a value, with nothing following it on the
// command line, must fail with IllegalArgumentException.
@Test(expected=IllegalArgumentException.class)
public void testMissingArg() {
  parser.parse(Arrays.asList(parser.MASTER));
}
@Test public void testSparkArgumentHandling() throws Exception { SparkSubmitOptionParser opts = new SparkSubmitOptionParser();