/**
 * Sets environment variables for the container of the given runnable.
 * Delegates to the wrapped preparer; returns this wrapper (not the delegate)
 * so callers keep chaining on the wrapper.
 */
@Override
public TwillPreparer withEnv(String runnableName, Map<String, String> env) {
  getDelegate().withEnv(runnableName, env);
  return this;
}
/**
 * Per-runnable environment setup: forwards the call to the underlying
 * preparer and returns this wrapper to preserve the fluent chain.
 */
@Override
public TwillPreparer withEnv(String runnableName, Map<String, String> env) {
  getDelegate().withEnv(runnableName, env);
  return this;
}
/**
 * Sets application-wide environment variables for all runnable containers.
 * Delegates to the wrapped preparer; returns this wrapper so callers keep
 * chaining on the wrapper.
 */
@Override
public TwillPreparer withEnv(Map<String, String> env) {
  getDelegate().withEnv(env);
  return this;
}
/**
 * Application-wide environment setup: forwards the call to the underlying
 * preparer and returns this wrapper to preserve the fluent chain.
 */
@Override
public TwillPreparer withEnv(Map<String, String> env) {
  getDelegate().withEnv(env);
  return this;
}
/** * Prepare the specs of the twill application for the Explore twill runnable. * Add jars needed by the Explore module in the classpath of the containers, and * add conf files (hive_site.xml, etc) as resources available for the Explore twill * runnable. */ private TwillPreparer prepareExploreContainer(TwillPreparer preparer) throws IOException { // Add all the conf files needed by hive as resources. They will be available in the explore container classpath Set<String> addedFiles = Sets.newHashSet(); for (File file : ExploreUtils.getExploreConfFiles()) { String name = file.getName(); if (name.equals("logback.xml") || !name.endsWith(".xml")) { continue; } if (addedFiles.add(name)) { LOG.debug("Adding config file: {}", file.getAbsolutePath()); preparer = preparer.withResources(file.toURI()); } else { LOG.warn("Ignoring duplicate config file: {}", file); } } // Setup SPARK_HOME environment variable as well if spark is configured String sparkHome = System.getenv(Constants.SPARK_HOME); if (sparkHome != null) { preparer.withEnv(Constants.Service.EXPLORE_HTTP_USER_SERVICE, Collections.singletonMap(Constants.SPARK_HOME, sparkHome)); } return preparer; }
@Test public void testEnv() throws Exception { TwillRunner runner = getTwillRunner(); TwillController controller = runner.prepare(new EchoApp()) .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true))) .withApplicationArguments("echo") .withArguments("echo1", "echo1") .withArguments("echo2", "echo2") .withEnv(ImmutableMap.of("GREETING", "Hello")) .withEnv("echo2", ImmutableMap.of("GREETING", "Hello2")) .start(); // Service echo1 should returns "Hello" as greeting, echo2 should returns "Hello2" Map<String, String> runnableGreetings = ImmutableMap.of("echo1", "Hello", "echo2", "Hello2"); for (Map.Entry<String, String> entry : runnableGreetings.entrySet()) { Discoverable discoverable = getDiscoverable(controller.discoverService(entry.getKey()), 60, TimeUnit.SECONDS); try ( Socket socket = new Socket(discoverable.getSocketAddress().getAddress(), discoverable.getSocketAddress().getPort()) ) { PrintWriter writer = new PrintWriter(new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8), true); LineReader reader = new LineReader(new InputStreamReader(socket.getInputStream(), Charsets.UTF_8)); writer.println("GREETING"); Assert.assertEquals(entry.getValue(), reader.readLine()); } } controller.terminate().get(); }
preparer .withResources(logbackFile.toUri()) .withEnv(Collections.singletonMap("CDAP_LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR));
.withBundlerClassAcceptor(new HadoopClassExcluder()) .setLogLevels(ImmutableMap.of(Logger.ROOT_LOGGER_NAME, yarnContainerLogLevel())) .withEnv(envVars) .withMaxRetries(YARN_RUNNABLE_NAME, MAX_APP_RESTART_RETRIES) .withArguments(YARN_RUNNABLE_NAME, discoveryArgs.toArray());