// Launches the given Twill application on YARN. Enables container debugging when the
// run options request it, bundles the concrete HBase table-util class as a dependency,
// attaches an HBase delegation token as a secure store, and passes the program jar name
// and runtime arguments as application arguments. The returned controller is wrapped by
// addCleanupListener so staged files (conf files, copied program, program dir) are
// removed when the run terminates.
// NOTE(review): the trailing "} });" closes an enclosing anonymous class that is not
// visible in this excerpt.
@Override public TwillController launch(TwillApplication twillApplication) { TwillPreparer twillPreparer = twillRunner .prepare(twillApplication); if (options.isDebug()) { LOG.info("Starting {} with debugging enabled.", program.getId()); twillPreparer.enableDebugging(); } TwillController twillController = twillPreparer .withDependencies(new HBaseTableUtilFactory().get().getClass()) .addSecureStore(YarnSecureStore.create(HBaseTokenUtils.obtainToken(hConf, new Credentials()))) .withApplicationArguments( String.format("--%s", RunnableOptions.JAR), copiedProgram.getJarLocation().getName(), String.format("--%s", RunnableOptions.RUNTIME_ARGS), runtimeArgs ).start(); return addCleanupListener(twillController, hConfFile, cConfFile, copiedProgram, programDir); } });
// Fragment (single statement from a larger method): launches the
// PeriodicNotificationTwillApp built from configFile, printing container logs to
// stdout via a UTF-8 auto-flushing writer, with the given application classpaths.
final TwillController controller = twillRunner .prepare(new PeriodicNotificationTwillApp(configFile)) .addLogHandler(new PrinterLogHandler(new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8), true))) .withApplicationClassPaths(applicationClassPaths) .start();
/** * Prepare the specs of the twill application for the Explore twill runnable. * Add jars needed by the Explore module in the classpath of the containers, and * add conf files (hive_site.xml, etc) as resources available for the Explore twill * runnable. */ private TwillPreparer prepareExploreContainer(TwillPreparer preparer) throws IOException { // Add all the conf files needed by hive as resources. They will be available in the explore container classpath Set<String> addedFiles = Sets.newHashSet(); for (File file : ExploreUtils.getExploreConfFiles()) { String name = file.getName(); if (name.equals("logback.xml") || !name.endsWith(".xml")) { continue; } if (addedFiles.add(name)) { LOG.debug("Adding config file: {}", file.getAbsolutePath()); preparer = preparer.withResources(file.toURI()); } else { LOG.warn("Ignoring duplicate config file: {}", file); } } // Setup SPARK_HOME environment variable as well if spark is configured String sparkHome = System.getenv(Constants.SPARK_HOME); if (sparkHome != null) { preparer.withEnv(Constants.Service.EXPLORE_HTTP_USER_SERVICE, Collections.singletonMap(Constants.SPARK_HOME, sparkHome)); } return preparer; }
/**
 * Adds a {@link LogHandler} to the {@link TwillPreparer} based on the configuration.
 * When the configured level is {@code OFF}, log collection is disabled entirely and no
 * handler is attached; otherwise container logs are forwarded to this class's logger at
 * the configured level (defaulting to {@code ERROR} on an unparseable value).
 *
 * @param twillPreparer the preparer to attach the handler to
 * @param cConf the configuration to read the app container log level from
 */
private void addLogHandler(TwillPreparer twillPreparer, CConfiguration cConf) {
  String confLevel = cConf.get(Constants.COLLECT_APP_CONTAINER_LOG_LEVEL).toUpperCase();
  if ("OFF".equals(confLevel)) {
    // Turn off log collection instead of attaching a handler.
    twillPreparer.withConfiguration(Collections.singletonMap(Configs.Keys.LOG_COLLECTION_ENABLED, "false"));
    return;
  }

  LogEntry.Level logLevel = LogEntry.Level.ERROR;
  try {
    // "ALL" is not a LogEntry.Level constant, so map it to the most verbose level.
    // Fix: confLevel is already upper-cased above; the second toUpperCase() was redundant.
    logLevel = "ALL".equals(confLevel) ? LogEntry.Level.TRACE : LogEntry.Level.valueOf(confLevel);
  } catch (Exception e) {
    // Fix: include the exception so the invalid value's failure cause is visible in logs.
    LOG.warn("Invalid application container log level {}. Defaulting to ERROR.", confLevel, e);
  }
  twillPreparer.addLogHandler(new LoggerLogHandler(LOG, logLevel));
}
// Fragment (non-contiguous excerpt; braces are unbalanced — the if-blocks opened here
// are closed outside this excerpt): configures the preparer with resources, per-run
// log levels derived from user arguments, twill configs, optional per-runnable max
// retries / log levels / configs, debugging, an optional scheduler queue, logback and
// CDAP_LOG_DIR JVM/environment settings, and extra dependencies/classpaths/env from
// the launch config.
twillPreparer.withResources(configResources); twillPreparer.setLogLevels(transformLogLevels(SystemArguments.getLogLevels(userArgs))); twillPreparer.withConfiguration(twillConfigs); RunnableDefinition runnableDefinition = entry.getValue(); if (runnableDefinition.getMaxRetries() != null) { twillPreparer.withMaxRetries(runnable, runnableDefinition.getMaxRetries()); twillPreparer.setLogLevels(runnable, transformLogLevels(runnableDefinition.getLogLevels())); twillPreparer.withConfiguration(runnable, runnableDefinition.getTwillRunnableConfigs()); twillPreparer.enableDebugging(); if (schedulerQueueName != null && !schedulerQueueName.isEmpty()) { LOG.info("Setting scheduler queue for app {} as {}", program.getId(), schedulerQueueName); twillPreparer.setSchedulerQueue(schedulerQueueName); twillPreparer.addJVMOptions("-Dlogback.configurationFile=" + LOGBACK_FILE_NAME); twillPreparer.withEnv(Collections.singletonMap("CDAP_LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR)); twillPreparer.withDependencies(extraDependencies); twillPreparer.withClassPaths(additionalClassPaths); twillPreparer.withClassPaths(launchConfig.getExtraClasspath()); twillPreparer.withEnv(launchConfig.getExtraEnv());
@Test public void testEnv() throws Exception { TwillRunner runner = getTwillRunner(); TwillController controller = runner.prepare(new EchoApp()) .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true))) .withApplicationArguments("echo") .withArguments("echo1", "echo1") .withArguments("echo2", "echo2") .withEnv(ImmutableMap.of("GREETING", "Hello")) .withEnv("echo2", ImmutableMap.of("GREETING", "Hello2")) .start(); // Service echo1 should returns "Hello" as greeting, echo2 should returns "Hello2" Map<String, String> runnableGreetings = ImmutableMap.of("echo1", "Hello", "echo2", "Hello2"); for (Map.Entry<String, String> entry : runnableGreetings.entrySet()) { Discoverable discoverable = getDiscoverable(controller.discoverService(entry.getKey()), 60, TimeUnit.SECONDS); try ( Socket socket = new Socket(discoverable.getSocketAddress().getAddress(), discoverable.getSocketAddress().getPort()) ) { PrintWriter writer = new PrintWriter(new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8), true); LineReader reader = new LineReader(new InputStreamReader(socket.getInputStream(), Charsets.UTF_8)); writer.println("GREETING"); Assert.assertEquals(entry.getValue(), reader.readLine()); } } controller.terminate().get(); }
// Fragment (non-contiguous excerpt; NOTE(review): syntactically broken as-is — e.g.
// ".withResources(...)" follows a semicolon and "YarnConfiguration..." starts
// mid-statement, so these lines were lifted from different spots in one method):
// configures the master-services preparer with twill configs, a logback resource, the
// CDAP_LOG_DIR env var, an optional scheduler queue, the HBase table-util dependency,
// renewed YARN secure-store credentials, YARN application classpaths with a Hadoop
// class excluder, extra classpaths, a custom class loader, and finally starts it with
// a configured timeout.
preparer.withConfiguration(twillConfigs); .withResources(logbackFile.toUri()) .withEnv(Collections.singletonMap("CDAP_LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR)); if (queueName != null) { LOG.info("Setting scheduler queue to {} for master services", queueName); preparer.setSchedulerQueue(queueName); preparer.withDependencies(injector.getInstance(HBaseTableUtil.class).getClass()); preparer.addSecureStore(YarnSecureStore.create(secureStoreRenewer.createCredentials())); YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)); preparer.withApplicationClassPaths(yarnAppClassPath).withBundlerClassAcceptor(new HadoopClassExcluder()); preparer = preparer.withClassPaths(Iterables.concat(yarnAppClassPath, extraClassPath)); } else { preparer = preparer.withClassPaths(extraClassPath); preparer.setClassLoader(MasterServiceMainClassLoader.class.getName()); TwillController controller = preparer.start(cConf.getLong(Constants.AppFabric.PROGRAM_MAX_START_SECONDS), TimeUnit.SECONDS);
// Fragment (starts mid-statement): builds the preparer for the dacDaemon application —
// YARN log handler, application classpaths, Hadoop class excluder, root log level,
// env vars, per-runnable max retries and arguments — then adds a classpath jar,
// JVM options, an optional scheduler queue, and enables suspend-on-start debugging
// for the YARN runnable.
YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH); final TwillPreparer preparer = twillRunner.prepare(dacDaemonApp) .addLogHandler(new YarnTwillLogHandler()) .withApplicationClassPaths(yarnClasspath) .withBundlerClassAcceptor(new HadoopClassExcluder()) .setLogLevels(ImmutableMap.of(Logger.ROOT_LOGGER_NAME, yarnContainerLogLevel())) .withEnv(envVars) .withMaxRetries(YARN_RUNNABLE_NAME, MAX_APP_RESTART_RETRIES) .withArguments(YARN_RUNNABLE_NAME, discoveryArgs.toArray()); preparer.withClassPaths(classpathJar); preparer.addJVMOptions(prepareCommandOptions(yarnConfiguration, propertyList)); preparer.setSchedulerQueue(queue); preparer.enableDebugging(true, YARN_RUNNABLE_NAME);
/**
 * Test to verify exception is thrown in case a non-existent runnable is specified
 * in a placement policy.
 */
@Test(expected = IllegalArgumentException.class)
public void testNonExistentRunnable() throws InterruptedException, ExecutionException, TimeoutException {
  TwillRunner twillRunner = getTwillRunner();
  // Preparing FaultyApplication is expected to throw IllegalArgumentException.
  TwillController twillController = twillRunner
    .prepare(new FaultyApplication())
    .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
    .start();
  twillController.terminate().get(120, TimeUnit.SECONDS);
}
// Fragment (tail of a fluent prepare chain): adds GC-logging JVM options, the "local"
// application argument, per-runnable arguments for LocalFileSocketServer, a footer
// file resource, stdout log printing, and starts the application.
.addJVMOptions(" -verbose:gc -Xloggc:gc.log -XX:+PrintGCDetails") .withApplicationArguments("local") .withArguments("LocalFileSocketServer", "local2") .withResources(footerFile.toURI()) .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true))) .start();
// Fragment (statements only): splits the comma-separated YARN classpath into
// applicationClassPaths, then launches HelloWorldRunnable with stdout log printing,
// those classpaths, and a Hadoop class excluder.
Iterables.addAll(applicationClassPaths, Splitter.on(",").split(yarnClasspath)); final TwillController controller = twillRunner.prepare(new HelloWorldRunnable()) .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true))) .withApplicationClassPaths(applicationClassPaths) .withBundlerClassAcceptor(new HadoopClassExcluder()) .start();
@Test public void testDebugPortOneRunnable() throws Exception { YarnTwillRunnerService runner = getTwillRunner(); runner.start(); TwillController controller = runner.prepare(new DummyApplication()) .enableDebugging("r1") .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out))) .start(); final CountDownLatch running = new CountDownLatch(1); controller.onRunning(new Runnable() { @Override public void run() { running.countDown(); } }, Threads.SAME_THREAD_EXECUTOR); Assert.assertTrue(running.await(120, TimeUnit.SECONDS)); Assert.assertTrue(waitForDebugPort(controller, "r1", 30)); controller.terminate().get(120, TimeUnit.SECONDS); // Sleep a bit before exiting. TimeUnit.SECONDS.sleep(2); }
/**
 * Verifies that a custom class loader can be set on the preparer, together with both
 * application-wide and per-runnable JVM options, by waiting for the "custom" service
 * (whose name and port come from those JVM options) to be discovered.
 */
@Test
public void testCustomClassLoader() throws Exception {
  TwillController twillController = getTwillRunner().prepare(new CustomClassLoaderRunnable())
    .setClassLoader(CustomClassLoader.class.getName())
    .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
    .setJVMOptions("-Dservice.port=54321")
    .setJVMOptions(CustomClassLoaderRunnable.class.getSimpleName(), "-Dservice.name=custom")
    .start();

  // One instance of the "custom" service should become discoverable within 120 seconds.
  Assert.assertTrue(waitForSize(twillController.discoverService("custom"), 1, 120));
  twillController.terminate().get();
}
}
// Fragment (NOTE(review): two excerpts from different loops/scopes — "jarPath" is
// declared twice, which would not compile in one scope): adds library jars and
// application jars as file-URI resources to the Fluo application's preparer, then
// starts it.
String jarPath = "file:" + f.getCanonicalPath(); log.trace("Adding library jar (" + f.getName() + ") to Fluo application."); preparer.withResources(new URI(jarPath)); String jarPath = "file:" + f.getCanonicalPath(); log.debug("Adding application jar (" + f.getName() + ") to Fluo application."); preparer.withResources(new URI(jarPath)); TwillController controller = preparer.start();
/**
 * Verifies that a service announced by ServiceApplication (under the name
 * "discovered") becomes discoverable through the controller.
 */
@Test
public void testServiceDiscovery() throws InterruptedException, ExecutionException, TimeoutException {
  TwillRunner runner = getTwillRunner();
  TwillController twillController = runner
    .prepare(new ServiceApplication())
    .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
    .withApplicationArguments("echo")
    .start();

  // Exactly one instance of the announced service should appear within 120 seconds.
  ServiceDiscovered serviceDiscovered = twillController.discoverService("discovered");
  Assert.assertTrue(waitForSize(serviceDiscovered, 1, 120));
  twillController.terminate().get();
}
/**
 * Runs FailingServer with the given number of instances and asserts that every
 * instance is retried exactly maxRetries times before the application terminates.
 *
 * @param instances number of runnable instances to launch
 */
private void maxRetriesRun(final int instances) throws TimeoutException, ExecutionException {
  TwillRunner twillRunner = getTwillRunner();
  final int maxRetries = 3;
  final AtomicInteger retriesSeen = new AtomicInteger(0);

  ResourceSpecification resourceSpec = ResourceSpecification.Builder.with()
    .setVirtualCores(1)
    .setMemory(512, ResourceSpecification.SizeUnit.MEGA)
    .setInstances(instances)
    .build();

  TwillController controller = twillRunner.prepare(new FailingServer(), resourceSpec)
    .withMaxRetries(FailingServer.class.getSimpleName(), maxRetries)
    .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true)))
    .addLogHandler(new LogHandler() {
      @Override
      public void onLog(LogEntry logEntry) {
        // Each retry of an instance emits a log line containing this phrase.
        if (logEntry.getMessage().contains("retries for instance")) {
          retriesSeen.incrementAndGet();
        }
      }
    })
    .start();

  controller.awaitTerminated(2, TimeUnit.MINUTES);
  Assert.assertEquals(maxRetries * instances, retriesSeen.get());
}
// Fragment (non-contiguous excerpt; the LogHandler body is cut off before ".start()"):
// disables log collection via configuration while still attaching a LogHandler —
// presumably to verify no log entries arrive when collection is off; confirm against
// the full test.
.withConfiguration(Collections.singletonMap(Configs.Keys.LOG_COLLECTION_ENABLED, "false")) .addLogHandler(new LogHandler() { @Override public void onLog(LogEntry logEntry) { .start();
// Fragment (tail of a fluent prepare chain): passes runnable arguments to
// BundledJarRunnable, prints container logs to stdout, and starts the application.
.withArguments("BundledJarRunnable", arguments.toArray()) .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out, true))) .start();
// Fragment (non-contiguous excerpt; the bare ".addLogHandler" line starts
// mid-expression): sets a DEBUG root log level, default log-level arguments both
// application-wide and for the LogLevelTestRunnable, then attaches a stdout printer
// and starts.
preparer.setLogLevel(LogEntry.Level.DEBUG); preparer.setLogLevels(defaultLogArguments); preparer.setLogLevels(LogLevelTestRunnable.class.getSimpleName(), defaultLogArguments); .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out))) .start();
// Starts the application by forwarding the timed start call to the wrapped delegate
// returned by getDelegate(); returns the delegate's controller unchanged.
// (The trailing "}" closes the enclosing class.)
@Override public TwillController start(long timeout, TimeUnit timeoutUnit) { return getDelegate().start(timeout, timeoutUnit); } }