@Override
public T start(Map<String, String> arguments) {
  applicationManager.startProgram(programId, arguments);
  // This cast is safe as long as derived classes extend AbstractProgramManager
  // with themselves declared as the type parameter (<T>).
  return (T) this;
}
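// A minimal sketch of the self-type pattern the comment above depends on: a
// subclass passes itself as the type parameter, so the fluent start(...) returns
// the concrete manager type. The class name and constructor shape below are
// illustrative assumptions, not the actual API.
public class ExampleProgramManager extends AbstractProgramManager<ExampleProgramManager> {
  public ExampleProgramManager(ProgramId programId, ApplicationManager applicationManager) {
    super(programId, applicationManager);
  }
}
// Usage: the unchecked cast in start(...) then resolves to ExampleProgramManager, e.g.
//   ExampleProgramManager manager = new ExampleProgramManager(id, appManager).start(args);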
/**
 * By default, after each test finishes, all apps started during the test are stopped.
 * Subclasses can override this method to provide different behavior.
 */
@After
public void afterTest() throws Exception {
  for (ApplicationManager manager : applicationManagers) {
    manager.stopAll();
  }
}
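// A minimal sketch of the override the javadoc above allows, assuming a test
// class extending the framework base. The class name and the decision to skip
// cleanup are illustrative only. JUnit 4 still invokes the @After method
// declared on the parent, which dispatches to this override.
public class LongRunningAppsTest extends TestBase {
  @Override
  public void afterTest() throws Exception {
    // Intentionally a no-op: apps started by one test stay running for later tests.
  }
}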
@Override
public void stop() {
  applicationManager.stopProgram(programId);
}
ServiceManager serviceManager = appManager.getServiceManager(
  ClusterNameTestApp.ClusterNameServiceHandler.class.getSimpleName()).start();
Assert.assertEquals(clusterName,
                    callServiceGet(serviceManager.getServiceURL(10, TimeUnit.SECONDS), "clusterName"));

WorkerManager workerManager = appManager.getWorkerManager(
  ClusterNameTestApp.ClusterNameWorker.class.getSimpleName()).start();
key.set("worker.cluster.name");

args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.OUTPUT_FILE_SET, outputArgs));
MapReduceManager mrManager = appManager.getMapReduceManager(
  ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName()).start(args);
key.set("mr.client.cluster.name");

SparkManager sparkManager = appManager.getSparkManager(
  ClusterNameTestApp.ClusterNameSpark.class.getSimpleName()).start();
key.set("spark.cluster.name");

args = RuntimeArguments.addScope(Scope.MAPREDUCE,
                                 ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName(), args);
WorkflowManager workflowManager = appManager.getWorkflowManager(
  ClusterNameTestApp.ClusterNameWorkflow.class.getSimpleName()).start(args);
@Test
public void testAdminSpark() throws Exception {
  testAdminBatchProgram(appManager.getSparkManager(AdminApp.SPARK_NAME));
}
dummyAppManager.startProgram(Id.Service.fromEntityId(serviceId));
ServiceManager greetingService = dummyAppManager.getServiceManager(serviceId.getProgram());
greetingService.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
dummyAppManager.stopProgram(Id.Service.fromEntityId(serviceId));
greetingService.waitForRun(ProgramRunStatus.KILLED, 10, TimeUnit.SECONDS);

try {
  dummyAppManager.startProgram(Id.Service.fromEntityId(serviceId));
  Assert.fail("Bob should not be able to start the service because he does not have execute privileges on it.");
} catch (RuntimeException expected) {
  // expected
}

try {
  dummyAppManager.getInfo();
  Assert.fail("Bob should not be able to read the app info without privileges.");
} catch (Exception expected) {
  // expected
}

dummyAppManager.delete();
@Test
public void testSparkProgramStatusSchedule() throws Exception {
  ApplicationManager appManager = deploy(TestSparkApp.class);
  ScheduleId scheduleId = new ScheduleId(NamespaceId.DEFAULT.getNamespace(),
                                         TestSparkApp.class.getSimpleName(), "schedule");
  appManager.enableSchedule(scheduleId);

  WorkflowManager workflowManager =
    appManager.getWorkflowManager(TestSparkApp.TriggeredWorkflow.class.getSimpleName());
  int numRuns = workflowManager.getHistory(ProgramRunStatus.COMPLETED).size();

  // Start the upstream program
  SparkManager sparkManager = appManager.getSparkManager(TestSparkApp.ScalaClassicSpark.class.getSimpleName());
  sparkManager.start();

  // Wait for the downstream workflow to complete
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);

  // Run again with the Kryo serializer
  sparkManager.start(Collections.singletonMap("spark.serializer", "org.apache.spark.serializer.KryoSerializer"));

  // Wait for the downstream workflow to complete again
  workflowManager.waitForRuns(ProgramRunStatus.COMPLETED, numRuns + 2, 5, TimeUnit.MINUTES);
}
@Category(SlowTests.class)
@Test
public void testMapReduceTaskMetricsDisable() throws Exception {
  addDatasetInstance("keyValueTable", "table1");
  addDatasetInstance("keyValueTable", "table2");
  DataSetManager<KeyValueTable> tableManager = getDataset("table1");
  KeyValueTable inputTable = tableManager.get();
  inputTable.write("hello", "world");
  tableManager.flush();

  ApplicationManager appManager = deployApplication(DatasetWithMRApp.class);
  Map<String, String> argsForMR = ImmutableMap.of(
    DatasetWithMRApp.INPUT_KEY, "table1",
    DatasetWithMRApp.OUTPUT_KEY, "table2",
    "task.*." + SystemArguments.METRICS_ENABLED, "false");
  MapReduceManager mrManager = appManager.getMapReduceManager(DatasetWithMRApp.MAPREDUCE_PROGRAM).start(argsForMR);
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  appManager.stopAll();

  testTaskMetric(mrManager.getHistory().get(0).getPid(), false);
}
@Category(SlowTests.class)
@Test
public void testCustomActionDatasetAccess() throws Exception {
  addDatasetInstance("keyValueTable", DatasetWithCustomActionApp.CUSTOM_TABLE);
  addDatasetInstance("fileSet", DatasetWithCustomActionApp.CUSTOM_FILESET);

  ApplicationManager appManager = deployApplication(DatasetWithCustomActionApp.class);
  ServiceManager serviceManager = appManager.getServiceManager(DatasetWithCustomActionApp.CUSTOM_SERVICE).start();
  serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);

  WorkflowManager workflowManager = appManager.getWorkflowManager(DatasetWithCustomActionApp.CUSTOM_WORKFLOW).start();
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);
  appManager.stopAll();

  DataSetManager<KeyValueTable> outTableManager = getDataset(DatasetWithCustomActionApp.CUSTOM_TABLE);
  KeyValueTable outputTable = outTableManager.get();
  Assert.assertEquals("world", Bytes.toString(outputTable.read("hello")));
  Assert.assertEquals("service", Bytes.toString(outputTable.read("hi")));
  Assert.assertEquals("another.world", Bytes.toString(outputTable.read("another.hello")));

  DataSetManager<FileSet> outFileSetManager = getDataset(DatasetWithCustomActionApp.CUSTOM_FILESET);
  FileSet fs = outFileSetManager.get();
  try (InputStream in = fs.getLocation("test").getInputStream()) {
    Assert.assertEquals(42, in.read());
  }
}
@Category(SlowTests.class)
@Test
public void testAppWithWorker() throws Exception {
  ApplicationManager applicationManager = deployApplication(testSpace, AppWithWorker.class);
  LOG.info("Deployed.");
  WorkerManager manager = applicationManager.getWorkerManager(AppWithWorker.WORKER).start();

  // Wait for the initialize and run states
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      DataSetManager<KeyValueTable> dataSetManager = getDataset(testSpace.dataset(AppWithWorker.DATASET));
      KeyValueTable table = dataSetManager.get();
      return AppWithWorker.INITIALIZE.equals(Bytes.toString(table.read(AppWithWorker.INITIALIZE)))
        && AppWithWorker.RUN.equals(Bytes.toString(table.read(AppWithWorker.RUN)));
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  manager.stop();
  applicationManager.stopAll();

  // Wait for the stop state
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      DataSetManager<KeyValueTable> dataSetManager = getDataset(testSpace.dataset(AppWithWorker.DATASET));
      KeyValueTable table = dataSetManager.get();
      return AppWithWorker.STOP.equals(Bytes.toString(table.read(AppWithWorker.STOP)));
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}
private String executeWorkflow(ApplicationManager applicationManager, Map<String, String> additionalParams,
                               int expectedComplete) throws Exception {
  WorkflowManager wfManager = applicationManager.getWorkflowManager(WorkflowAppWithLocalDatasets.WORKFLOW_NAME);
  Map<String, String> runtimeArgs = new HashMap<>();
  File waitFile = new File(TMP_FOLDER.newFolder(), "/wait.file");

  Map<String, String> workflowMetricsContext = new HashMap<>();
  workflowMetricsContext.put(Constants.Metrics.Tag.NAMESPACE, testSpace.getNamespace());
  workflowMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  workflowMetricsContext.put(Constants.Metrics.Tag.WORKFLOW, WorkflowAppWithLocalDatasets.WORKFLOW_NAME);
  workflowMetricsContext.put(Constants.Metrics.Tag.RUN_ID, runId);

  sparkMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  sparkMetricsContext.put(Constants.Metrics.Tag.SPARK, "JavaSparkCSVToSpaceConverter");
  sparkMetricsContext.put(Constants.Metrics.Tag.RUN_ID, runId);

  appMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());

  mrMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  mrMetricsContext.put(Constants.Metrics.Tag.MAPREDUCE, "WordCount");
  mrMetricsContext.put(Constants.Metrics.Tag.RUN_ID, runId);
try {
  appManager.update(new AppRequest(new ArtifactSummary(DummyApp.class.getSimpleName(), "1.0-SNAPSHOT")));
  Assert.fail("App update should have failed because Bob does not have admin privileges on the app.");
} catch (UnauthorizedException expected) {
  // expected
}
try {
  appManager.delete();
} catch (UnauthorizedException expected) {
  // expected
}
appManager.delete();
@Test
public void test() throws Exception {
  ApplicationManager applicationManager = deployApplication(TestMapReduceServiceIntegrationApp.class);
  ServiceManager serviceManager =
    applicationManager.getServiceManager(TestMapReduceServiceIntegrationApp.SERVICE_NAME).start();
  serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);

  DataSetManager<MyKeyValueTableDefinition.KeyValueTable> inDataSet =
    getDataset(TestMapReduceServiceIntegrationApp.INPUT_DATASET);
  inDataSet.get().write("key1", "Two words");
  inDataSet.get().write("key2", "Plus three words");
  inDataSet.flush();

  MapReduceManager mrManager =
    applicationManager.getMapReduceManager(TestMapReduceServiceIntegrationApp.MR_NAME).start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 180, TimeUnit.SECONDS);

  DataSetManager<MyKeyValueTableDefinition.KeyValueTable> outDataSet =
    getDataset(TestMapReduceServiceIntegrationApp.OUTPUT_DATASET);
  MyKeyValueTableDefinition.KeyValueTable results = outDataSet.get();
  String total = results.get(TestMapReduceServiceIntegrationApp.SQUARED_TOTAL_WORDS_COUNT);
  Assert.assertEquals(25, Integer.parseInt(total));
}
@Override
public ProgramManager apply(ApplicationManager input) {
  return input.getMapReduceManager(AppWithPartitionConsumers.WordCountMapReduce.NAME).start();
}
}, true);
@Override
public Map<String, String> getRuntimeArgs() throws Exception {
  return applicationManager.getRuntimeArgs(programId);
}
@Override
public List<RunRecord> getHistory(ProgramRunStatus status) {
  return applicationManager.getHistory(programId, status);
}
try {
  appManager.getServiceManager(AppWithCustomTx.SERVICE)
    .start(txTimeoutArguments(100000));
} catch (IllegalArgumentException e) {
  // expected: the requested transaction timeout is too large
}
ServiceManager serviceManager = appManager.getServiceManager(AppWithCustomTx.SERVICE)
  .start(txTimeoutArguments(txDefaulTimeoutService));
WorkerManager notxWorkerManager = appManager.getWorkerManager(AppWithCustomTx.WORKER_NOTX)
  .start(txTimeoutArguments(txDefaulTimeoutWorker));
WorkerManager txWorkerManager = appManager.getWorkerManager(AppWithCustomTx.WORKER_TX)
  .start(txTimeoutArguments(txDefaulTimeoutWorker));
WorkflowManager txWFManager = appManager.getWorkflowManager(AppWithCustomTx.WORKFLOW_TX)
  .start(txTimeoutArguments(txDefaulTimeoutWorkflow));
WorkflowManager notxWFManager = appManager.getWorkflowManager(AppWithCustomTx.WORKFLOW_NOTX)
  .start(txTimeoutArguments(txDefaulTimeoutWorkflow, txDefaulTimeoutAction, "action", AppWithCustomTx.ACTION_NOTX));
MapReduceManager txMRManager = appManager.getMapReduceManager(AppWithCustomTx.MAPREDUCE_TX)
  .start(txTimeoutArguments(txDefaulTimeoutMapReduce));
MapReduceManager notxMRManager = appManager.getMapReduceManager(AppWithCustomTx.MAPREDUCE_NOTX)
  .start(txTimeoutArguments(txDefaulTimeoutMapReduce));
SparkManager txSparkManager = appManager.getSparkManager(AppWithCustomTx.SPARK_TX)
  .start(txTimeoutArguments(txDefaulTimeoutSpark));
SparkManager notxSparkManager = appManager.getSparkManager(AppWithCustomTx.SPARK_NOTX)
  .start(txTimeoutArguments(txDefaulTimeoutSpark));
appManager.stopAll();
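// The txTimeoutArguments(...) helper is not part of this excerpt. A minimal
// sketch of what it plausibly builds, assuming SystemArguments.TRANSACTION_TIMEOUT
// as the runtime-argument key and a "<scope>.<name>."-prefixed key for the
// per-program override; the exact key format is an assumption, not the actual helper.
private Map<String, String> txTimeoutArguments(int timeout) {
  return ImmutableMap.of(SystemArguments.TRANSACTION_TIMEOUT, String.valueOf(timeout));
}

private Map<String, String> txTimeoutArguments(int timeout, int timeoutForScope, String scope, String name) {
  return ImmutableMap.of(
    SystemArguments.TRANSACTION_TIMEOUT, String.valueOf(timeout),
    String.format("%s.%s.%s", scope, name, SystemArguments.TRANSACTION_TIMEOUT), String.valueOf(timeoutForScope));
}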
@Test
public void testSpark2Service() throws Exception {
  ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, Spark2TestApp.class);
  SparkManager manager = applicationManager.getSparkManager(ScalaSparkServiceProgram.class.getSimpleName()).start();

  URL url = manager.getServiceURL(5, TimeUnit.MINUTES);
  Assert.assertNotNull(url);

  // GET request to sum n numbers
  URL sumURL = url.toURI().resolve("sum?n=" + Joiner.on("&n=").join(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)).toURL();
  HttpURLConnection urlConn = (HttpURLConnection) sumURL.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_OK, urlConn.getResponseCode());
  try (InputStream is = urlConn.getInputStream()) {
    Assert.assertEquals(55, Integer.parseInt(new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8)));
  }
}
@Category(SlowTests.class)
@Test
public void testMapperDatasetAccess() throws Exception {
  addDatasetInstance("keyValueTable", "table1");
  addDatasetInstance("keyValueTable", "table2");
  DataSetManager<KeyValueTable> tableManager = getDataset("table1");
  KeyValueTable inputTable = tableManager.get();
  inputTable.write("hello", "world");
  tableManager.flush();

  ApplicationManager appManager = deployApplication(DatasetWithMRApp.class);
  Map<String, String> argsForMR = ImmutableMap.of(
    DatasetWithMRApp.INPUT_KEY, "table1",
    DatasetWithMRApp.OUTPUT_KEY, "table2",
    "task.*." + SystemArguments.METRICS_CONTEXT_TASK_INCLUDED, "false");
  MapReduceManager mrManager = appManager.getMapReduceManager(DatasetWithMRApp.MAPREDUCE_PROGRAM).start(argsForMR);
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  appManager.stopAll();

  DataSetManager<KeyValueTable> outTableManager = getDataset("table2");
  verifyMapperJobOutput(DatasetWithMRApp.class, outTableManager);

  // Verify that the metrics emitted by the MR task are collected
  testTaskMetric(mrManager.getHistory().get(0).getPid(), true);
  // Verify that metrics are not emitted with the instance (task-level) tag
  testTaskTagLevelExists(DatasetWithMRApp.class.getSimpleName(), DatasetWithMRApp.MAPREDUCE_PROGRAM,
                         mrManager.getHistory().get(0).getPid(), "table1", false);
}
@Test
public void testAdminMapReduce() throws Exception {
  testAdminBatchProgram(appManager.getMapReduceManager(AdminApp.MAPREDUCE_NAME));
}