/** Starts the already-initialized YARN client; package-private hook for tests. */
@VisibleForTesting
void startYarnClient() {
  this.yarnClient.start();
}
/**
 * Creates an RM client wrapper: stores the configuration, then builds,
 * initializes, and starts the underlying {@link YarnClient} so the instance
 * is ready to talk to the Resource Manager immediately after construction.
 *
 * @param conf YARN configuration used to initialize the client
 */
public YarnRMClient(YarnConfiguration conf) {
  this.conf = conf;
  yarnClient = YarnClient.createYarnClient();
  yarnClient.init(conf);
  yarnClient.start();
}
/**
 * Kills all jobs tagged with the given tag that have been started after the
 * given timestamp.
 *
 * <p>A dedicated {@link YarnClient} is created for the kill requests and is
 * always stopped afterwards so its RM connection is released (the original
 * implementation leaked it).
 *
 * @param tag       application tag identifying the child jobs
 * @param timestamp only jobs started after this time are considered
 * @throws RuntimeException wrapping any {@link YarnException} or {@link IOException}
 *                          raised while looking up or killing the jobs
 */
@Override
public void killJobs(String tag, long timestamp) {
  try {
    LOG.info("Looking for jobs to kill...");
    Set<ApplicationId> childJobs = getYarnChildJobs(tag, timestamp);
    if (childJobs.isEmpty()) {
      // Original message was truncated ("No jobs found from"); include the tag.
      LOG.info(String.format("No jobs found for tag: %s", tag));
      return;
    }
    LOG.info(String.format("Found MR jobs count: %d", childJobs.size()));
    LOG.info("Killing all found jobs");
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();
    try {
      for (ApplicationId app : childJobs) {
        LOG.info(String.format("Killing job: %s ...", app));
        yarnClient.killApplication(app);
        LOG.info(String.format("Job %s killed", app));
      }
    } finally {
      // Release the client even if a kill request fails part-way through.
      yarnClient.stop();
    }
  } catch (YarnException | IOException e) {
    // Identical handling for both checked types; preserve the cause.
    throw new RuntimeException("Exception occurred while killing child job(s)", e);
  }
}
/**
 * Builds a {@link YarnClusterDescriptor} backed by a freshly created and
 * started {@link YarnClient}. The final {@code false} argument hands client
 * ownership to the descriptor, so the client is not shared.
 *
 * @param configuration          Flink configuration for the descriptor
 * @param yarnConfiguration      YARN configuration used to initialize the client
 * @param configurationDirectory directory containing the configuration files
 * @return a new cluster descriptor wrapping the started client
 */
private AbstractYarnClusterDescriptor getClusterDescriptor(
        Configuration configuration,
        YarnConfiguration yarnConfiguration,
        String configurationDirectory) {
    final YarnClient newlyCreatedClient = YarnClient.createYarnClient();
    newlyCreatedClient.init(yarnConfiguration);
    newlyCreatedClient.start();

    return new YarnClusterDescriptor(
            configuration,
            yarnConfiguration,
            configurationDirectory,
            newlyCreatedClient,
            false);
}
}
/**
 * Kills all child YARN applications that were launched with the given tag.
 *
 * <p>Fixes two issues in the original: the temporary {@link YarnClient} is now
 * stopped in a {@code finally} block instead of being leaked, and the failure
 * log passes the exception as the throwable argument (not into a {@code {}}
 * placeholder) so SLF4J records the full stack trace.
 *
 * @param conf configuration used to initialize the YARN client
 * @param tag  application tag of the child jobs; {@code null} is a no-op
 */
public static void killChildYarnJobs(Configuration conf, String tag) {
    if (tag == null) {
        return;
    }
    try {
        LOG.info("Killing yarn jobs using query tag:" + tag);
        Set<ApplicationId> childYarnJobs = getChildYarnJobs(conf, tag);
        if (childYarnJobs.isEmpty()) {
            return;
        }
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(conf);
        yarnClient.start();
        try {
            for (ApplicationId app : childYarnJobs) {
                yarnClient.killApplication(app);
            }
        } finally {
            // Release the client's RM connection even if a kill fails.
            yarnClient.stop();
        }
    } catch (IOException | YarnException ye) {
        LOG.warn("Exception occurred while killing child job(s)", ye);
    }
}
/**
 * Reports whether the given YARN application is currently in the ACCEPTED
 * state (submitted to the RM but not yet running).
 *
 * <p>A short-lived {@link YarnClient} is created for the lookup and stopped in
 * the {@code finally} block. Any failure is logged and reported as
 * {@code false} rather than propagated.
 *
 * @param conf          Hive configuration used to initialize the YARN client
 * @param applicationId textual application id; {@code null} yields {@code false}
 * @return {@code true} iff a report was obtained and its state is ACCEPTED
 */
public boolean isApplicationAccepted(HiveConf conf, String applicationId) {
    if (applicationId == null) {
        return false;
    }
    YarnClient yarnClient = null;
    try {
        LOG.info("Trying to find " + applicationId);
        ApplicationId appId = getApplicationIDFromString(applicationId);
        yarnClient = YarnClient.createYarnClient();
        yarnClient.init(conf);
        yarnClient.start();
        ApplicationReport report = yarnClient.getApplicationReport(appId);
        if (report == null) {
            return false;
        }
        return YarnApplicationState.ACCEPTED == report.getYarnApplicationState();
    } catch (Exception ex) {
        LOG.error("Failed getting application status for: " + applicationId + ": " + ex, ex);
        return false;
    } finally {
        if (yarnClient != null) {
            try {
                yarnClient.stop();
            } catch (Exception ex) {
                LOG.error("Failed to stop yarn client: " + ex, ex);
            }
        }
    }
}
}
/**
 * Checks whether the YARN application identified by {@code applicationId} is
 * currently sitting in the ACCEPTED state.
 *
 * <p>Creates a throwaway {@link YarnClient} for the single report lookup and
 * always stops it in the {@code finally} block; every failure path logs the
 * error and returns {@code false}.
 *
 * @param conf          Hive configuration used to initialize the YARN client
 * @param applicationId textual application id; {@code null} yields {@code false}
 * @return {@code true} iff a report was obtained and its state is ACCEPTED
 */
public static boolean isApplicationAccepted(HiveConf conf, String applicationId) {
    if (applicationId == null) {
        return false;
    }
    YarnClient yarnClient = null;
    try {
        ApplicationId appId = getApplicationIDFromString(applicationId);
        yarnClient = YarnClient.createYarnClient();
        yarnClient.init(conf);
        yarnClient.start();
        ApplicationReport report = yarnClient.getApplicationReport(appId);
        if (report == null) {
            return false;
        }
        return YarnApplicationState.ACCEPTED == report.getYarnApplicationState();
    } catch (Exception ex) {
        LOG.error("Failed getting application status for: " + applicationId + ": " + ex, ex);
        return false;
    } finally {
        if (yarnClient != null) {
            try {
                yarnClient.stop();
            } catch (Exception ex) {
                LOG.error("Failed to stop yarn client: " + ex, ex);
            }
        }
    }
}
// Starts the YARN client held by the jstorm client context.
// NOTE(review): fragment — the enclosing method is not visible here; presumably
// the client was init()'d earlier in that method. TODO confirm.
jstormClientContext.yarnClient.start();
/** One-time test setup: builds a fresh YarnConfiguration and a started YarnClient shared by all tests. */
@BeforeClass
public static void setupClass() {
    yarnConfiguration = new YarnConfiguration();
    yarnClient = YarnClient.createYarnClient();
    yarnClient.init(yarnConfiguration);
    yarnClient.start();
}
// Start the (already-initialized) YarnClient, log the cluster's NodeManager count,
// then request a new application handle from the Resource Manager.
// NOTE(review): fragment — 'yarnClient' and 'newAPP' are declared outside this view.
yarnClient.start(); LOG.info("Requesting a new application from cluster with " + yarnClient.getYarnClusterMetrics().getNumNodeManagers() + " NodeManagers"); newAPP = yarnClient.createApplication();
/**
 * Per-test setup: lazily creates and starts the shared YarnClient on first use,
 * then resets the Flink configuration to a fresh copy of the global one.
 */
@Before
public void checkClusterEmpty() {
    if (yarnClient == null) {
        // First test in the class: bring up the client once and reuse it afterwards.
        yarnClient = YarnClient.createYarnClient();
        yarnClient.init(getYarnConfiguration());
        yarnClient.start();
    }

    flinkConfiguration = new org.apache.flink.configuration.Configuration(globalConfiguration);
}
// Creates a throwaway YarnClient to assert that exactly one application is RUNNING.
// NOTE(review): fragment — 'yc' is never stopped in this view; verify the enclosing
// test (or a teardown) releases it.
YarnClient yc = YarnClient.createYarnClient(); yc.init(YARN_CONFIGURATION); yc.start(); List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING)); Assert.assertEquals(1, apps.size()); // Only one running
@Override public void start(CallbackHandler resourceCallback, org.apache.hadoop.yarn.client.api.async.NMClientAsync.CallbackHandler nodeCallback ) { conf = new YarnConfiguration(); resourceMgr = AMRMClientAsync.createAMRMClientAsync(pollPeriodMs, resourceCallback); resourceMgr.init(conf); resourceMgr.start(); // Create the asynchronous node manager client nodeMgr = NMClientAsync.createNMClientAsync(nodeCallback); nodeMgr.init(conf); nodeMgr.start(); client = YarnClient.createYarnClient(); client.init(conf); client.start(); String appIdStr = System.getenv(DrillOnYarnConfig.APP_ID_ENV_VAR); if (appIdStr != null) { appId = ConverterUtils.toApplicationId(appIdStr); try { appReport = client.getApplicationReport(appId); } catch (YarnException | IOException e) { LOG.error( "Failed to get YARN applicaiton report for App ID: " + appIdStr, e); } } }
// Create, initialize, and start a fresh YarnClient from 'conf'.
// NOTE(review): fragment — the enclosing method and the client's shutdown path
// are outside this view; confirm the client is stopped by the caller.
YarnClient yarnClient = YarnClient.createYarnClient(); yarnClient.init(conf); yarnClient.start();
// Starts this object's YarnClient field; presumably init() was called earlier
// in the enclosing (not visible) method — TODO confirm.
this.yarnClient.start();
/**
 * Tests that the cluster retrieval of a finished YARN application fails.
 */
@Test(expected = ClusterRetrieveException.class)
public void testClusterClientRetrievalOfFinishedYarnApplication() throws Exception {
	final ApplicationId finishedAppId = ApplicationId.newInstance(System.currentTimeMillis(), 42);
	final ApplicationReport finishedReport = createApplicationReport(
		finishedAppId,
		YarnApplicationState.FINISHED,
		FinalApplicationStatus.SUCCEEDED);

	// Stub client that only knows about the single, already-finished application.
	final YarnClient yarnClient =
		new TestingYarnClient(Collections.singletonMap(finishedAppId, finishedReport));
	final YarnConfiguration yarnConfiguration = new YarnConfiguration();
	yarnClient.init(yarnConfiguration);
	yarnClient.start();

	// 'false' hands client ownership to the descriptor, so close() below stops it.
	final TestingAbstractYarnClusterDescriptor clusterDescriptor =
		new TestingAbstractYarnClusterDescriptor(
			new Configuration(),
			yarnConfiguration,
			temporaryFolder.newFolder().getAbsolutePath(),
			yarnClient,
			false);

	try {
		// Expected to throw ClusterRetrieveException: the application is FINISHED.
		clusterDescriptor.retrieve(finishedAppId);
	} finally {
		clusterDescriptor.close();
	}
}
// Starts the closable YARN client; the declaration and lifecycle management of
// 'closableYarnClient' are outside this view.
closableYarnClient.start();
/**
 * Creates a cluster handle: records the name and configuration, then builds,
 * initializes, and starts a {@link YarnClient} against that configuration.
 *
 * @param name cluster name
 * @param conf cluster configuration whose {@code conf()} initializes the client
 */
ClusterInfo(String name, YarnClusterConfiguration conf) {
    this.name = name;
    this.conf = conf;
    this.client = YarnClient.createYarnClient();
    client.init(conf.conf());
    client.start();
}
// Initialize and start the YARN client from the mini cluster's configuration,
// generate a random job id, then open an InstanceManager in try-with-resources.
// NOTE(review): fragment — the line is cut off mid-expression; the
// try-with-resources continues outside this view.
YarnConfiguration conf = cluster.getYarnConfiguration(); client.init(conf); client.start(); UUID jobUUID = UUID.randomUUID(); try (InstanceManager manager = new InstanceManager(
/**
 * End-to-end test: boots a mini AthenaX YARN cluster, deploys a trivial Flink
 * job through {@link JobDeployer}, and waits for the application to FINISH.
 *
 * <p>Fixes a resource leak in the original: the scheduled executor is now shut
 * down in a {@code finally} block so its non-daemon thread cannot keep the
 * test JVM alive.
 */
@Test
public void testCreateAthenaXCluster() throws Exception {
  ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
  try {
    Configuration flinkConf = new Configuration();
    flinkConf.setString(JobManagerOptions.ADDRESS, "localhost");

    try (MiniAthenaXCluster cluster = new MiniAthenaXCluster(JobDeployerITest.class.getSimpleName())) {
      cluster.start();
      YarnConfiguration conf = cluster.getYarnConfiguration();
      YarnClusterConfiguration clusterConf = cluster.getYarnClusterConf();

      final ApplicationId appId;
      try (YarnClient client = YarnClient.createYarnClient()) {
        client.init(conf);
        client.start();

        JobDeployer deployer = new JobDeployer(clusterConf, client, executor, flinkConf);
        appId = deployer.createApplication();
        InstanceMetadata md = new InstanceMetadata(UUID.randomUUID(), UUID.randomUUID());
        JobDefinitionResource resource = new JobDefinitionResource()
            .queue(null).vCores(1L).executionSlots(1L).memory(2048L);
        JobConf jobConf = new JobConf(appId, "test", Collections.emptyList(), resource, md);
        deployer.start(JobITestUtil.trivialJobGraph(), jobConf);

        YarnApplicationState state = MiniAthenaXCluster.pollFinishedApplicationState(client, appId);
        assertEquals(FINISHED, state);
      }
    }
  } finally {
    // The executor was leaked in the original test.
    executor.shutdownNow();
  }
}
}