/**
 * Builds the JVM arguments used to launch the launcher application master container.
 */
private List<String> createCommand(final Configuration launcherJobConf, final Context context) {
    final List<String> vargs = new ArrayList<String>(6);

    String launcherLogLevel = launcherJobConf.get(LauncherAM.OOZIE_LAUNCHER_LOG_LEVEL_PROPERTY);
    if (Strings.isNullOrEmpty(launcherLogLevel)) {
        launcherLogLevel = "INFO";
    }

    vargs.add(Apps.crossPlatformify(ApplicationConstants.Environment.JAVA_HOME.toString()) + "/bin/java");
    vargs.add("-Dlog4j.configuration=container-log4j.properties");
    vargs.add("-Dlog4j.debug=true");
    vargs.add("-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR + "=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
    vargs.add("-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_SIZE + "=" + 1024 * 1024);
    vargs.add("-Dhadoop.root.logger=" + launcherLogLevel + ",CLA");
    vargs.add("-Dhadoop.root.logfile=" + TaskLog.LogName.SYSLOG);
    vargs.add("-Dsubmitter.user=" + context.getWorkflow().getUser());

    return vargs;
}
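A minimal usage sketch of how such an argument list is typically turned into a single YARN container launch command: the launcher main class and the stdout/stderr redirects are appended and the parts are joined with spaces. The helper class and the main class name below are illustrative assumptions, not part of the snippet above.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.yarn.api.ApplicationConstants;

final class LaunchCommandSketch {
    // Hypothetical helper: joins the JVM arguments into one launch command string and
    // redirects stdout/stderr into the YARN container log directory.
    static String toLaunchCommand(List<String> vargs) {
        final List<String> parts = new ArrayList<>(vargs);
        parts.add("org.example.LauncherMain"); // placeholder main class, not Oozie's actual launcher class
        parts.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout");
        parts.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr");
        return String.join(" ", parts);
    }
}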
/**
 * Parses a chgrp command element and applies the group change as the workflow user.
 */
private void doChgrpOperation(Context context, XConfiguration fsConf, Path nameNodePath, Element commandElement)
        throws ActionExecutorException {
    Path path = getPath(commandElement, FS_TAG_PATH);
    String groupTag = commandElement.getAttributeValue("group");
    String dirFilesTag = commandElement.getAttributeValue(FS_TAG_DIRFILES);
    // dirFiles defaults to true when the attribute is absent
    boolean dirFiles = (dirFilesTag == null) || Boolean.parseBoolean(dirFilesTag);
    boolean recursive = commandElement.getChild(FS_TAG_RECURSIVE, commandElement.getNamespace()) != null;
    chgrp(context, fsConf, nameNodePath, path, context.getWorkflow().getUser(), groupTag, dirFiles, recursive);
}
/**
 * Returns a FileSystem for the workflow application path, created as the workflow user.
 */
protected FileSystem getAppFileSystem(WorkflowJob workflow) throws HadoopAccessorException, IOException, URISyntaxException {
    URI uri = new URI(workflow.getAppPath());
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    Configuration fsConf = has.createConfiguration(uri.getAuthority());
    return has.createFileSystem(workflow.getUser(), uri, fsConf);
}
/**
 * Returns a FileSystem for the given URI, created as the current workflow's user.
 */
private static FileSystem getFileSystem(URI uri) throws HadoopAccessorException {
    WorkflowJob workflow = DagELFunctions.getWorkflow();
    String user = workflow.getUser();
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    Configuration conf = has.createConfiguration(uri.getAuthority());
    return has.createFileSystem(user, uri, conf);
}
/**
 * Create job client object.
 *
 * @param context executor context of the action
 * @param jobConf job configuration
 * @return JobClient created for the workflow user
 * @throws HadoopAccessorException if the client cannot be created
 */
protected JobClient createJobClient(Context context, Configuration jobConf) throws HadoopAccessorException {
    String user = context.getWorkflow().getUser();
    return Services.get().get(HadoopAccessorService.class).createJobClient(user, jobConf);
}
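A call-site sketch, assuming the caller owns the returned client and closes it when done; the wrapping method and the externalJobId parameter are hypothetical stand-ins, not taken from the code above.

// Hypothetical call site within the same action executor; externalJobId is a placeholder
// for the id of a Hadoop job that was already submitted.
private boolean isHadoopJobComplete(Context context, Configuration jobConf, String externalJobId)
        throws HadoopAccessorException, IOException {
    JobClient jobClient = createJobClient(context, jobConf);
    try {
        RunningJob runningJob = jobClient.getJob(JobID.forName(externalJobId));
        return runningJob != null && runningJob.isComplete();
    } finally {
        jobClient.close();
    }
}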
/**
 * Returns a FileSystem for this workflow's application path, created as the workflow user.
 */
public FileSystem getAppFileSystem() throws HadoopAccessorException, IOException, URISyntaxException {
    WorkflowJob workflow = getWorkflow();
    URI uri = new URI(workflow.getAppPath());
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    Configuration fsConf = has.createConfiguration(uri.getAuthority());
    return has.createFileSystem(workflow.getUser(), uri, fsConf);
}
/**
 * Create yarn client object.
 *
 * @param context executor context of the action
 * @param jobConf job configuration
 * @return YarnClient created for the workflow user
 * @throws HadoopAccessorException if the client cannot be created
 */
protected YarnClient createYarnClient(Context context, Configuration jobConf) throws HadoopAccessorException {
    String user = context.getWorkflow().getUser();
    return Services.get().get(HadoopAccessorService.class).createYarnClient(user, jobConf);
}
/**
 * Return true if the partition exists, false if not.
 *
 * @param uri hcatalog partition uri.
 * @return <code>true</code> if the uri exists, <code>false</code> if it does not.
 * @throws Exception if the uri cannot be parsed or checked
 */
public static boolean hcat_exists(String uri) throws Exception {
    URI hcatURI = new URI(uri);
    URIHandlerService uriService = Services.get().get(URIHandlerService.class);
    URIHandler handler = uriService.getURIHandler(hcatURI);
    WorkflowJob workflow = DagELFunctions.getWorkflow();
    String user = workflow.getUser();
    return handler.exists(hcatURI, EMPTY_CONF, user);
}
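A call-site sketch; the metastore host, database, table, and partition key in the URI below are made-up values, and the wrapping method is only illustrative.

// Hypothetical caller; the hcat URI values are illustrative only.
public static boolean isDailyPartitionReady() throws Exception {
    return hcat_exists("hcat://hcat.example.com:9083/mydb/mytable/dt=2024-01-01");
}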
/**
 * Create an MRClientProtocol to the JHS.
 * Copied over from ClientCache in Hadoop.
 *
 * @return the protocol that can be used to get a token with
 * @throws IOException
 */
private MRClientProtocol instantiateHistoryProxy(final Configuration configuration, final ActionExecutor.Context context)
        throws IOException {
    final String serviceAddr = configuration.get(JHAdminConfig.MR_HISTORY_ADDRESS);
    if (StringUtils.isEmpty(serviceAddr)) {
        return null;
    }
    LOG.debug("Connecting to JHS at: " + serviceAddr);
    final YarnRPC rpc = YarnRPC.create(configuration);
    LOG.debug("Connected to JHS at: " + serviceAddr);
    UserGroupInformation currentUser = Services.get().get(UserGroupInformationService.class)
            .getProxyUser(context.getWorkflow().getUser());
    return currentUser.doAs(new PrivilegedAction<MRClientProtocol>() {
        @Override
        public MRClientProtocol run() {
            return (MRClientProtocol) rpc.getProxy(HSClientProtocol.class,
                    NetUtils.createSocketAddr(serviceAddr), configuration);
        }
    });
}
/**
 * @param path path to create the FileSystem for
 * @param context executor context of the action
 * @param fsConf additional file system configuration, may be null
 * @return FileSystem created for the workflow user
 * @throws HadoopAccessorException if the file system cannot be created
 */
private FileSystem getFileSystemFor(Path path, Context context, XConfiguration fsConf) throws HadoopAccessorException {
    String user = context.getWorkflow().getUser();
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    Configuration conf = has.createConfiguration(path.toUri().getAuthority());
    XConfiguration.copy(context.getProtoActionConf(), conf);
    if (fsConf != null) {
        XConfiguration.copy(fsConf, conf);
    }
    return has.createFileSystem(user, path.toUri(), conf);
}
/**
 * Add an HDFS_DELEGATION_TOKEN to the {@link Credentials} provided.
 * This is also important to ensure that log aggregation works correctly from the NM.
 *
 * @param credentials the credentials object which is updated
 * @param config launcher AM configuration
 * @param props properties for getting credential token or certificate
 * @param context workflow context
 * @throws Exception thrown if failed
 */
@Override
public void updateCredentials(Credentials credentials, Configuration config, CredentialsProperties props,
        ActionExecutor.Context context) throws Exception {
    final String[] jobNameNodes = config.getStrings(MRJobConfig.JOB_NAMENODES);
    if (jobNameNodes != null) {
        final Path[] paths = new Path[jobNameNodes.length];
        for (int i = 0; i != jobNameNodes.length; ++i) {
            paths[i] = new Path(jobNameNodes[i]);
        }
        final UserGroupInformation ugi = Services.get().get(UserGroupInformationService.class)
                .getProxyUser(context.getWorkflow().getUser());
        obtainTokensForNamenodes(credentials, config, ugi, paths);
    } else {
        obtainTokenForAppFileSystemNameNode(credentials, config, context);
    }
}
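The obtainTokensForNamenodes helper referenced above is not shown here; below is a minimal sketch of what such a helper could look like, assuming it only delegates to Hadoop's TokenCache inside a doAs block on the proxy user. The class name and structure are assumptions, not the actual Oozie implementation.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

final class NameNodeTokenSketch {
    // Hypothetical sketch; the real Oozie helper may differ.
    static void obtainTokensForNamenodes(final Credentials credentials, final Configuration config,
            UserGroupInformation ugi, final Path[] paths) throws Exception {
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                // Fetch HDFS delegation tokens for every configured NameNode path as the proxy user.
                TokenCache.obtainTokensForNamenodes(credentials, paths, config);
                return null;
            }
        });
    }
}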
/**
 * Obtains an HBase authentication token for the workflow user and adds it to the credentials.
 */
private void obtainToken(Credentials credentials, final Configuration jobConf, Context context)
        throws IOException, InterruptedException {
    String user = context.getWorkflow().getUser();
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
    User u = User.create(ugi);
    // A direct doAs is required here vs. User#obtainAuthTokenForJob(...)
    // See OOZIE-2419 for more
    XLog.getLog(getClass()).debug("Getting Hbase token for user {0}", user);
    Token<AuthenticationTokenIdentifier> token = u.runAs(
            new PrivilegedExceptionAction<Token<AuthenticationTokenIdentifier>>() {
                public Token<AuthenticationTokenIdentifier> run() throws Exception {
                    Token<AuthenticationTokenIdentifier> newToken = null;
                    try (Connection connection = ConnectionFactory.createConnection(jobConf)) {
                        newToken = TokenUtil.obtainToken(connection);
                    }
                    return newToken;
                }
            });
    XLog.getLog(getClass()).debug("Got token, adding it to credentials.");
    credentials.addToken(CredentialsProviderFactory.getUniqueAlias(token), token);
}