/**
 * Parse user libs.
 *
 * @return Parsed libs search patterns.
 * @throws IOException If failed.
 */
public static Collection<SearchDirectory> parseUserLibs() throws IOException {
    return parseUserLibs(systemOrEnv(HADOOP_USER_LIBS, null));
}
/**
 * Gets Hadoop class path as a list of URLs (for in-process class loader usage).
 *
 * @return List of class path URLs.
 * @throws IOException If failed.
 */
public static List<URL> classpathForClassLoader() throws IOException {
    List<URL> res = new ArrayList<>();

    for (SearchDirectory dir : classpathDirectories()) {
        for (File file : dir.files()) {
            try {
                res.add(file.toURI().toURL());
            }
            catch (MalformedURLException ignored) {
                throw new IOException("Failed to convert file path to URL: " + file.getPath());
            }
        }
    }

    return res;
}
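// Usage sketch (not from the original sources; the class and method names below are
// illustrative assumptions): the URL list returned by classpathForClassLoader() can
// back a standard java.net.URLClassLoader, which is what "in-process class loader
// usage" amounts to.
import java.net.URL;
import java.net.URLClassLoader;
import java.util.List;

public class HadoopClasspathExample {
    public static void main(String[] args) throws Exception {
        List<URL> urls = HadoopClasspathUtils.classpathForClassLoader();

        // Parent is the application class loader; Hadoop classes resolve from the URLs.
        try (URLClassLoader ldr = new URLClassLoader(urls.toArray(new URL[0]),
            HadoopClasspathExample.class.getClassLoader())) {
            Class<?> cfg = ldr.loadClass("org.apache.hadoop.conf.Configuration");

            System.out.println("Loaded: " + cfg.getName());
        }
    }
}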
/**
 * @return Collection of jar URLs.
 * @throws IgniteCheckedException If failed.
 */
public static Collection<URL> hadoopUrls() throws IgniteCheckedException {
    Collection<URL> hadoopUrls = hadoopJars;

    if (hadoopUrls != null)
        return hadoopUrls;

    synchronized (HadoopClassLoader.class) {
        hadoopUrls = hadoopJars;

        if (hadoopUrls != null)
            return hadoopUrls;

        try {
            hadoopUrls = HadoopClasspathUtils.classpathForClassLoader();
        }
        catch (IOException e) {
            throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations: " + e.getMessage(), e);
        }

        hadoopJars = hadoopUrls;

        return hadoopUrls;
    }
}
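// The method above is double-checked locking: an unsynchronized fast-path read of
// the cache field, then a re-check under the lock before computing. For this to be
// safe the cache field must be volatile; a minimal self-contained sketch of the same
// pattern (field and class names here are assumptions):
import java.net.URL;
import java.util.Collection;

class LazyHadoopJars {
    /** Volatile is what makes the unsynchronized first read safe to publish. */
    private static volatile Collection<URL> jars;

    static Collection<URL> get() throws Exception {
        Collection<URL> res = jars;

        if (res == null) {
            synchronized (LazyHadoopJars.class) {
                res = jars;

                if (res == null)
                    jars = res = HadoopClasspathUtils.classpathForClassLoader();
            }
        }

        return res;
    }
}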
String hadoopHome = systemOrEnv(PREFIX, systemOrEnv(HOME, EMPTY_STR));

String commonHome = systemOrEnv(COMMON_HOME, EMPTY_STR);
String hdfsHome = systemOrEnv(HDFS_HOME, EMPTY_STR);
String mapredHome = systemOrEnv(MAPRED_HOME, EMPTY_STR);

// If any component home is set explicitly, all three must resolve to existing directories.
if (!isEmpty(commonHome) || !isEmpty(hdfsHome) || !isEmpty(mapredHome)) {
    HadoopLocations res = new HadoopLocations(hadoopHome, commonHome, hdfsHome, mapredHome);

    if (res.commonExists() && res.hdfsExists() && res.mapredExists())
        return res;

    throw new IOException("Failed to resolve Hadoop classpath because some environment variables are " +
        "either undefined or point to nonexistent directories: [" +
        "[env=" + COMMON_HOME + ", value=" + commonHome + ", exists=" + res.commonExists() + "], " +
        "[env=" + HDFS_HOME + ", value=" + hdfsHome + ", exists=" + res.hdfsExists() + "], " +
        "[env=" + MAPRED_HOME + ", value=" + mapredHome + ", exists=" + res.mapredExists() + "]]");
}
else if (!isEmpty(hadoopHome)) {
    // All further checks are based on HADOOP_HOME, so verify it exists first.
    if (!exists(hadoopHome))
        throw new IOException("Failed to resolve Hadoop classpath because " + HOME + " environment " +
            "variable points to nonexistent directory: " + hadoopHome);
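// The systemOrEnv(...) helper is referenced above but not shown; a plausible minimal
// implementation (an assumption, not the verbatim original) checks the JVM system
// property first and falls back to the process environment:
private static String systemOrEnv(String name, String dflt) {
    String res = System.getProperty(name);

    if (res == null)
        res = System.getenv(name);

    return res != null ? res : dflt;
}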
/**
 * Gets base directories to discover classpath elements in.
 *
 * @return Collection of directory and mask pairs.
 * @throws IOException if a mandatory classpath location is not found.
 */
private static Collection<SearchDirectory> classpathDirectories() throws IOException {
    HadoopLocations loc = locations();

    Collection<SearchDirectory> res = new ArrayList<>();

    // Add libraries from Hadoop distribution:
    res.add(new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
    res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
    res.add(new SearchDirectory(new File(loc.mapred(), "lib"), AcceptAllDirectoryFilter.INSTANCE));

    res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-common-")));
    res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-auth-")));
    res.add(new SearchDirectory(new File(loc.hdfs()), new PrefixDirectoryFilter("hadoop-hdfs-")));
    res.add(new SearchDirectory(new File(loc.mapred()), new PrefixDirectoryFilter("hadoop-mapreduce-client-common")));
    res.add(new SearchDirectory(new File(loc.mapred()), new PrefixDirectoryFilter("hadoop-mapreduce-client-core")));

    // Add user provided libs:
    res.addAll(parseUserLibs());

    return res;
}
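// PrefixDirectoryFilter is used above but not shown; a minimal sketch of the matching
// rule it plausibly implements, assuming DirectoryFilter exposes a single test(String)
// method (both the interface shape and the case-insensitive comparison are assumptions
// inferred from the call sites above):
static class PrefixDirectoryFilter implements DirectoryFilter {
    /** Prefix to match, lower-cased once up front. */
    private final String prefix;

    PrefixDirectoryFilter(String prefix) {
        this.prefix = prefix.toLowerCase();
    }

    /** Accepts a file if its name starts with the configured prefix. */
    @Override public boolean test(String name) {
        return name.toLowerCase().startsWith(prefix);
    }
}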
/**
 * Parses a user libs string into the set of files it matches.
 *
 * @param str User libs string.
 * @return Matched files.
 * @throws IOException If failed.
 */
Collection<File> parse(String str) throws IOException {
    Collection<HadoopClasspathUtils.SearchDirectory> dirs = HadoopClasspathUtils.parseUserLibs(str);

    Collection<File> res = new HashSet<>();

    for (HadoopClasspathUtils.SearchDirectory dir : dirs)
        Collections.addAll(res, dir.files());

    return res;
}
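// Illustrative call (the path and jar name are made up; the authoritative delimiter
// and wildcard rules live in HadoopClasspathUtils.parseUserLibs(String), so a single
// plain entry is used here to sidestep them):
Collection<File> libs = parse("/opt/user-hadoop-libs/custom-codec.jar");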
HadoopLocations loc = HadoopClasspathUtils.locations();
/**
 * Constructor.
 *
 * @param home Hadoop home.
 * @param common Common home.
 * @param hdfs HDFS home.
 * @param mapred Mapred home.
 */
public HadoopLocations(String home, String common, String hdfs, String mapred) {
    assert common != null && hdfs != null && mapred != null;

    this.home = home;
    this.common = common;
    this.hdfs = hdfs;
    this.mapred = mapred;

    commonExists = HadoopClasspathUtils.exists(common);
    hdfsExists = HadoopClasspathUtils.exists(hdfs);
    mapredExists = HadoopClasspathUtils.exists(mapred);
}
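// For a stock Apache Hadoop tarball the three component homes conventionally sit
// under HADOOP_HOME in the "share/hadoop" tree; a factory built on the constructor
// above might look like this (the method name and the layout are hedged assumptions,
// the layout being the standard distribution tree, which vendor distributions may change):
public static HadoopLocations fromHadoopHome(String hadoopHome) {
    return new HadoopLocations(
        hadoopHome,
        hadoopHome + "/share/hadoop/common",
        hadoopHome + "/share/hadoop/hdfs",
        hadoopHome + "/share/hadoop/mapreduce"
    );
}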
/**
 * Constructor.
 *
 * @param dir Directory.
 * @param filter Filter.
 * @param strict Whether directory must exist.
 * @throws IOException If failed.
 */
private SearchDirectory(File dir, DirectoryFilter filter, boolean strict) throws IOException {
    this.dir = dir;
    this.filter = filter;
    this.strict = strict;

    if (strict && !exists(dir.getAbsolutePath()))
        throw new IOException("Directory does not exist or cannot be read: " + dir.getAbsolutePath());
}
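// The strict flag separates mandatory distribution directories from optional user-lib
// entries; a hedged illustration of both calls as they would appear inside the owning
// class (the two-argument calls in classpathDirectories() presumably delegate here
// with strict defaulted to true):
new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE, true);  // Must exist.
new SearchDirectory(userLibDir, AcceptAllDirectoryFilter.INSTANCE, false);                    // Missing is tolerated.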