/**
 * Gets Hadoop class path as a list of URLs (for in-process class loader usage).
 *
 * @return List of class path URLs.
 * @throws IOException If failed.
 */
public static List<URL> classpathForClassLoader() throws IOException {
    List<URL> res = new ArrayList<>();

    for (SearchDirectory dir : classpathDirectories()) {
        for (File file : dir.files()) {
            try {
                res.add(file.toURI().toURL());
            }
            catch (MalformedURLException ignored) {
                throw new IOException("Failed to convert file path to URL: " + file.getPath());
            }
        }
    }

    return res;
}
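/**
 * A minimal sketch of how the returned URLs could feed an in-process class
 * loader (requires java.net.URLClassLoader). The null parent (full isolation)
 * and the example class name are assumptions for illustration, not behavior
 * taken from this class.
 */
public static ClassLoader hadoopClassLoaderExample() throws IOException, ClassNotFoundException {
    List<URL> urls = classpathForClassLoader();

    // Null parent isolates Hadoop classes from the caller's classpath (assumed design choice).
    ClassLoader hadoopLdr = new URLClassLoader(urls.toArray(new URL[urls.size()]), null);

    // Sanity check: resolve a well-known Hadoop class through the new loader.
    Class.forName("org.apache.hadoop.conf.Configuration", true, hadoopLdr);

    return hadoopLdr;
}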
if (token.endsWith("*")) {
    // Wildcard pattern, e.g. "dir/*": take every file in the parent directory.
    assert dir != null;

    res.add(new SearchDirectory(dir, AcceptAllDirectoryFilter.INSTANCE, false));
}
else {
    // A bare root such as "/" or "C:\" has no parent, nothing to do with it.
    if (dir == null)
        continue;

    // Exact file name: match only this entry in the parent directory.
    res.add(new SearchDirectory(dir, new ExactDirectoryFilter(file.getName()), false));
}
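// For context, a sketch of the enclosing user-libs parsing loop this branch
// sits in; the loop shape and separator are assumptions based on how
// parseUserLibs(String) is called elsewhere in this section:
//
//     for (String token : str.split(File.pathSeparator)) {
//         if (token == null || token.isEmpty())
//             continue; // Skip empty tokens.
//
//         File file = new File(token);
//         File dir = file.getParentFile();
//
//         // ... wildcard/exact-name branch shown above ...
//     }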
/**
 * Gets base directories to discover classpath elements in.
 *
 * @return Collection of directory and mask pairs.
 * @throws IOException if a mandatory classpath location is not found.
 */
private static Collection<SearchDirectory> classpathDirectories() throws IOException {
    HadoopLocations loc = locations();

    Collection<SearchDirectory> res = new ArrayList<>();

    // Add libraries from Hadoop distribution:
    res.add(new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
    res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
    res.add(new SearchDirectory(new File(loc.mapred(), "lib"), AcceptAllDirectoryFilter.INSTANCE));

    res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-common-")));
    res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-auth-")));
    res.add(new SearchDirectory(new File(loc.hdfs()), new PrefixDirectoryFilter("hadoop-hdfs-")));
    res.add(new SearchDirectory(new File(loc.mapred()), new PrefixDirectoryFilter("hadoop-mapreduce-client-common")));
    res.add(new SearchDirectory(new File(loc.mapred()), new PrefixDirectoryFilter("hadoop-mapreduce-client-core")));

    // Add user provided libs:
    res.addAll(parseUserLibs());

    return res;
}
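// The filter types used above are not shown in this section. Below is a
// minimal sketch of what the prefix filter could look like; the single-method
// DirectoryFilter contract is an assumption, not the confirmed API.

/** Assumed contract used by SearchDirectory to test file names. */
public interface DirectoryFilter {
    /** Tests whether a file name should be accepted. */
    boolean accept(String name);
}

/** Accepts entries whose name starts with a given prefix (case-insensitive). */
public static class PrefixDirectoryFilter implements DirectoryFilter {
    /** Lower-cased prefix to match against. */
    private final String prefix;

    /**
     * @param prefix File name prefix, e.g. "hadoop-common-".
     */
    public PrefixDirectoryFilter(String prefix) {
        assert prefix != null;

        this.prefix = prefix.toLowerCase();
    }

    /** {@inheritDoc} */
    @Override public boolean accept(String name) {
        return name.toLowerCase().startsWith(prefix);
    }
}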
/**
 * Parses a user libs string and collects all files it resolves to.
 *
 * @param str User libs string (paths, possibly with wildcards).
 * @return Matched files.
 * @throws IOException If failed.
 */
Collection<File> parse(String str) throws IOException {
    Collection<HadoopClasspathUtils.SearchDirectory> dirs = HadoopClasspathUtils.parseUserLibs(str);

    Collection<File> res = new HashSet<>();

    for (HadoopClasspathUtils.SearchDirectory dir : dirs)
        Collections.addAll(res, dir.files());

    return res;
}
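/**
 * One possible way to exercise the helper above; the paths below are made up
 * for illustration and the result depends on what actually exists on disk.
 */
void parseExample() throws IOException {
    // Two concrete jars plus a wildcard directory, joined with the platform separator.
    String libs = "/opt/libs/a.jar" + File.pathSeparator
        + "/opt/libs/b.jar" + File.pathSeparator
        + "/opt/morelibs/*";

    // Expect entries for a.jar, b.jar and every file under /opt/morelibs.
    Collection<File> files = parse(libs);

    assert files != null;
}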