/**
 * Populates the given collection with every {@link URL} that this ClassLoader uses,
 * including all URLs used by the parents of the given ClassLoader. Equivalent to calling
 * {@link #getClassLoaderURLs(ClassLoader, boolean, Collection)} with {@code childFirst}
 * set to {@code false}.
 *
 * @param classLoader the {@link ClassLoader} whose URLs are collected
 * @param urls the {@link Collection} that receives the discovered {@link URL}s
 * @return the same {@link Collection} passed in via the {@code urls} parameter
 */
public static <T extends Collection<? super URL>> T getClassLoaderURLs(ClassLoader classLoader, T urls) {
  // Parent-first traversal is the documented default for this convenience overload.
  boolean childFirst = false;
  return getClassLoaderURLs(classLoader, childFirst, urls);
}
/**
 * Creates a new {@link FilterClassLoader} that filters classes based on the
 * {@link #defaultFilter()} on the given parent ClassLoader.
 *
 * @param parentClassLoader the ClassLoader to filter from.
 * @return a new instance of {@link FilterClassLoader}.
 */
public static FilterClassLoader create(ClassLoader parentClassLoader) {
  return new FilterClassLoader(parentClassLoader, defaultFilter());
}
/**
 * Creates the ClassLoader used for program invocation by wrapping the current
 * {@link MapReduceClassLoader} in a {@link WeakReferenceDelegatorClassLoader}
 * (presumably to avoid holding a strong reference to the delegate — confirm against
 * the delegator's implementation).
 *
 * @return a new {@link WeakReferenceDelegatorClassLoader} delegating to the
 *         MapReduce ClassLoader
 * @throws IllegalStateException if the MapReduceClassLoader has not been set yet
 */
@Override
protected ClassLoader createProgramInvocationClassLoader() {
  // Fail fast with a descriptive message rather than a bare NullPointerException;
  // mirrors the guarded variant of this method used elsewhere in this file.
  if (mapReduceClassLoader == null) {
    throw new IllegalStateException("The MapReduceClassLoader is not yet set");
  }
  return new WeakReferenceDelegatorClassLoader(mapReduceClassLoader);
}
@Override public Handler get() { // we don't instantiate the handler class via injection, to avoid giving it access to objects bound in guice, // such as SConfiguration return new InstantiatorFactory(false).get(TypeToken.of(handlerClass)).create(); } }
/**
 * Loads the named class, consulting the extension ClassLoader first and falling back to
 * normal parent delegation (via {@code super.loadClass}) only when the filter accepts the
 * class's corresponding resource name.
 *
 * @throws ClassNotFoundException if the extension ClassLoader cannot find the class and
 *         the filter rejects it
 */
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
  // Try to load it from the extension class loader first.
  // (The original comment said "bootstrap class loader", but the code consults
  // extensionClassLoader.)
  try {
    return extensionClassLoader.loadClass(name);
  } catch (ClassNotFoundException e) {
    // Fall back to regular delegation only for classes the filter accepts;
    // otherwise propagate the original failure.
    if (filter.acceptResource(classNameToResourceName(name))) {
      return super.loadClass(name, resolve);
    }
    throw e;
  }
}
/**
 * Creates a ClassLoader that hides Spark and Scala classes/resources from the given
 * parent ClassLoader.
 *
 * @param parent the ClassLoader to filter
 */
ScalaFilterClassLoader(ClassLoader parent) {
  super(new FilterClassLoader(parent, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      // Resource names are '/'-separated paths.
      return !resource.startsWith("org/apache/spark/")
        && !resource.startsWith("org/spark-project/")
        && !resource.startsWith("scala/")
        && !"scala.class".equals(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      // BUG FIX: package names are '.'-separated (other acceptPackage implementations in
      // this file test prefixes like "org.slf4j"); the original '/'-separated prefixes
      // could never match, so this filter never rejected any package.
      return !packageName.startsWith("org.apache.spark")
        && !packageName.startsWith("org.spark-project")
        && !packageName.startsWith("scala.");
    }
  }));
}
/**
 * Creates a new instance for the following set of {@link URL}.
 *
 * @param urls the URLs from which to load classes and resources
 * @param parent the parent classloader for delegation
 */
public SparkContainerClassLoader(URL[] urls, ClassLoader parent) {
  super(urls, parent);
  // The rewriter resolves class resources through this ClassLoader.
  this.sparkClassRewriter =
    new SparkClassRewriter(resourceName -> ClassLoaders.openResource(this, resourceName), false);
}
/**
 * Creates a new {@link ClassLoader} that only exposes classes in packages declared by
 * "Export-Package" in the manifest.
 *
 * @return a {@code PackageFilterClassLoader} whose parent is this ClassLoader and which
 *         accepts only package names contained in {@code exportPackages}
 */
public ClassLoader getExportPackagesClassLoader() {
  // Predicates.in(...) yields a membership test over the exported package names.
  return new PackageFilterClassLoader(this, Predicates.in(exportPackages));
}
}
/**
 * Finds the {@link MapReduceClassLoader} from the {@link ClassLoader} inside the given
 * {@link Configuration}.
 *
 * @param configuration the configuration whose ClassLoader is searched
 * @return the {@link MapReduceClassLoader} found in the configuration's ClassLoader
 * @throws IllegalArgumentException if no {@link MapReduceClassLoader} can be found from
 *         the {@link Configuration}.
 */
public static MapReduceClassLoader getFromConfiguration(Configuration configuration) {
  ClassLoader confClassLoader = configuration.getClassLoader();
  return Delegators.getDelegate(confClassLoader, MapReduceClassLoader.class);
}
@Override
public boolean acceptPackage(String packageName) {
  // Always expose org.slf4j packages; delegate every other package to the wrapped filter.
  return packageName.startsWith("org.slf4j") || filter.acceptPackage(packageName);
}
});
@Override
public T next() throws IOException {
  // Lazily apply the transformation to each element pulled from the underlying iterator.
  return transform.apply(itor.next());
}
};
/**
 * Creates a new {@link FilterClassLoader} that filters classes based on the
 * {@link #defaultFilter()} on the given parent ClassLoader.
 *
 * @param parentClassLoader the ClassLoader to filter from.
 * @return a new instance of {@link FilterClassLoader}.
 */
public static FilterClassLoader create(ClassLoader parentClassLoader) {
  return new FilterClassLoader(parentClassLoader, defaultFilter());
}
/**
 * Loads the named class, consulting the extension ClassLoader first and falling back to
 * normal parent delegation (via {@code super.loadClass}) only when the filter accepts the
 * class's corresponding resource name.
 *
 * @throws ClassNotFoundException if the extension ClassLoader cannot find the class and
 *         the filter rejects it
 */
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
  // Try to load it from the extension class loader first.
  // (The original comment said "bootstrap class loader", but the code consults
  // extensionClassLoader.)
  try {
    return extensionClassLoader.loadClass(name);
  } catch (ClassNotFoundException e) {
    // Fall back to regular delegation only for classes the filter accepts;
    // otherwise propagate the original failure.
    if (filter.acceptResource(classNameToResourceName(name))) {
      return super.loadClass(name, resolve);
    }
    throw e;
  }
}
/**
 * Creates the ClassLoader used for program invocation by wrapping the current
 * {@link MapReduceClassLoader} in a {@link WeakReferenceDelegatorClassLoader}
 * (presumably to avoid holding a strong reference to the delegate — confirm against
 * the delegator's implementation).
 *
 * @return a new {@link WeakReferenceDelegatorClassLoader} delegating to the
 *         MapReduce ClassLoader
 * @throws IllegalStateException if the MapReduceClassLoader has not been set yet
 */
@Override
protected ClassLoader createProgramInvocationClassLoader() {
  // Fail fast with a descriptive message rather than a bare NullPointerException;
  // mirrors the guarded variant of this method used elsewhere in this file.
  if (mapReduceClassLoader == null) {
    throw new IllegalStateException("The MapReduceClassLoader is not yet set");
  }
  return new WeakReferenceDelegatorClassLoader(mapReduceClassLoader);
}
/**
 * Creates a ClassLoader that hides Spark and Scala classes/resources from the given
 * parent ClassLoader.
 *
 * @param parent the ClassLoader to filter
 */
ScalaFilterClassLoader(ClassLoader parent) {
  super(new FilterClassLoader(parent, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      // Resource names are '/'-separated paths.
      return !resource.startsWith("org/apache/spark/")
        && !resource.startsWith("org/spark-project/")
        && !resource.startsWith("scala/")
        && !"scala.class".equals(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      // BUG FIX: package names are '.'-separated (other acceptPackage implementations in
      // this file test prefixes like "org.slf4j"); the original '/'-separated prefixes
      // could never match, so this filter never rejected any package.
      return !packageName.startsWith("org.apache.spark")
        && !packageName.startsWith("org.spark-project")
        && !packageName.startsWith("scala.");
    }
  }));
}
/**
 * Populates the given collection with every {@link URL} that this ClassLoader uses,
 * including all URLs used by the parents of the given ClassLoader. Equivalent to calling
 * {@link #getClassLoaderURLs(ClassLoader, boolean, Collection)} with {@code childFirst}
 * set to {@code false}.
 *
 * @param classLoader the {@link ClassLoader} whose URLs are collected
 * @param urls the {@link Collection} that receives the discovered {@link URL}s
 * @return the same {@link Collection} passed in via the {@code urls} parameter
 */
public static <T extends Collection<? super URL>> T getClassLoaderURLs(ClassLoader classLoader, T urls) {
  // Parent-first traversal is the documented default for this convenience overload.
  boolean childFirst = false;
  return getClassLoaderURLs(classLoader, childFirst, urls);
}
@Override protected ClassLoader createProgramInvocationClassLoader() { if (mapReduceClassLoader == null) { // This shouldn't happen. Just to prevent bug and be able to catch it in unit-test. throw new IllegalStateException("The MapReduceClassLoader is not yet set"); } return new WeakReferenceDelegatorClassLoader(mapReduceClassLoader); }
/**
 * Creates a ClassLoader that hides Spark and Scala classes/resources from the given
 * parent ClassLoader.
 *
 * @param parent the ClassLoader to filter
 */
ScalaFilterClassLoader(ClassLoader parent) {
  super(new FilterClassLoader(parent, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      // Resource names are '/'-separated paths.
      return !resource.startsWith("org/apache/spark/")
        && !resource.startsWith("org/spark-project/")
        && !resource.startsWith("scala/")
        && !"scala.class".equals(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      // BUG FIX: package names are '.'-separated (other acceptPackage implementations in
      // this file test prefixes like "org.slf4j"); the original '/'-separated prefixes
      // could never match, so this filter never rejected any package.
      return !packageName.startsWith("org.apache.spark")
        && !packageName.startsWith("org.spark-project")
        && !packageName.startsWith("scala.");
    }
  }));
}