ScalaFilterClassLoader(ClassLoader parent) {
  super(new FilterClassLoader(parent, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return !resource.startsWith("org/apache/spark/") && !resource.startsWith("org/spark-project/")
        && !resource.startsWith("scala/") && !"scala.class".equals(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return !packageName.startsWith("org/apache/spark") && !packageName.startsWith("org/spark-project/")
        && !packageName.startsWith("scala/");
    }
  }));
}

@Override
public ClassLoader createProgramClassLoaderParent() {
  return new FilterClassLoader(getClass().getClassLoader(),
                               SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
}

private static ClassLoader createClassFilteredClassLoader(Iterable<String> allowedClasses,
                                                          ClassLoader parentClassLoader) {
  final Set<String> allowedResources = ImmutableSet.copyOf(
    Iterables.transform(allowedClasses, CLASS_TO_RESOURCE_NAME));
  return new FilterClassLoader(parentClassLoader, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return allowedResources.contains(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return true;
    }
  });
}

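CLASS_TO_RESOURCE_NAME is referenced above but not shown in these snippets. Below is a minimal sketch of what such a transform plausibly looks like, assuming Guava's Function and the usual class-name-to-resource mapping; it is an illustration, not the actual CDAP constant.

import com.google.common.base.Function;

// Hypothetical illustration of a class-name-to-resource-name transform; the real
// CLASS_TO_RESOURCE_NAME used above is not part of these snippets.
final class ClassToResourceNameSketch {
  static final Function<String, String> CLASS_TO_RESOURCE_NAME = new Function<String, String>() {
    @Override
    public String apply(String className) {
      // e.g. "com.example.Foo" -> "com/example/Foo.class"
      return className.replace('.', '/') + ".class";
    }
  };
}
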
/**
 * Creates a new {@link FilterClassLoader} that filters classes based on the {@link #defaultFilter()} on the
 * given parent ClassLoader.
 *
 * @param parentClassLoader the ClassLoader to filter from.
 * @return a new instance of {@link FilterClassLoader}.
 */
public static FilterClassLoader create(ClassLoader parentClassLoader) {
  return new FilterClassLoader(parentClassLoader, defaultFilter());
}

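A minimal usage sketch for the create factory above, assuming the behaviour shown in the other snippets; the loaded class name is only an example of something the default filter is expected to accept.

// Illustrative sketch only: whether a given class loads depends on what defaultFilter() accepts.
void loadThroughFilteredClassLoader() throws ClassNotFoundException {
  ClassLoader filtered = FilterClassLoader.create(getClass().getClassLoader());
  // Classes rejected by the default filter would fail here with ClassNotFoundException.
  Class<?> cls = filtered.loadClass("co.cask.cdap.api.annotation.Plugin");
}
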
@Override
protected ClassLoader getExtensionParentClassLoader() {
  return new FilterClassLoader(super.getExtensionParentClassLoader(), new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return resource.startsWith("co/cask/cdap/runtime/spi");
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return packageName.startsWith("co/cask/cdap/runtime/spi");
    }
  });
}

/**
 * @return parent classloader for extensions
 */
private ClassLoader getExtensionParentClassLoader() {
  FilterClassLoader.Filter filter = getExtensionParentClassLoaderFilter();
  // SLF4J resources are always loaded from the parent.
  return new FilterClassLoader(getClass().getClassLoader(), new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return resource.startsWith("org/slf4j") || filter.acceptResource(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return packageName.startsWith("org.slf4j") || filter.acceptPackage(packageName);
    }
  });
}

@VisibleForTesting
static ClassLoader createParent() {
  ClassLoader baseClassLoader = AuthorizerClassLoader.class.getClassLoader();
  final Set<String> authorizerResources = traceSecurityDependencies(baseClassLoader);

  // by default, FilterClassLoader's defaultFilter allows all hadoop classes, which makes it so that
  // the authorizer extension can share the same instance of UserGroupInformation. This allows kerberos credential
  // renewal to also renew for any extension
  final FilterClassLoader.Filter defaultFilter = FilterClassLoader.defaultFilter();

  return new FilterClassLoader(baseClassLoader, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return defaultFilter.acceptResource(resource) || authorizerResources.contains(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return true;
    }
  });
}

/**
 * @param filter A {@link FilterClassLoader.Filter} for filtering out classes from the context ClassLoader
 * @param extraClasspath extra list of {@link URL} to be added to the end of the classpath for the
 *                       {@link MainClassLoader} to be created
 * @return a new instance from the current context classloader or the system classloader. The returned
 *         {@link MainClassLoader} will be the defining classloader for classes in the context classloader
 *         that the filter rejected. For classes that pass the filter, the defining classloader will be the
 *         original context classloader. It will return {@code null} if it is not able to create a new
 *         instance due to lack of classpath information.
 */
@Nullable
public static MainClassLoader createFromContext(FilterClassLoader.Filter filter, URL... extraClasspath) {
  ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
  if (classLoader == null) {
    classLoader = ClassLoader.getSystemClassLoader();
  }

  List<URL> classpath = new ArrayList<>();
  if (classLoader instanceof URLClassLoader) {
    classpath.addAll(Arrays.asList(((URLClassLoader) classLoader).getURLs()));
  } else if (classLoader == ClassLoader.getSystemClassLoader()) {
    addClassPath(classpath);
  } else {
    // Not able to create a new MainClassLoader
    return null;
  }
  classpath.addAll(Arrays.asList(extraClasspath));

  ClassLoader filtered = new FilterClassLoader(classLoader, filter);
  ClassLoader parent = new CombineClassLoader(classLoader.getParent(), filtered);
  return new MainClassLoader(classpath.toArray(new URL[classpath.size()]), parent);
}

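A rough usage sketch for createFromContext; the filter choice and the extra jar path are placeholders, not values taken from the CDAP code above.

// Illustrative sketch only: the extra jar path below is a placeholder.
void installMainClassLoader() throws MalformedURLException {
  FilterClassLoader.Filter filter = FilterClassLoader.defaultFilter();
  URL extraJar = new File("/path/to/extra-lib.jar").toURI().toURL();

  MainClassLoader mainClassLoader = MainClassLoader.createFromContext(filter, extraJar);
  if (mainClassLoader == null) {
    // createFromContext returns null when no classpath information could be derived.
    throw new IllegalStateException("Unable to create MainClassLoader");
  }
  Thread.currentThread().setContextClassLoader(mainClassLoader);
}
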
private static Program createProgram(CConfiguration cConf,
                                     SparkRuntimeContextConfig contextConfig) throws IOException {
  File programJar = new File(PROGRAM_JAR_NAME);
  File programDir = new File(PROGRAM_JAR_EXPANDED_NAME);
  ClassLoader parentClassLoader = new FilterClassLoader(SparkRuntimeContextProvider.class.getClassLoader(),
                                                        SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
  ClassLoader classLoader = new ProgramClassLoader(cConf, programDir, parentClassLoader);

  return new DefaultProgram(new ProgramDescriptor(contextConfig.getProgramId(),
                                                  contextConfig.getApplicationSpecification()),
                            Locations.toLocation(programJar), classLoader);
}