ScalaFilterClassLoader(ClassLoader parent) {
  super(new FilterClassLoader(parent, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return !resource.startsWith("org/apache/spark/") && !resource.startsWith("org/spark-project/")
        && !resource.startsWith("scala/") && !"scala.class".equals(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return !packageName.startsWith("org/apache/spark") && !packageName.startsWith("org/spark-project/")
        && !packageName.startsWith("scala/");
    }
  }));
}
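For orientation, a minimal sketch of what this filter achieves at runtime; the class name scala.Option is purely illustrative, and the sketch assumes the constructor is accessible from the caller and that Scala is not also reachable through an extension classloader:

// Hypothetical demonstration: Spark and Scala packages are hidden from
// children of the ScalaFilterClassLoader, even when the parent can see them.
ClassLoader loader = new ScalaFilterClassLoader(Thread.currentThread().getContextClassLoader());
try {
  loader.loadClass("scala.Option");  // "scala/Option.class" fails acceptResource
} catch (ClassNotFoundException e) {
  // expected: the filter rejects anything under "scala/"
}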
/**
 * Creates a new {@link FilterClassLoader} that filters classes based on the {@link #defaultFilter()} over the
 * given parent ClassLoader.
 *
 * @param parentClassLoader the ClassLoader to filter from.
 * @return a new instance of {@link FilterClassLoader}.
 */
public static FilterClassLoader create(ClassLoader parentClassLoader) {
  return new FilterClassLoader(parentClassLoader, defaultFilter());
}
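A usage sketch; MyRunner is a placeholder for any class whose classloader should be filtered:

// Sketch: both forms are equivalent, since create() simply applies defaultFilter().
FilterClassLoader viaFactory = FilterClassLoader.create(MyRunner.class.getClassLoader());
FilterClassLoader viaConstructor =
  new FilterClassLoader(MyRunner.class.getClassLoader(), FilterClassLoader.defaultFilter());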
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
  // Try to load it from the extension class loader first
  try {
    return extensionClassLoader.loadClass(name);
  } catch (ClassNotFoundException e) {
    if (filter.acceptResource(classNameToResourceName(name))) {
      return super.loadClass(name, resolve);
    }
    throw e;
  }
}
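The classNameToResourceName helper is not shown in this snippet; a plausible sketch of it, assuming the standard binary-name-to-resource mapping:

// Hypothetical helper: maps a binary class name to the resource path checked
// by the filter, e.g. "java.lang.String" -> "java/lang/String.class".
private static String classNameToResourceName(String className) {
  return className.replace('.', '/') + ".class";
}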
/**
 * Gets a {@link SystemDatasetInstantiator} that can instantiate datasets using the given classloader as the
 * parent classloader for datasets. It must be closed when no longer needed, as dataset jars may be unpacked
 * in order to create classloaders for custom datasets.
 *
 * The given parent classloader will be wrapped in a {@link FilterClassLoader}
 * to prevent CDAP dependencies from leaking through. For example, if a custom dataset has an Avro dependency,
 * the classloader should use the Avro from the custom dataset and not from CDAP.
 *
 * @param parentClassLoader the parent classloader to use when instantiating datasets. If null, the system
 *                          classloader will be used
 * @return a dataset instantiator that can be used to instantiate datasets
 */
public SystemDatasetInstantiator createDatasetInstantiator(@Nullable ClassLoader parentClassLoader) {
  parentClassLoader = parentClassLoader == null
    ? Objects.firstNonNull(Thread.currentThread().getContextClassLoader(), getClass().getClassLoader())
    : parentClassLoader;
  return datasetInstantiatorFactory.create(FilterClassLoader.create(parentClassLoader));
}
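A usage sketch, assuming SystemDatasetInstantiator implements Closeable (implied by the "must be closed" note above); framework and programClassLoader are placeholders:

// Sketch: try-with-resources ensures any dataset jars unpacked by the
// instantiator are cleaned up once it is no longer needed.
try (SystemDatasetInstantiator instantiator = framework.createDatasetInstantiator(programClassLoader)) {
  // instantiate datasets through `instantiator` here
}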
@Test
public void testBootstrapResourcesVisible() throws IOException {
  FilterClassLoader classLoader = FilterClassLoader.create(this.getClass().getClassLoader());
  Assert.assertNotNull(classLoader.getResource("java/lang/String.class"));
}
@Test
public void testAPIVisible() throws ClassNotFoundException {
  FilterClassLoader classLoader = FilterClassLoader.create(this.getClass().getClassLoader());
  Assert.assertSame(Application.class, classLoader.loadClass(Application.class.getName()));

  // Dependencies of API classes should also be visible
  Assert.assertSame(Logger.class, classLoader.loadClass(Logger.class.getName()));

  // JAX-RS classes should also be visible
  Assert.assertSame(PUT.class, classLoader.loadClass(PUT.class.getName()));
}
/**
 * Creates a program {@link ClassLoader} based on the MR job config.
 */
private static ClassLoader createProgramClassLoader(MapReduceContextConfig contextConfig) {
  // In distributed mode, the program is created by expanding the program jar.
  // The program jar is localized to the container with the program jar name.
  // It's ok to expand into a temp dir in the local directory, as it will be removed along with the YARN container.
  Location programLocation = Locations.toLocation(new File(contextConfig.getProgramJarName()));
  try {
    File unpackDir = DirUtils.createTempDir(new File(System.getProperty("user.dir")));
    LOG.info("Create ProgramClassLoader from {}, expand to {}", programLocation, unpackDir);

    BundleJarUtil.unJar(programLocation, unpackDir);
    return new ProgramClassLoader(contextConfig.getCConf(), unpackDir,
                                  FilterClassLoader.create(contextConfig.getHConf().getClassLoader()));
  } catch (IOException e) {
    LOG.error("Failed to create ProgramClassLoader", e);
    throw Throwables.propagate(e);
  }
}
@Test
public void testExtensionResourcesVisible() throws ClassNotFoundException {
  // There isn't really a way to guarantee what classes are in the extensions directory,
  // so we just check that if the system classloader can load a class from there,
  // the filter classloader should be able to load it as well.
  ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();
  Class<?> cls;
  try {
    cls = systemClassLoader.loadClass("com.sun.nio.zipfs.ZipInfo");
  } catch (ClassNotFoundException e) {
    // class isn't in extensions, this test will be a no-op
    return;
  }
  FilterClassLoader classLoader = FilterClassLoader.create(this.getClass().getClassLoader());
  Assert.assertSame(cls, classLoader.loadClass("com.sun.nio.zipfs.ZipInfo"));
}
@VisibleForTesting
static ClassLoader createParent() {
  ClassLoader baseClassLoader = AuthorizerClassLoader.class.getClassLoader();

  final Set<String> authorizerResources = traceSecurityDependencies(baseClassLoader);
  // By default, FilterClassLoader's defaultFilter allows all Hadoop classes, which makes it so that
  // the authorizer extension can share the same instance of UserGroupInformation. This allows Kerberos
  // credential renewal to also renew for any extension.
  final FilterClassLoader.Filter defaultFilter = FilterClassLoader.defaultFilter();

  return new FilterClassLoader(baseClassLoader, new FilterClassLoader.Filter() {
    @Override
    public boolean acceptResource(String resource) {
      return defaultFilter.acceptResource(resource) || authorizerResources.contains(resource);
    }

    @Override
    public boolean acceptPackage(String packageName) {
      return true;
    }
  });
}
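The composition pattern above (default filter OR an explicit allow-list) is reusable beyond authorizers; a minimal sketch with a purely illustrative extra resource name:

// Sketch: accept everything the default filter accepts, plus one extra resource.
final FilterClassLoader.Filter base = FilterClassLoader.defaultFilter();
FilterClassLoader.Filter combined = new FilterClassLoader.Filter() {
  @Override
  public boolean acceptResource(String resource) {
    return base.acceptResource(resource) || "com/example/Extra.class".equals(resource);
  }

  @Override
  public boolean acceptPackage(String packageName) {
    return true;
  }
};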
@Test
public void testHadoopResourcesVisible() throws ClassNotFoundException {
  FilterClassLoader classLoader = FilterClassLoader.create(this.getClass().getClassLoader());

  ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(classLoader);
  try {
    // VersionInfo uses the context class loader to find the "common-version-info.properties" file.
    // If that file is missing or cannot be located, getVersion() returns "Unknown".
    Assert.assertNotEquals("Unknown", VersionInfo.getVersion());
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }

  // Load a standard Hadoop class. It should pass, and the loaded class should be the same
  // as the system Configuration class.
  Assert.assertSame(Configuration.class, classLoader.loadClass(Configuration.class.getName()));
}
@Override
public ClassLoader createProgramClassLoaderParent() {
  return new FilterClassLoader(getClass().getClassLoader(), SparkResourceFilters.SPARK_PROGRAM_CLASS_LOADER_FILTER);
}
/**
 * Creates a {@link Program} that can be executed by the given {@link ProgramRunner}.
 *
 * @param cConf the CDAP configuration
 * @param programRunner the {@link ProgramRunner} for executing the program. If provided and if it implements
 *                      {@link ProgramClassLoaderProvider}, then the {@link ClassLoader} created for the
 *                      {@link Program} will be determined based on it. Otherwise, the {@link ClassLoader}
 *                      will only have visibility to cdap-api and hadoop classes.
 * @param programDescriptor description of the program to create
 * @param programJarLocation the {@link Location} of the program jar file
 * @param unpackedDir a directory that the program jar file was unpacked to
 * @return a new {@link Program} instance.
 */
public static Program create(CConfiguration cConf, @Nullable ProgramRunner programRunner,
                             ProgramDescriptor programDescriptor, Location programJarLocation, File unpackedDir) {
  ClassLoader parentClassLoader = null;
  if (programRunner instanceof ProgramClassLoaderProvider) {
    parentClassLoader = ((ProgramClassLoaderProvider) programRunner).createProgramClassLoaderParent();
  }
  if (parentClassLoader == null) {
    parentClassLoader = FilterClassLoader.create(Programs.class.getClassLoader());
  }
  return new DefaultProgram(programDescriptor, programJarLocation,
                            new ProgramClassLoader(cConf, unpackedDir, parentClassLoader));
}
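A usage sketch tying the pieces together; the runner, descriptor, and location arguments are placeholders:

// Sketch: a runner that implements ProgramClassLoaderProvider (like the Spark
// runner above) supplies the parent classloader; otherwise Programs.create
// falls back to FilterClassLoader.create(Programs.class.getClassLoader()).
Program program = Programs.create(cConf, sparkProgramRunner, programDescriptor,
                                  programJarLocation, unpackedDir);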