/**
 * Creates a new {@link HadoopClassLoader} labelled with the given name, built from the
 * resource manager's class path and the configured native library names.
 *
 * @param name Class loader name.
 * @return Newly created class loader.
 */
private HadoopClassLoader createClassLoader(String name) {
    return new HadoopClassLoader(rsrcMgr.classPath(), name, libNames, helper);
}
}
/**
 * Builds the combined class path: application JARs first, then Hadoop JARs,
 * then any explicitly supplied URLs.
 *
 * @param urls Extra URLs to append; may be {@code null} or empty.
 * @return Combined URL array.
 */
private static URL[] addHadoopUrls(URL[] urls) {
    Collection<URL> hadoopJars;

    try {
        hadoopJars = hadoopUrls();
    }
    catch (IgniteCheckedException e) {
        throw new RuntimeException(e);
    }

    int extraCnt = urls == null ? 0 : urls.length;

    ArrayList<URL> res = new ArrayList<>(hadoopJars.size() + appJars.size() + extraCnt);

    res.addAll(appJars);
    res.addAll(hadoopJars);

    if (!F.isEmpty(urls))
        res.addAll(F.asList(urls));

    return res.toArray(new URL[res.size()]);
}
/** {@inheritDoc} */ @Override protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { try { // Always load Hadoop classes explicitly, since Hadoop can be available in App classpath. if (name.equals(CLS_SHUTDOWN_HOOK_MANAGER)) // Dirty hack to get rid of Hadoop shutdown hooks. return loadReplace(name, CLS_SHUTDOWN_HOOK_MANAGER_REPLACE); else if (name.equals(CLS_DAEMON)) // We replace this in order to be able to forcibly stop some daemon threads // that otherwise never stop (e.g. PeerCache runnables): return loadReplace(name, CLS_DAEMON_REPLACE); // For Ignite Hadoop and IGFS classes we have to check if they depend on Hadoop. if (loadByCurrentClassloader(name)) return loadClassExplicitly(name, resolve); return super.loadClass(name, resolve); } catch (NoClassDefFoundError | ClassNotFoundException e) { throw new ClassNotFoundException("Failed to load class: " + name, e); } }
/** * @param name Class name. * @param resolve Resolve class. * @return Class. * @throws ClassNotFoundException If failed. */ private Class<?> loadClassExplicitly(String name, boolean resolve) throws ClassNotFoundException { synchronized (getClassLoadingLock(name)) { // First, check if the class has already been loaded Class c = findLoadedClass(name); if (c == null) c = findClass(name); if (resolve) resolveClass(c); return c; } }
/**
 * Constructor.
 *
 * @param urls Urls.
 * @param name Classloader name.
 * @param libNames Optional additional native library names to be linked from parent classloader.
 * @param helper Helper used to load and transform bytecode for replaced classes.
 */
public HadoopClassLoader(URL[] urls, String name, @Nullable String[] libNames, HadoopHelper helper) {
    // Parent is always the application class loader; Hadoop URLs are appended up front.
    super(addHadoopUrls(urls), APP_CLS_LDR);

    // Nesting of Hadoop class loaders is not supported.
    assert !(getParent() instanceof HadoopClassLoader);

    this.name = name;
    this.helper = helper;

    initializeNativeLibraries(libNames);
}
/** * Load a class replacing it with our own implementation. * * @param originalName Name. * @param replaceName Replacement. * @return Class. */ private Class<?> loadReplace(final String originalName, final String replaceName) { synchronized (getClassLoadingLock(originalName)) { // First, check if the class has already been loaded Class c = findLoadedClass(originalName); if (c != null) return c; byte[] bytes = bytesCache.get(originalName); if (bytes == null) { InputStream in = helper.loadClassBytes(this, replaceName); if (in == null) throw new IgniteException("Failed to replace class [originalName=" + originalName + ", replaceName=" + replaceName + ']'); bytes = helper.loadReplace(in, originalName, replaceName); bytesCache.put(originalName, bytes); } return defineClass(originalName, bytes, 0, bytes.length); } }
/** {@inheritDoc} */ @Override protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { if (HadoopClassLoader.loadByCurrentClassloader(name)) { try { synchronized (getClassLoadingLock(name)) { // First, check if the class has already been loaded Class c = findLoadedClass(name); if (c == null) c = findClass(name); if (resolve) resolveClass(c); return c; } } catch (NoClassDefFoundError | ClassNotFoundException e) { throw new IgniteException("Failed to load class by test class loader: " + name, e); } } return super.loadClass(name, resolve); } }
/** * @param name Class name. * @param resolve Resolve class. * @return Class. * @throws ClassNotFoundException If failed. */ private Class<?> loadClassExplicitly(String name, boolean resolve) throws ClassNotFoundException { synchronized (getClassLoadingLock(name)) { // First, check if the class has already been loaded Class c = findLoadedClass(name); if (c == null) c = findClass(name); if (resolve) resolveClass(c); return c; } }
/**
 * Constructor.
 *
 * @param urls Urls.
 * @param name Classloader name.
 * @param libNames Optional additional native library names to be linked from parent classloader.
 * @param helper Helper used to load and transform bytecode for replaced classes.
 */
public HadoopClassLoader(URL[] urls, String name, @Nullable String[] libNames, HadoopHelper helper) {
    // Parent is always the application class loader; Hadoop URLs are appended up front.
    super(addHadoopUrls(urls), APP_CLS_LDR);

    // Nesting of Hadoop class loaders is not supported.
    assert !(getParent() instanceof HadoopClassLoader);

    this.name = name;
    this.helper = helper;

    initializeNativeLibraries(libNames);
}
/** * Load a class replacing it with our own implementation. * * @param originalName Name. * @param replaceName Replacement. * @return Class. */ private Class<?> loadReplace(final String originalName, final String replaceName) { synchronized (getClassLoadingLock(originalName)) { // First, check if the class has already been loaded Class c = findLoadedClass(originalName); if (c != null) return c; byte[] bytes = bytesCache.get(originalName); if (bytes == null) { InputStream in = helper.loadClassBytes(this, replaceName); if (in == null) throw new IgniteException("Failed to replace class [originalName=" + originalName + ", replaceName=" + replaceName + ']'); bytes = helper.loadReplace(in, originalName, replaceName); bytesCache.put(originalName, bytes); } return defineClass(originalName, bytes, 0, bytes.length); } }
/** {@inheritDoc} */ @Override protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { try { // Always load Hadoop classes explicitly, since Hadoop can be available in App classpath. if (name.equals(CLS_SHUTDOWN_HOOK_MANAGER)) // Dirty hack to get rid of Hadoop shutdown hooks. return loadReplace(name, CLS_SHUTDOWN_HOOK_MANAGER_REPLACE); else if (name.equals(CLS_DAEMON)) // We replace this in order to be able to forcibly stop some daemon threads // that otherwise never stop (e.g. PeerCache runnables): return loadReplace(name, CLS_DAEMON_REPLACE); // For Ignite Hadoop and IGFS classes we have to check if they depend on Hadoop. if (loadByCurrentClassloader(name)) return loadClassExplicitly(name, resolve); return super.loadClass(name, resolve); } catch (NoClassDefFoundError | ClassNotFoundException e) { throw new ClassNotFoundException("Failed to load class: " + name, e); } }
/** {@inheritDoc} */
@Override public HadoopClassLoader commonClassLoader() {
    // Double-checked locking: read the cached loader once outside the lock.
    // NOTE(review): this pattern is only safe if the 'ldr' field is declared
    // volatile — the field declaration is outside this snippet; verify it.
    HadoopClassLoader res = ldr;

    if (res == null) {
        synchronized (this) {
            res = ldr;

            if (res == null) {
                String[] libNames = null;

                // Pick up native library names from the Hadoop configuration, if available.
                if (ctx != null && ctx.config().getHadoopConfiguration() != null)
                    libNames = ctx.config().getHadoopConfiguration().getNativeLibraryNames();

                // Lazily create the shared class loader on first request.
                res = new HadoopClassLoader(null, "hadoop-common", libNames, this);

                ldr = res;
            }
        }
    }

    return res;
}
// NOTE(review): return value of hadoopUrls() is discarded — presumably this call is
// made only for its side effect (resolving/caching the Hadoop class path) or to
// trigger its failure path; confirm against the enclosing context.
HadoopClassLoader.hadoopUrls();
/** * Checks Snappy codec usage. * * @throws Exception On error. */ @Ignore("https://issues.apache.org/jira/browse/IGNITE-9920") @Test public void testSnappy() throws Throwable { // Run Snappy test in default class loader: checkSnappy(); // Run the same in several more class loaders simulating jobs and tasks: for (int i = 0; i < 2; i++) { ClassLoader hadoopClsLdr = new HadoopClassLoader(null, "cl-" + i, null, new HadoopHelperImpl()); Class<?> cls = (Class)Class.forName(HadoopSnappyTest.class.getName(), true, hadoopClsLdr); assertEquals(hadoopClsLdr, cls.getClassLoader()); U.invoke(cls, null, "checkSnappy"); } }
/**
 * Builds the combined class path: application JARs first, then Hadoop JARs,
 * then any explicitly supplied URLs.
 *
 * @param urls Extra URLs to append; may be {@code null} or empty.
 * @return Combined URL array.
 */
private static URL[] addHadoopUrls(URL[] urls) {
    Collection<URL> hadoopJars;

    try {
        hadoopJars = hadoopUrls();
    }
    catch (IgniteCheckedException e) {
        throw new RuntimeException(e);
    }

    int extraCnt = urls == null ? 0 : urls.length;

    ArrayList<URL> res = new ArrayList<>(hadoopJars.size() + appJars.size() + extraCnt);

    res.addAll(appJars);
    res.addAll(hadoopJars);

    if (!F.isEmpty(urls))
        res.addAll(F.asList(urls));

    return res.toArray(new URL[res.size()]);
}