/**
 * Loads a {@link Class} with the given {@link ClassLoader}, temporarily installing that loader
 * as the thread's context ClassLoader for the duration of the load and restoring the previous
 * context ClassLoader afterwards.
 *
 * @param classLoader the loader used both as the context ClassLoader and to resolve the class
 * @param className fully-qualified name of the class to load
 * @return the loaded class, cast to the caller's expected type
 * @throws ClassNotFoundException if the given loader cannot find the class
 */
@SuppressWarnings("unchecked")
private static <T> Class<T> loadClass(ClassLoader classLoader, String className) throws ClassNotFoundException {
  ClassLoader previous = ClassLoaders.setContextClassLoader(classLoader);
  try {
    Class<?> loaded = classLoader.loadClass(className);
    return (Class<T>) loaded;
  } finally {
    ClassLoaders.setContextClassLoader(previous);
  }
}
/**
 * Loads a {@link Class} with the given {@link ClassLoader}, temporarily installing that loader
 * as the thread's context ClassLoader for the duration of the load and restoring the previous
 * context ClassLoader afterwards.
 *
 * @param classLoader the loader used both as the context ClassLoader and to resolve the class
 * @param className fully-qualified name of the class to load
 * @return the loaded class, cast to the caller's expected type
 * @throws ClassNotFoundException if the given loader cannot find the class
 */
@SuppressWarnings("unchecked")
private static <T> Class<T> loadClass(ClassLoader classLoader, String className) throws ClassNotFoundException {
  ClassLoader previous = ClassLoaders.setContextClassLoader(classLoader);
  try {
    Class<?> loaded = classLoader.loadClass(className);
    return (Class<T>) loaded;
  } finally {
    ClassLoaders.setContextClassLoader(previous);
  }
}
/**
 * Runs the given {@link Callable} with the context ClassLoader set to the combined
 * program + system ClassLoader, restoring the previous context ClassLoader when done.
 *
 * @param callable the code to run
 * @param <T> result type of the callable
 * @return whatever the callable returns
 * @throws Exception if the callable throws
 */
public <T> T execute(Callable<T> callable) throws Exception {
  ClassLoader previous = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
  try {
    return callable.call();
  } finally {
    ClassLoaders.setContextClassLoader(previous);
  }
}
/**
 * Runs the given {@link Callable} with the context ClassLoader set to the combined
 * program + system ClassLoader, restoring the previous context ClassLoader when done.
 *
 * @param callable the code to run
 * @param <T> result type of the callable
 * @return whatever the callable returns
 * @throws Exception if the callable throws
 */
public <T> T execute(Callable<T> callable) throws Exception {
  ClassLoader previous = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
  try {
    return callable.call();
  } finally {
    ClassLoaders.setContextClassLoader(previous);
  }
}
/**
 * Runs the given {@link ThrowingRunnable} with the context ClassLoader set to the combined
 * program + system ClassLoader, restoring the previous context ClassLoader when done.
 *
 * @param runnable the code to run
 * @throws Exception if the runnable throws
 */
public void execute(ThrowingRunnable runnable) throws Exception {
  ClassLoader previous = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
  try {
    runnable.run();
  } finally {
    ClassLoaders.setContextClassLoader(previous);
  }
}
/**
 * Creates a {@link ProgramRunner} that executes a Spark program from the given {@link Injector}.
 * The given ClassLoader is installed as the context ClassLoader while the runner class is
 * loaded and instantiated, and the previous context ClassLoader is restored afterwards.
 *
 * @param injector injector used to instantiate the runner
 * @param programRunnerClassName fully-qualified name of the {@link ProgramRunner} implementation
 * @param classLoader loader used to resolve the runner class and as the context ClassLoader
 * @return a new {@link ProgramRunner} instance
 * @throws RuntimeException wrapping any checked exception thrown during loading/instantiation;
 *         unchecked exceptions and errors are rethrown as-is
 */
private ProgramRunner createSparkProgramRunner(Injector injector, String programRunnerClassName, ClassLoader classLoader) {
  try {
    ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(classLoader);
    try {
      return createInstance(injector, Key.get(classLoader.loadClass(programRunnerClassName)), classLoader);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  } catch (Throwable t) {
    // Replaces the deprecated Throwables.propagate(t): rethrow unchecked throwables as-is and
    // wrap checked exceptions in RuntimeException — byte-identical observable behavior.
    if (t instanceof RuntimeException) {
      throw (RuntimeException) t;
    }
    if (t instanceof Error) {
      throw (Error) t;
    }
    throw new RuntimeException(t);
  }
}
/**
 * Executes the given {@link TxRunnable} in a transaction with the given timeout. The transaction
 * framework runs under this class's own ClassLoader, while the user-supplied runnable runs under
 * the program invocation ClassLoader; both context ClassLoaders are restored afterwards.
 */
@Override
public void execute(int timeoutInSeconds, final TxRunnable runnable) throws TransactionFailureException {
  // Transaction machinery runs with this class's ClassLoader as context.
  ClassLoader outerLoader = ClassLoaders.setContextClassLoader(getClass().getClassLoader());
  try {
    transactional.execute(timeoutInSeconds, new TxRunnable() {
      @Override
      public void run(DatasetContext context) throws Exception {
        // User code runs with the program invocation ClassLoader as context.
        ClassLoader innerLoader = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
        try {
          runnable.run(context);
        } finally {
          ClassLoaders.setContextClassLoader(innerLoader);
        }
      }
    });
  } finally {
    ClassLoaders.setContextClassLoader(outerLoader);
  }
}
/**
 * Executes the given {@link TxRunnable} in a transaction with the given timeout. The transaction
 * framework runs under this class's own ClassLoader, while the user-supplied runnable runs under
 * the program invocation ClassLoader; both context ClassLoaders are restored afterwards.
 */
@Override
public void execute(int timeoutInSeconds, final TxRunnable runnable) throws TransactionFailureException {
  // Transaction machinery runs with this class's ClassLoader as context.
  ClassLoader outerLoader = ClassLoaders.setContextClassLoader(getClass().getClassLoader());
  try {
    transactional.execute(timeoutInSeconds, new TxRunnable() {
      @Override
      public void run(DatasetContext context) throws Exception {
        // User code runs with the program invocation ClassLoader as context.
        ClassLoader innerLoader = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
        try {
          runnable.run(context);
        } finally {
          ClassLoaders.setContextClassLoader(innerLoader);
        }
      }
    });
  } finally {
    ClassLoaders.setContextClassLoader(outerLoader);
  }
}
/**
 * Creates a {@link ProgramRunner} that executes a Spark program from the given {@link Injector}.
 * The given ClassLoader is installed as the context ClassLoader while the runner class is
 * loaded and instantiated, and the previous context ClassLoader is restored afterwards.
 *
 * @param injector injector used to instantiate the runner
 * @param programRunnerClassName fully-qualified name of the {@link ProgramRunner} implementation
 * @param classLoader loader used to resolve the runner class and as the context ClassLoader
 * @return a new {@link ProgramRunner} instance
 * @throws RuntimeException wrapping any checked exception thrown during loading/instantiation;
 *         unchecked exceptions and errors are rethrown as-is
 */
private ProgramRunner createSparkProgramRunner(Injector injector, String programRunnerClassName, ClassLoader classLoader) {
  try {
    ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(classLoader);
    try {
      return createInstance(injector, Key.get(classLoader.loadClass(programRunnerClassName)), classLoader);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  } catch (Throwable t) {
    // Replaces the deprecated Throwables.propagate(t): rethrow unchecked throwables as-is and
    // wrap checked exceptions in RuntimeException — byte-identical observable behavior.
    if (t instanceof RuntimeException) {
      throw (RuntimeException) t;
    }
    if (t instanceof Error) {
      throw (Error) t;
    }
    throw new RuntimeException(t);
  }
}
/**
 * Creates a {@link ProgramRunner} that executes a Spark program from the given {@link Injector}.
 * The given ClassLoader is installed as the context ClassLoader while the runner class is
 * loaded and instantiated, and the previous context ClassLoader is restored afterwards.
 *
 * @param injector injector used to instantiate the runner
 * @param programRunnerClassName fully-qualified name of the {@link ProgramRunner} implementation
 * @param classLoader loader used to resolve the runner class and as the context ClassLoader
 * @return a new {@link ProgramRunner} instance
 * @throws RuntimeException wrapping any checked exception thrown during loading/instantiation;
 *         unchecked exceptions and errors are rethrown as-is
 */
private ProgramRunner createSparkProgramRunner(Injector injector, String programRunnerClassName, ClassLoader classLoader) {
  try {
    ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(classLoader);
    try {
      return createInstance(injector, Key.get(classLoader.loadClass(programRunnerClassName)), classLoader);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  } catch (Throwable t) {
    // Replaces the deprecated Throwables.propagate(t): rethrow unchecked throwables as-is and
    // wrap checked exceptions in RuntimeException — byte-identical observable behavior.
    if (t instanceof RuntimeException) {
      throw (RuntimeException) t;
    }
    if (t instanceof Error) {
      throw (Error) t;
    }
    throw new RuntimeException(t);
  }
}
/**
 * Runs the given {@link ThrowingRunnable} with the context ClassLoader set to the combined
 * program + system ClassLoader, restoring the previous context ClassLoader when done.
 *
 * @param runnable the code to run
 * @throws Exception if the runnable throws
 */
public void execute(ThrowingRunnable runnable) throws Exception {
  ClassLoader previous = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
  try {
    runnable.run();
  } finally {
    ClassLoaders.setContextClassLoader(previous);
  }
}
/**
 * Executes the given {@link TxRunnable} in a transaction, optionally retrying on transaction
 * conflicts. The transaction framework runs under this class's own ClassLoader, while the
 * user-supplied runnable runs under the program invocation ClassLoader; both context
 * ClassLoaders are restored afterwards.
 *
 * @param runnable the code to run inside the transaction
 * @param retryOnConflict if {@code true}, retries on conflict (up to 20 times, 100ms apart)
 * @throws TransactionFailureException if the transaction fails
 */
public void execute(final TxRunnable runnable, boolean retryOnConflict) throws TransactionFailureException {
  // Transaction machinery runs with this class's ClassLoader as context.
  ClassLoader outerLoader = ClassLoaders.setContextClassLoader(getClass().getClassLoader());
  try {
    Transactional txnl;
    if (retryOnConflict) {
      txnl = Transactions.createTransactionalWithRetry(transactional, RetryStrategies.retryOnConflict(20, 100));
    } else {
      txnl = transactional;
    }
    txnl.execute(new TxRunnable() {
      @Override
      public void run(DatasetContext context) throws Exception {
        // User code runs with the program invocation ClassLoader as context.
        ClassLoader innerLoader = ClassLoaders.setContextClassLoader(getProgramInvocationClassLoader());
        try {
          runnable.run(context);
        } finally {
          ClassLoaders.setContextClassLoader(innerLoader);
        }
      }
    });
  } finally {
    ClassLoaders.setContextClassLoader(outerLoader);
  }
}
/** * Either calls onSuccess or onFailure on all of the DatasetOutputCommitters. */ private void finishDatasets(final JobContext jobContext, final boolean success) throws Exception { ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(jobContext.getConfiguration().getClassLoader()); Map<String, DatasetOutputCommitter> datasetOutputCommitters = getDatasetOutputCommitters(outputs); try { if (success) { commitOutputs(datasetOutputCommitters); } else { // but this output committer failed: call onFailure() for all committers failOutputs(datasetOutputCommitters); } } finally { ClassLoaders.setContextClassLoader(oldClassLoader); } }
/** * Either calls onSuccess or onFailure on all of the DatasetOutputCommitters. */ private void finishDatasets(final JobContext jobContext, final boolean success) throws Exception { ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(jobContext.getConfiguration().getClassLoader()); Map<String, DatasetOutputCommitter> datasetOutputCommitters = getDatasetOutputCommitters(outputs); try { if (success) { commitOutputs(datasetOutputCommitters); } else { // but this output committer failed: call onFailure() for all committers failOutputs(datasetOutputCommitters); } } finally { ClassLoaders.setContextClassLoader(oldClassLoader); } }
/** * Submits the Spark job using {@link SparkSubmit}. * * @param runtimeContext context representing the Spark program * @param args arguments for the {@link SparkSubmit#main(String[])} method. */ private void submit(SparkRuntimeContext runtimeContext, String[] args) { ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(runtimeContext.getProgramInvocationClassLoader()); try { LOG.debug("Calling SparkSubmit for {} {}: {}", runtimeContext.getProgram().getId(), runtimeContext.getRunId(), Arrays.toString(args)); // Explicitly set the SPARK_SUBMIT property as it is no longer set on the System properties by the SparkSubmit // after the class rewrite. This property only control logging of a warning when submitting the Spark job, // hence it's harmless to just leave it there. System.setProperty("SPARK_SUBMIT", "true"); SparkSubmit.main(args); LOG.debug("SparkSubmit returned for {} {}", runtimeContext.getProgram().getId(), runtimeContext.getRunId()); } finally { ClassLoaders.setContextClassLoader(oldClassLoader); } }
/** * Submits the Spark job using {@link SparkSubmit}. * * @param runtimeContext context representing the Spark program * @param args arguments for the {@link SparkSubmit#main(String[])} method. */ private void submit(SparkRuntimeContext runtimeContext, String[] args) { ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(runtimeContext.getProgramInvocationClassLoader()); try { LOG.debug("Calling SparkSubmit for {} {}: {}", runtimeContext.getProgram().getId(), runtimeContext.getRunId(), Arrays.toString(args)); // Explicitly set the SPARK_SUBMIT property as it is no longer set on the System properties by the SparkSubmit // after the class rewrite. This property only control logging of a warning when submitting the Spark job, // hence it's harmless to just leave it there. System.setProperty("SPARK_SUBMIT", "true"); SparkSubmit.main(args); LOG.debug("SparkSubmit returned for {} {}", runtimeContext.getProgram().getId(), runtimeContext.getRunId()); } finally { ClassLoaders.setContextClassLoader(oldClassLoader); } }
/** * Submits the Spark job using {@link SparkSubmit}. * * @param runtimeContext context representing the Spark program * @param args arguments for the {@link SparkSubmit#main(String[])} method. */ private void submit(SparkRuntimeContext runtimeContext, String[] args) { ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(runtimeContext.getProgramInvocationClassLoader()); try { LOG.debug("Calling SparkSubmit for {} {}: {}", runtimeContext.getProgram().getId(), runtimeContext.getRunId(), Arrays.toString(args)); // Explicitly set the SPARK_SUBMIT property as it is no longer set on the System properties by the SparkSubmit // after the class rewrite. This property only control logging of a warning when submitting the Spark job, // hence it's harmless to just leave it there. System.setProperty("SPARK_SUBMIT", "true"); SparkSubmit.main(args); LOG.debug("SparkSubmit returned for {} {}", runtimeContext.getProgram().getId(), runtimeContext.getRunId()); } finally { ClassLoaders.setContextClassLoader(oldClassLoader); } }
@Override public void run(RunNotifier notifier) { // Set the context classloader to the test class classloader before running test ClassLoader cl = ClassLoaders.setContextClassLoader(getTestClass().getJavaClass().getClassLoader()); try { super.run(notifier); } finally { ClassLoaders.setContextClassLoader(cl); } }
@Override public void run(RunNotifier notifier) { // Set the context classloader to the test class classloader before running test ClassLoader cl = ClassLoaders.setContextClassLoader(getTestClass().getJavaClass().getClassLoader()); try { super.run(notifier); } finally { ClassLoaders.setContextClassLoader(cl); } }
/**
 * Verifies that Hadoop resources remain visible through a {@link FilterClassLoader} and that
 * standard Hadoop classes resolve to the same Class as the system-loaded ones.
 */
@Test
public void testHadoopResourcesVisible() throws ClassNotFoundException {
  FilterClassLoader classLoader = FilterClassLoader.create(this.getClass().getClassLoader());
  ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(classLoader);
  try {
    // VersionInfo relies on the context class loader to find the "common-version-info.properties" file.
    // If that file is missing or fails to be located, getVersion() returns "Unknown".
    Assert.assertNotEquals("Unknown", VersionInfo.getVersion());
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
  // Load a standard Hadoop class. It should pass, and the loaded class should be the same
  // Class object as the system Configuration class (i.e. delegated, not reloaded).
  Assert.assertSame(Configuration.class, classLoader.loadClass(Configuration.class.getName()));
}
// NOTE(review): this closing brace appears to end the enclosing test class, whose header is
// outside this chunk.
}