/** * Common initialization code for RecordProcessors * @param mrReporter * @param inputs map of Input names to {@link LogicalInput}s * @param outputs map of Output names to {@link LogicalOutput}s * @throws Exception */ void init(MRTaskReporter mrReporter, Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs) throws Exception { this.reporter = mrReporter; this.inputs = inputs; this.outputs = outputs; checkAbortCondition(); //log classpaths try { if (l4j.isDebugEnabled()) { l4j.debug("conf classpath = " + Arrays.asList(((URLClassLoader) jconf.getClassLoader()).getURLs())); l4j.debug("thread classpath = " + Arrays.asList(((URLClassLoader) Thread.currentThread() .getContextClassLoader()).getURLs())); } } catch (Exception e) { l4j.info("cannot get classpath: " + e.getMessage()); } }
/**
 * Initializes the operator pipeline for a map/reduce task: records the job
 * configuration, output collector and reporter, sets up the MapredContext,
 * and logs memory and classpath diagnostics.
 *
 * @param job job configuration
 * @param output collector that receives this task's output records
 * @param reporter progress reporter for the task
 * @throws Exception if initialization fails
 */
public <K, V> void init(JobConf job, OutputCollector<K, V> output, Reporter reporter)
    throws Exception {
  jc = job;
  MapredContext.init(false, new JobConf(jc));
  MapredContext.get().setReporter(reporter);
  oc = output;
  rp = reporter;

  LOG.info("maximum memory = " + memoryMXBean.getHeapMemoryUsage().getMax());

  // Best-effort classpath logging: the casts throw when the loaders are not
  // URLClassLoaders, and we fall through to the message below.
  try {
    ClassLoader confLoader = job.getClassLoader();
    LOG.info("conf classpath = " + Arrays.asList(((URLClassLoader) confLoader).getURLs()));

    ClassLoader ctxLoader = Thread.currentThread().getContextClassLoader();
    LOG.info("thread classpath = " + Arrays.asList(((URLClassLoader) ctxLoader).getURLs()));
  } catch (Exception e) {
    LOG.info("cannot get classpath: " + e.getMessage());
  }
}
/**
 * Initializes the operator pipeline for a map/reduce task: records the job
 * configuration, output collector and reporter, sets up the MapredContext,
 * triggers memory-info logging, and logs classpath diagnostics.
 *
 * @param job job configuration
 * @param output collector that receives this task's output records
 * @param reporter progress reporter for the task
 * @throws Exception if initialization fails
 */
public <K, V> void init(JobConf job, OutputCollector<K, V> output, Reporter reporter)
    throws Exception {
  jc = job;
  MapredContext.init(false, new JobConf(jc));
  MapredContext.get().setReporter(reporter);
  oc = output;
  rp = reporter;

  LOG.info("maximum memory = " + memoryMXBean.getHeapMemoryUsage().getMax());

  MemoryInfoLogger memoryInfoLogger = new MemoryInfoLogger();
  memoryInfoLogger.run();

  // Fix: the previous code cast both loaders to URLClassLoader unconditionally
  // and relied on catching the resulting ClassCastException — exceptions used
  // as control flow. On Java 9+ the application class loader is no longer a
  // URLClassLoader, so check with instanceof instead; the catch remains as a
  // best-effort guard for any other failure.
  try {
    logClassPath("conf classpath", job.getClassLoader());
    logClassPath("thread classpath", Thread.currentThread().getContextClassLoader());
  } catch (Exception e) {
    LOG.info("cannot get classpath: " + e.getMessage());
  }
}

/** Logs the URLs of {@code loader} under {@code label} when it exposes them. */
private void logClassPath(String label, ClassLoader loader) {
  if (loader instanceof URLClassLoader) {
    LOG.info(label + " = " + Arrays.asList(((URLClassLoader) loader).getURLs()));
  } else {
    LOG.info(label + " unavailable: class loader is not a URLClassLoader");
  }
}
/** * Common initialization code for RecordProcessors * @param mrReporter * @param inputs map of Input names to {@link LogicalInput}s * @param outputs map of Output names to {@link LogicalOutput}s * @throws Exception */ void init(MRTaskReporter mrReporter, Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs) throws Exception { this.reporter = mrReporter; this.inputs = inputs; this.outputs = outputs; isLogInfoEnabled = l4j.isInfoEnabled(); isLogTraceEnabled = l4j.isTraceEnabled(); checkAbortCondition(); //log classpaths try { if (l4j.isDebugEnabled()) { l4j.debug("conf classpath = " + Arrays.asList(((URLClassLoader) jconf.getClassLoader()).getURLs())); l4j.debug("thread classpath = " + Arrays.asList(((URLClassLoader) Thread.currentThread() .getContextClassLoader()).getURLs())); } } catch (Exception e) { l4j.info("cannot get classpath: " + e.getMessage()); } }
/**
 * Set working directory in local file system.
 *
 * @param dir Working directory.
 * @throws IOException If fails.
 */
private void setLocalFSWorkingDirectory(File dir) throws IOException {
    JobConf jobCfg = ctx.getJobConf();

    // Perform the update with the job's class loader installed as the thread
    // context loader; the previous loader is restored in the finally block.
    ClassLoader prevLdr = HadoopCommonUtils.setContextClassLoader(jobCfg.getClassLoader());

    try {
        String dirPath = dir.getAbsolutePath();

        jobCfg.set(HadoopFileSystemsUtils.LOC_FS_WORK_DIR_PROP, dirPath);

        // Only touch the cached local FS when FS caching is enabled.
        boolean cachingDisabled = jobCfg.getBoolean(FILE_DISABLE_CACHING_PROPERTY_NAME, false);

        if (!cachingDisabled)
            FileSystem.getLocal(jobCfg).setWorkingDirectory(new Path(dirPath));
    }
    finally {
        HadoopCommonUtils.restoreContextClassLoader(prevLdr);
    }
}
try { LOG.info("conf classpath = " + Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs())); LOG.info("thread classpath = " + Arrays.asList(((URLClassLoader) Thread.currentThread()
try { LOG.info("conf classpath = " + Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs())); LOG.info("thread classpath = " + Arrays.asList(((URLClassLoader) Thread.currentThread()
ClassLoader loader = conf.getClassLoader(); if (StringUtils.isNotBlank(libjars)) { loader = Utilities.addToClassPath(loader, StringUtils.split(libjars, ","));
+ Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs())); l4j.info("thread classpath = " + Arrays.asList(((URLClassLoader) Thread.currentThread()
+ Arrays.asList(((URLClassLoader) job.getClassLoader()).getURLs())); l4j.info("thread classpath = " + Arrays.asList(((URLClassLoader) Thread.currentThread()
ClassLoader loader = conf.getClassLoader(); if (StringUtils.isNotBlank(libjars)) { loader = Utilities.addToClassPath(loader, StringUtils.split(libjars, ","));
ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf.getClassLoader());
/** {@inheritDoc} */
@Override public void prepareTaskEnvironment() throws IgniteCheckedException {
    File locDir;

    // Map/reduce tasks get a per-task local directory; all other task types
    // fall through to the shared per-job directory.
    switch(taskInfo().type()) {
        case MAP:
        case REDUCE:
            job().prepareTaskEnvironment(taskInfo());

            locDir = taskLocalDir(job.igniteWorkDirectory(), locNodeId, taskInfo());

            break;

        default:
            locDir = jobLocalDir(job.igniteWorkDirectory(), locNodeId, taskInfo().jobId());
    }

    // Run the file-system setup with the job's class loader as the thread
    // context loader, restoring the previous one afterwards.
    ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf().getClassLoader());

    try {
        // NOTE(review): the result is discarded — presumably this pre-initializes
        // the FileSystem cache under the job's class loader; confirm.
        FileSystem.get(jobConf());

        LocalFileSystem locFs = FileSystem.getLocal(jobConf());

        locFs.setWorkingDirectory(new Path(locDir.getAbsolutePath()));
    }
    catch (Throwable e) {
        // Errors propagate untouched; everything else is converted to the
        // checked exception type declared by this method.
        if (e instanceof Error)
            throw (Error)e;

        throw transformException(e);
    }
    finally {
        HadoopCommonUtils.restoreContextClassLoader(oldLdr);
    }
}
/** {@inheritDoc} */
@Override public void run() throws IgniteCheckedException {
    // Execute the task with the job's class loader installed as the thread
    // context loader; the previous loader is restored in the finally block.
    ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf().getClassLoader());

    try {
        try {
            task = createTask();
        }
        catch (Throwable e) {
            // Errors propagate untouched; everything else is converted to the
            // checked exception type declared by this method.
            if (e instanceof Error)
                throw e;

            throw transformException(e);
        }

        // Cancellation may have been requested while the task was being created.
        if (cancelled)
            throw new HadoopTaskCancelledException("Task cancelled.");

        try {
            task.run(this);
        }
        catch (Throwable e) {
            if (e instanceof Error)
                throw e;

            throw transformException(e);
        }
    }
    finally {
        // Drop the task reference regardless of outcome, then restore the loader.
        task = null;

        HadoopCommonUtils.restoreContextClassLoader(oldLdr);
    }
}
@SuppressWarnings("unchecked") public RR_ClassLoaderChecker(JobConf job) { assertTrue("The class loader has not been inherited from " + CompositeRecordReader.class.getSimpleName(), job.getClassLoader() instanceof Fake_ClassLoader); keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass", NullWritable.class, WritableComparable.class); valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass", NullWritable.class, WritableComparable.class); }
/**
 * Builds a copy of the given job configuration pointed at the test input
 * directory, preserving the caller's class loader.
 */
private JobConf getConf(JobConf job) {
    JobConf copy = new JobConf(job);
    copy.setClassLoader(job.getClassLoader());
    FileInputFormat.setInputPaths(copy, indir);
    return copy;
}
/**
 * Builds a copy of the given job configuration pointed at the test input
 * directory, preserving the caller's class loader.
 */
private JobConf getConf(JobConf job) {
    JobConf copy = new JobConf(job);
    copy.setClassLoader(job.getClassLoader());
    FileInputFormat.setInputPaths(copy, indir);
    return copy;
}
/**
 * Builds a copy of the given job configuration pointed at the test input
 * directory, preserving the caller's class loader.
 */
private JobConf getConf(JobConf job) {
    JobConf copy = new JobConf(job);
    copy.setClassLoader(job.getClassLoader());
    FileInputFormat.setInputPaths(copy, indir);
    return copy;
}
/**
 * Builds a copy of the given job configuration pointed at the test input
 * directory, preserving the caller's class loader.
 */
private JobConf getConf(JobConf job) {
    JobConf copy = new JobConf(job);
    copy.setClassLoader(job.getClassLoader());
    FileInputFormat.setInputPaths(copy, indir);
    return copy;
}
@SuppressWarnings("unchecked") public RR_ClassLoaderChecker(JobConf job) { assertTrue("The class loader has not been inherited from " + CompositeRecordReader.class.getSimpleName(), job.getClassLoader() instanceof Fake_ClassLoader); keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass", NullWritable.class, WritableComparable.class); valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass", NullWritable.class, WritableComparable.class); }