info("Setting hadoop jar file for class:" + getClass() + " to " + conf.getJar()); info("*************************************************************************"); info(" Running on Real Hadoop Cluster(" + conf.get("mapred.job.tracker")
private void initialize(Class<?> cls) { hiveJar = (new JobConf(cls)).getJar();
/**
 * Refreshes the local resources (jars, files, archives) that must be shipped
 * for the given Spark work, and records them in {@code conf}.
 *
 * <p>Statement order matters: {@code jobConf} is a point-in-time copy of
 * {@code conf}, and each {@code HiveConf.setVar} mutates {@code conf} before
 * the corresponding add call reads the value back.
 *
 * @param sparkWork the work whose per-work jar requirements are collected
 * @param conf the session configuration, mutated in place
 * @throws IOException if adding a resource fails
 */
private synchronized void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException {
  // add hive-exec jar (the jar that contains this class)
  addJars((new JobConf(this.getClass())).getJar());
  // add aux jars
  addJars(conf.getAuxJars());
  addJars(SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars());
  // add added jars
  String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDJARS, addedJars);
  addJars(addedJars);
  // add plugin module jars on demand
  // jobConf will hold all the configuration for hadoop, tez, and hive
  JobConf jobConf = new JobConf(conf);
  // clear the property first so only jars contributed by configureJobConf below are picked up
  jobConf.set(MR_JAR_PROPERTY, "");
  for (BaseWork work : sparkWork.getAllWork()) {
    work.configureJobConf(jobConf);
  }
  // read from jobConf, not conf: configureJobConf recorded the jars on the copy
  addJars(jobConf.get(MR_JAR_PROPERTY));
  // remove the location of container tokens
  conf.unset(MR_CREDENTIALS_LOCATION_PROPERTY);
  // add added files
  String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDFILES, addedFiles);
  addResources(addedFiles);
  // add added archives
  String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDARCHIVES, addedArchives);
  addResources(addedArchives);
}
// Ship the jar that contains this class (resolved via JobConf's jar lookup).
addJars((new JobConf(this.getClass())).getJar());
// Map-only job (zero reducers below), so values are NullWritable.
job.setOutputValueClass(NullWritable.class);
// Resolve the job jar from the jar that contains CompactorMR.
job.setJarByClass(CompactorMR.class);
LOG.debug("User jar set to " + job.getJar());
job.setMapperClass(CompactorMap.class);
// Compaction output is produced entirely by the mappers.
job.setNumReduceTasks(0);
// Map-only job (zero reducers below), so values are NullWritable.
job.setOutputValueClass(NullWritable.class);
// Resolve the job jar from the jar that contains CompactorMR.
job.setJarByClass(CompactorMR.class);
LOG.debug("User jar set to " + job.getJar());
job.setMapperClass(CompactorMap.class);
// Compaction output is produced entirely by the mappers.
job.setNumReduceTasks(0);
private void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException { // add hive-exec jar addJars((new JobConf(this.getClass())).getJar()); // add aux jars addJars(conf.getAuxJars()); addJars(SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars()); // add added jars String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR); HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDJARS, addedJars); addJars(addedJars); // add plugin module jars on demand // jobConf will hold all the configuration for hadoop, tez, and hive JobConf jobConf = new JobConf(conf); jobConf.set(MR_JAR_PROPERTY, ""); for (BaseWork work : sparkWork.getAllWork()) { work.configureJobConf(jobConf); } addJars(conf.get(MR_JAR_PROPERTY)); // add added files String addedFiles = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE); HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDFILES, addedFiles); addResources(addedFiles); // add added archives String addedArchives = Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE); HiveConf.setVar(conf, HiveConf.ConfVars.HIVEADDEDARCHIVES, addedArchives); addResources(addedArchives); }
// Ship the jar that contains this class (resolved via JobConf's jar lookup).
addJars((new JobConf(this.getClass())).getJar());
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
/**
 * Gets the pathname of the job's jar by delegating to the wrapped
 * configuration.
 *
 * @return the pathname of the job's jar; presumably {@code null} when no jar
 *         has been set — confirm against the underlying conf implementation
 */
public String getJar() {
  return conf.getJar();
}
conf.getStringCollection("tmpjars"), conf.getStringCollection("tmparchives"), conf.getJar(), statCache); Assert.assertTrue("Limits check succeeded when it should have failed.", checkShouldSucceed);
private static ClassLoader makeClassLoader(JobConf conf, File workDir) throws IOException { List<URL> cp = new ArrayList<URL>(); String jar = conf.getJar(); if (jar != null) { // if jar exists, it into workDir File[] libs = new File(workDir, "lib").listFiles(); if (libs != null) { for (int i = 0; i < libs.length; i++) { cp.add(new URL("file:" + libs[i].toString())); } } cp.add(new URL("file:" + new File(workDir, "classes/").toString())); cp.add(new URL("file:" + workDir.toString() + "/")); } return new URLClassLoader(cp.toArray(new URL[cp.size()])); }
private ClassLoader makeClassLoader(JobConf conf, File workDir) throws IOException { List<String> classPaths = new ArrayList<String>(); // Add jar clas files (includes lib/* and classes/*) String jar = conf.getJar(); if (jar != null) { TaskRunner.appendJobJarClasspaths(conf.getJar(), classPaths); } // Add the workdir, too. classPaths.add(workDir.toString()); // Note: TaskRunner.run() does more, including DistributedCache files. // Convert to URLs URL[] urls = new URL[classPaths.size()]; for (int i = 0; i < classPaths.size(); ++i) { urls[i] = new File(classPaths.get(i)).toURL(); } return new URLClassLoader(urls); }
/**
 * Assembles the child task's classpath: the parent's system classpath, the
 * job jar's entries, distributed-cache entries, and finally the working
 * directory. Entry order is significant — it is the class lookup order.
 *
 * @param conf the job configuration supplying the jar path
 * @param workDir the task working directory, always appended last
 * @param taskDistributedCacheManager source of distributed-cache classpath entries
 * @return the accumulated classpath entries, in lookup order
 * @throws IOException if classpath collection fails
 */
private static List<String> getClassPaths(JobConf conf, File workDir, TaskDistributedCacheManager taskDistributedCacheManager) throws IOException {
  // Accumulates class paths for child.
  List<String> classPaths = new ArrayList<String>();
  // start with same classpath as parent process
  appendSystemClasspaths(classPaths);
  // include the user specified classpath
  appendJobJarClasspaths(conf.getJar(), classPaths);
  // Distributed cache paths
  classPaths.addAll(taskDistributedCacheManager.getClassPaths());
  // Include the working dir too
  classPaths.add(workDir.toString());
  return classPaths;
}