/** * Checks Snappy codec usage. * * @throws Exception On error. */ @Ignore("https://issues.apache.org/jira/browse/IGNITE-9920") @Test public void testSnappy() throws Throwable { // Run Snappy test in default class loader: checkSnappy(); // Run the same in several more class loaders simulating jobs and tasks: for (int i = 0; i < 2; i++) { ClassLoader hadoopClsLdr = new HadoopClassLoader(null, "cl-" + i, null, new HadoopHelperImpl()); Class<?> cls = (Class)Class.forName(HadoopSnappyTest.class.getName(), true, hadoopClsLdr); assertEquals(hadoopClsLdr, cls.getClassLoader()); U.invoke(cls, null, "checkSnappy"); } }
/**
 * Creates WordCount hadoop job for API v2.
 *
 * @param inFile Input file name for the job.
 * @param outFile Output file name for the job.
 * @return Hadoop job.
 * @throws Exception if fails.
 */
@Override public HadoopJobEx getHadoopJob(String inFile, String outFile) throws Exception {
    // NOTE(review): the previous version built and fully configured a separate
    // Job instance (output classes, task classes, paths, jar, file systems) and
    // then discarded it, passing HadoopWordCount2.getJob(...)'s configuration to
    // createJobInfo instead. That dead setup is removed here, and
    // setupFileSystems is applied to the configuration that is actually used —
    // mirroring the API v1 counterpart, which calls setupFileSystems on the
    // JobConf it hands to createJobInfo.
    Job hadoopJob = HadoopWordCount2.getJob(inFile, outFile);

    // Configure test file systems on the configuration the job info will use.
    setupFileSystems(hadoopJob.getConfiguration());

    HadoopDefaultJobInfo jobInfo = createJobInfo(hadoopJob.getConfiguration(), null);

    // Deterministic job ID for the test run.
    UUID uuid = new UUID(0, 0);

    HadoopJobId jobId = new HadoopJobId(uuid, 0);

    return jobInfo.createJob(HadoopV2Job.class, jobId, log, null, new HadoopHelperImpl());
}
// Instantiate the job through the v2 job implementation; the fourth argument is
// deliberately null here — NOTE(review): confirm its meaning against the
// HadoopJobInfo.createJob signature (not visible in this chunk).
HadoopJobEx job = info.createJob(HadoopV2Job.class, id, log, null, new HadoopHelperImpl());
/**
 * Creates WordCount hadoop job for API v1.
 *
 * @param inFile Input file name for the job.
 * @param outFile Output file name for the job.
 * @return Hadoop job.
 * @throws Exception If fails.
 */
@Override public HadoopJobEx getHadoopJob(String inFile, String outFile) throws Exception {
    // Build the v1 word-count job configuration and wire up the test file systems.
    JobConf cfg = HadoopWordCount1.getJob(inFile, outFile);

    setupFileSystems(cfg);

    HadoopDefaultJobInfo info = createJobInfo(cfg, null);

    // Deterministic job ID for the test run.
    HadoopJobId id = new HadoopJobId(new UUID(0, 0), 0);

    return info.createJob(HadoopV2Job.class, id, log, null, new HadoopHelperImpl());
}
// Instantiate the job from the request's job info, using the job class and ID
// carried by the request; the fourth argument is deliberately null here —
// NOTE(review): confirm its meaning against the HadoopJobInfo.createJob
// signature (not visible in this chunk).
job = req.jobInfo().createJob(jobCls, req.jobId(), log, null, new HadoopHelperImpl());