/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Recreate the job ID and let it restore its own state from the stream
    // (mirrors writeExternal, which delegates to HadoopJobId as well).
    HadoopJobId id = new HadoopJobId();

    id.readExternal(in);

    jobId = id;
}
/** {@inheritDoc} */
@Override public boolean equals(Object o) {
    if (o == this)
        return true;

    if (!(o instanceof HadoopTaskInfo))
        return false;

    HadoopTaskInfo other = (HadoopTaskInfo)o;

    // Cheap primitive and reference comparisons first, object equality last.
    return taskNum == other.taskNum && attempt == other.attempt && type == other.type &&
        jobId.equals(other.jobId);
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // Delegate serialization to the job ID itself (mirrors readExternal).
    jobId.writeExternal(out);
}
/** {@inheritDoc} */
@Override public JobID getNewJobID() throws IOException, InterruptedException {
    try {
        // Stamp the request time right before asking the cluster for a new ID.
        conf.setLong(HadoopCommonUtils.REQ_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis());

        HadoopJobId res = execute(HadoopProtocolNextTaskIdTask.class);

        // Stamp the response time right after the remote call completes.
        conf.setLong(HadoopCommonUtils.RESPONSE_NEW_JOBID_TS_PROPERTY, U.currentTimeMillis());

        // Convert the internal Ignite job ID into a Hadoop JobID.
        return new JobID(res.globalId().toString(), res.localId());
    }
    catch (GridClientException e) {
        throw new IOException("Failed to get new job ID.", e);
    }
}
/** {@inheritDoc} */
@Override public HadoopJobId nextJobId() {
    // Job ID is unique cluster-wide: local node ID plus a per-node incrementing counter.
    return new HadoopJobId(ctx.localNodeId(), idCtr.incrementAndGet());
}
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
    writer.setBuffer(buf);

    // Write the message header exactly once; bail out if the buffer is full
    // so the caller can retry with more space.
    if (!writer.isHeaderWritten()) {
        if (!writer.writeHeader(directType(), fieldsCount()))
            return false;

        writer.onHeaderWritten();
    }

    // NOTE: intentional fall-through between cases — writer.state() tracks the
    // last fully written field so a partial write resumes where it stopped.
    switch (writer.state()) {
        case 0:
            if (!writer.writeInt("jobId", jobId))
                return false;

            writer.incrementState();

        case 1:
            if (!writer.writeUuid("nodeId", nodeId))
                return false;

            writer.incrementState();

    }

    return true;
}
/** {@inheritDoc} */
@Override public int hashCode() {
    // Standard 31-based combination over the same fields equals() compares:
    // type, jobId, taskNum and attempt.
    return ((type.hashCode() * 31 + jobId.hashCode()) * 31 + taskNum) * 31 + attempt;
}
/** {@inheritDoc} */
@Override public Boolean run(ComputeJobContext jobCtx, Hadoop hadoop, HadoopProtocolTaskArguments args)
    throws IgniteCheckedException {
    // Arguments: [0] - submitter node ID (string form), [1] - local job counter.
    UUID nodeId = UUID.fromString(args.<String>get(0));
    Integer id = args.get(1);

    assert nodeId != null;
    assert id != null;

    return hadoop.kill(new HadoopJobId(nodeId, id));
} }
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
    writer.setBuffer(buf);

    // Emit the header once; a false return means the buffer filled up and the
    // caller should invoke writeTo again with a fresh buffer.
    if (!writer.isHeaderWritten()) {
        if (!writer.writeHeader(directType(), fieldsCount()))
            return false;

        writer.onHeaderWritten();
    }

    // NOTE: intentional fall-through — the writer state records the last field
    // that was fully written, so serialization resumes mid-message.
    switch (writer.state()) {
        case 0:
            if (!writer.writeInt("jobId", jobId))
                return false;

            writer.incrementState();

        case 1:
            if (!writer.writeUuid("nodeId", nodeId))
                return false;

            writer.incrementState();

    }

    return true;
}
/** {@inheritDoc} */
@Override public int hashCode() {
    // Combine the same fields equals() relies on, using the conventional
    // multiplier 31: type, jobId, taskNum and attempt.
    return ((type.hashCode() * 31 + jobId.hashCode()) * 31 + taskNum) * 31 + attempt;
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Stream layout (must match writeExternal): job ID first, then the message counter.
    HadoopJobId id = new HadoopJobId();

    id.readExternal(in);

    jobId = id;
    msgCnt = in.readLong();
}
/** {@inheritDoc} */
@Override public HadoopCounters run(ComputeJobContext jobCtx, Hadoop hadoop, HadoopProtocolTaskArguments args)
    throws IgniteCheckedException {
    // Arguments: [0] - submitter node ID (string form), [1] - local job counter.
    UUID nodeId = UUID.fromString(args.<String>get(0));
    Integer id = args.get(1);

    assert nodeId != null;
    assert id != null;

    HadoopJobId jobId = new HadoopJobId(nodeId, id);

    return hadoop.counters(jobId);
} }
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // Keep in sync with readExternal: job ID first, then the message counter.
    jobId.writeExternal(out);
    out.writeLong(msgCnt);
}
@Override public void apply(IgniteInternalFuture<?> gridFut) {
    assert initGuard.get();
    assert req.jobId().equals(job.id());

    // Reducer addresses may be absent in the request; nothing to send in that case.
    if (req.reducersAddresses() != null) {
        if (shuffleJob.initializeReduceAddresses(req.reducersAddresses())) {
            // Start pushing shuffle output to remote reducers through the
            // external communication channel.
            shuffleJob.startSending("external",
                new IgniteInClosure2X<HadoopProcessDescriptor, HadoopMessage>() {
                    @Override public void applyx(HadoopProcessDescriptor dest, HadoopMessage msg)
                        throws IgniteCheckedException {
                        comm.sendMessage(dest, msg);
                    }
                });
        }
    }
}
});
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Stream layout (must match writeExternal): job ID first, then the message ID.
    HadoopJobId id = new HadoopJobId();

    id.readExternal(in);

    jobId = id;
    msgId = in.readLong();
}
/**
 * Checks that an exception thrown from a mapper fails the whole job.
 *
 * @throws Exception If failed.
 */
@Test
public void testMapperException() throws Exception {
    prepareFile("/testFile", 1000);

    Configuration conf = new Configuration();

    conf.setStrings("fs.igfs.impl", IgniteHadoopFileSystem.class.getName());

    Job job = Job.getInstance(conf);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    // Map-only job (zero reducers) with a mapper that always fails.
    job.setMapperClass(FailMapper.class);
    job.setNumReduceTasks(0);

    job.setInputFormatClass(TextInputFormat.class);

    FileInputFormat.setInputPaths(job, new Path("igfs://" + igfsName + "@/"));
    FileOutputFormat.setOutputPath(job, new Path("igfs://" + igfsName + "@/output/"));

    job.setJarByClass(getClass());

    final IgniteInternalFuture<?> fut = grid(0).hadoop().submit(
        new HadoopJobId(UUID.randomUUID(), 3),
        createJobInfo(job.getConfiguration(), null));

    // The future must complete exceptionally because the mapper throws.
    GridTestUtils.assertThrows(log, new Callable<Object>() {
        @Override public Object call() throws Exception {
            fut.get();

            return null;
        }
    }, IgniteCheckedException.class, null);
}
/** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { jobId.writeExternal(out); out.writeLong(msgId); }