/**
 * {@inheritDoc}
 *
 * Delegates to the wrapped Hadoop {@code reader} when one is present;
 * otherwise falls back to the task input's current key.
 */
@Override public Object getCurrentKey() throws IOException, InterruptedException {
    return reader == null ? input.key() : reader.getCurrentKey();
}
/**
 * Verifies that the contents of the multimap {@code m}, as seen through its
 * task input, exactly match the expected guava multimap {@code mm}.
 *
 * Fix: the input is now closed in a {@code finally} block — previously a
 * failing assertion would skip {@code in.close()} and leak the input.
 *
 * @param m Hadoop hash multimap under test.
 * @param mm Expected key/value pairs.
 * @param taskCtx Task context used to open the input.
 * @throws Exception If the check or closing the input fails.
 */
private void check(HadoopHashMultimap m, Multimap<Integer, Integer> mm, HadoopTaskContext taskCtx) throws Exception {
    final HadoopTaskInput in = m.input(taskCtx);

    try {
        Map<Integer, Collection<Integer>> mmm = mm.asMap();

        int keys = 0;

        // Walk every key the input exposes and compare its value set
        // against the expected multimap.
        while (in.next()) {
            keys++;

            IntWritable k = (IntWritable)in.key();

            assertNotNull(k);

            ArrayList<Integer> vs = new ArrayList<>();

            Iterator<?> it = in.values();

            while (it.hasNext())
                vs.add(((IntWritable) it.next()).get());

            Collection<Integer> exp = mmm.get(k.get());

            // Order of values is unspecified — compare sorted copies.
            assertEquals(sorted(exp), sorted(vs));
        }

        X.println("keys: " + keys + " cap: " + m.capacity());

        // Every expected key must have been seen exactly once.
        assertEquals(mmm.size(), keys);
        assertEquals(m.keys(), keys);
    }
    finally {
        in.close();
    }
}
// NOTE(review): the four lines below are detached statement fragments — their
// enclosing methods/loops are not visible in this chunk. They appear to be
// loop-body lines that pull the current key from a HadoopTaskInput and (in the
// first two) count distinct keys; presumably duplicates from different test
// methods — verify against the full file before changing.
keys++; IntWritable k = (IntWritable)in.key();
keys++; IntWritable k = (IntWritable)in.key();
IntWritable key = (IntWritable)in.key();
IntWritable key = (IntWritable) in.key();
// NOTE(review): detached fragment — the guard condition for this throw is
// outside this chunk (presumably a cancellation check such as
// `if (taskCtx.isCancelled())`); as written standalone, the reduce call would
// be unreachable. Verify against the full file. The reduce invocation feeds
// the current key and its values to the user reducer with a no-op Reporter.
throw new HadoopTaskCancelledException("Reduce task cancelled."); reducer.reduce(input.key(), input.values(), collector, Reporter.NULL);