private static void newApiCounter(TaskInputOutputContext tioc, Enum<?> counter, long value) {
    try {
        org.apache.hadoop.mapreduce.Counter c = tioc.getCounter(counter);
        if (c != null) {
            CompatHandler.counter(c).increment(value);
        }
    } catch (Exception ex) {
        // counter unavailable
    }
}
public static CounterGroup counterGroup(org.apache.hadoop.mapreduce.CounterGroup target) {
    return proxy(target, CounterGroup.class);
}
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext compatContext = CompatHandler.taskAttemptContext(context);
    compatContext.setStatus(split.toString());
    init((EsInputSplit) split, compatContext.getConfiguration(), compatContext);
}
@Override
public org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(TaskAttemptContext context) {
    return (org.apache.hadoop.mapreduce.RecordWriter) getRecordWriter(null,
            HadoopCfgUtils.asJobConf(CompatHandler.taskAttemptContext(context).getConfiguration()), null, context);
}
@Override
public void checkOutputSpecs(JobContext context) throws IOException {
    // careful - the info saved here in the config seems to be discarded
    init(CompatHandler.jobContext(context).getConfiguration());
}
@SuppressWarnings({ "rawtypes" }) static void report(Progressable progressable, Stats stats) { progressable = (Progressable) CompatHandler.unwrap(progressable); if (progressable == null || progressable == Reporter.NULL) { return; } if (progressable instanceof Reporter) { Reporter reporter = (Reporter) progressable; for (Counter count : Counter.ALL) { oldApiCounter(reporter, count, count.get(stats)); } } if (progressable instanceof org.apache.hadoop.mapreduce.TaskInputOutputContext) { TaskInputOutputContext compatTioc = CompatHandler.taskInputOutputContext((org.apache.hadoop.mapreduce.TaskInputOutputContext) progressable); for (Counter count : Counter.ALL) { newApiCounter(compatTioc, count, count.get(stats)); } } }
// returns the underlying Hadoop object if the argument is one of the compat proxies, otherwise the argument itself
public static Object unwrap(Object object) {
    if (object instanceof CompatProxy) {
        return ((ReflectiveInvoker) Proxy.getInvocationHandler(object)).target();
    }
    return object;
}
@SuppressWarnings("unchecked") private static <P, T> P proxy(T target, Class<P> proxy) { return (P) Proxy.newProxyInstance(proxy.getClassLoader(), new Class[] { CompatProxy.class, proxy }, new ReflectiveInvoker(target)); }
@SuppressWarnings("unchecked") @Override public EsInputRecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context) { return (EsInputRecordReader<K, V>) (isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration()) ? new JsonWritableEsInputRecordReader() : new WritableEsInputRecordReader()); }
@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.jobContext(context).getConfiguration());
    for (OutputFormat format : formats) {
        format.checkOutputSpecs(context);
    }
}
public static ReduceContext reduceContext(org.apache.hadoop.mapreduce.ReduceContext target) {
    return proxy(target, ReduceContext.class);
}
@SuppressWarnings("unchecked") @Override public AbstractPigEsInputRecordReader createRecordReader(InputSplit split, TaskAttemptContext context) { return isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration()) ? new PigJsonEsInputRecordReader() : new PigEsInputRecordReader(); }
@Override
public List<InputSplit> getSplits(JobContext context) throws IOException {
    JobConf conf = HadoopCfgUtils.asJobConf(CompatHandler.jobContext(context).getConfiguration());
    // NOTE: this method expects a ShardInputSplit to be returned (which implements both the old and the new API)
    return Arrays.asList((InputSplit[]) getSplits(conf, conf.getNumMapTasks()));
}
public static JobContext jobContext(org.apache.hadoop.mapreduce.JobContext target) {
    return proxy(target, JobContext.class);
}
@Override
public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration());
    List<RecordWriter> writers = new ArrayList<RecordWriter>();
    for (OutputFormat format : formats) {
        writers.add(format.getRecordWriter(context));
    }
    return new MultiNewRecordWriter(writers);
}
public static Counter counter(org.apache.hadoop.mapreduce.Counter target) {
    return proxy(target, Counter.class);
}
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration());
    List<OutputCommitter> committers = new ArrayList<OutputCommitter>();
    for (OutputFormat format : formats) {
        committers.add(format.getOutputCommitter(context));
    }
    return new MultiNewOutputCommitter(committers);
}
public static MapContext mapContext(org.apache.hadoop.mapreduce.MapContext target) {
    return proxy(target, MapContext.class);
}
public static TaskAttemptContext taskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext target) {
    return proxy(target, TaskAttemptContext.class);
}
public static TaskInputOutputContext taskInputOutputContext(org.apache.hadoop.mapreduce.TaskInputOutputContext target) {
    return proxy(target, TaskInputOutputContext.class);
}