// Bridges the Hadoop new-API initialize hook onto the version-agnostic init(...) path.
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
    // Wrap the Hadoop context in the compatibility shim so old- and new-API code share one init path.
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext wrapped = CompatHandler.taskAttemptContext(context);
    // Surface the split description as the task status for progress reporting.
    wrapped.setStatus(split.toString());
    init((EsInputSplit) split, wrapped.getConfiguration(), wrapped);
}
/**
 * Creates one RecordWriter per configured new-API output format and bundles
 * them into a single multiplexing writer.
 *
 * Fix: if creating a later writer fails, the writers already opened are now
 * closed (best effort) before the exception propagates, so none is leaked.
 */
@Override
public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration());
    List<RecordWriter> writers = new ArrayList<RecordWriter>(formats.size());
    boolean success = false;
    try {
        for (OutputFormat format : formats) {
            writers.add(format.getRecordWriter(context));
        }
        success = true;
    }
    finally {
        if (!success) {
            // Best-effort cleanup of partially created writers; suppress secondary
            // failures so the original exception is the one reported.
            for (RecordWriter writer : writers) {
                try {
                    writer.close(context);
                }
                catch (Exception ignored) {
                    // cleanup only - nothing sensible to do here
                }
            }
        }
    }
    return new MultiNewRecordWriter(writers);
}
// Collects the committer of every configured new-API output format into one composite committer.
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputCommitter> delegates = new ArrayList<OutputCommitter>();
    for (OutputFormat outputFormat : getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        delegates.add(outputFormat.getOutputCommitter(context));
    }
    return new MultiNewOutputCommitter(delegates);
}
// New-API entry point: reuses the old-API getRecordWriter by converting the
// task configuration into a JobConf and casting the resulting writer back.
@Override
public org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(TaskAttemptContext context) {
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext compat = CompatHandler.taskAttemptContext(context);
    return (org.apache.hadoop.mapreduce.RecordWriter) getRecordWriter(null, HadoopCfgUtils.asJobConf(compat.getConfiguration()), null, context);
}
// Picks the JSON-emitting reader when the job requests raw JSON output, the Writable one otherwise.
@SuppressWarnings("unchecked")
@Override
public EsInputRecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context) {
    if (isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        return (EsInputRecordReader<K, V>) new JsonWritableEsInputRecordReader();
    }
    return (EsInputRecordReader<K, V>) new WritableEsInputRecordReader();
}
// Pig variant: returns the JSON-producing reader when raw JSON output is configured.
@SuppressWarnings("unchecked")
@Override
public AbstractPigEsInputRecordReader createRecordReader(InputSplit split, TaskAttemptContext context) {
    boolean jsonOutput = isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration());
    if (jsonOutput) {
        return new PigJsonEsInputRecordReader();
    }
    return new PigEsInputRecordReader();
}
/**
 * Creates one RecordWriter per configured new-API output format and bundles
 * them into a single multiplexing writer.
 *
 * Fix: if creating a later writer fails, the writers already opened are now
 * closed (best effort) before the exception propagates, so none is leaked.
 */
@Override
public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration());
    List<RecordWriter> writers = new ArrayList<RecordWriter>(formats.size());
    boolean success = false;
    try {
        for (OutputFormat format : formats) {
            writers.add(format.getRecordWriter(context));
        }
        success = true;
    }
    finally {
        if (!success) {
            // Best-effort cleanup of partially created writers; suppress secondary
            // failures so the original exception is the one reported.
            for (RecordWriter writer : writers) {
                try {
                    writer.close(context);
                }
                catch (Exception ignored) {
                    // cleanup only - nothing sensible to do here
                }
            }
        }
    }
    return new MultiNewRecordWriter(writers);
}
// Gathers one committer per configured new-API output format and wraps them in a composite.
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputCommitter> delegates = new ArrayList<OutputCommitter>();
    for (OutputFormat outputFormat : getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        delegates.add(outputFormat.getOutputCommitter(context));
    }
    return new MultiNewOutputCommitter(delegates);
}
// New-API initialize hook; funnels into the shared, version-agnostic init(...) routine.
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
    // The compat wrapper hides the old-API/new-API TaskAttemptContext differences.
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext shim = CompatHandler.taskAttemptContext(context);
    // Report the split being processed as the task status.
    shim.setStatus(split.toString());
    init((EsInputSplit) split, shim.getConfiguration(), shim);
}
// New-API initialize hook for shard-based splits; delegates to the shared init(...) routine.
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
    // Adapt the Hadoop context through the compatibility layer before touching it.
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext shim = CompatHandler.taskAttemptContext(context);
    // Publish the split description as the task status.
    shim.setStatus(split.toString());
    init((ShardInputSplit) split, shim.getConfiguration(), shim);
}
// Routes the new-API initialize call into the version-neutral init(...) implementation.
@Override
public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext adapted = CompatHandler.taskAttemptContext(context);
    // Make the split visible in the task status line.
    adapted.setStatus(split.toString());
    init((EsInputSplit) split, adapted.getConfiguration(), adapted);
}
// Builds a composite committer from the committers of all configured new-API output formats.
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputCommitter> delegates = new ArrayList<OutputCommitter>();
    for (OutputFormat outputFormat : getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        delegates.add(outputFormat.getOutputCommitter(context));
    }
    return new MultiNewOutputCommitter(delegates);
}
// Aggregates the per-format committers into a single multiplexing OutputCommitter.
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputCommitter> delegates = new ArrayList<OutputCommitter>();
    for (OutputFormat outputFormat : getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        delegates.add(outputFormat.getOutputCommitter(context));
    }
    return new MultiNewOutputCommitter(delegates);
}
/**
 * Creates one RecordWriter per configured new-API output format and bundles
 * them into a single multiplexing writer.
 *
 * Fix: if creating a later writer fails, the writers already opened are now
 * closed (best effort) before the exception propagates, so none is leaked.
 */
@Override
public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration());
    List<RecordWriter> writers = new ArrayList<RecordWriter>(formats.size());
    boolean success = false;
    try {
        for (OutputFormat format : formats) {
            writers.add(format.getRecordWriter(context));
        }
        success = true;
    }
    finally {
        if (!success) {
            // Best-effort cleanup of partially created writers; suppress secondary
            // failures so the original exception is the one reported.
            for (RecordWriter writer : writers) {
                try {
                    writer.close(context);
                }
                catch (Exception ignored) {
                    // cleanup only - nothing sensible to do here
                }
            }
        }
    }
    return new MultiNewRecordWriter(writers);
}
// Wraps each configured output format's committer inside one composite committer.
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputCommitter> delegates = new ArrayList<OutputCommitter>();
    for (OutputFormat outputFormat : getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        delegates.add(outputFormat.getOutputCommitter(context));
    }
    return new MultiNewOutputCommitter(delegates);
}
/**
 * Creates one RecordWriter per configured new-API output format and bundles
 * them into a single multiplexing writer.
 *
 * Fix: if creating a later writer fails, the writers already opened are now
 * closed (best effort) before the exception propagates, so none is leaked.
 */
@Override
public RecordWriter getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    List<OutputFormat> formats = getNewApiFormats(CompatHandler.taskAttemptContext(context).getConfiguration());
    List<RecordWriter> writers = new ArrayList<RecordWriter>(formats.size());
    boolean success = false;
    try {
        for (OutputFormat format : formats) {
            writers.add(format.getRecordWriter(context));
        }
        success = true;
    }
    finally {
        if (!success) {
            // Best-effort cleanup of partially created writers; suppress secondary
            // failures so the original exception is the one reported.
            for (RecordWriter writer : writers) {
                try {
                    writer.close(context);
                }
                catch (Exception ignored) {
                    // cleanup only - nothing sensible to do here
                }
            }
        }
    }
    return new MultiNewRecordWriter(writers);
}
// New-API bridge: delegates to the old-API getRecordWriter after translating
// the task configuration into a JobConf, then casts the writer back.
@Override
public org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(TaskAttemptContext context) {
    org.elasticsearch.hadoop.mr.compat.TaskAttemptContext compat = CompatHandler.taskAttemptContext(context);
    return (org.apache.hadoop.mapreduce.RecordWriter) getRecordWriter(null, HadoopCfgUtils.asJobConf(compat.getConfiguration()), null, context);
}
// Chooses between the JSON and Writable Pig readers based on the job's output format setting.
@SuppressWarnings("unchecked")
@Override
public AbstractPigEsInputRecordReader createRecordReader(InputSplit split, TaskAttemptContext context) {
    boolean jsonOutput = isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration());
    if (jsonOutput) {
        return new PigJsonEsInputRecordReader();
    }
    return new PigEsInputRecordReader();
}
// Returns a JSON-emitting reader when raw JSON output is configured; a Writable reader otherwise.
@SuppressWarnings("unchecked")
@Override
public EsInputRecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context) {
    if (isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        return (EsInputRecordReader<K, V>) new JsonWritableEsInputRecordReader();
    }
    return (EsInputRecordReader<K, V>) new WritableEsInputRecordReader();
}
// Shard variant: selects the JSON reader when the job asks for raw JSON output.
@SuppressWarnings("unchecked")
@Override
public ShardRecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context) {
    if (isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        return (ShardRecordReader<K, V>) new JsonWritableShardRecordReader();
    }
    return (ShardRecordReader<K, V>) new WritableShardRecordReader();
}