/**
 * Old (mapred) API factory for record readers.
 *
 * Chooses the reader flavor based on the job configuration: raw JSON output
 * versus documents materialized as Writable objects.
 */
@SuppressWarnings("unchecked")
@Override
public EsInputRecordReader<K, V> getRecordReader(org.apache.hadoop.mapred.InputSplit split, JobConf job, Reporter reporter) {
    if (isOutputAsJson(job)) {
        return (EsInputRecordReader<K, V>) new JsonWritableEsInputRecordReader(split, job, reporter);
    }
    return (EsInputRecordReader<K, V>) new WritableEsInputRecordReader(split, job, reporter);
}
// Factory hook for deserialized ES documents: maps are materialized as
// LinkedMapWritable, presumably so the field order of the JSON source is
// preserved -- confirm against LinkedMapWritable's backing map.
@SuppressWarnings("rawtypes")
@Override
public Map createMap() {
    return new LinkedMapWritable();
}
// Exposes the wrapped Hadoop configuration ('cfg') as plain
// java.util.Properties; the conversion itself is delegated to HadoopCfgUtils.
@Override
public Properties asProperties() {
    return HadoopCfgUtils.asProperties(cfg);
}
}
/**
 * New (mapreduce) API factory for record readers.
 *
 * The configuration is obtained through the CompatHandler shim, which hides
 * the TaskAttemptContext API differences between Hadoop versions. The
 * split/context arguments are not passed to the reader here; presumably the
 * new-API initialize() hook wires them in later -- confirm in the reader class.
 */
@SuppressWarnings("unchecked")
@Override
public EsInputRecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context) {
    if (isOutputAsJson(CompatHandler.taskAttemptContext(context).getConfiguration())) {
        return (EsInputRecordReader<K, V>) new JsonWritableEsInputRecordReader();
    }
    return (EsInputRecordReader<K, V>) new WritableEsInputRecordReader();
}
@Override public boolean nextKeyValue() throws IOException { // new API call routed to old API // under the new API always create new objects since consumers can (and sometimes will) modify them currentKey = createKey(); currentValue = createValue(); return next(currentKey, currentValue); }
/**
 * Hive-facing record-reader factory.
 *
 * Hive hands over its own split wrapper (EsHiveSplit); the actual ES split is
 * unwrapped first and then forwarded to the JSON or Writable reader depending
 * on the job configuration.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public AbstractWritableEsInputRecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) {
    InputSplit actualSplit = ((EsHiveSplit) split).delegate;
    if (isOutputAsJson(job)) {
        return new JsonWritableEsInputRecordReader(actualSplit, job, reporter);
    }
    return new WritableEsInputRecordReader(actualSplit, job, reporter);
}
}
// Hive close hook: routes to the shared doClose() with the stored progress
// reporter. The 'abort' flag is ignored here.
@Override
public void close(boolean abort) throws IOException {
    // TODO: check whether a proper Reporter can be passed in
    super.doClose(progress);
}
}
/**
 * Copy constructor.
 *
 * Initializes an empty map first, then copies the entries of {@code other}
 * via the inherited {@code copy} method (depth of the copy -- shallow vs.
 * deep -- is determined by that method; confirm in the superclass).
 *
 * @param other the map to copy from
 */
public LinkedMapWritable(MapWritable other) {
    this();
    copy(other);
}
// Old-API output validation hook: delegates to init(cfg), which presumably
// validates/initializes the ES connection settings eagerly at job-submission
// time. The FileSystem argument is unused.
@Override
public void checkOutputSpecs(FileSystem ignored, JobConf cfg) throws IOException {
    init(cfg);
}
// Opens the resource at 'location' through Hadoop's I/O utilities using the
// wrapped configuration -- presumably so both local and HDFS-style paths
// resolve; confirm in HadoopIOUtils.open. Caller is responsible for closing
// the returned stream.
@Override
public InputStream loadResource(String location) {
    return HadoopIOUtils.open(location, cfg);
}
// Wraps a Short via the compat utility. The "available" naming suggests
// ShortWritable may not exist on all supported Hadoop versions and a
// fallback type is used when absent -- confirm in WritableCompatUtil.
@Override
protected Object processShort(Short value) {
    return WritableCompatUtil.availableShortWritable(value);
}
// Numeric dates (presumably epoch millis -- confirm against the caller) are
// handled like any other long; the richDate flag is ignored at this level.
@Override
protected Object parseDate(Long value, boolean richDate) {
    return processLong(value);
}
// String dates are kept as plain strings (no parsing into a date object);
// the richDate flag is ignored at this level.
@Override
protected Object parseDate(String value, boolean richDate) {
    return parseString(value);
}
// New-API committer factory: returns a fresh EsOutputCommitter per call;
// the task attempt context is not consulted.
@Override
public org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(TaskAttemptContext context) {
    return new EsOutputCommitter();
}
// Old-API writer factory: only the job configuration and the progress
// callback are used; the FileSystem and file-name arguments are ignored
// (ES is the sink, not a file).
@Override
public org.apache.hadoop.mapred.RecordWriter getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) {
    return new EsRecordWriter(job, progress);
}
/**
 * Reports read progress as the fraction of {@code size} already consumed.
 *
 * Returns 0 when {@code size} is zero, both as a sensible "no progress"
 * default and to avoid a division by zero.
 */
@Override
public float getProgress() {
    if (size == 0) {
        return 0;
    }
    return ((float) getPos()) / size;
}
// Returns a fresh EsOutputFormat; the unchecked cast is needed to fit the
// generic OutputFormat signature expected by the caller.
@SuppressWarnings("unchecked")
@Override
public OutputFormat<Object, Map<Writable, Writable>> getOutputFormat() throws IOException {
    return new EsOutputFormat();
}
// Factory hook: ES values are materialized through a WritableValueReader
// (i.e. as Hadoop Writable types).
@Override
public ValueReader createValueReader() {
    return new WritableValueReader();
}
// New-API close hook: delegates to the shared doClose() with the task
// attempt context.
@Override
public void close(TaskAttemptContext context) throws IOException {
    doClose(context);
}
// Old-API close hook: delegates to the shared doClose() with the reporter.
@Override
public void close(Reporter reporter) throws IOException {
    doClose(reporter);
}