/**
 * Builds the record writer for this task attempt.
 *
 * Reads the output table description for this profile, pushes the derived
 * settings (RCFile column count, key/value writable classes) into the job
 * {@link Configuration}, instantiates the base Hadoop (mapred) output format
 * reflectively, and wraps its writer together with the serializer and a
 * struct inspector for the table's columns.
 *
 * @param taskAttemptContext context of the task attempt being written
 * @return a {@link RecordWriterImpl}, or a {@link ResettableRecordWriterImpl}
 *         when the profile is configured to reset slow writes
 * @throws IOException if the base writer cannot be created
 * @throws InterruptedException if interrupted while creating the writer
 */
@Override
public RecordWriterImpl getRecordWriter(TaskAttemptContext taskAttemptContext)
throws IOException, InterruptedException
{
  HadoopUtils.setWorkOutputDir(taskAttemptContext);
  Configuration conf = taskAttemptContext.getConfiguration();

  OutputConf outputConf = new OutputConf(conf, myProfileId);
  OutputInfo tableInfo = outputConf.readOutputTableInfo();

  // Propagate table-derived settings into the job configuration before the
  // base output format is instantiated from it.
  HiveUtils.setRCileNumColumns(conf, tableInfo.getColumnInfo().size());
  HadoopUtils.setOutputKeyWritableClass(conf, NullWritable.class);

  Serializer rowSerializer = tableInfo.createSerializer(conf);
  HadoopUtils.setOutputValueWritableClass(conf, rowSerializer.getSerializedClass());

  org.apache.hadoop.mapred.OutputFormat baseOutputFormat =
      ReflectionUtils.newInstance(tableInfo.getOutputFormatClass(), conf);
  org.apache.hadoop.mapred.RecordWriter<WritableComparable, Writable> baseWriter =
      getBaseRecordWriter(taskAttemptContext, baseOutputFormat);
  StructObjectInspector rowInspector = Inspectors.createFor(tableInfo.getColumnInfo());

  if (outputConf.shouldResetSlowWrites()) {
    // Resettable variant re-creates the underlying writer when a single
    // write exceeds the configured timeout.
    return new ResettableRecordWriterImpl(baseWriter, rowSerializer, rowInspector,
        taskAttemptContext, baseOutputFormat, outputConf.getWriteResetTimeout());
  }
  return new RecordWriterImpl(baseWriter, rowSerializer, rowInspector);
}