/**
 * Create a generic Hive RecordReader that can iterate over all chunks in a
 * CombinedFileSplit.
 *
 * @param split    the split to read; anything other than a
 *                 CombineHiveInputSplit is delegated to the superclass
 * @param job      job configuration used to resolve the wrapped input format
 * @param reporter progress reporter handed to the shim reader
 * @return a RecordReader over every chunk of the combined split
 * @throws IOException if the wrapped input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job,
    Reporter reporter) throws IOException {
  // Non-combined splits take the regular (non-combining) read path.
  if (!(split instanceof CombineHiveInputSplit)) {
    return super.getRecordReader(split, job, reporter);
  }

  CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split;

  String inputFormatClassName = null;
  Class<?> inputFormatClass = null;
  try {
    inputFormatClassName = hsplit.inputFormatClassName();
    inputFormatClass = job.getClassByName(inputFormatClassName);
  } catch (Exception e) {
    // Chain the original failure as the cause instead of dropping it.
    throw new IOException("cannot find class " + inputFormatClassName, e);
  }

  // Push column projections and filter predicates for the first path of the
  // combined split before the delegate reader is constructed.
  pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath(0));

  return ShimLoader.getHadoopShims().getCombineFileInputFormat()
      .getRecordReader(job, (CombineFileSplit) split, reporter,
          CombineHiveRecordReader.class);
}
/**
 * Create a generic Hive RecordReader that can iterate over all chunks in a
 * CombinedFileSplit.
 *
 * <p>Splits that are not CombineHiveInputSplits fall back to the superclass
 * implementation.
 *
 * @param split    the split to read
 * @param job      job configuration used to resolve the wrapped input format
 * @param reporter progress reporter handed to the shim reader
 * @return a RecordReader over every chunk of the combined split
 * @throws IOException if the wrapped input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job,
    Reporter reporter) throws IOException {
  if (!(split instanceof CombineHiveInputSplit)) {
    return super.getRecordReader(split, job, reporter);
  }

  CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split;

  String inputFormatClassName = null;
  Class<?> inputFormatClass = null;
  try {
    inputFormatClassName = hsplit.inputFormatClassName();
    inputFormatClass = job.getClassByName(inputFormatClassName);
  } catch (Exception e) {
    // Preserve the underlying exception as the cause for diagnosability.
    throw new IOException("cannot find class " + inputFormatClassName, e);
  }

  // Apply projections/filters for the first path before building the reader.
  pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath(0));

  return ShimLoader.getHadoopShims().getCombineFileInputFormat()
      .getRecordReader(job, (CombineFileSplit) split, reporter,
          CombineHiveRecordReader.class);
}
(CombineHiveInputSplit) split : new CombineHiveInputSplit(jobConf, (CombineFileSplit) split); String inputFormatClassName = hsplit.inputFormatClassName(); Class inputFormatClass = null; try {
/**
 * Create a generic Hive RecordReader that can iterate over all chunks in a
 * CombinedFileSplit.
 *
 * @param split    the split to read; non-CombineHiveInputSplits are delegated
 *                 to the superclass
 * @param job      job configuration used to resolve the wrapped input format
 * @param reporter progress reporter handed to the shim reader
 * @return a RecordReader over every chunk of the combined split
 * @throws IOException if the wrapped input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job,
    Reporter reporter) throws IOException {
  if (!(split instanceof CombineHiveInputSplit)) {
    return super.getRecordReader(split, job, reporter);
  }

  CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split;

  String inputFormatClassName = null;
  Class<?> inputFormatClass = null;
  try {
    inputFormatClassName = hsplit.inputFormatClassName();
    inputFormatClass = job.getClassByName(inputFormatClassName);
  } catch (Exception e) {
    // Chain the original failure as the cause instead of dropping it.
    throw new IOException("cannot find class " + inputFormatClassName, e);
  }

  // Push projections and filters for the first path (both its full string
  // form and its URI path form) before building the shim reader.
  pushProjectionsAndFilters(job, inputFormatClass,
      hsplit.getPath(0).toString(), hsplit.getPath(0).toUri().getPath());

  // hsplit is the already-cast split; no need to re-cast it here.
  return ShimLoader.getHadoopShims().getCombineFileInputFormat()
      .getRecordReader(job, hsplit.getInputSplitShim(), reporter,
          CombineHiveRecordReader.class);
}
/**
 * Create a generic Hive RecordReader that can iterate over all chunks in a
 * CombinedFileSplit.
 *
 * @param split    the split to read; non-CombineHiveInputSplits are delegated
 *                 to the superclass
 * @param job      job configuration used to resolve the wrapped input format
 * @param reporter progress reporter handed to the shim reader
 * @return a RecordReader over every chunk of the combined split
 * @throws IOException if the wrapped input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job,
    Reporter reporter) throws IOException {
  if (!(split instanceof CombineHiveInputSplit)) {
    return super.getRecordReader(split, job, reporter);
  }

  CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split;

  String inputFormatClassName = null;
  Class<?> inputFormatClass = null;
  try {
    inputFormatClassName = hsplit.inputFormatClassName();
    inputFormatClass = job.getClassByName(inputFormatClassName);
  } catch (Exception e) {
    // Keep the original exception as the cause so the root failure is visible.
    throw new IOException("cannot find class " + inputFormatClassName, e);
  }

  // Push projections and filters for the first path (string form and URI
  // path form) before the delegate reader is constructed.
  pushProjectionsAndFilters(job, inputFormatClass,
      hsplit.getPath(0).toString(), hsplit.getPath(0).toUri().getPath());

  return ShimLoader.getHadoopShims().getCombineFileInputFormat()
      .getRecordReader(job, (CombineFileSplit) split, reporter,
          CombineHiveRecordReader.class);
}
/**
 * Reads a single chunk (partition) of a combined split by delegating to the
 * RecordReader of that partition's underlying input format.
 *
 * @param split     the combined split; a raw CombineFileSplit is wrapped into
 *                  a CombineHiveInputSplit on the fly
 * @param conf      job configuration (must be a JobConf)
 * @param reporter  progress reporter for the delegate reader
 * @param partition index of the chunk within the combined split to read
 * @throws IOException if the partition's input format class cannot be loaded
 */
public CombineHiveRecordReader(InputSplit split, Configuration conf,
    Reporter reporter, Integer partition) throws IOException {
  super((JobConf) conf);
  CombineHiveInputSplit hsplit = split instanceof CombineHiveInputSplit ?
      (CombineHiveInputSplit) split :
      new CombineHiveInputSplit(jobConf, (CombineFileSplit) split);
  String inputFormatClassName = hsplit.inputFormatClassName();
  Class<?> inputFormatClass = null;
  try {
    inputFormatClass = JavaUtils.loadClass(inputFormatClassName);
  } catch (ClassNotFoundException e) {
    // Chain the original exception so the root cause is not lost.
    throw new IOException("CombineHiveRecordReader: class not found "
        + inputFormatClassName, e);
  }
  InputFormat inputFormat = HiveInputFormat.getInputFormatFromCache(
      inputFormatClass, jobConf);

  // Create a FileSplit covering only the requested chunk of the combined
  // split, then hand it to the partition's own input format.
  FileSplit fsplit = new FileSplit(hsplit.getPaths()[partition], hsplit
      .getStartOffsets()[partition], hsplit.getLengths()[partition], hsplit
      .getLocations());

  this.setRecordReader(inputFormat.getRecordReader(fsplit, jobConf, reporter));

  this.initIOContext(fsplit, jobConf, inputFormatClass, this.recordReader);
}
/**
 * Reads a single chunk (partition) of a combined split by delegating to the
 * RecordReader of that partition's underlying input format.
 *
 * @param split     the combined split shim to wrap
 * @param conf      job configuration (must be a JobConf)
 * @param reporter  progress reporter for the delegate reader
 * @param partition index of the chunk within the combined split to read
 * @throws IOException if the partition's input format class cannot be loaded
 */
public CombineHiveRecordReader(InputSplit split, Configuration conf,
    Reporter reporter, Integer partition) throws IOException {
  JobConf job = (JobConf) conf;
  CombineHiveInputSplit hsplit = new CombineHiveInputSplit(job,
      (InputSplitShim) split);
  String inputFormatClassName = hsplit.inputFormatClassName();
  Class<?> inputFormatClass = null;
  try {
    inputFormatClass = Class.forName(inputFormatClassName);
  } catch (ClassNotFoundException e) {
    // Preserve the underlying exception as the cause for diagnosability.
    throw new IOException("CombineHiveRecordReader: class not found "
        + inputFormatClassName, e);
  }
  InputFormat inputFormat = HiveInputFormat.getInputFormatFromCache(
      inputFormatClass, job);

  // Create a FileSplit covering only the requested chunk of the combined
  // split, then hand it to the partition's own input format.
  FileSplit fsplit = new FileSplit(hsplit.getPaths()[partition], hsplit
      .getStartOffsets()[partition], hsplit.getLengths()[partition], hsplit
      .getLocations());

  this.recordReader = inputFormat.getRecordReader(fsplit, job, reporter);

  this.initIOContext(fsplit, job, inputFormatClass, this.recordReader);
}
(CombineHiveInputSplit) split : new CombineHiveInputSplit(jobConf, (CombineFileSplit) split); String inputFormatClassName = hsplit.inputFormatClassName(); Class inputFormatClass = null; try {