/**
 * mapreduce-API entry point: computes ORC split metadata for the job and
 * wraps each {@code OrcSplit} in an {@code OrcNewSplit} for the new API.
 */
@Override
public List<InputSplit> getSplits(JobContext jobContext)
    throws IOException, InterruptedException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("getSplits started");
  }
  final Configuration shimConf = ShimLoader.getHadoopShims().getConfiguration(jobContext);
  // -1: no split-count hint for the split generator.
  final List<OrcSplit> orcSplits =
      OrcInputFormat.generateSplitsInfo(shimConf, createContext(shimConf, -1));
  final List<InputSplit> wrapped = new ArrayList<InputSplit>(orcSplits.size());
  for (int i = 0; i < orcSplits.size(); ++i) {
    wrapped.add(new OrcNewSplit(orcSplits.get(i)));
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("getSplits finished");
  }
  return wrapped;
}
@Override public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("getSplits started"); } Configuration conf = job; if (HiveConf.getBoolVar(job, HiveConf.ConfVars.HIVE_ORC_MS_FOOTER_CACHE_ENABLED)) { // Create HiveConf once, since this is expensive. conf = new HiveConf(conf, OrcInputFormat.class); } List<OrcSplit> result = generateSplitsInfo(conf, new Context(conf, numSplits, createExternalCaches())); if (LOG.isDebugEnabled()) { LOG.debug("getSplits finished"); } return result.toArray(new InputSplit[result.size()]); }
/**
 * Computes the ORC splits for this job (mapreduce API) and adapts each one
 * to the new-API split type.
 */
@Override
public List<InputSplit> getSplits(JobContext jobContext)
    throws IOException, InterruptedException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("getSplits started");
  }
  final Configuration jobConf = ShimLoader.getHadoopShims().getConfiguration(jobContext);
  // -1 means no target split count is supplied to the generator.
  final List<OrcSplit> generated =
      OrcInputFormat.generateSplitsInfo(jobConf, createContext(jobConf, -1));
  final List<InputSplit> adapted = new ArrayList<InputSplit>(generated.size());
  for (OrcSplit orcSplit : generated) {
    adapted.add(new OrcNewSplit(orcSplit));
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("getSplits finished");
  }
  return adapted;
}
@Override public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { if (isDebugEnabled) { LOG.debug("getSplits started"); } Configuration conf = job; if (HiveConf.getBoolVar(job, HiveConf.ConfVars.HIVE_ORC_MS_FOOTER_CACHE_ENABLED)) { // Create HiveConf once, since this is expensive. conf = new HiveConf(conf, OrcInputFormat.class); } List<OrcSplit> result = generateSplitsInfo(conf, new Context(conf, numSplits, createExternalCaches())); if (isDebugEnabled) { LOG.debug("getSplits finished"); } return result.toArray(new InputSplit[result.size()]); }
// NOTE(review): fragment — the enclosing call (presumably a doAs(...) with an
// anonymous action; TODO confirm against the surrounding file) begins before
// this view. The visible run() body triggers split generation for its side
// effects only and discards the result; the trailing "} });" closes the
// anonymous class and the enclosing call.
@Override public Void run() throws Exception { OrcInputFormat.generateSplitsInfo(conf, new Context(conf, -1, null)); return null; } });
// NOTE(review): test fragment — the enclosing method and try block begin before
// this view. Generates splits and asserts exactly one is produced; the dangling
// "} finally {" belongs to an enclosing try/finally that continues past this line.
List<OrcSplit> splits = OrcInputFormat.generateSplitsInfo(conf, new Context(conf, -1, null)); assertEquals(1, splits.size()); } finally {
/**
 * mapred-API entry point: computes ORC splits for the job, timing the work
 * with the {@code ORC_GET_SPLITS} perf-logger scope.
 *
 * Fix: the original called PerfLogEnd only on the success path, so an
 * IOException from generateSplitsInfo left the perf scope open and the
 * Begin/End pairs unbalanced. The End call now runs in a finally block.
 */
@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
  try {
    List<OrcSplit> result = generateSplitsInfo(job);
    return result.toArray(new InputSplit[result.size()]);
  } finally {
    // Always close the perf scope, even when split generation throws.
    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
  }
}
/**
 * mapreduce-API entry point: computes ORC splits, wraps each one in an
 * {@code OrcNewSplit}, and times the work with the {@code ORC_GET_SPLITS}
 * perf-logger scope.
 *
 * Fix: the original called PerfLogEnd only on the success path, so an
 * exception from generateSplitsInfo left the perf scope open and the
 * Begin/End pairs unbalanced. The End call now runs in a finally block.
 */
@Override
public List<InputSplit> getSplits(JobContext jobContext)
    throws IOException, InterruptedException {
  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
  try {
    List<OrcSplit> splits = OrcInputFormat.generateSplitsInfo(ShimLoader.getHadoopShims()
        .getConfiguration(jobContext));
    List<InputSplit> result = new ArrayList<InputSplit>(splits.size());
    for (OrcSplit split : splits) {
      result.add(new OrcNewSplit(split));
    }
    return result;
  } finally {
    // Always close the perf scope, even when split generation throws.
    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ORC_GET_SPLITS);
  }
}