public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path) throws IOException {
  updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = pathToPartitionInfo.get(path);

  if ((part != null) && (part.getTableDesc() != null)) {
    Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
  }
  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
  return cloneJobConf;
}
public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path) throws IOException {
  updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = HiveFileFormatUtils.getFromPathRecursively(
      pathToPartitionInfo, path, null, false, true);

  try {
    if ((part != null) && (part.getTableDesc() != null)) {
      Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
    }
  } catch (Exception e) {
    throw new IOException(e);
  }
  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
  return cloneJobConf;
}
Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job);
InputFormat inputFormat = getInputFormatFromCache(formatter, job);
List<Path> dirs = new ArrayList<>(), dirsWithOriginals = new ArrayList<>();
private void createHiveOutputFormat(JobConf job) throws HiveException {
  if (hiveOutputFormat == null) {
    Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), job);
  }
  try {
    hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(job, getConf().getTableInfo());
  } catch (Throwable t) {
    throw (t instanceof HiveException) ? (HiveException) t : new HiveException(t);
  }
}
public InputSplit[] doGetSplits(JobConf job, int numSplits) throws IOException {
  super.init(job);

  Path[] dirs = FileInputFormat.getInputPaths(job);
  if (dirs.length == 0) {
    throw new IOException("No input paths specified in job");
  }
  JobConf newjob = new JobConf(job);
  ArrayList<InputSplit> result = new ArrayList<InputSplit>();

  // for each dir, get the InputFormat, and do getSplits.
  PartitionDesc part;
  for (Path dir : dirs) {
    part = HiveFileFormatUtils
        .getPartitionDescFromPathRecursively(pathToPartitionInfo, dir,
            IOPrepareCache.get().allocatePartitionDescMap(), true);
    // create a new InputFormat instance if this is the first time to see this
    // class
    Class inputFormatClass = part.getInputFileFormatClass();
    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
    Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), newjob);

    FileInputFormat.setInputPaths(newjob, dir);
    newjob.setInputFormat(inputFormat.getClass());
    InputSplit[] iss = inputFormat.getSplits(newjob, numSplits / dirs.length);
    for (InputSplit is : iss) {
      result.add(new HiveInputSplit(is, inputFormatClass.getName()));
    }
  }
  return result.toArray(new HiveInputSplit[result.size()]);
}
private void createHiveOutputFormat(Configuration hconf) throws HiveException {
  if (hiveOutputFormat == null) {
    Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), hconf);
  }
  try {
    hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, getConf().getTableInfo());
  } catch (Throwable t) {
    throw (t instanceof HiveException) ? (HiveException) t : new HiveException(t);
  }
}
TableDesc tableDesc = Utilities.getTableDesc(table);
PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf);

long len = estimator.estimate(jobConf, scanOp, threshold).getTotalLength();
if (LOG.isDebugEnabled()) {
Utilities.setColumnTypeList(jobConf, scanOp, true);
PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc);
Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf);
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength();
protected FetchInputFormatSplit[] getNextSplits() throws Exception {
  while (getNextPath()) {
    // not using FileInputFormat.setInputPaths() here because it forces a connection to the
    // default file system - which may or may not be online during pure metadata operations
    job.set("mapred.input.dir", StringUtils.escapeString(currPath.toString()));

    // Fetch operator is not vectorized and as such turn vectorization flag off so that
    // non-vectorized record reader is created below.
    HiveConf.setBoolVar(job, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);

    Class<? extends InputFormat> formatter = currDesc.getInputFileFormatClass();
    Utilities.copyTableJobPropertiesToConf(currDesc.getTableDesc(), job);
    InputFormat inputFormat = getInputFormatFromCache(formatter, job);
    InputSplit[] splits = inputFormat.getSplits(job, 1);
    FetchInputFormatSplit[] inputSplits = new FetchInputFormatSplit[splits.length];
    for (int i = 0; i < splits.length; i++) {
      inputSplits[i] = new FetchInputFormatSplit(splits[i], inputFormat);
    }
    if (work.getSplitSample() != null) {
      inputSplits = splitSampling(work.getSplitSample(), inputSplits);
    }
    if (inputSplits.length > 0) {
      return inputSplits;
    }
  }
  return null;
}
Utilities.copyTableJobPropertiesToConf(conf.getTableInfo(), jc);
Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
nonNative = part.getTableDesc().isNonNative();
Utilities.copyTableJobPropertiesToConf(
    Utilities.getTableDesc(tbl), jobConf);
Utilities.setColumnNameList(jobConf, tableScanOp);
Utilities.setColumnTypeList(jobConf, tableScanOp);
Utilities.copyTableJobPropertiesToConf(
    Utilities.getTableDesc(tbl), jobConf);
public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path) throws IOException {
  updateMrWork(jobConf);  // TODO: refactor this in HIVE-6366
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = pathToPartitionInfo.get(path.toString());

  if ((part != null) && (part.getTableDesc() != null)) {
    Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
  }
  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().getPath());
  return cloneJobConf;
}
/**
 * {@inheritDoc}
 */
@Override
public JobConf pushProjectionsAndFilters(JobConf jobConf, Path path) throws IOException {
  init(jobConf);
  final JobConf cloneJobConf = new JobConf(jobConf);
  final PartitionDesc part = pathToPartitionInfo.get(path.toString());

  if ((part != null) && (part.getTableDesc() != null)) {
    Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
  }
  pushProjectionsAndFilters(cloneJobConf, path.toString(), path.toUri().toString());
  return cloneJobConf;
}
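Every call site above follows the same shape: resolve the TableDesc for the table or partition being read or written, copy its job properties into a cloned JobConf so the caller's configuration is not mutated, and hand the clone to the input or output format. The sketch below isolates that shared pattern; it is a minimal illustration, not a Hive API. The helper name confWithTableProperties and the shape of the pathToPartitionInfo map are assumptions for this example, and the try/catch mirrors the defensive wrapping in the second snippet above, since some Hive versions declare a checked exception on copyTableJobPropertiesToConf.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.mapred.JobConf;

public final class TablePropsExample {

  // Hypothetical helper, not part of Hive: builds the conf an
  // InputFormat/OutputFormat should actually see for a given path.
  static JobConf confWithTableProperties(JobConf jobConf, Path path,
      Map<Path, PartitionDesc> pathToPartitionInfo) throws IOException {
    // Clone first so table-level properties never leak into the caller's conf.
    final JobConf cloneJobConf = new JobConf(jobConf);
    final PartitionDesc part = pathToPartitionInfo.get(path);
    try {
      if (part != null && part.getTableDesc() != null) {
        // Copy the properties the table's storage handler/serde declared
        // onto the cloned conf before it is handed to the file format.
        Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
      }
    } catch (Exception e) {
      throw new IOException(e);
    }
    return cloneJobConf;
  }
}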