/** Returns true when the target table uses Hive-native storage (i.e. is not backed by a storage handler). */
private boolean isNativeTable() {
  boolean nonNative = conf.getTableInfo().isNonNative();
  return !nonNative;
}
public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException { if (hiveOutputFormat == null) { try { createHiveOutputFormat(job); } catch (HiveException ex) { logOutputFormatError(job, ex); throw new IOException(ex); } } if (conf.getTableInfo().isNonNative()) { //check the ouput specs only if it is a storage handler (native tables's outputformats does //not set the job's output properties correctly) try { hiveOutputFormat.checkOutputSpecs(ignored, job); } catch (NoSuchMethodError e) { //For BC, ignore this for now, but leave a log message LOG.warn("HiveOutputFormat should implement checkOutputSpecs() method`"); } } }
public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException { if (hiveOutputFormat == null) { try { createHiveOutputFormat(job); } catch (HiveException ex) { logOutputFormatError(job, ex); throw new IOException(ex); } } if (conf.getTableInfo().isNonNative()) { //check the ouput specs only if it is a storage handler (native tables's outputformats does //not set the job's output properties correctly) try { hiveOutputFormat.checkOutputSpecs(ignored, job); } catch (NoSuchMethodError e) { //For BC, ignore this for now, but leave a log message LOG.warn("HiveOutputFormat should implement checkOutputSpecs() method`"); } } }
/**
 * Creates a FetchOperator bound to the given fetch work and job configuration.
 *
 * @param work description of what to fetch (table or partitions); a null table
 *             descriptor marks this as a statistics-only reader
 * @param job job configuration; the table schema is pushed into it when the
 *            downstream operator is a TableScanOperator
 * @param operator downstream operator fed by this fetch, may be null
 * @param vcCols virtual columns to populate, may be null or empty
 * @throws HiveException propagated from initialize()
 */
public FetchOperator(FetchWork work, JobConf job, Operator<?> operator,
    List<VirtualColumn> vcCols) throws HiveException {
  this.job = job;
  this.work = work;
  this.operator = operator;
  // expose the scan's schema through the conf so record readers can see it
  if (operator instanceof TableScanOperator) {
    Utilities.addTableSchemaToConf(job, (TableScanOperator) operator);
  }
  this.vcCols = vcCols;
  this.hasVC = vcCols != null && !vcCols.isEmpty();
  // a missing table descriptor means this fetch only reads statistics;
  // the partitioned / non-native flags are meaningless in that case
  this.isStatReader = work.getTblDesc() == null;
  this.isPartitioned = !isStatReader && work.isPartitioned();
  this.isNonNativeTable = !isStatReader && work.getTblDesc().isNonNative();
  initialize();
}
@SuppressWarnings("rawtypes") private static Path createDummyFileForEmptyTable(JobConf job, MapWork work, Path hiveScratchDir, String alias) throws Exception { TableDesc tableDesc = work.getAliasToPartnInfo().get(alias).getTableDesc(); if (tableDesc.isNonNative()) { // if it does not need native storage, we can't create an empty file for it. return null; } Properties props = tableDesc.getProperties(); HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, tableDesc); Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job, props, false); LOG.info("Changed input file for alias {} to newPath", alias, newPath); // update the work LinkedHashMap<Path, ArrayList<String>> pathToAliases = work.getPathToAliases(); ArrayList<String> newList = new ArrayList<String>(1); newList.add(alias); pathToAliases.put(newPath, newList); work.setPathToAliases(pathToAliases); PartitionDesc pDesc = work.getAliasToPartnInfo().get(alias).clone(); work.addPathToPartitionInfo(newPath, pDesc); return newPath; }
/**
 * Creates a FetchOperator bound to the given fetch work and job configuration.
 *
 * @param work description of what to fetch (table or partitions); a null table
 *             descriptor marks this as a statistics-only reader
 * @param job job configuration; the table schema is pushed into it when the
 *            downstream operator is a TableScanOperator
 * @param operator downstream operator fed by this fetch, may be null
 * @param vcCols virtual columns to populate, may be null or empty
 * @throws HiveException propagated from initialize()
 */
public FetchOperator(FetchWork work, JobConf job, Operator<?> operator,
    List<VirtualColumn> vcCols) throws HiveException {
  this.job = job;
  this.work = work;
  this.operator = operator;
  // expose the scan's schema through the conf so record readers can see it
  if (operator instanceof TableScanOperator) {
    Utilities.addTableSchemaToConf(job, (TableScanOperator) operator);
  }
  this.vcCols = vcCols;
  this.hasVC = vcCols != null && !vcCols.isEmpty();
  // a missing table descriptor means this fetch only reads statistics;
  // the partitioned / non-native flags are meaningless in that case
  this.isStatReader = work.getTblDesc() == null;
  this.isPartitioned = !isStatReader && work.isPartitioned();
  this.isNonNativeTable = !isStatReader && work.getTblDesc().isNonNative();
  initialize();
}
@SuppressWarnings("rawtypes") private static Path createDummyFileForEmptyTable(JobConf job, MapWork work, Path hiveScratchDir, String alias) throws Exception { TableDesc tableDesc = work.getAliasToPartnInfo().get(alias).getTableDesc(); if (tableDesc.isNonNative()) { // if it does not need native storage, we can't create an empty file for it. return null; } Properties props = tableDesc.getProperties(); HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, tableDesc); Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job, props, false); if (LOG.isInfoEnabled()) { LOG.info("Changed input file for alias " + alias + " to " + newPath); } // update the work LinkedHashMap<Path, ArrayList<String>> pathToAliases = work.getPathToAliases(); ArrayList<String> newList = new ArrayList<String>(); newList.add(alias); pathToAliases.put(newPath, newList); work.setPathToAliases(pathToAliases); PartitionDesc pDesc = work.getAliasToPartnInfo().get(alias).clone(); work.addPathToPartitionInfo(newPath, pDesc); return newPath; }
@SuppressWarnings("rawtypes") private static Path createDummyFileForEmptyPartition(Path path, JobConf job, PartitionDesc partDesc, Path hiveScratchDir) throws Exception { String strPath = path.toString(); // The input file does not exist, replace it by a empty file if (partDesc.getTableDesc().isNonNative()) { // if this isn't a hive table we can't create an empty file for it. return path; } Properties props = SerDeUtils.createOverlayedProperties( partDesc.getTableDesc().getProperties(), partDesc.getProperties()); HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, partDesc); boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class; Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job, props, oneRow); LOG.info("Changed input file {} to empty file {} ({})", strPath, newPath, oneRow); return newPath; }
pathToPartitionInfo, path, IOPrepareCache.get().allocatePartitionDescMap()); TableDesc tableDesc = part.getTableDesc(); if ((tableDesc != null) && tableDesc.isNonNative()) { return super.getSplits(job, numSplits);
@SuppressWarnings("rawtypes") private static Path createDummyFileForEmptyPartition(Path path, JobConf job, MapWork work, Path hiveScratchDir) throws Exception { String strPath = path.toString(); // The input file does not exist, replace it by a empty file PartitionDesc partDesc = work.getPathToPartitionInfo().get(path); if (partDesc.getTableDesc().isNonNative()) { // if this isn't a hive table we can't create an empty file for it. return path; } Properties props = SerDeUtils.createOverlayedProperties( partDesc.getTableDesc().getProperties(), partDesc.getProperties()); HiveOutputFormat outFileFormat = HiveFileFormatUtils.getHiveOutputFormat(job, partDesc); boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class; Path newPath = createEmptyFile(hiveScratchDir, outFileFormat, job, props, oneRow); if (LOG.isInfoEnabled()) { LOG.info("Changed input file " + strPath + " to empty file " + newPath + " (" + oneRow + ")"); } // update the work work.addPathToAlias(newPath, work.getPathToAliases().get(path)); work.removePathToAlias(path); work.removePathToPartitionInfo(path); work.addPathToPartitionInfo(newPath, partDesc); return newPath; }
pathToPartitionInfo, path, IOPrepareCache.get().allocatePartitionDescMap()); TableDesc tableDesc = part.getTableDesc(); if ((tableDesc != null) && tableDesc.isNonNative()) { return super.getSplits(job, numSplits);
TableDesc mockTableDesc = mock(TableDesc.class); when(mockTableDesc.isNonNative()).thenReturn(false); when(mockTableDesc.getProperties()).thenReturn(new Properties());
TableDesc mockTableDesc = mock(TableDesc.class); when(mockTableDesc.isNonNative()).thenReturn(false); when(mockTableDesc.getProperties()).thenReturn(new Properties()); when(mockPartitionDesc.getProperties()).thenReturn(new Properties());
if ((part != null) && (part.getTableDesc() != null)) { Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job); nonNative = part.getTableDesc().isNonNative();
nonNative = part.getTableDesc().isNonNative();
this.hconf = hconf; filesCreated = false; isNativeTable = !conf.getTableInfo().isNonNative(); isTemporary = conf.isTemporary(); multiFileSpray = conf.isMultiFileSpray();
boolean isNonNativeTable = tableDescriptor.isNonNative(); if (!isNonNativeTable) { AcidUtils.Operation acidOp = AcidUtils.Operation.NOT_ACID;
/**
 * Prepares the operator for a new job: stores the configuration, resets the
 * per-job state, and determines whether the table uses native Hive storage.
 */
public void initialize(JobConf job) {
  this.job = job;
  tblDataDone = false;
  rowWithPart = new Object[2];
  // a missing table descriptor is treated as a native table
  TableDesc tblDesc = work.getTblDesc();
  isNativeTable = tblDesc == null || !tblDesc.isNonNative();
}
public FetchOperator(FetchWork work, JobConf job, Operator<?> operator, List<VirtualColumn> vcCols) throws HiveException { this.job = job; this.work = work; this.operator = operator; this.vcCols = vcCols; this.hasVC = vcCols != null && !vcCols.isEmpty(); this.isStatReader = work.getTblDesc() == null; this.isPartitioned = !isStatReader && work.isPartitioned(); this.isNonNativeTable = !isStatReader && work.getTblDesc().isNonNative(); initialize(); }
public void checkOutputSpecs(FileSystem ignored, JobConf job) throws IOException { if (hiveOutputFormat == null) { try { createHiveOutputFormat(job); } catch (HiveException ex) { logOutputFormatError(job, ex); throw new IOException(ex); } } if (conf.getTableInfo().isNonNative()) { //check the ouput specs only if it is a storage handler (native tables's outputformats does //not set the job's output properties correctly) try { hiveOutputFormat.checkOutputSpecs(ignored, job); } catch (NoSuchMethodError e) { //For BC, ignore this for now, but leave a log message LOG.warn("HiveOutputFormat should implement checkOutputSpecs() method`"); } } }