public Table toTable(HiveConf conf) throws HiveException { String databaseName = getDatabaseName(); String tableName = getTableName(); if (getTblProps() != null) { tbl.getTTable().getParameters().putAll(getTblProps()); if (getPartCols() != null) { tbl.setPartCols(getPartCols()); if (getNumBuckets() != -1) { tbl.setNumBuckets(getNumBuckets()); if (getStorageHandler() != null) { tbl.setProperty( org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, getStorageHandler()); if (getSerName() == null) { if (storageHandler == null) { LOG.info("Default to LazySimpleSerDe for table " + tableName); DDLTask.validateSerDe(getSerName(), conf); tbl.setSerializationLib(getSerName()); if (getFieldDelim() != null) { tbl.setSerdeParam(serdeConstants.FIELD_DELIM, getFieldDelim()); tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, getFieldDelim()); if (getFieldEscape() != null) {
Map<String, String> tblProps = desc.getTblProps(); if (tblProps == null) { String storageHandler = desc.getStorageHandler(); if (StringUtils.isEmpty(storageHandler)) { } else { HiveStorageHandler storageHandlerInst = HCatUtil .getStorageHandler(context.getConf(), desc.getStorageHandler(), desc.getSerName(), desc.getInputFormat(), desc.getOutputFormat()); Table table = context.getHive().newTable(desc.getTableName()); if (desc.getLocation() != null) { table.setDataLocation(new Path(desc.getLocation())); if (desc.getStorageHandler() != null) { table.setProperty( org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, desc.getStorageHandler()); for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) { table.setSerdeParam(prop.getKey(), prop.getValue()); desc.setTblProps(tblProps); context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);
public LoadFileDesc(final CreateTableDesc createTableDesc, final CreateViewDesc createViewDesc, final Path sourcePath, final Path targetDir, final boolean isDfsDir, final String columns, final String columnTypes) { this(sourcePath, targetDir, isDfsDir, columns, columnTypes); if (createTableDesc != null && createTableDesc.getDatabaseName() != null && createTableDesc.getTableName() != null) { destinationCreateTable = (createTableDesc.getTableName().contains(".") ? "" : createTableDesc .getDatabaseName() + ".") + createTableDesc.getTableName(); } else if (createViewDesc != null) { // The work is already done in analyzeCreateView to assure that the view name is fully // qualified. destinationCreateTable = createViewDesc.getViewName(); } }
if (directoryDesc.getFieldDelim() != null) { properties.setProperty( serdeConstants.FIELD_DELIM, directoryDesc.getFieldDelim()); properties.setProperty( serdeConstants.SERIALIZATION_FORMAT, directoryDesc.getFieldDelim()); if (directoryDesc.getLineDelim() != null) { properties.setProperty( serdeConstants.LINE_DELIM, directoryDesc.getLineDelim()); if (directoryDesc.getCollItemDelim() != null) { properties.setProperty( serdeConstants.COLLECTION_DELIM, directoryDesc.getCollItemDelim()); if (directoryDesc.getMapKeyDelim() != null) { properties.setProperty( serdeConstants.MAPKEY_DELIM, directoryDesc.getMapKeyDelim()); if (directoryDesc.getFieldEscape() !=null) { properties.setProperty( serdeConstants.ESCAPE_CHAR, directoryDesc.getFieldEscape()); if (directoryDesc.getSerName() != null) { properties.setProperty( serdeConstants.SERIALIZATION_LIB, directoryDesc.getSerName()); if (directoryDesc.getSerdeProps() != null) { properties.putAll(directoryDesc.getSerdeProps());
Table tbl = db.newTable(crtTbl.getTableName()); if (crtTbl.getTblProps() != null) { tbl.getTTable().getParameters().putAll(crtTbl.getTblProps()); if (crtTbl.getPartCols() != null) { tbl.setPartCols(crtTbl.getPartCols()); if (crtTbl.getNumBuckets() != -1) { tbl.setNumBuckets(crtTbl.getNumBuckets()); if (crtTbl.getStorageHandler() != null) { tbl.setProperty( org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE, if (crtTbl.getSerName() == null) { if (storageHandler == null) { LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName()); tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); } else { String serDeClassName = storageHandler.getSerDeClass().getName(); LOG.info("Use StorageHandler-supplied " + serDeClassName + " for table " + crtTbl.getTableName()); tbl.setSerializationLib(serDeClassName); validateSerDe(crtTbl.getSerName()); tbl.setSerializationLib(crtTbl.getSerName()); if (crtTbl.getFieldDelim() != null) {
if (qb.getTableDesc() != null && qb.getTableDesc().getLocation() != null) { location = new Path(qb.getTableDesc().getLocation()); } else { String destTableDb = qb.getTableDesc() != null ? qb.getTableDesc().getDatabaseName() : null; if (destTableDb == null) { destTableDb = names[0]; CreateTableDesc tblDesc = qb.getTableDesc(); if (tblDesc != null && tblDesc.isTemporary() && AcidUtils.isInsertOnlyTable(tblDesc.getTblProps(), true)) { fname = FileUtils.makeQualified(location, conf).toString(); } else { CreateTableDesc directoryDesc = new CreateTableDesc(); boolean directoryDescIsSet = false; int numCh = ast.getChildCount(); if (child != null) { if (storageFormat.fillStorageFormat(child)) { directoryDesc.setInputFormat(storageFormat.getInputFormat()); directoryDesc.setOutputFormat(storageFormat.getOutputFormat()); directoryDesc.setSerName(storageFormat.getSerde()); directoryDescIsSet = true; continue; case HiveParser.TOK_TABLEROWFORMAT: rowFormatParams.analyzeRowFormat(child);
if ((this.getCols() == null) || (this.getCols().size() == 0)) { StringUtils.isEmpty(getStorageHandler())) { throw new SemanticException(ErrorMsg.INVALID_TBL_DDL_SERDE.getMsg()); if (this.getStorageHandler() == null) { try { Class<?> origin = Class.forName(this.getOutputFormat(), true, Utilities.getSessionSpecifiedClassLoader()); Class<? extends OutputFormat> replaced = HiveFileFormatUtils List<String> colNames = ParseUtils.validateColumnNameUniqueness(this.getCols()); if (this.getBucketCols() != null) { Iterator<String> bucketCols = this.getBucketCols().iterator(); while (bucketCols.hasNext()) { String bucketCol = bucketCols.next(); if (this.getSortCols() != null) { Iterator<Order> sortCols = this.getSortCols().iterator(); while (sortCols.hasNext()) { String sortCol = sortCols.next().getCol(); if (this.getPartCols() != null) { Iterator<FieldSchema> partColsIter = this.getPartCols().iterator(); while (partColsIter.hasNext()) {
if ( (tableDesc.isExternal()) // IMPORT statement speicified EXTERNAL && (!table.isPartitioned() || !table.getTableType().equals(TableType.EXTERNAL_TABLE)) ){ if ((tableDesc.getLocation() != null) && (!table.isPartitioned()) && (!table.getDataLocation().equals(new Path(tableDesc.getLocation()))) ){ throw new SemanticException( ErrorMsg.INCOMPATIBLE_SCHEMA.getMsg(" Location does not match")); List<FieldSchema> importedTableCols = tableDesc.getCols(); if (!EximUtil.schemaCompare(importedTableCols, existingTableCols)) { throw new SemanticException( List<FieldSchema> importedTablePartCols = tableDesc.getPartCols(); if (!EximUtil.schemaCompare(importedTablePartCols, existingTablePartCols)) { throw new SemanticException( Map<String, String> importedTableParams = tableDesc.getTblProps(); String error = checkParams(existingTableParams, importedTableParams, new String[] { "howl.isd", String importedifc = tableDesc.getInputFormat(); String existingofc = table.getOutputFormatClass().getName(); String importedofc = tableDesc.getOutputFormat(); String importedSerde = tableDesc.getSerName(); if (!existingSerde.equals(importedSerde)) { throw new SemanticException(
Table tbl = crtTbl.toTable(conf); List<SQLPrimaryKey> primaryKeys = crtTbl.getPrimaryKeys(); List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys(); List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints(); List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints(); List<SQLDefaultConstraint> defaultConstraints = crtTbl.getDefaultConstraints(); List<SQLCheckConstraint> checkConstraints = crtTbl.getCheckConstraints(); LOG.debug("creating table {} on {}",tbl.getFullyQualifiedName(),tbl.getDataLocation()); if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())){ if (crtTbl.getReplicationSpec().allowEventReplacementInto(existingTable.getParameters())){ crtTbl.setReplaceMode(true); // we replace existing table. } else { LOG.debug("DDLTask: Create Table is skipped as table {} is newer than update", crtTbl.getTableName()); return 0; // no replacement, the existing table state is newer than our update. if (crtTbl.getReplaceMode()) { ReplicationSpec replicationSpec = crtTbl.getReplicationSpec(); long writeId = 0; EnvironmentContext environmentContext = null; writeId = crtTbl.getReplWriteId(); (checkConstraints!= null && checkConstraints.size() > 0) || defaultConstraints != null && defaultConstraints.size() > 0) { db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
if (crtTblDesc.getSerName() != null) { Class c = Class.forName(crtTblDesc.getSerName()); serdeClass = c; if (crtTblDesc.getFieldDelim() != null) { separatorCode = crtTblDesc.getFieldDelim(); if (crtTblDesc.getCollItemDelim() != null) { properties.setProperty(Constants.COLLECTION_DELIM, crtTblDesc .getCollItemDelim()); if (crtTblDesc.getMapKeyDelim() != null) { properties.setProperty(Constants.MAPKEY_DELIM, crtTblDesc .getMapKeyDelim()); if (crtTblDesc.getFieldEscape() != null) { properties.setProperty(Constants.ESCAPE_CHAR, crtTblDesc .getFieldEscape()); if (crtTblDesc.getLineDelim() != null) { properties.setProperty(Constants.LINE_DELIM, crtTblDesc.getLineDelim()); Class c1 = Class.forName(crtTblDesc.getInputFormat()); Class c2 = Class.forName(crtTblDesc.getOutputFormat()); Class<? extends InputFormat> in_class = c1; Class<? extends HiveOutputFormat> out_class = c2;
if ((crtTblDesc.getCols() == null) || (crtTblDesc.getCols().size() == 0)) { if (StringUtils.isEmpty(crtTblDesc.getSerName()) || !SerDeUtils.shouldGetColsFromSerDe(crtTblDesc.getSerName())) { throw new SemanticException(ErrorMsg.INVALID_TBL_DDL_SERDE.getMsg()); if (crtTblDesc.getStorageHandler() == null) { try { Class<?> origin = Class.forName(crtTblDesc.getOutputFormat(), true, JavaUtils.getClassLoader()); Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils List<String> colNames = validateColumnNameUniqueness(crtTblDesc.getCols()); if (crtTblDesc.getBucketCols() != null) { Iterator<String> bucketCols = crtTblDesc.getBucketCols().iterator(); while (bucketCols.hasNext()) { String bucketCol = bucketCols.next(); if (crtTblDesc.getSortCols() != null) { Iterator<Order> sortCols = crtTblDesc.getSortCols().iterator(); while (sortCols.hasNext()) { String sortCol = sortCols.next().getCol(); if (crtTblDesc.getPartCols() != null) { Iterator<FieldSchema> partColsIter = crtTblDesc.getPartCols().iterator();
fieldSchemas = new ArrayList<>(); partitionColumns = new ArrayList<>(); partitionColumnNames = tblDesc.getPartColNames(); fileSinkColInfos = new ArrayList<>(); destTableIsTemporary = tblDesc.isTemporary(); destTableIsMaterialization = tblDesc.isMaterialization(); if (AcidUtils.isInsertOnlyTable(tblDesc.getTblProps(), true)) { isMmTable = isMmCtas = true; try { writeId = txnMgr.getTableWriteId(tblDesc.getDatabaseName(), tblDesc.getTableName()); tblDesc.setInitialMmWriteId(writeId); tblDesc.setCols(new ArrayList<>(fieldSchemas)); tblDesc.setPartCols(new ArrayList<>(partitionColumns)); } else if (viewDesc != null) { viewDesc.setSchema(new ArrayList<>(fieldSchemas)); destinationTable = tblDesc != null ? tblDesc.toTable(conf) : viewDesc != null ? viewDesc.toTable(conf) : null; } catch (HiveException e) { throw new SemanticException(e); if (isMmCtas) { tableDesc.setWriter(fileSinkDesc);
LOG.debug("table " + tblDesc.getTableName() + " does not exist"); table = new Table(tblDesc.getDatabaseName(), tblDesc.getTableName()); Database parentDb = db.getDatabase(tblDesc.getDatabaseName()); if (tblDesc.isExternal() && (tblDesc.getLocation() == null)) { LOG.debug("Importing in place, no emptiness check, no copying/loading"); Path dataPath = new Path(fromURI.toString(), "data"); tblDesc.setLocation(dataPath.toString()); } else { Path tablePath = null; if (tblDesc.getLocation() != null) { tablePath = new Path(tblDesc.getLocation()); } else { tablePath = wh.getTablePath(parentDb, tblDesc.getTableName());
if (tblDesc != null) { field_schemas = new ArrayList<FieldSchema>(); destTableIsTemporary = tblDesc.isTemporary(); destTableIsMaterialization = tblDesc.isMaterialization(); } else if (viewDesc != null) { field_schemas = new ArrayList<FieldSchema>(); tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas)); } else if (viewDesc != null) { viewDesc.setSchema(new ArrayList<FieldSchema>(field_schemas)); String tName = Utilities.getDbTableName(tableDesc.getTableName())[1]; try { Warehouse wh = new Warehouse(conf); tlocation = wh.getDefaultTablePath(db.getDatabase(tableDesc.getDatabaseName()), tName); } catch (MetaException|HiveException e) { throw new SemanticException(e);
pCtx.getCreateTable().getLocation() : pCtx.getCreateViewDesc().getLocation(); if (loc == null) { String protoName = null; if (pCtx.getQueryProperties().isCTAS()) { protoName = pCtx.getCreateTable().getTableName(); } else if (pCtx.getQueryProperties().isMaterializedView()) { protoName = pCtx.getCreateViewDesc().getViewName(); if (pCtx.getQueryProperties().isCTAS() && !pCtx.getCreateTable().isMaterialization()) { crtTblDesc.validate(conf);
/**
 * Registers lineage for the load destination of the current statement.
 * If a load-table descriptor is present, its source path is mapped to the
 * given operator; otherwise, for CTAS statements only, the default warehouse
 * location of the target table is mapped instead.
 *
 * @param ltd    load-table descriptor, or null when no table load is involved
 * @param output operator producing the data being loaded
 * @throws SemanticException if the default table path cannot be resolved
 */
private void handleLineage(LoadTableDesc ltd, Operator output) throws SemanticException {
  if (ltd != null) {
    queryState.getLineageState().mapDirToOp(ltd.getSourcePath(), output);
    return;
  }
  if (!queryState.getCommandType().equals(
      HiveOperation.CREATETABLE_AS_SELECT.getOperationName())) {
    return;
  }
  // CTAS with no explicit load descriptor: derive the table's default
  // warehouse location and use that as the lineage target.
  Path tlocation = null;
  String tName = Utilities.getDbTableName(tableDesc.getTableName())[1];
  try {
    Warehouse wh = new Warehouse(conf);
    tlocation = wh.getDefaultTablePath(db.getDatabase(tableDesc.getDatabaseName()),
        tName, tableDesc.isExternal());
  } catch (MetaException | HiveException e) {
    throw new SemanticException(e);
  }
  queryState.getLineageState().mapDirToOp(tlocation, output);
}
/**
 * Determines whether a create-table descriptor declares a transactional table.
 * A null descriptor, or one with no table properties, is non-transactional;
 * otherwise the decision is delegated to the properties-based overload.
 *
 * @param table create-table descriptor, may be null
 * @return true iff the descriptor's table properties mark it transactional
 */
public static boolean isTransactionalTable(CreateTableDesc table) {
  return table != null
      && table.getTblProps() != null
      && isTransactionalTable(table.getTblProps());
}
/**
 * Sets the time-partition columns and mirrors them into the table properties
 * under {@code MetastoreConstants.TIME_PART_COLUMNS} as a comma-joined list.
 *
 * @param timePartCols names of the time partition columns
 */
public void setTimePartCols(List<String> timePartCols) {
  this.timePartCols = timePartCols;
  // Lazily create the table-properties map on first use.
  if (super.getTblProps() == null) {
    super.setTblProps(new HashMap<String, String>());
  }
  String joined = StringUtils.join(this.timePartCols, ',');
  super.getTblProps().put(MetastoreConstants.TIME_PART_COLUMNS, joined);
}
/**
 * Resolves and sets the data location for a partition being imported.
 * Precedence: an explicit location on the table descriptor wins; otherwise
 * the table's existing data location is used; failing both, the default
 * warehouse path for the table is derived. The chosen target is verified to
 * be empty (subject to the replication spec) before being recorded.
 *
 * @param fs              filesystem used for the emptiness check
 * @param tblDesc         descriptor of the table being imported
 * @param table           metastore table object (may carry a data location)
 * @param wh              warehouse used to derive the default table path
 * @param replicationSpec replication context for the emptiness check
 * @param partSpec        partition descriptor whose location is set
 */
private void fixLocationInPartSpec(
    FileSystem fs, CreateTableDesc tblDesc, Table table,
    Warehouse wh, ReplicationSpec replicationSpec,
    AddPartitionDesc.OnePartitionDesc partSpec) throws MetaException, HiveException, IOException {
  Path tgtPath;
  if (tblDesc.getLocation() != null) {
    // Explicit table location: partition lives directly beneath it.
    tgtPath = new Path(tblDesc.getLocation(),
        Warehouse.makePartPath(partSpec.getPartSpec()));
  } else if (table.getDataLocation() != null) {
    // Fall back to the table's current data location.
    tgtPath = new Path(table.getDataLocation().toString(),
        Warehouse.makePartPath(partSpec.getPartSpec()));
  } else {
    // Neither is set: derive the default table path from the warehouse.
    Database parentDb = db.getDatabase(tblDesc.getDatabaseName());
    tgtPath = new Path(
        wh.getTablePath(parentDb, tblDesc.getTableName()),
        Warehouse.makePartPath(partSpec.getPartSpec()));
  }
  checkTargetLocationEmpty(fs, tgtPath, replicationSpec);
  partSpec.setLocation(tgtPath.toString());
}
String loc = pCtx.getCreateTable().getLocation(); if (loc == null) { try { String[] names = Utilities.getDbTableName( pCtx.getCreateTable().getTableName()); if (!db.databaseExists(names[0])) { throw new SemanticException("ERROR: The database " + names[0] crtTblDesc.validate(conf);