@Override
public String getFullTableName() {
  return getOldName();
}
private int dropConstraint(Hive db, AlterTableDesc alterTbl) throws SemanticException, HiveException {
  try {
    db.dropConstraint(Utilities.getDatabaseName(alterTbl.getOldName()),
        Utilities.getTableName(alterTbl.getOldName()),
        alterTbl.getConstraintName());
  } catch (NoSuchObjectException e) {
    throw new HiveException(e);
  }
  return 0;
}
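// For comparison, a minimal sketch (not taken from the Hive source): the same
// database/table split can be done once with Utilities.getDbTableName, which
// returns a {database, table} pair as used by the rename snippet further below.
// The local variable name is illustrative only.
String[] constraintTarget = Utilities.getDbTableName(alterTbl.getOldName()); // {database, table}
db.dropConstraint(constraintTarget[0], constraintTarget[1], alterTbl.getConstraintName());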
Table tbl = db.getTable(alterTbl.getOldName());
// ... fragment (apparently the tail of an error message built from the partition-spec keys):
//     StringUtils.join(alterTbl.getPartSpec().keySet(), ',') + " for table " + alterTbl.getOldName());
// ...
  db.alterTable(alterTbl.getOldName(), tbl, alterTbl.getIsCascade(), alterTbl.getEnvironmentContext());
} else {
  db.alterPartitions(tbl.getTableName(), allPartitions, alterTbl.getEnvironmentContext());
String[] names = Utilities.getDbTableName(alterTbl.getOldName());
if (Utils.isBootstrapDumpInProgress(db, names[0])) {
  LOG.error("DDLTask: Rename Table not allowed as bootstrap dump in progress");
  // ... fragment: the guard's failure action is not included in this excerpt
}
// ...
Table tbl = db.getTable(alterTbl.getOldName());
// ... fragment (apparently the tail of an error message built from the partition-spec keys):
//     StringUtils.join(alterTbl.getPartSpec().keySet(), ',') + " for table " + alterTbl.getOldName());
// ...
  db.alterTable(alterTbl.getOldName(), tbl, alterTbl.getIsCascade(), environmentContext, true);
} else {
// Column-change validation (fragments; elided context is marked with "// ...").
// ... fragment: part of the expression that computes isOrcSchemaEvolution:
//     isSchemaEvolutionEnabled(tbl);
if (isOrcSchemaEvolution && (first || (afterCol != null && !afterCol.trim().isEmpty()))) {
  // Reordering columns (FIRST / AFTER <col>) is rejected when ORC schema evolution is enabled.
  throw new HiveException(ErrorMsg.CANNOT_REORDER_COLUMNS, alterTbl.getOldName());
}
// ...
    && !serializationLib.equals(ParquetHiveSerDe.class.getName())
    && !serializationLib.equals(OrcSerde.class.getName())) {
  // REPLACE COLUMNS is only allowed for a limited set of SerDes.
  throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName());
}
// ...
  throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, alterTbl.getOldName());
// ...
  LOG.warn("Cannot drop columns from a partitioned parquet table without the CASCADE option");
  throw new HiveException(ErrorMsg.REPLACE_CANNOT_DROP_COLUMNS, alterTbl.getOldName());
// ...
    !serdeName.equalsIgnoreCase(OrcSerde.class.getName())) {
  // Changing the SerDe of an ORC table is rejected.
  throw new HiveException(ErrorMsg.CANNOT_CHANGE_SERDE, OrcSerde.class.getSimpleName(), alterTbl.getOldName());
}
// ...
    sd.getInputFormat().equals(OrcInputFormat.class.getName())
    && !alterTbl.getInputFormat().equals(OrcInputFormat.class.getName())) {
  // Changing the file format of an ORC table away from ORC is rejected.
  throw new HiveException(ErrorMsg.CANNOT_CHANGE_FILEFORMAT, "ORC", alterTbl.getOldName());
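// A compact, self-contained restatement of the last guard above (a sketch only;
// the opening "if (" is missing from the fragment, so the full original condition
// may differ): once a table stores data as ORC, ALTER TABLE may not change its
// file format away from ORC.
if (sd.getInputFormat().equals(OrcInputFormat.class.getName())
    && !alterTbl.getInputFormat().equals(OrcInputFormat.class.getName())) {
  throw new HiveException(ErrorMsg.CANNOT_CHANGE_FILEFORMAT, "ORC", alterTbl.getOldName());
}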
if (alterTable != null) {
  Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
      Utilities.getDbTableName(alterTable.getOldName())[1], false); // false: do not throw if the table is missing
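// Illustrative alternative (not the original code): honor a db-qualified old name
// rather than always using the session's current database. This assumes
// Utilities.getDbTableName defaults the database element to the current session
// database for unqualified names, matching its use in the rename snippet above.
String[] dbTab = Utilities.getDbTableName(alterTable.getOldName());
Table resolved = hive.getTable(dbTab[0], dbTab[1], false); // false: do not throw if the table is missing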
if (!allowOperationInReplicationScope(db, alterTbl.getOldName(), null, alterTbl.getReplicationSpec())) {
  // The replicated table already carries a newer state, so this ALTER is a no-op.
  LOG.debug("DDLTask: Alter Table is skipped as table {} is newer than update", alterTbl.getOldName());
  return 0;
}
Table tbl = db.getTable(alterTbl.getOldName());
// ... fragment (apparently the tail of an error message built from the partition-spec keys):
//     StringUtils.join(alterTbl.getPartSpec().keySet(), ',') + " for table " + alterTbl.getOldName());
// ...
  db.alterTable(alterTbl.getOldName(), tbl, alterTbl.getIsCascade());
} else {
  db.alterPartitions(tbl.getTableName(), allPartitions);
    && !serializationLib.equals(DynamicSerDe.class.getName())
    && !serializationLib.equals(ParquetHiveSerDe.class.getName())) {
  // REPLACE COLUMNS is only allowed for a limited set of SerDes.
  throw new HiveException(ErrorMsg.CANNOT_REPLACE_COLUMNS, alterTbl.getOldName());
Table tbl = db.getTable(alterTbl.getOldName());
// ...
  db.alterTable(alterTbl.getOldName(), tbl);
} else {
  db.alterPartition(tbl.getTableName(), part);