/**
 * Replays a TRUNCATE TABLE replication event on the target warehouse.
 *
 * <p>Deserializes the alter-table message from the event payload, resolves the effective
 * database/table names (the context overrides win over the payload when set), and schedules
 * a DDL truncate task that carries the originating write id. Updated replication metadata is
 * recorded before the task list is returned.
 *
 * @param context replication-load context holding the event payload, config and logging
 * @return the task list for this event (possibly wrapped with an open-txn task for migration)
 * @throws SemanticException if building the migration task chain fails
 */
@Override public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
  AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
  // Context-supplied names (from REPL LOAD ... INTO) take precedence over the event payload.
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  String actualTblName = context.isTableNameEmpty() ? msg.getTable() : context.tableName;

  // Whole-table truncate: no partition spec (null), replication-scoped so stale events are skipped.
  TruncateTableDesc truncateTableDesc = new TruncateTableDesc(
      actualDbName + "." + actualTblName,
      null, context.eventOnlyReplicationSpec());
  truncateTableDesc.setWriteId(msg.getWriteId());
  Task<DDLWork> truncateTableTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc), context.hiveConf);

  context.log.debug("Added truncate tbl task : {}:{}:{}", truncateTableTask.getId(),
      truncateTableDesc.getTableName(), truncateTableDesc.getWriteId());
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, null);
  try {
    return ReplUtils.addOpenTxnTaskForMigration(actualDbName, actualTblName,
        context.hiveConf, updatedMetadata, truncateTableTask, msg.getTableObjBefore());
  } catch (Exception e) {
    // Chain the original exception as the cause instead of flattening it to a message,
    // so the underlying stack trace survives into the SemanticException.
    throw new SemanticException(e.getMessage(), e);
  }
} }
// NOTE(review): this fragment is truncated AND garbled. It opens truncateTable(..) and, on the
// column-truncate path (getColumnIndexes() != null), builds a ColumnTruncateWork from the desc's
// column indexes / input dir / output dir, copies the list-bucketing context, and forbids mappers
// from spanning partitions. It then reads tableName/partSpec/replicationSpec from the desc and
// gates on allowOperationInReplicationScope(..). After that the text breaks down: the bare ternary
// "replicationSpec != null && replicationSpec.isInReplicationScope() ? getWriteId() : 0L);" is not
// a valid statement on its own — presumably the tail of a truncate call whose head was cut from
// this chunk — and the try/catch wrapping (HiveException with ErrorMsg.GENERIC_ERROR, preserving
// the cause) is never closed. TODO: recover the full method before making any code change here.
private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws HiveException { if (truncateTableDesc.getColumnIndexes() != null) { ColumnTruncateWork truncateWork = new ColumnTruncateWork( truncateTableDesc.getColumnIndexes(), truncateTableDesc.getInputDir(), truncateTableDesc.getOutputDir()); truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx()); truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); String tableName = truncateTableDesc.getTableName(); Map<String, String> partSpec = truncateTableDesc.getPartSpec(); ReplicationSpec replicationSpec = truncateTableDesc.getReplicationSpec(); if (!allowOperationInReplicationScope(db, tableName, partSpec, replicationSpec)) { replicationSpec != null && replicationSpec.isInReplicationScope() ? truncateTableDesc.getWriteId() : 0L); } catch (Exception e) { throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
// NOTE(review): truncated head of truncateTable(..) — the method body is cut off mid-stream.
// What is visible: on the column-truncate path (desc carries non-null column indexes) it builds a
// ColumnTruncateWork from the desc's column indexes, input dir and output dir, copies the
// list-bucketing context onto it, sets mapperCannotSpanPartns(true), creates a DriverContext, and
// reads the table name and partition spec from the desc. Everything after that (task execution,
// the non-column truncate path, the return value) is missing from this chunk — do not infer it.
private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws HiveException { if (truncateTableDesc.getColumnIndexes() != null) { ColumnTruncateWork truncateWork = new ColumnTruncateWork( truncateTableDesc.getColumnIndexes(), truncateTableDesc.getInputDir(), truncateTableDesc.getOutputDir()); truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx()); truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); String tableName = truncateTableDesc.getTableName(); Map<String, String> partSpec = truncateTableDesc.getPartSpec();
// NOTE(review): statement fragment — the enclosing method is not visible in this chunk, and the
// "if (truncateTblDesc.mayNeedWriteId()) {" brace is never closed here, so the fragment is cut off.
// What is visible: builds a TruncateTableDesc with (tableName, partSpec, null, table) — this
// 4-arg form differs from the 2-arg form used elsewhere in this file; when mayNeedWriteId() is
// true it registers the desc via setAcidDdlDesc (presumably to allocate a write id later — TODO
// confirm against the full method). It then wires a defensive copy of columnIndexes, the old
// table/partition location as input dir, the list-bucketing context, a temp dir as output, and
// prepares a LoadTableDesc to load the rewritten data back (empty part spec map when partSpec is null).
TruncateTableDesc truncateTblDesc = new TruncateTableDesc(tableName, partSpec, null, table); if (truncateTblDesc.mayNeedWriteId()) { setAcidDdlDesc(truncateTblDesc); truncateTblDesc.setColumnIndexes(new ArrayList<Integer>(columnIndexes)); truncateTblDesc.setInputDir(oldTblPartLoc); truncateTblDesc.setLbCtx(lbCtx); truncateTblDesc.setOutputDir(queryTmpdir); LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc, partSpec == null ? new HashMap<>() : partSpec);
// NOTE(review): statement fragment — the enclosing method is not visible in this chunk; the local
// names (tableName, partSpec, columnIndexes, oldTblPartLoc, lbCtx, queryTmpdir, tblDesc) come from
// that unseen scope. What is visible: builds a TruncateTableDesc via the 2-arg (tableName, partSpec)
// constructor (contrast with the 4-arg form used in another fragment of this file), copies
// columnIndexes defensively, points input at the old table/partition location and output at a temp
// dir, carries the list-bucketing context, then prepares a LoadTableDesc to reload the truncated
// data (substituting an empty map when partSpec is null).
TruncateTableDesc truncateTblDesc = new TruncateTableDesc(tableName, partSpec); truncateTblDesc.setColumnIndexes(new ArrayList<Integer>(columnIndexes)); truncateTblDesc.setInputDir(oldTblPartLoc); truncateTblDesc.setLbCtx(lbCtx); truncateTblDesc.setOutputDir(queryTmpdir); LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc, partSpec == null ? new HashMap<String, String>() : partSpec);
// NOTE(review): truncated head of truncateTable(..) — identical to another fragment in this chunk
// (likely the same method captured twice; consider deduplicating at the source). Visible behavior:
// on the column-truncate path it builds a ColumnTruncateWork from the desc's column indexes, input
// dir and output dir, copies the list-bucketing context, sets mapperCannotSpanPartns(true),
// creates a DriverContext and reads the table name and partition spec. The remainder of the
// method (execution and return) is cut off from this chunk.
private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws HiveException { if (truncateTableDesc.getColumnIndexes() != null) { ColumnTruncateWork truncateWork = new ColumnTruncateWork( truncateTableDesc.getColumnIndexes(), truncateTableDesc.getInputDir(), truncateTableDesc.getOutputDir()); truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx()); truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); String tableName = truncateTableDesc.getTableName(); Map<String, String> partSpec = truncateTableDesc.getPartSpec();
// NOTE(review): statement fragment — duplicate of another fragment in this chunk; the enclosing
// method is not visible, and all locals referenced here belong to that unseen scope. Visible
// behavior: constructs a TruncateTableDesc(tableName, partSpec), defensively copies columnIndexes,
// sets input dir to the old table/partition location, carries the list-bucketing context, writes
// output to a temp dir, and prepares a LoadTableDesc for reloading (empty part-spec map when
// partSpec is null).
TruncateTableDesc truncateTblDesc = new TruncateTableDesc(tableName, partSpec); truncateTblDesc.setColumnIndexes(new ArrayList<Integer>(columnIndexes)); truncateTblDesc.setInputDir(oldTblPartLoc); truncateTblDesc.setLbCtx(lbCtx); truncateTblDesc.setOutputDir(queryTmpdir); LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc, partSpec == null ? new HashMap<String, String>() : partSpec);
// NOTE(review): statement fragment — enclosing method not visible; actualDbName/actualTblName,
// partSpec, msg, readEntitySet/writeEntitySet and updatedMetadata come from that unseen scope.
// Visible behavior: partition-level analogue of the whole-table truncate replay — builds a
// replication-scoped TruncateTableDesc for "db.table" WITH a partition spec, stamps the event's
// write id, schedules the DDL task via TaskFactory, logs task id/table/write id at debug, and
// records the event's end id plus db/table/partSpec into updatedMetadata.
TruncateTableDesc truncateTableDesc = new TruncateTableDesc( actualDbName + "." + actualTblName, partSpec, context.eventOnlyReplicationSpec()); truncateTableDesc.setWriteId(msg.getWriteId()); Task<DDLWork> truncatePtnTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, truncateTableDesc), context.hiveConf); context.log.debug("Added truncate ptn task : {}:{}:{}", truncatePtnTask.getId(), truncateTableDesc.getTableName(), truncateTableDesc.getWriteId()); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, actualTblName, partSpec);