/**
 * Builds the DDL task that creates the entity described by this object.
 *
 * @param inputs  read entities tracked for authorization/lineage
 * @param outputs write entities tracked for authorization/lineage
 * @param conf    Hive configuration used to instantiate the task
 * @return a create-table or create-view task, or {@code null} when the
 *         descriptor type is neither TABLE nor VIEW
 */
public Task<? extends Serializable> getCreateTableTask(HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs, HiveConf conf) {
  switch (getDescType()) {
  case TABLE:
    return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf);
  case VIEW:
    return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf);
  default:
    // Any other descriptor type has no create task.
    return null;
  }
}
/**
 * Adds a root task that executes "ABORT TRANSACTIONS".
 *
 * @param ast the parsed command tree; each child token is one transaction id
 * @throws SemanticException if a transaction id cannot be parsed as a long
 */
private void analyzeAbortTxns(ASTNode ast) throws SemanticException {
  int numChildren = ast.getChildCount();
  List<Long> txnids = new ArrayList<>(numChildren);
  for (int i = 0; i < numChildren; i++) {
    String txnText = ast.getChild(i).getText();
    try {
      txnids.add(Long.parseLong(txnText));
    } catch (NumberFormatException e) {
      // The Javadoc promises SemanticException on parse failure; without this
      // wrap, the unchecked NumberFormatException would leak to the caller.
      throw new SemanticException("Invalid transaction id: " + txnText, e);
    }
  }
  AbortTxnsDesc desc = new AbortTxnsDesc(txnids);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
/**
 * Converts pending directory-copy work items into tasks, stopping as soon as
 * the tracker refuses more tasks or the work iterator is exhausted.
 *
 * @param tracker caps how many tasks may be created in this batch
 * @return the tasks created during this call (possibly empty)
 */
List<Task<? extends Serializable>> tasks(TaskTracker tracker) {
  List<Task<? extends Serializable>> created = new ArrayList<>();
  Iterator<DirCopyWork> pending = work.getPathsToCopyIterator();
  while (tracker.canAddMoreTasks() && pending.hasNext()) {
    DirCopyWork unit = pending.next();
    Task<DirCopyWork> copyTask = TaskFactory.get(unit, conf);
    created.add(copyTask);
    tracker.addTask(copyTask);
    LOG.debug("added task for {}", unit);
  }
  return created;
}
/**
 * Queues pre-insert and insert DDL work for the given table: the pre-insert
 * task is added as a new root, while the insert task is chained as a child of
 * the root tasks that existed before this call.
 *
 * @param table     target table of the insert
 * @param overwrite whether the insert overwrites existing data
 */
private void createInsertDesc(Table table, boolean overwrite) {
  // Snapshot the current roots BEFORE adding the pre-insert task, so the
  // insert task is parented only on the pre-existing roots.
  Task<? extends Serializable>[] parents =
      this.rootTasks.toArray(new Task[this.rootTasks.size()]);
  PreInsertTableDesc preInsertDesc = new PreInsertTableDesc(table, overwrite);
  InsertTableDesc insertDesc = new InsertTableDesc(table, overwrite);
  this.rootTasks.add(
      TaskFactory.get(new DDLWork(getInputs(), getOutputs(), preInsertDesc), conf));
  TaskFactory.getAndMakeChild(
      new DDLWork(getInputs(), getOutputs(), insertDesc), conf, parents);
}
/**
 * Handles ALTER INDEX ... SET IDXPROPERTIES by queueing a DDL task that adds
 * the supplied properties to the named index.
 *
 * @param ast the parsed command tree (qualified table, index name, prop list)
 * @throws SemanticException if the table name cannot be resolved
 */
private void analyzeAlterIndexProps(ASTNode ast) throws SemanticException {
  String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
  String indexName = unescapeIdentifier(ast.getChild(1).getText());
  HashMap<String, String> props = getProps((ASTNode) (ast.getChild(2)).getChild(0));

  AlterIndexDesc addProps = new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
  addProps.setProps(props);
  addProps.setIndexName(indexName);
  addProps.setBaseTableName(getDotName(qualified));

  rootTasks.add(TaskFactory.get(new DDLWork(addProps), conf));
}
@Override public Task<? extends Serializable> createShowRolePrincipalsTask(ASTNode ast, Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { String roleName; if (ast.getChildCount() == 1) { roleName = ast.getChild(0).getText(); } else { // the parser should not allow this throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS"); } RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleOperation.SHOW_ROLE_PRINCIPALS, null); roleDDLDesc.setResFile(resFile.toString()); return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc)); }
/** Builds the task that executes CREATE ROLE for the role named in the AST. */
@Override
public Task<? extends Serializable> createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) {
  String role = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  RoleDDLDesc createRole =
      new RoleDDLDesc(role, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.CREATE_ROLE, null);
  return TaskFactory.get(new DDLWork(inputs, outputs, createRole));
}

@Override
/**
 * Handles ALTER TABLE ... DROP CONSTRAINT by queueing the corresponding
 * DDL task.
 *
 * @param ast       the parsed command tree; child 0 is the constraint name
 * @param tableName fully qualified name of the table being altered
 * @throws SemanticException declared for interface consistency; not thrown here
 */
private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName)
    throws SemanticException {
  String constraintName = unescapeIdentifier(ast.getChild(0).getText());
  AlterTableDesc dropConstraint =
      new AlterTableDesc(tableName, constraintName, (ReplicationSpec) null);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropConstraint)));
}
/**
 * Queues a DDL task that drops the COLUMN_STATS_ACCURATE property from the
 * given table, since column stats are not maintained for non-native tables.
 *
 * @param dbName    database containing the table
 * @param tableName unqualified table name
 * @throws SemanticException declared for interface consistency; not thrown here
 */
@SuppressWarnings("unchecked")
private void setStatsForNonNativeTable(String dbName, String tableName)
    throws SemanticException {
  String qualifiedName = DDLSemanticAnalyzer.getDotName(new String[] { dbName, tableName });

  HashMap<String, String> propsToDrop = new HashMap<>();
  propsToDrop.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null);

  AlterTableDesc dropStats = new AlterTableDesc(AlterTableTypes.DROPPROPS, null, false);
  dropStats.setOldName(qualifiedName);
  dropStats.setProps(propsToDrop);
  dropStats.setDropIfExists(true);

  this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropStats)));
}
/**
 * Appends a REPL state-logging task marking the end of the load: it becomes
 * the sole root when no tasks exist yet, otherwise it is attached after every
 * leaf of the existing task DAG.
 *
 * @throws SemanticException declared for interface consistency; not thrown here
 */
private void createEndReplLogTask(Context context, Scope scope, ReplLogger replLogger)
    throws SemanticException {
  Database dbInMetadata =
      work.databaseEvent(context.hiveConf).dbInMetadata(work.dbNameToLoadIn);
  ReplStateLogWork logWork = new ReplStateLogWork(replLogger, dbInMetadata.getParameters());
  Task<ReplStateLogWork> logTask = TaskFactory.get(logWork);
  if (scope.rootTasks.isEmpty()) {
    scope.rootTasks.add(logTask);
    return;
  }
  DAGTraversal.traverse(scope.rootTasks,
      new AddDependencyToLeaves(Collections.singletonList(logTask)));
}
/** Builds the task that executes DROP ROLE for the role named in the AST. */
@Override
public Task<? extends Serializable> createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) {
  String role = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  RoleDDLDesc dropRole =
      new RoleDDLDesc(role, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.DROP_ROLE, null);
  return TaskFactory.get(new DDLWork(inputs, outputs, dropRole));
}

@Override
/**
 * Creates a task that runs ALTER DATABASE with the given properties.
 *
 * @param dbName   database to alter
 * @param props    properties to set on the database
 * @param hiveConf configuration used to instantiate the task
 * @return the alter-database DDL task
 */
private static Task<? extends Serializable> alterDbTask(String dbName,
    Map<String, String> props, HiveConf hiveConf) {
  AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, props, null);
  return TaskFactory.get(
      new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc), hiveConf);
}
/**
 * Adds a root task that executes "ABORT TRANSACTIONS".
 *
 * @param ast the parsed command tree; each child token is one transaction id
 * @throws SemanticException if a transaction id cannot be parsed as a long
 */
private void analyzeAbortTxns(ASTNode ast) throws SemanticException {
  int numChildren = ast.getChildCount();
  List<Long> txnids = new ArrayList<>(numChildren);
  for (int i = 0; i < numChildren; i++) {
    String txnText = ast.getChild(i).getText();
    try {
      txnids.add(Long.parseLong(txnText));
    } catch (NumberFormatException e) {
      // The Javadoc promises SemanticException on parse failure; without this
      // wrap, the unchecked NumberFormatException would leak to the caller.
      throw new SemanticException("Invalid transaction id: " + txnText, e);
    }
  }
  AbortTxnsDesc desc = new AbortTxnsDesc(txnids);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
}
/**
 * Queues a DDL task that drops the COLUMN_STATS_ACCURATE property from the
 * given table, since column stats are not maintained for non-native tables.
 *
 * @param tab the non-native table whose stats property is removed
 * @throws SemanticException declared for interface consistency; not thrown here
 */
@SuppressWarnings("unchecked")
private void setStatsForNonNativeTable(Table tab) throws SemanticException {
  String qualifiedName =
      DDLSemanticAnalyzer.getDotName(new String[] { tab.getDbName(), tab.getTableName() });

  HashMap<String, String> propsToDrop = new HashMap<>();
  propsToDrop.put(StatsSetupConst.COLUMN_STATS_ACCURATE, null);

  AlterTableDesc dropStats = new AlterTableDesc(AlterTableTypes.DROPPROPS, null, false);
  dropStats.setOldName(qualifiedName);
  dropStats.setProps(propsToDrop);
  dropStats.setDropIfExists(true);

  this.rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropStats), conf));
}
/**
 * Dispatches function-related statements: CREATE FUNCTION, DROP FUNCTION and
 * RELOAD FUNCTION. Any other token type is silently ignored.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if statement analysis fails
 */
@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
  switch (ast.getType()) {
  case HiveParser.TOK_CREATEFUNCTION:
    analyzeCreateFunction(ast);
    break;
  case HiveParser.TOK_DROPFUNCTION:
    analyzeDropFunction(ast);
    break;
  case HiveParser.TOK_RELOADFUNCTION:
    rootTasks.add(TaskFactory.get(new FunctionWork(new ReloadFunctionDesc())));
    break;
  default:
    // Unrecognized token types were ignored by the original if/else chain too.
    break;
  }
  LOG.info("analyze done");
}
/** Builds the task that executes DROP ROLE for the role named in the AST. */
@Override
public Task<? extends Serializable> createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) {
  String role = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  RoleDDLDesc dropRole =
      new RoleDDLDesc(role, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.DROP_ROLE, null);
  return TaskFactory.get(new DDLWork(inputs, outputs, dropRole), conf);
}

@Override
/**
 * Builds the task for SHOW CURRENT ROLES, writing results to resFile.
 *
 * @throws SemanticException declared by the interface; not thrown here
 */
@Override
public Task<? extends Serializable> createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs, Path resFile) throws SemanticException {
  RoleDDLDesc showCurrentRole =
      new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE);
  showCurrentRole.setResFile(resFile.toString());
  return TaskFactory.get(new DDLWork(inputs, outputs, showCurrentRole));
}
/**
 * Registers the target database as a DDL_NO_LOCK write output and queues the
 * ALTER DATABASE task described by alterDesc.
 *
 * @param alterDesc descriptor naming the database and the change to apply
 * @throws SemanticException if the database cannot be resolved
 */
private void addAlterDbDesc(AlterDatabaseDesc alterDesc) throws SemanticException {
  Database targetDb = getDatabase(alterDesc.getDatabaseName());
  outputs.add(new WriteEntity(targetDb, WriteEntity.WriteType.DDL_NO_LOCK));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
/** Builds the task that executes CREATE ROLE for the role named in the AST. */
@Override
public Task<? extends Serializable> createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) {
  String role = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  RoleDDLDesc createRole =
      new RoleDDLDesc(role, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.CREATE_ROLE, null);
  return TaskFactory.get(new DDLWork(inputs, outputs, createRole), conf);
}

@Override