/**
 * Add a task to execute "ABORT TRANSACTIONS".
 *
 * @param ast the parsed command tree; each child is a transaction id literal
 * @throws SemanticException if a transaction id cannot be parsed as a long
 */
private void analyzeAbortTxns(ASTNode ast) throws SemanticException {
  List<Long> txnids = new ArrayList<Long>();
  int numChildren = ast.getChildCount();
  for (int i = 0; i < numChildren; i++) {
    String txnIdText = ast.getChild(i).getText();
    try {
      txnids.add(Long.parseLong(txnIdText));
    } catch (NumberFormatException e) {
      // Surface a bad literal as the declared SemanticException instead of
      // letting the unchecked NumberFormatException escape the analyzer.
      throw new SemanticException("Invalid transaction id: " + txnIdText, e);
    }
  }
  AbortTxnsDesc desc = new AbortTxnsDesc(txnids);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
/**
 * Analyze a REVOKE statement by delegating to the authorization task factory.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeRevoke(ASTNode ast) throws SemanticException {
  Task<?> revokeTask =
      hiveAuthorizationTaskFactory.createRevokeTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (revokeTask != null) {
    rootTasks.add(revokeTask);
  }
}
/**
 * Analyze SET ROLE / SHOW CURRENT ROLES. Zero children means
 * "SHOW CURRENT ROLES"; one child carries the role name for "SET ROLE".
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the node has more than one child
 */
private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
  int childCount = ast.getChildCount();
  if (childCount == 0) {
    // SHOW CURRENT ROLES: results are written to a temp file and fetched.
    ctx.setResFile(ctx.getLocalTmpPath());
    rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
        getInputs(), getOutputs(), ctx.getResFile()));
    setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
  } else if (childCount == 1) {
    String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
        roleName, getInputs(), getOutputs()));
  } else {
    throw new SemanticException(
        "Internal error. ASTNode expected to have 0 or 1 child. " + ast.dump());
  }
}
/**
 * Analyze ALTER TABLE ... DROP CONSTRAINT.
 *
 * @param ast the parsed command tree; the first child is the constraint name
 * @param tableName the table the constraint is dropped from
 * @throws SemanticException if analysis fails
 */
private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName)
    throws SemanticException {
  String constraintName = unescapeIdentifier(ast.getChild(0).getText());
  // No replication spec applies to this drop.
  AlterTableDesc dropDesc =
      new AlterTableDesc(tableName, constraintName, (ReplicationSpec) null);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDesc)));
}
/**
 * Analyze GRANT ROLE / REVOKE ROLE, delegating to the authorization task factory.
 *
 * @param grant true for GRANT ROLE, false for REVOKE ROLE
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeGrantRevokeRole(boolean grant, ASTNode ast) throws SemanticException {
  Task<?> roleTask = grant
      ? hiveAuthorizationTaskFactory.createGrantRoleTask(ast, getInputs(), getOutputs())
      : hiveAuthorizationTaskFactory.createRevokeRoleTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (roleTask != null) {
    rootTasks.add(roleTask);
  }
}
/**
 * Analyze SET ROLE / SHOW CURRENT ROLES based on the child count of the node.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the node has more than one child
 */
private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
  switch (ast.getChildCount()) {
    case 0:
      // SHOW CURRENT ROLES: stage results in a temp file and fetch them.
      ctx.setResFile(ctx.getLocalTmpPath());
      rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
          getInputs(), getOutputs(), ctx.getResFile()));
      setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
      break;
    case 1:
      // SET ROLE <name>: the single child is the role identifier.
      rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
          BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()),
          getInputs(), getOutputs()));
      break;
    default:
      throw new SemanticException(
          "Internal error. ASTNode expected to have 0 or 1 child. " + ast.dump());
  }
}
/**
 * Add a task to execute "ABORT TRANSACTIONS".
 *
 * @param ast the parsed command tree; each child is a transaction id literal
 * @throws SemanticException if a transaction id cannot be parsed as a long
 */
private void analyzeAbortTxns(ASTNode ast) throws SemanticException {
  List<Long> txnids = new ArrayList<Long>();
  int numChildren = ast.getChildCount();
  for (int i = 0; i < numChildren; i++) {
    String txnIdText = ast.getChild(i).getText();
    try {
      txnids.add(Long.parseLong(txnIdText));
    } catch (NumberFormatException e) {
      // Surface a bad literal as the declared SemanticException instead of
      // letting the unchecked NumberFormatException escape the analyzer.
      throw new SemanticException("Invalid transaction id: " + txnIdText, e);
    }
  }
  AbortTxnsDesc desc = new AbortTxnsDesc(txnids);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
}
/**
 * Analyze a GRANT statement by delegating to the authorization task factory.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeGrant(ASTNode ast) throws SemanticException {
  Task<?> grantTask =
      hiveAuthorizationTaskFactory.createGrantTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (grantTask != null) {
    rootTasks.add(grantTask);
  }
}
/**
 * Register the database named in the descriptor as a write entity and queue
 * the corresponding ALTER DATABASE DDL task.
 *
 * @param alterDesc descriptor naming the database to alter
 * @throws SemanticException if the database cannot be resolved
 */
private void addAlterDbDesc(AlterDatabaseDesc alterDesc) throws SemanticException {
  Database targetDb = getDatabase(alterDesc.getDatabaseName());
  outputs.add(new WriteEntity(targetDb, WriteEntity.WriteType.DDL_NO_LOCK));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
/**
 * Analyze a DROP ROLE statement by delegating to the authorization task factory.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeDropRole(ASTNode ast) throws SemanticException {
  Task<?> dropRoleTask =
      hiveAuthorizationTaskFactory.createDropRoleTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (dropRoleTask != null) {
    rootTasks.add(dropRoleTask);
  }
}
/**
 * Analyze ALTER TABLE ... DROP CONSTRAINT.
 *
 * @param ast the parsed command tree; the first child is the constraint name
 * @param tableName the table the constraint is dropped from
 * @throws SemanticException if analysis fails
 */
private void analyzeAlterTableDropConstraint(ASTNode ast, String tableName)
    throws SemanticException {
  String constraintName = unescapeIdentifier(ast.getChild(0).getText());
  AlterTableDesc dropDesc = new AlterTableDesc(tableName, constraintName);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDesc), conf));
}
/**
 * Analyze a CREATE ROLE statement by delegating to the authorization task factory.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeCreateRole(ASTNode ast) throws SemanticException {
  Task<?> createRoleTask =
      hiveAuthorizationTaskFactory.createCreateRoleTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (createRoleTask != null) {
    rootTasks.add(createRoleTask);
  }
}
/**
 * Add a task to execute "KILL QUERY".
 *
 * @param ast the parsed command tree; each child is a quoted query id
 * @throws SemanticException if analysis fails
 */
private void analyzeKillQuery(ASTNode ast) throws SemanticException {
  List<String> queryIds = new ArrayList<String>();
  int childCount = ast.getChildCount();
  for (int idx = 0; idx < childCount; idx++) {
    // Query ids arrive as quoted literals; strip the quotes before use.
    queryIds.add(stripQuotes(ast.getChild(idx).getText()));
  }
  addServiceOutput();
  KillQueryDesc killDesc = new KillQueryDesc(queryIds);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), killDesc)));
}
/**
 * Analyze GRANT ROLE / REVOKE ROLE, delegating to the authorization task factory.
 *
 * @param grant true for GRANT ROLE, false for REVOKE ROLE
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeGrantRevokeRole(boolean grant, ASTNode ast) throws SemanticException {
  Task<? extends Serializable> roleTask = grant
      ? hiveAuthorizationTaskFactory.createGrantRoleTask(ast, getInputs(), getOutputs())
      : hiveAuthorizationTaskFactory.createRevokeRoleTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (roleTask != null) {
    rootTasks.add(roleTask);
  }
}
/**
 * Register the database named in the descriptor as a write entity and queue
 * the corresponding ALTER DATABASE DDL task.
 *
 * @param alterDesc descriptor naming the database to alter
 * @throws SemanticException if the database cannot be resolved
 */
private void addAlterDbDesc(AlterDatabaseDesc alterDesc) throws SemanticException {
  Database targetDb = getDatabase(alterDesc.getDatabaseName());
  outputs.add(new WriteEntity(targetDb, WriteEntity.WriteType.DDL_NO_LOCK));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc), conf));
}
/**
 * Analyze a GRANT statement by delegating to the authorization task factory.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeGrant(ASTNode ast) throws SemanticException {
  Task<? extends Serializable> grantTask =
      hiveAuthorizationTaskFactory.createGrantTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (grantTask != null) {
    rootTasks.add(grantTask);
  }
}
/**
 * Analyze DROP POOL for workload management.
 *
 * @param ast the parsed command tree; children are the resource plan name and pool path
 * @throws SemanticException if the node does not have exactly two children
 */
private void analyzeDropPool(ASTNode ast) throws SemanticException {
  if (ast.getChildCount() != 2) {
    throw new SemanticException("Invalid syntax for drop pool.");
  }
  String resourcePlanName = unescapeIdentifier(ast.getChild(0).getText());
  String pathOfPool = poolPath(ast.getChild(1));
  DropWMPoolDesc dropPoolDesc = new DropWMPoolDesc(resourcePlanName, pathOfPool);
  addServiceOutput();
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropPoolDesc)));
}
/**
 * Analyze a CREATE ROLE statement by delegating to the authorization task factory.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeCreateRole(ASTNode ast) throws SemanticException {
  Task<? extends Serializable> createRoleTask =
      hiveAuthorizationTaskFactory.createCreateRoleTask(ast, getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (createRoleTask != null) {
    rootTasks.add(createRoleTask);
  }
}
/**
 * Analyze USE &lt;database&gt;: record a lock-free read of the target database
 * and queue the switch-database DDL task.
 *
 * @param ast the parsed command tree; the first child is the database name
 * @throws SemanticException if the database cannot be resolved
 */
private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
  String targetDbName = unescapeIdentifier(ast.getChild(0).getText());
  Database targetDb = getDatabase(targetDbName, true);
  ReadEntity dbReadEntity = new ReadEntity(targetDb);
  // Switching databases only reads metadata; no lock is required on the entity.
  dbReadEntity.noLockNeeded();
  inputs.add(dbReadEntity);
  SwitchDatabaseDesc switchDesc = new SwitchDatabaseDesc(targetDbName);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDesc)));
}
/**
 * Analyze SHOW ROLES: queue the task produced by the authorization task factory
 * and set up a fetch task to read the role names back.
 *
 * @param ast the parsed command tree
 * @throws SemanticException if the statement cannot be analyzed
 */
private void analyzeShowRoles(ASTNode ast) throws SemanticException {
  // Cast is safe for the DDL task the factory produces; suppressed at minimal scope.
  @SuppressWarnings("unchecked")
  Task<DDLWork> showRolesTask = (Task<DDLWork>) hiveAuthorizationTaskFactory
      .createShowRolesTask(ast, ctx.getResFile(), getInputs(), getOutputs());
  // The factory may return null when no task needs to run.
  if (showRolesTask != null) {
    rootTasks.add(showRolesTask);
    setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
  }
}