private String buildTriggerExpression(ASTNode ast) throws SemanticException { if (ast.getType() != HiveParser.TOK_TRIGGER_EXPRESSION || ast.getChildCount() == 0) { throw new SemanticException("Invalid trigger expression."); } StringBuilder builder = new StringBuilder(); for (int i = 0; i < ast.getChildCount(); ++i) { builder.append(ast.getChild(i).getText()); // Don't strip quotes. builder.append(' '); } builder.deleteCharAt(builder.length() - 1); return builder.toString(); }
private void init(ASTNode node){ // -> ^(TOK_REPLICATION $replId $isMetadataOnly) isInReplicationScope = true; eventId = PlanUtils.stripQuotes(node.getChild(0).getText()); if (node.getChildCount() > 1){ if (node.getChild(1).getText().toLowerCase().equals("metadata")) { isMetadataOnly= true; } } }
/**
 * Analyzes a DROP MAPPING statement: validates the child count, unpacks the
 * resource plan name, entity type and entity name, and queues the DDL task.
 *
 * @param ast the parsed DROP MAPPING subtree (plan name, entity type, entity name)
 * @throws SemanticException if the subtree does not have exactly three children
 */
private void analyzeDropMapping(ASTNode ast) throws SemanticException {
  if (ast.getChildCount() != 3) {
    throw new SemanticException("Invalid syntax for drop mapping.");
  }
  final String planName = unescapeIdentifier(ast.getChild(0).getText());
  final String entityKind = ast.getChild(1).getText();
  final String entity = PlanUtils.stripQuotes(ast.getChild(2).getText());
  WMMapping mapping = new WMMapping(planName, entityKind, entity);
  DropWMMappingDesc dropDesc = new DropWMMappingDesc(mapping);
  addServiceOutput();
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDesc)));
}
/**
 * Add a task to execute "ABORT TRANSACTIONS".
 *
 * @param ast The parsed command tree; each child token is a transaction id.
 * @throws SemanticException Parsing failed
 */
private void analyzeAbortTxns(ASTNode ast) throws SemanticException {
  int numChildren = ast.getChildCount();
  List<Long> txnids = new ArrayList<>(numChildren);
  for (int i = 0; i < numChildren; i++) {
    String txnText = ast.getChild(i).getText();
    try {
      txnids.add(Long.parseLong(txnText));
    } catch (NumberFormatException e) {
      // Honor the declared contract: surface a malformed transaction id as the
      // documented SemanticException instead of leaking the unchecked
      // NumberFormatException from Long.parseLong.
      throw new SemanticException("Invalid transaction id: " + txnText, e);
    }
  }
  AbortTxnsDesc desc = new AbortTxnsDesc(txnids);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
/**
 * Builds a varchar type info from a type AST whose single child is the
 * declared length.
 *
 * @param node the varchar type subtree
 * @return the corresponding VarcharTypeInfo
 * @throws SemanticException if there is not exactly one parameter, or the
 *         length is not a valid integer
 */
public static VarcharTypeInfo getVarcharTypeInfo(ASTNode node) throws SemanticException {
  if (node.getChildCount() != 1) {
    throw new SemanticException("Bad params for type varchar");
  }
  String lengthStr = node.getChild(0).getText();
  try {
    return TypeInfoFactory.getVarcharTypeInfo(Integer.parseInt(lengthStr));
  } catch (NumberFormatException e) {
    // Report a non-numeric length through the declared checked exception
    // rather than letting the unchecked NumberFormatException escape.
    throw new SemanticException("Bad params for type varchar", e);
  }
}
/**
 * Initializes REPL LOAD state from the parsed subtree. The first child is the
 * (quoted) dump path; an optional database name/pattern and table
 * name/pattern may follow.
 *
 * @param ast the REPL LOAD subtree
 */
private void initReplLoad(ASTNode ast) {
  final int childCount = ast.getChildCount();
  path = PlanUtils.stripQuotes(ast.getChild(0).getText());
  if (childCount >= 2) {
    dbNameOrPattern = PlanUtils.stripQuotes(ast.getChild(1).getText());
  }
  if (childCount >= 3) {
    tblNameOrPattern = PlanUtils.stripQuotes(ast.getChild(2).getText());
  }
}
/**
 * Analyzes a DROP TRIGGER statement: validates the child count, unpacks the
 * resource plan and trigger names, and queues the DDL task.
 *
 * @param ast the parsed DROP TRIGGER subtree (plan name, trigger name)
 * @throws SemanticException if the subtree does not have exactly two children
 */
private void analyzeDropTrigger(ASTNode ast) throws SemanticException {
  if (ast.getChildCount() != 2) {
    throw new SemanticException("Invalid syntax for drop trigger.");
  }
  final String planName = unescapeIdentifier(ast.getChild(0).getText());
  final String trigger = unescapeIdentifier(ast.getChild(1).getText());
  DropWMTriggerDesc dropDesc = new DropWMTriggerDesc(planName, trigger);
  addServiceOutput();
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDesc)));
}
/**
 * Builds a char type info from a type AST whose single child is the declared
 * length.
 *
 * @param node the char type subtree
 * @return the corresponding CharTypeInfo
 * @throws SemanticException if there is not exactly one parameter, or the
 *         length is not a valid integer
 */
public static CharTypeInfo getCharTypeInfo(ASTNode node) throws SemanticException {
  if (node.getChildCount() != 1) {
    throw new SemanticException("Bad params for type char");
  }
  String lengthStr = node.getChild(0).getText();
  try {
    return TypeInfoFactory.getCharTypeInfo(Integer.parseInt(lengthStr));
  } catch (NumberFormatException e) {
    // Report a non-numeric length through the declared checked exception
    // rather than letting the unchecked NumberFormatException escape.
    throw new SemanticException("Bad params for type char", e);
  }
}
/**
 * Initializes REPL STATUS state from the parsed subtree. The first child is
 * the (quoted) database name/pattern; an optional table name/pattern follows.
 *
 * @param ast the REPL STATUS subtree
 */
private void initReplStatus(ASTNode ast) {
  final int childCount = ast.getChildCount();
  dbNameOrPattern = PlanUtils.stripQuotes(ast.getChild(0).getText());
  if (childCount >= 2) {
    tblNameOrPattern = PlanUtils.stripQuotes(ast.getChild(1).getText());
  }
}
/**
 * Builds a PrincipalDesc from a principal AST node, unescaping the principal
 * name. Returns null when the node's principal type cannot be determined.
 *
 * @param principal the principal subtree
 * @return the principal descriptor, or null if the type is unrecognized
 */
public static PrincipalDesc getPrincipalDesc(ASTNode principal) {
  PrincipalType principalType = getPrincipalType(principal);
  if (principalType == null) {
    return null;
  }
  String rawName = principal.getChild(0).getText();
  return new PrincipalDesc(BaseSemanticAnalyzer.unescapeIdentifier(rawName), principalType);
}
/**
 * Creates the DDL task that drops the role named by the first child of the
 * AST (identifier-unescaped).
 *
 * @param ast the DROP ROLE subtree
 * @param inputs read entities for the DDL work
 * @param outputs write entities for the DDL work
 * @return the drop-role task
 */
@Override
public Task<? extends Serializable> createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) {
  String role = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  RoleDDLDesc dropRole =
      new RoleDDLDesc(role, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.DROP_ROLE, null);
  return TaskFactory.get(new DDLWork(inputs, outputs, dropRole));
}

@Override
/**
 * Builds a decimal type info from a type AST with zero, one, or two children
 * (precision, then scale). Omitted parts fall back to the HiveDecimal user
 * defaults.
 *
 * @param node the decimal type subtree
 * @return the corresponding DecimalTypeInfo
 * @throws SemanticException if more than two parameters are given, or a
 *         parameter is not a valid integer
 */
public static DecimalTypeInfo getDecimalTypeTypeInfo(ASTNode node) throws SemanticException {
  if (node.getChildCount() > 2) {
    throw new SemanticException("Bad params for type decimal");
  }
  int precision = HiveDecimal.USER_DEFAULT_PRECISION;
  int scale = HiveDecimal.USER_DEFAULT_SCALE;
  try {
    if (node.getChildCount() >= 1) {
      precision = Integer.parseInt(node.getChild(0).getText());
    }
    if (node.getChildCount() == 2) {
      scale = Integer.parseInt(node.getChild(1).getText());
    }
  } catch (NumberFormatException e) {
    // Surface malformed numeric literals via the declared checked exception
    // instead of leaking an unchecked NumberFormatException to callers.
    throw new SemanticException("Bad params for type decimal", e);
  }
  return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
}
/**
 * Splits a TOK_TABNAME node into a (dbName, tableName) pair. The database
 * component is null when the node carries only a table name.
 *
 * @param tableNameNode a TOK_TABNAME subtree with one or two identifier children
 * @return pair of (database name or null, table name), identifier-unescaped
 */
public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) {
  assert (tableNameNode.getToken().getType() == HiveParser.TOK_TABNAME);
  boolean qualified = tableNameNode.getChildCount() == 2;
  String dbName = qualified ? unescapeIdentifier(tableNameNode.getChild(0).getText()) : null;
  String tableName = unescapeIdentifier(tableNameNode.getChild(qualified ? 1 : 0).getText());
  return Pair.of(dbName, tableName);
}
/**
 * Analyzes ALTER DATABASE ... SET LOCATION: unpacks the database name and new
 * location, registers the location as an output, and queues the alter-db work.
 *
 * @param ast the parsed subtree (database name, quoted location string)
 * @throws SemanticException if the names cannot be resolved
 */
private void analyzeAlterDatabaseLocation(ASTNode ast) throws SemanticException {
  final String dbName = getUnescapedName((ASTNode) ast.getChild(0));
  final String newLocation = unescapeSQLString(ast.getChild(1).getText());
  addLocationToOutputs(newLocation);
  addAlterDbDesc(new AlterDatabaseDesc(dbName, newLocation));
}
/**
 * Creates the DDL task that creates the role named by the first child of the
 * AST (identifier-unescaped).
 *
 * @param ast the CREATE ROLE subtree
 * @param inputs read entities for the DDL work
 * @param outputs write entities for the DDL work
 * @return the create-role task
 */
@Override
public Task<? extends Serializable> createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) {
  String role = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  RoleDDLDesc createRole =
      new RoleDDLDesc(role, PrincipalType.ROLE, RoleDDLDesc.RoleOperation.CREATE_ROLE, null);
  return TaskFactory.get(new DDLWork(inputs, outputs, createRole));
}

@Override
/**
 * Builds a varchar type info from a type AST whose single child is the
 * declared length.
 *
 * @param node the varchar type subtree
 * @return the corresponding VarcharTypeInfo
 * @throws SemanticException if there is not exactly one parameter, or the
 *         length is not a valid integer
 */
public static VarcharTypeInfo getVarcharTypeInfo(ASTNode node) throws SemanticException {
  if (node.getChildCount() != 1) {
    throw new SemanticException("Bad params for type varchar");
  }
  String lengthStr = node.getChild(0).getText();
  try {
    return TypeInfoFactory.getVarcharTypeInfo(Integer.parseInt(lengthStr));
  } catch (NumberFormatException e) {
    // Report a non-numeric length through the declared checked exception
    // rather than letting the unchecked NumberFormatException escape.
    throw new SemanticException("Bad params for type varchar", e);
  }
}
/**
 * Phase-1 pass over a select list: records the column alias for every child
 * of the form (TOK_SELEXPR expression alias) into the query-block parse info.
 *
 * @param selectExpr the TOK_SELECT subtree
 * @param qbp parse info receiving the expression-to-alias mappings
 */
private void doPhase1GetColumnAliasesFromSelect(
    ASTNode selectExpr, QBParseInfo qbp) {
  final int exprCount = selectExpr.getChildCount();
  for (int pos = 0; pos < exprCount; ++pos) {
    ASTNode child = (ASTNode) selectExpr.getChild(pos);
    boolean isSelExpr = child.getToken().getType() == HiveParser.TOK_SELEXPR;
    if (isSelExpr && child.getChildCount() == 2) {
      // Second child is the alias; first is the expression it names.
      String alias = unescapeIdentifier(child.getChild(1).getText());
      qbp.setExprToColumnAlias((ASTNode) child.getChild(0), alias);
    }
  }
}
@Override public Task<? extends Serializable> createShowRolePrincipalsTask(ASTNode ast, Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException { String roleName; if (ast.getChildCount() == 1) { roleName = ast.getChild(0).getText(); } else { // the parser should not allow this throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS"); } RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE, RoleOperation.SHOW_ROLE_PRINCIPALS, null); roleDDLDesc.setResFile(resFile.toString()); return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc)); }