/**
 * Returns the text of the right-most leaf under {@code tree}: the last child
 * is followed at every level until a node with no children is reached.
 *
 * @param tree root of the subtree to descend
 * @return the text of the deepest last-child node
 */
public static String getText(ASTNode tree) {
  ASTNode node = tree;
  while (node.getChildCount() > 0) {
    // Always descend into the last child, mirroring the original recursion.
    node = (ASTNode) node.getChild(node.getChildCount() - 1);
  }
  return node.getText();
}
/**
 * Reports whether the statistics statement rooted at {@code tree} is scoped
 * to a partition, i.e. the first child carries a TOK_PARTSPEC as its second
 * child.
 *
 * @param tree the statistics statement AST
 * @return true when a partition spec is present, false otherwise
 */
public static boolean isPartitionLevelStats(ASTNode tree) {
  ASTNode firstChild = (ASTNode) tree.getChild(0);
  if (firstChild.getChildCount() <= 1) {
    // No second child: there is no partition spec to inspect.
    return false;
  }
  ASTNode maybeSpec = (ASTNode) firstChild.getChild(1);
  return maybeSpec.getToken().getType() == HiveParser.TOK_PARTSPEC;
}
/**
 * Entry point for macro DDL analysis: dispatches CREATE MACRO and
 * DROP MACRO statements to their dedicated analyzers.
 *
 * @param ast root of the macro DDL statement
 * @throws SemanticException if the underlying analyzer rejects the statement
 */
@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
  int tokenType = ast.getToken().getType();
  if (tokenType == HiveParser.TOK_CREATEMACRO) {
    LOG.debug("Analyzing create macro " + ast.dump());
    analyzeCreateMacro(ast);
  }
  if (tokenType == HiveParser.TOK_DROPMACRO) {
    LOG.debug("Analyzing drop macro " + ast.dump());
    analyzeDropMacro(ast);
  }
}
/**
 * Pre-analysis hook that detects EXPLAIN REOPTIMIZATION: it strips the
 * REOPTIMIZATION keyword child from the EXPLAIN node and, on the first
 * execution, unwraps the EXPLAIN entirely so the underlying statement is
 * executed directly. Mutates {@code ast} in place and sets the
 * {@code explainReOptimization} field as a side effect.
 *
 * @param context hook context (unused here)
 * @param ast     the statement AST; possibly rewritten
 * @return the (possibly unwrapped) AST to analyze
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
    throws SemanticException {
  if (ast.getType() == HiveParser.TOK_EXPLAIN) {
    int childCount = ast.getChildCount();
    // Start at 1: child 0 is the statement being explained; the remaining
    // children are the explain options.
    for (int i = 1; i < childCount; i++) {
      if (ast.getChild(i).getType() == HiveParser.KW_REOPTIMIZATION) {
        explainReOptimization = true;
        // Remove the keyword so downstream analysis sees a plain EXPLAIN.
        ast.deleteChild(i);
        break;
      }
    }
    if (explainReOptimization && firstExecution()) {
      // First run of EXPLAIN REOPTIMIZATION: splice the explained statement
      // into the parent, replacing the EXPLAIN node, and analyze it as-is.
      Tree execTree = ast.getChild(0);
      execTree.setParent(ast.getParent());
      ast.getParent().setChild(0, execTree);
      return (ASTNode) execTree;
    }
  }
  return ast;
}
/**
 * Searches the subtree rooted at {@code ast} for the first node whose type
 * equals {@code token}, returning null when no such node exists.
 *
 * NOTE(review): despite the name, the traversal order is FIFO — nodes are
 * appended with add() and taken with poll() — so assuming
 * {@code searchQueue} is a standard FIFO Queue this is a breadth-first
 * search, not depth-first. Callers relying on which of several matches is
 * returned should confirm the intended order. Also reuses and clears the
 * shared {@code searchQueue} field, so this method is not reentrant.
 *
 * @param ast   root of the subtree to search
 * @param token the HiveParser token type to look for
 * @return the first matching node in FIFO order, or null
 */
public ASTNode depthFirstSearch(ASTNode ast, int token) {
  searchQueue.clear();
  searchQueue.add(ast);
  while (!searchQueue.isEmpty()) {
    ASTNode next = searchQueue.poll();
    if (next.getType() == token) return next;
    for (int j = 0; j < next.getChildCount(); ++j) {
      searchQueue.add((ASTNode) next.getChild(j));
    }
  }
  return null;
}
/**
 * Converts each child of {@code node} into a {@link PrincipalDesc}.
 *
 * @param node parent AST node whose children each describe one principal
 * @return principal descriptors in child order
 */
public static List<PrincipalDesc> analyzePrincipalListDef(ASTNode node) {
  int childCount = node.getChildCount();
  List<PrincipalDesc> principals = new ArrayList<PrincipalDesc>(childCount);
  for (int idx = 0; idx < childCount; idx++) {
    principals.add(getPrincipalDesc((ASTNode) node.getChild(idx)));
  }
  return principals;
}
/**
 * Recursively walks the subtree rooted at {@code n} and back-tick quotes
 * every Identifier token by inserting "`" before and after it in the token
 * rewrite stream ({@code trs}). Already-visited identifiers (tracked in the
 * {@code visitedNodes} field) are skipped so the stream is not quoted twice.
 */
private void visit(ASTNode n) {
  if (n.getType() == HiveParser.Identifier) {
    if (visitedNodes.containsKey(n)) {
      /**
       * Since we are modifying the stream, it's not idempotent. Ideally, the caller would take
       * care to only quote Identifiers in each subtree once, but this makes it safe
       */
      return;
    }
    visitedNodes.put(n, n);
    trs.insertBefore(n.getToken(), "`");
    trs.insertAfter(n.getToken(), "`");
  }
  if (n.getChildCount() <= 0) {
    return;
  }
  for (Node c : n.getChildren()) {
    visit((ASTNode)c);
  }
}
}
private void addSetRCols(ASTNode node, Set<String> setRCols) { // See if this node is a TOK_TABLE_OR_COL. If so, find the value and put it in the list. If // not, recurse on any children if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) { ASTNode colName = (ASTNode)node.getChildren().get(0); assert colName.getToken().getType() == HiveParser.Identifier : "Expected column name"; setRCols.add(normalizeColName(colName.getText())); } else if (node.getChildren() != null) { for (Node n : node.getChildren()) { addSetRCols((ASTNode)n, setRCols); } } }
/**
 * Analyzes SET ROLE / SHOW CURRENT ROLES statements. With no children the
 * statement is SHOW CURRENT ROLES (results routed through a local temp
 * file); with one child it is SET ROLE &lt;name&gt;; any other shape is an
 * internal error.
 *
 * @param ast the role statement AST
 * @throws SemanticException if the node has more than one child
 */
private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
  int childCount = ast.getChildCount();
  if (childCount == 0) {
    ctx.setResFile(ctx.getLocalTmpPath());
    rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
        getInputs(), getOutputs(), ctx.getResFile()));
    setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
  } else if (childCount == 1) {
    rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
        BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()),
        getInputs(), getOutputs()));
  } else {
    throw new SemanticException("Internal error. ASTNode expected to have 0 or 1 child. "
        + ast.dump());
  }
}
private static void walkASTAndQualifyNames(ASTNode ast, Set<String> cteAlias, Context ctx, Hive db, Set<Integer> ignoredTokens, UnparseTranslator unparseTranslator) throws SemanticException { Queue<Node> queue = new LinkedList<>(); queue.add(ast); while (!queue.isEmpty()) { ASTNode astNode = (ASTNode) queue.poll(); if (astNode.getToken().getType() == HiveParser.TOK_TABNAME) { // Check if this is table name is qualified or not String tabIdName = getUnescapedName(astNode).toLowerCase(); // if alias to CTE contains the table name, we do not do the translation because // cte is actually a subquery. if (!cteAlias.contains(tabIdName)) { unparseTranslator.addTableNameTranslation(astNode, SessionState.get().getCurrentDatabase()); } } if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) { for (Node child : astNode.getChildren()) { queue.offer(child); } } } }
/**
 * Analyzes ALTER TABLE ... ADD CONSTRAINT for primary-key and foreign-key
 * constraints and queues the corresponding ALTER TABLE DDL task. Other
 * constraint kinds produce a task with empty key lists, as before.
 *
 * @param ast       the constraint clause AST
 * @param tableName fully-qualified table being altered
 * @throws SemanticException if constraint processing fails
 */
private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName)
    throws SemanticException {
  ASTNode parent = (ASTNode) ast.getParent();
  ASTNode constraintNode = (ASTNode) ast.getChild(0);
  List<SQLPrimaryKey> primaryKeys = new ArrayList<SQLPrimaryKey>();
  List<SQLForeignKey> foreignKeys = new ArrayList<SQLForeignKey>();
  switch (constraintNode.getToken().getType()) {
    case HiveParser.TOK_PRIMARY_KEY:
      BaseSemanticAnalyzer.processPrimaryKeys(parent, constraintNode, primaryKeys);
      break;
    case HiveParser.TOK_FOREIGN_KEY:
      BaseSemanticAnalyzer.processForeignKeys(parent, constraintNode, foreignKeys);
      break;
    default:
      // Unrecognized constraint kinds fall through with empty key lists.
      break;
  }
  AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
/**
 * Descends an assignment subtree ({@code col = <expr>}) and returns the
 * Identifier node naming the assigned column. The expected shape
 * (EQUAL → TOK_TABLE_OR_COL → Identifier) is enforced with asserts only.
 *
 * @param assignment the EQUAL node of a SET assignment
 * @return the Identifier node for the assigned column
 */
protected ASTNode findLHSofAssignment(ASTNode assignment) {
  assert assignment.getToken().getType() == HiveParser.EQUAL :
      "Expected set assignments to use equals operator but found " + assignment.getName();
  ASTNode lhs = (ASTNode) assignment.getChildren().get(0);
  assert lhs.getToken().getType() == HiveParser.TOK_TABLE_OR_COL :
      "Expected left side of assignment to be table or column";
  ASTNode columnNode = (ASTNode) lhs.getChildren().get(0);
  assert columnNode.getToken().getType() == HiveParser.Identifier : "Expected column name";
  return columnNode;
}
/**
 * Depth-first search for the first TOK_FUNCTION node whose function name
 * (its first child's text) is "internal_interval".
 *
 * @param n subtree to search
 * @return the matching function node, or null when none exists
 */
private ASTNode findFunctionNode(ASTNode n) {
  if (n.getType() == HiveParser.TOK_FUNCTION) {
    if ("internal_interval".equals(n.getChild(0).getText())) {
      return n;
    }
  }
  ArrayList<Node> children = n.getChildren();
  if (children != null) {
    for (Node c : children) {
      ASTNode r = findFunctionNode((ASTNode) c);
      if (r != null) {
        // Propagate the first match found in child order.
        return r;
      }
    }
  }
  return null;
}
}
/**
 * Maps every Identifier node under a (possibly nested) DOT expression to
 * its unparsed identifier text (quoted as needed for the configured
 * quoting policy). Nodes of any other type contribute nothing.
 *
 * @param node a DOT expression or Identifier node
 * @return identifier node → unparsed text, empty for other node types
 */
private Map<ASTNode, String> translateFieldDesc(ASTNode node) {
  Map<ASTNode, String> result = new HashMap<>();
  int nodeType = node.getType();
  if (nodeType == HiveParser.Identifier) {
    result.put(node, HiveUtils.unparseIdentifier(node.getText(), conf));
  } else if (nodeType == HiveParser.DOT) {
    // A DOT chains sub-expressions; merge the translations of each side.
    for (Node child : node.getChildren()) {
      result.putAll(translateFieldDesc((ASTNode) child));
    }
  }
  return result;
}
/**
 * Scans the query hints for STREAMTABLE entries and records the named
 * aliases on the join tree. The alias list is created lazily so the join
 * tree keeps its existing (possibly null) list when no hint matches.
 *
 * @param joinTree join tree to receive the stream aliases
 * @param qb       query block whose hints are scanned (assumes hints are
 *                 present — NOTE(review): getHints() null would NPE here,
 *                 same as before; confirm callers guarantee it)
 */
private void parseStreamTables(QBJoinTree joinTree, QB qb) {
  List<String> streamAliases = joinTree.getStreamAliases();
  for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
    ASTNode hint = (ASTNode) hintNode;
    if (hint.getChild(0).getType() != HintParser.TOK_STREAMTABLE) {
      continue;
    }
    int aliasCount = hint.getChild(1).getChildCount();
    for (int i = 0; i < aliasCount; i++) {
      if (streamAliases == null) {
        streamAliases = new ArrayList<String>();
      }
      streamAliases.add(hint.getChild(1).getChild(i).getText());
    }
  }
  joinTree.setStreamAliases(streamAliases);
}
/**
 * Filters the aggregation expression trees down to the DISTINCT aggregation
 * calls (TOK_FUNCTIONDI).
 *
 * @param aggregationTrees aggregation expression string → its AST
 * @return the ASTs of DISTINCT aggregations, in map iteration order
 * @throws SemanticException declared for interface consistency; not thrown here
 */
private List<ASTNode> doPhase1GetDistinctFuncExprs(
    HashMap<String, ASTNode> aggregationTrees) throws SemanticException {
  List<ASTNode> distinctExprs = new ArrayList<ASTNode>();
  for (ASTNode aggTree : aggregationTrees.values()) {
    assert (aggTree != null);
    if (aggTree.getToken().getType() == HiveParser.TOK_FUNCTIONDI) {
      distinctExprs.add(aggTree);
    }
  }
  return distinctExprs;
}
/**
 * Returns the dumped string form of the AST, computing it on first use and
 * caching it in {@code astStringTree} for subsequent calls.
 *
 * @return the cached dump of {@code astTree}
 */
public String getAstStringTree() {
  if (astStringTree != null) {
    return astStringTree;
  }
  astStringTree = astTree.dump();
  return astStringTree;
}