public static boolean containsTokenOfType(ASTNode root, PTFUtils.Predicate<ASTNode> predicate) { Queue<ASTNode> queue = new ArrayDeque<ASTNode>(); // BFS queue.add(root); while (!queue.isEmpty()) { ASTNode current = queue.remove(); // If the predicate matches, then return true. // Otherwise visit the next set of nodes that haven't been seen. if (predicate.apply(current)) { return true; } else { // Guard because ASTNode.getChildren.iterator returns null if no children available (bug). if (current.getChildCount() > 0) { for (Node child : current.getChildren()) { queue.add((ASTNode) child); } } } } return false; }
/**
 * Recursively walks the partition-spec subtree, and for every TOK_PARTVAL child
 * that carries an explicit value, stores an ExprNodeDesc for that value into
 * {@code astExprNodeMap} (keyed by the partition-column name node).
 *
 * @param astNode        subtree to inspect (may be null)
 * @param conf           configuration used to look up the default partition name
 * @param astExprNodeMap output map from partition-column name node to value expression
 * @return true if every TOK_PARTVAL encountered has an explicit value
 *         (i.e. the spec is fully static); false if any is value-less
 * @throws SemanticException propagated from expression type-checking
 */
private static boolean getPartExprNodeDesc(ASTNode astNode, HiveConf conf,
    Map<ASTNode, ExprNodeDesc> astExprNodeMap) throws SemanticException {

  if (astNode == null) {
    return true;
  } else if ((astNode.getChildren() == null) || (astNode.getChildren().size() == 0)) {
    // Leaf node: only a bare TOK_PARTVAL (a partition column with no value) fails.
    return astNode.getType() != HiveParser.TOK_PARTVAL;
  }

  TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
  String defaultPartitionName = HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
  boolean result = true;
  for (Node childNode : astNode.getChildren()) {
    ASTNode childASTNode = (ASTNode) childNode;
    if (childASTNode.getType() != HiveParser.TOK_PARTVAL) {
      // Not a partition value node: recurse. Note the recursive call goes first
      // so the map is populated even after result has turned false.
      result = getPartExprNodeDesc(childASTNode, conf, astExprNodeMap) && result;
    } else {
      // A TOK_PARTVAL with a single child has no value attached — i.e. a
      // dynamic-partition column, which makes the overall spec non-static.
      boolean isDynamicPart = childASTNode.getChildren().size() <= 1;
      result = !isDynamicPart && result;
      if (!isDynamicPart) {
        ASTNode partVal = (ASTNode) childASTNode.getChildren().get(1);
        // The default-partition sentinel is not a literal to type-check; skip it.
        if (!defaultPartitionName.equalsIgnoreCase(unescapeSQLString(partVal.getText()))) {
          astExprNodeMap.put((ASTNode) childASTNode.getChildren().get(0),
              TypeCheckProcFactory.genExprNode(partVal, typeCheckCtx).get(partVal));
        }
      }
    }
  }
  return result;
}
/**
 * Breadth-first search of the AST rooted at {@code root}, returning true if
 * any node in the tree satisfies {@code predicate}.
 *
 * @param root      starting node of the traversal
 * @param predicate condition applied to each visited node
 * @return true if a matching node exists anywhere in the tree, false otherwise
 */
public static boolean containsTokenOfType(ASTNode root, PTFUtils.Predicate<ASTNode> predicate) {
  Queue<ASTNode> queue = new ArrayDeque<ASTNode>(); // BFS
  queue.add(root);
  while (!queue.isEmpty()) {
    ASTNode current = queue.remove();
    // If the predicate matches, then return true.
    // Otherwise visit the next set of nodes that haven't been seen.
    if (predicate.apply(current)) {
      return true;
    } else {
      // Guard because ASTNode.getChildren.iterator returns null if no children available (bug).
      if (current.getChildCount() > 0) {
        for (Node child : current.getChildren()) {
          queue.add((ASTNode) child);
        }
      }
    }
  }
  return false;
}
private String replaceDefaultKeywordForMerge(String valueClause, Table table, ASTNode columnListNode) throws SemanticException { if (!valueClause.toLowerCase().contains("`default`")) { return valueClause; } Map<String, String> colNameToDefaultConstraint = getColNameToDefaultValueMap(table); String[] values = valueClause.trim().split(","); String[] replacedValues = new String[values.length]; // the list of the column names may be set in the query String[] columnNames = columnListNode == null ? table.getAllCols().stream().map(f -> f.getName()).toArray(size -> new String[size]) : columnListNode.getChildren().stream().map(n -> ((ASTNode)n).toString()).toArray(size -> new String[size]); for (int i = 0; i < values.length; i++) { if (values[i].trim().toLowerCase().equals("`default`")) { replacedValues[i] = MapUtils.getString(colNameToDefaultConstraint, columnNames[i], "null"); } else { replacedValues[i] = values[i]; } } return StringUtils.join(replacedValues, ','); }
private void addSetRCols(ASTNode node, Set<String> setRCols) { // See if this node is a TOK_TABLE_OR_COL. If so, find the value and put it in the list. If // not, recurse on any children if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) { ASTNode colName = (ASTNode)node.getChildren().get(0); assert colName.getToken().getType() == HiveParser.Identifier : "Expected column name"; setRCols.add(normalizeColName(colName.getText())); } else if (node.getChildren() != null) { for (Node n : node.getChildren()) { addSetRCols((ASTNode)n, setRCols); } } }
/**
 * Recursively collects the normalized names of all columns referenced through
 * TOK_TABLE_OR_COL nodes in the given subtree into {@code setRCols}.
 *
 * @param node     subtree to scan
 * @param setRCols output set receiving normalized column names
 */
private void addSetRCols(ASTNode node, Set<String> setRCols) {
  // See if this node is a TOK_TABLE_OR_COL. If so, find the value and put it in the list. If
  // not, recurse on any children
  if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
    ASTNode colName = (ASTNode)node.getChildren().get(0);
    assert colName.getToken().getType() == HiveParser.Identifier : "Expected column name";
    setRCols.add(normalizeColName(colName.getText()));
  } else if (node.getChildren() != null) {
    // getChildren() may be null on leaf nodes, hence the guard.
    for (Node n : node.getChildren()) {
      addSetRCols((ASTNode)n, setRCols);
    }
  }
}
protected Map<String, ASTNode> collectSetColumnsAndExpressions(ASTNode setClause, Set<String> setRCols, Table targetTable) throws SemanticException { // An update needs to select all of the columns, as we rewrite the entire row. Also, // we need to figure out which columns we are going to replace. assert setClause.getToken().getType() == HiveParser.TOK_SET_COLUMNS_CLAUSE : "Expected second child of update token to be set token"; // Get the children of the set clause, each of which should be a column assignment List<? extends Node> assignments = setClause.getChildren(); // Must be deterministic order map for consistent q-test output across Java versions Map<String, ASTNode> setCols = new LinkedHashMap<String, ASTNode>(assignments.size()); for (Node a : assignments) { ASTNode assignment = (ASTNode)a; ASTNode colName = findLHSofAssignment(assignment); if (setRCols != null) { addSetRCols((ASTNode) assignment.getChildren().get(1), setRCols); } checkValidSetClauseTarget(colName, targetTable); String columnName = normalizeColName(colName.getText()); // This means that in UPDATE T SET x = _something_ // _something_ can be whatever is supported in SELECT _something_ setCols.put(columnName, (ASTNode)assignment.getChildren().get(1)); } return setCols; }
/**
 * Maps every Identifier node under a DOT field-access expression to its
 * re-quoted (unparsed) text form.
 *
 * @param node DOT expression or Identifier node
 * @return map from each Identifier node found to its unparsed identifier text;
 *         empty for any other node type
 */
private Map<ASTNode, String> translateFieldDesc(ASTNode node) {
  Map<ASTNode, String> fieldTexts = new HashMap<>();
  switch (node.getType()) {
    case HiveParser.DOT:
      // Field access: merge the translations of both sides.
      for (Node child : node.getChildren()) {
        fieldTexts.putAll(translateFieldDesc((ASTNode) child));
      }
      break;
    case HiveParser.Identifier:
      fieldTexts.put(node, HiveUtils.unparseIdentifier(node.getText(), conf));
      break;
    default:
      // Other node types contribute nothing.
      break;
  }
  return fieldTexts;
}
/**
 * Extracts the column-name Identifier node from the left-hand side of a
 * SET-clause assignment (an EQUAL node whose first child is TOK_TABLE_OR_COL).
 *
 * @param assignment EQUAL node representing "col = expr"
 * @return the Identifier node naming the assigned column
 */
protected ASTNode findLHSofAssignment(ASTNode assignment) {
  assert assignment.getToken().getType() == HiveParser.EQUAL :
      "Expected set assignments to use equals operator but found " + assignment.getName();
  // First child of EQUAL is the assignment target.
  ASTNode target = (ASTNode) assignment.getChildren().get(0);
  assert target.getToken().getType() == HiveParser.TOK_TABLE_OR_COL :
      "Expected left side of assignment to be table or column";
  // The target wraps a single Identifier carrying the column name.
  ASTNode columnIdentifier = (ASTNode) target.getChildren().get(0);
  assert columnIdentifier.getToken().getType() == HiveParser.Identifier : "Expected column name";
  return columnIdentifier;
}
/**
 * Maps every Identifier node under a DOT field-access expression to its
 * re-quoted (unparsed) text form.
 *
 * @param node DOT expression or Identifier node
 * @return map from each Identifier node found to its unparsed identifier text;
 *         empty for any other node type
 */
private Map<ASTNode, String> translateFieldDesc(ASTNode node) {
  Map<ASTNode, String> map = new HashMap<>();
  if (node.getType() == HiveParser.DOT) {
    // Field access: merge the translations of both sides.
    for (Node child : node.getChildren()) {
      map.putAll(translateFieldDesc((ASTNode) child));
    }
  } else if (node.getType() == HiveParser.Identifier) {
    map.put(node, HiveUtils.unparseIdentifier(node.getText(), conf));
  }
  return map;
}
/**
 * Extracts the column-name Identifier node from the left-hand side of a
 * SET-clause assignment (an EQUAL node whose first child is TOK_TABLE_OR_COL).
 *
 * @param assignment EQUAL node representing "col = expr"
 * @return the Identifier node naming the assigned column
 */
private ASTNode findLHSofAssignment(ASTNode assignment) {
  assert assignment.getToken().getType() == HiveParser.EQUAL :
      "Expected set assignments to use equals operator but found " + assignment.getName();
  // First child of EQUAL is the assignment target (TOK_TABLE_OR_COL).
  ASTNode tableOrColTok = (ASTNode)assignment.getChildren().get(0);
  assert tableOrColTok.getToken().getType() == HiveParser.TOK_TABLE_OR_COL :
      "Expected left side of assignment to be table or column";
  // The target wraps a single Identifier carrying the column name.
  ASTNode colName = (ASTNode)tableOrColTok.getChildren().get(0);
  assert colName.getToken().getType() == HiveParser.Identifier : "Expected column name";
  return colName;
}

private Map<String, ASTNode> collectSetColumnsAndExpressions(ASTNode setClause,
private static void walkASTAndQualifyNames(ASTNode ast, Set<String> cteAlias, Context ctx, Hive db, Set<Integer> ignoredTokens, UnparseTranslator unparseTranslator) throws SemanticException { Queue<Node> queue = new LinkedList<>(); queue.add(ast); while (!queue.isEmpty()) { ASTNode astNode = (ASTNode) queue.poll(); if (astNode.getToken().getType() == HiveParser.TOK_TABNAME) { // Check if this is table name is qualified or not String tabIdName = getUnescapedName(astNode).toLowerCase(); // if alias to CTE contains the table name, we do not do the translation because // cte is actually a subquery. if (!cteAlias.contains(tabIdName)) { unparseTranslator.addTableNameTranslation(astNode, SessionState.get().getCurrentDatabase()); } } if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) { for (Node child : astNode.getChildren()) { queue.offer(child); } } } }
/**
 * Wraps every not-yet-visited Identifier token in this subtree with backquotes
 * via the token rewrite stream, then recurses into all children.
 *
 * @param n subtree root to process
 */
private void visit(ASTNode n) {
  if (n.getType() == HiveParser.Identifier) {
    // The rewrite stream is mutated in place, so this is not idempotent:
    // quoting the same Identifier twice would double-quote it. Ideally callers
    // would visit each subtree only once; the visited map makes it safe anyway.
    if (visitedNodes.containsKey(n)) {
      return;
    }
    visitedNodes.put(n, n);
    trs.insertBefore(n.getToken(), "`");
    trs.insertAfter(n.getToken(), "`");
  }
  // Only descend when children exist (getChildren() is unsafe on leaves).
  if (n.getChildCount() > 0) {
    for (Node child : n.getChildren()) {
      visit((ASTNode) child);
    }
  }
}
}
/**
 * Wraps every not-yet-visited Identifier token in this subtree with backquotes
 * via the token rewrite stream, then recurses into all children.
 *
 * @param n subtree root to process
 */
private void visit(ASTNode n) {
  if(n.getType() == HiveParser.Identifier) {
    if(visitedNodes.containsKey(n)) {
      /**
       * Since we are modifying the stream, it's not idempotent. Ideally, the caller would take
       * care to only quote Identifiers in each subtree once, but this makes it safe
       */
      return;
    }
    visitedNodes.put(n, n);
    trs.insertBefore(n.getToken(), "`");
    trs.insertAfter(n.getToken(), "`");
  }
  // Leaf node: nothing further to visit.
  if(n.getChildCount() <= 0) {return;}
  for(Node c : n.getChildren()) {
    visit((ASTNode)c);
  }
}
}
/**
 * Reads STREAMTABLE hints from the query and records the streamed aliases on
 * the join tree, appending to any aliases already present.
 *
 * NOTE(review): assumes qb.getParseInfo().getHints() is non-null here —
 * presumably callers only invoke this when hints exist; confirm.
 *
 * @param joinTree join tree to update with stream aliases
 * @param qb       query block whose hints are inspected
 */
private void parseStreamTables(QBJoinTree joinTree, QB qb) {
  List<String> streamAliases = joinTree.getStreamAliases();
  for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
    ASTNode hint = (ASTNode) hintNode;
    if (hint.getChild(0).getType() != HintParser.TOK_STREAMTABLE) {
      continue;
    }
    // child(1) holds the list of aliases named in the STREAMTABLE hint.
    int aliasCount = hint.getChild(1).getChildCount();
    for (int i = 0; i < aliasCount; i++) {
      // Lazily create the list so the join tree keeps null when no hint
      // actually names an alias.
      if (streamAliases == null) {
        streamAliases = new ArrayList<String>();
      }
      streamAliases.add(hint.getChild(1).getChild(i).getText());
    }
  }
  joinTree.setStreamAliases(streamAliases);
}
/**
 * Reads STREAMTABLE hints from the query and records the streamed aliases on
 * the join tree, appending to any aliases already present.
 *
 * NOTE(review): assumes qb.getParseInfo().getHints() is non-null here —
 * presumably callers only invoke this when hints exist; confirm.
 *
 * @param joinTree join tree to update with stream aliases
 * @param qb       query block whose hints are inspected
 */
private void parseStreamTables(QBJoinTree joinTree, QB qb) {
  List<String> streamAliases = joinTree.getStreamAliases();
  for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
    ASTNode hint = (ASTNode) hintNode;
    if (hint.getChild(0).getType() == HintParser.TOK_STREAMTABLE) {
      // child(1) holds the list of aliases named in the STREAMTABLE hint.
      for (int i = 0; i < hint.getChild(1).getChildCount(); i++) {
        // Lazily create the list so the join tree keeps null when no hint
        // actually names an alias.
        if (streamAliases == null) {
          streamAliases = new ArrayList<String>();
        }
        streamAliases.add(hint.getChild(1).getChild(i).getText());
      }
    }
  }
  joinTree.setStreamAliases(streamAliases);
}
/**
 * Converts a literal expression node from a VALUES clause into the text form
 * expected by Hive's text input format.
 *
 * @param expr literal AST node from a VALUES clause
 * @return textual representation of the literal
 * @throws SemanticException if the node is not a supported literal type
 */
private String unparseExprForValuesClause(ASTNode expr) throws SemanticException {
  switch (expr.getToken().getType()) {
    case HiveParser.Number:
      return expr.getText();
    case HiveParser.StringLiteral:
      // Strip the quoting/escaping so the raw string value lands in the row.
      return BaseSemanticAnalyzer.unescapeSQLString(expr.getText());
    case HiveParser.KW_FALSE:
      // UDFToBoolean casts any non-empty string to true, so set this to false
      return "";
    case HiveParser.KW_TRUE:
      return "TRUE";
    case HiveParser.MINUS:
      // Unary minus: prepend the sign to the unparse of the operand.
      return "-" + unparseExprForValuesClause((ASTNode)expr.getChildren().get(0));
    case HiveParser.TOK_NULL:
      // Hive's text input will translate this as a null
      return "\\N";
    default:
      throw new SemanticException("Expression of type " + expr.getText() +
          " not supported in insert/values");
  }
}
/**
 * Depth-first search for the first TOK_FUNCTION node whose first child is the
 * "internal_interval" function name.
 *
 * @param n subtree root to search
 * @return the matching function node, or null when the subtree contains none
 */
private ASTNode findFunctionNode(ASTNode n) {
  // Flattened from two nested ifs: a non-matching TOK_FUNCTION still falls
  // through to the child search, exactly as before.
  if (n.getType() == HiveParser.TOK_FUNCTION
      && "internal_interval".equals(n.getChild(0).getText())) {
    return n;
  }
  // Program to the interface: the concrete ArrayList type added nothing here.
  // getChildren() may return null for leaf nodes, hence the guard.
  List<Node> children = n.getChildren();
  if (children != null) {
    for (Node c : children) {
      ASTNode match = findFunctionNode((ASTNode) c);
      if (match != null) {
        return match;
      }
    }
  }
  return null;
}
}
/**
 * Rewrites the incremental-rebuild plan AST so the materialized view is
 * appended to instead of overwritten: the TOK_DESTINATION token under
 * TOK_INSERT is replaced in place by a TOK_INSERT_INTO token carrying the
 * same children.
 *
 * @param newAST root of the rewritten query AST (mutated in place)
 * @throws SemanticException declared for caller compatibility
 */
private void fixUpASTNoAggregateIncrementalRebuild(ASTNode newAST) throws SemanticException {
  // Replace INSERT OVERWRITE by INSERT INTO
  // AST tree will have this shape:
  // TOK_QUERY
  //   TOK_FROM
  //   ...
  //   TOK_INSERT
  //     TOK_DESTINATION <- THIS TOKEN IS REPLACED BY 'TOK_INSERT_INTO'
  //       TOK_TAB
  //         TOK_TABNAME
  //           default.cmv_mat_view
  //     TOK_SELECT
  //   ...
  ASTNode dest = new ASTSearcher().simpleBreadthFirstSearch(newAST, HiveParser.TOK_QUERY,
      HiveParser.TOK_INSERT, HiveParser.TOK_DESTINATION);
  ASTNode newChild = (ASTNode) ParseDriver.adaptor.create(
      HiveParser.TOK_INSERT_INTO, "TOK_INSERT_INTO");
  // Move the destination's children (the target table spec) under the new token.
  newChild.addChildren(dest.getChildren());
  ASTNode destParent = (ASTNode) dest.getParent();
  // Swap the old TOK_DESTINATION for TOK_INSERT_INTO at the same child index.
  int childIndex = dest.childIndex;
  destParent.deleteChild(childIndex);
  destParent.insertChild(childIndex, newChild);
}
/** * Finds all columns and groups by table ref (if there is one). */ private void visit(ASTNode n) { if (n.getType() == HiveParser.TOK_TABLE_OR_COL) { ASTNode parent = (ASTNode) n.getParent(); if (parent != null && parent.getType() == HiveParser.DOT) { //the ref must be a table, so look for column name as right child of DOT if (parent.getParent() != null && parent.getParent().getType() == HiveParser.DOT) { //I don't think this can happen... but just in case throw new IllegalArgumentException("Found unexpected db.table.col reference in " + onClauseAsString); } addColumn2Table(n.getChild(0).getText(), parent.getChild(1).getText()); } else { //must be just a column name unresolvedColumns.add(n.getChild(0).getText()); } } if (n.getChildCount() == 0) { return; } for (Node child : n.getChildren()) { visit((ASTNode)child); } }