/**
 * Checks whether a statistics statement targets partition-level stats,
 * i.e. whether the first child of the tree carries a TOK_PARTSPEC node
 * as its own second child.
 *
 * @param tree root of the parsed statement
 * @return true if a partition spec is present, false otherwise
 */
public static boolean isPartitionLevelStats(ASTNode tree) {
  ASTNode firstChild = (ASTNode) tree.getChild(0);
  if (firstChild.getChildCount() <= 1) {
    return false;
  }
  ASTNode maybePartSpec = (ASTNode) firstChild.getChild(1);
  return maybePartSpec.getToken().getType() == HiveParser.TOK_PARTSPEC;
}
/**
 * Descends the leftmost branch of the tree until a node with a non-null
 * token is reached or a leaf is encountered.
 *
 * @param tree candidate node from which to start searching
 * @return the node at which the descent stopped
 */
private static ASTNode findRootNonNullToken(ASTNode tree) {
  ASTNode current = tree;
  for (;;) {
    boolean hasToken = current.getToken() != null;
    boolean isLeaf = current.getChildCount() == 0;
    if (hasToken || isLeaf) {
      return current;
    }
    current = (ASTNode) current.getChild(0);
  }
}
// NOTE(review): truncated fragment — the braces do not balance here and the
// inner loop re-declares `index` (which would not compile as written);
// presumably the missing closing braces and surrounding scope live in the
// elided portion of the file. TODO confirm against the full source.
// Walks a WITH (CTE) clause: iterates the CTE subqueries from last to first,
// recording each alias and rejecting duplicates, then qualifies names in the
// statement's remaining children.
if (ast.getChildCount() > 0 && HiveParser.TOK_CTE == ((ASTNode) ast.getChild(0)).getToken().getType()) {
  ASTNode cte = (ASTNode) ast.getChild(0);
  for (int index = cte.getChildCount() - 1; index >= 0; index--) {
    ASTNode subq = (ASTNode) cte.getChild(index);
    // child(1) of each CTE subquery node holds its alias identifier
    String alias = unescapeIdentifier(subq.getChild(1).getText());
    if (cteAlias.contains(alias)) {
      throw new SemanticException("Duplicate definition of " + alias);
    } else {
      cteAlias.add(alias);
      // children from position 1 onward are the statement parts to qualify
      for (int index = 1; index < ast.getChildCount(); index++) {
        walkASTAndQualifyNames(ast, cteAlias, ctx, db, ignoredTokens, unparseTranslator);
/**
 * Extracts the column identifier on the left-hand side of a SET assignment,
 * an EQUAL node of shape (= (TOK_TABLE_OR_COL Identifier) expr).
 * Expectations are guarded by asserts (active only under -ea).
 *
 * @param assignment the EQUAL node for one assignment
 * @return the Identifier node naming the assigned column
 */
protected ASTNode findLHSofAssignment(ASTNode assignment) {
  assert assignment.getToken().getType() == HiveParser.EQUAL :
      "Expected set assignments to use equals operator but found " + assignment.getName();
  ASTNode lhs = (ASTNode) assignment.getChildren().get(0);
  assert lhs.getToken().getType() == HiveParser.TOK_TABLE_OR_COL :
      "Expected left side of assignment to be table or column";
  ASTNode identifier = (ASTNode) lhs.getChildren().get(0);
  assert identifier.getToken().getType() == HiveParser.Identifier :
      "Expected column name";
  return identifier;
}
/**
 * Analyzes ALTER TABLE ... ADD CONSTRAINT: builds the primary-key or
 * foreign-key metadata from the constraint subtree and queues the DDL task
 * that applies it.
 *
 * @param ast the ADD CONSTRAINT node
 * @param tableName name of the table being altered
 * @throws SemanticException if constraint processing fails
 */
private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName)
    throws SemanticException {
  ASTNode parent = (ASTNode) ast.getParent();
  ASTNode constraintNode = (ASTNode) ast.getChild(0);
  List<SQLPrimaryKey> primaryKeys = new ArrayList<SQLPrimaryKey>();
  List<SQLForeignKey> foreignKeys = new ArrayList<SQLForeignKey>();
  int constraintType = constraintNode.getToken().getType();
  if (constraintType == HiveParser.TOK_PRIMARY_KEY) {
    BaseSemanticAnalyzer.processPrimaryKeys(parent, constraintNode, primaryKeys);
  } else if (constraintType == HiveParser.TOK_FOREIGN_KEY) {
    BaseSemanticAnalyzer.processForeignKeys(parent, constraintNode, foreignKeys);
  }
  // Any other token type falls through and produces a desc with empty lists,
  // matching the original behavior.
  AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
// NOTE(review): truncated fragment — the `try {` matching the catch below and
// several closing braces are cut off in this view; the structure does not
// balance as shown. Verify against the full file before editing.
// Entry point for REPL command analysis: logs the root token, then dispatches
// on its type.
@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
  // typo "Aanalyzer" is part of the runtime log string; left untouched here
  LOG.debug("ReplicationSemanticAanalyzer: analyzeInternal");
  LOG.debug(ast.getName() + ":" + ast.getToken().getText() + "=" + ast.getText());
  switch (ast.getToken().getType()) {
  case TOK_REPL_DUMP: {
    LOG.debug("ReplicationSemanticAnalyzer: analyzeInternal: dump");
    initReplDump(ast);
  }
  // presumably a `try` block opened in the elided text above this catch
  catch (HiveException e) {
    throw new SemanticException(e.getMessage(), e);
    // NOTE(review): this second throw looks like it belongs to a separate
    // default branch in the full file — confirm
    throw new SemanticException("Unexpected root token");
/**
 * Determines whether a skewed-table clause includes STORED AS DIRECTORIES,
 * signalled by a TOK_STOREDASDIRS token as the third child.
 *
 * @param child node holding the skewed-by specification
 * @return true when the clause has exactly three children and the third is
 *         TOK_STOREDASDIRS
 */
protected boolean analyzeStoredAdDirs(ASTNode child) {
  if (child.getChildCount() != 3) {
    return false;
  }
  ASTNode third = (ASTNode) child.getChild(2);
  return third.getToken().getType() == HiveParser.TOK_STOREDASDIRS;
}
// NOTE(review): truncated fragment — braces do not balance, the nested loop
// re-declares `index`, and the try matching the trailing catch is not visible
// here. Confirm against the full file before editing.
// CTE handling with table masking: walks subqueries right-to-left, records
// aliases and rejects duplicates, marks TABREF nodes in the statement's other
// children, then re-parses the rewritten query.
if (ast.getChildCount() > 0 && HiveParser.TOK_CTE == ((ASTNode) ast.getChild(0)).getToken().getType()) {
  ASTNode cte = (ASTNode) ast.getChild(0);
  for (int index = cte.getChildCount() - 1; index >= 0; index--) {
    ASTNode subq = (ASTNode) cte.getChild(index);
    // child(1) of each CTE subquery node holds its alias identifier
    String alias = unescapeIdentifier(subq.getChild(1).getText());
    if (cteAlias.contains(alias)) {
      throw new SemanticException("Duplicate definition of " + alias);
    } else {
      cteAlias.add(alias);
      for (int index = 1; index < ast.getChildCount(); index++) {
        walkASTMarkTABREF(tableMask, (ASTNode) ast.getChild(index), cteAlias, ctx, db,
            tabNameToTabObject, ignoredTokens);
        rewrittenTree = ParseUtils.parse(rewrittenQuery);
      } catch (ParseException e) {
        throw new SemanticException(e);
/**
 * Extracts the column identifier on the left-hand side of a SET assignment.
 * The assignment node is expected to have the shape
 * (= (TOK_TABLE_OR_COL Identifier) expr); each expectation is guarded by an
 * assert (active only under -ea).
 *
 * @param assignment the EQUAL node for one assignment
 * @return the Identifier node naming the assigned column
 */
private ASTNode findLHSofAssignment(ASTNode assignment) {
  assert assignment.getToken().getType() == HiveParser.EQUAL :
      "Expected set assignments to use equals operator but found " + assignment.getName();
  ASTNode tableOrColTok = (ASTNode) assignment.getChildren().get(0);
  assert tableOrColTok.getToken().getType() == HiveParser.TOK_TABLE_OR_COL :
      "Expected left side of assignment to be table or column";
  ASTNode colName = (ASTNode) tableOrColTok.getChildren().get(0);
  assert colName.getToken().getType() == HiveParser.Identifier :
      "Expected column name";
  return colName;
}

// NOTE(review): the signature below is cut off in this view; its body is
// outside the visible source.
private Map<String, ASTNode> collectSetColumnsAndExpressions(ASTNode setClause,
/**
 * Entry point for update/delete/merge analysis. Delegates to the parent
 * analyzer when {@code useSuper} is set; otherwise requires an ACID-capable
 * transaction manager and dispatches on the statement's root token.
 *
 * @param tree root of the parsed statement
 * @throws SemanticException if the transaction manager does not support ACID
 */
@Override
public void analyzeInternal(ASTNode tree) throws SemanticException {
  if (useSuper) {
    super.analyzeInternal(tree);
  } else {
    // ACID operations are rejected outright under a non-ACID txn manager.
    if (!SessionState.get().getTxnMgr().supportsAcid()) {
      throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TXNMGR.getMsg());
    }
    switch (tree.getToken().getType()) {
    case HiveParser.TOK_DELETE_FROM:
      analyzeDelete(tree);
      break;
    case HiveParser.TOK_UPDATE_TABLE:
      analyzeUpdate(tree);
      break;
    case HiveParser.TOK_MERGE:
      analyzeMerge(tree);
      break;
    default:
      // RuntimeException rather than SemanticException: reaching here means
      // this analyzer was registered for a token it does not handle — a
      // programming error, not a user error.
      throw new RuntimeException("Asked to parse token " + tree.getName() + " in " +
          "UpdateDeleteSemanticAnalyzer");
    }
    cleanUpMetaColumnAccessControl();
  }
}

// NOTE(review): the method below starts here but its body is outside the
// visible source.
private boolean updating() {
/**
 * Descends the leftmost branch of a tree, stopping at the first node with a
 * non-null token or at a leaf, whichever comes first.
 *
 * @param tree candidate node from which to start searching
 * @return the node at which the descent stopped
 */
private static ASTNode findRootNonNullToken(ASTNode tree) {
  if (tree.getToken() != null || tree.getChildCount() == 0) {
    return tree;
  }
  return findRootNonNullToken((ASTNode) tree.getChild(0));
}
/**
 * Decides whether the statement should be rewritten: true only when the
 * tree's first child is a TOK_TAB wrapping a TOK_TABNAME and its second
 * child is the KW_COLUMNS keyword.
 *
 * @param tree parsed statement root
 * @return true if the rewrite applies
 */
private boolean shouldRewrite(ASTNode tree) {
  if (tree.getChildCount() <= 1) {
    return false;
  }
  ASTNode first = (ASTNode) tree.getChild(0);
  if (first.getToken().getType() != HiveParser.TOK_TAB) {
    return false;
  }
  ASTNode tabName = (ASTNode) first.getChild(0);
  if (tabName.getToken().getType() != HiveParser.TOK_TABNAME) {
    return false;
  }
  ASTNode second = (ASTNode) tree.getChild(1);
  return second.getToken().getType() == HiveParser.KW_COLUMNS;
}
private void addSetRCols(ASTNode node, Set<String> setRCols) { // See if this node is a TOK_TABLE_OR_COL. If so, find the value and put it in the list. If // not, recurse on any children if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) { ASTNode colName = (ASTNode)node.getChildren().get(0); assert colName.getToken().getType() == HiveParser.Identifier : "Expected column name"; setRCols.add(normalizeColName(colName.getText())); } else if (node.getChildren() != null) { for (Node n : node.getChildren()) { addSetRCols((ASTNode)n, setRCols); } } }
/**
 * Returns the source line reported by the leftmost leaf under {@code tree}.
 *
 * @param tree node whose leftmost leaf position is wanted
 * @return line number of that leaf's token
 */
private static int getLine(ASTNode tree) {
  ASTNode node = tree;
  // walk down the leftmost branch until a leaf is reached
  while (node.getChildCount() != 0) {
    node = (ASTNode) node.getChild(0);
  }
  return node.getToken().getLine();
}
/**
 * Reports whether the statistics statement carries a partition spec (a
 * TOK_PARTSPEC as the second child of the tree's first child), i.e. whether
 * stats are gathered at partition level rather than table level.
 *
 * @param tree root of the parsed statement
 * @return true when a TOK_PARTSPEC node is present
 */
public static boolean isPartitionLevelStats(ASTNode tree) {
  ASTNode node = (ASTNode) tree.getChild(0);
  boolean hasPartSpec = false;
  if (node.getChildCount() > 1) {
    ASTNode second = (ASTNode) node.getChild(1);
    hasPartSpec = second.getToken().getType() == HiveParser.TOK_PARTSPEC;
  }
  return hasPartSpec;
}
/**
 * Returns the character position within its line of the leftmost leaf under
 * {@code tree}.
 *
 * @param tree node whose leftmost leaf position is wanted
 * @return character position reported by that leaf's token
 */
private static int getCharPositionInLine(ASTNode tree) {
  ASTNode node = tree;
  // walk down the leftmost branch until a leaf is reached
  while (node.getChildCount() != 0) {
    node = (ASTNode) node.getChild(0);
  }
  return node.getToken().getCharPositionInLine();
}