/**
 * Replaces {@code child} with {@code newChild} in the same slot under
 * child's parent. Assumes {@code child} is currently attached to a parent.
 */
private static void replaceASTChild(ASTNode child, ASTNode newChild) {
  ASTNode owner = (ASTNode) child.parent;
  int slot = child.childIndex;
  // Remove the old node first, then splice the replacement into its slot.
  owner.deleteChild(slot);
  owner.insertChild(slot, newChild);
}
/**
 * Replaces {@code child} with {@code newChild} in the same slot under
 * child's parent. Assumes {@code child} is currently attached to a parent.
 */
private static void replaceASTChild(ASTNode child, ASTNode newChild) {
  ASTNode owner = (ASTNode) child.parent;
  int slot = child.childIndex;
  // Remove the old node first, then splice the replacement into its slot.
  owner.deleteChild(slot);
  owner.insertChild(slot, newChild);
}
/**
 * Moves {@code expr} to the front of {@code gBy}'s child list, keeping the
 * existing children after it in their original order.
 *
 * @param gBy  the GROUP BY AST node whose children are being reordered
 * @param expr the expression node to place first
 */
static void addGroupExpressionToFront(ASTNode gBy, ASTNode expr) {
  // NOTE(review): the original built a TOK_GROUPING_SETS_EXPRESSION wrapper
  // (grpExpr) around expr and then never used it — dead code removed here.
  // If the wrapper was actually meant to be inserted instead of the bare
  // expr, the first add below should add that wrapper instead — confirm
  // against callers.
  List<ASTNode> newChildren = new ArrayList<ASTNode>();
  newChildren.add(expr);
  // Detach existing children from the front so their relative order is
  // preserved (the original deleted from the back, which re-added them in
  // reversed order — contradicting the "to front" contract).
  while (gBy.getChildCount() > 0) {
    newChildren.add((ASTNode) gBy.deleteChild(0));
  }
  for (ASTNode child : newChildren) {
    gBy.addChild(child);
  }
}
/**
 * Moves {@code expr} to the front of {@code gBy}'s child list, keeping the
 * existing children after it in their original order.
 *
 * @param gBy  the GROUP BY AST node whose children are being reordered
 * @param expr the expression node to place first
 */
static void addGroupExpressionToFront(ASTNode gBy, ASTNode expr) {
  // NOTE(review): the original built a TOK_GROUPING_SETS_EXPRESSION wrapper
  // (grpExpr) around expr and then never used it — dead code removed here.
  // If the wrapper was actually meant to be inserted instead of the bare
  // expr, the first add below should add that wrapper instead — confirm
  // against callers.
  List<ASTNode> newChildren = new ArrayList<ASTNode>();
  newChildren.add(expr);
  // Detach existing children from the front so their relative order is
  // preserved (the original deleted from the back, which re-added them in
  // reversed order — contradicting the "to front" contract).
  while (gBy.getChildCount() > 0) {
    newChildren.add((ASTNode) gBy.deleteChild(0));
  }
  for (ASTNode child : newChildren) {
    gBy.addChild(child);
  }
}
/**
 * Rewrites every TOK_TABLE_OR_COL reference in the expression tree so the
 * referenced column name is replaced by its fully-qualified equivalent
 * looked up through {@code col2Col} and {@code inputRR}.
 *
 * @param checkExpr expression subtree to rewrite in place
 * @param col2Col   map from lower-cased source column name to internal name
 * @param inputRR   row resolver used to reverse-map internal names
 */
private void replaceColumnReference(ASTNode checkExpr, Map<String, String> col2Col, RowResolver inputRR) {
  if (checkExpr.getType() != HiveParser.TOK_TABLE_OR_COL) {
    // Not a direct column reference — rewrite any references nested below.
    for (int childIdx = 0; childIdx < checkExpr.getChildCount(); childIdx++) {
      replaceColumnReference((ASTNode) checkExpr.getChild(childIdx), col2Col, inputRR);
    }
    return;
  }
  ASTNode nameChild = (ASTNode) checkExpr.getChild(0);
  String referencedCol = nameChild.getText().toLowerCase();
  // Every referenced column is expected to have a mapping.
  assert (col2Col.containsKey(referencedCol));
  String internalName = col2Col.get(referencedCol);
  String[] qualifiedName = inputRR.reverseLookup(internalName);
  String replacementCol = qualifiedName[1];
  // Swap the name child for a fresh node carrying the qualified name.
  checkExpr.deleteChild(0);
  checkExpr.addChild(ASTBuilder.createAST(nameChild.getType(), replacementCol));
}
private void fixUpASTNoAggregateIncrementalRebuild(ASTNode newAST) throws SemanticException { // Replace INSERT OVERWRITE by INSERT INTO // AST tree will have this shape: // TOK_QUERY // TOK_FROM // ... // TOK_INSERT // TOK_DESTINATION <- THIS TOKEN IS REPLACED BY 'TOK_INSERT_INTO' // TOK_TAB // TOK_TABNAME // default.cmv_mat_view // TOK_SELECT // ... ASTNode dest = new ASTSearcher().simpleBreadthFirstSearch(newAST, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT, HiveParser.TOK_DESTINATION); ASTNode newChild = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_INSERT_INTO, "TOK_INSERT_INTO"); newChild.addChildren(dest.getChildren()); ASTNode destParent = (ASTNode) dest.getParent(); int childIndex = dest.childIndex; destParent.deleteChild(childIndex); destParent.insertChild(childIndex, newChild); }
/**
 * Pre-analysis hook for EXPLAIN statements: strips an optional
 * REOPTIMIZATION keyword child and, on the first execution of a
 * re-optimized explain, unwraps and returns the explained statement itself.
 *
 * @param context the semantic analyzer hook context (unused here)
 * @param ast     the parsed statement AST
 * @return the (possibly unwrapped) AST to analyze
 * @throws SemanticException declared for interface compatibility
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
  if (ast.getType() != HiveParser.TOK_EXPLAIN) {
    return ast;
  }
  // Child 0 is the statement being explained; scan the remaining children
  // for the REOPTIMIZATION keyword and drop it once found.
  int total = ast.getChildCount();
  for (int idx = 1; idx < total; idx++) {
    if (ast.getChild(idx).getType() == HiveParser.KW_REOPTIMIZATION) {
      explainReOptimization = true;
      ast.deleteChild(idx);
      break;
    }
  }
  if (explainReOptimization && firstExecution()) {
    // First pass: execute the wrapped statement rather than the EXPLAIN, so
    // runtime statistics become available for the re-optimized second pass.
    Tree wrapped = ast.getChild(0);
    wrapped.setParent(ast.getParent());
    ast.getParent().setChild(0, wrapped);
    return (ASTNode) wrapped;
  }
  return ast;
}
// Only SELECT / SELECT DISTINCT nodes are expected at this point.
assert t == HiveParser.TOK_SELECT || t == HiveParser.TOK_SELECTDI : t;
// Replace the node at this slot with the expanded child list, preserving
// order by inserting at successive indexes.
// NOTE(review): `setCols.parent.childIndex` is the index of setCols' PARENT
// within its own parent — this only lines up if `parent` here is the
// grandparent of setCols; confirm against the enclosing method (not visible
// in this fragment).
int ix = setCols.parent.childIndex;
parent.deleteChild(ix);
for (ASTNode node : newChildren) { parent.insertChild(ix++, node);
// Only SELECT / SELECT DISTINCT nodes are expected at this point.
assert t == HiveParser.TOK_SELECT || t == HiveParser.TOK_SELECTDI : t;
// Replace the node at this slot with the expanded child list, preserving
// order by inserting at successive indexes.
// NOTE(review): `setCols.parent.childIndex` is the index of setCols' PARENT
// within its own parent — this only lines up if `parent` here is the
// grandparent of setCols; confirm against the enclosing method (not visible
// in this fragment).
int ix = setCols.parent.childIndex;
parent.deleteChild(ix);
for (ASTNode node : newChildren) { parent.insertChild(ix++, node);
// Swap the destination node for the new INSERT INTO node at the same
// position under its parent (delete then insert at the saved index).
ASTNode destinationParentNode = (ASTNode) destinationNode.getParent();
int childIndex = destinationNode.childIndex;
destinationParentNode.deleteChild(childIndex);
destinationParentNode.insertChild(childIndex, newInsertInto);
/**
 * Replaces {@code child} with {@code newChild} in the same slot under
 * child's parent. Assumes {@code child} is currently attached to a parent.
 */
private static void replaceASTChild(ASTNode child, ASTNode newChild) {
  ASTNode owner = (ASTNode) child.parent;
  int slot = child.childIndex;
  // Remove the old node first, then splice the replacement into its slot.
  owner.deleteChild(slot);
  owner.insertChild(slot, newChild);
}
/**
 * Moves {@code expr} to the front of {@code gBy}'s child list, keeping the
 * existing children after it in their original order.
 *
 * @param gBy  the GROUP BY AST node whose children are being reordered
 * @param expr the expression node to place first
 */
static void addGroupExpressionToFront(ASTNode gBy, ASTNode expr) {
  // NOTE(review): the original built a TOK_GROUPING_SETS_EXPRESSION wrapper
  // (grpExpr) around expr and then never used it — dead code removed here.
  // If the wrapper was actually meant to be inserted instead of the bare
  // expr, the first add below should add that wrapper instead — confirm
  // against callers.
  List<ASTNode> newChildren = new ArrayList<ASTNode>();
  newChildren.add(expr);
  // Detach existing children from the front so their relative order is
  // preserved (the original deleted from the back, which re-added them in
  // reversed order — contradicting the "to front" contract).
  while (gBy.getChildCount() > 0) {
    newChildren.add((ASTNode) gBy.deleteChild(0));
  }
  for (ASTNode child : newChildren) {
    gBy.addChild(child);
  }
}
/**
 * Prunes a HAVING-clause AST so it references only the given columns.
 * Returns the (possibly replaced) subtree, or null when the whole subtree
 * must be dropped because it referenced an unavailable column.
 */
static ASTNode trimHavingAst(ASTNode astNode, Collection<String> columns) {
  if (astNode != null) {
    // The right-hand child of a DOT node is a column name: keep it only if
    // it is in the allowed column set, otherwise signal removal with null.
    if (astNode.getParent() != null && astNode.getParent().getType() == DOT && astNode.getChildIndex() == 1) {
      return columns.contains(astNode.getText()) ? astNode : null;
    }
    // Walk children right-to-left so deletions do not shift the indexes of
    // children not yet visited.
    for (int i = astNode.getChildCount() - 1; i >= 0; i--) {
      ASTNode replacement = trimHavingAst((ASTNode) astNode.getChild(i), columns);
      if (replacement == null) {
        astNode.deleteChild(i);
      } else {
        astNode.setChild(i, replacement);
      }
    }
    // An aggregate or binary operator left with a single operand collapses:
    // drop it entirely if the lone child is not itself a binary operator,
    // otherwise promote that child in its place.
    if (isAggregateAST(astNode) || BINARY_OPERATORS.contains(astNode.getType())) {
      if (astNode.getChildCount() == 1) {
        ASTNode child = (ASTNode) astNode.getChild(0);
        if (!BINARY_OPERATORS.contains(child.getType())) {
          return null;
        } else {
          return child;
        }
      }
    }
  }
  return astNode;
}
static ASTNode trimOrderByAst(ASTNode astNode, Collection<String> columns) {
/**
 * Prunes a HAVING-clause AST so it references only the given columns.
 * Returns the (possibly replaced) subtree, or null when the whole subtree
 * must be dropped because it referenced an unavailable column.
 */
static ASTNode trimHavingAst(ASTNode astNode, Collection<String> columns) {
  if (astNode != null) {
    // The right-hand child of a DOT node is a column name: keep it only if
    // it is in the allowed column set, otherwise signal removal with null.
    if (astNode.getParent() != null && astNode.getParent().getType() == DOT && astNode.getChildIndex() == 1) {
      return columns.contains(astNode.getText()) ? astNode : null;
    }
    // Walk children right-to-left so deletions do not shift the indexes of
    // children not yet visited.
    for (int i = astNode.getChildCount() - 1; i >= 0; i--) {
      ASTNode replacement = trimHavingAst((ASTNode) astNode.getChild(i), columns);
      if (replacement == null) {
        astNode.deleteChild(i);
      } else {
        astNode.setChild(i, replacement);
      }
    }
    // An aggregate or binary operator left with a single operand collapses:
    // drop it entirely if the lone child is not itself a binary operator,
    // otherwise promote that child in its place.
    if (isAggregateAST(astNode) || BINARY_OPERATORS.contains(astNode.getType())) {
      if (astNode.getChildCount() == 1) {
        ASTNode child = (ASTNode) astNode.getChild(0);
        if (!BINARY_OPERATORS.contains(child.getType())) {
          return null;
        } else {
          return child;
        }
      }
    }
  }
  return astNode;
}
static ASTNode trimOrderByAst(ASTNode astNode, Collection<String> columns) {
/** * Gets the mocked cube context. * * @param ast the ast * @return the mocked cube context * @throws ParseException the parse exception * @throws LensException the lens exception */ private CubeQueryContext getMockedCubeContext(ASTNode ast) throws ParseException, LensException { CubeQueryContext context = Mockito.mock(CubeQueryContext.class); if (ast.getToken().getType() == HiveParser.TOK_QUERY) { if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) { // remove cube child from AST for (int i = 0; i < ast.getChildCount() - 1; i++) { ast.setChild(i, ast.getChild(i + 1)); } ast.deleteChild(ast.getChildCount() - 1); } } StringBuilder builder = new StringBuilder(); HQLParser.toInfixString(ast, builder); Mockito.when(context.toHQL()).thenReturn(builder.toString()); Mockito.when(context.toAST(Matchers.any(Context.class))).thenReturn(ast); return context; }
// Prune or replace this child: a null result means the subtree referenced
// unavailable columns and is deleted; otherwise the (possibly collapsed)
// replacement subtree is swapped in at the same index.
ASTNode replacement = trimOrderByAst((ASTNode) astNode.getChild(i), columns);
if (replacement == null) {
  astNode.deleteChild(i);
} else {
  astNode.setChild(i, replacement);
// Prune or replace this child: a null result means the subtree referenced
// unavailable columns and is deleted; otherwise the (possibly collapsed)
// replacement subtree is swapped in at the same index.
ASTNode replacement = trimOrderByAst((ASTNode) astNode.getChild(i), columns);
if (replacement == null) {
  astNode.deleteChild(i);
} else {
  astNode.setChild(i, replacement);
@Override public void analyzeInternal(ASTNode ast) throws SemanticException { reset(true); cubeQB = new QB(null, null, false); if (ast.getToken().getType() == HiveParser.TOK_QUERY) { if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) { // remove cube child from AST for (int i = 0; i < ast.getChildCount() - 1; i++) { ast.setChild(i, ast.getChild(i + 1)); } ast.deleteChild(ast.getChildCount() - 1); } } doPhase1(ast, cubeQB, initPhase1Ctx(), null); } }
@Override public void analyzeInternal(ASTNode ast) throws SemanticException { reset(true); cubeQB = new QB(null, null, false); if (ast.getToken().getType() == HiveParser.TOK_QUERY) { if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) { // remove cube child from AST for (int i = 0; i < ast.getChildCount() - 1; i++) { ast.setChild(i, ast.getChild(i + 1)); } ast.deleteChild(ast.getChildCount() - 1); } } doPhase1(ast, cubeQB, initPhase1Ctx(), null); } }
/**
 * Rewrites the SELECT AST so it keeps only the expressions the chosen
 * storage candidate can answer, fixing up column aliases along the way.
 *
 * @throws LensException propagated from expression-column extraction
 */
private void updateAnswerableSelectColumns() throws LensException {
  // update select AST with selected fields
  // `i` indexes the original select phrases; `currentChild` tracks the
  // corresponding slot in the AST, which lags behind `i` as children are
  // deleted.
  int currentChild = 0;
  // NOTE(review): the loop bound reads getCubeQueryContext().getSelectAST()
  // while children are read/deleted on queryAst.getSelectAST(); this only
  // works if both refer to the same (or an identically-sized) tree — confirm.
  for (int i = 0; i < getCubeQueryContext().getSelectAST().getChildCount(); i++) {
    ASTNode selectExpr = (ASTNode) queryAst.getSelectAST().getChild(currentChild);
    Set<String> exprCols = HQLParser.getColsInExpr(getCubeQueryContext().getAliasForTableName(getCube()), selectExpr);
    if (getStorageCandidate().getColumns().containsAll(exprCols)) {
      // Candidate can answer this expression: reconcile its alias with the
      // alias recorded for the select phrase.
      ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, HiveParser.Identifier);
      String alias = getCubeQueryContext().getSelectPhrases().get(i).getSelectAlias();
      if (aliasNode != null) {
        String queryAlias = aliasNode.getText();
        if (!queryAlias.equals(alias)) {
          // replace the alias node
          // Assumes the alias identifier is the LAST child of the select
          // expression — TODO confirm for all expression shapes.
          ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
          queryAst.getSelectAST().getChild(currentChild)
            .replaceChildren(selectExpr.getChildCount() - 1, selectExpr.getChildCount() - 1, newAliasNode);
        }
      } else {
        // add column alias
        ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
        queryAst.getSelectAST().getChild(currentChild).addChild(newAliasNode);
      }
    } else {
      // Not answerable: drop the child; decrement so the post-loop increment
      // leaves currentChild pointing at the element that shifted into this slot.
      queryAst.getSelectAST().deleteChild(currentChild);
      currentChild--;
    }
    currentChild++;
  }
}