/**
 * Reconstructs the original SQL text that produced the given AST node, straight
 * from the token rewrite stream, without needing to understand the node's
 * structure (QuotedIdentifiers excepted — those are handled by the visitor).
 */
private String getMatchedText(ASTNode n) {
  // Let the quoting helper record any identifiers that need re-quoting first.
  quotedIdenfierHelper.visit(n);
  final int startIdx = n.getTokenStartIndex();
  // NOTE(review): the +1 pulls in one token past the node's stop index — confirm
  // this is intended to capture a trailing token.
  final int stopIdx = n.getTokenStopIndex() + 1;
  return ctx.getTokenRewriteStream().toString(startIdx, stopIdx).trim();
}
// Returns the verbatim SQL text backing this AST node, read from the token
// rewrite stream and trimmed.
// NOTE(review): the +1 on the stop index pulls in one token past the node's
// end — confirm it is intended to capture a trailing token.
private String getMatchedText(ASTNode n) {
  return getTokenRewriteStream().toString(n.getTokenStartIndex(), n.getTokenStopIndex() + 1).trim();
}
/**
/**
 * This allows us to take an arbitrary ASTNode and turn it back into SQL that
 * produced it without needing to understand what it is (except for
 * QuotedIdentifiers).
 */
private String getMatchedText(ASTNode n) {
  // Let the quoting helper record any identifiers needing re-quoting first.
  quotedIdenfierHelper.visit(n);
  // NOTE(review): the +1 on the stop index pulls in one token past the node's
  // end — confirm it is intended to capture a trailing token.
  return ctx.getTokenRewriteStream().toString(n.getTokenStartIndex(), n.getTokenStopIndex() + 1).trim();
}
/**
// Returns the verbatim SQL text backing this AST node, read from the token
// rewrite stream and trimmed.
// NOTE(review): the +1 on the stop index pulls in one token past the node's
// end — confirm it is intended to capture a trailing token.
private String getMatchedText(ASTNode n) {
  return getTokenRewriteStream().toString(n.getTokenStartIndex(), n.getTokenStopIndex() + 1).trim();
}
/**
private String getQueryStringFromAst(ASTNode ast) { StringBuilder sb = new StringBuilder(); int startIdx = ast.getTokenStartIndex(); int endIdx = ast.getTokenStopIndex(); boolean queryNeedsQuotes = true; if (conf.getVar(ConfVars.HIVE_QUOTEDID_SUPPORT).equals("none")) { queryNeedsQuotes = false; } for (int idx = startIdx; idx <= endIdx; idx++) { Token curTok = ctx.getTokenRewriteStream().get(idx); if (curTok.getType() == Token.EOF) { continue; } else if (queryNeedsQuotes && curTok.getType() == HiveLexer.Identifier) { // The Tokens have no distinction between Identifiers and QuotedIdentifiers. // Ugly solution is just to surround all identifiers with quotes. sb.append('`'); // Re-escape any backtick (`) characters in the identifier. sb.append(curTok.getText().replaceAll("`", "``")); sb.append('`'); } else { sb.append(curTok.getText()); } } return sb.toString(); }
// Recover the original HAVING clause text from the token stream so the error
// message can show the user's own SQL.
String havingClause = ctx.getTokenRewriteStream().toString(
    havingExpr.getTokenStartIndex(), havingExpr.getTokenStopIndex());
// Format call continues beyond this span.
String msg = String.format("Encountered Select alias '%s' in having clause '%s'"
// Recover the original HAVING clause text from the token stream so the error
// message can show the user's own SQL.
String havingClause = ctx.getTokenRewriteStream().toString(
    havingExpr.getTokenStartIndex(), havingExpr.getTokenStopIndex());
// Format call continues beyond this span.
String msg = String.format("Encountered Select alias '%s' in having clause '%s'"
public QBSubQuery(String outerQueryId, int sqIdx, ASTNode subQueryAST, ASTNode parentQueryExpression, SubQueryTypeDef operator, ASTNode originalSQAST, Context ctx) { super(); this.subQueryAST = subQueryAST; this.parentQueryExpression = parentQueryExpression; this.operator = operator; this.outerQueryId = outerQueryId; this.sqIdx = sqIdx; this.alias = "sq_" + this.sqIdx; this.numCorrExprsinSQ = 0; this.numOuterCorrExprsForHaving = 0; String s = ctx.getTokenRewriteStream().toString( originalSQAST.getTokenStartIndex(), originalSQAST.getTokenStopIndex()); originalSQASTOrigin = new ASTNodeOrigin("SubQuery", alias, s, alias, originalSQAST); numOfCorrelationExprsAddedToSQSelect = 0; groupbyAddedToSQ = false; if ( operator.getType() == SubQueryType.NOT_IN ) { notInCheck = new NotInCheck(); } subQueryDiagnostic = SubQueryDiagnostic.getRewrite(this, ctx.getTokenRewriteStream(), ctx); }
// Grab the token rewrite stream and the AST from the work unit.
TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
// Presumably the name of a rewrite program registered on the stream — verify
// against where this stream's edits are applied.
String program = "sq rewrite";
ASTNode ast = work.getAst();
public QBSubQuery(String outerQueryId, int sqIdx, ASTNode subQueryAST, ASTNode parentQueryExpression, SubQueryTypeDef operator, ASTNode originalSQAST, Context ctx) { super(); this.subQueryAST = subQueryAST; this.parentQueryExpression = parentQueryExpression; this.operator = operator; this.outerQueryId = outerQueryId; this.sqIdx = sqIdx; this.alias = "sq_" + this.sqIdx; this.numCorrExprsinSQ = 0; this.numOuterCorrExprsForHaving = 0; String s = ctx.getTokenRewriteStream().toString( originalSQAST.getTokenStartIndex(), originalSQAST.getTokenStopIndex()); originalSQASTOrigin = new ASTNodeOrigin("SubQuery", alias, s, alias, originalSQAST); numOfCorrelationExprsAddedToSQSelect = 0; groupbyAddedToSQ = false; if ( operator.getType() == SubQueryType.NOT_IN ) { notInCheck = new NotInCheck(); } subQueryDiagnostic = SubQueryDiagnostic.getRewrite(this, ctx.getTokenRewriteStream(), ctx); }
// Grab the token rewrite stream and the AST from the work unit.
TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
// Presumably the name of a rewrite program registered on the stream — verify
// against where this stream's edits are applied.
String program = "sq rewrite";
ASTNode ast = work.getAst();
// Preserve the user's original view definition text exactly as typed.
String originalText = ctx.getTokenRewriteStream().toString(
    viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
createVwDesc.setViewOriginalText(originalText);
// Apply the deferred unparse translations to the stream, then re-read the same
// token span to obtain the expanded view text.
unparseTranslator.applyTranslations(ctx.getTokenRewriteStream());
String expandedText = ctx.getTokenRewriteStream().toString(
    viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
/**
 * Extracts any query hint attached to the first select clause of the query
 * block, parses the hint text, and records the resulting AST on the parse
 * info. A no-op when the select clause carries no QUERY_HINT node.
 *
 * @param qb the query block whose select clause may carry a hint
 * @throws SemanticException if the hint text fails to parse
 */
private void setQueryHints(QB qb) throws SemanticException {
  QBParseInfo qbp = getQBParseInfo(qb);
  String selClauseName = qbp.getClauseNames().iterator().next();
  Tree selExpr0 = qbp.getSelForClause(selClauseName).getChild(0);
  // No hint present — nothing to do. (Braced: unbraced single-statement
  // returns are an easy source of future edit mistakes.)
  if (selExpr0.getType() != HiveParser.QUERY_HINT) {
    return;
  }
  // Recover the hint's original text from the token stream so it can be
  // re-parsed by the hint grammar.
  String hint = ctx.getTokenRewriteStream().toString(
      selExpr0.getTokenStartIndex(), selExpr0.getTokenStopIndex());
  LOG.debug("Handling query hints: " + hint);
  ParseDriver pd = new ParseDriver();
  try {
    ASTNode hintNode = pd.parseHint(hint);
    qbp.setHints(hintNode);
  } catch (ParseException e) {
    // Wrap with the cause preserved so the original parse failure survives.
    throw new SemanticException("failed to parse query hint: "+e.getMessage(), e);
  }
}
// Preserve the user's original view definition text exactly as typed.
String originalText = ctx.getTokenRewriteStream().toString(
    viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
createVwDesc.setViewOriginalText(originalText);
// Apply the deferred unparse translations to the stream, then re-read the same
// token span to obtain the expanded view text.
unparseTranslator.applyTranslations(ctx.getTokenRewriteStream());
String expandedText = ctx.getTokenRewriteStream().toString(
    viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
// Table decorations (split-sample specs, table properties) get no separate
// handling here; their original SQL text is captured verbatim so it can be
// re-attached to the rewritten table reference.
|| ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
|| ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
  additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
} else {
// Delegate CHECK-constraint processing; the token rewrite stream is passed so
// the constraint's original expression text can be recovered.
BaseSemanticAnalyzer.processCheckConstraints(catName, qualifiedTabName[0], qualifiedTabName[1],
    child, null, checkConstraints, child, this.ctx.getTokenRewriteStream());
break;
default:
// Table decorations (split-sample specs, table properties) get no separate
// handling here; their original SQL text is captured verbatim so it can be
// re-attached to the rewritten table reference.
|| ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
|| ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
  additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
} else {
// Helper that re-quotes identifiers when unparsing, backed by the token
// rewrite stream. (The field name carries a historical typo: "Idenfier".)
quotedIdenfierHelper = new IdentifierQuoter(ctx.getTokenRewriteStream());
// CHECK constraint on the renamed column: the token rewrite stream lets the
// handler recover the constraint's original expression text.
processCheckConstraints(catName, qualified[0], qualified[1], constraintChild,
    ImmutableList.of(newColName), checkConstraints, (ASTNode)ast.getChild(2),
    this.ctx.getTokenRewriteStream());
break;
case HiveParser.TOK_DEFAULT_VALUE:
// DEFAULT value constraint on the renamed column — same delegation pattern.
processDefaultConstraints(catName, qualified[0], qualified[1], constraintChild,
    ImmutableList.of(newColName), defaultConstraints, (ASTNode)ast.getChild(2),
    this.ctx.getTokenRewriteStream());
break;
case HiveParser.TOK_NOT_NULL:
// Recover the full original query text from the token stream for logging
// during the running phase of EXPLAIN ANALYZE.
String query = ctx.getTokenRewriteStream().toString(input.getTokenStartIndex(),
    input.getTokenStopIndex());
LOG.info("Explain analyze (running phase) for query " + query);