Refine search
/** Verifies that a bare "select *" query with an OR predicate produces no explicit select-field mappings. */
@Test
public void selectAll() throws RecognitionException {
    final String queryString = "select * where a = 1 or b > 2";

    final ANTLRStringStream input = new ANTLRStringStream(queryString);
    final CpQueryFilterLexer lexer = new CpQueryFilterLexer(input);
    final TokenRewriteStream tokenStream = new TokenRewriteStream(lexer);
    final CpQueryFilterParser parser = new CpQueryFilterParser(tokenStream);

    final ParsedQuery query = parser.ql().parsedQuery;

    // "select *" carries no explicit field list, so no mappings are expected.
    final Collection<SelectFieldMapping> mappings = query.getSelectFieldMappings();
    assertEquals(0, mappings.size());
}
stream.replace(subQuery.getAlias(), whereAST.getTokenStartIndex(), whereAST.getTokenStopIndex(), stream.insertAfter(subQuery.getAlias(), selectClause.getTokenStopIndex(), selectClauseAdditions); if (!addGroupByClause) { ASTNode groupBy = (ASTNode) sqAST.getChild(1).getChild(3); stream.insertAfter(subQuery.getAlias(), groupBy.getTokenStopIndex(), gByClauseAdditions); stream.insertAfter(subQuery.getAlias(), sqAST.getTokenStopIndex() - 1, gByClauseAdditions); stream.toString(subQuery.getAlias(), sqAST.getTokenStartIndex(), sqAST.getTokenStopIndex()) + " " + subQuery.getAlias(); } finally { stream.deleteProgram(subQuery.getAlias());
/**
 * Recursively walks the AST and backtick-quotes every Identifier token
 * via the token rewrite stream. Because the stream mutation is not
 * idempotent, each node is quoted at most once (tracked in visitedNodes).
 */
private void visit(ASTNode node) {
    if (node.getType() == HiveParser.Identifier) {
        if (visitedNodes.containsKey(node)) {
            // Already quoted: re-quoting would corrupt the rewritten stream.
            return;
        }
        visitedNodes.put(node, node);
        trs.insertBefore(node.getToken(), "`");
        trs.insertAfter(node.getToken(), "`");
    }
    if (node.getChildCount() > 0) {
        for (Node child : node.getChildren()) {
            visit((ASTNode) child);
        }
    }
}
}
/**
 * Runs {@code xpath} through the XPathEnhancerParser, which rewrites the
 * expression using the supplied prefix and index, and returns the
 * rewritten expression text.
 *
 * @param prefix prefix injected by the enhancer parser
 * @param index  index injected by the enhancer parser
 * @param xpath  the XPath expression to enhance
 * @return the enhanced XPath expression
 * @throws IllegalArgumentException if {@code xpath} cannot be parsed
 */
public static String enhanceXPath(final String prefix, final int index, final String xpath) {
    if (log.isDebugEnabled()) {
        log.debug("prefix: " + prefix);
        log.debug("index: " + index);
        log.debug("xpath: " + xpath);
    }

    final XPathEnhancerLexer lexer = new XPathEnhancerLexer(new ANTLRStringStream(xpath));
    final TokenRewriteStream rewriteStream = new TokenRewriteStream(lexer);
    final XPathEnhancerParser parser = new XPathEnhancerParser(rewriteStream, index, prefix);
    try {
        parser.main();
    } catch (RecognitionException e) {
        throw new IllegalArgumentException(xpath + " is no valid XPath expression", e);
    }
    // The rewrite operations recorded during parsing are materialized here.
    return rewriteStream.toString();
}
if (entry.getKey() > 0) { // negative means the key didn't exist in the original tokenRewriteStream.replace( entry.getKey(), entry.getValue().tokenStopIndex, String replacementText = tokenRewriteStream.toString( copyTranslation.sourceNode.getTokenStartIndex(), copyTranslation.sourceNode.getTokenStopIndex()); String currentText = tokenRewriteStream.toString( copyTranslation.targetNode.getTokenStartIndex(), copyTranslation.targetNode.getTokenStopIndex()); tokenRewriteStream.replace( copyTranslation.targetNode.getTokenStartIndex(), copyTranslation.targetNode.getTokenStopIndex(),
private String getQueryStringFromAst(ASTNode ast) { StringBuilder sb = new StringBuilder(); int startIdx = ast.getTokenStartIndex(); int endIdx = ast.getTokenStopIndex(); boolean queryNeedsQuotes = true; if (conf.getVar(ConfVars.HIVE_QUOTEDID_SUPPORT).equals("none")) { queryNeedsQuotes = false; } for (int idx = startIdx; idx <= endIdx; idx++) { Token curTok = ctx.getTokenRewriteStream().get(idx); if (curTok.getType() == Token.EOF) { continue; } else if (queryNeedsQuotes && curTok.getType() == HiveLexer.Identifier) { // The Tokens have no distinction between Identifiers and QuotedIdentifiers. // Ugly solution is just to surround all identifiers with quotes. sb.append('`'); // Re-escape any backtick (`) characters in the identifier. sb.append(curTok.getText().replaceAll("`", "``")); sb.append('`'); } else { sb.append(curTok.getText()); } } return sb.toString(); }
@Override
public void visit(Object t) {
    CommonTree a = (CommonTree)t;
    CommonTree child = (CommonTree)a.getChild(0);
    // Delete both the label node's token and its child's token from the stream.
    tokens.delete(a.token.getTokenIndex()); // kill "id+="
    tokens.delete(child.token.getTokenIndex());
}
});
while (!queue.isEmpty()) { ASTNode astNode = (ASTNode) queue.poll(); if (astNode.getToken().getType() == HiveParser.TOK_TABREF) { int aliasIndex = 0; StringBuffer additionalTabInfo = new StringBuffer(); for (int index = 1; index < astNode.getChildCount(); index++) { ASTNode ct = (ASTNode) astNode.getChild(index); if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) { additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex())); } else {
public void visit(Object t) {
    CommonTree a = (CommonTree)t;
    // Replace the matched token's text with a block-comment form of itself.
    tokens.replace(a.token.getTokenIndex(), "/*"+a.getText()+"*/");
}
});
public void visit(Object t) {
    CommonTree a = (CommonTree)t;
    CommonTree child = (CommonTree)a.getChild(0);
    int stop = child.getTokenStopIndex();
    if ( child.getType()==ANTLRv3Parser.SEMPRED ) {
        // A semantic predicate carries a second child (its rewrite);
        // extend the deletion range through that child's last token.
        CommonTree rew = (CommonTree)a.getChild(1);
        stop = rew.getTokenStopIndex();
    }
    // Delete the whole token range, then tidy the newline left behind.
    tokens.delete(a.token.getTokenIndex(), stop);
    killTrailingNewline(tokens, stop);
}
});
@Override
public void visit(Object t) {
    CommonTree a = (CommonTree)t;
    if ( !a.hasAncestor(ANTLRv3Parser.OPTIONS) ) { // avoid options
        // Delete both the label node's token and its child's token.
        CommonTree child = (CommonTree)a.getChild(0);
        tokens.delete(a.token.getTokenIndex()); // kill "id="
        tokens.delete(child.token.getTokenIndex());
    }
}
});
@Override
public void visit(Object t) {
    CommonTree a = (CommonTree)t;
    // Wrap the subtree's entire source token range in a block comment.
    String text = tokens.toString(a.getTokenStartIndex(), a.getTokenStopIndex());
    tokens.replace(a.getTokenStartIndex(), a.getTokenStopIndex(), "/*"+text+"*/");
}
});
/**
 * Records an insert-after operation on the named rewrite program,
 * delegating to the index-based overload using the token's index.
 *
 * @param programName rewrite program to record the operation on
 * @param t           token after which {@code text} is inserted
 * @param text        text to insert
 */
public void insertAfter(String programName, Token t, Object text) {
    insertAfter(programName,t.getTokenIndex(), text);
}
/**
 * Records an insert-before operation on the named rewrite program,
 * delegating to the index-based overload using the token's index.
 *
 * @param programName rewrite program to record the operation on
 * @param t           token before which {@code text} is inserted
 * @param text        text to insert
 */
public void insertBefore(String programName, Token t, Object text) {
    insertBefore(programName, t.getTokenIndex(), text);
}
/**
 * Accumulates the source text of {@code selectExpr} into the pending
 * GROUP BY clause additions, inserting a comma separator when appending
 * to an existing clause or to previously accumulated entries.
 */
void addGByClauseRewrite(ASTNode selectExpr) {
    if (gByClauseAdditions == null) {
        gByClauseAdditions = "";
    }
    // A separator is needed when a GROUP BY clause already exists
    // (addGroupByClause is false) or when something was already accumulated.
    final boolean needsSeparator = !addGroupByClause || !gByClauseAdditions.isEmpty();
    if (needsSeparator) {
        gByClauseAdditions += ", ";
    }
    gByClauseAdditions += stream.toString(
        selectExpr.getTokenStartIndex(), selectExpr.getTokenStopIndex());
}
// Replace the rule's full matched token range (from the rule's start token
// through the last consumed token) with the rendered template output.
((TokenRewriteStream)input).replace(
    ((Token)retval.start).getTokenIndex(),
    input.LT(-1).getTokenIndex(),
    retval.st);
out.println("\nRewritten Query:\n" + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex())); } finally { stream.deleteProgram(program);
/**
 * Parses a standalone workload-management trigger expression and returns
 * its AST.
 *
 * @param command the trigger expression text
 * @return the parsed AST
 * @throws ParseException if the lexer or parser reported any errors
 */
public ASTNode parseTriggerExpression(String command) throws ParseException {
    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    HiveParser parser = new HiveParser(new TokenRewriteStream(lexer));
    parser.setTreeAdaptor(adaptor);

    HiveParser_ResourcePlanParser.triggerExpressionStandalone_return result = null;
    try {
        result = parser.gResourcePlanParser.triggerExpressionStandalone();
    } catch (RecognitionException e) {
        // Details of the failure are collected in parser.errors.
        throw new ParseException(parser.errors);
    }

    if (!lexer.getErrors().isEmpty()) {
        throw new ParseException(lexer.getErrors());
    }
    if (!parser.errors.isEmpty()) {
        throw new ParseException(parser.errors);
    }
    return result.getTree();
}
// Neutralize the subquery by replacing its token range with the always-true
// predicate "1 = 1", then AND the rewritten post-join condition onto the
// existing WHERE clause.
stream.replace(program, sqNode.getTokenStartIndex(), sqNode.getTokenStopIndex(), "1 = 1");
stream.insertAfter(program, tokWhere.getTokenStopIndex(), " and " + postJoinCond);
/** Deletes the default rewrite program, delegating to the named overload. */
public void deleteProgram() {
    deleteProgram(DEFAULT_PROGRAM_NAME);
}