Refine search
/**
 * Builds a token stream for the given HL7 query expression.
 *
 * @param expression the HL7 query text to tokenize
 * @return a {@link CommonTokenStream} fed by the HL7 query lexer
 * @throws HL7QueryParsingException if the expression cannot be tokenized
 */
private static CommonTokenStream createTokenStream(final String expression) throws HL7QueryParsingException {
    final CharStream charStream = new ANTLRStringStream(expression);
    return new CommonTokenStream(new HL7QueryLexer(charStream));
}
// Case-insensitive lookahead: uppercases every character returned by the
// underlying stream so the lexer matches keywords regardless of input case.
// NOTE(review): the final stray brace closes the enclosing (likely anonymous)
// class whose header is outside this view.
@Override public int LA(int i) {
    int returnChar = super.LA(i);
    if (returnChar == CharStream.EOF) {
        // EOF sentinel must pass through untouched
        return returnChar;
    } else if (returnChar == 0) {
        // NUL is likewise passed through unchanged
        return returnChar;
    }
    return Character.toUpperCase((char) returnChar);
} }
public void examineAction() { //System.out.println("examine "+actionToken); ANTLRStringStream in = new ANTLRStringStream(actionToken.getText()); in.setLine(actionToken.getLine()); in.setCharPositionInLine(actionToken.getCharPositionInLine()); ActionSplitter splitter = new ActionSplitter(in, this); // forces eval, triggers listener methods node.chunks = splitter.getActionTokens(); }
Token templateToken) ANTLRStringStream is = new ANTLRStringStream(template); is.name = srcName!=null ? srcName : name; STLexer lexer; if ( templateToken!=null && templateToken.getType() == GroupParser.BIGSTRING_NO_NL ) group.delimiterStartChar, group.delimiterStopChar); CommonTokenStream tokens = new CommonTokenStream(lexer); STParser p = new STParser(tokens, group.errMgr, templateToken); STParser.templateAndEOF_return r; impl.template = template; impl.ast = r.getTree(); impl.ast.setUnknownTokenBoundaries(); impl.tokens = tokens;
input = new ANTLRStringStream(testInput); CommonTokenStream tokens = new CommonTokenStream(lexObj); parser = Class.forName(parserPath).asSubclass(Parser.class); Constructor<? extends Parser> parConstructor = parser.getConstructor(TokenStream.class); Method returnName = _return.getMethod("getTree"); CommonTree tree = (CommonTree) returnName.invoke(ruleReturn); astString = tree.toStringTree(); if ( tokens.index()!=tokens.size()-1 ) { this.stderr += "Stopped parsing at token index "+tokens.index()+": ";
private static List<TokenLocation> buildApexDocTokenLocations(String source) { ANTLRStringStream stream = new ANTLRStringStream(source); ApexLexer lexer = new ApexLexer(stream); ArrayList<TokenLocation> tokenLocations = new ArrayList<>(); int startIndex = 0; Token token = lexer.nextToken(); int endIndex = lexer.getCharIndex(); while (token.getType() != Token.EOF) { if (token.getType() == ApexLexer.BLOCK_COMMENT) { // Filter only block comments starting with "/**" if (token.getText().startsWith("/**")) { tokenLocations.add(new TokenLocation(startIndex, token.getText())); } } // TODO : Check other non-doc comments and tokens of type ApexLexer.EOL_COMMENT for "NOPMD" suppressions startIndex = endIndex; token = lexer.nextToken(); endIndex = lexer.getCharIndex(); } return tokenLocations; }
/**
 * Verifies that a {@code select *} query with an OR'd where-clause produces
 * no explicit select-field mappings.
 */
@Test
public void selectAll() throws RecognitionException {
    String queryString = "select * where a = 1 or b > 2";
    CpQueryFilterLexer lexer = new CpQueryFilterLexer(new ANTLRStringStream(queryString));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    CpQueryFilterParser parser = new CpQueryFilterParser(tokens);
    ParsedQuery query = parser.ql().parsedQuery;
    Collection<SelectFieldMapping> identifiers = query.getSelectFieldMappings();
    // "select *" means no individual field mappings are recorded
    assertEquals(0, identifiers.size());
}
ANTLRStringStream in = new ANTLRStringStream( outputQuery ); CpQueryFilterLexer lexer = new CpQueryFilterLexer( in ); CommonTokenStream tokens = new CommonTokenStream( lexer ); CpQueryFilterParser parser = new CpQueryFilterParser( tokens ); token.getText(), index, lineNumber );
StringBuilder code = sourceCode.getCodeBuffer(); ANTLRStringStream ass = new ANTLRStringStream(code.toString()); ApexLexer lexer = new ApexLexer(ass) { @Override Token token = lexer.nextToken(); while (token.getType() != Token.EOF) { if (token.getChannel() != Lexer.HIDDEN) { String tokenText = token.getText(); if (!caseSensitive) { tokenText = tokenText.toLowerCase(Locale.ROOT);
private CommonTree parse(String query) { try { final JpqlLexer lexer = new JpqlLexer(new ANTLRStringStream(query)); final CommonTokenStream tokenStream = new CommonTokenStream(lexer); final JpqlParser parser = new JpqlParser(tokenStream); final ql_statement_return ql_statement = parser.ql_statement(); final CommonTree tree = (CommonTree) ql_statement.getTree(); final List<String> errors = parser.getErrors(); if (errors.size() > 0) { final String errorMsg = Joiner.on("\n\t").join(errors); JpqlQuery.LOG.error("Cannot parse query: {0}", // JpqlQuery.LOG.boxed(query, // new Object[] { "\n\t" + errorMsg, "\n\n" + tree.toStringTree() + "\n" })); throw new PersistenceException("Cannot parse the query:\n " + errorMsg + ".\n" + query); } return tree; } catch (final PersistenceException e) { throw e; } catch (final Exception e) { throw new PersistenceException("Cannot parse the query:\n " + e.getMessage() + ".\n" + query, e); } }
lexer = new FastSimpleGenericEdifactDirectXMLLexer( new ANTLRStringStream( inputValue ) ); tokens = new CommonTokenStream( lexer ); parser = new FastSimpleGenericEdifactDirectXMLParser( tokens ); lexer.setCharStream( new ANTLRStringStream( inputValue ) ); tokens.setTokenSource( lexer ); parser.setTokenStream( tokens ); errorMessage.append( ": expecting " + ( ( e.expecting > -1 ) ? parser.getTokenNames()[e.expecting] : "<UNKNOWN>" ) + " but found " ); errorMessage.append( ( e.token.getType() >= 0 ) ? parser.getTokenNames()[e.token.getType()] : "<EOF>" );
/**
 * Creates a translator for the action text carried by {@code actionAST},
 * bound to the rule named {@code ruleName} within the generator's grammar.
 */
public ActionTranslator(CodeGenerator generator, String ruleName, GrammarAST actionAST) {
    this(new ANTLRStringStream(actionAST.token.getText()));
    this.generator = generator;
    this.grammar = generator.grammar;
    this.actionToken = actionAST.token;
    this.outerAltNum = actionAST.outerAltNum;
    // Must run after this.grammar is assigned, as the lookup goes through it.
    this.enclosingRule = grammar.getLocallyDefinedRule(ruleName);
}
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;

/**
 * Demo driver: lexes and parses the expression "true and false", prints its
 * AST, dumps the flattened node stream token by token, then evaluates the
 * tree with the walker and prints the result.
 * (Removed the unused local {@code CommonTree tr}.)
 */
public class Main {
    public static void main(String[] args) throws Exception {
        INTcLexer lex = new INTcLexer(new ANTLRStringStream("true and false\n"));
        CommonTokenStream tokens = new CommonTokenStream(lex);
        INTcParser parser = new INTcParser(tokens);
        CommonTree t = (CommonTree) parser.prog().getTree();
        CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
        INTcWalker evaluator = new INTcWalker(nodes);
        System.out.println(t.toStringTree());
        // Walk the node stream manually, printing each token until EOF.
        while (true) {
            Token token = ((CommonTree) nodes.nextElement()).getToken();
            if (token.getType() == INTcParser.EOF) {
                break;
            }
            System.out.printf("%-10s '%s'\n", INTcParser.tokenNames[token.getType()], token.getText());
        }
        System.out.println("\nresult=" + evaluator.getResult());
    }
}
import org.antlr.runtime.*;

/** Minimal ANTLR demo: lexes and parses the arithmetic expression "12*(5-6)". */
public class ANTLRDemo {
    public static void main(String[] args) throws Exception {
        CharStream input = new ANTLRStringStream("12*(5-6)");
        ExpLexer lexer = new ExpLexer(input);
        ExpParser parser = new ExpParser(new CommonTokenStream(lexer));
        parser.eval();
    }
}
/**
 * Runs the XPath enhancer over {@code xpath} with the given prefix and index
 * and returns the rewritten expression produced by the token rewrite stream.
 *
 * @param prefix prefix handed to the enhancer parser
 * @param index  index handed to the enhancer parser
 * @param xpath  the XPath expression to rewrite
 * @return the enhanced XPath expression
 * @throws IllegalArgumentException if {@code xpath} is not a valid XPath expression
 */
public static String enhanceXPath(final String prefix, final int index, final String xpath) {
    if (log.isDebugEnabled()) {
        log.debug("prefix: " + prefix);
        log.debug("index: " + index);
        log.debug("xpath: " + xpath);
    }
    final XPathEnhancerLexer enhancerLexer = new XPathEnhancerLexer(new ANTLRStringStream(xpath));
    final TokenRewriteStream rewriteStream = new TokenRewriteStream(enhancerLexer);
    final XPathEnhancerParser enhancerParser = new XPathEnhancerParser(rewriteStream, index, prefix);
    try {
        enhancerParser.main();
    } catch (RecognitionException e) {
        throw new IllegalArgumentException(xpath + " is no valid XPath expression", e);
    }
    // The parser edits tokens in place; the stream renders the rewritten text.
    return rewriteStream.toString();
}
import org.antlr.runtime.*; import org.antlr.runtime.tree.*; public class Main { public static void main(String[] args) throws Exception { String source = "var a = 1; function foo() { var b = 2; } var c = 3;"; ANTLRStringStream in = new ANTLRStringStream(source); JavaScriptLexer lexer = new JavaScriptLexer(in); CommonTokenStream tokens = new CommonTokenStream(lexer); JavaScriptParser parser = new JavaScriptParser(tokens); JavaScriptParser.program_return returnValue = parser.program(); CommonTree tree = (CommonTree)returnValue.getTree(); for(Object o : tree.getChildren()) { CommonTree child = (CommonTree)o; if(child.getType() == JavaScriptParser.VARIABLE) { System.out.println("Found a global var: "+child.getChild(0)); } } } }
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;

/**
 * Demo: parses "aaa | bbb | ccc" and prints each child of the resulting AST root.
 */
class FooTest {
    public static void main(String[] args) throws Exception {
        String text = "aaa | bbb | ccc";
        System.out.println("parsing: " + text);
        ANTLRStringStream in = new ANTLRStringStream(text);
        test01Lexer lexer = new test01Lexer(in);
        // A plain CommonTokenStream suffices: the original allocated a
        // TokenRewriteStream into a CommonTokenStream variable but never
        // used any rewrite capabilities.
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        test01Parser parser = new test01Parser(tokens);
        CommonTree root = (CommonTree) parser.test().getTree();
        for (int i = 0; i < root.getChildCount(); i++) {
            CommonTree child = (CommonTree) root.getChild(i);
            System.out.println("root.children[" + i + "] = " + child);
        }
    }
}
/** consume() ahead until p==index; can't just set p=index as we must * update line and charPositionInLine. */ @Override public void seek(int index) { if ( index<=p ) { p = index; // just jump; don't update stream state (line, ...) return; } // seek forward, consume until p hits index while ( p<index ) { consume(); } }
public void processNested(Token actionToken) { ANTLRStringStream in = new ANTLRStringStream(actionToken.getText()); in.setLine(actionToken.getLine()); in.setCharPositionInLine(actionToken.getCharPositionInLine()); ActionSplitter splitter = new ActionSplitter(in, this); // forces eval, triggers listener methods splitter.getActionTokens(); }
Token templateToken) ANTLRStringStream is = new ANTLRStringStream(template); is.name = srcName!=null ? srcName : name; STLexer lexer = null; if ( templateToken!=null && templateToken.getType() == GroupParser.BIGSTRING_NO_NL ) group.delimiterStartChar, group.delimiterStopChar); CommonTokenStream tokens = new CommonTokenStream(lexer); STParser p = new STParser(tokens, group.errMgr, templateToken); STParser.templateAndEOF_return r = null; impl.template = template; impl.ast = (CommonTree)r.getTree(); impl.ast.setUnknownTokenBoundaries(); impl.tokens = tokens;