tokens = new CommonTokenStream((TokenSource) lexer);
tokens.getTokens(); // force the stream to buffer all tokens from the lexer

for (int i = 0; i < tokens.size(); i++) {
    Token token = tokens.get(i);
    if (token.getChannel() == smaliParser.HIDDEN) {
        continue;
    }
    System.out.println(smaliParser.tokenNames[token.getType()] + ": " + token.getText());
}

CommonTreeNodeStream treeStream = new CommonTreeNodeStream(t);
treeStream.setTokenStream(tokens);
lexer = new FastSimpleGenericEdifactDirectXMLLexer( new ANTLRStringStream( inputValue ) );
tokens = new CommonTokenStream( lexer );
parser = new FastSimpleGenericEdifactDirectXMLParser( tokens );

// On reuse, point the existing objects at fresh input instead of reallocating:
lexer.setCharStream( new ANTLRStringStream( inputValue ) );
tokens.setTokenSource( lexer );
parser.setTokenStream( tokens );

// Error reporting for an exception e that carries an "expecting" token type:
errorMessage.append( ": expecting "
    + ( ( e.expecting > -1 ) ? parser.getTokenNames()[e.expecting] : "<UNKNOWN>" )
    + " but found " );
errorMessage.append( ( e.token.getType() >= 0 ) ? parser.getTokenNames()[e.token.getType()] : "<EOF>" );
// Excerpt from a StringTemplate compile method whose trailing parameter is
// "Token templateToken"; error handling and some construction details are elided.
ANTLRStringStream is = new ANTLRStringStream(template);
is.name = srcName != null ? srcName : name;

STLexer lexer;
if (templateToken != null && templateToken.getType() == GroupParser.BIGSTRING_NO_NL) {
    // big-string templates get a lexer variant (an anonymous STLexer subclass in
    // the original); reconstructed here as the plain constructor call
    lexer = new STLexer(group.errMgr, is, templateToken,
                        group.delimiterStartChar, group.delimiterStopChar);
} else {
    lexer = new STLexer(group.errMgr, is, templateToken,
                        group.delimiterStartChar, group.delimiterStopChar);
}

CommonTokenStream tokens = new CommonTokenStream(lexer);
STParser p = new STParser(tokens, group.errMgr, templateToken);
STParser.templateAndEOF_return r = p.templateAndEOF(); // RecognitionException handling elided

CommonTreeNodeStream nodes = new CommonTreeNodeStream(r.getTree());
nodes.setTokenStream(tokens);
CodeGenerator gen = new CodeGenerator(nodes, group.errMgr, name, template, templateToken);

// "impl" is the CompiledST produced by the code generator (construction elided):
impl.template = template;
impl.ast = r.getTree();
impl.ast.setUnknownTokenBoundaries();
impl.tokens = tokens;
private static CommonTokenStream createTokenStream(final String expression) throws HL7QueryParsingException {
    final CharStream input = new ANTLRStringStream(expression);
    final HL7QueryLexer lexer = new HL7QueryLexer(input);
    return new CommonTokenStream(lexer);
}
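A minimal sketch of driving this helper and dumping the buffered tokens; the query text here is invented for illustration and not taken from the original source:

// Hypothetical usage; the query string below is illustrative only.
CommonTokenStream tokens = createTokenStream("declare message as required OBX");
for (Object obj : tokens.getTokens()) { // getTokens() buffers the remaining input
    Token token = (Token) obj;
    System.out.println(token.getType() + ": " + token.getText());
}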
public static PartitionDefinition parsePartitionDefinition(String source) throws SiddhiParserException {
    try {
        SiddhiQLGrammarLexer lexer = new SiddhiQLGrammarLexer();
        lexer.setCharStream(new ANTLRStringStream(source));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        SiddhiQLGrammarParser parser = new SiddhiQLGrammarParser(tokens);
        SiddhiQLGrammarParser.definitionPartitionFinal_return r = parser.definitionPartitionFinal();
        CommonTree t = (CommonTree) r.getTree();
        CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
        nodes.setTokenStream(tokens);
        SiddhiQLGrammarWalker walker = new SiddhiQLGrammarWalker(nodes);
        return walker.definitionPartitionFinal();
    } catch (Throwable e) {
        throw new SiddhiParserException(e.getMessage(), e);
    }
}
public static ClassNode parse(String fileName, Reader bufferedReader) throws IOException, RecognitionException {
    ANTLRStringStream is = new ANTLRReaderStream(bufferedReader);
    is.name = fileName;
    JasminLexer lexer = new JasminLexer(is);
    CommonTokenStream ts = new CommonTokenStream(lexer);
    JasminParser parser = new JasminParser(ts);
    return parser.parse();
}
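A sketch of a plausible caller, assuming a Jasmin source file on disk; the file name is invented:

// Hypothetical driver; "HelloWorld.j" is an illustrative file name.
try (Reader reader = new BufferedReader(new FileReader("HelloWorld.j"))) {
    ClassNode classNode = parse("HelloWorld.j", reader);
}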
input = new ANTLRStringStream(testInput);
CommonTokenStream tokens = new CommonTokenStream(lexObj);
// pick the tree-node stream based on whether a custom TreeAdaptor was supplied
if (customTreeAdaptor != null) {
    nodes = new CommonTreeNodeStream(customTreeAdaptor, tree);
} else {
    nodes = new CommonTreeNodeStream(tree);
}
nodes.setTokenStream(tokens);
Method treeReturnName = _treeReturn.getMethod("getTree");
CommonTree returnTree = (CommonTree) treeReturnName.invoke(treeRuleReturn);
astString = returnTree.toStringTree();
// reject input that the parser did not fully consume
if (tokens.index() != tokens.size() - 1) {
    throw new InvalidInputException();
}
ANTLRStringStream in = new ANTLRStringStream( outputQuery );
CpQueryFilterLexer lexer = new CpQueryFilterLexer( in );
CommonTokenStream tokens = new CommonTokenStream( lexer );
CpQueryFilterParser parser = new CpQueryFilterParser( tokens );
// ... the surrounding error handler (elided) reports the offending token via
//     token.getText(), index, lineNumber
input = new ANTLRStringStream(testInput);
CommonTokenStream tokens = new CommonTokenStream(lexObj);
// load the generated parser class reflectively; "parser" holds the Class object
parser = Class.forName(parserPath).asSubclass(Parser.class);
Constructor<? extends Parser> parConstructor = parser.getConstructor(TokenStream.class);
// ... instantiate the parser and invoke the start rule (elided) ...
Method returnName = _return.getMethod("getTree");
CommonTree tree = (CommonTree) returnName.invoke(ruleReturn);
astString = tree.toStringTree();
// flag input that was not fully consumed
if (tokens.index() != tokens.size() - 1) {
    this.stderr += "Stopped parsing at token index " + tokens.index() + ": ";
}
private DSLMapWalker buildFileMappingWalker(final List<ParserError> errors, CharStream stream) throws RecognitionException {
    DSLMapLexer lexer = new DSLMapLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream();
    tokens.setTokenSource(lexer);
    DSLMapParser parser = new DSLMapParser(tokens);
    DSLMapParser.mapping_file_return example = parser.mapping_file();
    CommonTree tree = (CommonTree) example.getTree();
    // logger.info(tree.toStringTree());
    CommonTreeNodeStream nodes = new CommonTreeNodeStream(tree);
    DSLMapWalker walker = new DSLMapWalker(nodes);
    errors.addAll(lexer.getErrors());
    errors.addAll(parser.getErrors());
    return walker;
}
System.out.println(source);
System.out.println("==================");
ANTLRStringStream in = new ANTLRStringStream(source);
FuzzyJavaLexer lexer = new FuzzyJavaLexer(in);
CommonTokenStream tokens = new CommonTokenStream(lexer);
for (Object obj : tokens.getTokens()) {
    Token token = (Token) obj;
    if (token.getType() == FuzzyJavaLexer.SingleLineComment) {
        System.out.println("Found a SingleLineComment on line " + token.getLine()
                + ", starting at column " + token.getCharPositionInLine()
                + ", text: " + token.getText());
    }
}
public static HoseFactory compile(String program) throws RecognitionException {
    // "lexer" is a shared instance declared outside this method; re-point it at the new input
    lexer.setCharStream(new ANTLRStringStream(program));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    HoseParser parser = new HoseParser(tokens);
    CommonTree tree = (CommonTree) parser.program().getTree();
    CommonTreeNodeStream nodes = new CommonTreeNodeStream(tree);
    HoseWalker walker = new HoseWalker(nodes);
    walker.program();
    return walker.program.new Factory();
}
private CommonTree parse(String query) {
    try {
        final JpqlLexer lexer = new JpqlLexer(new ANTLRStringStream(query));
        final CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        final JpqlParser parser = new JpqlParser(tokenStream);
        final ql_statement_return ql_statement = parser.ql_statement();
        final CommonTree tree = (CommonTree) ql_statement.getTree();

        final List<String> errors = parser.getErrors();
        if (errors.size() > 0) {
            final String errorMsg = Joiner.on("\n\t").join(errors);
            JpqlQuery.LOG.error("Cannot parse query: {0}", //
                JpqlQuery.LOG.boxed(query, //
                    new Object[] { "\n\t" + errorMsg, "\n\n" + tree.toStringTree() + "\n" }));
            throw new PersistenceException("Cannot parse the query:\n " + errorMsg + ".\n" + query);
        }
        return tree;
    } catch (final PersistenceException e) {
        throw e;
    } catch (final Exception e) {
        throw new PersistenceException("Cannot parse the query:\n " + e.getMessage() + ".\n" + query, e);
    }
}
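A minimal sketch of invoking this helper from within the same class; the JPQL string is invented for illustration:

// Hypothetical call; the query text is illustrative only.
final CommonTree tree = parse("SELECT e FROM Employee e WHERE e.salary > :min");
System.out.println(tree.toStringTree());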
public Object parseRegex(String pattern) throws SyntaxError {
    if (pattern == null)
        return null;
    if (pattern.length() == 0)
        return "";
    try {
        RegexLexer lex = new RegexLexer(new ANTLRReaderStream(new StringReader(pattern)));
        CommonTokenStream tokens = new CommonTokenStream(lex);
        org.databene.regex.antlr.RegexParser parser = new org.databene.regex.antlr.RegexParser(tokens);
        org.databene.regex.antlr.RegexParser.expression_return r = parser.expression();
        checkForSyntaxErrors(pattern, "regex", parser, r);
        if (r != null) {
            CommonTree tree = (CommonTree) r.getTree();
            LOGGER.debug("parsed {} to {}", pattern, tree.toStringTree());
            return convertNode(tree);
        } else {
            return null;
        }
    } catch (RuntimeException e) {
        if (e.getCause() instanceof RecognitionException)
            throw mapToSyntaxError((RecognitionException) e.getCause(), pattern);
        else
            throw e;
    } catch (IOException e) {
        throw new IllegalStateException("Encountered illegal state in regex parsing", e);
    } catch (RecognitionException e) {
        throw mapToSyntaxError(e, pattern);
    }
}
/**
 * Verify the input has been properly consumed.
 */
protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) {
    if (tokens.index() != tokens.size() - 1) {
        // At this point we need to check for redundant EOF tokens,
        // which might have been added by the parser:
        List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() - 1);
        for (Token endToken : endingTokens) {
            if (!"<EOF>".equals(endToken.getText())) {
                // writing to ps2 will mark the test as failed:
                ps2.print("Invalid input");
                return;
            }
        }
    }
}
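One way this check might be driven from a test harness, capturing anything the method prints; the capture setup is an assumption, not from the original:

// Hypothetical harness: anything written to the stream marks the test as failed.
ByteArrayOutputStream failures = new ByteArrayOutputStream();
checkForValidInput(tokens, new PrintStream(failures));
boolean failed = failures.size() > 0;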
public static BuilderClassDef parse(String path, InputStream is, DexBuilder dexBuilder)
        throws UnsupportedEncodingException, RecognitionException {
    File smaliFile = new File(path);
    InputStreamReader reader = new InputStreamReader(is, "UTF-8");

    LexerErrorInterface lexer = new smaliFlexLexer(reader);
    ((smaliFlexLexer) lexer).setSourceFile(smaliFile);
    CommonTokenStream tokens = new CommonTokenStream((TokenSource) lexer);

    smaliParser parser = new smaliParser(tokens);
    parser.setApiLevel(DEFAULT_API_LEVEL);
    smaliParser.smali_file_return result = parser.smali_file();
    if ((parser.getNumberOfSyntaxErrors() > 0) || (lexer.getNumberOfSyntaxErrors() > 0)) {
        throw new RuntimeException("Unable to parse: " + smaliFile);
    }

    CommonTree t = result.getTree();
    CommonTreeNodeStream treeStream = new CommonTreeNodeStream(t);
    treeStream.setTokenStream(tokens);

    smaliTreeWalker dexGen = new smaliTreeWalker(treeStream);
    dexGen.setVerboseErrors(false);
    dexGen.setDexBuilder(dexBuilder);
    BuilderClassDef classDef = (BuilderClassDef) dexGen.smali_file();
    if (dexGen.getNumberOfSyntaxErrors() != 0) {
        throw new RuntimeException("Unable to walk: " + smaliFile);
    }
    return classDef;
}
private String getUnconsumedTokens(CommonTokenStream tokens) {
    // ensure we've buffered all tokens from the underlying TokenSource
    tokens.fill();
    if (tokens.index() == tokens.size() - 1) {
        return null;
    }
    StringBuilder nonEofEndingTokens = new StringBuilder();
    @SuppressWarnings("unchecked")
    List<Token> unconsumed = (List<Token>) tokens.getTokens(tokens.index(), tokens.size() - 1);
    for (Token endToken : unconsumed) {
        // Ignore <EOF> tokens as they might be inserted by the parser
        if (endToken.getType() != Token.EOF) {
            nonEofEndingTokens.append(endToken.getText());
        }
    }
    return nonEofEndingTokens.length() > 0 ? nonEofEndingTokens.toString() : null;
}
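A sketch of a plausible caller that turns leftover tokens into an error; the exception choice is an assumption:

// Hypothetical usage after the start rule has returned.
String unconsumed = getUnconsumedTokens(tokens);
if (unconsumed != null) {
    throw new IllegalArgumentException("Unexpected trailing input: " + unconsumed);
}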
/** Scan backwards from the current point in this.tokens list,
 *  looking for the start of the rule or subrule.
 *  Return token or null if for some reason we can't find the start.
 */
public Token getRuleOrSubruleStartToken() {
    if (tokens == null) return null;
    int i = tokens.index();
    int n = tokens.size();
    if (i >= n) i = n - 1; // seems index == n as we lex
    while (i >= 0 && i < n) {
        int ttype = tokens.get(i).getType();
        if (ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF) {
            return tokens.get(i);
        }
        i--;
    }
    return null;
}
/**
 * Loads the proto from an {@link ANTLRReaderStream}.
 */
public static void loadFrom(ANTLRReaderStream input, Proto target) throws Exception {
    // Create a ProtoLexer that feeds from that stream
    ProtoLexer lexer = new ProtoLexer(input);
    // Create a stream of tokens fed by the lexer
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Create a parser that feeds off the token stream
    ProtoParser parser = new ProtoParser(tokens);
    // Begin parsing at rule parse
    parser.parse(target);
}
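A sketch of a caller, assuming Proto exposes a no-argument constructor and a .proto file exists on disk; the file name is invented:

// Hypothetical driver; "example.proto" is an illustrative file name.
Proto proto = new Proto();
try (Reader reader = new FileReader("example.proto")) {
    loadFrom(new ANTLRReaderStream(reader), proto);
}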