/**
 * Builds an ANTLR {@link CharStream} from the buffered text of a {@link SourceCode}.
 *
 * @param sourceCode source whose code buffer is converted
 * @return an in-memory character stream over the full source text
 */
/* default */ static CharStream getCharStreamFromSourceCode(final SourceCode sourceCode) {
    final StringBuilder codeBuffer = sourceCode.getCodeBuffer();
    return CharStreams.fromString(codeBuffer.toString());
}
}
/**
 * Creates the dialect-specific SQL lexer for the given database type.
 *
 * @param databaseType target database dialect
 * @param sql SQL text to be lexed
 * @return lexer instance for the dialect
 * @throws UnsupportedOperationException if no lexer exists for the database type
 */
private static Lexer createLexer(final DatabaseType databaseType, final String sql) {
    final CharStream input = CharStreams.fromString(sql);
    switch (databaseType) {
        // H2 shares the MySQL grammar.
        case H2:
        case MySQL:
            return new MySQLStatementLexer(input);
        case PostgreSQL:
            return new PostgreSQLStatementLexer(input);
        case SQLServer:
            return new SQLServerStatementLexer(input);
        case Oracle:
            return new OracleStatementLexer(input);
        default:
            throw new UnsupportedOperationException(String.format("Can not support database type [%s].", databaseType));
    }
}
return new LinkedHashMap<>(); CharStream input = CharStreams.fromString(text); StatusCodeLexer lexer = new StatusCodeLexer(input); CommonTokenStream tokens = new CommonTokenStream(lexer);
/** * Parses block comment content as javadoc comment. * @param blockComment * block comment content. * @return parse tree */ private JavadocParser createJavadocParser(String blockComment) { final JavadocLexer lexer = new JavadocLexer(CharStreams.fromString(blockComment)); final CommonTokenStream tokens = new CommonTokenStream(lexer); final JavadocParser parser = new JavadocParser(tokens); // remove default error listeners parser.removeErrorListeners(); // add custom error listener that logs syntax errors parser.addErrorListener(errorListener); // JavadocParserErrorStrategy stops parsing on first parse error encountered unlike the // DefaultErrorStrategy used by ANTLR which rather attempts error recovery. parser.setErrorHandler(new JavadocParserErrorStrategy()); return parser; }
/**
 * Parses a javadoc comment into its documentation parse tree, routing lexer
 * and parser errors through a file-aware error listener.
 *
 * @param file file the comment came from (used for error reporting)
 * @param doc raw javadoc text; {@code null} is treated as empty
 * @return the parsed documentation context
 */
private static JavadocParser.DocumentationContext parseJavadoc(Path file, String doc) {
    ANTLRErrorListener listener = errorListener(LoggerFactory.getLogger(DocCollector.class), file);
    // Guard against a missing comment body.
    String text = doc == null ? "" : doc;
    JavadocLexer lexer = new JavadocLexer(CharStreams.fromString(text));
    lexer.removeErrorListeners();
    lexer.addErrorListener(listener);
    JavadocParser parser = new JavadocParser(new CommonTokenStream(lexer));
    parser.removeErrorListeners();
    parser.addErrorListener(listener);
    return parser.documentation();
}
/** * Initializes logical expression lexer and parser, add error listener that converts all * syntax error into {@link org.apache.drill.common.exceptions.ExpressionParsingException}. * Parses given expression into logical expression instance. * * @param expr expression to be parsed * @return logical expression instance */ public static LogicalExpression parse(String expr) { ExprLexer lexer = new ExprLexer(CharStreams.fromString(expr)); lexer.removeErrorListeners(); // need to remove since default listener will output warning lexer.addErrorListener(ErrorListener.INSTANCE); CommonTokenStream tokens = new CommonTokenStream(lexer); ExprParser parser = new ExprParser(tokens); parser.removeErrorListeners(); // need to remove since default listener will output warning parser.addErrorListener(ErrorListener.INSTANCE); ExprParser.ParseContext parseContext = parser.parse(); logger.trace("Tokens: [{}]. Parsing details: [{}].", tokens.getText(), parseContext.toInfoString(parser)); return parseContext.e; }
charStream = CharStreams.fromString(input); } else { charStream = CharStreams.fromString(input, sourceName);
private ParserRuleContext getParseTree(final String sql) { final SqlBaseLexer sqlBaseLexer = new SqlBaseLexer( new CaseInsensitiveStream(CharStreams.fromString(sql))); final CommonTokenStream tokenStream = new CommonTokenStream(sqlBaseLexer); final SqlBaseParser sqlBaseParser = new SqlBaseParser(tokenStream); sqlBaseLexer.removeErrorListeners(); sqlBaseLexer.addErrorListener(ERROR_LISTENER); sqlBaseParser.removeErrorListeners(); sqlBaseParser.addErrorListener(ERROR_LISTENER); final Function<SqlBaseParser, ParserRuleContext> parseFunction = SqlBaseParser::statements; try { // first, try parsing with potentially faster SLL mode sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.SLL); return parseFunction.apply(sqlBaseParser); } catch (final ParseCancellationException ex) { // if we fail, parse with LL mode tokenStream.seek(0); // rewind input stream sqlBaseParser.reset(); sqlBaseParser.getInterpreter().setPredictionMode(PredictionMode.LL); return parseFunction.apply(sqlBaseParser); } }
/**
 * Creates a {@link CharStream} given a {@link String}.
 *
 * @param s the text to expose as a character stream
 * @return a {@link CodePointCharStream} over {@code s}, tagged with the
 *         unknown-source-name placeholder
 */
public static CodePointCharStream fromString(String s) {
    // Delegates to the two-argument overload with a placeholder source name.
    return fromString(s, IntStream.UNKNOWN_SOURCE_NAME);
}
private Node invokeParser(String name, String sql, Function<SqlBaseParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(CharStreams.fromString(sql))); CommonTokenStream tokenStream = new CommonTokenStream(lexer); SqlBaseParser parser = new SqlBaseParser(tokenStream);
@Override public void parse(String ddlContent, Tables databaseTables) { this.databaseTables = databaseTables; CodePointCharStream ddlContentCharStream = CharStreams.fromString(ddlContent); L lexer = createNewLexerInstance(new CaseChangingCharStream(ddlContentCharStream, isGrammarInUpperCase())); P parser = createNewParserInstance(new CommonTokenStream(lexer)); dataTypeResolver = initializeDataTypeResolver(); // remove default console output printing error listener parser.removeErrorListener(ConsoleErrorListener.INSTANCE); ParsingErrorListener parsingErrorListener = new ParsingErrorListener(AbstractDdlParser::accumulateParsingFailure); parser.addErrorListener(parsingErrorListener); ParseTree parseTree = parseTree(parser); if (parsingErrorListener.getErrors().isEmpty()) { antlrDdlParserListener = createParseTreeWalkerListener(); if (antlrDdlParserListener != null) { ParseTreeWalker.DEFAULT.walk(antlrDdlParserListener, parseTree); if (throwErrorsFromTreeWalk && !antlrDdlParserListener.getErrors().isEmpty()) { throwParsingException(antlrDdlParserListener.getErrors()); } } } else { throwParsingException(parsingErrorListener.getErrors()); } }
/**
 * Returns a stream over the inline content on the first call; on later calls
 * delegates to the import reader, treating the argument as a name.
 *
 * @param contentOrName inline content (first call) or an import name (later calls)
 * @return character stream for the content, or the import reader's result
 */
@Nullable
@Override
public CharStream read(String contentOrName) {
    if (contentReaded) {
        // Content was already consumed; resolve imports by name from now on.
        return importReader.read(contentOrName);
    }
    contentReaded = true;
    return CharStreams.fromString(contentOrName);
}
}
/**
 * Lazily tokenizes the input, returning an iterator over its tokens.
 *
 * @param input text to tokenize
 * @return iterator producing tokens on demand
 */
public Iterator<Token> getTokenIterator(String input) {
    CharStream stream = CharStreams.fromString(input);
    return new TokenIterator(this, lexerInstance(stream));
}
/**
 * Eagerly tokenizes the whole input.
 *
 * @param input text to tokenize
 * @return all tokens produced by the lexer
 */
public List<Token> tokenize(String input) {
    CharStream stream = CharStreams.fromString(input);
    return getAllTokens(lexerInstance(stream));
}
public SignatureProcessor(String input) { Lexer lexer = new Java7Lexer(CharStreams.fromString(input)); tokenStream = new CommonTokenStream(lexer); parser = new Java7Parser(tokenStream); parser.setErrorHandler(new BailErrorStrategy()); unitContext = parser.compilationUnit(); }
/**
 * Parse the raw EQL query and apply it to the supplied query.
 *
 * @param raw raw EQL query text
 * @param query query object the parsed expressions are applied to
 */
public static <T> void parse(String raw, SpiQuery<T> query) {
    EQLLexer lexer = new EQLLexer(CharStreams.fromString(raw));
    EQLParser parser = new EQLParser(new CommonTokenStream(lexer));
    parser.addErrorListener(errorListener);
    EQLParser.Select_statementContext selectStatement = parser.select_statement();
    // Walk the parse tree, translating each node into operations on the query.
    EqlAdapter<T> adapter = new EqlAdapter<>(query);
    new ParseTreeWalker().walk(adapter, selectStatement);
    query.simplifyExpressions();
}
/**
 * Creates a {@link CharStream} given a {@link String}.
 *
 * @param s the text to expose as a character stream
 * @return a {@link CodePointCharStream} over {@code s}, tagged with the
 *         unknown-source-name placeholder
 */
public static CodePointCharStream fromString(String s) {
    // Delegates to the two-argument overload with a placeholder source name.
    return fromString(s, IntStream.UNKNOWN_SOURCE_NAME);
}
/**
 * Normalizes the input's case according to the configured sensitivity type
 * and wraps it in an ANTLR {@link CharStream}.
 *
 * <p>Case folding uses {@code Locale.ROOT} so the result does not depend on
 * the JVM's default locale (the default-locale forms break for e.g. the
 * Turkish dotless-i, where {@code "I".toLowerCase()} is not {@code "i"}).
 *
 * @param s input text
 * @return character stream over the (possibly case-normalized) text
 */
@Override
public CharStream getCharStream(String s) {
    if (t == GenericParser.CaseSensitiveType.LOWER) {
        s = s.toLowerCase(java.util.Locale.ROOT);
    }
    else if (t == GenericParser.CaseSensitiveType.UPPER) {
        s = s.toUpperCase(java.util.Locale.ROOT);
    }
    return CharStreams.fromString(s);
}
}
/**
 * First invocation treats the argument as inline content and streams it;
 * every later invocation treats it as an import name and delegates.
 *
 * @param contentOrName inline content (first call) or an import name (later calls)
 * @return character stream for the content, or the import reader's result
 */
@Nullable
@Override
public CharStream read(String contentOrName) {
    if (!contentReaded) {
        contentReaded = true;
        // First call: the argument is the document content itself.
        return CharStreams.fromString(contentOrName);
    }
    // Subsequent calls resolve imports by name.
    return importReader.read(contentOrName);
}
}
private void init(String cql) { this.errorListener.setQuery(cql); CodePointCharStream input = CharStreams.fromString(cql); this.lexer.setInputStream(input); //this.tokenStream.setTokenSource(lexer); UnbufferedTokenStream tokenStream = new UnbufferedTokenStream(lexer); this.parser.setTokenStream(tokenStream); }