/**
 * Builds a buffered token stream for the given HL7 query expression.
 *
 * @param expression the HL7 query text to tokenize
 * @return a {@link CommonTokenStream} backed by an {@code HL7QueryLexer} over the expression
 * @throws HL7QueryParsingException declared for callers; tokenization itself is lazy
 */
private static CommonTokenStream createTokenStream(final String expression) throws HL7QueryParsingException {
    final HL7QueryLexer queryLexer = new HL7QueryLexer(new ANTLRStringStream(expression));
    return new CommonTokenStream(queryLexer);
}
/**
 * Assembles one Jasmin source file into a .class file under {@code output}.
 * Parse failures ({@link RecognitionException}) are reported to stderr and
 * swallowed so a batch run can continue with the remaining files; I/O errors
 * propagate as {@link IOException}.
 *
 * NOTE(review): relies on enclosing-class fields {@code encoding},
 * {@code autogenLines}, {@code noComputeMax}, {@code dump}, {@code versions}
 * and {@code classVersion} — their semantics are not visible here.
 */
private void assemble1(Path file, Path output) throws IOException {
    try (BufferedReader bufferedReader = Files.newBufferedReader(file, Charset.forName(encoding))) {
        ANTLRStringStream is = new ANTLRReaderStream(bufferedReader);
        // Record the file name so lexer/parser diagnostics can reference it.
        is.name = file.toString();
        JasminLexer lexer = new JasminLexer(is);
        CommonTokenStream ts = new CommonTokenStream(lexer);
        JasminParser parser = new JasminParser(ts);
        parser.rebuildLine = autogenLines;
        // COMPUTE_MAXS lets ASM derive max stack/locals unless explicitly disabled.
        ClassWriter cw = new ClassWriter(noComputeMax?0:ClassWriter.COMPUTE_MAXS);
        ClassNode cn = parser.parse();
        if (cn.version == 0) {
            // Source did not declare a class-file version; apply the configured default.
            cn.version = versions[classVersion];
        }
        if (dump) {
            // Optional debugging aid: print the parsed class to stdout.
            new JasminDumper(new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8), true)).dump(cn);
        }
        cn.accept(cw);
        // Internal class name uses '.' here; convert to a '/'-separated path.
        Path clzFile = output.resolve(cn.name.replace('.', '/') + ".class");
        createParentDirectories(clzFile);
        Files.write(clzFile, cw.toByteArray());
    } catch (RecognitionException e) {
        // Deliberate best-effort: report the failure and keep going.
        System.err.println("Fail to assemble " + file);
        e.printStackTrace();
    }
}
}
/**
 * Parses a QL string into a {@code Query}, prepending an implicit
 * {@code select *} (optionally with {@code where}) when the input does not
 * begin with a full statement keyword. Returns {@code null} when the input
 * is {@code null} or cannot be parsed; parse failures are logged.
 */
public static Query fromQL( String ql ) {
    if ( ql == null ) {
        return null;
    }
    ql = ql.trim();
    String lowered = ql.toLowerCase();
    boolean startsWithVerb = lowered.startsWith( "select" ) || lowered.startsWith( "insert" )
            || lowered.startsWith( "update" ) || lowered.startsWith( "delete" );
    if ( !startsWithVerb ) {
        // Bare "order by ..." or a bare filter expression gets an implicit select.
        ql = lowered.startsWith( "order by" ) ? "select * " + ql : "select * where " + ql;
    }
    try {
        QueryFilterLexer lexer = new QueryFilterLexer( new ANTLRStringStream( ql.trim() ) );
        QueryFilterParser parser = new QueryFilterParser( new CommonTokenStream( lexer ) );
        return parser.ql();
    }
    catch ( Exception e ) {
        logger.error( "Unable to parse \"{}\"", ql, e );
    }
    return null;
}
/**
 * Parses Jasmin assembler source from the given reader into a {@code ClassNode}.
 *
 * @param fileName name attached to the character stream for diagnostics
 * @param bufferedReader source of the Jasmin text
 * @return the parsed class model
 * @throws IOException if reading the source fails
 * @throws RecognitionException if the source does not parse
 */
public static ClassNode parse(String fileName, Reader bufferedReader) throws IOException, RecognitionException {
    ANTLRStringStream charStream = new ANTLRReaderStream(bufferedReader);
    // Attach the file name so parser errors can reference it.
    charStream.name = fileName;
    CommonTokenStream tokenStream = new CommonTokenStream(new JasminLexer(charStream));
    return new JasminParser(tokenStream).parse();
}
/** * Loads the proto from an {@link ANTLRReaderStream}. */ public static void loadFrom(ANTLRReaderStream input, Proto target) throws Exception { // Create an ExprLexer that feeds from that stream ProtoLexer lexer = new ProtoLexer(input); // Create a stream of tokens fed by the lexer CommonTokenStream tokens = new CommonTokenStream(lexer); // Create a parser that feeds off the token stream ProtoParser parser = new ProtoParser(tokens); // Begin parsing at rule parse parser.parse(target); }
/**
 * Parses a filter-predicate string. Returns {@code null} when the input is
 * {@code null} or cannot be parsed; parse failures are logged rather than thrown.
 */
public static FilterPredicate valueOf( String str ) {
    if ( str == null ) {
        return null;
    }
    try {
        QueryFilterLexer lexer = new QueryFilterLexer( new ANTLRStringStream( str.trim() ) );
        QueryFilterParser parser = new QueryFilterParser( new CommonTokenStream( lexer ) );
        return normalize( parser.filter() );
    }
    catch ( Exception e ) {
        logger.error( "Unable to parse \"{}\"", str, e );
    }
    return null;
}
// Fragment: wrap the (externally created) lexer in a buffered token stream and
// hand it to the query-filter parser. The surrounding method is not visible here.
CommonTokenStream tokens = new CommonTokenStream( lexer );
CpQueryFilterParser parser = new CpQueryFilterParser( tokens );
public static void load(InputStream in, Proto proto) throws Exception { // Create an input character stream from standard in ANTLRInputStream input = new ANTLRInputStream(in); // Create an ExprLexer that feeds from that stream ProtoLexer lexer = new ProtoLexer(input); // Create a stream of tokens fed by the lexer CommonTokenStream tokens = new CommonTokenStream(lexer); // Create a parser that feeds off the token stream ProtoParser parser = new ProtoParser(tokens); // Begin parsing at rule prog parser.parse(proto); }
// Fragment: rebuild the token stream from the lexer. The cast suggests the
// lexer's declared type does not expose TokenSource directly — verify at the caller.
tokens = new CommonTokenStream((TokenSource) lexer);
/**
 * Compiles an attribute-expression string into a {@code CompiledExpression}.
 * Parses the text, builds and validates the evaluator tree, then snapshots and
 * clears this instance's accumulated {@code evaluators} set — the snapshot
 * order matters, so statement order must not be changed.
 *
 * @param expression the expression text to compile
 * @return the compiled expression bundling text, root evaluator, AST, and evaluators
 * @throws AttributeExpressionLanguageParsingException on any parse/build failure
 *         (other exceptions are wrapped; parsing exceptions are rethrown as-is)
 */
public CompiledExpression compile(final String expression) {
    try {
        final CharStream input = new ANTLRStringStream(expression);
        final AttributeExpressionLexer lexer = new AttributeExpressionLexer(input);
        final CommonTokenStream lexerTokenStream = new CommonTokenStream(lexer);
        final AttributeExpressionParser parser = new AttributeExpressionParser(lexerTokenStream);
        final Tree ast = (Tree) parser.query().getTree();
        // The grammar wraps the expression; the usable subtree is the first child.
        final Tree tree = ast.getChild(0);
        final Evaluator<?> evaluator = buildEvaluator(tree);
        verifyMappingEvaluatorReduced(evaluator);
        // Snapshot the evaluators registered while building, then reset the
        // instance-level accumulator for the next compile() call.
        final Set<Evaluator<?>> allEvaluators = new HashSet<>(evaluators);
        this.evaluators.clear();
        return new CompiledExpression(expression, evaluator, tree, allEvaluators);
    } catch (final AttributeExpressionLanguageParsingException e) {
        // Already the caller-facing type; rethrow untouched.
        throw e;
    } catch (final Exception e) {
        throw new AttributeExpressionLanguageParsingException(e);
    }
}
/**
 * Parses a smali source stream and registers the resulting class with the
 * supplied {@code DexBuilder}.
 *
 * @param path path of the smali file, used for error reporting
 * @param is the smali source bytes (decoded as UTF-8)
 * @param dexBuilder builder that receives the assembled class
 * @return the assembled class definition
 * @throws UnsupportedEncodingException retained for caller compatibility; no
 *         longer actually thrown since the charset is passed as a constant
 * @throws RecognitionException if ANTLR recognition fails
 */
public static BuilderClassDef parse(String path, InputStream is, DexBuilder dexBuilder)
        throws UnsupportedEncodingException, RecognitionException {
    File smaliFile = new File(path);
    // Use the Charset constant instead of a charset-name string lookup: no
    // runtime name resolution and no possible UnsupportedEncodingException.
    InputStreamReader reader = new InputStreamReader(is, java.nio.charset.StandardCharsets.UTF_8);
    LexerErrorInterface lexer = new smaliFlexLexer(reader);
    ((smaliFlexLexer) lexer).setSourceFile(smaliFile);
    CommonTokenStream tokens = new CommonTokenStream((TokenSource) lexer);
    smaliParser parser = new smaliParser(tokens);
    parser.setApiLevel(DEFAULT_API_LEVEL);
    smaliParser.smali_file_return result = parser.smali_file();
    // Fail fast if either phase reported syntax errors.
    if ((parser.getNumberOfSyntaxErrors() > 0) || (lexer.getNumberOfSyntaxErrors() > 0)) {
        throw new RuntimeException("Unable to parse: " + smaliFile);
    }
    // Walk the parse tree to emit the class into the DexBuilder.
    CommonTree t = result.getTree();
    CommonTreeNodeStream treeStream = new CommonTreeNodeStream(t);
    treeStream.setTokenStream(tokens);
    smaliTreeWalker dexGen = new smaliTreeWalker(treeStream);
    dexGen.setVerboseErrors(false);
    dexGen.setDexBuilder(dexBuilder);
    BuilderClassDef classDef = (BuilderClassDef) dexGen.smali_file();
    if (dexGen.getNumberOfSyntaxErrors() != 0) {
        throw new RuntimeException("Unable to walk: " + smaliFile);
    }
    return classDef;
}
/**
 * Builds a {@code FilterParser} over the given partition-filter string and
 * runs the {@code filter} rule once so syntax errors surface eagerly as
 * {@link MetaException}s rather than later during use.
 */
public static FilterParser getFilterParser(String filter) throws MetaException {
    FilterLexer lexer = new FilterLexer(new ANTLRNoCaseStringStream(filter));
    FilterParser parser = new FilterParser(new CommonTokenStream(lexer));
    try {
        parser.filter();
    } catch(RecognitionException re) {
        throw new MetaException("Error parsing partition filter; lexer error: " + lexer.errorMsg + "; exception " + re);
    }
    // The lexer records its own errors out of band; check them separately.
    if (lexer.errorMsg != null) {
        throw new MetaException("Error parsing partition filter : " + lexer.errorMsg);
    }
    return parser;
}
CommonTokenStream tokens = new CommonTokenStream((TokenSource)lexer);
private Tree compileTree(final String expression) throws AttributeExpressionLanguageParsingException { try { final CharStream input = new ANTLRStringStream(expression); final AttributeExpressionLexer lexer = new AttributeExpressionLexer(input); final CommonTokenStream lexerTokenStream = new CommonTokenStream(lexer); final AttributeExpressionParser parser = new AttributeExpressionParser(lexerTokenStream); final Tree ast = (Tree) parser.query().getTree(); final Tree tree = ast.getChild(0); // ensure that we are able to build the evaluators, so that we validate syntax final Evaluator<?> evaluator = buildEvaluator(tree); verifyMappingEvaluatorReduced(evaluator); return tree; } catch (final AttributeExpressionLanguageParsingException e) { throw e; } catch (final Exception e) { throw new AttributeExpressionLanguageParsingException(e); } }
// Fragment: wire the existing lexer into a token stream and construct the
// EDIFACT-to-XML parser over it. The surrounding method is not visible here.
tokens = new CommonTokenStream( lexer );
parser = new FastSimpleGenericEdifactDirectXMLParser( tokens );
// Fragment: lex the record-path string into a buffered token stream,
// presumably consumed by a RecordPath parser just past this excerpt.
final CharStream input = new ANTLRStringStream(path);
final RecordPathLexer lexer = new RecordPathLexer(input);
final CommonTokenStream lexerTokenStream = new CommonTokenStream(lexer);
/**
 * Compiles a score-formula definition into a {@code ScoreFunction} instance.
 *
 * @param funcNum identifier used when generating the formula class
 * @param definition the formula source text
 * @return a new instance of the generated ScoreFunction class
 * @throws Exception if parsing, class generation, or reflective
 *         instantiation fails
 */
public static ScoreFunction parseFormula(int funcNum, String definition) throws Exception {
    // Encode explicitly as UTF-8; the no-arg getBytes() would use the
    // platform default charset and vary across environments.
    InputStream reader = new ByteArrayInputStream(definition.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    ANTLRInputStream input = new ANTLRInputStream(reader);
    ScoreFormulaLexer lexer = new ScoreFormulaLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ScoreFormulaParser parser = new ScoreFormulaParser(tokens);
    Class<?> funcClass = parser.generateClass(lexer, funcNum);
    // Parameterize the reflective type rather than using a raw Constructor.
    Constructor<?> cons = funcClass.getConstructor();
    return (ScoreFunction) cons.newInstance();
}
/**
 * Creates a SQL parser over an in-memory query string using the given
 * parse-node factory.
 */
public SQLParser(String query, ParseNodeFactory factory) {
    PhoenixSQLLexer lexer;
    try {
        lexer = new PhoenixSQLLexer(new CaseInsensitiveReaderStream(new StringReader(query)));
    } catch (IOException impossible) {
        // A StringReader never fails with IOException; wrap to satisfy the compiler.
        throw new RuntimeException(impossible); // Impossible
    }
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    parser = new PhoenixSQLParser(tokenStream);
    parser.setParseNodeFactory(factory);
}
/**
 * Creates a SQL parser over the given reader using the given parse-node factory.
 *
 * @throws IOException if reading from the reader fails during lexer setup
 */
public SQLParser(Reader queryReader, ParseNodeFactory factory) throws IOException {
    CaseInsensitiveReaderStream charStream = new CaseInsensitiveReaderStream(queryReader);
    CommonTokenStream tokenStream = new CommonTokenStream(new PhoenixSQLLexer(charStream));
    parser = new PhoenixSQLParser(tokenStream);
    parser.setParseNodeFactory(factory);
}
/**
 * Creates a SQL parser over the given reader using the default parse-node
 * factory.
 *
 * @throws IOException if reading from the reader fails during lexer setup
 */
public SQLParser(Reader queryReader) throws IOException {
    // Delegate to the factory-taking constructor instead of duplicating its
    // lexer/token-stream setup.
    this(queryReader, DEFAULT_NODE_FACTORY);
}