// Convenience overload: interpret a parse from startRule with no extra tracking.
public ParseTree parse(String startRule) throws RecognitionException {
    return parse(startRule, null);
}
// Predict which alternative matches by interpreting this DFA's
// underlying grammar NFA against the input.
public int predict(IntStream input) {
    Interpreter interp = new Interpreter(nfa.grammar, input);
    return interp.predict(this);
}
// Lex one token starting at lexer rule startRule and return it.
public CommonToken scan(String startRule, List<NFAState> visitedStates)
    throws RecognitionException
{
    LexerActionGetTokenType actions = new LexerActionGetTokenType(grammar);
    scan(startRule, actions, visitedStates);
    return actions.token;
}
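// Usage sketch for scan() above; the grammar text, rule name, and input
// are illustrative assumptions, not taken from this file.  Grammar and
// ANTLRStringStream are the ANTLR v3 tool/runtime classes; scan()
// matches one token by interpreting the lexer grammar, with no codegen.
Grammar lexGrammar = new Grammar(
    "lexer grammar T;\n"+
    "ID : ('a'..'z'|'A'..'Z')+ ;\n");
Interpreter lexInterp = new Interpreter(lexGrammar, new ANTLRStringStream("abc"));
CommonToken tok = lexInterp.scan("ID", null);
System.out.println(tok.getText()); // should print the matched text, "abc"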
Interpreter lexEngine = new Interpreter(lexer, input);
FilteringTokenStream tokens = new FilteringTokenStream(lexEngine);
// Put every token type named in ignoreTokens on a hidden channel so
// the parse interpreter never sees it (99 is the runtime's
// Token.HIDDEN_CHANNEL); the stray "return" belonged to the
// missing-rule guard below, which made the rest unreachable.
StringTokenizer tk = new StringTokenizer(ignoreTokens, " ");
while ( tk.hasMoreTokens() ) {
    String tokenName = tk.nextToken();
    tokens.setTokenTypeChannel(lexer.getTokenType(tokenName), 99);
}
if ( parser.getRule(startRule)==null ) {
    System.err.println("Rule "+startRule+" does not exist in the grammar");
    return;
}
Interpreter parseEngine = new Interpreter(parser, tokens);
ParseTree t = parseEngine.parse(startRule);
System.out.println(t.toStringTree());
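// FilteringTokenStream is used above but not defined in this fragment.
// A minimal stand-in (an assumption, not necessarily the original class;
// it relies on the ANTLR v3.3+ BufferedTokenStream sync()/get() hooks)
// only has to honor setTokenTypeChannel() by hiding those token types:
class FilteringTokenStream extends CommonTokenStream {
    public FilteringTokenStream(TokenSource src) { super(src); }
    Set<Integer> hide = new HashSet<Integer>();
    protected void sync(int i) {
        super.sync(i);
        if ( hide.contains(get(i).getType()) ) {
            get(i).setChannel(Token.HIDDEN_CHANNEL);
        }
    }
    public void setTokenTypeChannel(int ttype, int channel) {
        hide.add(ttype);
    }
}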
// Scan until a token matches or input runs out; on a scan error,
// report it and try again from the next position.
loop:
while (input.LA(1)!=CharStream.EOF) {
    try {
        token = scan(Grammar.ARTIFICIAL_TOKENS_RULENAME, null);
        break;
    }
    catch (RecognitionException re) {
        // report a problem and try for another
        reportScanError(re);
        continue loop;
    }
}
public void parse(String startRule,
                  DebugEventListener actions,
                  List<NFAState> visitedStates)
    throws RecognitionException
{
    //System.out.println("parse("+startRule+")");
    // Build NFAs/DFAs from the grammar AST if NFAs haven't been built yet
    if ( grammar.getRuleStartState(startRule)==null ) {
        grammar.buildNFA();
    }
    if ( !grammar.allDecisionDFAHaveBeenCreated() ) {
        // Create the DFA predictors for each decision
        grammar.createLookaheadDFAs();
    }
    // do the parse
    Stack<NFAState> ruleInvocationStack = new Stack<NFAState>();
    NFAState start = grammar.getRuleStartState(startRule);
    NFAState stop = grammar.getRuleStopState(startRule);
    parseEngine(startRule, start, stop, input, ruleInvocationStack,
                actions, visitedStates);
}
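// The visitedStates parameter gives a cheap trace of the NFA walk.
// Sketch below: parseInterp, the "prog" start rule, and the surrounding
// setup are illustrative assumptions, not defined in this fragment.
List<NFAState> visited = new ArrayList<NFAState>();
parseInterp.parse("prog", null, visited); // no debug listener, just record states
System.out.println("traversed "+visited.size()+" NFA states");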
int predictedAlt = predict(dfa);
if ( predictedAlt == NFA.INVALID_ALT_NUMBER ) {
    // The DFA saw no viable alternative for this decision; build and
    // raise the error (s, the current NFA state, and actions are
    // assumed in scope in the surrounding parseEngine()).
    String description = dfa.getNFADecisionStartState().getDescription();
    NoViableAltException nvae =
        new NoViableAltException(description,
                                 dfa.getDecisionNumber(),
                                 s.stateNumber,
                                 input);
    if ( actions!=null ) {
        actions.recognitionException(nvae);
    }
    throw nvae;
}
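// Callers can catch the no-viable-alternative error raised above; a
// sketch assuming interp wraps a parser grammar (RecognitionException's
// public index field comes from the ANTLR v3 runtime):
try {
    ParseTree tree = interp.parse(startRule);
    System.out.println(tree.toStringTree());
}
catch (RecognitionException re) {
    System.err.println("no viable alternative at index "+re.index);
}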