/**
 * Builds the implicit lexer grammar that is embedded inside a combined
 * grammar, or returns {@code null} when the grammar defines no lexer rules.
 * The returned grammar is fully analyzed (NFAs created) and shares the
 * combined grammar's token vocabulary.
 *
 * @param grammar the combined grammar to extract lexer rules from
 * @return the generated lexer grammar, or {@code null} if there is none
 * @throws Exception if grammar content parsing or NFA construction fails
 */
private Grammar createLexerGrammarFromCombinedGrammar(Grammar grammar) throws Exception {
    String lexerText = grammar.getLexerGrammar();
    if (lexerText == null) {
        // Combined grammar contributed no lexer rules; nothing to build.
        return null;
    }
    Grammar lexer = new Grammar();
    lexer.implicitLexer = true; // mark as tool-generated, not user-authored
    lexer.setTool(engine.getANTLRTool());
    lexer.setFileName("<internally-generated-lexer>");
    // Token types must line up with the parser's vocabulary.
    lexer.importTokenVocabulary(grammar);
    lexer.setGrammarContent(lexerText);
    lexer.composite.createNFAs();
    return lexer;
}
public NFAState getRuleStartState(String scopeName, String ruleName) { Rule r = getRule(scopeName, ruleName); if ( r!=null ) { //System.out.println("getRuleStartState("+scopeName+", "+ruleName+")="+r.startState); return r.startState; } //System.out.println("getRuleStartState("+scopeName+", "+ruleName+")=null"); return null; }
public void parse(String startRule, DebugEventListener actions, List<NFAState> visitedStates) throws RecognitionException { //System.out.println("parse("+startRule+")"); // Build NFAs/DFAs from the grammar AST if NFAs haven't been built yet if ( grammar.getRuleStartState(startRule)==null ) { grammar.buildNFA(); } if ( !grammar.allDecisionDFAHaveBeenCreated() ) { // Create the DFA predictors for each decision grammar.createLookaheadDFAs(); } // do the parse Stack<NFAState> ruleInvocationStack = new Stack<NFAState>(); NFAState start = grammar.getRuleStartState(startRule); NFAState stop = grammar.getRuleStopState(startRule); parseEngine(startRule, start, stop, input, ruleInvocationStack, actions, visitedStates); }
protected void performGrammarAnalysis(CodeGenerator generator, Grammar grammar) { // Build NFAs from the grammar AST grammar.buildNFA(); // Create the DFA predictors for each decision grammar.createLookaheadDFAs(); }
AttributeScope resolveDynamicScope(String scopeName) { if ( grammar.getGlobalScope(scopeName)!=null ) { return grammar.getGlobalScope(scopeName); } Rule scopeRule = grammar.getRule(scopeName); if ( scopeRule!=null ) { return scopeRule.ruleScope; } return null; // not a valid dynamic scope }
/**
 * Reports whether automatic backtracking is enabled for the given decision.
 * The block-level {@code backtrack} option wins; if absent, the grammar-level
 * option is consulted. Only the literal value {@code "true"} enables it.
 *
 * @param decision 1-based decision number
 * @return {@code true} iff the effective {@code backtrack} option is "true"
 */
public boolean getAutoBacktrackMode(int decision) {
    NFAState startState = getDecisionNFAStartState(decision);
    String backtrack = (String) getBlockOption(startState.associatedASTNode, "backtrack");
    if (backtrack == null) {
        // Fall back to the grammar-wide option.
        backtrack = (String) nfa.grammar.getOption("backtrack");
    }
    // Null-safe equality: equivalent to (backtrack != null && backtrack.equals("true")).
    return "true".equals(backtrack);
}
// FRAGMENT (unbalanced braces — intermediate code is elided from this view).
// Driver logic that: (1) parses a combined grammar file into a Grammar AST,
// (2) bails out if any rule is left-recursive or the requested start rule is
// undefined, (3) extracts the implicit lexer grammar text and builds a lexer
// Grammar sharing the parser's token vocabulary, and (4) routes the named
// tokens to channel 99 (presumably a "hide" channel — TODO confirm against
// the enclosing class). NOTE(review): the two `return;` statements appear to
// sit inside `if` blocks whose braces are not visible here.
Grammar parser = new Grammar(tool, grammarFileName, composite); composite.setDelegationRoot(parser); FileReader fr = new FileReader(grammarFileName); BufferedReader br = new BufferedReader(fr); parser.parseAndBuildAST(br); br.close(); List leftRecursiveRules = parser.checkAllRulesForLeftRecursion(); if ( leftRecursiveRules.size()>0 ) { return; if ( parser.getRule(startRule)==null ) { System.out.println("undefined start rule "+startRule); return; String lexerGrammarText = parser.getLexerGrammar(); Grammar lexer = new Grammar(); lexer.importTokenVocabulary(parser); lexer.fileName = grammarFileName; lexer.setTool(tool); if ( lexerGrammarText!=null ) { lexer.setGrammarContent(lexerGrammarText); while ( tk.hasMoreTokens() ) { String tokenName = tk.nextToken(); tokens.setTokenTypeChannel(lexer.getTokenType(tokenName), 99); if ( parser.getRule(startRule)==null ) { System.err.println("Rule "+startRule+" does not exist in "+grammarFileName);
// FRAGMENT (unbalanced braces — intermediate code is elided from this view).
// Variant of the driver above: parses the combined grammar, aborts on
// left-recursive rules or an undefined start rule, builds the implicit lexer
// grammar (here via `new Grammar(tool)`), runs the lexer's NFA construction
// and its own left-recursion check, then resolves token display names from
// token types. NOTE(review): `br` is used but not declared in this visible
// span; its declaration is presumably in the elided portion.
Grammar parser = new Grammar(tool, grammarFileName, composite); composite.setDelegationRoot(parser); parser.parseAndBuildAST(br); br.close(); List<? extends Collection<? extends Rule>> leftRecursiveRules = parser.checkAllRulesForLeftRecursion(); if ( leftRecursiveRules.size()>0 ) { return; if ( parser.getRule(startRule)==null ) { System.out.println("undefined start rule "+startRule); return; String lexerGrammarText = parser.getLexerGrammar(); Grammar lexer = new Grammar(tool); lexer.importTokenVocabulary(parser); lexer.fileName = grammarFileName; if ( lexerGrammarText!=null ) { lexer.setGrammarContent(lexerGrammarText); lexer.buildNFA(); leftRecursiveRules = lexer.checkAllRulesForLeftRecursion(); if ( leftRecursiveRules.size()>0 ) { return; Integer ttypeI = tokenTypes.get(i); int ttype = ttypeI; String ttypeDisplayName = parser.getTokenDisplayName(ttype);
// FRAGMENT (unbalanced braces — the body is only partially visible).
// Skeleton of Grammar.createLookaheadDFAs: lazily builds NFAs and checks
// left recursion, then for each decision (1..numDecisions, single-threaded
// path) creates a lookahead DFA — an LL(1)-optimized DFA when the user max
// lookahead is 0 or 1, otherwise a full analysis DFA via
// createLookaheadDFA(decision, wackTempStructures). Decisions inside
// left-recursive rules appear to be special-cased (branch body elided).
// NOTE(review): the dangling string literal and setLookaheadDFA(decision,
// null) belong to elided error-handling code — do not read them as
// sequential statements.
public void createLookaheadDFAs(boolean wackTempStructures) { if ( nfa==null ) { buildNFA(); checkAllRulesForLeftRecursion(); int numDecisions = getNumberOfDecisions(); if ( NFAToDFAConverter.SINGLE_THREADED_NFA_CONVERSION ) { for (int decision=1; decision<=numDecisions; decision++) { NFAState decisionStartState = getDecisionNFAStartState(decision); if ( leftRecursiveRules.contains(decisionStartState.enclosingRule) ) { if ( getUserMaxLookahead(decision)==0 || getUserMaxLookahead(decision)==1 ) dfa = createLL_1_LookaheadDFA(decision); " not suitable for LL(1)-optimized DFA analysis"); dfa = createLookaheadDFA(decision, wackTempStructures); setLookaheadDFA(decision, null);
// FRAGMENT (unbalanced try block — remainder elided from this view).
// Tool-side generation of the implicit lexer for a combined grammar: dumps
// the grammar, extracts the embedded lexer grammar text, derives the
// generated lexer's file name, opens an output writer, and constructs a
// Grammar flagged implicitLexer that inherits the parser's token vocabulary
// before parsing the lexer text into an AST. NOTE(review): `sr` (the reader
// over lexerGrammarStr, presumably) is declared in the elided portion.
grammar.printGrammar(System.out); String lexerGrammarStr = grammar.getLexerGrammar(); lexerGrammarFileName = grammar.getImplicitlyGeneratedLexerFileName(); try { Writer w = getOutputFile(grammar, lexerGrammarFileName); Grammar lexerGrammar = new Grammar(); lexerGrammar.composite.watchNFAConversion = internalOption_watchNFAConversion; lexerGrammar.implicitLexer = true; lexerGrammar.setTool(this); File lexerGrammarFullFile = new File(getFileDirectory(lexerGrammarFileName), lexerGrammarFileName); lexerGrammar.setFileName(lexerGrammarFullFile.toString()); lexerGrammar.importTokenVocabulary(grammar); lexerGrammar.parseAndBuildAST(sr);
// FRAGMENT (unbalanced try block — remainder elided from this view).
// Later variant of the implicit-lexer generation path: first finishes the
// root grammar (syntactic-predicate rules, symbol definition, NFA creation),
// then builds the implicit lexer via `new Grammar(this)` and runs the same
// finishing steps on it. NOTE(review): `lexerGrammarFullFile` and `sr` are
// used but declared in the elided portion of this span.
rootGrammar.addRulesForSyntacticPredicates(); rootGrammar.composite.defineGrammarSymbols(); rootGrammar.composite.createNFAs(); rootGrammar.printGrammar(System.out); String lexerGrammarStr = rootGrammar.getLexerGrammar(); lexerGrammarFileName = rootGrammar.getImplicitlyGeneratedLexerFileName(); try { Writer w = getOutputFile(rootGrammar, lexerGrammarFileName); Grammar lexerGrammar = new Grammar(this); lexerGrammar.composite.watchNFAConversion = internalOption_watchNFAConversion; lexerGrammar.implicitLexer = true; lexerGrammar.setFileName(lexerGrammarFullFile.toString()); lexerGrammar.importTokenVocabulary(rootGrammar); lexerGrammar.parseAndBuildAST(sr); lexerGrammar.addRulesForSyntacticPredicates(); lexerGrammar.composite.defineGrammarSymbols(); lexerGrammar.composite.createNFAs();
// FRAGMENT (statements excerpted from the interpreter's parse engine; the
// surrounding control flow is elided). Shows the DebugEventListener
// enter/exit-rule notifications fired around rule invocation, the lookahead
// DFA fetch used to predict which alternative to take at a multi-alt
// decision state, and the lazy creation of decision DFAs mid-parse.
// NOTE(review): `parseAlt`, `alt`, and `stop` are defined in elided code.
actions.enterRule(s.nfa.grammar.getFileName(), start.enclosingRule.name); if ( s.getDecisionNumber()>0 && s.nfa.grammar.getNumberOfAltsForDecisionNFA(s)>1 ) { DFA dfa = s.nfa.grammar.getLookaheadDFA(s.getDecisionNumber()); if ( parseAlt > s.nfa.grammar.getNumberOfAltsForDecisionNFA(s) ) { alt = s.nfa.grammar.getNFAStateForAltOfDecision(s, parseAlt); actions.exitRule(s.nfa.grammar.getFileName(), s.enclosingRule.name); actions.enterRule(s.nfa.grammar.getFileName(), s.enclosingRule.name); if ( !s.nfa.grammar.allDecisionDFAHaveBeenCreated() ) { s.nfa.grammar.createLookaheadDFAs(); actions.exitRule(s.nfa.grammar.getFileName(), stop.enclosingRule.name);
// FRAGMENT (statements excerpted from code-generation tree-walker actions;
// enclosing rule bodies are elided). Covers: flagging an AST operator used
// in an alt that also has a rewrite (MSG_AST_OP_IN_ALT_WITH_REWRITE),
// validating rule and token references, resolving possibly scoped (imported
// grammar) rule references and recording the delegate scope on the output
// template, and emitting token-match templates — including the special
// lexerMatchEOF case for EOF in a lexer grammar. NOTE(review): `code`,
// `templates`, `r`, `t`, `scope`, and related locals are bound in elided
// grammar actions.
Rule encRule = grammar.getRule(((GrammarAST)atom_AST_in).enclosingRuleName); if ( encRule!=null && encRule.hasRewrite(outerAltNum) && astSuffix!=null ) { ErrorManager.grammarError(ErrorManager.MSG_AST_OP_IN_ALT_WITH_REWRITE, grammar.checkRuleReference(scope, r, rarg, currentRuleName); String scopeName = null; if ( scope!=null ) { scopeName = scope.getText(); Rule rdef = grammar.getRule(scopeName, r.getText()); List<Grammar> rdefDelegates = rdef.grammar.getDelegates(); if ( rdefDelegates.contains(this.grammar) ) { code.setAttribute("scope", rdef.grammar); grammar.checkRuleReference(scope, t, targ, currentRuleName); if ( grammar.type==Grammar.LEXER ) { if ( grammar.getTokenType(t.getText())==Label.EOF ) { code = templates.getInstanceOf("lexerMatchEOF"); Rule rdef2 = grammar.getRule(scopeName, t.getText()); code.setAttribute("rule", rdef2); if ( scope!=null ) { // scoped rule ref code = getTokenElementST("tokenRef", t.getText(), t, astSuffix, labelText); String tokenLabel = generator.getTokenTypeAsTargetLabel(grammar.getTokenType(t.getText())); code.setAttribute("token",tokenLabel); if ( !currentAltHasASTRewrite && t.terminalOptions!=null ) {
// FRAGMENT (excerpt of the Tool's grammar-loading sequence; parts elided).
// Creates the root Grammar for a composite, reads and parses the grammar
// file into an AST, and propagates the watchNFAConversion debug flag.
// NOTE(review): `fr` is declared but the code that assigns it and wraps it
// in the `br` reader is elided from this view.
Grammar grammar = new Grammar(this, grammarFileName, composite); composite.setDelegationRoot(grammar); FileReader fr; grammar.parseAndBuildAST(br); composite.watchNFAConversion = internalOption_watchNFAConversion; br.close();
private Grammar createNewGrammar() throws TokenStreamException, RecognitionException, IOException { Grammar g = new Grammar(); g.setTool(engine.getANTLRTool()); g.setFileName(engine.getGrammarFileName()); g.setGrammarContent(engine.getGrammarText()); g.composite.createNFAs(); // don't want errors from a previous grammar to interfere with this new grammar. // must reset error state otherwise analysis will not proceed if // there were previous errors. ErrorManager.resetErrorState(); return g; }
public void optimize() { // optimize each DFA in this grammar for (int decisionNumber=1; decisionNumber<=grammar.getNumberOfDecisions(); decisionNumber++) { DFA dfa = grammar.getLookaheadDFA(decisionNumber); optimize(dfa); } }
// FRAGMENT (unbalanced braces — surrounding method body elided). Registers
// a named @action: defaults the scope when none was given, lazily creates
// the per-scope action map, and forwards the action to every delegate
// grammar whose type accepts the scope (per target.isValidActionScope).
scope = getDefaultActionScope(type); Map<String, Object> scopeActions = getActions().get(scope); if ( scopeActions==null ) { scopeActions = new HashMap<String, Object>(); getActions().put(scope, scopeActions); List<Grammar> allgrammars = composite.getRootGrammar().getDelegates(); for (Grammar delegate : allgrammars) { if ( target.isValidActionScope(delegate.type, scope) ) { delegate.defineNamedAction(ampersandAST, scope, nameAST, actionAST);
// FRAGMENT (unclosed for-loop — remainder elided). Collects generated-file
// dependencies for each delegate (imported) grammar of the composite: for
// every import, resolves its output directory and adds the recognizer's
// target file (recognizer name + target extension template) to `files`.
// NOTE(review): `imports`, `outputDir`, `extST`, and `files` are bound in
// elided surrounding code.
grammar.composite.getDelegates(grammar.composite.getRootGrammar()); for (Grammar g : imports) { outputDir = tool.getOutputDirectory(g.getFileName()); String fname = groomQualifiedFileName(outputDir.toString(), g.getRecognizerName() + extST.render()); files.add(new File(fname));
// FRAGMENT (unclosed for-loop — remainder elided). Simpler variant of the
// named-action registration above: defaults the scope, then unconditionally
// forwards the @action to every delegate grammar of the composite root.
scope = getDefaultActionScope(type); List<Grammar> allgrammars = composite.getRootGrammar().getDelegates(); for (Grammar g : allgrammars) { g.defineNamedAction(ampersandAST, scope, nameAST, actionAST);
public String toString() { GrammarAST decisionASTNode = probe.dfa.getDecisionASTNode(); line = decisionASTNode.getLine(); column = decisionASTNode.getColumn(); String fileName = probe.dfa.nfa.grammar.getFileName(); if ( fileName!=null ) { file = fileName; } StringTemplate st = getMessageTemplate(); String ruleName = probe.dfa.getNFADecisionStartState().enclosingRule.name; st.setAttribute("ruleName", ruleName); List sortedAlts = new ArrayList(); sortedAlts.addAll(altsWithRecursion); Collections.sort(sortedAlts); // make sure it's 1, 2, ... st.setAttribute("alts", sortedAlts); return super.toString(st); }