/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
public LexerInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, Collection<String> channelNames, Collection<String> modeNames, ATN atn, CharStream input) { super(input); if (atn.grammarType != ATNType.LEXER) { throw new IllegalArgumentException("The ATN must be a lexer ATN."); } this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.channelNames = channelNames.toArray(new String[channelNames.size()]); this.modeNames = modeNames.toArray(new String[modeNames.size()]); this.vocabulary = vocabulary; this._decisionToDFA = new DFA[atn.getNumberOfDecisions()]; for (int i = 0; i < _decisionToDFA.length; i++) { _decisionToDFA[i] = new DFA(atn.getDecisionState(i), i); } this._interp = new LexerATNSimulator(this,atn,_decisionToDFA,_sharedContextCache); }
public ParserInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, ATN atn, TokenStream input) { super(input); this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.vocabulary = vocabulary; // init decision DFA int numberOfDecisions = atn.getNumberOfDecisions(); this.decisionToDFA = new DFA[numberOfDecisions]; for (int i = 0; i < numberOfDecisions; i++) { DecisionState decisionState = atn.getDecisionState(i); decisionToDFA[i] = new DFA(decisionState, i); } // get atn simulator that knows how to do predictions setInterpreter(new ParserATNSimulator(this, atn, decisionToDFA, sharedContextCache)); }
predicateDFAState(D, atn.getDecisionState(dfa.decision)); if (D.predicates != null) { D.prediction = ATN.INVALID_ALT_NUMBER;
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
/** Discards all cached DFA state by installing a fresh, empty DFA for each decision. */
@Override
public void clearDFA() {
	int decision = 0;
	while (decision < decisionToDFA.length) {
		decisionToDFA[decision] = new DFA(atn.getDecisionState(decision), decision);
		decision++;
	}
}
public LexerInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, Collection<String> channelNames, Collection<String> modeNames, ATN atn, CharStream input) { super(input); if (atn.grammarType != ATNType.LEXER) { throw new IllegalArgumentException("The ATN must be a lexer ATN."); } this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.channelNames = channelNames.toArray(new String[channelNames.size()]); this.modeNames = modeNames.toArray(new String[modeNames.size()]); this.vocabulary = vocabulary; this._decisionToDFA = new DFA[atn.getNumberOfDecisions()]; for (int i = 0; i < _decisionToDFA.length; i++) { _decisionToDFA[i] = new DFA(atn.getDecisionState(i), i); } this._interp = new LexerATNSimulator(this,atn,_decisionToDFA,_sharedContextCache); }
private void generateAntlr4Stats(String fileName, long parseTree, long treeVisitor, ParseInfo info) { File f = new File(".proparse/antlr4-timings/" + fileName.replace('\\', '_').replace('/', '_').replace(':', '_')); f.getParentFile().mkdirs(); try (PrintWriter writer = new PrintWriter(f)) { writer.println(fileName.replace(':', '_') + " : " + parseTree + " : " + treeVisitor); if ((info != null) && (info.getDecisionInfo() != null)) { Arrays.stream(info.getDecisionInfo()).filter(decision -> decision.SLL_MaxLook > 0).sorted( (d1, d2) -> Long.compare(d2.SLL_MaxLook, d1.SLL_MaxLook)).forEach( decision -> writer.println(String.format( "Time: %d in %d calls - LL_Lookaheads: %d Max k: %d Ambiguities: %d Errors: %d Rule: %s", decision.timeInPrediction / 1000000, decision.invocations, decision.SLL_TotalLook, decision.SLL_MaxLook, decision.ambiguities.size(), decision.errors.size(), Proparse.ruleNames[Proparse._ATN.getDecisionState(decision.decision).ruleIndex]))); // MaxK + prediction time stats Arrays.stream(info.getDecisionInfo()).filter(decision -> decision.SLL_MaxLook > 0).forEach(decision -> { if ((maxK.get(decision.decision) == null) || (maxK.get(decision.decision) < decision.SLL_MaxLook)) maxK.put(decision.decision, decision.SLL_MaxLook); }); Arrays.stream(info.getDecisionInfo()).filter(decision -> decision.timeInPrediction > 0).forEach( decision -> decisionTime.put(decision.decision, (decision.timeInPrediction / 1000000) + (decisionTime.get(decision.decision) == null ? 0 : decisionTime.get(decision.decision)))); } } catch (IOException caught) { LOG.error("Unable to write proparse debug file", caught); } }
public LexerInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, Collection<String> modeNames, ATN atn, CharStream input) { super(input); if (atn.grammarType != ATNType.LEXER) { throw new IllegalArgumentException("The ATN must be a lexer ATN."); } this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.modeNames = modeNames.toArray(new String[modeNames.size()]); this.vocabulary = vocabulary; this._decisionToDFA = new DFA[atn.getNumberOfDecisions()]; for (int i = 0; i < _decisionToDFA.length; i++) { _decisionToDFA[i] = new DFA(atn.getDecisionState(i), i); } this._interp = new LexerATNSimulator(this,atn,_decisionToDFA,_sharedContextCache); }
public LexerInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, Collection<String> modeNames, ATN atn, CharStream input) { super(input); if (atn.grammarType != ATNType.LEXER) { throw new IllegalArgumentException("The ATN must be a lexer ATN."); } this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.modeNames = modeNames.toArray(new String[modeNames.size()]); this.vocabulary = vocabulary; this._decisionToDFA = new DFA[atn.getNumberOfDecisions()]; for (int i = 0; i < _decisionToDFA.length; i++) { _decisionToDFA[i] = new DFA(atn.getDecisionState(i), i); } this._interp = new LexerATNSimulator(this,atn,_decisionToDFA,_sharedContextCache); }
private void logStatistics() { LOG.info("{} files proparse'd, {} XML files, {} listing files, {} failure(s), {} NCLOCs", numFiles, numXREF, numListings, numFailures, ncLocs); LOG.info("AST Generation | time={} ms", parseTime); LOG.info("XML Parsing | time={} ms", xmlParseTime); LOG.info("AST4Generation | time={} ms", parse4Time); LOG.info("AST4Tree | time={} ms", parse4Tree); // Sort entries by rule name ruleTime.entrySet().stream().sorted( (Entry<String, Long> obj1, Entry<String, Long> obj2) -> obj1.getKey().compareTo(obj2.getKey())).forEach( (Entry<String, Long> entry) -> LOG.info("Rule {} | time={} ms", entry.getKey(), entry.getValue())); if (!decisionTime.isEmpty()) { LOG.info("ANTRL4 - 25 longest rules"); decisionTime.entrySet().stream().sorted((o1, o2) -> o2.getValue().compareTo(o1.getValue())).limit(25).forEach( entry -> LOG.info("Rule {} - {} | time={} ms", entry.getKey().intValue(), Proparse.ruleNames[Proparse._ATN.getDecisionState(entry.getKey().intValue()).ruleIndex], entry.getValue())); } if (!maxK.isEmpty()) { LOG.info("ANTRL4 - 25 Max lookeahead rules"); maxK.entrySet().stream().sorted((o1, o2) -> o2.getValue().compareTo(o1.getValue())).limit(25).forEach( entry -> LOG.info("Rule {} - {} | Max lookahead: {}", entry.getKey().intValue(), Proparse.ruleNames[Proparse._ATN.getDecisionState(entry.getKey().intValue()).ruleIndex], entry.getValue())); } }
int numberOfDecisions = atn.getNumberOfDecisions(); for (int i = 0; i < numberOfDecisions; i++) { DecisionState decisionState = atn.getDecisionState(i); RuleStartState startState = atn.ruleToStartState[decisionState.ruleIndex];
/**
 * Builds a fresh DFA array with one empty DFA per ATN decision.
 *
 * @return newly allocated array indexed by decision number
 */
private DFA[] getDFA() {
	// Hoisted: the original re-queried _ATN.getNumberOfDecisions() in every loop iteration.
	int numberOfDecisions = _ATN.getNumberOfDecisions();
	DFA[] result = new DFA[numberOfDecisions];
	for (int i = 0; i < numberOfDecisions; i++) {
		result[i] = new DFA(_ATN.getDecisionState(i), i);
	}
	return result;
}
}
public ParserInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, ATN atn, TokenStream input) { super(input); this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.vocabulary = vocabulary; // init decision DFA int numberOfDecisions = atn.getNumberOfDecisions(); this.decisionToDFA = new DFA[numberOfDecisions]; for (int i = 0; i < numberOfDecisions; i++) { DecisionState decisionState = atn.getDecisionState(i); decisionToDFA[i] = new DFA(decisionState, i); } // get atn simulator that knows how to do predictions setInterpreter(new ParserATNSimulator(this, atn, decisionToDFA, sharedContextCache)); }
public ParserInterpreter(String grammarFileName, Vocabulary vocabulary, Collection<String> ruleNames, ATN atn, TokenStream input) { super(input); this.grammarFileName = grammarFileName; this.atn = atn; this.tokenNames = new String[atn.maxTokenType]; for (int i = 0; i < tokenNames.length; i++) { tokenNames[i] = vocabulary.getDisplayName(i); } this.ruleNames = ruleNames.toArray(new String[ruleNames.size()]); this.vocabulary = vocabulary; // init decision DFA int numberOfDecisions = atn.getNumberOfDecisions(); this.decisionToDFA = new DFA[numberOfDecisions]; for (int i = 0; i < numberOfDecisions; i++) { DecisionState decisionState = atn.getDecisionState(i); decisionToDFA[i] = new DFA(decisionState, i); } // get atn simulator that knows how to do predictions setInterpreter(new ParserATNSimulator(this, atn, decisionToDFA, sharedContextCache)); }