/**
 * Invoked as the parser leaves any rule: steps the pretty-print indent back out and closes the
 * paren that {@code enterEveryRule} opened for this rule.
 */
@Override
public void exitEveryRule(ParserRuleContext ctx) {
  --_indent;
  _ptSentences.appendToLastSentence(")");
}
@Override public void enterEveryRule(ParserRuleContext ctx) { if (ctx != _ctx) { _ptSentences.getSentences().add(""); } for (int i = 0; i < _indent; i++) { _ptSentences.appendToLastSentence(" "); } String ruleName = _ruleNames.get(ctx.getRuleIndex()); if (ctx.getParent() != null) { for (Field f : ctx.getParent().getClass().getFields()) { try { if (!f.getName().equals(ruleName) && f.get(ctx.getParent()) == ctx) { _ptSentences.appendToLastSentence(f.getName() + " = "); } } catch (Throwable t) { // Ignore the error and continue. } } } _ptSentences.appendToLastSentence("(" + ruleName); _indent++; }
@Override public void visitErrorNode(ErrorNode ctx) { String nodeText = BatfishCombinedParser.escape(ctx.getText()); // _sb.append("\n"); _ptSentences.getSentences().add(""); for (int i = 0; i < _indent; i++) { _ptSentences.appendToLastSentence(" "); } int tokenType = ctx.getSymbol().getType(); String tokenName; if (tokenType == Lexer.EOF) { tokenName = "EOF"; _ptSentences.appendToLastSentence(tokenName + ":" + nodeText); } else if (tokenType == BatfishLexer.UNRECOGNIZED_LINE_TOKEN) { _ptSentences.appendToLastSentence("<UnrecognizedLine>:'" + nodeText + "'"); } else { tokenName = _vocabulary.getSymbolicName(tokenType); _ptSentences.appendToLastSentence("<ErrorNode>:" + tokenName + ":'" + nodeText + "'"); } }
_ptSentences.getSentences().add(""); for (int i = 0; i < _indent; i++) { _ptSentences.appendToLastSentence(" "); _ptSentences.appendToLastSentence(f.getName() + " = "); _ptSentences.appendToLastSentence(tokenName + ":" + nodeText); } else { _ptSentences.appendToLastSentence(tokenName + ":'" + nodeText + "'"); _ptSentences.appendToLastSentence(" <== mode:" + mode); _ptSentences.appendToLastSentence(String.format(" line:%s", _combinedParser.getLine(t)));