/**
 * Reports a {@link ErrorType#RESERVED_RULE_NAME} grammar error for every rule
 * whose name appears in {@code reservedNames}. The error is anchored at the
 * rule-name token (first child of the rule's AST node).
 *
 * @param rules the rules to validate
 */
protected void checkReservedNames(Collection<Rule> rules) {
	for (Rule rule : rules) {
		if (!reservedNames.contains(rule.name)) {
			continue;
		}
		Token nameToken = ((GrammarAST)rule.ast.getChild(0)).getToken();
		errMgr.grammarError(ErrorType.RESERVED_RULE_NAME, g.fileName, nameToken, rule.name);
	}
}
/**
 * Reports a {@link ErrorType#RESERVED_RULE_NAME} grammar error for every rule
 * whose name appears in {@code reservedNames}. The error is anchored at the
 * rule-name token (first child of the rule's AST node).
 *
 * @param rules the rules to validate
 */
protected void checkReservedNames(Collection<Rule> rules) {
	for (Rule rule : rules) {
		if (!reservedNames.contains(rule.name)) {
			continue;
		}
		Token nameToken = ((GrammarAST)rule.ast.getChild(0)).getToken();
		errMgr.grammarError(ErrorType.RESERVED_RULE_NAME, g.fileName, nameToken, rule.name);
	}
}
/**
 * Reports a {@link ErrorType#RESERVED_RULE_NAME} grammar error for every rule
 * whose name appears in {@code reservedNames}. The error is anchored at the
 * rule-name token (first child of the rule's AST node).
 *
 * @param rules the rules to validate
 */
protected void checkReservedNames(Collection<Rule> rules) {
	for (Rule rule : rules) {
		if (!reservedNames.contains(rule.name)) {
			continue;
		}
		Token nameToken = ((GrammarAST)rule.ast.getChild(0)).getToken();
		errMgr.grammarError(ErrorType.RESERVED_RULE_NAME, g.fileName, nameToken, rule.name);
	}
}
/**
 * Reports a {@link ErrorType#RESERVED_RULE_NAME} grammar error for every rule
 * whose name appears in {@code reservedNames}. The error is anchored at the
 * rule-name token (first child of the rule's AST node).
 *
 * @param rules the rules to validate
 */
protected void checkReservedNames(Collection<Rule> rules) {
	for (Rule rule : rules) {
		if (!reservedNames.contains(rule.name)) {
			continue;
		}
		Token nameToken = ((GrammarAST)rule.ast.getChild(0)).getToken();
		errMgr.grammarError(ErrorType.RESERVED_RULE_NAME, g.fileName, nameToken, rule.name);
	}
}
public GrammarAST getNodeWithTokenIndex(int index) { if ( this.getToken()!=null && this.getToken().getTokenIndex()==index ) { return this; } // walk all children of root. for (int i= 0; i < getChildCount(); i++) { GrammarAST child = (GrammarAST)getChild(i); GrammarAST result = child.getNodeWithTokenIndex(index); if ( result!=null ) { return result; } } return null; }
public GrammarAST getNodeWithTokenIndex(int index) { if ( this.getToken()!=null && this.getToken().getTokenIndex()==index ) { return this; } // walk all children of root. for (int i= 0; i < getChildCount(); i++) { GrammarAST child = (GrammarAST)getChild(i); GrammarAST result = child.getNodeWithTokenIndex(index); if ( result!=null ) { return result; } } return null; }
public GrammarAST getNodeWithTokenIndex(int index) { if ( this.getToken()!=null && this.getToken().getTokenIndex()==index ) { return this; } // walk all children of root. for (int i= 0; i < getChildCount(); i++) { GrammarAST child = (GrammarAST)getChild(i); GrammarAST result = child.getNodeWithTokenIndex(index); if ( result!=null ) { return result; } } return null; }
/**
 * Reports a {@link ErrorType#RESERVED_RULE_NAME} grammar error for every rule
 * whose name appears in {@code reservedNames}. The error is anchored at the
 * rule-name token (first child of the rule's AST node).
 *
 * @param rules the rules to validate; must not be {@code null}
 */
protected void checkReservedNames(@NotNull Collection<Rule> rules) {
	for (Rule rule : rules) {
		if (!reservedNames.contains(rule.name)) {
			continue;
		}
		Token nameToken = ((GrammarAST)rule.ast.getChild(0)).getToken();
		errMgr.grammarError(ErrorType.RESERVED_RULE_NAME, g.fileName, nameToken, rule.name);
	}
}
/** Not valid for non-lexers. */ @NotNull @Override public Handle range(@NotNull GrammarAST a, @NotNull GrammarAST b) { g.tool.errMgr.grammarError(ErrorType.TOKEN_RANGE_IN_PARSER, g.fileName, a.getToken(), a.getToken().getText(), b.getToken().getText()); // From a..b, yield ATN for just a. return tokenRef((TerminalAST)a); }
/**
 * Labels on lexer elements were an ANTLR v3 feature that is no longer
 * supported; reports {@link ErrorType#V3_LEXER_LABEL} at the label token
 * (first child of the labeled element's AST node).
 */
@Override
protected void enterLabeledLexerElement(GrammarAST tree) {
	GrammarAST labelNode = (GrammarAST)tree.getChild(0);
	Token labelToken = labelNode.getToken();
	g.tool.errMgr.grammarError(ErrorType.V3_LEXER_LABEL, g.fileName,
							   labelToken, labelToken.getText());
}
/**
 * Labels on lexer elements were an ANTLR v3 feature that is no longer
 * supported; reports {@link ErrorType#V3_LEXER_LABEL} at the label token
 * (first child of the labeled element's AST node).
 */
@Override
protected void enterLabeledLexerElement(GrammarAST tree) {
	GrammarAST labelNode = (GrammarAST)tree.getChild(0);
	Token labelToken = labelNode.getToken();
	g.tool.errMgr.grammarError(ErrorType.V3_LEXER_LABEL, g.fileName,
							   labelToken, labelToken.getText());
}
/**
 * Labels on lexer elements were an ANTLR v3 feature that is no longer
 * supported; reports {@link ErrorType#V3_LEXER_LABEL} at the label token
 * (first child of the labeled element's AST node).
 */
@Override
protected void enterLabeledLexerElement(GrammarAST tree) {
	GrammarAST labelNode = (GrammarAST)tree.getChild(0);
	Token labelToken = labelNode.getToken();
	g.tool.errMgr.grammarError(ErrorType.V3_LEXER_LABEL, g.fileName,
							   labelToken, labelToken.getText());
}
/**
 * Labels on lexer elements were an ANTLR v3 feature that is no longer
 * supported; reports {@link ErrorType#V3_LEXER_LABEL} at the label token
 * (first child of the labeled element's AST node).
 */
@Override
protected void enterLabeledLexerElement(GrammarAST tree) {
	GrammarAST labelNode = (GrammarAST)tree.getChild(0);
	Token labelToken = labelNode.getToken();
	g.tool.errMgr.grammarError(ErrorType.V3_LEXER_LABEL, g.fileName,
							   labelToken, labelToken.getText());
}
/**
 * Labels on lexer elements were an ANTLR v3 feature that is no longer
 * supported; reports {@link ErrorType#V3_LEXER_LABEL} at the label token
 * (first child of the labeled element's AST node).
 */
@Override
protected void enterLabeledLexerElement(GrammarAST tree) {
	GrammarAST labelNode = (GrammarAST)tree.getChild(0);
	Token labelToken = labelNode.getToken();
	g.tool.errMgr.grammarError(ErrorType.V3_LEXER_LABEL, g.fileName,
							   labelToken, labelToken.getText());
}
protected void processLexer() { // make sure all non-fragment lexer rules must match at least one symbol for (Rule rule : g.rules.values()) { if (rule.isFragment()) { continue; } LL1Analyzer analyzer = new LL1Analyzer(g.atn); IntervalSet look = analyzer.LOOK(g.atn.ruleToStartState[rule.index], null); if (look.contains(Token.EPSILON)) { g.tool.errMgr.grammarError(ErrorType.EPSILON_TOKEN, g.fileName, ((GrammarAST)rule.ast.getChild(0)).getToken(), rule.name); } } }
protected void processLexer() { // make sure all non-fragment lexer rules must match at least one symbol for (Rule rule : g.rules.values()) { if (rule.isFragment()) { continue; } LL1Analyzer analyzer = new LL1Analyzer(g.atn); IntervalSet look = analyzer.LOOK(g.atn.ruleToStartState[rule.index], null); if (look.contains(Token.EPSILON)) { g.tool.errMgr.grammarError(ErrorType.EPSILON_TOKEN, g.fileName, ((GrammarAST)rule.ast.getChild(0)).getToken(), rule.name); } } }
protected void processLexer() { // make sure all non-fragment lexer rules must match at least one symbol for (Rule rule : g.rules.values()) { if (rule.isFragment()) { continue; } LL1Analyzer analyzer = new LL1Analyzer(g.atn); IntervalSet look = analyzer.LOOK(g.atn.ruleToStartState[rule.index], null); if (look.contains(Token.EPSILON)) { g.tool.errMgr.grammarError(ErrorType.EPSILON_TOKEN, g.fileName, ((GrammarAST)rule.ast.getChild(0)).getToken(), rule.name); } } }
protected void processLexer() { // make sure all non-fragment lexer rules must match at least one symbol for (Rule rule : g.rules.values()) { if (rule.isFragment()) { continue; } LL1Analyzer analyzer = new LL1Analyzer(g.atn); IntervalSet look = analyzer.LOOK(g.atn.ruleToStartState[rule.index], PredictionContext.EMPTY_LOCAL); if (look.contains(Token.EPSILON)) { g.tool.errMgr.grammarError(ErrorType.EPSILON_TOKEN, g.fileName, ((GrammarAST)rule.ast.getChild(0)).getToken(), rule.name); } } }