/** * Verify the input has been properly consumed */ protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) { if ( tokens.index() != tokens.size() - 1 ) { //At this point we need to check for redundant EOF tokens //which might have been added by the Parser: List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() -1); for (Token endToken : endingTokens) { if (! "<EOF>".equals(endToken.getText())) { //writing to ps2 will mark the test as failed: ps2.print( "Invalid input" ); return; } } } }
/** * Verify the input has been properly consumed */ protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) { if ( tokens.index() != tokens.size() - 1 ) { //At this point we need to check for redundant EOF tokens //which might have been added by the Parser: List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() -1); for (Token endToken : endingTokens) { if (! "<EOF>".equals(endToken.getText())) { //writing to ps2 will mark the test as failed: ps2.print( "Invalid input" ); return; } } } }
/** * Verify the input has been properly consumed */ protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) { if ( tokens.index() != tokens.size() - 1 ) { //At this point we need to check for redundant EOF tokens //which might have been added by the Parser: List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() -1); for (Token endToken : endingTokens) { if (! "<EOF>".equals(endToken.getText())) { //writing to ps2 will mark the test as failed: ps2.print( "Invalid input" ); return; } } } }
private String getUnconsumedTokens(CommonTokenStream tokens) { // ensure we've buffered all tokens from the underlying TokenSource tokens.fill(); if ( tokens.index() == tokens.size() - 1 ) { return null; } StringBuilder nonEofEndingTokens = new StringBuilder(); @SuppressWarnings("unchecked") List<Token> unconsumed = (List<Token>) tokens.getTokens( tokens.index(), tokens.size() - 1 ); for ( Token endToken : unconsumed ) { // Ignore <EOF> tokens as they might be inserted by the parser if ( endToken.getType() != Token.EOF ) { nonEofEndingTokens.append( endToken.getText() ); } } return nonEofEndingTokens.length() > 0 ? nonEofEndingTokens.toString() : null; } }
// NOTE(review): fragment — the closing brace for this `if` lies outside the
// visible snippet. If the stream index is not at the final token, the parser
// did not consume the whole input, so an InvalidInputException is thrown.
if ( tokens.index()!=tokens.size()-1 ) { throw new InvalidInputException();
// NOTE(review): incomplete fragment (closing brace not visible here).
// Signals unconsumed input: the stream index should sit on the last token
// after a full parse; anything else raises InvalidInputException.
if ( tokens.index()!=tokens.size()-1 ) { throw new InvalidInputException();
// NOTE(review): fragment only — the enclosing method and the `if` body's
// closing brace are outside this snippet. Throws when the parser stopped
// before the last token of the stream, i.e. input was not fully consumed.
if ( tokens.index()!=tokens.size()-1 ) { throw new InvalidInputException();
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
// Reviewer notes on the method below:
// - Scans backwards from tokens.index() for the first token whose type is
//   LPAREN, TOKEN_REF, or RULE_REF — i.e. the opening of the rule/subrule.
// - The `i >= n` clamp handles index() == size, which the original author
//   observed happening while lexing (see inline comment).
// - Returns null when `tokens` is null or no opening token exists.
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }