// Fragment (truncated): walks every buffered token and tests whether it is on
// the HIDDEN channel; the loop/if bodies are cut off at the end of this line.
// NOTE(review): the tokens.getTokens() return value is discarded — presumably
// called only to force the stream to buffer all tokens; confirm in full source.
tokens.getTokens(); for (int i=0; i<tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {
// Fragment (truncated duplicate): scans all buffered tokens for those on the
// smali parser's HIDDEN channel; bodies not visible from this snippet.
tokens.getTokens(); for (int i=0; i<tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {
/** * Verify the input has been properly consumed */ protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) { if ( tokens.index() != tokens.size() - 1 ) { //At this point we need to check for redundant EOF tokens //which might have been added by the Parser: List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() -1); for (Token endToken : endingTokens) { if (! "<EOF>".equals(endToken.getText())) { //writing to ps2 will mark the test as failed: ps2.print( "Invalid input" ); return; } } } }
/** * Verify the input has been properly consumed */ protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) { if ( tokens.index() != tokens.size() - 1 ) { //At this point we need to check for redundant EOF tokens //which might have been added by the Parser: List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() -1); for (Token endToken : endingTokens) { if (! "<EOF>".equals(endToken.getText())) { //writing to ps2 will mark the test as failed: ps2.print( "Invalid input" ); return; } } } }
// NOTE(review): duplicate of the checkForValidInput snippet above. EOF is
// detected by comparing token text against the literal "<EOF>"; comparing
// getType() with Token.EOF would be more robust — confirm which convention
// the surrounding project uses before changing.
/** * Verify the input has been properly consumed */ protected void checkForValidInput(CommonTokenStream tokens, PrintStream ps2) { if ( tokens.index() != tokens.size() - 1 ) { //At this point we need to check for redundant EOF tokens //which might have been added by the Parser: List<? extends Token> endingTokens = tokens.getTokens(tokens.index(), tokens.size() -1); for (Token endToken : endingTokens) { if (! "<EOF>".equals(endToken.getText())) { //writing to ps2 will mark the test as failed: ps2.print( "Invalid input" ); return; } } } }
// Fragment: loop header iterating every token index in the stream; the loop
// body is not visible in this snippet.
for (int i = 0; i < tokens.size(); i++)
/**
 * Returns the concatenated text of tokens the parser left unconsumed, or
 * {@code null} when the whole input was consumed. The stream is filled first
 * so the size reflects every token from the underlying TokenSource; trailing
 * EOF tokens (which the parser may insert) are skipped by a type check.
 */
// NOTE(review): the final stray '}' appears to close an enclosing class whose
// header is outside this snippet — kept as-is.
private String getUnconsumedTokens(CommonTokenStream tokens) { // ensure we've buffered all tokens from the underlying TokenSource tokens.fill(); if ( tokens.index() == tokens.size() - 1 ) { return null; } StringBuilder nonEofEndingTokens = new StringBuilder(); @SuppressWarnings("unchecked") List<Token> unconsumed = (List<Token>) tokens.getTokens( tokens.index(), tokens.size() - 1 ); for ( Token endToken : unconsumed ) { // Ignore <EOF> tokens as they might be inserted by the parser if ( endToken.getType() != Token.EOF ) { nonEofEndingTokens.append( endToken.getText() ); } } return nonEofEndingTokens.length() > 0 ? nonEofEndingTokens.toString() : null; } }
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
// NOTE(review): duplicate of getRuleOrSubruleStartToken above. Scans backwards
// from the current stream index (clamped when index == size, which happens
// while lexing) for the nearest LPAREN, TOKEN_REF, or RULE_REF token.
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
// NOTE(review): another duplicate of getRuleOrSubruleStartToken; returns the
// nearest preceding LPAREN/TOKEN_REF/RULE_REF token, or null when absent.
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
// NOTE(review): duplicate of getRuleOrSubruleStartToken; see the first copy
// above for the canonical version of this backward scan.
/** scan backwards from current point in this.tokens list * looking for the start of the rule or subrule. * Return token or null if for some reason we can't find the start. */ public Token getRuleOrSubruleStartToken() { if ( tokens==null ) return null; int i = tokens.index(); int n = tokens.size(); if ( i>=n ) i = n-1; // seems index == n as we lex while ( i>=0 && i<n) { int ttype = tokens.get(i).getType(); if ( ttype == LPAREN || ttype == TOKEN_REF || ttype == RULE_REF ) { return tokens.get(i); } i--; } return null; }
// Fragment (truncated): appends a newline to buf, then iterates the token
// indices spanned by ruleAST (start..stop), clamped to the buffer size so a
// stop index past the buffered tokens cannot overrun; loop body not visible.
buf.append("\n"); for (int i=ruleAST.getTokenStartIndex(); i<=ruleAST.getTokenStopIndex() && i<tokenBuffer.size(); i++)
// Fragment (truncated duplicate): loop over the rule AST's token span,
// bounded by the token buffer's size; the loop body is cut off here.
buf.append("\n"); for (int i=ruleAST.getTokenStartIndex(); i<=ruleAST.getTokenStopIndex() && i<tokenBuffer.size(); i++)
// Fragment: registers the Ruta LINE_COMMENT and COMMENT token types in a
// type set (bs — declared outside this snippet, presumably a BitSet; confirm),
// then fetches all matching tokens from the stream. NOTE(review): the result
// carries an unchecked cast to List<CommonToken>; the end index passed is
// tokenStream.size() — verify against the getTokens overload's bounds contract.
bs.add(org.apache.uima.ruta.parser.RutaParser.LINE_COMMENT); bs.add(org.apache.uima.ruta.parser.RutaParser.COMMENT); List<CommonToken> comments = (List<CommonToken>) tokenStream.getTokens(0, tokenStream.size(), bs);
// Fragment (truncated duplicate): iterates buffered tokens, filtering for the
// smali parser's HIDDEN channel; loop/if bodies are cut off at line end.
tokens.getTokens(); for (int i=0; i<tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {
// Fragment (truncated duplicate): scan for HIDDEN-channel tokens; see the
// first copy of this snippet above.
tokens.getTokens(); for (int i=0; i<tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {
// Fragment (truncated duplicate): scan for HIDDEN-channel tokens; see the
// first copy of this snippet above.
tokens.getTokens(); for (int i=0; i<tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {
// Fragment (truncated duplicate): scan for HIDDEN-channel tokens; see the
// first copy of this snippet above.
tokens.getTokens(); for (int i=0; i<tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {
// Fragment (truncated duplicate, spaced formatting): scan for HIDDEN-channel
// tokens; only whitespace differs from the copies above.
tokens.getTokens(); for (int i = 0; i < tokens.size(); i++) { Token token = tokens.get(i); if (token.getChannel() == smaliParser.HIDDEN) {