/**
 * Lexer rule (ANTLR-generated style) for the {@code '?'} character, producing a
 * {@code QUESTION} token.
 *
 * <p>Cleanup of generated code: the unused local {@code _saveIndex} is removed, and the
 * {@code _token == null} test is dropped from the guard because {@code _token} is always
 * {@code null} at that point (nothing assigns it beforehand). Behavior is unchanged.
 *
 * @param _createToken whether to materialize a Token object (false during syntactic predicates)
 * @throws RecognitionException  if the input does not match
 * @throws CharStreamException   on character-stream failure
 * @throws TokenStreamException  on token-stream failure
 */
public final void mQUESTION(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
    // Remember where this token's text starts in the shared text buffer.
    int _begin = text.length();
    int _ttype = QUESTION;
    match('?');
    Token _token = null;
    if (_createToken && _ttype != Token.SKIP) {
        _token = makeToken(_ttype);
        // Token text is the slice of the shared buffer consumed by this rule.
        _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    // null when token creation was suppressed or the type is SKIP.
    _returnToken = _token;
}
// Fragment (incomplete in this view): the enclosing loop/method is cut off at both edges,
// so the trailing `if` blocks and the `break;` have no visible closers here.
// Builds a JToggleButton labelled with the token-type name looked up from `tokens`,
// wires it into a ButtonGroup and this listener, stores the token itself as a client
// property, then — when the token starts a later source line — appends a newline to
// `tokenPane` and stops at EOF. NOTE(review): `tokens` is presumably a type-id -> name
// map and `line` the last rendered line — confirm against the enclosing method.
JToggleButton tokenButton = new JToggleButton((String) tokens.get(Integer.valueOf(token.getType()))); bg.add(tokenButton); tokenButton.addActionListener(this); tokenButton.setToolTipText(token.getText()); tokenButton.putClientProperty("token", token); tokenButton.setMargin(new Insets(0, 1, 0, 1)); tokenButton.setFocusPainted(false); if (token.getLine() > line) { tokenPane.getDocument().insertString(tokenPane.getDocument().getLength(), "\n", null); line = token.getLine(); if (token.getType() == Token.EOF_TYPE) { break;
/**
 * Tells whether the given token marks the end of the input stream.
 *
 * @param token the token to inspect
 * @return true if the token's type is {@link antlr.Token#EOF_TYPE}
 */
protected boolean isEofToken(antlr.Token token) {
    final int type = token.getType();
    return antlr.Token.EOF_TYPE == type;
}
/**
 * Accounts one lexer token against the file's line-level metrics.
 *
 * <p>Comment tokens (outside the header comment) add to the comment-line counter —
 * spanning from the token's first line up to the next token's line, minus any empty
 * lines inside the comment — and each covered line is flagged as a comment line.
 * Any other non-whitespace token on a not-yet-counted line bumps the NCLOC counter
 * and flags that line as code.
 *
 * @param token         the token to account for
 * @param nextTokenLine line on which the following token starts
 * @param lines         the file's source lines, used to spot empty lines in comments
 */
private void handleToken(Token token, int nextTokenLine, List<String> lines) {
    final int type = token.getType();
    final int line = token.getLine();
    if (isComment(type)) {
        if (isNotHeaderComment(line)) {
            // Span of the comment in lines, excluding blank lines inside it.
            comments += nextTokenLine - line + 1 - numberEmptyLines(token, lines);
        }
        for (int ln = line; ln <= nextTokenLine; ln++) {
            fileLinesContext.setIntValue(CoreMetrics.COMMENT_LINES_DATA_KEY, ln, 1);
        }
        return;
    }
    if (isNotWhitespace(type) && line != currentLine) {
        // First code token seen on this line: count it once.
        loc++;
        fileLinesContext.setIntValue(CoreMetrics.NCLOC_DATA_KEY, line, 1);
        currentLine = line;
    }
}
// Fragment (incomplete in this view): generated-parser excerpt — a statement followed by a
// bare `else if` whose opening `if` lies outside this view, and the branch body is unclosed.
// The branch consumes an annotation when the lookahead is AT IDENT and the identifier is
// not "interface" (which would instead start an annotation *definition*).
astFactory.addASTChild(currentAST, returnAST); else if (((LA(1)==AT) && (LA(2)==IDENT))&&(LA(1)==AT && !LT(2).getText().equals("interface"))) { annotation(); astFactory.addASTChild(currentAST, returnAST);
// Fragment (incomplete in this view): the `if` block opened here is never closed within
// this excerpt. When not inside a syntactic-predicate guess, it synthesizes an implicit
// `value` IDENT node for a single-member annotation argument (e.g. @Foo("x") -> value="x"),
// positioning it from the same synthetic token. NOTE(review): `create(...)` is a factory
// defined elsewhere — confirm its position-handling against the enclosing parser.
if ( inputState.guessing==0 ) { annotationArguments_AST = (AST)currentAST.root; Token itkn = new Token(IDENT, "value"); AST i; i = (AST)astFactory.make( (new ASTArray(1)).add(create(IDENT,"value",itkn,itkn)));
private boolean isConstructorIdent(Token x) { if (currentClass == null) return false; if (currentClass.getType() != IDENT) return false; // cannot happen? String cname = currentClass.getText(); if (x == null || x.getType() != IDENT) return false; // cannot happen? return cname.equals(x.getText()); }
@Override public void tokenize(SourceCode sourceCode, Tokens tokenEntries) { StringBuilder buffer = sourceCode.getCodeBuffer(); GroovyLexer lexer = new GroovyLexer(new StringReader(buffer.toString())); TokenStream tokenStream = lexer.plumb(); try { Token token = tokenStream.nextToken(); while (token.getType() != Token.EOF_TYPE) { TokenEntry tokenEntry = new TokenEntry(token.getText(), sourceCode.getFileName(), token.getLine()); tokenEntries.add(tokenEntry); token = tokenStream.nextToken(); } } catch (TokenStreamException err) { // Wrap exceptions of the Groovy tokenizer in a TokenMgrError, so // they are correctly handled // when CPD is executed with the '--skipLexicalErrors' command line // option throw new TokenMgrError("Lexical error in file " + sourceCode.getFileName() + " at line " + lexer.getLine() + ", column " + lexer.getColumn() + ". Encountered: " + err.getMessage(), TokenMgrError.LEXICAL_ERROR); } finally { tokenEntries.add(TokenEntry.getEOF()); } } }
public void addWarning(String warning, String solution) { Token lt = null; try { lt = LT(1); } catch (TokenStreamException ee) { } if (lt == null) lt = Token.badToken; Map row = new HashMap(); row.put("warning", warning); row.put("solution", solution); row.put("filename", getFilename()); row.put("line", Integer.valueOf(lt.getLine())); row.put("column", Integer.valueOf(lt.getColumn())); // System.out.println(row); warningList.add(row); }
// Fragment (incomplete in this view): the switch opened here — and the `if` around it —
// are never closed within this excerpt. When whitespace tokens are being passed through,
// the token's type is switched on to filter out insignificant token types (the case
// labels lie outside this view).
int lasttype = token.getType(); if (whitespaceIncluded) { switch (lasttype) { // filter out insignificant types
// Fragment (incomplete in this view): generated-parser excerpt — `nls()` is followed by a
// bare `else if` whose opening `if` lies outside this view, and the branch body is unclosed.
// As on the similar fragment elsewhere in this file, the branch consumes an annotation when
// the lookahead is AT IDENT and the identifier is not "interface".
nls(); else if (((LA(1)==AT) && (LA(2)==IDENT))&&(LA(1)==AT && !LT(2).getText().equals("interface"))) { annotation(); astFactory.addASTChild(currentAST, returnAST);