/**
 * Weak-keyword handling for the first token on a path: when the next
 * token is a DOT, the previously consumed token is demoted to a plain
 * IDENT so keyword-like names can be used as identifiers.
 *
 * @throws TokenStreamException if the token stream cannot be read
 */
@Override
public void firstPathTokenWeakKeywords() throws TokenStreamException {
    final int lookahead = LA(1);
    if (lookahead == DOT) {
        LT(0).setType(IDENT);
    }
}
/** * Create block comment from token. * @param token * Token object. * @return DetailAST with BLOCK_COMMENT type. */ public static DetailAST createBlockCommentNode(Token token) { final DetailAST blockComment = new DetailAST(); blockComment.initialize(TokenTypes.BLOCK_COMMENT_BEGIN, BLOCK_MULTIPLE_COMMENT_BEGIN); // column counting begins from 0 blockComment.setColumnNo(token.getColumn() - 1); blockComment.setLineNo(token.getLine()); final DetailAST blockCommentContent = new DetailAST(); blockCommentContent.setType(TokenTypes.COMMENT_CONTENT); // column counting begins from 0 // plus length of '/*' blockCommentContent.setColumnNo(token.getColumn() - 1 + 2); blockCommentContent.setLineNo(token.getLine()); blockCommentContent.setText(token.getText()); final DetailAST blockCommentClose = new DetailAST(); blockCommentClose.initialize(TokenTypes.BLOCK_COMMENT_END, BLOCK_MULTIPLE_COMMENT_END); final Map.Entry<Integer, Integer> linesColumns = countLinesColumns( token.getText(), token.getLine(), token.getColumn()); blockCommentClose.setLineNo(linesColumns.getKey()); blockCommentClose.setColumnNo(linesColumns.getValue()); blockComment.addChild(blockCommentContent); blockComment.addChild(blockCommentClose); return blockComment; }
private void error(String msg) throws TokenStreamException, SemanticException { Token token = LT(0); throw new SemanticException( msg, token.getFilename(), token.getLine(), token.getColumn() ); }
/**
 * ANTLR-generated lexer rule: matches one or more IDENTIFIER_PART
 * characters ((IDENTIFIER_PART)+) and emits an ATOM token.
 * NOTE(review): generated code — logic left byte-identical.
 *
 * @param _createToken whether to materialize a Token object
 * @throws RecognitionException on lexical mismatch
 * @throws CharStreamException on character-stream errors
 * @throws TokenStreamException on token-stream errors
 */
public final void mATOM(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype; Token _token=null; int _begin=text.length();
    _ttype = ATOM;
    int _saveIndex;
    {
        // (...)+ loop: at least one IDENTIFIER_PART is required, otherwise
        // the first non-matching char raises NoViableAltForCharException
        int _cnt26=0;
        _loop26:
        do {
            if ((_tokenSet_0.member(LA(1)))) {
                mIDENTIFIER_PART(false);
            }
            else {
                if ( _cnt26>=1 ) { break _loop26; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
            }
            _cnt26++;
        } while (true);
    }
    // materialize the token unless suppressed or the type is SKIP
    if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
        _token = makeToken(_ttype);
        _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
    }
    _returnToken = _token;
}
theRetToken=_returnToken; else if ((_tokenSet_0.member(LA(1)))) { mATOM(true); theRetToken=_returnToken; else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());} _ttype = _returnToken.getType(); _ttype = testLiteralsTable(_ttype); _returnToken.setType(_ttype); return _returnToken; throw new TokenStreamRecognitionException(e); throw new TokenStreamIOException(((CharStreamIOException)cse).io); throw new TokenStreamException(cse.getMessage());
/**
 * ANTLR-generated lexer rule for the '-' operator: consumes a single dash
 * and emits a MINUS token when requested.
 *
 * @param _createToken whether to materialize a Token object
 * @throws RecognitionException on lexical mismatch
 * @throws CharStreamException on character-stream errors
 * @throws TokenStreamException on token-stream errors
 */
public final void mMINUS(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
    Token _token = null;
    final int _begin = text.length();
    int _ttype = MINUS;
    int _saveIndex;
    match('-');
    // build the token unless suppressed or the type is SKIP
    final boolean emit = _createToken && _token == null && _ttype != Token.SKIP;
    if (emit) {
        _token = makeToken(_ttype);
        _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
}
/**
 * ANTLR-generated lexer rule for "//" comments: consumes the marker,
 * reports the comment to the comment listener, lexes the remaining
 * content, and replaces the token text with just the content.
 * NOTE(review): generated code with guess-mode side effects — logic
 * left byte-identical.
 *
 * @param _createToken whether to materialize a Token object
 * @throws RecognitionException on lexical mismatch
 * @throws CharStreamException on character-stream errors
 * @throws TokenStreamException on token-stream errors
 */
public final void mSINGLE_LINE_COMMENT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype; Token _token=null; int _begin=text.length();
    _ttype = SINGLE_LINE_COMMENT;
    int _saveIndex;
    Token content=null;
    match("//");
    // only report outside syntactic predicates (guessing==0 means real parse);
    // column is rewound past the two slashes ("- 3" presumably accounts for
    // the 1-based column after "//" — confirm against CommentListener contract)
    if ( inputState.guessing==0 ) {
        mCommentListener.reportSingleLineComment("//", getLine(), getColumn() - 3);
    }
    mSINGLE_LINE_COMMENT_CONTENT(true);
    content=_returnToken;
    // strip the leading "//" from the token text, keeping only the content
    if ( inputState.guessing==0 ) {
        text.setLength(_begin);
        text.append(content.getText());
    }
    if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
        _token = makeToken(_ttype);
        _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
    }
    _returnToken = _token;
}
/**
 * Returns the text of the token at the given lookahead depth, or
 * {@code null} when no token exists at that position.
 *
 * @param lookAheadPosition lookahead depth passed to {@code LT}
 * @return token text, or {@code null} if the token is absent
 * @throws TokenStreamException if the underlying stream fails
 */
protected String retrieveLookAheadText(int lookAheadPosition) throws TokenStreamException {
    final Token lookAheadToken = LT(lookAheadPosition);
    if (lookAheadToken == null) {
        return null;
    }
    return lookAheadToken.getText();
}
/**
 * Populates this node from a lexer token, caching its source position
 * and the length of its text (0 when the text is null or empty).
 *
 * @param tok source token
 */
@Override
public void initialize(Token tok) {
    super.initialize(tok);
    filename = tok.getFilename();
    line = tok.getLine();
    column = tok.getColumn();
    final String tokenText = tok.getText();
    if (StringHelper.isEmpty(tokenText)) {
        textLength = 0;
    }
    else {
        textLength = tokenText.length();
    }
}
/**
 * Create comment AST from token. Depending on token type
 * SINGLE_LINE_COMMENT or BLOCK_COMMENT_BEGIN is created.
 *
 * @param token to create the AST
 * @return DetailAST of comment node
 */
private static DetailAST createCommentAstFromToken(Token token) {
    final boolean isSingleLine = token.getType() == TokenTypes.SINGLE_LINE_COMMENT;
    return isSingleLine
            ? createSlCommentNode(token)
            : CommonUtil.createBlockCommentNode(token);
}
public final void mIDENT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException { int _ttype; Token _token=null; int _begin=text.length(); _ttype = IDENT; int _saveIndex; if ((_tokenSet_9.member(LA(1)))) { mID_PART(false); if (mTreatAssertAsKeyword && "assert".equals(new String(text.getBuffer(),_begin,text.length()-_begin))) { _ttype = ASSERT; if ( _createToken && _token==null && _ttype!=Token.SKIP ) { _token = makeToken(_ttype); _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
if ((_tokenSet_0.member(LA(1)))) { mIDENT(true); theRetToken=_returnToken; else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());} _ttype = _returnToken.getType(); _returnToken.setType(_ttype); return _returnToken; throw new TokenStreamRecognitionException(e); throw new TokenStreamIOException(((CharStreamIOException)cse).io); throw new TokenStreamException(cse.getMessage());
/**
 * ANTLR-generated lexer rule for the literal "annually": consumes the
 * keyword and emits an ANNUALLY token when requested.
 *
 * @param _createToken whether to materialize a Token object
 * @throws RecognitionException on lexical mismatch
 * @throws CharStreamException on character-stream errors
 * @throws TokenStreamException on token-stream errors
 */
public final void mANNUALLY(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
    Token _token = null;
    final int _begin = text.length();
    int _ttype = ANNUALLY;
    int _saveIndex;
    match("annually");
    // build the token unless suppressed or the type is SKIP
    final boolean emit = _createToken && _token == null && _ttype != Token.SKIP;
    if (emit) {
        _token = makeToken(_ttype);
        _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
}
public final void mBLOCK_COMMENT_BEGIN(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException { int _ttype; Token _token=null; int _begin=text.length(); _ttype = BLOCK_COMMENT_BEGIN; int _saveIndex; text.setLength(_begin); text.append(content.getText()); _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));