/**
 * Collects every comment token (block and line comments) from the given
 * token stream.
 *
 * @param stream the lexer's token stream (ANTLR 3 returns a raw list,
 *               hence the cast below)
 * @return the comment tokens in stream order; empty when there are none
 */
protected List<CommonToken> extractComments(CommonTokenStream stream) {
    final List<CommonToken> comments = new ArrayList<CommonToken>();
    for (Object raw : stream.getTokens()) {
        final CommonToken candidate = (CommonToken) raw;
        final int type = candidate.getType();
        if (type == EolLexer.COMMENT || type == EolParser.LINE_COMMENT) {
            comments.add(candidate);
        }
    }
    return comments;
}
// java.beans persistence-delegate hook: describes how to recreate a
// CommonToken via its (int type, String text) constructor so it can be
// written by an XMLEncoder.
// NOTE(review): only type and text are captured — line, char position,
// channel and token index are lost on round-trip; confirm that is acceptable.
@Override protected Expression instantiate(Object oldInstance, Encoder out) {
    CommonToken ct = (CommonToken)oldInstance;
    Object[] args = {ct.getType(), ct.getText()};
    return new Expression(ct, ct.getClass(), "new", args);
}
} // closes the enclosing delegate class (declaration not visible in this excerpt)
// Anonymous PersistenceDelegate: recreates a CommonToken from its type and
// text via the (int, String) constructor.
// NOTE(review): position/channel/index information is not serialized here;
// verify the consumer never needs it after deserialization.
@Override protected Expression instantiate(Object oldInstance, Encoder out) {
    return new Expression(oldInstance, oldInstance.getClass(), "new", new Object[] {
        ((CommonToken) oldInstance).getType(),
        ((CommonToken) oldInstance).getText() });
}
}); // closes the anonymous class and the registering call (both start outside this excerpt)
import org.antlr.runtime.*;

/**
 * Demo driver: lexes the file "Test.java" with JavaCommentLexer and prints
 * each single-line and multi-line comment token, escaping embedded newlines
 * so every comment occupies one output line.
 */
public class Main {
    public static void main(String[] args) throws Exception {
        JavaCommentLexer lexer = new JavaCommentLexer(new ANTLRFileStream("Test.java"));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        for (Object raw : tokens.getTokens()) {
            CommonToken token = (CommonToken) raw;
            int type = token.getType();
            // getText() is only called on matched comment tokens (as in the
            // original flow), since other token types may carry no text.
            if (type == JavaCommentLexer.SingleLineComment) {
                System.out.println("SingleLineComment :: " + token.getText().replace("\n", "\\n"));
            }
            if (type == JavaCommentLexer.MultiLineComment) {
                System.out.println("MultiLineComment :: " + token.getText().replace("\n", "\\n"));
            }
        }
    }
}
/**
 * Converts the COMMENT tokens of a hidden-token run into Comment nodes.
 * A NEW_LINE token marks the most recently created comment as being
 * followed by a newline.
 *
 * @param preceding hidden tokens that appeared before some element
 * @param source the source the tokens were read from
 * @return comments in token order; empty when the run had none
 */
private List<Comment> convertToComments(List<CommonToken> preceding, LessSource source) {
    List<Comment> comments = new ArrayList<Comment>();
    Comment lastSeen = null;
    for (CommonToken hidden : preceding) {
        int type = hidden.getType();
        if (type == LessLexer.COMMENT) {
            lastSeen = new Comment(new HiddenTokenAwareTree(hidden, source));
            comments.add(lastSeen);
        } else if (type == LessLexer.NEW_LINE && lastSeen != null) {
            // Newline before any comment is ignored, matching original behavior.
            lastSeen.setHasNewLine(true);
        }
    }
    return comments;
}
/**
 * Builds a human-readable, newline-separated description of this node's
 * payload: token details for a CommonToken, error details for a
 * NoViableAltException, otherwise the payload's string form (prefixed
 * with "Rule: " for non-leaf nodes).
 *
 * @return the formatted info string (never null)
 */
public String getInfoString() {
    StringBuilder info = new StringBuilder();
    Object payload = getPayload();
    if (payload instanceof CommonToken) {
        CommonToken t = (CommonToken) payload;
        info.append("Type: ").append(grammar.getTokenDisplayName(t.getType())).append("\n");
        info.append("Text: ").append(t.getText()).append("\n");
        info.append("Line: ").append(t.getLine()).append("\n");
        info.append("Char: ").append(t.getCharPositionInLine()).append("\n");
        info.append("Channel: ").append(t.getChannel()).append("\n");
    } else if (payload instanceof NoViableAltException) {
        NoViableAltException e = (NoViableAltException) payload;
        info.append("Description: ").append(e.grammarDecisionDescription).append("\n");
        // Fixed display-label typo: was "Descision:".
        info.append("Decision: ").append(e.decisionNumber).append("\n");
        info.append("State: ").append(e.stateNumber).append("\n");
    } else {
        if (isLeaf()) {
            info.append(payload.toString());
        } else {
            info.append("Rule: ").append(payload.toString());
        }
    }
    return info.toString();
}
/**
 * Captures the type and character length of the given token.
 * Length is derived from the inclusive start/stop character indexes.
 *
 * @param token the token to summarize
 */
public TokenInfo(CommonToken token) {
    this.type = token.getType();
    this.length = token.getStopIndex() - token.getStartIndex() + 1;
}
/**
 * Removes and returns the leading run of COMMENT tokens from the hidden-token
 * queue whose token index is below {@code end}, then additionally includes
 * the next remaining hidden token if its index is still below {@code end} —
 * presumably the NEW_LINE that terminates the comment run (TODO confirm).
 * NOTE(review): that final token is added to the result WITHOUT being removed
 * from {@code hiddenTokens} (no removeFirst after the loop) — verify callers
 * rely on it staying queued rather than this being an oversight.
 *
 * @param end exclusive token-index bound for the run
 * @return the collected tokens; empty when the queue is empty
 */
private LinkedList<CommonToken> readTillNewLine(int end) {
    LinkedList<CommonToken> result = new LinkedList<CommonToken>();
    if (hiddenTokens.isEmpty())
        return result;
    CommonToken first = hiddenTokens.peekFirst();
    while (first != null && first.getTokenIndex() < end && first.getType() == LessLexer.COMMENT) {
        result.add(first);
        hiddenTokens.removeFirst();
        first = hiddenTokens.peekFirst();
    }
    if (first == null || first.getTokenIndex() >= end)
        return result;
    result.add(first);
    return result;
}
// Fragment of an AST-to-text printer for ANTLR grammar nodes: emits "(" for
// blocks, "{...}" for actions, "}?" for semantic-predicate variants, and
// "[..." for argument actions.
// NOTE(review): this excerpt is truncated — the "else if" branches lack the
// closing braces of the preceding branches, so it does not compile as-is;
// the enclosing method presumably closes them outside this view.
if ( t.getType()==ANTLRParser.BLOCK ) { buf.append("("); else if ( t.getType()==ANTLRParser.ACTION ) { buf.append("{"); buf.append(t.getText()); buf.append("}"); else if ( t.getType()==ANTLRParser.SEMPRED || t.getType()==ANTLRParser.SYN_SEMPRED || t.getType()==ANTLRParser.GATED_SEMPRED || t.getType()==ANTLRParser.BACKTRACK_SEMPRED ) buf.append("}?"); else if ( t.getType()==ANTLRParser.ARG_ACTION ) { buf.append("["); buf.append(t.getText());
// Kryo serializer hook: writes a CommonToken as its int type followed by its
// text.
// NOTE(review): line/char position, channel and token index are not written —
// presumably the matching read() reconstructs tokens from type/text only;
// confirm against the deserializer.
@Override public void write(Kryo kryo, Output output, CommonToken token) {
    output.writeInt(token.getType());
    output.writeString(token.getText());
}
} // closes the enclosing serializer class (declaration not visible in this excerpt)
// Duplicate of the grammar-node printing fragment above (same truncation):
// emits "(" for blocks, "{...}" for actions, "}?" for semantic predicates,
// and "[..." for argument actions.
// NOTE(review): braces are unbalanced in this excerpt; the enclosing method
// closes them outside this view.
if ( t.getType()==ANTLRParser.BLOCK ) { buf.append("("); else if ( t.getType()==ANTLRParser.ACTION ) { buf.append("{"); buf.append(t.getText()); buf.append("}"); else if ( t.getType()==ANTLRParser.SEMPRED || t.getType()==ANTLRParser.SYN_SEMPRED || t.getType()==ANTLRParser.GATED_SEMPRED || t.getType()==ANTLRParser.BACKTRACK_SEMPRED ) buf.append("}?"); else if ( t.getType()==ANTLRParser.ARG_ACTION ) { buf.append("["); buf.append(t.getText());
/**
 * Distributes the hidden-token run before {@code secondChild} between two
 * sibling nodes: if the run ends in a NEW_LINE it is attached to
 * {@code firstChild} as following tokens, otherwise to {@code secondChild}
 * as preceding tokens.
 * NOTE(review): relies on readTillNewLine's convention that the terminating
 * NEW_LINE (when present) is the last element of the returned list — confirm.
 */
private void assignFirstCommentsSegment(HiddenTokenAwareTree firstChild, HiddenTokenAwareTree secondChild) {
    if (firstChild == null)
        return;
    LinkedList<CommonToken> tail = readTillNewLine(secondChild.getTokenStartIndex());
    if (tail.isEmpty())
        return;
    CommonToken lastInTail = tail.peekLast();
    if (lastInTail.getType() == LessLexer.NEW_LINE)
        firstChild.addFollowing(tail);
    else
        secondChild.addPreceding(tail);
}
//this method assumes that ast is empty
// Fragment (truncated at both ends): compares a freshly-lexed token against
// the cached TokenInfo at the same index to decide whether the damaged region
// can end here — same start offset (adjusted by lengthDiff), same type, and
// same length mean the remainder of the document is unchanged.
// NOTE(review): the two opening if-braces are closed outside this excerpt.
TokenInfo tokenInfo = internalModifyableTokenInfos.get(tokenInfoIdx);
if (token.getStartIndex() >= e.fOffset + e.fText.length()) {
    if (tokenStartsAt + lengthDiff == token.getStartIndex() && tokenInfo.type == token.getType() && token.getStopIndex() - token.getStartIndex() + 1 == tokenInfo.length) {
        return new Region(regionOffset, token.getStartIndex() - regionOffset);
if ( state.backtracking==1 ) { System.out.println("PCDATA:"+(name!=null?name.getText():null)+ " type:"+(name!=null?name.getType():0)); }
/** * @since 2.4 */ @Override protected RepairEntryData getRepairEntryData(DocumentEvent e) throws Exception { int tokenStartsAt = 0; int tokenInfoIdx = 0; TokenSource source = createTokenSource(e.fDocument.get()); CommonToken token = (CommonToken) source.nextToken(); // find start idx while (true) { if (token == Token.EOF_TOKEN) { break; } if (tokenInfoIdx >= getInternalModifyableTokenInfos().size()) break; TokenInfo tokenInfo = getInternalModifyableTokenInfos().get(tokenInfoIdx); if (tokenInfo.getAntlrTokenType() != token.getType() || token.getStopIndex() - token.getStartIndex() + 1 != tokenInfo.getLength()) break; if (tokenStartsAt + tokenInfo.getLength() > e.fOffset) break; tokenStartsAt += tokenInfo.getLength(); tokenInfoIdx++; token = (CommonToken) source.nextToken(); } return new RepairEntryData(tokenStartsAt, tokenInfoIdx, token, source); }
// Fragment of a lexer test: any INVALID_TOKEN must have been routed to the
// error channel; separately, a token whose type differs from the expectation
// fails the test with a descriptive message.
// NOTE(review): braces are unbalanced in this excerpt — the enclosing test
// method opens/closes them outside this view.
if (token.getType() == smaliParser.INVALID_TOKEN) {
    Assert.assertTrue("Encountered an INVALID_TOKEN not on the error channel", token.getChannel() == smaliParser.ERROR_CHANNEL);
    if (token.getType() != expectedTokenType) {
        Assert.fail(String.format("Invalid token at index %d. Expecting %s, got %s(%s)", expectedTokenIndex-1, expectedToken.tokenName, getTokenName(token.getType()), token.getText()));
// Fragment: the "return 0;" ends a preceding branch that is not visible here;
// the declaration that follows belongs to the continuing method body and
// caches the next token's type for later comparisons.
return 0;
final int nextType = nextToken.getType();
if ( state.backtracking==1 ) { System.out.println("PCDATA:"+(at!=null?at.getText():null)+" "+(av!=null?av.getText():null)+" type:" + (av!=null?av.getType():0)); }