@Override
public void tokenize(SourceCode tokens, Tokens tokenEntries) {
    List<String> code = tokens.getCode();
    for (int i = 0; i < code.size(); i++) {
        String currentLine = code.get(i);
        for (int j = 0; j < currentLine.length(); j++) {
            char tok = currentLine.charAt(j);
            // keep every non-whitespace character except braces and semicolons
            if (!Character.isWhitespace(tok) && tok != '{' && tok != '}' && tok != ';') {
                tokenEntries.add(new TokenEntry(String.valueOf(tok), tokens.getFileName(), i + 1));
            }
        }
    }
    tokenEntries.add(TokenEntry.getEOF());
}
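A minimal driver for this character-level tokenizer might look as follows. CharTokenizerDemo and CharTokenizer are made-up names for illustration, and backing the SourceCode with PMD's SourceCode.StringCodeLoader instead of a file is my assumption; a sketch, not the project's own harness:

import net.sourceforge.pmd.cpd.SourceCode;
import net.sourceforge.pmd.cpd.TokenEntry;
import net.sourceforge.pmd.cpd.Tokens;

public class CharTokenizerDemo {
    public static void main(String[] args) {
        // load the code from an in-memory string rather than a file
        SourceCode source = new SourceCode(new SourceCode.StringCodeLoader("int a;\nint b;", "Demo.java"));
        Tokens tokens = new Tokens();
        new CharTokenizer().tokenize(source, tokens); // the tokenizer shown above
        for (TokenEntry entry : tokens.getTokens()) {
            System.out.println(entry.getValue() + " @ line " + entry.getBeginLine());
        }
    }
}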
@Test
public void testNewTokenEntry() {
    TokenEntry entry = new TokenEntry("token1", "src1", 1);
    assertThat(entry.getValue(), equalTo("token1"));
    assertThat(entry.getBeginLine(), equalTo(1));

    entry = new TokenEntry("token2", "src2", 2);
    assertThat(entry.getValue(), equalTo("token2"));
    assertThat(entry.getBeginLine(), equalTo(2));
}
public List<TokensLine> chunk(String fileName, Reader fileReader) {
    SourceCode sourceCode = new SourceCode(new FileCodeLoaderWithoutCache(fileName, fileReader));
    Tokens tokens = new Tokens();
    TokenEntry.clearImages(); // reset TokenEntry's static cache before tokenizing
    try {
        tokenizer.tokenize(sourceCode, tokens);
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    TokenEntry.clearImages(); // and again afterwards, so no state leaks between files
    return convert(tokens.getTokens());
}
public int getLineCount(TokenEntry mark, Match match) {
    TokenEntry endTok = get(mark.getIndex() + match.getTokenCount() - 1);
    if (endTok == TokenEntry.EOF) {
        // the EOF sentinel carries no useful position; use the token before it
        endTok = get(mark.getIndex() + match.getTokenCount() - 2);
    }
    return endTok.getBeginLine() - mark.getBeginLine() + 1;
}
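A worked example with assumed numbers makes the off-by-one handling visible:

// purely illustrative values:
// mark.getIndex() == 10, match.getTokenCount() == 6
//   => end token is get(10 + 6 - 1) == get(15)
// if get(15) is EOF, fall back to get(14), the last real token
// with mark on line 4 and the end token on line 8:
//   lineCount = 8 - 4 + 1 = 5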
@Override
public void tokenize(SourceCode sourceCode, Tokens tokenEntries) {
    final String fileName = sourceCode.getFileName();
    final JavaTokenFilter tokenFilter = createTokenFilter(sourceCode);
    final ConstructorDetector constructorDetector = new ConstructorDetector(ignoreIdentifiers);

    Token currentToken = (Token) tokenFilter.getNextToken();
    while (currentToken != null) {
        processToken(tokenEntries, fileName, currentToken, constructorDetector);
        currentToken = (Token) tokenFilter.getNextToken();
    }
    tokenEntries.add(TokenEntry.getEOF());
}
private void processToken(Tokens tokenEntries, String fileName, Token currentToken,
        ConstructorDetector constructorDetector) {
    String image = currentToken.image;

    constructorDetector.restoreConstructorToken(tokenEntries, currentToken);

    // with ignoreLiterals, all literals of a kind share one image, so
    // fragments differing only in literal values still match
    if (ignoreLiterals && (currentToken.kind == JavaParserConstants.STRING_LITERAL
            || currentToken.kind == JavaParserConstants.CHARACTER_LITERAL
            || currentToken.kind == JavaParserConstants.DECIMAL_LITERAL
            || currentToken.kind == JavaParserConstants.FLOATING_POINT_LITERAL)) {
        image = String.valueOf(currentToken.kind);
    }
    // likewise for identifiers: renamed variables no longer break a match
    if (ignoreIdentifiers && currentToken.kind == JavaParserConstants.IDENTIFIER) {
        image = String.valueOf(currentToken.kind);
    }

    constructorDetector.processToken(currentToken);
    tokenEntries.add(new TokenEntry(image, fileName, currentToken.beginLine));
}
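To see what those two flags buy, a sketch along these lines tokenizes two fragments that differ only in names and literal values; the setIgnoreLiterals/setIgnoreIdentifiers setters exist on PMD's JavaTokenizer, but the wiring below is my assumption, not the project's own example:

import net.sourceforge.pmd.cpd.JavaTokenizer;
import net.sourceforge.pmd.cpd.SourceCode;
import net.sourceforge.pmd.cpd.TokenEntry;
import net.sourceforge.pmd.cpd.Tokens;

public class IgnoreFlagsDemo {
    static Tokens tokenize(String code, String name) throws Exception {
        JavaTokenizer tokenizer = new JavaTokenizer();
        tokenizer.setIgnoreLiterals(true);    // literals collapse to their token kind
        tokenizer.setIgnoreIdentifiers(true); // identifiers collapse to their token kind
        SourceCode source = new SourceCode(new SourceCode.StringCodeLoader(code, name));
        Tokens tokens = new Tokens();
        tokenizer.tokenize(source, tokens);
        return tokens;
    }

    public static void main(String[] args) throws Exception {
        Tokens a = tokenize("class A { int x = 1; }", "A.java");
        Tokens b = tokenize("class B { int y = 2; }", "B.java");
        // with both flags on, the two fragments produce identical image
        // sequences, so CPD would flag them as duplicates
        for (int i = 0; i < a.getTokens().size(); i++) {
            TokenEntry left = a.getTokens().get(i);
            TokenEntry right = b.getTokens().get(i);
            System.out.println(left.getValue().equals(right.getValue()));
        }
    }
}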
@Test
public void shouldClearCacheInTokenEntry() {
    bridge.chunk("file.txt", new InputStreamReader(new ByteArrayInputStream(new byte[0]), StandardCharsets.UTF_8));
    TokenEntry token = new TokenEntry("image", "srcId", 0);
    // a freshly cleared cache hands out identifier 1 again
    assertThat(token.getIndex(), is(0));
    assertThat(token.getIdentifier(), is(1));
}
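The identifier assertion above works because TokenEntry keeps a static image-to-identifier cache: the first distinct image after a reset gets identifier 1, the next distinct one 2, and so on. A small sketch of that behaviour (the printed expectations are mine, inferred from the semantics the test relies on):

import net.sourceforge.pmd.cpd.TokenEntry;

public class IdentifierCacheDemo {
    public static void main(String[] args) {
        TokenEntry.clearImages(); // reset the static image -> identifier cache
        TokenEntry a = new TokenEntry("foo", "A.java", 1);
        TokenEntry b = new TokenEntry("foo", "B.java", 7);
        TokenEntry c = new TokenEntry("bar", "A.java", 2);
        System.out.println(a.getIdentifier() == b.getIdentifier()); // true: same image, shared identifier
        System.out.println(a.getIdentifier() == c.getIdentifier()); // false: "bar" gets the next identifier
    }
}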
@Before
public void setUp() {
    Tokenizer tokenizer = new Tokenizer() {
        public void tokenize(SourceCode tokens, Tokens tokenEntries) throws IOException {
            tokenEntries.add(new TokenEntry("t1", "src", 1));
            tokenEntries.add(new TokenEntry("t2", "src", 1));
            tokenEntries.add(new TokenEntry("t3", "src", 2));
            tokenEntries.add(new TokenEntry("t1", "src", 4));
            tokenEntries.add(new TokenEntry("t3", "src", 4));
            tokenEntries.add(new TokenEntry("t3", "src", 4));
            tokenEntries.add(TokenEntry.getEOF());
        }
    };
    bridge = new TokenizerBridge(tokenizer, 10);
}
/**
 * We expect the {@link Tokenizer} implementation to be correct:
 * tokens are ordered by their occurrence in the source code and the last token is EOF.
 */
public static List<TokensLine> convert(List<TokenEntry> tokens) {
    List<TokensLine> result = new ArrayList<>();
    StringBuilder sb = new StringBuilder();
    int startLine = Integer.MIN_VALUE;
    int startIndex = 0;
    int currentIndex = 0;
    for (TokenEntry token : tokens) {
        if (token != TokenEntry.EOF) {
            String value = token.getValue();
            int line = token.getBeginLine();
            if (line != startLine) {
                addNewTokensLine(result, startIndex, currentIndex, startLine, sb);
                startIndex = currentIndex + 1;
                startLine = line;
            }
            currentIndex++;
            sb.append(value);
        }
    }
    addNewTokensLine(result, startIndex, currentIndex, startLine, sb);
    return result;
}
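Applied to the six-token fixture from setUp above, convert groups tokens into one TokensLine per distinct source line: "t1t2" for line 1, "t3" for line 2, and "t1t3t3" for line 4. A simplified re-statement of that grouping with a plain Map (illustrative only; the real method also tracks token indexes for each line):

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import net.sourceforge.pmd.cpd.TokenEntry;

public class GroupingDemo {
    // concatenate token images per source line, mirroring how convert()
    // emits one TokensLine per line
    static Map<Integer, String> groupByLine(List<TokenEntry> tokens) {
        Map<Integer, String> byLine = new LinkedHashMap<>();
        for (TokenEntry token : tokens) {
            if (token != TokenEntry.EOF) {
                byLine.merge(token.getBeginLine(), token.getValue(), String::concat);
            }
        }
        return byLine; // {1=t1t2, 2=t3, 4=t1t3t3} for the fixture above
    }
}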
public int getBeginLine() {
    return this.token.getBeginLine();
}
String filename = tokenEntry.getTokenSrcID();
File file = new File( filename );
PmdFileInfo fileInfo = fileMap.get( file );
String xrefLocation = fileInfo.getXrefLocation();
MavenProject projectFile = fileInfo.getProject();
int line = tokenEntry.getBeginLine();
public CPD(CPDConfiguration theConfiguration) {
    configuration = theConfiguration;
    // before we start any tokenizing (add(File...)), we need to reset the
    // static TokenEntry status
    TokenEntry.clearImages();
}
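Because of that static state, a typical standalone CPD run resets, configures, and then tokenizes, roughly as below. This is a rough driver sketch: CPDConfiguration's setters and LanguageFactory's signature vary between PMD releases, so treat the exact calls as assumptions:

import java.io.File;
import java.util.Iterator;
import net.sourceforge.pmd.cpd.CPD;
import net.sourceforge.pmd.cpd.CPDConfiguration;
import net.sourceforge.pmd.cpd.LanguageFactory;
import net.sourceforge.pmd.cpd.Match;

public class CpdDriver {
    public static void main(String[] args) throws Exception {
        CPDConfiguration config = new CPDConfiguration();
        config.setMinimumTileSize(50); // report duplicates of 50+ tokens
        config.setLanguage(LanguageFactory.createLanguage("java"));
        CPD cpd = new CPD(config); // the constructor above resets TokenEntry state
        cpd.add(new File("src/main/java/Foo.java")); // hypothetical path
        cpd.go();
        for (Iterator<Match> it = cpd.getMatches(); it.hasNext();) {
            Match match = it.next();
            System.out.println(match.getLineCount() + " duplicated lines, "
                    + match.getTokenCount() + " tokens");
        }
    }
}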
@Override
public void tokenize(final SourceCode sourceCode, final Tokens tokenEntries) {
    AntlrTokenManager tokenManager = getLexerForSource(sourceCode);
    try {
        AntlrToken token = (AntlrToken) tokenManager.getNextToken();
        while (token.getType() != Token.EOF) {
            if (!token.isHidden()) {
                final TokenEntry tokenEntry =
                        new TokenEntry(token.getImage(), tokenManager.getFileName(), token.getBeginLine());
                tokenEntries.add(tokenEntry);
            }
            token = (AntlrToken) tokenManager.getNextToken();
        }
    } catch (final AntlrTokenManager.ANTLRSyntaxError err) {
        // Wrap exceptions of the ANTLR tokenizer in a TokenMgrError, so they are correctly handled
        // when CPD is executed with the '--skipLexicalErrors' command line option
        throw new TokenMgrError("Lexical error in file " + tokenManager.getFileName() + " at line "
                + err.getLine() + ", column " + err.getColumn() + ". Encountered: " + err.getMessage(),
                TokenMgrError.LEXICAL_ERROR);
    } finally {
        tokenEntries.add(TokenEntry.getEOF());
    }
}
@Override
public boolean consume(CodeReader code, Tokens cpdTokens) {
    if (code.popTo(matcher, token) > 0) {
        // the matcher consumed characters: emit them as one CPD token
        cpdTokens.add(new TokenEntry(token.toString(), fileName, code.getLinePosition()));
        token = new StringBuilder();
        return true;
    } else {
        return false;
    }
}