@Override
public void tokenize(SourceCode tokens, Tokens tokenEntries) {
    List<String> code = tokens.getCode();
    for (int i = 0; i < code.size(); i++) {
        String currentLine = code.get(i);
        for (int j = 0; j < currentLine.length(); j++) {
            char tok = currentLine.charAt(j);
            // skip whitespace and structural characters; record everything else as a one-character token
            if (!Character.isWhitespace(tok) && tok != '{' && tok != '}' && tok != ';') {
                tokenEntries.add(new TokenEntry(String.valueOf(tok), tokens.getFileName(), i + 1));
            }
        }
    }
    tokenEntries.add(TokenEntry.getEOF());
}
private void addAndSkipLexicalErrors(SourceCode sourceCode) throws IOException {
    // remember the token state so we can roll back if this file fails to lex
    TokenEntry.State savedTokenEntry = new TokenEntry.State(tokens.getTokens());
    try {
        addAndThrowLexicalError(sourceCode);
    } catch (TokenMgrError e) {
        System.err.println("Skipping " + sourceCode.getFileName() + ". Reason: " + e.getMessage());
        tokens.getTokens().clear();
        tokens.getTokens().addAll(savedTokenEntry.restore());
    }
}
public List<TokensLine> chunk(String fileName, Reader fileReader) {
    SourceCode sourceCode = new SourceCode(new FileCodeLoaderWithoutCache(fileName, fileReader));
    Tokens tokens = new Tokens();
    TokenEntry.clearImages();
    try {
        tokenizer.tokenize(sourceCode, tokens);
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    TokenEntry.clearImages();
    return convert(tokens.getTokens());
}
public void restoreConstructorToken(Tokens tokenEntries, Token currentToken) {
    if (!ignoreIdentifiers) {
        return;
    }
    if (currentToken.kind == JavaParserConstants.LPAREN) {
        // was the previous token a constructor? If so, restore the identifier
        if (!classMembersIndentations.isEmpty()
                && classMembersIndentations.peek().name.equals(prevIdentifier)) {
            int lastTokenIndex = tokenEntries.size() - 1;
            TokenEntry lastToken = tokenEntries.getTokens().get(lastTokenIndex);
            lastToken.setImage(prevIdentifier);
        }
    }
}
public void tokenize(SourceCode tokens, Tokens tokenEntries) throws IOException {
    tokenEntries.add(new TokenEntry("t1", "src", 1));
    tokenEntries.add(new TokenEntry("t2", "src", 1));
    tokenEntries.add(new TokenEntry("t3", "src", 2));
    tokenEntries.add(new TokenEntry("t1", "src", 4));
    tokenEntries.add(new TokenEntry("t3", "src", 4));
    tokenEntries.add(new TokenEntry("t3", "src", 4));
    tokenEntries.add(TokenEntry.getEOF());
}
@Override
public void tokenize(SourceCode sourceCode, Tokens tokenEntries) {
    final String fileName = sourceCode.getFileName();
    final JavaTokenFilter tokenFilter = createTokenFilter(sourceCode);
    final ConstructorDetector constructorDetector = new ConstructorDetector(ignoreIdentifiers);
    Token currentToken = (Token) tokenFilter.getNextToken();
    while (currentToken != null) {
        processToken(tokenEntries, fileName, currentToken, constructorDetector);
        currentToken = (Token) tokenFilter.getNextToken();
    }
    tokenEntries.add(TokenEntry.getEOF());
}
private void processToken(Tokens tokenEntries, String fileName, Token currentToken,
        ConstructorDetector constructorDetector) {
    String image = currentToken.image;

    constructorDetector.restoreConstructorToken(tokenEntries, currentToken);

    if (ignoreLiterals && (currentToken.kind == JavaParserConstants.STRING_LITERAL
            || currentToken.kind == JavaParserConstants.CHARACTER_LITERAL
            || currentToken.kind == JavaParserConstants.DECIMAL_LITERAL
            || currentToken.kind == JavaParserConstants.FLOATING_POINT_LITERAL)) {
        image = String.valueOf(currentToken.kind);
    }
    if (ignoreIdentifiers && currentToken.kind == JavaParserConstants.IDENTIFIER) {
        image = String.valueOf(currentToken.kind);
    }

    constructorDetector.processToken(currentToken);
    tokenEntries.add(new TokenEntry(image, fileName, currentToken.beginLine));
}
public MatchAlgorithm(Map<String, SourceCode> sourceCode, Tokens tokens, int min, CPDListener listener) {
    this.source = sourceCode;
    this.tokens = tokens;
    this.code = tokens.getTokens();
    this.min = min;
    this.cpdListener = listener;
    for (int i = 0; i < min; i++) {
        lastMod *= MOD;
    }
}
@SuppressWarnings("PMD.JumbledIncrementer")
private Map<TokenEntry, Object> hash() {
    Map<TokenEntry, Object> markGroups = new HashMap<>(tokens.size());
    for (int i = code.size() - 1; i >= 0; i--) {
        TokenEntry token = code.get(i);
        if (token != TokenEntry.EOF) {
            int last = tokenAt(min, token).getIdentifier();
            lastHash = MOD * lastHash + token.getIdentifier() - lastMod * last;
            token.setHashCode(lastHash);
            Object o = markGroups.get(token);
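The update inside the loop above is a Rabin-Karp style rolling hash over a window of min token identifiers; lastMod is MOD multiplied by itself min times, precomputed in the MatchAlgorithm constructor shown earlier. A minimal standalone sketch of the same update rule over plain int identifiers (the array, MOD value, and window size below are made-up example data, not PMD's):

// Illustrative only: mirrors the lastHash update in MatchAlgorithm.hash(),
// scanning right to left so each position reuses the hash of the position after it.
public final class RollingHashSketch {
    private static final int MOD = 37;           // example modulus, not PMD's constant

    public static void main(String[] args) {
        int[] ids = {7, 3, 9, 3, 7, 1, 9};       // stand-ins for TokenEntry identifiers
        int min = 3;                              // window size (CPD's minimum tile size)

        int lastMod = 1;
        for (int i = 0; i < min; i++) {
            lastMod *= MOD;                       // same precomputation as the constructor above
        }

        int lastHash = 0;
        for (int i = ids.length - 1; i >= 0; i--) {
            // the identifier falling out of the window sits min positions to the right (0 past the end)
            int leaving = (i + min < ids.length) ? ids[i + min] : 0;
            lastHash = MOD * lastHash + ids[i] - lastMod * leaving;
            System.out.println("hash at index " + i + " = " + lastHash);
        }
    }
}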
Tokens tokens = new Tokens();                    // create Tokens instance (once, outside the loop)
while (it1.hasNext()) {                          // init
    while (stringTokenizer.hasMoreElements()) {
        // set tokens fields
    }
    System.out.println(tokens);                  // you print the current Tokens instance
}                                                // while (it1.hasNext())
return tokens;                                   // only returns one Tokens, in its last state
@Override
public void tokenize(final SourceCode sourceCode, final Tokens tokenEntries) {
    AntlrTokenManager tokenManager = getLexerForSource(sourceCode);
    try {
        AntlrToken token = (AntlrToken) tokenManager.getNextToken();
        while (token.getType() != Token.EOF) {
            if (!token.isHidden()) {
                final TokenEntry tokenEntry =
                        new TokenEntry(token.getImage(), tokenManager.getFileName(), token.getBeginLine());
                tokenEntries.add(tokenEntry);
            }
            token = (AntlrToken) tokenManager.getNextToken();
        }
    } catch (final AntlrTokenManager.ANTLRSyntaxError err) {
        // Wrap exceptions of the ANTLR tokenizer in a TokenMgrError, so they are correctly handled
        // when CPD is executed with the '--skipLexicalErrors' command line option
        throw new TokenMgrError("Lexical error in file " + tokenManager.getFileName() + " at line "
                + err.getLine() + ", column " + err.getColumn() + ". Encountered: " + err.getMessage(),
                TokenMgrError.LEXICAL_ERROR);
    } finally {
        tokenEntries.add(TokenEntry.getEOF());
    }
}
@Override
public boolean consume(CodeReader code, Tokens cpdTokens) {
    if (code.popTo(matcher, token) > 0) {
        cpdTokens.add(new TokenEntry(token.toString(), fileName, code.getLinePosition()));
        token = new StringBuilder();
        return true;
    } else {
        return false;
    }
}
public static List<Tokens> getTokens(Vector<NameBuffer> vc1) {
    List<Tokens> tokenList = new ArrayList<Tokens>();    // create Tokens list to return
    while (it1.hasNext()) {                              // init
        Tokens tokens = new Tokens();                    // create Tokens instance inside the loop
        tokenList.add(tokens);                           // add to the list to return
        while (stringTokenizer.hasMoreElements()) {
            // set tokens fields -- unchanged
        }
        System.out.println(tokens);                      // print the current Tokens instance
    }                                                    // while (it1.hasNext())
    return tokenList;                                    // return the whole list, not just the last instance
}
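The difference between the two getTokens variants above is the usual reuse-one-instance bug: when the single Tokens is created outside the loop, every pass overwrites the same object, so the caller only ever sees the last iteration's state. A small self-contained demo of that effect with a stand-in class (StandInTokens and its field are invented for illustration, not PMD types):

import java.util.ArrayList;
import java.util.List;

public final class InstancePerIterationDemo {
    // Stand-in for the Tokens bean from the question; not a PMD class.
    static final class StandInTokens {
        String field;
        @Override public String toString() { return "field=" + field; }
    }

    public static void main(String[] args) {
        // Buggy variant: one instance, overwritten each pass and added three times.
        StandInTokens shared = new StandInTokens();
        List<StandInTokens> buggy = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            shared.field = "value-" + i;
            buggy.add(shared);                  // same reference added every time
        }
        System.out.println(buggy);              // [field=value-2, field=value-2, field=value-2]

        // Fixed variant: a fresh instance per iteration, as in the corrected getTokens above.
        List<StandInTokens> fixed = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            StandInTokens tokens = new StandInTokens();
            tokens.field = "value-" + i;
            fixed.add(tokens);
        }
        System.out.println(fixed);              // [field=value-0, field=value-1, field=value-2]
    }
}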
@Override
public void tokenize(SourceCode sourceCode, Tokens tokenEntries) {
    StringBuilder sb = sourceCode.getCodeBuffer();
    try (BufferedReader reader = new BufferedReader(new CharArrayReader(sb.toString().toCharArray()))) {
        int lineNumber = 1;
        String line = reader.readLine();
        while (line != null) {
            StringTokenizer tokenizer = new StringTokenizer(line, TOKENS, true);
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                if (!" ".equals(token) && !"\t".equals(token)) {
                    tokenEntries.add(new TokenEntry(token, sourceCode.getFileName(), lineNumber));
                }
            }
            // advance iteration variables
            line = reader.readLine();
            lineNumber++;
        }
    } catch (IOException ignored) {
        ignored.printStackTrace();
    } finally {
        tokenEntries.add(TokenEntry.getEOF());
    }
}