public MatchAlgorithm(Map<String, SourceCode> sourceCode, Tokens tokens, int min, CPDListener listener) { this.source = sourceCode; this.tokens = tokens; this.code = tokens.getTokens(); this.min = min; this.cpdListener = listener; for (int i = 0; i < min; i++) { lastMod *= MOD; } }
/**
 * Tokenizes the given source, skipping the whole file on lexical errors.
 * On a {@code TokenMgrError} the file is reported to stderr and the token
 * list is rolled back to the snapshot taken before tokenizing, so one bad
 * file does not poison the accumulated token stream.
 *
 * @param sourceCode the source file to tokenize
 * @throws IOException if reading the source fails
 */
private void addAndSkipLexicalErrors(SourceCode sourceCode) throws IOException {
    List<TokenEntry> entries = tokens.getTokens();
    // Snapshot so we can roll back if the lexer blows up mid-file.
    TokenEntry.State snapshot = new TokenEntry.State(entries);
    try {
        addAndThrowLexicalError(sourceCode);
    } catch (TokenMgrError err) {
        // Best-effort skip: report the unparseable file and restore the previous state.
        System.err.println("Skipping " + sourceCode.getFileName() + ". Reason: " + err.getMessage());
        entries.clear();
        entries.addAll(snapshot.restore());
    }
}
public void restoreConstructorToken(Tokens tokenEntries, Token currentToken) { if (!ignoreIdentifiers) { return; } if (currentToken.kind == JavaParserConstants.LPAREN) { // was the previous token a constructor? If so, restore the // identifier if (!classMembersIndentations.isEmpty() && classMembersIndentations.peek().name.equals(prevIdentifier)) { int lastTokenIndex = tokenEntries.size() - 1; TokenEntry lastToken = tokenEntries.getTokens().get(lastTokenIndex); lastToken.setImage(prevIdentifier); } } } }
/**
 * Tokenizes the configured source and asserts the token count matches
 * the expected value.
 *
 * @throws IOException if reading the source fails
 */
protected void tokenizeTest() throws IOException {
    Tokens actual = new Tokens();
    tokenizer.tokenize(sourceCode, actual);
    assertEquals(expectedTokenCount, actual.getTokens().size());
}
/**
 * Tokenizes the given reader's content and converts the result into
 * {@link TokensLine}s. The shared token-image table is reset both before
 * and after tokenizing so state does not leak between files.
 *
 * @param fileName   name associated with the content (for reporting)
 * @param fileReader source of the content to tokenize
 * @return the tokenized lines
 */
public List<TokensLine> chunk(String fileName, Reader fileReader) {
    SourceCode code = new SourceCode(new FileCodeLoaderWithoutCache(fileName, fileReader));
    Tokens collected = new Tokens();
    TokenEntry.clearImages();
    try {
        tokenizer.tokenize(code, collected);
    } catch (RuntimeException e) {
        // Rethrow unchecked exceptions as-is to avoid double wrapping.
        throw e;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    TokenEntry.clearImages();
    return convert(collected.getTokens());
}
/**
 * Builds the algorithm state from the collected tokens.
 *
 * @param sourceCode file name to source mapping used when resolving matches
 * @param tokens     all tokens collected from the analyzed files
 * @param min        minimum number of tokens a duplicate must span
 * @param listener   receives progress notifications
 */
public MatchAlgorithm(Map<String, SourceCode> sourceCode, Tokens tokens, int min, CPDListener listener) {
    this.source = sourceCode;
    this.tokens = tokens;
    this.code = tokens.getTokens();
    this.min = min;
    this.cpdListener = listener;
    // Precompute MOD^min; NOTE(review): presumably the rolling-hash factor
    // for the window's leading token — confirm where lastMod is consumed.
    for (int step = 0; step < min; step++) {
        lastMod *= MOD;
    }
}
/**
 * Attempts to tokenize one source file; if the lexer raises a
 * {@code TokenMgrError}, the file is skipped with a message on stderr and
 * the token list is restored to its state before the attempt.
 *
 * @param sourceCode the file to tokenize
 * @throws IOException if reading the file fails
 */
private void addAndSkipLexicalErrors(SourceCode sourceCode) throws IOException {
    TokenEntry.State before = new TokenEntry.State(tokens.getTokens());
    try {
        addAndThrowLexicalError(sourceCode);
    } catch (TokenMgrError lexError) {
        // Deliberate skip-on-error: one unlexable file must not abort the run.
        System.err.println(
                "Skipping " + sourceCode.getFileName() + ". Reason: " + lexError.getMessage());
        tokens.getTokens().clear();
        tokens.getTokens().addAll(before.restore());
    }
}
public void restoreConstructorToken(Tokens tokenEntries, Token currentToken) { if (!ignoreIdentifiers) { return; } if (currentToken.kind == JavaParserConstants.LPAREN) { // was the previous token a constructor? If so, restore the // identifier if (!classMembersIndentations.isEmpty() && classMembersIndentations.peek().name.equals(prevIdentifier)) { int lastTokenIndex = tokenEntries.size() - 1; TokenEntry lastToken = tokenEntries.getTokens().get(lastTokenIndex); lastToken.setImage(prevIdentifier); } } } }
/**
 * Runs the tokenizer over the test source and verifies that exactly
 * {@code expectedTokenCount} tokens were produced.
 *
 * @throws IOException if reading the source fails
 */
protected void tokenizeTest() throws IOException {
    Tokens tokens = new Tokens();
    tokenizer.tokenize(sourceCode, tokens);
    List<TokenEntry> produced = tokens.getTokens();
    int actualCount = produced.size();
    assertEquals(expectedTokenCount, actualCount);
}
/**
 * Tokenizes the given file (using the configured encoding) and converts
 * the result into {@link TokensLine}s. The shared token-image table is
 * reset before and after tokenizing so state does not leak between files.
 *
 * @param file the file to tokenize
 * @return the tokenized lines
 * @throws RuntimeException wrapping any {@link IOException} from tokenizing
 */
public List<TokensLine> chunk(File file) {
    SourceCode sourceCode = new SourceCode(new FileCodeLoaderWithoutCache(file, encoding));
    Tokens tokens = new Tokens();
    TokenEntry.clearImages();
    try {
        tokenizer.tokenize(sourceCode, tokens);
    } catch (IOException e) {
        // Guava's Throwables.propagate is deprecated; for a checked IOException it
        // always did "new RuntimeException(e)", so wrap directly — identical
        // behavior, and consistent with the chunk(String, Reader) overload.
        throw new RuntimeException(e);
    }
    TokenEntry.clearImages();
    return convert(tokens.getTokens());
}