/**
 * Scans the file line by line and reports to the NOSONAR filter every line
 * containing a {@code //NOSONAR} marker.
 *
 * @throws IllegalStateException if the file cannot be read
 */
private void processFile(InputFile inputFile) {
  try {
    Set<Integer> noSonarLines = new HashSet<>();
    // Stream must be closed: Files.lines holds the file open until then.
    try (Stream<String> lines = Files.lines(inputFile.path(), inputFile.charset())) {
      int lineNumber = 1;
      Iterator<String> it = lines.iterator();
      while (it.hasNext()) {
        if (it.next().contains("//NOSONAR")) {
          noSonarLines.add(lineNumber);
        }
        lineNumber++;
      }
    }
    noSonarFilter.noSonarInFile(inputFile, noSonarLines);
  } catch (IOException e) {
    throw new IllegalStateException("Fail to process " + inputFile, e);
  }
}
}
/**
 * Reads the file and records the text pointer of every start/end issue marker.
 * Start markers map the issue id to the position just AFTER the marker
 * ({@code m.end()}); end markers map it to the position WHERE the marker
 * begins ({@code m.start()}).
 *
 * @param file           the file to scan
 * @param context        sensor context (currently unused, kept for API compatibility)
 * @param startPositions out-parameter: issue id -> start pointer
 * @param endPositions   out-parameter: issue id -> end pointer
 * @throws IllegalStateException if the file cannot be read
 */
private static void parseIssues(InputFile file, SensorContext context, Map<Integer, TextPointer> startPositions, Map<Integer, TextPointer> endPositions) {
  int currentLine = 0;
  try {
    for (String lineStr : Files.readAllLines(file.path(), file.charset())) {
      currentLine++;
      Matcher m = START_ISSUE_PATTERN.matcher(lineStr);
      while (m.find()) {
        Integer issueId = Integer.parseInt(m.group(1));
        TextPointer newPointer = file.newPointer(currentLine, m.end());
        startPositions.put(issueId, newPointer);
      }
      m = END_ISSUE_PATTERN.matcher(lineStr);
      while (m.find()) {
        Integer issueId = Integer.parseInt(m.group(1));
        TextPointer newPointer = file.newPointer(currentLine, m.start());
        endPositions.put(issueId, newPointer);
      }
    }
  } catch (IOException e) {
    // include the failing file in the message, consistent with sibling methods
    throw new IllegalStateException("Unable to read file " + file, e);
  }
}
int currentLine = 0; try { for (String lineStr : Files.readAllLines(file.path(), file.charset())) { currentLine++;
try { StringBuilder sb = new StringBuilder(); for (String line : FileUtils.readLines(inputFile.file(), inputFile.charset())) { int startOffset = 0; int endOffset = 0;
/**
 * Creates one issue per occurrence of {@code TAG} in the file, with a precise
 * text range covering the tag itself.
 *
 * @throws IllegalStateException if the file cannot be read
 */
@Override
protected void processFile(InputFile inputFile, SensorContext context, RuleKey ruleKey, String languageKey) {
  try (InputStreamReader streamReader = new InputStreamReader(inputFile.inputStream(), inputFile.charset());
       BufferedReader bufferedReader = new BufferedReader(streamReader)) {
    int[] currentLine = {1};
    bufferedReader.lines().forEachOrdered(line -> {
      // report every occurrence of the tag on this line
      for (int tagIndex = line.indexOf(TAG); tagIndex != -1; tagIndex = line.indexOf(TAG, tagIndex + 1)) {
        NewIssue issue = context.newIssue();
        issue
          .forRule(ruleKey)
          .at(issue.newLocation()
            .on(inputFile)
            .at(inputFile.newRange(currentLine[0], tagIndex, currentLine[0], tagIndex + TAG.length())))
          .save();
      }
      currentLine[0]++;
    });
  } catch (IOException e) {
    throw new IllegalStateException("Fail to process " + inputFile, e);
  }
}
}
/**
 * Computes an MD5 hash of each line of the file after stripping all blank
 * characters, feeding each hash to the given consumer for issue tracking.
 *
 * @throws IllegalStateException if reading the file fails
 */
public static void computeLineHashesForIssueTracking(InputFile f, LineHashConsumer consumer) {
  try {
    CharHandler[] handlers = {new LineHashComputer(consumer, f.file())};
    readFile(f.inputStream(), f.charset(), f.absolutePath(), handlers);
  } catch (IOException e) {
    throw new IllegalStateException("Failed to compute line hashes for " + f.absolutePath(), e);
  }
}
}
try (InputStreamReader isr = new InputStreamReader(inputFile.inputStream(), inputFile.charset()); BufferedReader reader = new BufferedReader(isr)) { reader.lines().forEachOrdered(lineStr -> {
/**
 * Feeds the duplication index: tokenizes every source file, chunks the tokens
 * into statements and the statements into blocks, then inserts the blocks.
 *
 * @throws IllegalStateException if a file is missing, unreadable, or cannot be chunked
 */
private void createIndex(Iterable<InputFile> sourceFiles) {
  TokenChunker tokenizer = JavaTokenProducer.build();
  StatementChunker statementBuilder = JavaStatementBuilder.build();
  BlockChunker blockBuilder = new BlockChunker(BLOCK_SIZE);
  for (InputFile inputFile : sourceFiles) {
    LOG.debug("Populating index from {}", inputFile);
    String resourceEffectiveKey = inputFile.key();
    List<Statement> statements;
    try (InputStream is = inputFile.inputStream();
         Reader reader = new InputStreamReader(is, inputFile.charset())) {
      statements = statementBuilder.chunk(tokenizer.chunk(reader));
    } catch (FileNotFoundException e) {
      throw new IllegalStateException("Cannot find file " + inputFile.file(), e);
    } catch (IOException e) {
      throw new IllegalStateException("Exception handling file: " + inputFile.file(), e);
    }
    List<Block> blocks;
    try {
      blocks = blockBuilder.chunk(resourceEffectiveKey, statements);
    } catch (Exception e) {
      throw new IllegalStateException("Cannot process file " + inputFile.file(), e);
    }
    index.insert(inputFile, blocks);
  }
}
/** Resolves the charset of the given IO file through its InputFile wrapper. */
private Charset getCharset(File file) {
  InputFile inputFile = inputFromIOFile(file);
  return inputFile.charset();
}
/** Looks up the charset declared on the InputFile corresponding to {@code file}. */
private Charset getCharset(File file) {
  return inputFromIOFile(file)
    .charset();
}
/**
 * Collects every line number containing a {@code //NOSONAR} marker and hands
 * the set to the NOSONAR filter.
 *
 * @throws IllegalStateException if the file cannot be read
 */
private void processFile(InputFile inputFile) {
  Set<Integer> linesWithNoSonar = new HashSet<>();
  // single-element array so the lambda can mutate the counter
  int[] currentLine = {1};
  try (Stream<String> lines = Files.lines(inputFile.path(), inputFile.charset())) {
    lines.forEachOrdered(content -> {
      if (content.contains("//NOSONAR")) {
        linesWithNoSonar.add(currentLine[0]);
      }
      currentLine[0]++;
    });
  } catch (IOException e) {
    throw new IllegalStateException("Fail to process " + inputFile, e);
  }
  noSonarFilter.noSonarInFile(inputFile, linesWithNoSonar);
}
}
/**
 * Reads the file and records the text pointer of every start/end issue marker.
 * A start marker's pointer is placed just after the match ({@code m.end()});
 * an end marker's pointer at the beginning of the match ({@code m.start()}).
 *
 * @param file           the file to scan
 * @param context        sensor context (currently unused, kept for API compatibility)
 * @param startPositions out-parameter: issue id -> start pointer
 * @param endPositions   out-parameter: issue id -> end pointer
 * @throws IllegalStateException if the file cannot be read
 */
private static void parseIssues(InputFile file, SensorContext context, Map<Integer, TextPointer> startPositions, Map<Integer, TextPointer> endPositions) {
  int currentLine = 0;
  try {
    for (String lineStr : Files.readAllLines(file.path(), file.charset())) {
      currentLine++;
      Matcher m = START_ISSUE_PATTERN.matcher(lineStr);
      while (m.find()) {
        Integer issueId = Integer.parseInt(m.group(1));
        TextPointer newPointer = file.newPointer(currentLine, m.end());
        startPositions.put(issueId, newPointer);
      }
      m = END_ISSUE_PATTERN.matcher(lineStr);
      while (m.find()) {
        Integer issueId = Integer.parseInt(m.group(1));
        TextPointer newPointer = file.newPointer(currentLine, m.start());
        endPositions.put(issueId, newPointer);
      }
    }
  } catch (IOException e) {
    // name the file in the message so failures are actionable
    throw new IllegalStateException("Unable to read file " + file, e);
  }
}
@Override public void scanFile(XmlFile file) { Charset charset = file.getInputFile().charset(); for (Node comment : getComments(file)) { if (visitedNodes.contains(comment)) { // already reported in previous issue continue; } List<Node> siblingComments = getNextCommentSiblings(comment); checkCommentBlock(siblingComments, charset); visitedNodes.addAll(siblingComments); } // clear for next XML file visitedNodes.clear(); }
int currentLine = 0; try { for (String lineStr : Files.readAllLines(file.path(), file.charset())) { currentLine++;
try { StringBuilder sb = new StringBuilder(); for (String line : FileUtils.readLines(inputFile.file(), inputFile.charset())) { int startOffset = 0; int endOffset = 0;
/**
 * Computes an MD5 hash per line of the file (blank characters removed first)
 * and forwards each hash to the consumer, for issue tracking.
 *
 * @throws IllegalStateException if reading the file fails
 */
public static void computeLineHashesForIssueTracking(InputFile f, LineHashConsumer consumer) {
  String path = f.absolutePath();
  CharHandler lineHasher = new LineHashComputer(consumer, f.file());
  try {
    readFile(f.inputStream(), f.charset(), path, new CharHandler[] {lineHasher});
  } catch (IOException e) {
    throw new IllegalStateException("Failed to compute line hashes for " + path, e);
  }
}
}
/**
 * Raises one issue per occurrence of {@code TAG} in the file; the issue range
 * covers exactly the tag text.
 *
 * @throws IllegalStateException if the file cannot be read
 */
@Override
protected void processFile(InputFile inputFile, SensorContext context, RuleKey ruleKey, String languageKey) {
  int[] lineNumber = {1};
  try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputFile.inputStream(), inputFile.charset()))) {
    reader.lines().forEachOrdered(line -> {
      reportTagOccurrences(line, lineNumber[0], inputFile, context, ruleKey);
      lineNumber[0]++;
    });
  } catch (IOException e) {
    throw new IllegalStateException("Fail to process " + inputFile, e);
  }
}

/** Creates one issue for every occurrence of {@code TAG} on the given line. */
private void reportTagOccurrences(String line, int lineNumber, InputFile inputFile, SensorContext context, RuleKey ruleKey) {
  int tagIndex = line.indexOf(TAG);
  while (tagIndex != -1) {
    NewIssue issue = context.newIssue();
    issue
      .forRule(ruleKey)
      .at(issue.newLocation()
        .on(inputFile)
        .at(inputFile.newRange(lineNumber, tagIndex, lineNumber, tagIndex + TAG.length())))
      .save();
    tagIndex = line.indexOf(TAG, tagIndex + 1);
  }
}
}
try (InputStreamReader isr = new InputStreamReader(inputFile.inputStream(), inputFile.charset()); BufferedReader reader = new BufferedReader(isr)) { reader.lines().forEachOrdered(lineStr -> {
/**
 * Populates the duplication index from the given source files: each file is
 * tokenized, chunked into statements, turned into blocks, and inserted.
 *
 * @throws IllegalStateException on missing/unreadable files or chunking failures
 */
private void createIndex(Iterable<InputFile> sourceFiles) {
  TokenChunker tokenChunker = JavaTokenProducer.build();
  StatementChunker stmtChunker = JavaStatementBuilder.build();
  BlockChunker chunker = new BlockChunker(BLOCK_SIZE);
  for (InputFile sourceFile : sourceFiles) {
    LOG.debug("Populating index from {}", sourceFile);
    String resourceEffectiveKey = sourceFile.key();
    List<Statement> statements;
    try (InputStream rawStream = sourceFile.inputStream();
         Reader decoded = new InputStreamReader(rawStream, sourceFile.charset())) {
      statements = stmtChunker.chunk(tokenChunker.chunk(decoded));
    } catch (FileNotFoundException e) {
      throw new IllegalStateException("Cannot find file " + sourceFile.file(), e);
    } catch (IOException e) {
      throw new IllegalStateException("Exception handling file: " + sourceFile.file(), e);
    }
    List<Block> blocks;
    try {
      blocks = chunker.chunk(resourceEffectiveKey, statements);
    } catch (Exception e) {
      throw new IllegalStateException("Cannot process file " + sourceFile.file(), e);
    }
    index.insert(sourceFile, blocks);
  }
}
LOG.info("Analyzing {}", file); DatabaseDescription desc = DumpFileUtils.getDatabaseDescription(file.inputStream(), file.charset(), Files.getNameWithoutExtension(file.filename())); sensorContext.newMeasure().on(file).forMetric((Metric) OpenEdgeMetrics.NUM_TABLES).withValue(