/**
 * Computes an MD5 hash of each line of the file, after removing all blank
 * characters, for use in issue tracking.
 *
 * @param f the file whose lines are hashed
 * @param consumer receives the computed hash for each line
 * @throws IllegalStateException wrapping any {@link IOException} raised while reading the file
 */
public static void computeLineHashesForIssueTracking(InputFile f, LineHashConsumer consumer) {
  try {
    // LineHashComputer strips the blank chars and feeds per-line digests to the consumer.
    readFile(f.inputStream(), f.charset(), f.absolutePath(), new CharHandler[] {new LineHashComputer(consumer, f.file())});
  } catch (IOException e) {
    throw new IllegalStateException("Failed to compute line hashes for " + f.absolutePath(), e);
  }
}
} // closing brace of the enclosing class (its opening brace is outside this chunk)
/**
 * Scans the file line by line and raises one issue at every occurrence of {@code TAG}.
 *
 * @throws IllegalStateException wrapping any {@link IOException} raised while reading the file
 */
@Override
protected void processFile(InputFile inputFile, SensorContext context, RuleKey ruleKey, String languageKey) {
  // 1-based line number; a single-element array lets the lambda below mutate it
  // (locals captured by lambdas must be effectively final).
  int[] currentLine = {1};
  try (InputStreamReader streamReader = new InputStreamReader(inputFile.inputStream(), inputFile.charset());
       BufferedReader reader = new BufferedReader(streamReader)) {
    reader.lines().forEachOrdered(line -> {
      // Report every occurrence of TAG on this line, not only the first one.
      int tagIndex = line.indexOf(TAG);
      while (tagIndex != -1) {
        NewIssue issue = context.newIssue();
        issue
          .forRule(ruleKey)
          .at(issue.newLocation()
            .on(inputFile)
            .at(inputFile.newRange(currentLine[0], tagIndex, currentLine[0], tagIndex + TAG.length())))
          .save();
        tagIndex = line.indexOf(TAG, tagIndex + 1);
      }
      currentLine[0]++;
    });
  } catch (IOException e) {
    throw new IllegalStateException("Fail to process " + inputFile, e);
  }
}
} // closing brace of the enclosing class (its opening brace is outside this chunk)
// NOTE(review): fragment — the enclosing method and the lambda body are outside
// this chunk, so braces opened here are closed elsewhere.
// Decodes the file content using its declared charset and streams it line by
// line; both readers are closed automatically by try-with-resources.
try (InputStreamReader isr = new InputStreamReader(inputFile.inputStream(), inputFile.charset());
     BufferedReader reader = new BufferedReader(isr)) {
  reader.lines().forEachOrdered(lineStr -> {
/**
 * Feeds the CPD (duplication) index with the blocks extracted from each source file.
 *
 * @param sourceFiles the files to tokenize, chunk and index
 * @throws IllegalStateException if a file cannot be found, read, or chunked
 */
private void createIndex(Iterable<InputFile> sourceFiles) {
  // Chunkers are stateless across files, so they are built once up front.
  TokenChunker tokenChunker = JavaTokenProducer.build();
  StatementChunker statementChunker = JavaStatementBuilder.build();
  BlockChunker blockChunker = new BlockChunker(BLOCK_SIZE);

  for (InputFile sourceFile : sourceFiles) {
    LOG.debug("Populating index from {}", sourceFile);
    String effectiveKey = sourceFile.key();

    // Tokenize the file and group the tokens into statements; the reader (and
    // underlying stream) is closed by try-with-resources.
    List<Statement> statements;
    try (InputStream stream = sourceFile.inputStream();
         Reader charReader = new InputStreamReader(stream, sourceFile.charset())) {
      statements = statementChunker.chunk(tokenChunker.chunk(charReader));
    } catch (FileNotFoundException e) {
      throw new IllegalStateException("Cannot find file " + sourceFile.file(), e);
    } catch (IOException e) {
      throw new IllegalStateException("Exception handling file: " + sourceFile.file(), e);
    }

    // Turn the statements into hashable blocks keyed by the file's effective key.
    List<Block> blocks;
    try {
      blocks = blockChunker.chunk(effectiveKey, statements);
    } catch (Exception e) {
      throw new IllegalStateException("Cannot process file " + sourceFile.file(), e);
    }
    index.insert(sourceFile, blocks);
  }
}
/**
 * Opens the content stream of the given {@link InputFile}.
 *
 * @param file the file to open
 * @return the file's {@code InputStream}
 * @throws UncheckedIOException wrapping any {@link IOException} raised while
 *         opening the stream, so callers in lambda contexts need no checked handling
 */
public static InputStream getInputStream(InputFile file) {
  try {
    return file.inputStream();
  } catch (IOException e) {
    // Re-throw unchecked; the original cause is preserved.
    throw new UncheckedIOException(e);
  }
}
// Wraps the plain IO file as an InputFile, then opens its content stream.
private InputStream getInputStream(File file) throws IOException {
  InputFile wrapped = inputFromIOFile(file);
  return wrapped.inputStream();
}
/**
 * Opens a content stream for the given IO file by first adapting it to an {@code InputFile}.
 *
 * @param file the file to read
 * @return the opened stream; the caller is responsible for closing it
 * @throws IOException if the stream cannot be opened
 */
private InputStream getInputStream(File file) throws IOException {
  return inputFromIOFile(file).inputStream();
}
/**
 * Computes an MD5 hash of each line of the file, after removing all blank
 * characters, for use in issue tracking.
 *
 * @param f the file whose lines are hashed
 * @param consumer receives the computed hash for each line
 * @throws IllegalStateException wrapping any {@link IOException} raised while reading the file
 */
public static void computeLineHashesForIssueTracking(InputFile f, LineHashConsumer consumer) {
  // A single handler does the work: it strips blank chars and pushes per-line digests.
  CharHandler[] handlers = {new LineHashComputer(consumer, f.file())};
  try {
    readFile(f.inputStream(), f.charset(), f.absolutePath(), handlers);
  } catch (IOException e) {
    throw new IllegalStateException("Failed to compute line hashes for " + f.absolutePath(), e);
  }
}
} // closing brace of the enclosing class (its opening brace is outside this chunk)
/**
 * Scans the file line by line and raises one issue at every occurrence of {@code TAG}.
 *
 * @throws IllegalStateException wrapping any {@link IOException} raised while reading the file
 */
@Override
protected void processFile(InputFile inputFile, SensorContext context, RuleKey ruleKey, String languageKey) {
  try {
    // 1-based line number; a single-element array lets the lambda below mutate it
    // (locals captured by lambdas must be effectively final).
    int[] lineCounter = {1};
    try (InputStreamReader isr = new InputStreamReader(inputFile.inputStream(), inputFile.charset());
         BufferedReader reader = new BufferedReader(isr)) {
      reader.lines().forEachOrdered(lineStr -> {
        int startIndex = -1;
        // Report every occurrence of TAG on this line, not only the first one.
        while ((startIndex = lineStr.indexOf(TAG, startIndex + 1)) != -1) {
          NewIssue newIssue = context.newIssue();
          newIssue
            .forRule(ruleKey)
            .at(newIssue.newLocation()
              .on(inputFile)
              .at(inputFile.newRange(lineCounter[0], startIndex, lineCounter[0], startIndex + TAG.length())))
            .save();
        }
        lineCounter[0]++;
      });
    }
  } catch (IOException e) {
    throw new IllegalStateException("Fail to process " + inputFile, e);
  }
}
} // closing brace of the enclosing class (its opening brace is outside this chunk)
// NOTE(review): fragment — the enclosing method and the lambda body are outside
// this chunk, so braces opened here are closed elsewhere.
// Decodes the file content using its declared charset and streams it line by
// line; both readers are closed automatically by try-with-resources.
try (InputStreamReader isr = new InputStreamReader(inputFile.inputStream(), inputFile.charset());
     BufferedReader reader = new BufferedReader(isr)) {
  reader.lines().forEachOrdered(lineStr -> {
LOG.debug("Generating ParseTree for dump file {}", file);
// Time the parse so the cumulative parsing cost can be reported; parseTime is a
// field declared outside this chunk.
long time = System.currentTimeMillis();
ParseTree tree = DumpFileUtils.getDumpFileParseTree(file.inputStream(), file.charset());
parseTime += (System.currentTimeMillis() - time);
/**
 * Feeds the CPD (duplication) index with the blocks extracted from each source file.
 *
 * @param sourceFiles the files to tokenize, chunk and index
 * @throws IllegalStateException if a file cannot be found, read, or chunked
 */
private void createIndex(Iterable<InputFile> sourceFiles) {
  // Chunkers are stateless across files, so they are built once up front.
  TokenChunker tokenChunker = JavaTokenProducer.build();
  StatementChunker statementChunker = JavaStatementBuilder.build();
  BlockChunker blockChunker = new BlockChunker(BLOCK_SIZE);
  for (InputFile inputFile : sourceFiles) {
    LOG.debug("Populating index from {}", inputFile);
    String resourceEffectiveKey = inputFile.key();
    // Tokenize the file and group the tokens into statements; the reader (and
    // underlying stream) is closed by try-with-resources.
    List<Statement> statements;
    try (InputStream is = inputFile.inputStream();
         Reader reader = new InputStreamReader(is, inputFile.charset())) {
      statements = statementChunker.chunk(tokenChunker.chunk(reader));
    } catch (FileNotFoundException e) {
      throw new IllegalStateException("Cannot find file " + inputFile.file(), e);
    } catch (IOException e) {
      throw new IllegalStateException("Exception handling file: " + inputFile.file(), e);
    }
    // Turn the statements into hashable blocks keyed by the file's effective key.
    List<Block> blocks;
    try {
      blocks = blockChunker.chunk(resourceEffectiveKey, statements);
    } catch (Exception e) {
      throw new IllegalStateException("Cannot process file " + inputFile.file(), e);
    }
    index.insert(inputFile, blocks);
  }
}
@Test
void test_failure() throws Exception {
  // A file whose inputStream() throws, to exercise the sensor's error handling.
  InputFile failing = createInputFile("lets.go", InputFile.Type.MAIN,
    "package main \n" +
    "\n" +
    "func test() {\n" +
    " pwd := \"secret\"\n" +
    "}");
  failing = spy(failing);
  IOException streamFailure = new IOException();
  when(failing.inputStream()).thenThrow(streamFailure);
  sensorContext.fileSystem().add(failing);
  // A nonexistent coverage report path additionally triggers the "not found" warning.
  sensorContext.settings().setProperty("sonar.go.coverage.reportPaths", "invalid-coverage-path.out");

  GoSensor sensor = getSensor("S2068");
  sensor.execute(sensorContext);

  // Both failures must be reported at ERROR level without aborting the analysis.
  String errorLog = logTester.logs(LoggerLevel.ERROR).stream().collect(Collectors.joining("\n"));
  assertThat(errorLog)
    .contains("Error analyzing file lets.go")
    .contains("Coverage report can't be loaded, report file not found, ignoring this file invalid-coverage-path.out.");
}
// NOTE(review): fragment — the try body continues past this chunk; the closing
// brace is outside it.
// Opens the file content and feeds it to the UAST generator; the stream is
// closed automatically by try-with-resources.
try (InputStream inputStream = inputFile.inputStream()) {
  UastNode uast = uastGenerator.createUast(inputStream);
LOG.info("Analyzing {}", file);
// Parse the database dump (using the file's declared charset) into a structural
// description; the table-count measure below is derived from it.
// NOTE(review): fragment — the withValue(...) call continues past this chunk.
DatabaseDescription desc = DumpFileUtils.getDatabaseDescription(file.inputStream(), file.charset(), Files.getNameWithoutExtension(file.filename()));
sensorContext.newMeasure().on(file).forMetric((Metric) OpenEdgeMetrics.NUM_TABLES).withValue(
// Builds syntax highlighting for a dump file by lexing it with the
// ANTLR-generated grammar lexer.
// NOTE(review): fragment — the method body continues past this chunk.
// NOTE(review): CharStreams.fromStream decodes as UTF-8 by default; unlike the
// other readers in this codebase, file.charset() is not passed here — confirm
// dump files are guaranteed to be UTF-8.
private void highlightFile(SensorContext context, InputFile file) throws IOException {
  DumpFileGrammarLexer lexer = new DumpFileGrammarLexer(CharStreams.fromStream(file.inputStream()));
  NewHighlighting highlighting = context.newHighlighting().onFile(file);