For IntelliJ IDEA, Android Studio, or Eclipse



/**
 * Verifies that every measured line number fits inside the file.
 *
 * @param m         measures keyed by 1-based line number
 * @param inputFile the file the measures refer to
 * @throws IllegalStateException if any key points past the last line of the file
 */
private static void validateMaxLine(Map<Integer, Integer> m, InputFile inputFile) {
  int lineCount = inputFile.lines();
  for (int measuredLine : m.keySet()) {
    if (measuredLine > lineCount) {
      throw new IllegalStateException(
        String.format("Can't create measure for line %d for file '%s' with %d lines", measuredLine, inputFile, lineCount));
    }
  }
}
/**
 * Records a zero-hit coverage entry for every executable line of the file.
 *
 * @param context   sensor context used to create the coverage object
 * @param f         file being covered
 * @param execLines measure holding "line=executableFlag" pairs as a serialized string
 */
private static void storeZeroCoverageForEachExecutableLine(final SensorContext context, InputFile f, DefaultMeasure<String> execLines) {
  NewCoverage coverage = context.newCoverage().onFile(f);
  Map<Integer, Integer> executableByLine = KeyValueFormat.parseIntInt((String) execLines.value());
  executableByLine.forEach((line, executable) -> {
    // only lines inside the file that are flagged executable get a 0-hit entry
    if (line <= f.lines() && executable > 0) {
      coverage.lineHits(line, 0);
    }
  });
  coverage.save();
}
public static FileHashes create(InputFile f) { final byte[][] hashes = new byte[f.lines()][]; FileMetadata.computeLineHashesForIssueTracking(f, (lineIdx, hash) -> hashes[lineIdx - 1] = hash); int size = hashes.length; Multimap<String, Integer> linesByHash = LinkedHashMultimap.create(); String[] hexHashes = new String[size]; for (int i = 0; i < size; i++) { String hash = hashes[i] != null ? Hex.encodeHexString(hashes[i]) : ""; hexHashes[i] = hash; // indices in array are shifted one line before linesByHash.put(hash, i + 1); } return new FileHashes(hexHashes, linesByHash); }
/**
 * Highlights the first line of the first indexed file as a constant and its
 * last line as a comment.
 *
 * @param context sensor context giving access to the file system and highlighting API
 * @throws IllegalStateException if no file has been indexed
 */
@Override
public void execute(SensorContext context) {
  Iterator<InputFile> allFiles = context.fileSystem().inputFiles(context.fileSystem().predicates().all()).iterator();
  if (!allFiles.hasNext()) {
    throw new IllegalStateException("No files indexed");
  }
  InputFile target = allFiles.next();
  // first line -> CONSTANT
  context.newHighlighting()
    .onFile(target)
    .highlight(target.selectLine(1), TypeOfText.CONSTANT)
    .save();
  // last line -> COMMENT
  context.newHighlighting()
    .onFile(target)
    .highlight(target.selectLine(target.lines()), TypeOfText.COMMENT)
    .save();
}
}
/**
 * Raises one issue per line of the file, optionally forcing the severity and
 * attaching the configured gap / effort-to-fix value depending on the runtime
 * SonarQube version.
 *
 * @param file    file to raise issues on
 * @param context sensor context
 * @param repo    rule repository key the issues belong to
 */
private void createIssues(InputFile file, SensorContext context, String repo) {
  RuleKey ruleKey = RuleKey.of(repo, RULE_KEY);
  String forcedSeverity = context.settings().getString(FORCE_SEVERITY_PROPERTY);
  for (int currentLine = 1; currentLine <= file.lines(); currentLine++) {
    NewIssue issue = context.newIssue();
    issue
      .forRule(ruleKey)
      .at(issue.newLocation()
        .on(file)
        .at(file.selectLine(currentLine))
        .message("This issue is generated on each line"))
      // null means "no override"; only force when the property is set
      .overrideSeverity(forcedSeverity == null ? null : Severity.valueOf(forcedSeverity));
    // gap() replaced effortToFix() in API 5.5; call whichever the runtime supports
    if (context.getSonarQubeVersion().isGreaterThanOrEqual(Version.create(5, 5))) {
      issue.gap(context.settings().getDouble(EFFORT_TO_FIX_PROPERTY));
    } else {
      issue.effortToFix(context.settings().getDouble(EFFORT_TO_FIX_PROPERTY));
    }
    issue.save();
  }
}
private static void createIssues(InputFile file, SensorContext context, String repo) { RuleKey ruleKey = RuleKey.of(repo, RULE_KEY); for (int line = 1; line <= file.lines(); line++) { TextRange text = file.selectLine(line); // do not count empty lines, which can be a pain with end-of-file return if (text.end().lineOffset() == 0) { continue; } NewIssue newIssue = context.newIssue(); newIssue .forRule(ruleKey) .at(newIssue.newLocation() .on(file) .at(text) .message("This bug issue is generated on each line")) .save(); } }
/**
 * Receives the blame result for one file and serializes it as a Changesets
 * message in the scanner report. Changesets are deduplicated by revision, and
 * each line references its changeset by index. Synchronized because blame
 * providers may report from multiple threads.
 *
 * @param file  the blamed file; must be one of the files registered for blame
 * @param lines one blame line per file line, in order
 */
@Override
public synchronized void blameResult(InputFile file, List<BlameLine> lines) {
  Preconditions.checkNotNull(file);
  Preconditions.checkNotNull(lines);
  Preconditions.checkArgument(allFilesToBlame.contains(file), "It was not expected to blame file %s", file);
  // A line-count mismatch (e.g. file changed since checkout) is skipped, not fatal.
  if (lines.size() != file.lines()) {
    LOG.debug("Ignoring blame result since provider returned {} blame lines but file {} has {} lines", lines.size(), file, file.lines());
    return;
  }
  Builder scmBuilder = ScannerReport.Changesets.newBuilder();
  DefaultInputFile inputFile = (DefaultInputFile) file;
  scmBuilder.setComponentRef(inputFile.batchId());
  // One changeset entry per distinct revision; lines point at it by index.
  Map<String, Integer> changesetsIdByRevision = new HashMap<>();
  int lineId = 1;
  for (BlameLine line : lines) {
    validateLine(line, lineId, file);
    Integer changesetId = changesetsIdByRevision.get(line.revision());
    if (changesetId == null) {
      addChangeset(scmBuilder, line);
      // index of the changeset that was just appended to the builder
      changesetId = scmBuilder.getChangesetCount() - 1;
      changesetsIdByRevision.put(line.revision(), changesetId);
    }
    scmBuilder.addChangesetIndexByLine(changesetId);
    lineId++;
  }
  writer.writeComponentChangesets(scmBuilder.build());
  // Progress bookkeeping: this file is no longer pending.
  allFilesToBlame.remove(file);
  count++;
  progressReport.message(count + "/" + total + " files analyzed");
}
/**
 * Persists duplication measures and duplication details for one file.
 * Does nothing when there are no duplications.
 *
 * @param context      sensor context used to save measures
 * @param inputFile    file the duplications belong to
 * @param duplications detected clone groups, may be null or empty
 */
static void save(org.sonar.api.batch.sensor.SensorContext context, InputFile inputFile, @Nullable Iterable<CloneGroup> duplications) {
  if (duplications == null || Iterables.isEmpty(duplications)) {
    return;
  }
  Set<Integer> duplicatedLines = new HashSet<Integer>();
  int duplicatedBlocks = computeBlockAndLineCount(duplications, duplicatedLines);
  // 1 for each duplicated line, 0 otherwise, for every line of the file
  Map<Integer, Integer> duplicationByLine = new HashMap<Integer, Integer>();
  for (int lineNumber = 1; lineNumber <= inputFile.lines(); lineNumber++) {
    int flag = duplicatedLines.contains(lineNumber) ? 1 : 0;
    duplicationByLine.put(lineNumber, flag);
  }
  saveMeasures(context, inputFile, duplicatedLines, duplicatedBlocks, duplicationByLine);
  saveDuplications(context, inputFile, duplications);
}
/**
 * Sets the line the issue is raised on. Must be called after {@code onFile}.
 *
 * @param line 1-based line number, must be within the file
 * @return this, for chaining
 * @throws IllegalStateException    if no file was set beforehand
 * @throws IllegalArgumentException if {@code line} is out of range
 */
@Override
public DefaultIssue atLine(int line) {
  // instanceof is false for null, so the explicit null check was redundant
  Preconditions.checkState(this.path instanceof InputFile, "atLine should be called after onFile.");
  // Guava template form: the message is only rendered when the check fails,
  // avoiding string concatenation on the success path
  Preconditions.checkArgument(line > 0, "line starts at 1, invalid value %s.", line);
  int lines = ((InputFile) path).lines();
  Preconditions.checkArgument(line <= lines, "File %s has %s lines. Unable to create issue at line %s.", path, lines, line);
  this.line = line;
  return this;
}
/**
 * Captures a snapshot of basic metadata (relative path and line count) of an
 * input file.
 *
 * @param inputFile the file whose metadata is recorded
 */
FileData(InputFile inputFile) {
  this.filename = inputFile.relativePath();
  this.linesInFile = inputFile.lines();
}
/**
 * Returns the number of lines of the given file, as counted by the indexed
 * representation obtained via {@code inputFromIOFile}.
 *
 * @param file the plain java.io.File to look up
 * @return the file's line count
 */
public int fileLength(File file) { return inputFromIOFile(file).lines(); }