/**
 * Builds a {@link ClonePart} for the given resource and unit offset;
 * both line boundaries (lineStart, lineEnd) are left at zero.
 */
private ClonePart newClonePart(String resourceId, int unitStart) {
  final int unusedLine = 0;
  return new ClonePart(resourceId, unitStart, unusedLine, unusedLine);
}
/**
 * Factory helper: part located at {@code unitStart} in {@code resourceId};
 * the line range is not relevant for these checks and is set to (0, 0).
 */
private static ClonePart newClonePart(String resourceId, int unitStart) {
  ClonePart part = new ClonePart(resourceId, unitStart, 0, 0);
  return part;
}
/**
 * Builds a part starting at line {@code unitStart} and ending
 * {@code cloneUnitLength + LINES_PER_BLOCK - 1} lines further on
 * (the last unit still spans a whole block of lines).
 */
protected static ClonePart newClonePart(String resourceId, int unitStart, int cloneUnitLength) {
  int startLine = unitStart;
  int endLine = startLine + cloneUnitLength + LINES_PER_BLOCK - 1;
  return new ClonePart(resourceId, unitStart, startLine, endLine);
}
@Test
public void testDuplicationBetweenTwoFiles() throws IOException {
  // Index two resources that share a duplicated region, then run detection on the first one.
  File firstFile = new File("test-resources/org/sonar/duplications/cpd/CPDTest/CPDFile1.java");
  File secondFile = new File("test-resources/org/sonar/duplications/cpd/CPDTest/CPDFile2.java");
  addToIndex(firstFile);
  addToIndex(secondFile);

  List<CloneGroup> duplications = detect(firstFile);
  assertThat(duplications.size()).isEqualTo(1);

  CloneGroup group = duplications.get(0);
  // The origin must point at the analyzed (first) file.
  assertThat(group.getOriginPart().getResourceId()).isEqualTo(firstFile.getAbsolutePath());
  // Both occurrences start at unit 1 and cover lines 18..41 of their respective files.
  ClonePart expectedInFirst = new ClonePart(firstFile.getAbsolutePath(), 1, 18, 41);
  ClonePart expectedInSecond = new ClonePart(secondFile.getAbsolutePath(), 1, 18, 41);
  assertThat(group.getCloneParts()).containsOnly(expectedInFirst, expectedInSecond);
  assertThat(group.getLengthInUnits()).as("length in tokens").isEqualTo(115);
}
/**
 * Converts each (first block, last block) pair between the two groups into a
 * {@link ClonePart} and registers the resulting {@link CloneGroup} with the filter.
 * The part with the smallest unit start that belongs to {@code originResourceId}
 * becomes the group's origin.
 */
private void reportClones(BlocksGroup beginGroup, BlocksGroup endGroup, int cloneLength) {
  List<Block[]> pairs = beginGroup.pairs(endGroup, cloneLength);
  List<ClonePart> parts = new ArrayList<>();
  ClonePart origin = null;
  for (Block[] pair : pairs) {
    Block first = pair[0];
    Block last = pair[1];
    ClonePart part = new ClonePart(first.getResourceId(), first.getIndexInFile(), first.getStartLine(), last.getEndLine());
    // Track the earliest part (by unit start) located in the analyzed resource.
    if (originResourceId.equals(part.getResourceId())
      && (origin == null || part.getUnitStart() < origin.getUnitStart())) {
      origin = part;
    }
    parts.add(part);
  }
  filter.add(CloneGroup.builder().setLength(cloneLength).setOrigin(origin).setParts(parts).build());
}
@Test
public void should_save_two_duplicated_groups_involving_three_files() {
  // Two groups originating in component1, duplicated into component2 and component3 respectively.
  ClonePart firstOrigin = new ClonePart(batchComponent1.key(), 0, 5, 204);
  ClonePart firstDuplicate = new ClonePart(batchComponent2.key(), 0, 15, 214);
  ClonePart secondOrigin = new ClonePart(batchComponent1.key(), 0, 15, 214);
  ClonePart secondDuplicate = new ClonePart(batchComponent3.key(), 0, 15, 214);
  List<CloneGroup> groups = Arrays.asList(
    newCloneGroup(firstOrigin, firstDuplicate),
    newCloneGroup(secondOrigin, secondDuplicate));

  executor.saveDuplications(batchComponent1, groups);

  // One persisted duplication per group, in the order the groups were provided.
  Duplication[] dups = readDuplications(2);
  assertDuplication(dups[0], 5, 204, batchComponent2.scannerId(), 15, 214);
  assertDuplication(dups[1], 15, 214, batchComponent3.scannerId(), 15, 214);
}
@Test
public void should_save_duplication_on_same_file() {
  // Both parts live in the same file: the duplicate reference is persisted
  // without a component id (hence the null below).
  ClonePart origin = new ClonePart(batchComponent1.key(), 0, 5, 204);
  ClonePart duplicate = new ClonePart(batchComponent1.key(), 0, 215, 414);
  List<CloneGroup> groups = Collections.singletonList(newCloneGroup(origin, duplicate));

  executor.saveDuplications(batchComponent1, groups);

  Duplication[] dups = readDuplications(1);
  assertDuplication(dups[0], 5, 204, null, 215, 414);
}
@Test
public void should_limit_number_of_clones() {
  // Create MAX_CLONE_GROUP_PER_FILE + 1 groups on the same file: the executor must keep
  // only the first MAX_CLONE_GROUP_PER_FILE of them and log a warning.
  // (The old comment "1 origin part + 101 duplicates = 102" belonged to the
  // reference-limit test, which counts parts per group, not groups per file.)
  List<CloneGroup> dups = new ArrayList<>(CpdExecutor.MAX_CLONE_GROUP_PER_FILE + 1);
  for (int i = 0; i < CpdExecutor.MAX_CLONE_GROUP_PER_FILE + 1; i++) {
    ClonePart clonePart = new ClonePart(batchComponent1.key(), i, i, i + 1);
    ClonePart dupPart = new ClonePart(batchComponent1.key(), i + 1, i + 1, i + 2);
    dups.add(newCloneGroup(clonePart, dupPart));
  }
  executor.saveDuplications(batchComponent1, dups);
  // Only the first MAX_CLONE_GROUP_PER_FILE groups survive the cap.
  assertThat(reader.readComponentDuplications(batchComponent1.scannerId())).hasSize(CpdExecutor.MAX_CLONE_GROUP_PER_FILE);
  assertThat(logTester.logs(LoggerLevel.WARN))
    .contains("Too many duplication groups on file " + batchComponent1 + ". Keep only the first " + CpdExecutor.MAX_CLONE_GROUP_PER_FILE + " groups.");
}
@Test
public void should_save_single_duplication() {
  // One group, one duplicate located in a different component.
  ClonePart origin = new ClonePart(batchComponent1.key(), 0, 2, 4);
  ClonePart duplicate = new ClonePart(batchComponent2.key(), 0, 15, 17);
  List<CloneGroup> groups = Collections.singletonList(newCloneGroup(origin, duplicate));

  executor.saveDuplications(batchComponent1, groups);

  Duplication[] dups = readDuplications(1);
  assertDuplication(dups[0], 2, 4, batchComponent2.scannerId(), 15, 17);
}
@Test public void should_limit_number_of_references() { // 1 origin part + 101 duplicates = 102 List<ClonePart> parts = new ArrayList<>(CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2); for (int i = 0; i < CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2; i++) { parts.add(new ClonePart(batchComponent1.key(), i, i, i + 1)); } List<CloneGroup> groups = Collections.singletonList(CloneGroup.builder().setLength(0).setOrigin(parts.get(0)).setParts(parts).build()); executor.saveDuplications(batchComponent1, groups); Duplication[] dups = readDuplications(1); assertThat(dups[0].getDuplicateList()).hasSize(CpdExecutor.MAX_CLONE_PART_PER_GROUP); assertThat(logTester.logs(LoggerLevel.WARN)) .contains("Too many duplication references on file " + batchComponent1 + " for block at line 0. Keep only the first " + CpdExecutor.MAX_CLONE_PART_PER_GROUP + " references."); }
@Test
public void should_save_duplication_involving_three_files() {
  // One group whose origin is in component1 and which is duplicated into two other components.
  ClonePart origin = new ClonePart(batchComponent1.key(), 0, 5, 204);
  ClonePart firstDuplicate = new ClonePart(batchComponent2.key(), 0, 15, 214);
  ClonePart secondDuplicate = new ClonePart(batchComponent3.key(), 0, 25, 224);

  executor.saveDuplications(batchComponent1,
    Collections.singletonList(newCloneGroup(origin, firstDuplicate, secondDuplicate)));

  // A single persisted duplication carrying two duplicate references.
  Duplication[] dups = readDuplications(1);
  assertDuplication(dups[0], 5, 204, 2);
  assertDuplicate(dups[0].getDuplicate(0), batchComponent2.scannerId(), 15, 214);
  assertDuplicate(dups[0].getDuplicate(1), batchComponent3.scannerId(), 25, 224);
}
Block firstBlock = text.getBlock(b[0]); Block lastBlock = text.getBlock(b[1]); ClonePart part = new ClonePart( firstBlock.getResourceId(), firstBlock.getIndexInFile(),
// The clone spans one unit and has exactly two parts, both in resource "a":
// the origin at unit start 0 and a second occurrence at unit start 2
// (ClonePart args: resourceId, unitStart, lineStart, lineEnd).
assertThat(clone.getCloneUnitLength(), is(1));
assertThat(clone.getCloneParts().size(), is(2));
assertThat(clone.getOriginPart(), is(new ClonePart("a", 0, 0, 1)));
assertThat(clone.getCloneParts(), hasItem(new ClonePart("a", 0, 0, 1)));
assertThat(clone.getCloneParts(), hasItem(new ClonePart("a", 2, 0, 1)));
/**
 * Converts each (first block, last block) pair between the two groups into a
 * {@link ClonePart} and registers the resulting {@link CloneGroup} with the filter.
 * The part with the smallest unit start that belongs to {@code originResourceId}
 * becomes the group's origin.
 *
 * @param beginGroup  blocks at the beginning of the clone
 * @param endGroup    blocks at the end of the clone
 * @param cloneLength length of the clone in units
 */
private void reportClones(BlocksGroup beginGroup, BlocksGroup endGroup, int cloneLength) {
  List<Block[]> pairs = beginGroup.pairs(endGroup, cloneLength);
  ClonePart origin = null;
  List<ClonePart> parts = Lists.newArrayList();
  for (Block[] pair : pairs) {
    Block firstBlock = pair[0];
    Block lastBlock = pair[1];
    ClonePart part = new ClonePart(firstBlock.getResourceId(), firstBlock.getIndexInFile(), firstBlock.getStartLine(), lastBlock.getEndLine());
    // Keep the earliest part (by unit start) in the origin resource as the group origin.
    // Collapsed from a redundant nested if/else-if; same behavior, matches the sibling
    // reportClones implementation.
    if (originResourceId.equals(part.getResourceId())
      && (origin == null || part.getUnitStart() < origin.getUnitStart())) {
      origin = part;
    }
    parts.add(part);
  }
  filter.add(CloneGroup.builder().setLength(cloneLength).setOrigin(origin).setParts(parts).build());
}
Block firstBlock = text.getBlock(b[0]); Block lastBlock = text.getBlock(b[1]); ClonePart part = new ClonePart( firstBlock.getResourceId(), firstBlock.getIndexInFile(),