/**
 * Returns the serialized distribution once the builder has been fed data.
 *
 * @return the built distribution string, or an empty Optional when nothing
 *         was added ({@code initialized} is false) or the build produced null
 */
public Optional<String> getValue() {
  return initialized ? Optional.ofNullable(distribution.build()) : Optional.empty();
}
}
@Test
public void work_on_an_limits_array_copy() {
  // The builder must copy the caller's limits array defensively:
  // adding values and building must not mutate it.
  Integer[] bottomLimits = {4, 2, 0};
  new RangeDistributionBuilder(bottomLimits)
    .add(3.2)
    .add(2.0)
    .add(6.2)
    .build();
  assertThat(bottomLimits[0]).isEqualTo(4);
  assertThat(bottomLimits[1]).isEqualTo(2);
  assertThat(bottomLimits[2]).isEqualTo(0);
}
@Test
public void keep_int_ranges_when_merging_distributions() {
  // Integer limits must stay integers ("0", not "0.0") after merging.
  String merged = new RangeDistributionBuilder()
    .add("0=3;3=5;6=9")
    .add("0=0;3=2;6=1")
    .build();
  assertThat(merged).isEqualTo("0=3;3=7;6=10");
}
@Test
public void add_existing_double_distribution() {
  // Counts of two serialized distributions with identical double limits are summed.
  String merged = new RangeDistributionBuilder()
    .add("0.5=0;1.9=2;4.5=1")
    .add("0.5=1;1.9=3;4.5=1")
    .build();
  assertThat(merged).isEqualTo("0.5=1;1.9=5;4.5=2");
}
@Test
public void add_existing_integer_distribution() {
  // Counts of two serialized distributions with identical int limits are summed.
  String merged = new RangeDistributionBuilder()
    .add("0=0;2=2;4=1")
    .add("0=1;2=2;4=2")
    .build();
  assertThat(merged).isEqualTo("0=1;2=4;4=3");
}
@Test
public void aggregate_empty_distribution() {
  // Building without adding anything yields an empty string, not null.
  assertThat(new RangeDistributionBuilder().build()).isEmpty();
}
@Test
public void add_distribution_with_identical_limits() {
  // Two-bucket distributions with matching limits merge by summing counts.
  String merged = new RangeDistributionBuilder()
    .add("0=1;2=0")
    .add("0=3;2=5")
    .build();
  assertThat(merged).isEqualTo("0=4;2=5");
}
@Test
public void init_limits_at_the_first_add() {
  // With no explicit limits, the first added distribution defines the buckets.
  String merged = new RangeDistributionBuilder()
    .add("0.5=3;3.5=5;6.5=9")
    .add("0.5=0;3.5=2;6.5=1")
    .build();
  assertThat(merged).isEqualTo("0.5=3;3.5=7;6.5=10");
}
@Test
public void build_integer_distribution() {
  // Each value falls into the highest bucket whose bottom limit it reaches.
  String result = new RangeDistributionBuilder(new Integer[] {0, 2, 4})
    .add(3.2)
    .add(2.0)
    .add(6.2)
    .build();
  assertThat(result).isEqualTo("0=0;2=2;4=1");
}
@Test
public void value_lesser_than_minimum_is_ignored() {
  // A value below the lowest bottom limit is silently dropped.
  String result = new RangeDistributionBuilder(new Integer[] {0, 2, 4})
    .add(3.2)
    .add(2.0)
    .add(-3.0)
    .build();
  assertThat(result).isEqualTo("0=0;2=2;4=0");
}
@Test
public void build_double_distribution() {
  // Whole-number double limits serialize as integers ("0", not "0.0").
  String result = new RangeDistributionBuilder(new Double[] {0.0, 2.0, 4.0})
    .add(3.2)
    .add(2.0)
    .add(6.2)
    .build();
  assertThat(result).isEqualTo("0=0;2=2;4=1");
}
@Test
public void add_distribution_with_different_double_limits() {
  // Merging distributions whose double limits disagree is undefined: build() returns null.
  String merged = new RangeDistributionBuilder()
    .add("0.0=3;3.0=5")
    .add("0.0=3;3.0=5;6.0=9")
    .build();
  assertThat(merged).isNull();
}
@Test
public void add_distribution_with_different_int_limits() {
  // Merging distributions whose int limits disagree is undefined: build() returns null.
  String merged = new RangeDistributionBuilder()
    .add("0=1")
    .add("0=3;2=5")
    .build();
  assertThat(merged).isNull();
}
// Returns the serialized distribution once the builder has been fed data;
// otherwise an absent Optional. NOTE(review): this variant uses Guava's
// Optional (fromNullable/absent) — presumably a pre-Java-8 codebase; migrating
// to java.util.Optional would change the return type callers see, so it is
// left as-is.
public Optional<String> getValue() { if (initialized) { return Optional.fromNullable(distribution.build()); } return Optional.absent(); } }
/**
 * Saves the file-complexity distribution measure for the given file.
 *
 * @param context        sensor context used to create and save the measure
 * @param inputFile      file the measure is attached to
 * @param fileComplexity total complexity of the file, bucketed into FILES_DISTRIB_BOTTOM_LIMITS
 */
private static void saveFilesComplexityDistribution(SensorContext context, InputFile inputFile, int fileComplexity) {
  RangeDistributionBuilder distributionBuilder = new RangeDistributionBuilder(FILES_DISTRIB_BOTTOM_LIMITS);
  distributionBuilder.add(fileComplexity);
  context.<String>newMeasure()
    .on(inputFile)
    .forMetric(CoreMetrics.FILE_COMPLEXITY_DISTRIBUTION)
    .withValue(distributionBuilder.build())
    .save();
}
/**
 * Saves the function-complexity distribution measure for the given file,
 * bucketing each function's complexity into FUNCTIONS_DISTRIB_BOTTOM_LIMITS.
 *
 * @param inputFile   file the measure is attached to
 * @param fileMetrics per-file metrics providing the function complexities
 */
private void saveFunctionsComplexityDistribution(InputFile inputFile, FileMetrics fileMetrics) {
  RangeDistributionBuilder distribution = new RangeDistributionBuilder(FUNCTIONS_DISTRIB_BOTTOM_LIMITS);
  for (Integer complexity : fileMetrics.functionComplexities()) {
    distribution.add(complexity);
  }
  context.<String>newMeasure()
    .on(inputFile)
    .forMetric(CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION)
    .withValue(distribution.build())
    .save();
}
/**
 * Saves the file-complexity distribution measure for the given file.
 *
 * @param inputFile   file the measure is attached to
 * @param fileMetrics per-file metrics providing the file's total complexity
 */
private void saveFilesComplexityDistribution(InputFile inputFile, FileMetrics fileMetrics) {
  RangeDistributionBuilder distribution = new RangeDistributionBuilder(FILES_DISTRIB_BOTTOM_LIMITS);
  distribution.add(fileMetrics.complexity());
  context.<String>newMeasure()
    .on(inputFile)
    .forMetric(CoreMetrics.FILE_COMPLEXITY_DISTRIBUTION)
    .withValue(distribution.build())
    .save();
}
/**
 * Saves the function-complexity distribution measure for the given file.
 *
 * @param inputFile   file the measure is attached to
 * @param fileMetrics per-file metrics providing each function's complexity
 */
private void saveFunctionsComplexityDistribution(InputFile inputFile, FileMetrics fileMetrics) {
  RangeDistributionBuilder builder = new RangeDistributionBuilder(FUNCTIONS_DISTRIB_BOTTOM_LIMITS);
  for (Integer functionComplexity : fileMetrics.functionComplexities()) {
    builder.add(functionComplexity);
  }
  String serialized = builder.build();
  context.<String>newMeasure()
    .on(inputFile)
    .forMetric(CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION)
    .withValue(serialized)
    .save();
}
/**
 * Saves the file-complexity distribution measure for the given file.
 *
 * @param inputFile   file the measure is attached to
 * @param fileMetrics per-file metrics providing the file's total complexity
 */
private void saveFilesComplexityDistribution(InputFile inputFile, FileMetrics fileMetrics) {
  String serialized = new RangeDistributionBuilder(FILES_DISTRIB_BOTTOM_LIMITS)
    .add(fileMetrics.complexity())
    .build();
  context.<String>newMeasure()
    .on(inputFile)
    .forMetric(CoreMetrics.FILE_COMPLEXITY_DISTRIBUTION)
    .withValue(serialized)
    .save();
}
/**
 * Saves the function-complexity distribution measure for the given file by
 * walking every function definition and expression under the AST root.
 *
 * @param context   sensor context used to create and save the measure
 * @param inputFile file the measure is attached to
 * @param rootNode  root of the file's AST to scan for functions
 */
private static void saveFunctionsComplexityDistribution(SensorContext context, InputFile inputFile, AstNode rootNode) {
  RangeDistributionBuilder distribution = new RangeDistributionBuilder(FUNCTIONS_DISTRIB_BOTTOM_LIMITS);
  for (AstNode function : rootNode.getDescendants(FlexGrammar.FUNCTION_DEF, FlexGrammar.FUNCTION_EXPR)) {
    distribution.add(ComplexityVisitor.complexity(function));
  }
  context.<String>newMeasure()
    .on(inputFile)
    .forMetric(CoreMetrics.FUNCTION_COMPLEXITY_DISTRIBUTION)
    .withValue(distribution.build())
    .save();
}