@VisibleForTesting
static List<String> getUniqueMetrics(List<TimelineObjectHolder<String, DataSegment>> timelineSegments)
{
  final BiMap<String, Integer> uniqueMetrics = HashBiMap.create();

  // Here, we try to retain the order of metrics as they were specified. Metrics are extracted from the most
  // recent segments to the oldest; timelineSegments are sorted in order of interval.
  int index = 0;
  for (TimelineObjectHolder<String, DataSegment> timelineHolder : Lists.reverse(timelineSegments)) {
    for (PartitionChunk<DataSegment> chunk : timelineHolder.getObject()) {
      for (String metric : chunk.getObject().getMetrics()) {
        if (!uniqueMetrics.containsKey(metric)) {
          uniqueMetrics.put(metric, index++);
        }
      }
    }
  }

  final BiMap<Integer, String> orderedMetrics = uniqueMetrics.inverse();
  return IntStream.range(0, orderedMetrics.size())
                  .mapToObj(orderedMetrics::get)
                  .collect(Collectors.toList());
}
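A minimal, self-contained sketch of the first-seen ordering trick used above: the BiMap assigns each metric the index at which it is first encountered, and the inverse view reads the names back in that order. The class name, helper method, and sample metric names below are illustrative, not from the codebase.

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class MetricOrderingSketch
{
  // Same technique as getUniqueMetrics, stripped of the timeline types:
  // assign each metric its first-seen index, then read back in index order.
  static List<String> orderFirstSeen(List<List<String>> metricLists)
  {
    final BiMap<String, Integer> unique = HashBiMap.create();
    int index = 0;
    for (List<String> metrics : metricLists) {
      for (String metric : metrics) {
        if (!unique.containsKey(metric)) {
          unique.put(metric, index++);
        }
      }
    }
    final BiMap<Integer, String> ordered = unique.inverse();
    return IntStream.range(0, ordered.size())
                    .mapToObj(ordered::get)
                    .collect(Collectors.toList());
  }

  public static void main(String[] args)
  {
    // The "newest" list is visited first, so its ordering wins;
    // the older list only contributes the metric not yet seen.
    System.out.println(orderFirstSeen(List.of(
        List.of("added", "count"),
        List.of("count", "deleted")
    )));
    // Prints [added, count, deleted]: duplicates keep their first-seen position.
  }
}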
@Deprecated
@GET
@Path("/{dataSourceName}/metrics")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Iterable<String> getDataSourceMetrics(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("interval") String interval
)
{
  final Set<DataSegment> segments = getAllSegmentsForDataSource(dataSourceName);

  final Interval theInterval;
  if (interval == null || interval.isEmpty()) {
    DateTime now = getCurrentTime();
    theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
  } else {
    theInterval = Intervals.of(interval);
  }

  final Set<String> metrics = new HashSet<>();
  for (DataSegment segment : segments) {
    if (theInterval.overlaps(segment.getInterval())) {
      metrics.addAll(segment.getMetrics());
    }
  }

  return metrics;
}
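The endpoint keeps a metric only when its segment's interval overlaps the queried (or default-history) window. A small sketch of that overlap filter, using Druid's Intervals helper and Joda-Time's overlaps; the intervals are made-up sample data, and the Intervals import path is assumed from recent Druid versions.

import java.util.List;
import java.util.stream.Collectors;
import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Interval;

public class OverlapFilterSketch
{
  public static void main(String[] args)
  {
    // Query window, analogous to theInterval above.
    final Interval theInterval = Intervals.of("2012-01-01/2012-02-01");

    // Made-up segment intervals: one entirely before the window, one inside it.
    final List<Interval> segmentIntervals = List.of(
        Intervals.of("2011-12-01/2011-12-31"),
        Intervals.of("2012-01-15/2012-01-16")
    );

    final List<Interval> kept = segmentIntervals.stream()
                                                .filter(theInterval::overlaps)
                                                .collect(Collectors.toList());
    System.out.println(kept); // only 2012-01-15/2012-01-16 overlaps the window
  }
}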
private static DataSegment computeMergedSegment(
    final String dataSource,
    final String version,
    final List<DataSegment> segments
)
{
  final Interval mergedInterval = computeMergedInterval(segments);
  final Set<String> mergedDimensions = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
  final Set<String> mergedMetrics = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);

  for (DataSegment segment : segments) {
    mergedDimensions.addAll(segment.getDimensions());
    mergedMetrics.addAll(segment.getMetrics());
  }

  return DataSegment.builder()
                    .dataSource(dataSource)
                    .interval(mergedInterval)
                    .version(version)
                    .binaryVersion(IndexIO.CURRENT_VERSION_ID)
                    .shardSpec(NoneShardSpec.instance())
                    .dimensions(Lists.newArrayList(mergedDimensions))
                    .metrics(Lists.newArrayList(mergedMetrics))
                    .build();
}
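Because the merged sets use String.CASE_INSENSITIVE_ORDER, column names that differ only in case collapse into a single entry, keeping the first spelling seen, and the result is sorted ignoring case. A standalone illustration; the sample names are hypothetical:

import java.util.List;
import java.util.Set;
import java.util.TreeSet;

public class CaseInsensitiveMergeSketch
{
  public static void main(String[] args)
  {
    final Set<String> mergedMetrics = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);

    // Two segments disagree on the casing of "count"; the comparator treats
    // the spellings as equal, so only the first one is retained.
    mergedMetrics.addAll(List.of("Count", "added"));
    mergedMetrics.addAll(List.of("count", "deleted"));

    System.out.println(mergedMetrics); // [added, Count, deleted]
  }
}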
final DataSegment segment = server.getSegment();
dimensions.addAll(segment.getDimensions());
metrics.addAll(segment.getMetrics());
@Test
public void testSkipDimensions() throws Exception
{
  skipDimensionsAndMetrics = true;
  Iterator<DataSegment> segIter = testSegments.iterator();
  DataSegment firstSegment = segIter.next();

  segmentAnnouncer.announceSegment(firstSegment);

  List<String> zNodes = cf.getChildren().forPath(testSegmentsPath);

  for (String zNode : zNodes) {
    DataSegment announcedSegment = Iterables.getOnlyElement(segmentReader.read(joiner.join(testSegmentsPath, zNode)));
    Assert.assertEquals(announcedSegment, firstSegment);
    Assert.assertTrue(announcedSegment.getDimensions().isEmpty());
    Assert.assertTrue(announcedSegment.getMetrics().isEmpty());
  }

  segmentAnnouncer.unannounceSegment(firstSegment);
  Assert.assertTrue(cf.getChildren().forPath(testSegmentsPath).isEmpty());
}
for (Entry<DataSegment, File> entry : segmentFileMap.entrySet()) {
  final DataSegment segment = entry.getKey();
  final List<String> columnNames = new ArrayList<>(segment.getDimensions().size() + segment.getMetrics().size());
  columnNames.add(ColumnHolder.TIME_COLUMN_NAME);
  columnNames.addAll(segment.getDimensions());
  columnNames.addAll(segment.getMetrics());
  final Map<String, ColumnHolder> columnMap = new HashMap<>(columnNames.size());
  final List<AggregatorFactory> aggregatorFactories = new ArrayList<>(segment.getMetrics().size());
public Builder(DataSegment segment)
{
  this.dataSource = segment.getDataSource();
  this.interval = segment.getInterval();
  this.version = segment.getVersion();
  this.loadSpec = segment.getLoadSpec();
  this.dimensions = segment.getDimensions();
  this.metrics = segment.getMetrics();
  this.shardSpec = segment.getShardSpec();
  this.binaryVersion = segment.getBinaryVersion();
  this.size = segment.getSize();
}
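A hedged usage sketch of this copy constructor: assuming the static DataSegment.builder(DataSegment) factory that wraps it, one can clone a segment and override a single field. The withLoadSpec helper and newLoadSpec argument are hypothetical, and the import path is assumed from recent Druid versions.

import java.util.Map;
import org.apache.druid.timeline.DataSegment;

public class SegmentRebuildSketch
{
  // Copy every field from the source segment via the Builder above, then
  // override only the load spec. (Hypothetical helper, for illustration.)
  static DataSegment withLoadSpec(DataSegment segment, Map<String, Object> newLoadSpec)
  {
    return DataSegment.builder(segment) // assumed static factory wrapping Builder(DataSegment)
                      .loadSpec(newLoadSpec)
                      .build();
  }
}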
@Test
public void testV1SerializationNullMetrics() throws Exception
{
  final DataSegment segment = DataSegment.builder()
                                         .dataSource("foo")
                                         .interval(Intervals.of("2012-01-01/2012-01-02"))
                                         .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                         .build();

  final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
  Assert.assertEquals("empty dimensions", ImmutableList.of(), segment2.getDimensions());
  Assert.assertEquals("empty metrics", ImmutableList.of(), segment2.getMetrics());
}
null,
segments.get(0).getDimensions(),
segments.get(0).getMetrics(),
NoneShardSpec.instance(),
1,
Assert.assertEquals(Collections.singletonList("val"), segments.get(0).getMetrics());
Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval());
);
Assert.assertEquals(Collections.singletonList("val"), segment.getMetrics());
Assert.assertEquals(Intervals.of("2014/P1D"), segment.getInterval());
    segment.getInterval()
);
Assert.assertEquals(ImmutableList.of("count"), segment.getMetrics());

EasyMock.verify(monitorScheduler, queryRunnerFactoryConglomerate);
Assert.assertEquals(segment.getLoadSpec(), deserializedSegment.getLoadSpec());
Assert.assertEquals(segment.getDimensions(), deserializedSegment.getDimensions());
Assert.assertEquals(segment.getMetrics(), deserializedSegment.getMetrics());
Assert.assertEquals(segment.getShardSpec(), deserializedSegment.getShardSpec());
Assert.assertEquals(segment.getSize(), deserializedSegment.getSize());
    publishedSegments.get(0).getDimensions()
);
Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());

    publishedSegments.get(1).getDimensions()
);
Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());