/**
 * Collects every segment from every datasource known to the metadata store
 * into a single flat list.
 *
 * @return all segments across all metadata-store datasources
 */
private List<DataSegment> getAvailableDataSegments()
{
  // Flatten each datasource's segment collection into one list.
  return metadataSegmentManager
      .getDataSources()
      .stream()
      .flatMap(dataSource -> dataSource.getSegments().stream())
      .collect(Collectors.toList());
}
/**
 * Returns every segment registered in the metadata store, filtered down to the
 * datasources the caller is authorized to read, serialized as a streamed JSON array.
 *
 * The response body is produced lazily via {@link StreamingOutput} so the full
 * segment set is never materialized as one in-memory JSON document.
 *
 * @param req the HTTP request, used for authorization checks
 * @return a 200 response whose entity streams the authorized segments as JSON
 */
@GET
@Path("/segments")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatabaseSegments(@Context final HttpServletRequest req)
{
  final Collection<ImmutableDruidDataSource> druidDataSources = metadataSegmentManager.getDataSources();
  // Flatten all datasources' segments into one lazy stream.
  final Stream<DataSegment> metadataSegments = druidDataSources
      .stream()
      .flatMap(t -> t.getSegments().stream());
  // Each segment requires READ access to its owning datasource.
  Function<DataSegment, Iterable<ResourceAction>> raGenerator = segment -> Collections.singletonList(
      AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(segment.getDataSource()));
  final Iterable<DataSegment> authorizedSegments =
      AuthorizationUtils.filterAuthorizedResources(req, metadataSegments::iterator, raGenerator, authorizerMapper);
  // Write each authorized segment as soon as it is produced; flushing per segment
  // pushes bytes to the client incrementally instead of buffering the whole array.
  final StreamingOutput stream = outputStream -> {
    final JsonFactory jsonFactory = jsonMapper.getFactory();
    try (final JsonGenerator jsonGenerator = jsonFactory.createGenerator(outputStream)) {
      jsonGenerator.writeStartArray();
      for (DataSegment ds : authorizedSegments) {
        jsonGenerator.writeObject(ds);
        jsonGenerator.flush();
      }
      jsonGenerator.writeEndArray();
    }
  };
  Response.ResponseBuilder builder = Response.status(Response.Status.OK);
  return builder.entity(stream).build();
}
final Collection<ImmutableDruidDataSource> druidDataSources = metadataSegmentManager.getDataSources(); final Set<String> dataSourceNamesPreAuth; if (includeDisabled != null) {
public Map<String, Double> getLoadStatus() { Map<String, Double> loadStatus = new HashMap<>(); for (ImmutableDruidDataSource dataSource : metadataSegmentManager.getDataSources()) { final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments()); final int availableSegmentSize = segments.size(); // remove loaded segments for (DruidServer druidServer : serverInventoryView.getInventory()) { final DruidDataSource loadedView = druidServer.getDataSource(dataSource.getName()); if (loadedView != null) { // This does not use segments.removeAll(loadedView.getSegments()) for performance reasons. // Please see https://github.com/apache/incubator-druid/pull/5632 and LoadStatusBenchmark for more info. for (DataSegment serverSegment : loadedView.getSegments()) { segments.remove(serverSegment); } } } final int unloadedSegmentSize = segments.size(); loadStatus.put( dataSource.getName(), 100 * ((double) (availableSegmentSize - unloadedSegmentSize) / (double) availableSegmentSize) ); } return loadStatus; }
.andReturn(ImmutableList.of(hotTier, coldTier)).atLeastOnce(); EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes(); EasyMock.expect(databaseSegmentManager.getDataSources()).andReturn( ImmutableList.of(druidDataSources[0].toImmutableDruidDataSource()) ).atLeastOnce();
@Test
public void testOrderedAvailableDataSegments()
{
  // Raw HashMap replaced with diamond form to avoid an unchecked-conversion warning.
  DruidDataSource dataSource = new DruidDataSource("test", new HashMap<>());
  // Segments added deliberately out of order to exercise the coordinator's ordering.
  DataSegment[] segments = new DataSegment[]{
      getSegment("test", Intervals.of("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")),
      getSegment("test", Intervals.of("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")),
      getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")),
      getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z"))
  };
  for (DataSegment segment : segments) {
    dataSource.addSegment(segment);
  }
  EasyMock.expect(databaseSegmentManager.getDataSources()).andReturn(
      ImmutableList.of(dataSource.toImmutableDruidDataSource())
  ).atLeastOnce();
  EasyMock.replay(databaseSegmentManager);

  Set<DataSegment> availableSegments = coordinator.getOrderedAvailableDataSegments();

  // Expected order: newest interval first, and for equal starts the longer interval first.
  DataSegment[] expected = new DataSegment[]{
      getSegment("test", Intervals.of("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")),
      getSegment("test", Intervals.of("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")),
      getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")),
      getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z"))
  };

  Assert.assertEquals(expected.length, availableSegments.size());
  // assertArrayEquals replaces the deprecated assertEquals(Object[], Object[]) and
  // reports element-level diffs on failure.
  Assert.assertArrayEquals(expected, availableSegments.toArray());
  EasyMock.verify(databaseSegmentManager);
}
EasyMock.expect(databaseSegmentManager.getDataSources()).andReturn( ImmutableList.of(druidDataSources[0].toImmutableDruidDataSource()) ).atLeastOnce();
DruidCoordinatorRuntimeParams.newBuilder() .withStartTime(startTime) .withDataSources(metadataSegmentManager.getDataSources()) .withDynamicConfigs(getDynamicConfigs()) .withCompactionConfig(getCompactionConfig())