@GET
@Path("/datasources/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getDatabaseSegmentDataSource(@PathParam("dataSourceName") final String dataSourceName)
{
  // Look the datasource up in the metadata store; unknown names yield 404, known ones the full datasource JSON.
  final ImmutableDruidDataSource found = metadataSegmentManager.getDataSource(dataSourceName);
  return found == null
         ? Response.status(Response.Status.NOT_FOUND).build()
         : Response.status(Response.Status.OK).entity(found).build();
}
/**
 * Returns every segment from every datasource currently known to the metadata segment manager,
 * flattened into a single list.
 */
private List<DataSegment> getAvailableDataSegments()
{
  return metadataSegmentManager
      .getDataSources()
      .stream()
      .flatMap(dataSource -> dataSource.getSegments().stream())
      .collect(Collectors.toList());
}
/**
 * Computes the umbrella interval covering all unused-segment intervals for {@code dataSource}
 * that end before now minus {@code retainDuration}, or {@code null} when there is nothing to kill.
 *
 * @param dataSource datasource whose unused segments are considered
 * @param limit      maximum number of unused-segment intervals fetched from the metadata store
 */
@VisibleForTesting
Interval findIntervalForKillTask(String dataSource, int limit)
{
  List<Interval> unusedSegmentIntervals = segmentManager.getUnusedSegmentIntervals(
      dataSource,
      new Interval(DateTimes.EPOCH, DateTimes.nowUtc().minus(retainDuration)),
      limit
  );
  // isEmpty() over size() > 0; keep the null guard since the manager's return is not guaranteed non-null here.
  if (unusedSegmentIntervals != null && !unusedSegmentIntervals.isEmpty()) {
    return JodaUtils.umbrellaInterval(unusedSegmentIntervals);
  } else {
    return null;
  }
}
}
// Fragment (statement continues past this view): builds the pre-authorization set of datasource names.
// NOTE(review): presumably a non-null "includeDisabled" query param means disabled datasources are
// included via getAllDataSourceNames(); otherwise names come from the active datasources — confirm with caller.
final Collection<ImmutableDruidDataSource> druidDataSources = metadataSegmentManager.getDataSources();
final Set<String> dataSourceNamesPreAuth;
if (includeDisabled != null) {
  // Sorted for stable output ordering.
  dataSourceNamesPreAuth = new TreeSet<>(metadataSegmentManager.getAllDataSourceNames());
} else {
  dataSourceNamesPreAuth = Sets.newTreeSet(
// Test stubbing: the metadata manager always reports started, and must be asked for its
// datasources at least once, returning an immutable view of the first fixture datasource.
EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes();
EasyMock.expect(databaseSegmentManager.getDataSources()).andReturn(
    ImmutableList.of(druidDataSources[0].toImmutableDruidDataSource())
).atLeastOnce();
@DELETE
@Path("/{dataSourceName}/segments/{segmentId}")
@ResourceFilters(DatasourceResourceFilter.class)
public Response deleteDatasourceSegment(
    @PathParam("dataSourceName") String dataSourceName,
    @PathParam("segmentId") String segmentId
)
{
  // 200 when the metadata store actually removed the segment, 204 otherwise.
  final boolean removed = databaseSegmentManager.removeSegment(dataSourceName, segmentId);
  return removed ? Response.ok().build() : Response.noContent().build();
}
// Fragment (starts and ends mid-expression): closes a startedness check, then begins building
// coordinator runtime params. This variant sources datasources from getInventory().
metadataSegmentManager.isStarted(),
serverInventoryView.isStarted()
);
DruidCoordinatorRuntimeParams.newBuilder()
    .withStartTime(startTime)
    .withDataSources(metadataSegmentManager.getInventory())
    .withDynamicConfigs(getDynamicConfigs())
    .withCompactionConfig(getCompactionConfig())
// Fragment (statement continues past this view): pre-authorization datasource-name set, this variant
// using getInventory()/getAllDatasourceNames(). NOTE(review): spelling differs from the
// getDataSources()/getAllDataSourceNames() variant elsewhere — looks like two sides of a rename; verify
// which API generation this file targets.
final Collection<ImmutableDruidDataSource> druidDataSources = metadataSegmentManager.getInventory();
final Set<String> dataSourceNamesPreAuth;
if (includeDisabled != null) {
  dataSourceNamesPreAuth = Sets.newTreeSet(metadataSegmentManager.getAllDatasourceNames());
} else {
  dataSourceNamesPreAuth = Sets.newTreeSet(
@POST
@Path("/{dataSourceName}/segments/{segmentId}")
@Consumes(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response enableDatasourceSegment(
    @PathParam("dataSourceName") String dataSourceName,
    @PathParam("segmentId") String segmentId
)
{
  // Re-enable by segment id alone; the dataSourceName path param is only used for routing/authorization.
  final boolean enabled = databaseSegmentManager.enableSegment(segmentId);
  if (enabled) {
    return Response.ok().build();
  }
  return Response.noContent().build();
}
@POST
@Path("/{dataSourceName}")
@Consumes(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response enableDataSource(
    @PathParam("dataSourceName") final String dataSourceName
)
{
  // 200 when the datasource was re-enabled in the metadata store, 204 otherwise.
  final boolean enabled = databaseSegmentManager.enableDataSource(dataSourceName);
  return enabled ? Response.ok().build() : Response.noContent().build();
}
// Default whitelist: all datasource names known to the metadata store.
whitelist = segmentManager.getAllDataSourceNames();
@POST
@Path("/{dataSourceName}")
@Consumes(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response enableDataSource(
    @PathParam("dataSourceName") final String dataSourceName
)
{
  // 200 when the datasource was re-enabled, 204 when the manager reports no change.
  if (databaseSegmentManager.enableDatasource(dataSourceName)) {
    return Response.ok().build();
  }
  return Response.noContent().build();
}
// Default whitelist: all datasource names from the metadata store.
// NOTE(review): method spelling ("Datasource") differs from getAllDataSourceNames() elsewhere —
// looks like two sides of a rename; verify against the manager interface in use.
whitelist = segmentManager.getAllDatasourceNames();
// Fragment (starts and ends mid-expression): closes a startedness check, then begins building
// coordinator runtime params. This variant sources datasources from getDataSources()
// (cf. the getInventory() variant elsewhere in this file).
metadataSegmentManager.isStarted(),
serverInventoryView.isStarted()
);
DruidCoordinatorRuntimeParams.newBuilder()
    .withStartTime(startTime)
    .withDataSources(metadataSegmentManager.getDataSources())
    .withDynamicConfigs(getDynamicConfigs())
    .withCompactionConfig(getCompactionConfig())
/**
 * Logs and removes the given segment from the metadata store by its id.
 */
public void removeSegment(DataSegment segment)
{
  log.info("Removing Segment[%s]", segment.getId());
  metadataSegmentManager.removeSegment(segment.getId());
}
@POST
@Path("/{dataSourceName}/segments/{segmentId}")
@Consumes(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response enableDatasourceSegment(
    @PathParam("dataSourceName") String dataSourceName,
    @PathParam("segmentId") String segmentId
)
{
  // Enablement is keyed on the segment id only; dataSourceName serves routing/authorization.
  return databaseSegmentManager.enableSegment(segmentId)
         ? Response.ok().build()
         : Response.noContent().build();
}
// Test setup: attach the fixture segment to the first datasource, then stub the metadata manager —
// always started, and its datasources must be requested at least once.
druidDataSources[0].addSegment(dataSegment);
EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes();
EasyMock.expect(databaseSegmentManager.getDataSources()).andReturn(
    ImmutableList.of(druidDataSources[0].toImmutableDruidDataSource())
).atLeastOnce();
@GET
@Path("/datasources/{dataSourceName}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getDatabaseSegmentDataSourceSegments(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("full") String full
)
{
  // Unknown datasource → 404; otherwise 200 with either full segment objects ("full" present) or just ids.
  final ImmutableDruidDataSource dataSource = metadataSegmentManager.getDataSource(dataSourceName);
  if (dataSource == null) {
    return Response.status(Response.Status.NOT_FOUND).build();
  }
  final Response.ResponseBuilder ok = Response.status(Response.Status.OK);
  if (full == null) {
    // Default payload: segment ids only, lazily mapped from the segment collection.
    return ok.entity(Collections2.transform(dataSource.getSegments(), DataSegment::getId)).build();
  }
  return ok.entity(dataSource.getSegments()).build();
}
// Seeds the metadata store with two generations of segments over the same three days:
// version "1" segments are announced then marked unused, version "2" segments stay used.
// The announce → unlock → remove ordering is load-bearing; do not reorder.
@Before
public void setup() throws IOException
{
  task = NoopTask.create();
  actionTestKit.getTaskLockbox().add(task);

  // Version "1" segments: these will end up unused (removed from the segment manager below).
  expectedUnusedSegments = new HashSet<>();
  expectedUnusedSegments.add(createSegment(Intervals.of("2017-10-05/2017-10-06"), "1"));
  expectedUnusedSegments.add(createSegment(Intervals.of("2017-10-06/2017-10-07"), "1"));
  expectedUnusedSegments.add(createSegment(Intervals.of("2017-10-07/2017-10-08"), "1"));
  actionTestKit.getMetadataStorageCoordinator()
               .announceHistoricalSegments(expectedUnusedSegments);
  // Release the locks taken while announcing so later actions are not blocked.
  expectedUnusedSegments.forEach(s -> actionTestKit.getTaskLockbox().unlock(task, s.getInterval()));

  // Version "2" segments: same intervals, these remain used.
  expectedUsedSegments = new HashSet<>();
  expectedUsedSegments.add(createSegment(Intervals.of("2017-10-05/2017-10-06"), "2"));
  expectedUsedSegments.add(createSegment(Intervals.of("2017-10-06/2017-10-07"), "2"));
  expectedUsedSegments.add(createSegment(Intervals.of("2017-10-07/2017-10-08"), "2"));
  actionTestKit.getMetadataStorageCoordinator()
               .announceHistoricalSegments(expectedUsedSegments);
  expectedUsedSegments.forEach(s -> actionTestKit.getTaskLockbox().unlock(task, s.getInterval()));

  // Mark the version "1" segments unused by removing them from the metadata segment manager.
  expectedUnusedSegments.forEach(s -> actionTestKit.getMetadataSegmentManager().removeSegment(s.getId()));
}
/**
 * Streams all metadata-store segments the caller is authorized to read, as a JSON array.
 */
@GET
@Path("/segments")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatabaseSegments(@Context final HttpServletRequest req)
{
  // Flatten every datasource's segments into one stream.
  final Collection<ImmutableDruidDataSource> dataSources = metadataSegmentManager.getDataSources();
  final Stream<DataSegment> allSegments = dataSources
      .stream()
      .flatMap(dataSource -> dataSource.getSegments().stream());

  // Authorize each segment against its datasource's read action.
  final Function<DataSegment, Iterable<ResourceAction>> raGenerator = segment ->
      Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(segment.getDataSource()));
  final Iterable<DataSegment> authorized = AuthorizationUtils.filterAuthorizedResources(
      req,
      allSegments::iterator,
      raGenerator,
      authorizerMapper
  );

  // Serialize element-by-element so a large segment list is never materialized in memory at once.
  final StreamingOutput body = outputStream -> {
    final JsonFactory factory = jsonMapper.getFactory();
    try (final JsonGenerator generator = factory.createGenerator(outputStream)) {
      generator.writeStartArray();
      for (DataSegment segment : authorized) {
        generator.writeObject(segment);
        // Flush after each element to push bytes to the client promptly.
        generator.flush();
      }
      generator.writeEndArray();
    }
  };

  return Response.status(Response.Status.OK).entity(body).build();
}