@Test
public void testSerDesr() throws IOException
{
  // Round-trip check: a serialized DataSegment JSON payload must deserialize
  // back with its dataSource intact.
  final String payload = "{\"dataSource\":\"datasource2015\",\"interval\":\"2015-06-01T00:00:00.000-04:00/"
                         + "2015-06-02T00:00:00.000-04:00\""
                         + ",\"version\":\"2016-11-04T19:24:01.732-04:00\",\"loadSpec\":{\"type\":\"hdfs\","
                         + "\"path\":\"hdfs://cn105-10.l42scl.hortonworks.com:8020/apps/hive/warehouse/druid.db/"
                         + ".hive-staging_hive_2016-11-04_19-23-50_168_1550339856804207572-1/_task_tmp.-ext-10002/_tmp.000000_0/"
                         + "datasource2015/20150601T000000.000-0400_20150602T000000.000-0400/2016-11-04T19_24_01.732-04_00/0/"
                         + "index.zip\"},\"dimensions\":\"dimension1\",\"metrics\":\"bigint\",\"shardSpec\":{\"type\":\"linear\","
                         + "\"partitionNum\":0},\"binaryVersion\":9,\"size\":1765,\"identifier\":\"datasource2015_2015-06-01"
                         + "T00:00:00.000-04:00_2015-06-02T00:00:00.000-04:00_2016-11-04T19:24:01.732-04:00\"}";
  final DataSegment parsed = objectMapper.readerFor(DataSegment.class).readValue(payload);
  Assert.assertEquals("datasource2015", parsed.getDataSource());
}
dataSegment.getDataSource(), dataSegment.getIdentifier())); } catch (MalformedURLException e) {
.put("dataSource", segment.getDataSource()) .put("created_date", new DateTime().toString()) .put("start", segment.getInterval().getStart().toString())
@Override
public String apply(DataSegment input)
{
  // Extract the data source name from the given segment.
  final String dataSourceName = input.getDataSource();
  return dataSourceName;
}
}
@Override
public boolean apply(@Nullable DataSegment segment)
{
  // Accept anything that is null or that does not belong to this data source
  // (comparison is case-insensitive).
  if (segment == null) {
    return true;
  }
  return !segment.getDataSource().equalsIgnoreCase(dataSource);
}
}
@Override
public boolean apply(@Nullable DataSegment segment)
{
  // True unless the segment is non-null AND belongs to this data source
  // (case-insensitive) — i.e. filter keeps nulls and foreign segments.
  final boolean belongsToDataSource =
      segment != null && segment.getDataSource().equalsIgnoreCase(dataSource);
  return !belongsToDataSource;
}
}
boolean hasLoadPending(final String dataSource)
{
  // True when any load-queue peon still has a segment of this data source
  // waiting to be loaded.
  for (final LoadQueuePeon peon : loadManagementPeons.values()) {
    for (final DataSegment queuedSegment : peon.getSegmentsToLoad()) {
      if (queuedSegment.getDataSource().equals(dataSource)) {
        return true;
      }
    }
  }
  return false;
}
SegmentProvider(List<DataSegment> segments)
{
  // All segments must share one data source; the provider's interval is the
  // umbrella interval covering every segment's own interval.
  Preconditions.checkArgument(segments != null && !segments.isEmpty());
  final String expectedDataSource = segments.get(0).getDataSource();
  Preconditions.checkArgument(
      segments.stream().allMatch(candidate -> candidate.getDataSource().equals(expectedDataSource)),
      "segments should have the same dataSource"
  );
  this.dataSource = expectedDataSource;
  this.segments = segments;
  this.interval = JodaUtils.umbrellaInterval(
      segments.stream().map(DataSegment::getInterval).collect(Collectors.toList())
  );
}
public ClientConversionQuery(
    DataSegment segment
)
{
  // The query's data source and interval are taken directly from the segment
  // being converted.
  this.segment = segment;
  this.dataSource = segment.getDataSource();
  this.interval = segment.getInterval();
}
private double computeDataSourceCost(DataSegment segment)
{
  // A data source with no cost cache yet contributes zero cost.
  final SegmentsCostCache cacheForDataSource = segmentsPerDataSource.get(segment.getDataSource());
  if (cacheForDataSource == null) {
    return 0.0;
  }
  return cacheForDataSource.cost(segment);
}
public ClientConversionQuery( DataSegment segment ) { this.dataSource = segment.getDataSource(); this.interval = segment.getInterval(); this.segment = segment; }
public Builder addSegment(DataSegment dataSegment)
{
  // Record the segment both in the global cache and in the per-data-source
  // cache, creating the latter lazily on first use.
  allSegmentsCostCache.addSegment(dataSegment);
  segmentsPerDataSource
      .computeIfAbsent(dataSegment.getDataSource(), dataSourceName -> SegmentsCostCache.builder())
      .addSegment(dataSegment);
  return this;
}
public static VersionConverterTask create(DataSegment segment)
{
  // Derive a task id from the segment's data source and interval; the same id
  // doubles as the group id.
  final String dataSource = segment.getDataSource();
  final Interval interval = segment.getInterval();
  final String taskId = makeId(dataSource, interval);
  return new VersionConverterTask(taskId, taskId, dataSource, interval, segment);
}
public void removeSegment(DataSegment segment)
{
  // Log first, then delegate removal keyed by (dataSource, identifier).
  log.info("Removing Segment[%s]", segment);
  final String dataSourceName = segment.getDataSource();
  final String segmentId = segment.getIdentifier();
  metadataSegmentManager.removeSegment(dataSourceName, segmentId);
}
public void removeSegment(DataSegment segment)
{
  // Announce the removal, then hand off to the database segment manager,
  // which keys segments by data source plus identifier.
  log.info("Removing Segment[%s]", segment);
  databaseSegmentManager.removeSegment(
      segment.getDataSource(),
      segment.getIdentifier()
  );
}
@Override
public boolean apply(TaskLock taskLock)
{
  // The lock must have an acceptable version (>= the segment's version when
  // older versions are allowed, exactly equal otherwise) before we bother
  // checking data source and interval containment.
  final String lockVersion = taskLock.getVersion();
  final boolean versionOk;
  if (allowOlderVersions) {
    versionOk = lockVersion.compareTo(segment.getVersion()) >= 0;
  } else {
    versionOk = lockVersion.equals(segment.getVersion());
  }
  if (!versionOk) {
    return false;
  }
  return taskLock.getDataSource().equals(segment.getDataSource())
         && taskLock.getInterval().contains(segment.getInterval());
}
}
public Builder removeSegment(DataSegment dataSegment)
{
  // Drop the segment from the global cache, then from its per-data-source
  // cache; once that cache becomes empty, remove the map entry entirely
  // (computeIfPresent removes the mapping when the remapping returns null).
  allSegmentsCostCache.removeSegment(dataSegment);
  segmentsPerDataSource.computeIfPresent(
      dataSegment.getDataSource(),
      (dataSourceName, cacheBuilder) ->
          cacheBuilder.removeSegment(dataSegment).isEmpty() ? null : cacheBuilder
  );
  return this;
}
@Override
public void publishSegment(final DataSegment segment) throws IOException
{
  // Persist the segment's metadata row; a freshly published segment is always
  // marked used (the trailing `true`).
  publishSegment(
      segment.getIdentifier(),
      segment.getDataSource(),
      DateTimes.nowUtc().toString(),
      segment.getInterval().getStart().toString(),
      segment.getInterval().getEnd().toString(),
      // "partitioned" flag: a NoneShardSpec means the segment is unpartitioned.
      // Replaces the `(x instanceof NoneShardSpec) ? false : true` anti-idiom
      // with a direct negation — identical behavior, clearer intent.
      !(segment.getShardSpec() instanceof NoneShardSpec),
      segment.getVersion(),
      true,
      jsonMapper.writeValueAsBytes(segment)
  );
}
public static SegmentIdentifier fromDataSegment(final DataSegment segment)
{
  // An identifier is the segment's coordinates: data source, interval,
  // version, and shard spec.
  final String dataSource = segment.getDataSource();
  final Interval interval = segment.getInterval();
  return new SegmentIdentifier(dataSource, interval, segment.getVersion(), segment.getShardSpec());
}
}
public CountingMap<String> getSegmentAvailability()
{
  // Per data source, count the segments that currently have zero replicas.
  final CountingMap<String> unavailableCounts = new CountingMap<>();
  if (segmentReplicantLookup == null) {
    return unavailableCounts;
  }
  for (final DataSegment segment : getAvailableDataSegments()) {
    final boolean hasReplica =
        segmentReplicantLookup.getTotalReplicants(segment.getIdentifier()) != 0;
    // Add 1 when the segment has no replicas, 0 otherwise — same tally as the
    // original `1 - available` arithmetic.
    unavailableCounts.add(segment.getDataSource(), hasReplica ? 0 : 1);
  }
  return unavailableCounts;
}