@Override
public void publishSegment(final DataSegment segment) throws IOException
{
  publishSegment(
      segment.getId().toString(),
      segment.getDataSource(),
      DateTimes.nowUtc().toString(),
      segment.getInterval().getStart().toString(),
      segment.getInterval().getEnd().toString(),
      !(segment.getShardSpec() instanceof NoneShardSpec),
      segment.getVersion(),
      true,
      jsonMapper.writeValueAsBytes(segment)
  );
}
public Builder(DataSegment segment)
{
  this.dataSource = segment.getDataSource();
  this.interval = segment.getInterval();
  this.version = segment.getVersion();
  this.loadSpec = segment.getLoadSpec();
  this.dimensions = segment.getDimensions();
  this.metrics = segment.getMetrics();
  this.shardSpec = segment.getShardSpec();
  this.binaryVersion = segment.getBinaryVersion();
  this.size = segment.getSize();
}
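This copy constructor is what lets an existing segment be cloned with selective overrides; a minimal usage sketch, assuming DataSegment.builder(DataSegment) wraps it (the version string is illustrative):

// Clone an existing segment, overriding only the version; every other field carries over.
DataSegment bumped = DataSegment.builder(segment)
                                .version("2024-01-01T00:00:00.000Z") // illustrative value
                                .build();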
@Override
public DataSegment apply(DataSegment input)
{
  DataSegment rv = input;
  if (config.isSkipDimensionsAndMetrics()) {
    rv = rv.withDimensions(null).withMetrics(null);
  }
  if (config.isSkipLoadSpec()) {
    rv = rv.withLoadSpec(null);
  }
  return rv;
}
};
@Override
public String toString()
{
  return "DataSegment{" +
         "size=" + size +
         ", shardSpec=" + shardSpec +
         ", metrics=" + metrics +
         ", dimensions=" + dimensions +
         ", version='" + getVersion() + '\'' +
         ", loadSpec=" + loadSpec +
         ", interval=" + getInterval() +
         ", dataSource='" + getDataSource() + '\'' +
         ", binaryVersion='" + binaryVersion + '\'' +
         '}';
}
public static SegmentIdWithShardSpec fromDataSegment(final DataSegment segment)
{
  return new SegmentIdWithShardSpec(
      segment.getDataSource(),
      segment.getInterval(),
      segment.getVersion(),
      segment.getShardSpec()
  );
}
}
@Override
public String apply(DataSegment x)
{
  return StringUtils.format(
      "%s_%s_%s_%s",
      x.getInterval().getStart(),
      x.getInterval().getEnd(),
      x.getVersion(),
      x.getShardSpec().getPartitionNum()
  );
}
}
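For a one-day interval, a version of "v1", and partition 0, the formatted key would look like the following (illustrative values, not taken from the source):

// 2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_v1_0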
final File outDir = new File(baseStorageDir, this.getStorageDir(segment, useUniquePath));
log.info("Copying segment[%s] to local filesystem at location[%s]", segment.getId(), outDir.toString());

// Excerpt: the rebuilt segment and outDir are arguments to a surrounding call that is elided here.
        segment.withLoadSpec(makeLoadSpec(outDir.toURI()))
               .withSize(size)
               .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
        outDir
);

log.info("Creating intermediate directory[%s] for segment[%s]", tmpOutDir.toString(), segment.getId());
FileUtils.forceMkdir(tmpOutDir);

// Excerpt: likewise part of an elided surrounding call, this time targeting the intermediate directory.
        segment.withLoadSpec(makeLoadSpec(new File(outDir, INDEX_FILENAME).toURI()))
               .withSize(size)
               .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
        tmpDescriptorFile
);
// Excerpt from the HDFS pusher: the argument lists below belong to surrounding calls that are elided here.
        segment.getId(), fullyQualifiedStorageDirectory.get(), storageDir

        "%s/%s/%s/%s_index.zip",
        fullyQualifiedStorageDirectory.get(),
        segment.getDataSource(),
        UUIDUtils.generateUuid(),
        segment.getShardSpec().getPartitionNum()
));

FileSystem fs = tmpIndexFile.getFileSystem(hadoopConfig);

// The same path arguments are assembled twice: once for the index file, once for the descriptor.
        fullyQualifiedStorageDirectory.get(), storageDir, segment.getShardSpec().getPartitionNum(), uniquePrefix
));
        fullyQualifiedStorageDirectory.get(), storageDir, segment.getShardSpec().getPartitionNum(), uniquePrefix
));

dataSegment = segment.withLoadSpec(makeLoadSpec(outIndexFile.toUri()))
                     .withSize(size)
                     .withBinaryVersion(SegmentUtils.getVersionFromDir(inDir));

StringUtils.format("%s_descriptor.json", dataSegment.getShardSpec().getPartitionNum())
public DruidDataSource addSegment(DataSegment dataSegment)
{
  idToSegmentMap.put(dataSegment.getId(), dataSegment);
  return this;
}
@Test
public void testUsedOutOfBoundsHigh() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertTrue(
      coordinator.getUsedSegmentsForInterval(
          defaultSegment.getDataSource(),
          new Interval(defaultSegment.getInterval().getEnd(), defaultSegment.getInterval().getEnd().plusDays(10))
      ).isEmpty()
  );
}
public DataSegment uploadDataSegment(
    DataSegment segment,
    final int binaryVersion,
    final long size,
    final File compressedSegmentData,
    final File descriptorFile,
    final Map<String, String> azurePaths
) throws StorageException, IOException, URISyntaxException
{
  azureStorage.uploadBlob(compressedSegmentData, config.getContainer(), azurePaths.get("index"));
  azureStorage.uploadBlob(descriptorFile, config.getContainer(), azurePaths.get("descriptor"));

  final DataSegment outSegment = segment
      .withSize(size)
      .withLoadSpec(this.makeLoadSpec(new URI(azurePaths.get("index"))))
      .withBinaryVersion(binaryVersion);

  log.info("Deleting file [%s]", compressedSegmentData);
  compressedSegmentData.delete();

  log.info("Deleting file [%s]", descriptorFile);
  descriptorFile.delete();

  return outSegment;
}
private void loadInLocation(DataSegment segment, File storageDir) throws SegmentLoadingException
{
  // LoadSpec isn't materialized until here so that any system can interpret Segment without having to have all the
  // LoadSpec dependencies.
  final LoadSpec loadSpec = jsonMapper.convertValue(segment.getLoadSpec(), LoadSpec.class);
  final LoadSpec.LoadSpecResult result = loadSpec.loadSegment(storageDir);
  if (result.getSize() != segment.getSize()) {
    log.warn(
        "Segment [%s] is different than expected size. Expected [%d] found [%d]",
        segment.getId(),
        segment.getSize(),
        result.getSize()
    );
  }
}
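The convertValue call relies on Jackson's polymorphic typing to turn the raw loadSpec map into a concrete LoadSpec; a minimal sketch of that step, using a hypothetical "local"-type spec map and cache directory:

// Hypothetical spec map; the "type" key selects the concrete LoadSpec implementation.
Map<String, Object> spec = ImmutableMap.of("type", "local", "path", "/tmp/segments/index.zip");
LoadSpec loadSpec = jsonMapper.convertValue(spec, LoadSpec.class);
LoadSpec.LoadSpecResult result = loadSpec.loadSegment(new File("/tmp/segment-cache"));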
Map<String, Object> loadSpec = segment.getLoadSpec();
String s3Bucket = MapUtils.getString(loadSpec, "bucket");
String s3Path = MapUtils.getString(loadSpec, "key");

safeMove(s3Bucket, s3DescriptorPath, targetS3Bucket, targetS3DescriptorPath);

// Excerpt: rebuilds the moved segment's loadSpec; the remainder of the builder chain is elided here.
return segment.withLoadSpec(
    ImmutableMap.<String, Object>builder()
                .putAll(

// Excerpt: error path from the enclosing try/catch.
throw new SegmentLoadingException(e, "Unable to move segment[%s]: [%s]", segment.getId(), e);
private static Path getPath(DataSegment segment)
{
  return new Path(String.valueOf(segment.getLoadSpec().get(PATH_KEY)));
}
private Pair<Map<Interval, String>, Map<Interval, List<DataSegment>>> getVersionAndBaseSegments(
    List<DataSegment> snapshot
)
{
  Map<Interval, String> versions = new HashMap<>();
  Map<Interval, List<DataSegment>> segments = new HashMap<>();

  for (DataSegment segment : snapshot) {
    Interval interval = segment.getInterval();
    versions.put(interval, segment.getVersion());
    segments.putIfAbsent(interval, new ArrayList<>());
    segments.get(interval).add(segment);
  }

  return new Pair<>(versions, segments);
}
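The putIfAbsent/get pair performs two map lookups per segment; a behaviorally equivalent sketch of the grouping step using computeIfAbsent:

// Single-lookup equivalent of the putIfAbsent/get pair above.
segments.computeIfAbsent(interval, k -> new ArrayList<>()).add(segment);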
@Test
public void testIdentifierWithZeroPartition()
{
  final DataSegment segment = DataSegment.builder()
                                         .dataSource("foo")
                                         .interval(Intervals.of("2012-01-01/2012-01-02"))
                                         .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
                                         .shardSpec(getShardSpec(0))
                                         .build();

  Assert.assertEquals(
      "foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z",
      segment.getId().toString()
  );
}
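As the expected string shows, partition 0 contributes no trailing partition number to the segment id; for a non-zero partition the number would be appended (illustrative, not taken from the test):

// foo_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_2012-01-01T11:22:33.444Z_7  (partition 7)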
@Override
public boolean appliesTo(DataSegment segment, DateTime referenceTimestamp)
{
  return appliesTo(segment.getInterval(), referenceTimestamp);
}
@Override
public String apply(DataSegment input)
{
  return input.getDataSource();
}
}
public DataSegment withVersion(String version)
{
  return builder(this).version(version).build();
}
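Since the builder copies every field of the receiver, withVersion returns a fresh immutable copy and leaves the original untouched; a brief usage sketch (version string illustrative):

DataSegment rebased = segment.withVersion("2024-01-01T00:00:00.000Z");
// `segment` still reports its original version; only `rebased` carries the new one.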
private ServerSelector makeMockSingleDimensionSelector(
    DruidServer server,
    String dimension,
    String start,
    String end,
    int partitionNum
)
{
  DataSegment segment = EasyMock.createNiceMock(DataSegment.class);
  EasyMock.expect(segment.getId()).andReturn(SegmentId.dummy(DATA_SOURCE)).anyTimes();
  EasyMock.expect(segment.getShardSpec()).andReturn(new SingleDimensionShardSpec(dimension, start, end, partitionNum))
          .anyTimes();
  EasyMock.replay(segment);

  ServerSelector selector = new ServerSelector(
      segment,
      new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())
  );
  selector.addServerAndUpdateSegment(new QueryableDruidServer(server, null), segment);
  return selector;
}