public DataSegment build() { // Check stuff that goes into the id, at least. Preconditions.checkNotNull(dataSource, "dataSource"); Preconditions.checkNotNull(interval, "interval"); Preconditions.checkNotNull(version, "version"); Preconditions.checkNotNull(shardSpec, "shardSpec"); return new DataSegment( dataSource, interval, version, loadSpec, dimensions, metrics, shardSpec, binaryVersion, size ); } }
public DataSegment getSegment()
{
  // Builds a bare segment from the schema: metric names come from the
  // aggregator factories; load spec, dimensions, binary version, and size
  // are left empty/null/zero.
  return new DataSegment(
      schema.getDataSource(),
      interval,
      version,
      ImmutableMap.of(),
      Collections.emptyList(),
      Lists.transform(Arrays.asList(schema.getAggregators()), AggregatorFactory::getName),
      shardSpec,
      null,
      0
  );
}
private DataSegment createSegment(int shift)
{
  // One-hour segment beginning `shift` hours after the reference time.
  final DateTime start = referenceTime.plusHours(shift);
  return new DataSegment(
      "dataSource",
      new Interval(start, start.plusHours(1)),
      "version",
      Collections.emptyMap(),
      Collections.emptyList(),
      Collections.emptyList(),
      null,
      0,
      100
  );
}
private DataSegment getSegment(String dataSource, Interval interval) { // Not using EasyMock as it hampers the performance of multithreads. DataSegment segment = new DataSegment( dataSource, interval, "dummy_version", new ConcurrentHashMap<>(), new ArrayList<>(), new ArrayList<>(), null, 0, 0L ); return segment; }
public static DataSegment createSegment(String dataSource, Interval interval, long size)
{
  // A random UUID version keeps repeated calls from colliding on segment id.
  return new DataSegment(
      dataSource,
      interval,
      UUID.randomUUID().toString(),
      new ConcurrentHashMap<>(),
      new ArrayList<>(),
      new ArrayList<>(),
      null,
      0,
      size
  );
}
}
public static DataSegment getSegment(int index, String dataSource, Interval interval) { // Not using EasyMock as it hampers the performance of multithreads. DataSegment segment = new DataSegment( dataSource, interval, String.valueOf(index), new ConcurrentHashMap<>(), new ArrayList<>(), new ArrayList<>(), null, 0, index * 100L ); return segment; }
public static DataSegment getSegment(int index, String dataSource, Interval interval) { // Not using EasyMock as it hampers the performance of multithreads. DataSegment segment = new DataSegment( dataSource, interval, String.valueOf(index), new ConcurrentHashMap<>(), new ArrayList<>(), new ArrayList<>(), null, 0, index * 100L ); return segment; }
DataSegment createSegment(DateTime t)
{
  // One-hour segment starting at `t`; all non-identity fields are null/zero.
  return new DataSegment(
      "test",
      new Interval(t, t.plusHours(1)),
      "v1",
      null,
      null,
      null,
      null,
      0,
      0
  );
}
private DataSegment makeSegment(String dataSource, String version, Interval interval)
{
  // The load spec echoes version/interval plus the local cache directory so
  // the loader can locate the segment files on disk.
  final ImmutableMap<String, Object> loadSpec =
      ImmutableMap.of("version", version, "interval", interval, "cacheDir", infoDir);
  return new DataSegment(
      dataSource,
      interval,
      version,
      loadSpec,
      Arrays.asList("dim1", "dim2", "dim3"),
      Arrays.asList("metric1", "metric2"),
      NoneShardSpec.instance(),
      IndexIO.CURRENT_VERSION_ID,
      123L
  );
}
private DataSegment makeSegment(String intervalString, long size)
{
  // Minimal single-dimension/single-metric segment; shard spec and binary
  // version are intentionally left null.
  return new DataSegment(
      "test",
      Intervals.of(intervalString),
      "1",
      ImmutableMap.of(),
      Collections.singletonList("d"),
      Collections.singletonList("m"),
      null,
      null,
      size
  );
}
}
private DataSegment createSegment(Interval interval, String version)
{
  // NOTE(review): the version string is reused as the binary version, so it
  // is assumed to parse as an integer — confirm callers only pass numerics.
  final Integer binaryVersion = Integer.valueOf(version);
  return new DataSegment(
      task.getDataSource(),
      interval,
      version,
      null,
      ImmutableList.of("dim1", "dim2"),
      ImmutableList.of("met1", "met2"),
      NoneShardSpec.instance(),
      binaryVersion,
      1
  );
}
private DataSegment createDataSegment(String dataSource)
{
  // Covers years 0–3000 with the current timestamp as version; all other
  // fields are empty/zero placeholders.
  return new DataSegment(
      dataSource,
      Intervals.of("0/3000"),
      DateTimes.nowUtc().toString(),
      new HashMap<>(),
      new ArrayList<>(),
      new ArrayList<>(),
      NoneShardSpec.instance(),
      0,
      0
  );
}
public void dropQueryable(String dataSource, String version, Interval interval)
{
  // Reconstruct a segment descriptor matching the announced one so the
  // segment manager can identify and drop it.
  final DataSegment segment = new DataSegment(
      dataSource,
      interval,
      version,
      ImmutableMap.of("version", version, "interval", interval),
      Arrays.asList("dim1", "dim2", "dim3"),
      Arrays.asList("metric1", "metric2"),
      NoneShardSpec.instance(),
      IndexIO.CURRENT_VERSION_ID,
      123L
  );
  segmentManager.dropSegment(segment);
}
public DataSegment make(File tmpDir)
{
  // Persist the accumulated rows to tmpDir first; the returned load spec
  // tells the segment where its files live.
  final Map<String, Object> loadSpec = persist(tmpDir, Iterables.toArray(rows, InputRow.class));
  return new DataSegment(
      DATA_SOURCE,
      interval,
      version,
      loadSpec,
      Arrays.asList(DIMENSIONS),
      Arrays.asList(METRICS),
      new LinearShardSpec(partitionNum),
      -1,
      0L
  );
}
}
private DataSegment getSegmentWithPath(String path)
{
  // Local-filesystem load spec pointing at `path`; dimensions/metrics match
  // the fixture data used by these tests.
  final ImmutableMap<String, Object> localLoadSpec = ImmutableMap.of(
      "type", "local",
      "path", path
  );
  return new DataSegment(
      "dataSource",
      Intervals.of("2000/3000"),
      "ver",
      localLoadSpec,
      ImmutableList.of("product"),
      ImmutableList.of("visited_sum", "unique_hosts"),
      NoneShardSpec.instance(),
      9,
      12334
  );
}
}
public void loadQueryable(String dataSource, String version, Interval interval)
{
  // Build a descriptor for the requested segment and hand it to the segment
  // manager; loading failures are surfaced as unchecked exceptions with the
  // original cause preserved.
  final DataSegment segment = new DataSegment(
      dataSource,
      interval,
      version,
      ImmutableMap.of("version", version, "interval", interval),
      Arrays.asList("dim1", "dim2", "dim3"),
      Arrays.asList("metric1", "metric2"),
      NoneShardSpec.instance(),
      IndexIO.CURRENT_VERSION_ID,
      123L
  );
  try {
    segmentManager.loadSegment(segment);
  }
  catch (SegmentLoadingException e) {
    throw new RuntimeException(e);
  }
}
private DataSegment createRandomSegment(Random random, DateTime referenceTime)
{
  // Pick a random one-hour window within a simulated twelve-month span,
  // and a random data source name out of 50 candidates.
  final int hoursInYear = (int) TimeUnit.DAYS.toHours(DAYS_IN_MONTH * 12);
  final int timeShift = random.nextInt(hoursInYear);
  final DateTime start = referenceTime.plusHours(timeShift);
  return new DataSegment(
      String.valueOf(random.nextInt(50)),
      new Interval(start, start.plusHours(1)),
      "version",
      Collections.emptyMap(),
      Collections.emptyList(),
      Collections.emptyList(),
      null,
      0,
      100
  );
}
}
private DataSegment createSegment(Interval interval, String version, int partitionNumber)
{
  // Numbered shard out of 100 partitions; non-identity fields stay null/zero.
  return new DataSegment(
      "test_ds",
      interval,
      version,
      null,
      null,
      null,
      new NumberedShardSpec(partitionNumber, 100),
      0,
      0
  );
}
}
@Test
public void testSerde() throws IOException
{
  // Round-trip an ImmutableSegmentLoadInfo through Jackson and verify the
  // deserialized copy equals the original.
  final DataSegment segment = new DataSegment(
      "test_ds",
      Intervals.of("2011-04-01/2011-04-02"),
      "v1",
      null,
      null,
      null,
      NoneShardSpec.instance(),
      0,
      0
  );
  final DruidServerMetadata server =
      new DruidServerMetadata("a", "host", null, 10, ServerType.HISTORICAL, "tier", 1);
  final ImmutableSegmentLoadInfo segmentLoadInfo =
      new ImmutableSegmentLoadInfo(segment, Sets.newHashSet(server));

  final ImmutableSegmentLoadInfo serde = mapper.readValue(
      mapper.writeValueAsBytes(segmentLoadInfo),
      ImmutableSegmentLoadInfo.class
  );

  Assert.assertEquals(segmentLoadInfo, serde);
}
private void addSegment(final SegmentIdWithShardSpec identifier)
{
  // Track the identifier by its interval start, then announce a skeletal
  // segment (no load spec/dimensions/metrics yet) under that identity.
  segments.put(identifier.getInterval().getStartMillis(), identifier);

  final DataSegment announced = new DataSegment(
      identifier.getDataSource(),
      identifier.getInterval(),
      identifier.getVersion(),
      ImmutableMap.of(),
      ImmutableList.of(),
      ImmutableList.of(),
      identifier.getShardSpec(),
      null,
      0
  );
  try {
    segmentAnnouncer.announceSegment(announced);
  }
  catch (IOException e) {
    // Announcement failure is alerted rather than rethrown; the segment stays tracked.
    log.makeAlert(e, "Failed to announce new segment[%s]", identifier.getDataSource())
       .addData("interval", identifier.getInterval())
       .emit();
  }
}