/**
 * Returns a copy of this spec with the given input intervals; segment granularity,
 * query granularity, and the rollup flag are carried over unchanged.
 */
@Override
public GranularitySpec withIntervals(List<Interval> inputIntervals)
{
  return new UniformGranularitySpec(segmentGranularity, queryGranularity, rollup, inputIntervals);
}
}
/**
 * Returns a copy of this spec with the given input intervals; query granularity
 * and the rollup flag are carried over unchanged.
 */
@Override
public GranularitySpec withIntervals(List<Interval> inputIntervals)
{
  return new ArbitraryGranularitySpec(queryGranularity, rollup, inputIntervals);
}
}
/**
 * Delegates to the wrapped spec when one is set; otherwise reports "no buckets".
 */
@Override
public Optional<SortedSet<Interval>> bucketIntervals()
{
  return wrappedSpec == null ? Optional.<SortedSet<Interval>>absent() : wrappedSpec.bucketIntervals();
}
/**
 * A spec constructed with rollup == false must report isRollup() == false.
 */
@Test
public void testRollupSetting()
{
  final List<Interval> inputIntervals = Lists.newArrayList(
      Intervals.of("2012-01-08T00Z/2012-01-11T00Z"),
      Intervals.of("2012-01-07T00Z/2012-01-08T00Z"),
      Intervals.of("2012-01-03T00Z/2012-01-04T00Z"),
      Intervals.of("2012-01-01T00Z/2012-01-03T00Z")
  );
  final GranularitySpec spec =
      new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, false, inputIntervals);
  Assert.assertFalse(spec.isRollup());
}
/**
 * A spec constructed with rollup == false must report isRollup() == false.
 */
@Test
public void testRollupSetting()
{
  final List<Interval> inputIntervals = Lists.newArrayList(
      Intervals.of("2012-01-08T00Z/2012-01-11T00Z"),
      Intervals.of("2012-02-01T00Z/2012-03-01T00Z"),
      Intervals.of("2012-01-07T00Z/2012-01-08T00Z"),
      Intervals.of("2012-01-03T00Z/2012-01-04T00Z"),
      Intervals.of("2012-01-01T00Z/2012-01-03T00Z")
  );
  final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, false, inputIntervals);
  Assert.assertFalse(spec.isRollup());
}
/**
 * Passing null for the query granularity must fall back to some non-null default.
 */
@Test
public void testDefaultQueryGranularity()
{
  final GranularitySpec spec = new ArbitraryGranularitySpec(
      null,
      Lists.newArrayList(
          Intervals.of("2012-01-08T00Z/2012-01-11T00Z"),
          Intervals.of("2012-02-01T00Z/2012-03-01T00Z"),
          Intervals.of("2012-01-07T00Z/2012-01-08T00Z"),
          Intervals.of("2012-01-03T00Z/2012-01-04T00Z"),
          Intervals.of("2012-01-01T00Z/2012-01-03T00Z")
      )
  );
  Assert.assertNotNull(spec.getQueryGranularity());
}
/**
 * Serializes the spec to JSON and back, and checks that the bucket intervals survive
 * the round trip.
 *
 * Declares {@code throws Exception} instead of wrapping with the deprecated
 * {@code Throwables.propagate}; JUnit reports any thrown exception as a failure directly.
 */
@Test
public void testJson() throws Exception
{
  final GranularitySpec spec = new ArbitraryGranularitySpec(
      Granularities.NONE,
      Lists.newArrayList(
          Intervals.of("2012-01-08T00Z/2012-01-11T00Z"),
          Intervals.of("2012-02-01T00Z/2012-03-01T00Z"),
          Intervals.of("2012-01-07T00Z/2012-01-08T00Z"),
          Intervals.of("2012-01-03T00Z/2012-01-04T00Z"),
          Intervals.of("2012-01-01T00Z/2012-01-03T00Z")
      )
  );
  final GranularitySpec rtSpec = jsonMapper.readValue(jsonMapper.writeValueAsString(spec), GranularitySpec.class);
  Assert.assertEquals("Round-trip", spec.bucketIntervals(), rtSpec.bucketIntervals());
}
}
/**
 * Returns the bucket intervals of the granularity spec, widened from
 * {@code SortedSet} to {@code Set}, or absent when the spec has no buckets.
 */
public Optional<Set<Interval>> getSegmentGranularIntervals()
{
  // Assigning orNull() to a Set-typed local widens the type, so no explicit cast is needed.
  final Set<Interval> buckets = schema.getDataSchema()
                                      .getGranularitySpec()
                                      .bucketIntervals()
                                      .orNull();
  return Optional.fromNullable(buckets);
}
/**
 * Two specs built from independent but equal interval lists must compare equal.
 */
@Test
public void testEquals()
{
  equalsCheck(
      new UniformGranularitySpec(Granularities.DAY, null, newTestIntervals()),
      new UniformGranularitySpec(Granularities.DAY, null, newTestIntervals())
  );
}

/** Fresh copy of the fixture intervals, so each spec under test gets its own list. */
private static List<Interval> newTestIntervals()
{
  return Lists.newArrayList(
      Intervals.of("2012-01-08T00Z/2012-01-11T00Z"),
      Intervals.of("2012-01-07T00Z/2012-01-08T00Z"),
      Intervals.of("2012-01-03T00Z/2012-01-04T00Z"),
      Intervals.of("2012-01-01T00Z/2012-01-03T00Z")
  );
}
// Runs Hadoop's standard mapper setup first, then caches the query granularity from the
// indexer config for use while mapping. (Order matters: super.setup initializes the config.)
@Override protected void setup(Context context) throws IOException, InterruptedException { super.setup(context); rollupGranularity = getConfig().getGranularitySpec().getQueryGranularity(); }
/**
 * Merges the given queryable indexes into {@code file} using the V9 merger.
 * Whether rows may be rolled up during the merge comes from the ingestion spec.
 *
 * @throws IOException if the merge fails
 */
protected File mergeQueryableIndex(
    final List<QueryableIndex> indexes,
    final AggregatorFactory[] aggs,
    final File file,
    ProgressIndicator progressIndicator
) throws IOException
{
  final boolean rollup = config.getSchema().getDataSchema().getGranularitySpec().isRollup();
  return HadoopDruidIndexerConfig.INDEX_MERGER_V9.mergeQueryableIndex(
      indexes,
      rollup,
      aggs,
      file,
      config.getIndexSpec(),
      progressIndicator,
      null
  );
}
/**
 * Returns the intervals exactly as supplied in the ingestion spec (not bucketed).
 */
public List<Interval> getInputIntervals()
{
  final GranularitySpec granularitySpec = schema.getDataSchema().getGranularitySpec();
  return granularitySpec.inputIntervals();
}
/**
 * Delegates the bucket lookup to the wrapped spec when one is set; otherwise absent.
 */
@Override
public Optional<Interval> bucketInterval(DateTime dt)
{
  return wrappedSpec == null ? Optional.<Interval>absent() : wrappedSpec.bucketInterval(dt);
}
/**
 * Standard 31-based hash over the spec's fields, consistent with equals().
 * segmentGranularity and queryGranularity are assumed non-null here; the two
 * nullable fields contribute 0 when absent.
 */
@Override
public int hashCode()
{
  int hash = segmentGranularity.hashCode();
  hash = hash * 31 + queryGranularity.hashCode();
  hash = hash * 31 + rollup.hashCode();
  hash = hash * 31 + (inputIntervals == null ? 0 : inputIntervals.hashCode());
  hash = hash * 31 + (wrappedSpec == null ? 0 : wrappedSpec.hashCode());
  return hash;
}
/**
 * Precomputes a lookup from each bucket interval's start millis to its ordinal
 * position among the config's bucket intervals.
 *
 * NOTE(review): bucketIntervals().get() throws if the spec has no buckets —
 * presumably callers guarantee they exist by this point; confirm against callers.
 */
public DeterminePartitionsDimSelectionMapperHelper(HadoopDruidIndexerConfig config, String partitionDimension)
{
  this.config = config;
  this.partitionDimension = partitionDimension;

  final ImmutableMap.Builder<Long, Integer> builder = ImmutableMap.builder();
  int position = 0;
  for (final Interval bucket : config.getGranularitySpec().bucketIntervals().get()) {
    builder.put(bucket.getStartMillis(), position++);
  }
  this.intervalIndexes = builder.build();
}
/**
 * Two intervals sharing a start instant overlap; the constructor must reject them
 * with an IllegalArgumentException.
 */
@Test
public void testOverlapViolationSameStartInstant()
{
  final List<Interval> intervals = Lists.newArrayList(
      Intervals.of("2012-01-03T00Z/2012-01-04T00Z"),
      Intervals.of("2012-01-03T00Z/2012-01-05T00Z")
  );
  boolean thrown = false;
  try {
    // The instance itself is never used — we only care whether construction throws,
    // so don't assign it to an (unused) local.
    new ArbitraryGranularitySpec(Granularities.NONE, intervals);
  }
  catch (IllegalArgumentException e) {
    thrown = true;
  }
  Assert.assertTrue("Exception thrown", thrown);
}
/**
 * Returns the spec's bucket intervals condensed into a minimal list of
 * non-overlapping intervals, or absent when the spec has no buckets.
 */
public Optional<List<Interval>> getIntervals()
{
  final Optional<SortedSet<Interval>> buckets =
      schema.getDataSchema().getGranularitySpec().bucketIntervals();
  if (!buckets.isPresent()) {
    return Optional.absent();
  }
  return Optional.of((List<Interval>) JodaUtils.condenseIntervals(buckets.get()));
}
/**
 * Ready immediately when the spec declares no explicit intervals (nothing to lock
 * up front); otherwise delegates to the interval-aware overload.
 *
 * The if/else returning boolean literals is collapsed into a single boolean
 * expression, the idiomatic form for this check.
 */
@Override
public boolean isReady(TaskActionClient taskActionClient) throws Exception
{
  final Optional<SortedSet<Interval>> intervals = ingestionSchema.getDataSchema()
                                                                 .getGranularitySpec()
                                                                 .bucketIntervals();
  return !intervals.isPresent() || isReady(taskActionClient, intervals.get());
}
/**
 * Ready immediately when the spec declares no explicit intervals; otherwise ready
 * only once the lock over those intervals has been acquired.
 */
@Override
public boolean isReady(TaskActionClient taskActionClient)
{
  final Optional<SortedSet<Interval>> intervals = ingestionSchema.getDataSchema()
                                                                 .getGranularitySpec()
                                                                 .bucketIntervals();
  if (!intervals.isPresent()) {
    // No explicit intervals: nothing to lock before the task runs.
    return true;
  }
  return checkLockAcquired(taskActionClient, intervals.get());
}
/**
 * Ready immediately when the spec declares no explicit intervals; otherwise defers
 * to the interval-aware overload.
 */
@Override
public boolean isReady(TaskActionClient taskActionClient) throws Exception
{
  final Optional<SortedSet<Interval>> bucketIntervals = ingestionSchema.getDataSchema()
                                                                       .getGranularitySpec()
                                                                       .bucketIntervals();
  if (bucketIntervals.isPresent()) {
    return isReady(taskActionClient, bucketIntervals.get());
  }
  return true;
}