public static GranularitySpec getGranularitySpec(Configuration configuration, Properties tableProperties) {
  // Table properties win over the HiveConf-level defaults for each setting.
  final String segmentGranularity =
      tableProperties.getProperty(Constants.DRUID_SEGMENT_GRANULARITY) != null
          ? tableProperties.getProperty(Constants.DRUID_SEGMENT_GRANULARITY)
          : HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_DRUID_INDEXING_GRANULARITY);
  final boolean rollup =
      tableProperties.getProperty(DruidConstants.DRUID_ROLLUP) != null
          ? Boolean.parseBoolean(tableProperties.getProperty(DruidConstants.DRUID_ROLLUP))
          : HiveConf.getBoolVar(configuration, HiveConf.ConfVars.HIVE_DRUID_ROLLUP);
  // Query granularity defaults to NONE when the table does not set one.
  return new UniformGranularitySpec(
      Granularity.fromString(segmentGranularity),
      Granularity.fromString(
          tableProperties.getProperty(DruidConstants.DRUID_QUERY_GRANULARITY) == null
              ? "NONE"
              : tableProperties.getProperty(DruidConstants.DRUID_QUERY_GRANULARITY)),
      rollup,
      null);
}
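// A minimal usage sketch (my own illustration, not from the source): table-level
// properties take precedence over the HiveConf defaults, so setting them on the
// Properties object is enough to control the resulting spec. The property keys are
// the real Hive/Druid constants; the granularity values below are made-up examples.
Properties tableProperties = new Properties();
tableProperties.setProperty(Constants.DRUID_SEGMENT_GRANULARITY, "DAY");
tableProperties.setProperty(DruidConstants.DRUID_ROLLUP, "true");
tableProperties.setProperty(DruidConstants.DRUID_QUERY_GRANULARITY, "HOUR");
// With no table properties set, the HiveConf values (or their defaults) win instead.
GranularitySpec spec = getGranularitySpec(new Configuration(), tableProperties);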
// Truncate the timestamp to the start of its segment-granularity bucket, then
// build the segment interval [bucketStart, bucketStart + granularity).
DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(truncatedTime));
final Interval interval = new Interval(truncatedDateTime, segmentGranularity.increment(truncatedDateTime));
@Override
public DateTime toDate(String filePath, Formatter formatter) {
  Integer[] vals = getDateValues(filePath, formatter);
  GranularityType granularityType = GranularityType.fromPeriod(period);
  DateTime date = granularityType.getDateTime(vals);
  if (date != null) {
    return bucketStart(date);
  }
  return null;
}
Granularities.DAY
    .bucketStart(new DateTime((long) input.get(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)))
    .getMillis())
    .build()))
@Override
public String toString() {
  return "{type=period, " +
      "period=" + getPeriod() +
      ", timeZone=" + getTimeZone() +
      ", origin=" + getOrigin() +
      '}';
}
Granularity create(DateTime origin, DateTimeZone tz) {
  if (period != null && (origin != null || tz != null)) {
    return new PeriodGranularity(period, origin, tz);
  } else {
    // If All or None granularity, or if origin and tz are both null, return the cached granularity
    return defaultGranularity;
  }
}
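// Sketch of the caching behavior above (illustrative, not from the source):
// resolving a plain granularity name hands back the cached default instance,
// while an explicit origin or time zone forces a fresh PeriodGranularity.
Granularity cached = Granularity.fromString("DAY"); // cached default, no origin/tz
Granularity shifted = new PeriodGranularity(
    new Period("P1D"),
    DateTime.parse("2024-01-01T06:00:00Z"),     // custom origin
    DateTimeZone.forID("America/Los_Angeles")); // custom zone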
GranularityType(GranularityType granularityType, String period) {
  this(
      granularityType.getHiveFormat(),
      granularityType.getLowerDefaultFormat(),
      granularityType.getDefaultFormat(),
      granularityType.dateValuePositions,
      period);
}
@Override
public DateTime decrement(DateTime time) {
  return new DateTime(decrement(time.getMillis()), getTimeZone());
}
@Override
public DateTime bucketStart(DateTime time) {
  return new DateTime(truncate(time.getMillis()), getTimeZone());
}
@Override
public DateTimeZone getTimezone() {
  return granularity instanceof PeriodGranularity
      ? ((PeriodGranularity) granularity).getTimeZone()
      : DateTimeZone.UTC;
}
public DateTime toDate(String filePath) {
  return toDate(filePath, Formatter.DEFAULT);
}
@Override
public void serializeWithType(
    JsonGenerator jsonGenerator,
    SerializerProvider serializerProvider,
    TypeSerializer typeSerializer
) throws IOException, JsonProcessingException {
  serialize(jsonGenerator, serializerProvider);
}
public Iterable<Interval> getIterable(final Interval input) {
  return new IntervalIterable(input);
}
@Override
public Iterator<Interval> iterator() {
  return new IntervalIterator(inputInterval);
}
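// Putting IntervalIterable/IntervalIterator to work (illustrative sketch, assuming
// Druid's Granularities and Joda-Time): the iterator walks granularity-aligned
// buckets, starting at bucketStart of the interval's start, until a bucket start
// passes the end of the input interval.
Interval span = new Interval(
    DateTime.parse("2024-01-01T06:00:00Z"),
    DateTime.parse("2024-01-03T18:00:00Z"));
for (Interval bucket : Granularities.DAY.getIterable(span)) {
  System.out.println(bucket); // 01-01/01-02, 01-02/01-03, 01-03/01-04 (UTC days)
}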
DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp));
final Interval interval = new Interval(truncatedDateTime, segmentGranularity.increment(truncatedDateTime));
public TimeseriesQueryBuilder granularity(String g) {
  granularity = Granularity.fromString(g);
  return this;
}
public DateTime bucketEnd(DateTime time) {
  return increment(bucketStart(time));
}
/**
 * Return a granularity-sized Interval containing a particular DateTime.
 */
public final Interval bucket(DateTime t) {
  DateTime start = bucketStart(t);
  return new Interval(start, increment(start));
}
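// How bucket(), bucketStart(), and bucketEnd() relate (illustrative example built
// from the two methods above): for any t, bucket(t) is the half-open interval
// [bucketStart(t), bucketEnd(t)), since bucketEnd is the increment of the start.
DateTime t = DateTime.parse("2024-03-15T13:45:00Z");
Interval day = Granularities.DAY.bucket(t);
// day.getStart() -> 2024-03-15T00:00:00.000Z (bucketStart of t)
// day.getEnd()   -> 2024-03-16T00:00:00.000Z (bucketEnd, i.e. increment of the start)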
private IntervalIterator(Interval inputInterval) {
  this.inputInterval = inputInterval;
  currStart = bucketStart(inputInterval.getStart());
  currEnd = increment(currStart);
}