DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp));
final Interval interval = new Interval(truncatedDateTime, segmentGranularity.increment(truncatedDateTime));
Granularities.DAY
    .bucketStart(new DateTime((long) input.get(DruidConstants.DEFAULT_TIMESTAMP_COLUMN)))
    .getMillis()
DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(truncatedTime));
final Interval interval = new Interval(truncatedDateTime, segmentGranularity.increment(truncatedDateTime));
@Override
public String apply(long value)
{
  final long truncated = granularity.bucketStart(DateTimes.utc(value)).getMillis();
  return formatter == null ? String.valueOf(truncated) : formatter.print(truncated);
}
public DateTime bucketEnd(DateTime time)
{
  return increment(bucketStart(time));
}
@Override
public int compare(Result<T> r1, Result<T> r2)
{
  return Longs.compare(
      gran.bucketStart(r1.getTimestamp()).getMillis(),
      gran.bucketStart(r2.getTimestamp()).getMillis()
  );
}
@Override
public long apply(Row row)
{
  return query.getGranularity().bucketStart(row.getTimestamp()).getMillis();
}
};
@Override
protected void innerMap(
    InputRow inputRow,
    Context context,
    boolean reportParseExceptions
) throws IOException, InterruptedException
{
  final List<Object> groupKey = Rows.toGroupKey(
      rollupGranularity.bucketStart(inputRow.getTimestamp()).getMillis(),
      inputRow
  );
  context.write(
      new BytesWritable(HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(groupKey)),
      NullWritable.get()
  );
}
}
/**
 * Return a granularity-sized Interval containing a particular DateTime.
 */
public final Interval bucket(DateTime t)
{
  DateTime start = bucketStart(t);
  return new Interval(start, increment(start));
}
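The helpers above (bucketStart plus increment, wrapped by bucket and bucketEnd) define the granularity bucket that contains a timestamp. Below is a minimal standalone sketch of those semantics, not taken from the source: the class name and sample timestamp are invented, and the imports assume the org.apache.druid package layout (older Druid releases used io.druid).

import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.joda.time.DateTime;
import org.joda.time.Interval;

public class BucketStartSketch
{
  public static void main(String[] args)
  {
    final Granularity day = Granularities.DAY;
    final DateTime t = DateTimes.utc(1505055000000L); // arbitrary instant, illustrative only

    DateTime start = day.bucketStart(t);  // start of the UTC day containing t
    Interval bucket = day.bucket(t);      // [start, increment(start)), i.e. the whole day
    DateTime end = day.bucketEnd(t);      // increment(bucketStart(t)), start of the next day

    System.out.println(start + " / " + bucket + " / " + end);
  }
}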
private IntervalIterator(Interval inputInterval)
{
  this.inputInterval = inputInterval;
  currStart = bucketStart(inputInterval.getStart());
  currEnd = increment(currStart);
}
private Comparator<Row> getTimeComparator(boolean granular)
{
  if (Granularities.ALL.equals(getGranularity())) {
    return null;
  } else if (granular) {
    return (lhs, rhs) -> Longs.compare(
        getGranularity().bucketStart(lhs.getTimestamp()).getMillis(),
        getGranularity().bucketStart(rhs.getTimestamp()).getMillis()
    );
  } else {
    return NON_GRANULAR_TIME_COMP;
  }
}
private DateTime adjustTimestamp(final Row row)
{
  if (query.getGranularity() instanceof AllGranularity) {
    return row.getTimestamp();
  } else {
    return query.getGranularity().bucketStart(row.getTimestamp());
  }
}
}
/**
 * Get the proper bucket for some input row.
 *
 * @param inputRow an InputRow
 *
 * @return the Bucket that this row belongs to
 */
public Optional<Bucket> getBucket(InputRow inputRow)
{
  final Optional<Interval> timeBucket = schema.getDataSchema().getGranularitySpec().bucketInterval(
      DateTimes.utc(inputRow.getTimestampFromEpoch())
  );
  if (!timeBucket.isPresent()) {
    return Optional.absent();
  }

  final DateTime bucketStart = timeBucket.get().getStart();
  final ShardSpec actualSpec = shardSpecLookups.get(bucketStart.getMillis())
                                               .getShardSpec(
                                                   rollupGran.bucketStart(inputRow.getTimestamp()).getMillis(),
                                                   inputRow
                                               );
  final HadoopyShardSpec hadoopyShardSpec = hadoopShardSpecLookup.get(bucketStart.getMillis()).get(actualSpec);

  return Optional.of(
      new Bucket(
          hadoopyShardSpec.getShardNum(),
          bucketStart,
          actualSpec.getPartitionNum()
      )
  );
}
@Override
public ScheduledExecutors.Signal doCall()
{
  if (stopped) {
    log.info("Stopping flusher thread");
    return ScheduledExecutors.Signal.STOP;
  }

  // Any sink keyed at a bucket start earlier than this can no longer receive
  // rows inside the serving window, so it is safe to flush.
  long minTimestamp = segmentGranularity.bucketStart(
      getRejectionPolicy().getCurrMaxTime().minus(windowMillis)
  ).getMillis();

  List<Map.Entry<Long, Sink>> sinksToPush = Lists.newArrayList();
  for (Map.Entry<Long, Sink> entry : getSinks().entrySet()) {
    final Long intervalStart = entry.getKey();
    if (intervalStart < minTimestamp) {
      log.info("Adding entry[%s] to flush.", entry);
      sinksToPush.add(entry);
    }
  }

  for (final Map.Entry<Long, Sink> entry : sinksToPush) {
    flushAfterDuration(entry.getKey(), entry.getValue());
  }

  if (stopped) {
    log.info("Stopping flusher thread");
    return ScheduledExecutors.Signal.STOP;
  } else {
    return ScheduledExecutors.Signal.REPEAT;
  }
}
};
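A hedged, self-contained illustration of the flush threshold computed above, reusing the imports from the earlier sketch; the HOUR granularity, the 10-minute window, and the sample sink key are invented values. A sink becomes eligible to flush once its bucket-start key falls before bucketStart(currMaxTime - windowMillis).

DateTime currMaxTime = DateTimes.utc(System.currentTimeMillis());
long windowMillis = 10 * 60 * 1000L; // illustrative 10-minute window

// Bucket start of the oldest instant that is still inside the window.
long minTimestamp = Granularities.HOUR
    .bucketStart(currMaxTime.minus(windowMillis))
    .getMillis();

// A sink keyed three hours in the past lies entirely before the window, so it would be flushed.
long sinkKey = Granularities.HOUR.bucketStart(currMaxTime.minus(3 * 60 * 60 * 1000L)).getMillis();
boolean eligibleToFlush = sinkKey < minTimestamp;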
DateTime minTimestampAsDate = segmentGranularity.bucketStart(
    DateTimes.utc(Math.max(windowMillis, rejectionPolicy.getCurrMaxTime().getMillis()) - windowMillis)
);
rollupGranularity.bucketStart(inputRow.getTimestamp()).getMillis(), inputRow );
private SegmentIdentifier getSegmentIdentifier(long timestamp)
{
  if (!rejectionPolicy.accept(timestamp)) {
    return null;
  }

  final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
  final VersioningPolicy versioningPolicy = config.getVersioningPolicy();

  DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp));
  final long truncatedTime = truncatedDateTime.getMillis();

  // Segments are keyed by the start millis of the segment-granularity bucket containing
  // the timestamp; the identifier is created lazily the first time a bucket is seen.
  SegmentIdentifier retVal = segments.get(truncatedTime);
  if (retVal == null) {
    final Interval interval = new Interval(
        truncatedDateTime,
        segmentGranularity.increment(truncatedDateTime)
    );

    retVal = new SegmentIdentifier(
        schema.getDataSource(),
        interval,
        versioningPolicy.getVersion(interval),
        config.getShardSpec()
    );
    addSegment(retVal);
  }

  return retVal;
}
private Sink getSink(long timestamp)
{
  if (!rejectionPolicy.accept(timestamp)) {
    return null;
  }

  final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
  final VersioningPolicy versioningPolicy = config.getVersioningPolicy();

  DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp));
  final long truncatedTime = truncatedDateTime.getMillis();

  // Sinks are keyed by the start millis of the segment-granularity bucket; a new Sink
  // covering [bucketStart, increment(bucketStart)) is created lazily for unseen buckets.
  Sink retVal = sinks.get(truncatedTime);
  if (retVal == null) {
    final Interval sinkInterval = new Interval(
        truncatedDateTime,
        segmentGranularity.increment(truncatedDateTime)
    );

    retVal = new Sink(
        sinkInterval,
        schema,
        config.getShardSpec(),
        versioningPolicy.getVersion(sinkInterval),
        config.getMaxRowsInMemory(),
        config.isReportParseExceptions()
    );
    addSink(retVal);
  }

  return retVal;
}
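getSegmentIdentifier and getSink both key their lookup maps by the start millis of the segment-granularity bucket, so every timestamp that falls into the same bucket resolves to the same segment or sink. A hedged sketch of that keying idea follows; the method name, DAY granularity, and collection types are illustrative, and java.util imports are needed in addition to the Druid ones shown earlier.

static Map<Long, List<InputRow>> groupByDayBucket(Iterable<InputRow> rows)
{
  final Map<Long, List<InputRow>> rowsByBucket = new HashMap<>();
  for (InputRow row : rows) {
    // Rows within the same DAY bucket share a key, and hence the same sink/segment.
    final long bucketKey = Granularities.DAY.bucketStart(row.getTimestamp()).getMillis();
    rowsByBucket.computeIfAbsent(bucketKey, k -> new ArrayList<>()).add(row);
  }
  return rowsByBucket;
}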
@Override
public Result<TimeseriesResultValue> apply(Result<TimeseriesResultValue> arg1, Result<TimeseriesResultValue> arg2)
{
  if (arg1 == null) {
    return arg2;
  }

  if (arg2 == null) {
    return arg1;
  }

  TimeseriesResultValue arg1Val = arg1.getValue();
  TimeseriesResultValue arg2Val = arg2.getValue();

  Map<String, Object> retVal = new LinkedHashMap<String, Object>();

  for (AggregatorFactory factory : aggregations) {
    final String metricName = factory.getName();
    retVal.put(metricName, factory.combine(arg1Val.getMetric(metricName), arg2Val.getMetric(metricName)));
  }

  return (gran instanceof AllGranularity)
         ? new Result<TimeseriesResultValue>(arg1.getTimestamp(), new TimeseriesResultValue(retVal))
         : new Result<TimeseriesResultValue>(gran.bucketStart(arg1.getTimestamp()), new TimeseriesResultValue(retVal));
}
: gran.bucketStart(arg1.getTimestamp());