// Runs one aggregation pass by delegating to getAndProcessTimelineAggregationCandidates().
// NOTE(review): fragment — the trailing "} }," closes an anonymous class and an argument
// list defined outside this view; the enclosing call is not visible here.
@Override public void run() { getAndProcessTimelineAggregationCandidates(); } },
// Fragment: walk every aggregation level and aggregate chunks at each one.
// NOTE(review): the enclosing method signature and the body of the trailing "if" are
// outside this view.
for (int aggregationLevel = 0; aggregationLevel < config.getMaxAggregationLevel(); aggregationLevel++) {
    // Snapshot the running totals so per-level deltas can be computed after the pass.
    final long startingAggregatesCreated = aggregatesCreated.get();
    final Map<String, Long> initialCounters = captureAggregatorCounters();
    // chunkCountsToAggregate may hold fewer entries than there are levels;
    // deeper levels are clamped to the last configured entry.
    final int chunkCountIndex = aggregationLevel >= chunkCountsToAggregate.length ? chunkCountsToAggregate.length - 1 : aggregationLevel;
    final int chunksToAggregate = Integer.parseInt(chunkCountsToAggregate[chunkCountIndex]);
    streamingAggregateLevel(aggregationLevel, chunksToAggregate);
    // Compute what this level actually produced.
    final Map<String, Long> counterDeltas = subtractFromAggregatorCounters(initialCounters);
    final long netAggregatesCreated = aggregatesCreated.get() - startingAggregatesCreated;
    // Branch taken when the level created no new aggregates (body continues beyond this fragment).
    if (netAggregatesCreated == 0) {
// Fragment: aggregate the accumulated candidates for this level, add the number of
// aggregates produced to the running counter, then flush the pending batch of
// inserts/deletes via performWrites().
aggregatesCreated.addAndGet(aggregateTimelineCandidates(sourceTimelineCandidates, aggregationLevel, chunksToAggregate)); performWrites();
/**
 * Aggregates the given candidate chunks (all sharing the sourceId/metricId of the first
 * candidate) in consecutive groups of {@code chunksToAggregate}. A trailing group smaller
 * than {@code chunksToAggregate} is left un-aggregated.
 *
 * @param timelineChunkCandidates ordered candidate chunks; must be non-empty
 * @param aggregationLevel        the aggregation level being produced
 * @param chunksToAggregate       number of source chunks combined into one aggregate
 * @return the number of aggregate chunks produced (IOException failures still count,
 *         matching the original accounting)
 */
private int aggregateTimelineCandidates(final List<TimelineChunk> timelineChunkCandidates, final int aggregationLevel, final int chunksToAggregate) {
    final TimelineChunk firstCandidate = timelineChunkCandidates.get(0);
    final int sourceId = firstCandidate.getSourceId();
    final int metricId = firstCandidate.getMetricId();
    log.debug("For sourceId {}, metricId {}, looking to aggregate {} candidates in {} chunks",
              new Object[]{sourceId, metricId, timelineChunkCandidates.size(), chunksToAggregate});
    int aggregatesCreated = 0;
    int chunkIndex = 0;
    // Consume full groups of chunksToAggregate; stop when fewer than a full group remains.
    while (timelineChunkCandidates.size() >= chunkIndex + chunksToAggregate) {
        final List<TimelineChunk> chunkCandidates = timelineChunkCandidates.subList(chunkIndex, chunkIndex + chunksToAggregate);
        chunkIndex += chunksToAggregate;
        timelineChunksCombined.addAndGet(chunksToAggregate);
        try {
            aggregateHostSampleChunks(chunkCandidates, aggregationLevel);
        }
        catch (IOException e) {
            // BUG FIX: the original mixed an SLF4J "{}" placeholder into a String.format()
            // pattern ("aggregating {} chunks, sourceId %s, ..."), so the literal "{}" was
            // printed and the chunk count was never substituted. Use pure %-style specifiers
            // and keep the exception as the trailing cause so the stack trace is logged.
            log.error(String.format("IOException aggregating %d chunks, sourceId %s, metricId %s, looking to aggregate %s candidates in %s chunks",
                                    chunksToAggregate, sourceId, metricId, timelineChunkCandidates.size(), chunksToAggregate), e);
        }
        aggregatesCreated++;
    }
    return aggregatesCreated;
}
/**
 * Persists one batch of aggregation results inside a single transaction: bulk-inserts the
 * new aggregated TimelineChunk rows, then deletes (or merely invalidates, depending on
 * {@code config.getDeleteAggregatedChunks()}) the source chunks that were combined.
 * Afterwards it updates the write/invalidate counters, clears the per-batch buffers, and
 * optionally sleeps between batches to throttle database load.
 */
private void performWrites() {
    final InternalCallContext context = createCallContext();
    // This is the atomic operation: bulk insert the new aggregated TimelineChunk objects, and delete
    // or invalidate the ones that were aggregated. This should be very fast.
    final long startWriteTime = System.currentTimeMillis();
    aggregatorSqlDao.begin();
    timelineDao.bulkInsertTimelineChunks(chunksToWrite, context);
    if (config.getDeleteAggregatedChunks()) {
        aggregatorSqlDao.deleteTimelineChunks(chunkIdsToInvalidateOrDelete, context);
    } else {
        aggregatorSqlDao.makeTimelineChunksInvalid(chunkIdsToInvalidateOrDelete, context);
    }
    aggregatorSqlDao.commit();
    // Record time spent in the transaction, then the batch sizes just written.
    msWritingDb.addAndGet(System.currentTimeMillis() - startWriteTime);
    timelineChunksWritten.addAndGet(chunksToWrite.size());
    timelineChunksInvalidatedOrDeleted.addAndGet(chunkIdsToInvalidateOrDelete.size());
    // Reset the batch buffers for the next round of aggregation.
    chunksToWrite.clear();
    chunkIdsToInvalidateOrDelete.clear();
    // Optional throttle: sleep the configured interval between batches, tracking time slept.
    final long sleepMs = config.getAggregationSleepBetweenBatches().getMillis();
    if (sleepMs > 0) {
        final long timeBeforeSleep = System.currentTimeMillis();
        try {
            Thread.sleep(sleepMs);
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers higher up the stack.
            Thread.currentThread().interrupt();
        }
        msSpentSleeping.addAndGet(System.currentTimeMillis() - timeBeforeSleep);
    }
    timelineChunkBatchesProcessed.incrementAndGet();
}
/**
 * Per-test setup: builds the timeline DAO against the test DBI, forces the
 * chunks-to-aggregate configuration to "2,2" through system properties, and
 * constructs the TimelineAggregator under test from that configuration.
 */
@BeforeMethod(groups = "mysql")
public void setUp() throws Exception {
    timelineDao = new DefaultTimelineDao(getDBI());

    // Override the aggregation batch sizes before the config object is built.
    final Properties systemProps = System.getProperties();
    systemProps.put("killbill.usage.timelines.chunksToAggregate", "2,2");
    final MeterConfig meterConfig = new ConfigurationObjectFactory(systemProps).build(MeterConfig.class);

    aggregator = new TimelineAggregator(getDBI(), timelineDao, timelineCoder, sampleCoder, meterConfig, internalCallContextFactory);
}
// Runs one aggregation pass by delegating to getAndProcessTimelineAggregationCandidates().
// NOTE(review): fragment — the trailing "});" closes an anonymous class passed to a call
// whose receiver and method are outside this view.
@Override public void run() { getAndProcessTimelineAggregationCandidates(); } });
// Fragment from a test body: verify the timeline samples for the given identifiers
// (presumably sourceId 124 / metricId 66 and an expected count — confirm against the
// checkSamplesForATimeline signature), then trigger another aggregation pass.
checkSamplesForATimeline(124, 66, 2); aggregator.getAndProcessTimelineAggregationCandidates();