final byte[] allCompressedTime = timelineCoder.compressDateTimes(dateTimes); final List<DateTime> restoredTimes = timelineCoder.decompressDateTimes(allCompressedTime); Assert.assertEquals(restoredTimes.size(), dateTimes.size()); for (int i = 0; i < count; i++) { final int fragIndex = fragCounter * fragmentLength; final List<DateTime> fragment = dateTimes.subList(fragIndex, Math.min(count, fragIndex + fragmentLength)); fragments.add(timelineCoder.compressDateTimes(fragment)); final byte[] combined = timelineCoder.combineTimelines(fragments, null); final List<DateTime> restoredDateTimes = timelineCoder.decompressDateTimes(combined);
@Test(groups = "fast") public void testBasicEncodeDecode() throws Exception { final DateTime firstTime = DateTimeUtils.dateTimeFromUnixSeconds(1000000); final List<DateTime> unencodedTimes = makeSomeTimes(firstTime); final byte[] compressedTimes = timelineCoder.compressDateTimes(unencodedTimes); //System.out.printf("Compressed times: %s\n", new String(Hex.encodeHex(compressedTimes))); final List<DateTime> decompressedTimes = timelineCoder.decompressDateTimes(compressedTimes); Assert.assertEquals(decompressedTimes.size(), unencodedTimes.size()); for (int i = 0; i < unencodedTimes.size(); i++) { Assert.assertEquals(decompressedTimes.get(i), unencodedTimes.get(i)); } }
/**
 * Combines four short pre-encoded fragments and checks that the combined
 * timeline decodes to exactly the concatenation of the individually decoded
 * fragments, in order.
 */
@Test(groups = "fast")
public void testCombiningShortFragments() throws Exception {
    final byte[] fragment0 = new byte[]{(byte) -1, (byte) 0, (byte) 15, (byte) 66, (byte) 84, (byte) 20};
    final byte[] fragment1 = new byte[]{(byte) -1, (byte) 0, (byte) 15, (byte) 66, (byte) -122, (byte) 30};
    final byte[] fragment2 = new byte[]{(byte) -1, (byte) 0, (byte) 15, (byte) 66, (byte) -62, (byte) 30};
    final byte[] fragment3 = new byte[]{(byte) -1, (byte) 0, (byte) 15, (byte) 66, (byte) -2, (byte) 30};
    final List<byte[]> fragmentList = Arrays.asList(fragment0, fragment1, fragment2, fragment3);
    final byte[] combined = timelineCoder.combineTimelines(fragmentList, null);
    final List<DateTime> restoredTimes = timelineCoder.decompressDateTimes(combined);
    // Decode each fragment on its own to build the expected flattened sequence
    final List<DateTime> allFragmentTimes = new ArrayList<DateTime>();
    for (final byte[] fragment : fragmentList) {
        for (final DateTime time : timelineCoder.decompressDateTimes(fragment)) {
            allFragmentTimes.add(time);
        }
    }
    Assert.assertEquals(restoredTimes.size(), allFragmentTimes.size());
    for (int i = 0; i < allFragmentTimes.size(); i++) {
        Assert.assertEquals(restoredTimes.get(i), allFragmentTimes.get(i));
    }
}
/**
 * This method queues a map of TimelineChunks extracted from the
 * TimelineChunkAccumulators to be written to the db. When memory chunks are
 * requested, any queued chunk will be included in the list.
 */
public synchronized void extractAndQueueTimelineChunks() {
    if (times.size() == 0) {
        // Nothing accumulated yet: nothing to queue
        return;
    }
    final byte[] timeBytes = timelineCoder.compressDateTimes(times);
    final Map<Integer, TimelineChunk> chunkMap = new HashMap<Integer, TimelineChunk>();
    // One chunk per metric, sharing the same compressed time bytes
    for (final Map.Entry<Integer, TimelineChunkAccumulator> entry : timelines.entrySet()) {
        chunkMap.put(entry.getKey(), entry.getValue().extractTimelineChunkAndReset(startTime, endTime, timeBytes));
    }
    // Reset the accumulation state before handing the chunks to the writer
    times.clear();
    sampleCount = 0;
    final PendingChunkMap newChunkMap = new PendingChunkMap(this, pendingChunkMapIdCounter++, chunkMap);
    pendingChunkMaps.add(newChunkMap);
    backgroundWriter.addPendingChunkMap(newChunkMap);
}
unencodedTimes2.add(DateTimeUtils.dateTimeFromUnixSeconds(firstTime + sampleCount * 100 + i * 100)); final byte[] compressedTimes1 = timelineCoder.compressDateTimes(unencodedTimes1); final byte[] compressedTimes2 = timelineCoder.compressDateTimes(unencodedTimes2); Assert.assertEquals(compressedTimes1.length, 8); Assert.assertEquals(compressedTimes1[0] & 0xff, TimelineOpcode.FULL_TIME.getOpcodeIndex()); timesList.add(compressedTimes1); timesList.add(compressedTimes2); final byte[] combinedTimes = timelineCoder.combineTimelines(timesList, null); Assert.assertEquals(combinedTimes.length, 8); Assert.assertEquals(combinedTimes[0] & 0xff, TimelineOpcode.FULL_TIME.getOpcodeIndex()); Assert.assertEquals(timelineCoder.countTimeBytesSamples(combinedTimes), 20);
unencodedTimes2.add(DateTimeUtils.dateTimeFromUnixSeconds(firstTime + sampleCount * 100 + i * 100)); final byte[] compressedTimes1 = timelineCoder.compressDateTimes(unencodedTimes1); final byte[] compressedTimes2 = timelineCoder.compressDateTimes(unencodedTimes2); Assert.assertEquals(compressedTimes1.length, 8); Assert.assertEquals(compressedTimes1[0] & 0xff, TimelineOpcode.FULL_TIME.getOpcodeIndex()); timesList.add(compressedTimes1); timesList.add(compressedTimes2); final byte[] combinedTimes = timelineCoder.combineTimelines(timesList, null); Assert.assertEquals(combinedTimes.length, 9); Assert.assertEquals(combinedTimes[0] & 0xff, TimelineOpcode.FULL_TIME.getOpcodeIndex());
/**
 * Decodes each hex-encoded timeline, then verifies that combining the parts
 * yields the same sample sequence (and the same total sample count) as simply
 * concatenating the raw encoded bytes.
 */
private void checkCombinedTimelines(final String... timelines) throws Exception {
    final List<byte[]> timeParts = new ArrayList<byte[]>();
    for (final String timeline : timelines) {
        timeParts.add(Hex.decodeHex(timeline.toCharArray()));
    }
    int sampleCount = 0;
    int byteCount = 0;
    for (final byte[] timePart : timeParts) {
        byteCount += timePart.length;
        sampleCount += timelineCoder.countTimeBytesSamples(timePart);
    }
    // Raw concatenation of the encoded parts, used as the reference stream
    final byte[] concatedTimes = new byte[byteCount];
    int offset = 0;
    for (final byte[] timePart : timeParts) {
        System.arraycopy(timePart, 0, concatedTimes, offset, timePart.length);
        offset += timePart.length;
    }
    final byte[] newCombined = timelineCoder.combineTimelines(timeParts, null);
    final int newCombinedLength = timelineCoder.countTimeBytesSamples(newCombined);
    final DefaultTimelineCursor concatedCursor = new DefaultTimelineCursor(concatedTimes, sampleCount);
    final DefaultTimelineCursor combinedCursor = new DefaultTimelineCursor(newCombined, sampleCount);
    for (int i = 0; i < sampleCount; i++) {
        final DateTime concatedTime = concatedCursor.getNextTime();
        Assert.assertEquals(combinedCursor.getNextTime(), concatedTime);
    }
    Assert.assertEquals(newCombinedLength, sampleCount);
}
}
@Test(groups = "fast") public void testCombiningTimesError() throws Exception { final byte[] times1 = Hex.decodeHex("ff10000001fe0310ff1000011bfe0310".toCharArray()); final byte[] times2 = Hex.decodeHex("ff10000160".toCharArray()); final List<byte[]> timesList = new ArrayList<byte[]>(); timesList.add(times1); timesList.add(times2); final byte[] combinedTimes = timelineCoder.combineTimelines(timesList, null); final String hexCombinedTimes = new String(Hex.encodeHex(combinedTimes)); //System.out.printf("Combined times: %s\n", hexCombinedTimes); Assert.assertEquals(hexCombinedTimes, "ff10000001fe0310eafe031015"); }
public synchronized Collection<TimelineChunk> getInMemoryTimelineChunks(final List<Integer> metricIds) throws IOException { final List<TimelineChunk> timelineChunks = new ArrayList<TimelineChunk>(); // Get all the older chunks from the staging area of the BackgroundDBChunkWriter for (final PendingChunkMap pendingChunkMap : pendingChunkMaps) { for (final Integer metricId : metricIds) { final TimelineChunk timelineChunkForMetricId = pendingChunkMap.getChunkMap().get(metricId); if (timelineChunkForMetricId != null) { timelineChunks.add(timelineChunkForMetricId); } } } // Get the data in this accumulator, not yet in the staging area // This is very similar to extractAndQueueTimelineChunks() above, but without changing the global state final byte[] timeBytes = timelineCoder.compressDateTimes(times); for (final Integer metricId : metricIds) { final TimelineChunkAccumulator chunkAccumulator = timelines.get(metricId); if (chunkAccumulator != null) { // Extract the timeline for this chunk by copying it and reading encoded bytes final TimelineChunkAccumulator chunkAccumulatorCopy = chunkAccumulator.deepCopy(); final TimelineChunk timelineChunk = chunkAccumulatorCopy.extractTimelineChunkAndReset(startTime, endTime, timeBytes); timelineChunks.add(timelineChunk); } } return timelineChunks; }
/**
 * Same round-trip as testBasicEncodeDecode but with a different starting
 * epoch second, re-verifying compress/decompress symmetry.
 */
@Test(groups = "fast")
public void testRepeats() throws Exception {
    final DateTime firstTime = DateTimeUtils.dateTimeFromUnixSeconds(1293846);
    final List<DateTime> unencodedTimes = makeSomeTimes(firstTime);
    final List<DateTime> decompressedTimes =
            timelineCoder.decompressDateTimes(timelineCoder.compressDateTimes(unencodedTimes));
    Assert.assertEquals(decompressedTimes.size(), unencodedTimes.size());
    int index = 0;
    for (final DateTime expected : unencodedTimes) {
        Assert.assertEquals(decompressedTimes.get(index++), expected);
    }
}
timelineChunkIds.add(timelineChunk.getChunkId()); final byte[] combinedTimeBytes = timelineCoder.combineTimelines(timeParts, sampleCount); final byte[] combinedSampleBytes = sampleCoder.combineSampleBytes(sampleParts); final int timeBytesLength = combinedTimeBytes.length;
/**
 * Builds a fixture chunk: SAMPLE_COUNT long samples (all 10L) spaced one
 * minute apart starting at START_TIME.
 */
@BeforeMethod(groups = "fast")
public void setUp() throws Exception {
    final List<DateTime> dateTimes = new ArrayList<DateTime>();
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    final DataOutputStream output = new DataOutputStream(out);
    try {
        for (int i = 0; i < SAMPLE_COUNT; i++) {
            sampleCoder.encodeSample(output, new ScalarSample<Long>(SampleOpcode.LONG, 10L));
            dateTimes.add(START_TIME.plusMinutes(i));
        }
        output.flush();
    } finally {
        output.close();
    }
    samples = out.toByteArray();
    final DateTime endTime = dateTimes.get(dateTimes.size() - 1);
    timeBytes = timelineCoder.compressDateTimes(dateTimes);
    chunk = new TimelineChunk(CHUNK_ID, HOST_ID, SAMPLE_KIND_ID, START_TIME, endTime, timeBytes, samples, SAMPLE_COUNT);
}
@Test(groups = "fast") public void test65KRepeats() throws Exception { final int count = 0; final List<DateTime> dateTimes = new ArrayList<DateTime>(); DateTime time = DateTimeUtils.dateTimeFromUnixSeconds(1000000); for (int i = 0; i < 20; i++) { time = time.plusSeconds(200); dateTimes.add(time); } for (int i = 0; i < 0xFFFF + 100; i++) { time = time.plusSeconds(100); dateTimes.add(time); } final byte[] timeBytes = timelineCoder.compressDateTimes(dateTimes); final String hex = new String(Hex.encodeHex(timeBytes)); // Here are the compressed samples: ff000f4308fe13c8fdffff64fe6464 // Translation: // [ff 00 0f 43 08] means absolution time 1000000 // [fe 13 c8] means repeat 19 times delta 200 seconds // [fd ff ff 64] means repeat 65525 times delta 100 seconds // [fe 64 64] means repeat 100 times delta 100 seconds Assert.assertEquals(timeBytes, Hex.decodeHex("ff000f4308fe13c8fdffff64fe6464".toCharArray())); final List<DateTime> restoredSamples = timelineCoder.decompressDateTimes(timeBytes); Assert.assertEquals(restoredSamples.size(), dateTimes.size()); for (int i = 0; i < count; i++) { Assert.assertEquals(restoredSamples.get(i), DateTimeUtils.unixSeconds(dateTimes.get(i))); } }
@Test(groups = "fast") public void testTimeRangeSampleProcessor() throws Exception { final DateTime startTime = new DateTime(dateFormatter.parseDateTime("2012-03-23T17:35:11.000Z")); final DateTime endTime = new DateTime(dateFormatter.parseDateTime("2012-03-23T17:35:17.000Z")); final int sampleCount = 2; final List<DateTime> dateTimes = ImmutableList.<DateTime>of(startTime, endTime); final byte[] compressedTimes = timelineCoder.compressDateTimes(dateTimes); final TimelineCursor cursor = new DefaultTimelineCursor(compressedTimes, sampleCount); Assert.assertEquals(cursor.getNextTime(), startTime); Assert.assertEquals(cursor.getNextTime(), endTime); // 2 x the value 12: REPEAT_BYTE, SHORT, 2, SHORT, 12 (2 bytes) final byte[] samples = new byte[]{(byte) 0xff, 2, 2, 0, 12}; final AtomicInteger samplesCount = new AtomicInteger(0); sampleCoder.scan(samples, compressedTimes, sampleCount, new TimeRangeSampleProcessor(startTime, endTime) { @Override public void processOneSample(final DateTime time, final SampleOpcode opcode, final Object value) { if (samplesCount.get() == 0) { Assert.assertEquals(DateTimeUtils.unixSeconds(time), DateTimeUtils.unixSeconds(startTime)); } else { Assert.assertEquals(DateTimeUtils.unixSeconds(time), DateTimeUtils.unixSeconds(endTime)); } samplesCount.incrementAndGet(); } }); Assert.assertEquals(samplesCount.get(), sampleCount); }
/** * This method simulates adding a ton of timelines, in more-or-less the way they would be added in real life. */ private void insertManyTimelines() throws Exception { final List<TimelineChunk> timelineChunkList = new ArrayList<TimelineChunk>(); DateTime startTime = new DateTime().minusDays(1); DateTime endTime = startTime.plusHours(1); final int sampleCount = 120; // 1 hours worth for (int i = 0; i < 12; i++) { for (final int hostId : hostIds) { for (final int categoryId : categoriesForHostId.get(hostId)) { final List<DateTime> dateTimes = new ArrayList<DateTime>(sampleCount); for (int sc = 0; sc < sampleCount; sc++) { dateTimes.add(startTime.plusSeconds(sc * 30)); } final byte[] timeBytes = timelineCoder.compressDateTimes(dateTimes); for (final int sampleKindId : categorySampleKindIds.get(categoryId)) { final TimelineChunk timelineChunk = makeTimelineChunk(hostId, sampleKindId, startTime, endTime, timeBytes, sampleCount); addChunkAndMaybeSave(timelineChunkList, timelineChunk); } } } if (timelineChunkList.size() > 0) { defaultTimelineDAO.bulkInsertTimelineChunks(timelineChunkList, internalCallContext); } log.info("After hour %d, inserted %d TimelineChunk rows", i, timelineChunkIdCounter.get()); startTime = endTime; endTime = endTime.plusHours(1); } }
/**
 * Compresses a double sample, checks the resulting opcode and that the
 * relative round-trip error stays within the coder's bound, then pushes the
 * sample through an accumulator and checks the encoded byte length.
 */
@SuppressWarnings("unchecked")
private void checkDoubleCodedResult(final double value, final SampleOpcode expectedOpcode, final int expectedSize) {
    final ScalarSample codedSample = sampleCoder.compressSample(new ScalarSample(SampleOpcode.DOUBLE, value));
    Assert.assertEquals(codedSample.getOpcode(), expectedOpcode);
    // Relative error; the value == 0.0 guard avoids dividing by zero
    final double relativeError = value == 0.0 ? 0.0 : Math.abs((value - codedSample.getDoubleValue()) / value);
    Assert.assertTrue(relativeError <= sampleCoder.getMaxFractionError());
    final TimelineChunkAccumulator accumulator = new TimelineChunkAccumulator(123, 456, sampleCoder);
    accumulator.addSample(codedSample);
    final DateTime now = new DateTime();
    final List<DateTime> singleTime = new ArrayList<DateTime>();
    singleTime.add(now);
    final byte[] timeBytes = timelineCoder.compressDateTimes(singleTime);
    final TimelineChunk extracted = accumulator.extractTimelineChunkAndReset(now, now, timeBytes);
    final byte[] encodedSampleBytes = extracted.getTimeBytesAndSampleBytes().getSampleBytes();
    Assert.assertEquals(encodedSampleBytes.length, expectedSize);
}
}
/**
 * Encodes one SHORT sample plus a 3x repeat of it, then scans with a
 * TimeRangeSampleProcessor and checks every visited sample sits strictly
 * inside (startTime, endTime) and carries the encoded value.
 */
@Test(groups = "fast")
public void testScan() throws Exception {
    final DateTime startTime = new DateTime(DateTimeZone.UTC);
    final DateTime endTime = startTime.plusSeconds(5);
    final List<DateTime> dateTimes = ImmutableList.<DateTime>of(
            startTime.plusSeconds(1),
            startTime.plusSeconds(2),
            startTime.plusSeconds(3),
            startTime.plusSeconds(4));
    final byte[] compressedTimes = timelineCoder.compressDateTimes(dateTimes);
    final ScalarSample<Short> sample = new ScalarSample<Short>(SampleOpcode.SHORT, (short) 4);
    final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    final DataOutputStream dataOutputStream = new DataOutputStream(outputStream);
    sampleCoder.encodeSample(dataOutputStream, sample);
    sampleCoder.encodeSample(dataOutputStream, new RepeatSample<Short>(3, sample));
    dataOutputStream.close();
    sampleCoder.scan(outputStream.toByteArray(), compressedTimes, dateTimes.size(),
            new TimeRangeSampleProcessor(startTime, endTime) {
                @Override
                public void processOneSample(final DateTime time, final SampleOpcode opcode, final Object value) {
                    Assert.assertTrue(time.isAfter(startTime));
                    Assert.assertTrue(time.isBefore(endTime));
                    Assert.assertEquals(Short.valueOf(value.toString()), sample.getSampleValue());
                }
            });
}
final byte[] compressedTimes = timelineCoder.compressDateTimes(dateTimes); final TimelineChunk chunk = accum.extractTimelineChunkAndReset(startTime, endTime, compressedTimes); final byte[] samples = chunk.getTimeBytesAndSampleBytes().getSampleBytes();
final byte[] compressedTimes = timelineCoder.compressDateTimes(dateTimes); final TimelineChunk chunk = accum.extractTimelineChunkAndReset(startTime, endTime, compressedTimes); final byte[] samples = chunk.getTimeBytesAndSampleBytes().getSampleBytes();
final byte[] times = timelineCoder.compressDateTimes(dateTimes); final TimelineChunk timelineChunk = new TimelineChunk(CHUNK_ID, HOST_ID, SAMPLE_KIND_ID, startTime, endTime, times, out.toByteArray(), sampleCount);