/**
 * Builds a non-rolling, async-wrapped InMemory Storage for test use.
 *
 * @param executor The Executor that the async wrapper dispatches operations on.
 * @return A Storage backed by a fresh InMemoryStorage (no RollingStorage layer).
 */
@VisibleForTesting
public static Storage newStorage(Executor executor) {
    InMemoryStorage baseStorage = new InMemoryStorage();
    return new AsyncStorageWrapper(baseStorage, executor);
}
/**
 * Supplies the Storage under test: a plain InMemoryStorage passed through wrap().
 */
@Override
protected Storage createStorage() {
    InMemoryStorage baseStorage = new InMemoryStorage();
    return wrap(baseStorage);
}
}
/**
 * Supplies the Storage under test: InMemoryStorage with a RollingStorage layer,
 * wrapped for async access on this fixture's executor.
 */
@Override
protected Storage createStorage() {
    RollingStorage rollingStorage = new RollingStorage(new InMemoryStorage(), DEFAULT_ROLLING_POLICY);
    return new AsyncStorageWrapper(rollingStorage, executorService());
}
/**
 * Tests the append() method: appends a number of payloads to a segment, then
 * reads the whole segment back and verifies it matches everything appended.
 */
@Test
public void testAppend() throws Exception {
    final String segmentName = "segment";
    @Cleanup
    val storage = new InMemoryStorage();
    storage.initialize(DEFAULT_EPOCH);
    storage.create(segmentName);
    val handle = storage.openWrite(segmentName);

    // Accumulate every appended payload so the read-back can be verified.
    ByteArrayOutputStream expectedStream = new ByteArrayOutputStream();
    for (int appendId = 0; appendId < APPENDS_PER_SEGMENT; appendId++) {
        byte[] appendData = String.format("Segment_%s_Append_%d", segmentName, appendId).getBytes();
        storage.append(handle, new ByteArrayInputStream(appendData), appendData.length);
        expectedStream.write(appendData);
    }

    // Read the entire segment in one call and compare with the accumulated data.
    byte[] expectedData = expectedStream.toByteArray();
    byte[] readBuffer = new byte[expectedData.length];
    int bytesRead = storage.read(handle, 0, readBuffer, 0, readBuffer.length);
    Assert.assertEquals("Unexpected number of bytes read.", readBuffer.length, bytesRead);
    AssertExtensions.assertArrayEquals("Unexpected read result.", expectedData, 0, readBuffer, 0, bytesRead);
}
/**
 * Creates a new TestContext: wires container metadata, an in-memory Storage (behind
 * a TestStorage decorator), a TestWriterDataSource with auto-inserted metadata
 * checkpoints, and the StorageWriter under test, all sharing this fixture's executor.
 */
TestContext(WriterConfig config, WriterFactory.CreateProcessors createProcessors) {
    this.metadata = new MetadataBuilder(CONTAINER_ID).build();
    this.baseStorage = new InMemoryStorage();
    this.storage = new TestStorage(this.baseStorage, executorService());
    this.storage.initialize(1); // Epoch 1; must happen before any storage operation.
    this.config = config;
    this.createProcessors = createProcessors;
    this.transactionIds = new HashMap<>();
    val dataSourceConfig = new TestWriterDataSource.DataSourceConfig();
    dataSourceConfig.autoInsertCheckpointFrequency = METADATA_CHECKPOINT_FREQUENCY;
    this.dataSource = new TestWriterDataSource(this.metadata, executorService(), dataSourceConfig);
    this.writer = new StorageWriter(this.config, this.dataSource, this.storage, this.createProcessors, executorService());
}
/**
 * Tests the ability to auto-refresh a Write Handle upon offset disagreement.
 */
@Test
public void testRefreshHandleBadOffset() throws Exception {
    // Create a segment and open two write handles to it; the second is opened before any
    // write happens, so its view of the segment becomes stale after the first write.
    @Cleanup
    val baseStorage = new InMemoryStorage();
    @Cleanup
    val s = new RollingStorage(baseStorage, DEFAULT_ROLLING_POLICY);
    s.initialize(1);
    s.create(SEGMENT_NAME);
    val h1 = s.openWrite(SEGMENT_NAME);
    val h2 = s.openWrite(SEGMENT_NAME); // Open now, before writing, so we force a refresh.
    byte[] data = "data".getBytes();
    s.write(h1, 0, new ByteArrayInputStream(data), data.length);
    // h2 still believes the segment is empty; this write at the correct offset should
    // trigger a handle refresh rather than fail.
    s.write(h2, data.length, new ByteArrayInputStream(data), data.length);

    // Verify both writes landed: the segment must contain the payload twice, in order.
    byte[] expectedData = new byte[data.length * 2];
    System.arraycopy(data, 0, expectedData, 0, data.length);
    System.arraycopy(data, 0, expectedData, data.length, data.length);
    checkWrittenData(expectedData, h2, s);
}
/**
 * Tests fencing behavior: after an ownership change on the base storage, previously
 * acquired write handles must be rejected until openWrite() is called again, while
 * newly created segments remain fully operational.
 */
@Test
@Override
public void testFencing() throws Exception {
    final String segment1 = "segment1";
    final String segment2 = "segment2";
    @Cleanup
    val baseStorage = new InMemoryStorage();
    @Cleanup
    val storage = new AsyncStorageWrapper(baseStorage, executorService());
    storage.initialize(DEFAULT_EPOCH);

    // Part 1: Create a segment and verify all operations are allowed.
    storage.create(segment1, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    SegmentHandle handle1 = storage.openWrite(segment1).join();
    verifyAllOperationsSucceed(handle1, storage);

    // Part 2: Change owner, verify segment operations are not allowed until a call to open() is made.
    baseStorage.changeOwner();
    verifyWriteOperationsFail(handle1, storage);
    handle1 = storage.openWrite(segment1).join();
    verifyAllOperationsSucceed(handle1, storage);

    // Part 3: Create new segment and verify all operations are allowed.
    storage.create(segment2, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    SegmentHandle handle2 = storage.openWrite(segment2).join();
    verifyAllOperationsSucceed(handle2, storage);

    // Cleanup.
    storage.delete(handle1, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    storage.delete(handle2, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
}
/**
 * Creates a new TestContext: wires container metadata, a TestStorage over InMemoryStorage,
 * a manually advanced timer, a TestWriterDataSource with no auto-inserted checkpoints,
 * plus one SegmentAggregator for the main segment and one per transaction segment.
 */
TestContext(WriterConfig config) {
    this.containerMetadata = new MetadataBuilder(CONTAINER_ID).build();
    this.storage = new TestStorage(new InMemoryStorage(), executorService());
    this.storage.initialize(1); // Epoch 1; must happen before any storage operation.
    this.timer = new ManualTimer();
    val dataSourceConfig = new TestWriterDataSource.DataSourceConfig();
    dataSourceConfig.autoInsertCheckpointFrequency = TestWriterDataSource.DataSourceConfig.NO_METADATA_CHECKPOINT;
    this.dataSource = new TestWriterDataSource(this.containerMetadata, executorService(), dataSourceConfig);
    this.transactionAggregators = new SegmentAggregator[TRANSACTION_COUNT];
    UpdateableSegmentMetadata segmentMetadata = initialize(this.containerMetadata.mapStreamSegmentId(SEGMENT_NAME, SEGMENT_ID));
    this.segmentAggregator = new SegmentAggregator(segmentMetadata, this.dataSource, this.storage, config, this.timer, executorService());
    this.transactionIds = new HashMap<>();
    // One aggregator per transaction segment; each transaction id maps back to the parent SEGMENT_ID.
    for (int i = 0; i < TRANSACTION_COUNT; i++) {
        String name = TRANSACTION_NAME_PREFIX + i;
        long id = TRANSACTION_ID_START + i;
        this.transactionIds.put(id, SEGMENT_ID);
        UpdateableSegmentMetadata transactionMetadata = initialize(this.containerMetadata.mapStreamSegmentId(name, TRANSACTION_ID_START + i));
        this.transactionAggregators[i] = new SegmentAggregator(transactionMetadata, this.dataSource, this.storage, config, this.timer, executorService());
    }
}
/**
 * Creates a new TestContext: wires an in-memory storage (with a RollingStorage layer
 * using the config's attribute-segment rolling policy), container metadata, an in-memory
 * cache with the given policy, and the ContainerAttributeIndex under test.
 */
TestContext(AttributeIndexConfig config, CachePolicy cachePolicy) {
    this.memoryStorage = new InMemoryStorage();
    this.memoryStorage.initialize(1); // Epoch 1; must happen before any storage operation.
    this.storage = new TestContext.TestStorage(new RollingStorage(this.memoryStorage, config.getAttributeSegmentRollingPolicy()), executorService());
    this.containerMetadata = new MetadataBuilder(CONTAINER_ID).build();
    this.cacheFactory = new InMemoryCacheFactory();
    this.cacheManager = new TestCacheManager(cachePolicy, executorService());
    val factory = new ContainerAttributeIndexFactoryImpl(config, this.cacheFactory, this.cacheManager, executorService());
    this.index = factory.createContainerAttributeIndex(this.containerMetadata, this.storage);
}
/**
 * Tests the ability to concat using the header file for those cases when native concat cannot be used because the
 * source Segment has multiple SegmentChunks.
 */
@Test
public void testConcatHeaderMultiFile() throws Exception {
    final int initialTargetLength = (int) DEFAULT_ROLLING_POLICY.getMaxLength() / 2;
    final String sourceSegmentName = "SourceSegment";
    @Cleanup
    val baseStorage = new InMemoryStorage();
    @Cleanup
    val s = new RollingStorage(baseStorage, DEFAULT_ROLLING_POLICY);
    s.initialize(1);

    // Create a Target Segment and a Source Segment and write some data to them.
    s.create(SEGMENT_NAME);
    val targetHandle = (RollingSegmentHandle) s.openWrite(SEGMENT_NAME);
    val writeStream = new ByteArrayOutputStream();
    populate(s, targetHandle, 1, initialTargetLength, initialTargetLength, writeStream);
    // Multiple appends to the source produce multiple SegmentChunks, which rules out native concat.
    s.create(sourceSegmentName);
    val sourceHandle = (RollingSegmentHandle) s.openWrite(sourceSegmentName);
    populate(s, sourceHandle, APPENDS_PER_SEGMENT, initialTargetLength, initialTargetLength, writeStream);
    s.seal(sourceHandle); // Source must be sealed before it can be concatenated.

    // Concat and verify the handle has been updated accordingly: the target gains one chunk
    // per source chunk, and its length grows by the source's length.
    s.concat(targetHandle, initialTargetLength, sourceSegmentName);
    checkConcatResult(s, targetHandle, sourceSegmentName, 1 + sourceHandle.chunks().size(), initialTargetLength + (int) sourceHandle.length());
    checkWrittenData(writeStream.toByteArray(), s.openRead(SEGMENT_NAME), s);
}
/**
 * Tests the ability to concat using the header file for those cases when native concat cannot be used because the
 * source Segment has a single SegmentChunk, but it's too large to fit into the Target's active SegmentChunk.
 */
@Test
public void testConcatHeaderSingleFile() throws Exception {
    final int initialTargetLength = (int) DEFAULT_ROLLING_POLICY.getMaxLength() / 2;
    // One byte past what the target's active chunk has room for, forcing a header concat.
    final int bigSourceLength = (int) DEFAULT_ROLLING_POLICY.getMaxLength() - initialTargetLength + 1;
    final String sourceSegmentName = "SourceSegment";
    @Cleanup
    val baseStorage = new InMemoryStorage();
    @Cleanup
    val s = new RollingStorage(baseStorage, DEFAULT_ROLLING_POLICY);
    s.initialize(1);

    // Create a Target Segment and a Source Segment and write some data to them.
    s.create(SEGMENT_NAME);
    val targetHandle = (RollingSegmentHandle) s.openWrite(SEGMENT_NAME);
    val writeStream = new ByteArrayOutputStream();
    populate(s, targetHandle, 1, initialTargetLength, initialTargetLength, writeStream);
    s.create(sourceSegmentName);
    val sourceHandle = (RollingSegmentHandle) s.openWrite(sourceSegmentName);
    populate(s, sourceHandle, 1, bigSourceLength, bigSourceLength, writeStream);
    s.seal(sourceHandle); // Source must be sealed before it can be concatenated.

    // Concat and verify the handle has been updated accordingly: target ends up with
    // exactly two chunks (its own plus the source's single, oversized one).
    s.concat(targetHandle, initialTargetLength, sourceSegmentName);
    checkConcatResult(s, targetHandle, sourceSegmentName, 2, initialTargetLength + bigSourceLength);
    checkWrittenData(writeStream.toByteArray(), s.openRead(SEGMENT_NAME), s);
}
// Annotate baseStorage with @Cleanup for consistency with every other test in this
// file, which closes both the base storage and the RollingStorage wrapper. Without
// it, the InMemoryStorage is never closed by this test.
@Cleanup
val baseStorage = new InMemoryStorage();
@Cleanup
val s = new RollingStorage(baseStorage, DEFAULT_ROLLING_POLICY);
/** * Tests the ability to use native concat for those cases when it's appropriate. */ @Test public void testConcatNatively() throws Exception { final int initialTargetLength = (int) DEFAULT_ROLLING_POLICY.getMaxLength() / 2; final int initialSourceLength = (int) DEFAULT_ROLLING_POLICY.getMaxLength() - initialTargetLength; final String sourceSegmentName = "SourceSegment"; @Cleanup val baseStorage = new InMemoryStorage(); @Cleanup val s = new RollingStorage(baseStorage, DEFAULT_ROLLING_POLICY); s.initialize(1); // Create a target Segment and write a little data to it. s.create(SEGMENT_NAME); val targetHandle = (RollingSegmentHandle) s.openWrite(SEGMENT_NAME); val writeStream = new ByteArrayOutputStream(); populate(s, targetHandle, 1, initialTargetLength, initialTargetLength, writeStream); // Create a source Segment and write a little data to it, making sure it is small enough to fit into the target // when we need to concat. s.create(sourceSegmentName); val sourceHandle = (RollingSegmentHandle) s.openWrite(sourceSegmentName); populate(s, sourceHandle, 1, initialSourceLength, initialSourceLength, writeStream); s.seal(sourceHandle); // Concat and verify the handle has been updated accordingly. s.concat(targetHandle, initialTargetLength, sourceSegmentName); checkConcatResult(s, targetHandle, sourceSegmentName, 1, initialTargetLength + initialSourceLength); checkWrittenData(writeStream.toByteArray(), s.openRead(SEGMENT_NAME), s); }
// Fixture setup: a rolling storage over in-memory storage, both closed automatically.
// NOTE(review): this is a fragment of a larger test method not fully visible here.
final String segmentName = "SonHeaderSegment";
@Cleanup
val baseStorage = new InMemoryStorage();
@Cleanup
val s = new RollingStorage(baseStorage, DEFAULT_ROLLING_POLICY);