/**
 * Gets a new instance of a Configuration for this builder.
 *
 * @param constructor A Supplier for a ConfigBuilder for the given Configuration.
 * @param <T>         The type of the Configuration to instantiate.
 * @return A new Configuration instance, built from the given ConfigBuilder after
 *         rebasing it onto this builder's properties.
 */
public <T> T getConfig(Supplier<? extends ConfigBuilder<? extends T>> constructor) {
    return constructor.get()
            .rebase(this.properties)
            .build();
}
/**
 * Builds a MetricsConfig from every CONFIG entry whose key starts with METRIC_PATH,
 * stripping that prefix from each property name before registering it.
 * NOTE(review): replaceFirst treats METRIC_PATH as a regex — confirm it contains no
 * special characters, otherwise the stripped name may differ from the intent.
 */
public static MetricsConfig getMetricsConfig() {
    val builder = MetricsConfig.builder();
    CONFIG.entrySet().stream()
            .filter(e -> e.getKey().startsWith(METRIC_PATH))
            .forEach(e -> builder.with(
                    Property.named(e.getKey().replaceFirst(METRIC_PATH, "")),
                    e.getValue().unwrapped()));
    return builder.build();
}
}
/**
 * Per-test setup: creates a temporary database directory, builds a RocksDBConfig
 * pointing at it, and instantiates the RocksDBCacheFactory under test.
 */
@Before
public void setUp() {
    this.tempDir.set(Files.createTempDir());
    this.config.set(RocksDBConfig.builder()
            .with(RocksDBConfig.DATABASE_DIR, this.tempDir.get().getAbsolutePath())
            .build());
    this.factory.set(new RocksDBCacheFactory(this.config.get()));
}
/**
 * Per-test setup: provisions a fresh temporary root directory and builds the
 * FileSystemStorageConfig used by the storage adapter under test.
 */
@Before
public void setUp() throws Exception {
    this.baseDir = Files.createTempDirectory("test_nfs").toFile().getAbsoluteFile();
    this.adapterConfig = FileSystemStorageConfig.builder()
            .with(FileSystemStorageConfig.ROOT, this.baseDir.getAbsolutePath())
            .build();
}
/**
 * Per-test setup: provisions a temporary root directory, enables statistics on the
 * metrics provider, and builds the FileSystemStorageConfig for the adapter under test.
 */
@Before
public void setUp() throws Exception {
    this.baseDir = Files.createTempDirectory("test_nfs").toFile().getAbsoluteFile();
    // Statistics must be on so the adapter's metric reporting paths are exercised.
    MetricsProvider.initialize(MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, true)
            .build());
    this.adapterConfig = FileSystemStorageConfig.builder()
            .with(FileSystemStorageConfig.ROOT, this.baseDir.getAbsolutePath())
            .build();
}
/**
 * Per-test setup: spins up a MiniDFSCluster rooted at a fresh temporary directory and
 * builds an HDFSStorageConfig pointing at its NameNode.
 */
@Before
public void setUp() throws Exception {
    this.baseDir = Files.createTempDirectory("test_hdfs").toFile().getAbsoluteFile();
    this.hdfsCluster = HDFSClusterHelpers.createMiniDFSCluster(this.baseDir.getAbsolutePath());
    String nameNodeUrl = String.format("hdfs://localhost:%d/", this.hdfsCluster.getNameNodePort());
    this.adapterConfig = HDFSStorageConfig.builder()
            .with(HDFSStorageConfig.REPLICATION, 1)
            .with(HDFSStorageConfig.URL, nameNodeUrl)
            .build();
}
/**
 * Per-test setup: creates a temporary base directory, starts a MiniDFSCluster on it,
 * and configures the HDFS storage adapter to talk to that cluster's NameNode.
 */
@Before
public void setUp() throws Exception {
    this.baseDir = Files.createTempDirectory("test_hdfs").toFile().getAbsoluteFile();
    this.hdfsCluster = HDFSClusterHelpers.createMiniDFSCluster(this.baseDir.getAbsolutePath());
    this.adapterConfig = HDFSStorageConfig.builder()
            .with(HDFSStorageConfig.REPLICATION, 1)
            .with(HDFSStorageConfig.URL,
                    String.format("hdfs://localhost:%d/", this.hdfsCluster.getNameNodePort()))
            .build();
}
/**
 * Verifies that a negative ZK_HIERARCHY_DEPTH is rejected when the config is built.
 */
@Test
public void testZkHierarchyDepth() {
    AssertExtensions.assertThrows("BookKeeperConfig did not throw InvalidPropertyValueException",
            () -> BookKeeperConfig.builder()
                    .with(BookKeeperConfig.ZK_HIERARCHY_DEPTH, -1)
                    .build(),
            ex -> ex instanceof InvalidPropertyValueException);
}
}
/**
 * Per-test setup: initializes the metrics provider with statistics enabled so that
 * metric objects created by the tests are live.
 */
@Before
public void setUp() {
    MetricsConfig statsEnabled = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, true)
            .build();
    MetricsProvider.initialize(statsEnabled);
}
/**
 * Verifies that an ack quorum larger than the write quorum is rejected at build time.
 */
@Test
public void testQuorumSize() {
    AssertExtensions.assertThrows("BookKeeperConfig did not throw InvalidPropertyValueException",
            () -> BookKeeperConfig.builder()
                    .with(BookKeeperConfig.BK_ACK_QUORUM_SIZE, 3)
                    .with(BookKeeperConfig.BK_WRITE_QUORUM_SIZE, 2)
                    .build(),
            ex -> ex instanceof InvalidPropertyValueException);
}
/** * Tests the BookKeeperLogFactory and its initialization. */ @Test public void testFactoryInitialize() { BookKeeperConfig bkConfig = BookKeeperConfig .builder() .with(BookKeeperConfig.ZK_ADDRESS, "localhost:" + BK_PORT.get()) .with(BookKeeperConfig.BK_LEDGER_MAX_SIZE, WRITE_MAX_LENGTH * 10) // Very frequent rollovers. .with(BookKeeperConfig.ZK_METADATA_PATH, this.zkClient.get().getNamespace()) .build(); @Cleanup val factory = new BookKeeperLogFactory(bkConfig, this.zkClient.get(), executorService()); AssertExtensions.assertThrows("", factory::initialize, ex -> ex instanceof DataLogNotAvailableException && ex.getCause() instanceof BKException.ZKException ); }
@Test public void testZkServers() { // When multiple servers are specified and separated by comma, it should replace it by semicolon BookKeeperConfig cfg1 = BookKeeperConfig.builder() .with(BookKeeperConfig.ZK_ADDRESS, "foo:12345,bar:54321") .build(); Assert.assertEquals("foo:12345;bar:54321", cfg1.getZkAddress()); // No changes should be made to the following configs BookKeeperConfig cfg2 = BookKeeperConfig.builder() .with(BookKeeperConfig.ZK_ADDRESS, "foo:12345") .build(); Assert.assertEquals("foo:12345", cfg2.getZkAddress()); BookKeeperConfig cfg3 = BookKeeperConfig.builder() .with(BookKeeperConfig.ZK_ADDRESS, "10.20.30.40:12345;bar:2181") .build(); Assert.assertEquals("10.20.30.40:12345;bar:2181", cfg3.getZkAddress()); }
/**
 * Creates a DurableLogConfig for tests. A null argument defaults to Integer.MAX_VALUE /
 * Long.MAX_VALUE, effectively disabling the corresponding checkpoint trigger.
 * NOTE(review): checkpointMinCommitCount feeds CHECKPOINT_COMMIT_COUNT, while
 * CHECKPOINT_MIN_COMMIT_COUNT is pinned to the static constant — confirm this
 * cross-wiring is intentional; the parameter name suggests otherwise.
 */
static DurableLogConfig createDurableLogConfig(Integer checkpointMinCommitCount, Long checkpointMinTotalCommitLength) {
    int commitCount = (checkpointMinCommitCount == null) ? Integer.MAX_VALUE : checkpointMinCommitCount;
    long totalCommitLength = (checkpointMinTotalCommitLength == null) ? Long.MAX_VALUE : checkpointMinTotalCommitLength;
    return DurableLogConfig
            .builder()
            .with(DurableLogConfig.CHECKPOINT_MIN_COMMIT_COUNT, CHECKPOINT_MIN_COMMIT_COUNT)
            .with(DurableLogConfig.CHECKPOINT_COMMIT_COUNT, commitCount)
            .with(DurableLogConfig.CHECKPOINT_TOTAL_COMMIT_LENGTH, totalCommitLength)
            .with(DurableLogConfig.START_RETRY_DELAY_MILLIS, START_RETRY_DELAY_MILLIS)
            .build();
}
}
private MetadataCheckpointPolicy getNoOpCheckpointPolicy() { // Turn off any MetadataCheckpointing. In these tests, we are doing that manually. DurableLogConfig dlConfig = DurableLogConfig .builder() .with(DurableLogConfig.CHECKPOINT_COMMIT_COUNT, Integer.MAX_VALUE) .with(DurableLogConfig.CHECKPOINT_TOTAL_COMMIT_LENGTH, Long.MAX_VALUE) .build(); return new MetadataCheckpointPolicy(dlConfig, Runnables.doNothing(), executorService()); }
/**
 * Tests the with() method: each call must return the same builder instance (fluent
 * chaining), and every property set must be retrievable from the built config.
 */
@Test
public void testWith() {
    final String namespace = "ns";
    final int propertyCount = 10;
    val builder = new ConfigBuilder<TestConfig>(namespace, TestConfig::new);
    for (int i = 0; i < propertyCount; i++) {
        val result = builder.with(Property.named(Integer.toString(i)), i);
        // BUGFIX: the contract is "returns this instance", so check identity, not equality.
        Assert.assertSame("with() did not return this instance.", builder, result);
    }

    TestConfig c = builder.build();
    for (int i = 0; i < propertyCount; i++) {
        val p = Property.<Integer>named(Integer.toString(i));
        val actual = c.getProperties().getInt(p);
        Assert.assertEquals("Unexpected value in result.", i, actual);
    }
}
/**
 * Verifies cache expiry in the AutoScaleProcessor: with cooldown/mute disabled and a
 * 1-second cache expiry, an unused segment should trigger a scale-DOWN event and then
 * be evicted from the processor's cache.
 */
@Test(timeout = 10000)
public void testCacheExpiry() {
    CompletableFuture<Void> scaledDown = new CompletableFuture<>();
    AutoScaleProcessor monitor = new AutoScaleProcessor(
            createWriter(event -> {
                if (event.getDirection() == AutoScaleEvent.DOWN) {
                    scaledDown.complete(null);
                } else {
                    scaledDown.completeExceptionally(new RuntimeException());
                }
            }),
            AutoScalerConfig.builder()
                    .with(AutoScalerConfig.MUTE_IN_SECONDS, 0)
                    .with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0)
                    .with(AutoScalerConfig.CACHE_CLEANUP_IN_SECONDS, 1)
                    .with(AutoScalerConfig.CACHE_EXPIRY_IN_SECONDS, 1)
                    .build(),
            executorService());

    String segmentName = StreamSegmentNameUtils.getQualifiedStreamSegmentName(SCOPE, STREAM1, 0L);
    monitor.notifyCreated(segmentName, WireCommands.CreateSegment.IN_EVENTS_PER_SEC, 10);

    assertTrue(Futures.await(scaledDown));
    // After expiry, the segment must no longer be tracked.
    assertNull(monitor.get(segmentName));
}
/**
 * Test that we can transition from stats enabled, to disabled, to enabled: counters
 * created while statistics are off must not be registered, and counters created after
 * re-enabling must be registered.
 */
@Test
public void testMultipleInitialization() {
    MetricsConfig config = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, false)
            .build();
    MetricsProvider.initialize(config);
    statsLogger.createCounter("counterDisabled");

    // BUGFIX: use assertNull instead of assertEquals(null, ...) — clearer intent and failure message.
    Assert.assertNull("Counter should not be registered while statistics are disabled.",
            MetricRegistryUtils.getCounter("counterDisabled"));

    config = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, true)
            .build();
    MetricsProvider.initialize(config);
    statsLogger.createCounter("counterEnabled");

    Assert.assertNotNull(
            MetricRegistryUtils.getCounter("pravega.testStatsLogger.counterEnabled"));
}
/**
 * Test transition back to null provider: while statistics are disabled a counter is a
 * no-op (value stays 0); after re-enabling, updates on the same counter take effect.
 */
@Test
public void testTransitionBackToNullProvider() {
    MetricsConfig disabled = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, false)
            .build();
    MetricsProvider.initialize(disabled);

    Counter counter = statsLogger.createCounter("continuity-counter");
    counter.add(1L);
    // The add() above is swallowed by the null provider.
    assertEquals(0L, counter.get());

    MetricsConfig enabled = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, true)
            .build();
    MetricsProvider.initialize(enabled);

    counter.add(1L);
    assertEquals(1L, counter.get());
}
}
// Wires up an in-memory test harness: storage, metadata, read index (with 1KB read
// alignment), memory log, and the MemoryStateUpdater under test.
TestContext() {
    this.cacheFactory = new InMemoryCacheFactory();
    this.storage = InMemoryStorageFactory.newStorage(executorService());
    this.storage.initialize(1);
    this.metadata = new MetadataBuilder(CONTAINER_ID).build();
    ReadIndexConfig readIndexConfig = ReadIndexConfig.builder()
            .with(ReadIndexConfig.STORAGE_READ_ALIGNMENT, 1024)
            .build();
    this.cacheManager = new CacheManager(CachePolicy.INFINITE, executorService());
    this.readIndex = new ContainerReadIndex(readIndexConfig, this.metadata, this.cacheFactory,
            this.storage, this.cacheManager, executorService());
    this.memoryLog = new SequencedItemList<>();
    this.stateUpdater = new MemoryStateUpdater(this.memoryLog, this.readIndex, Runnables.doNothing());
}
// Wires up a file-system-backed attribute index harness for the given segment:
// storage adapter rooted at OUTPUT_DIR_NAME, container metadata, cache manager, and
// the ContainerAttributeIndex under test, then maps the segment and clears old data.
TestContext(String segmentName, AttributeIndexConfig config) {
    val storageConfig = FileSystemStorageConfig.builder()
            .with(FileSystemStorageConfig.ROOT, OUTPUT_DIR_NAME)
            .build();
    val storageFactory = new FileSystemStorageFactory(storageConfig, executorService());
    this.storage = storageFactory.createStorageAdapter();
    this.containerMetadata = new MetadataBuilder(0).build();
    // CLEANUP: removed commented-out InMemoryCacheFactory alternative (dead code).
    this.cacheFactory = new NoOpCacheFactory();
    this.cacheManager = new CacheManager(CachePolicy.INFINITE, executorService());
    val factory = new ContainerAttributeIndexFactoryImpl(config, this.cacheFactory, this.cacheManager, executorService());
    this.index = factory.createContainerAttributeIndex(this.containerMetadata, this.storage);

    // Setup the segment in the metadata.
    this.segmentId = 0L;
    this.attributeSegmentName = StreamSegmentNameUtils.getAttributeSegmentName(segmentName);
    this.containerMetadata.mapStreamSegmentId(segmentName, this.segmentId);

    // Cleanup any existing data.
    cleanup();
}