/**
 * Returns the configuration that was read during bootstrap.
 *
 * @return a defensive {@link MapConfig} snapshot of the bootstrapped config
 * @throws SamzaException if {@code bootstrap} has not been invoked yet
 */
public Config getConfig() {
  // Guard clause: refuse to hand out config before bootstrap has populated it.
  if (!isBootstrapped) {
    throw new SamzaException("Must call bootstrap before retrieving config.");
  }
  return new MapConfig(configMap);
}
/**
 * Merges the original (user-supplied) config on top of the generated config.
 * On a key collision the original value wins; each such override is logged.
 *
 * @param originalConfig user-supplied config entries (take precedence)
 * @param generatedConfig framework-generated config entries
 * @return the merged config, after applying any configured config rewriters
 */
static Config mergeConfig(Map<String, String> originalConfig, Map<String, String> generatedConfig) {
  Map<String, String> merged = new HashMap<>(generatedConfig);
  for (Map.Entry<String, String> entry : originalConfig.entrySet()) {
    String key = entry.getKey();
    String originalValue = entry.getValue();
    String generatedValue = generatedConfig.get(key);
    if (generatedConfig.containsKey(key) && !Objects.equals(generatedValue, originalValue)) {
      // Surface silent overrides so operators can tell which value took effect.
      LOG.info("Replacing generated config for key: {} value: {} with original config value: {}",
          key, generatedValue, originalValue);
    }
    merged.put(key, originalValue);
  }
  return Util.rewriteConfig(new MapConfig(merged));
}
/**
 * Builds the UDF resolver named by the {@code CFG_UDF_RESOLVER} config key.
 * The resolver's own domain properties and the shared UDF config domain are
 * extracted from the flat config map and passed to the resolver.
 *
 * @param config full flat configuration map
 * @return a {@link ConfigBasedUdfResolver} for the configured resolver name
 * @throws IllegalArgumentException if the resolver config is missing or empty
 */
private UdfResolver createUdfResolver(Map<String, String> config) {
  String resolverName = config.get(CFG_UDF_RESOLVER);
  Validate.notEmpty(resolverName, "udfResolver config is not set or empty");
  // Properties scoped to this specific resolver implementation.
  Properties resolverProps = new Properties();
  resolverProps.putAll(
      getDomainProperties(config, String.format(CFG_FMT_UDF_RESOLVER_DOMAIN, resolverName), false));
  // Config shared by all UDFs, independent of the resolver chosen.
  HashMap<String, String> udfConfig = getDomainProperties(config, CFG_UDF_CONFIG_DOMAIN, false);
  return new ConfigBasedUdfResolver(resolverProps, new MapConfig(udfConfig));
}
/**
 * Builds a minimal config carrying only {@code job.container.count}.
 *
 * @param count desired number of containers
 * @return a single-entry {@link MapConfig}
 */
private Config buildConfigForContainerCount(int count) {
  Map<String, String> containerCount = new HashMap<>();
  containerCount.put("job.container.count", Integer.toString(count));
  return new MapConfig(containerCount);
}
/**
 * Returns a copy of the base test config with host affinity enabled.
 *
 * @return the base config plus {@code job.host-affinity.enabled=true}
 */
private Config getConfigWithHostAffinity() {
  Map<String, String> withAffinity = new HashMap<>(config);
  withAffinity.put("job.host-affinity.enabled", "true");
  return new MapConfig(withAffinity);
}
/**
 * Creates the {@code SqlIOResolver} named by the {@code CFG_IO_RESOLVER} config key,
 * injecting the metadata-topic-prefix property into the resolver's config domain.
 *
 * @param config full job configuration
 * @return the initialized IO resolver plugin
 * @throws IllegalArgumentException if the ioResolver config is missing or empty
 */
public static SqlIOResolver createIOResolver(Config config) {
  String sourceResolveValue = config.get(CFG_IO_RESOLVER);
  // Fix: validate BEFORE the value is used to build the domain key below; previously
  // a missing resolver name produced a bogus "...null..." property key first.
  Validate.notEmpty(sourceResolveValue, "ioResolver config is not set or empty");
  Map<String, String> metadataPrefixProperties = new HashMap<>();
  metadataPrefixProperties.put(
      String.format(CFG_FMT_SOURCE_RESOLVER_DOMAIN, sourceResolveValue) + CFG_METADATA_TOPIC_PREFIX,
      config.get(CFG_METADATA_TOPIC_PREFIX, DEFAULT_METADATA_TOPIC_PREFIX));
  // Layer the prefix property on top of the original config (later maps win).
  Config newConfig = new MapConfig(Arrays.asList(config, metadataPrefixProperties));
  return initializePlugin("SqlIOResolver", sourceResolveValue, newConfig, CFG_FMT_SOURCE_RESOLVER_DOMAIN,
      (o, c) -> ((SqlIOResolverFactory) o).create(c, newConfig));
}
/**
 * Returns a new config containing everything in {@code original} overlaid with
 * the given key/value pairs.
 *
 * @param original base config
 * @param kvs alternating key/value strings to add or override
 * @return the combined config
 */
private Config addConfigs(Config original, String... kvs) {
  Map<String, String> combined = new HashMap<>(original);
  combined.putAll(buildConfig(kvs));
  return new MapConfig(combined);
}
/** Builds a minimal test config carrying only the job name. */
private Config getConfig() {
  Map<String, String> configMap = new HashMap<>();
  configMap.put(JobConfig.JOB_NAME(), "test-job");
  return new MapConfig(configMap);
}
/** Builds a minimal test config carrying the job name and job id. */
private Config getConfig() {
  Map<String, String> configMap = new HashMap<>();
  configMap.put(JobConfig.JOB_NAME(), "test-job");
  configMap.put(JobConfig.JOB_ID(), "1");
  return new MapConfig(configMap);
}
@Test public void testgetKCLConfigWithUnknownConfigs() { Map<String, String> kv = new HashMap<>(); kv.put("systems.kinesis.aws.region", "us-east-1"); kv.put("systems.kinesis.streams.kinesis-stream.aws.kcl.random", "value"); Config config = new MapConfig(kv); KinesisConfig kConfig = new KinesisConfig(config); // Should not throw any exception and just ignore the unknown configs. kConfig.getKinesisClientLibConfig("kinesis", "kinesis-stream", "sample-app"); } }
/** app.class must select the StreamApplication, even when task.class is also set. */
@Test
public void testStreamAppClass() {
  Map<String, String> configMap = new HashMap<>();
  configMap.put(ApplicationConfig.APP_CLASS, MockStreamApplication.class.getName());
  assertTrue(ApplicationUtil.fromConfig(new MapConfig(configMap)) instanceof MockStreamApplication);
  // Adding a task.class alongside app.class must not change which app is created.
  configMap.put(TaskConfig.TASK_CLASS(), MockStreamTask.class.getName());
  assertTrue(ApplicationUtil.fromConfig(new MapConfig(configMap)) instanceof MockStreamApplication);
}
/** When every input is a broadcast stream, all SSPs land in a single task. */
@Test
public void testNoTaskOnlyContainsBroadcastStreams() {
  Config config =
      new MapConfig(ImmutableMap.of("task.broadcast.inputs", "SystemA.StreamA#0, SystemA.StreamB#1"));
  GroupByPartition grouper = new GroupByPartition(config);

  Map<TaskName, Set<SystemStreamPartition>> result = grouper.group(ImmutableSet.of(aa0, ab1, ab2));

  Map<TaskName, Set<SystemStreamPartition>> expectedResult =
      ImmutableMap.of(new TaskName("Partition 2"), ImmutableSet.of(aa0, ab1, ab2));
  assertEquals(expectedResult, result);
}
/** An empty task.class value (with no app.class) must be rejected as a ConfigException. */
@Test(expected = ConfigException.class)
public void testEmptyTaskClassOnly() {
  Map<String, String> emptyTaskClass = new HashMap<>();
  emptyTaskClass.put(TaskConfig.TASK_CLASS(), "");
  ApplicationUtil.fromConfig(new MapConfig(emptyTaskClass));
}
@Test public void testGetProcessorLocality() { // Mock the dependencies. LocalityManager mockLocalityManager = mock(LocalityManager.class); Map<String, Map<String, String>> localityMappings = new HashMap<>(); localityMappings.put("0", ImmutableMap.of(SetContainerHostMapping.HOST_KEY, "abc-affinity")); // Mock the container locality assignment. when(mockLocalityManager.readContainerLocality()).thenReturn(localityMappings); Map<String, LocationId> processorLocality = JobModelManager.getProcessorLocality(new MapConfig(), mockLocalityManager); Mockito.verify(mockLocalityManager).readContainerLocality(); Assert.assertEquals(ImmutableMap.of("0", new LocationId("abc-affinity"), "1", new LocationId("ANY_HOST")), processorLocality); }
/** Enabling the changelog without a job.name in config must fail with an NPE. */
@Test(expected = NullPointerException.class)
public void testChangelogWithoutJobName() {
  Map<String, String> configWithoutJobName = new HashMap<>();
  configWithoutJobName.put("job.id", JOB_ID);
  createTableDescriptor().withChangelogEnabled().toConfig(new MapConfig(configWithoutJobName));
}
/**
 * Serializing a remote table descriptor without a read function must fail fast.
 * {@code toConfig} is expected to throw {@link NullPointerException}, so the old
 * {@code Assert.assertTrue(...)} after it was unreachable dead code and has been removed.
 */
@Test(expected = NullPointerException.class)
public void testSerializeNullReadFunction() {
  RemoteTableDescriptor desc = new RemoteTableDescriptor("1");
  desc.toConfig(new MapConfig());
}
/** A plan with only stateless operators must not generate a task.window.ms trigger. */
@Test
public void testTriggerIntervalForStatelessOperators() {
  Map<String, String> plannerConfig = new HashMap<>(config);
  plannerConfig.put(JobConfig.JOB_INTERMEDIATE_STREAM_PARTITIONS(), String.valueOf(DEFAULT_PARTITIONS));
  ExecutionPlanner planner = new ExecutionPlanner(new MapConfig(plannerConfig), streamManager);

  StreamApplicationDescriptorImpl graphSpec = createSimpleGraph();
  ExecutionPlan plan = planner.plan(graphSpec);
  List<JobConfig> jobConfigs = plan.getJobConfigs();

  assertEquals(1, jobConfigs.size());
  // No stateful operators => no periodic window trigger should be configured.
  assertFalse(jobConfigs.get(0).containsKey(TaskConfig.WINDOW_MS()));
}
/**
 * Op ids follow "&lt;job-name&gt;-&lt;job-id&gt;-&lt;op-code&gt;-&lt;suffix&gt;"; the numeric suffix keeps
 * incrementing even when an intervening op supplied an explicit custom name.
 */
@Test
public void testGetNextOpIdIncrementsId() {
  Map<String, String> configMap = new HashMap<>();
  configMap.put(JobConfig.JOB_NAME(), "jobName");
  configMap.put(JobConfig.JOB_ID(), "1234");
  StreamApplicationDescriptorImpl streamAppDesc =
      new StreamApplicationDescriptorImpl(appDesc -> { }, new MapConfig(configMap));

  assertEquals("jobName-1234-merge-0", streamAppDesc.getNextOpId(OpCode.MERGE, null));
  assertEquals("jobName-1234-join-customName", streamAppDesc.getNextOpId(OpCode.JOIN, "customName"));
  // The custom-named JOIN still consumed id 1, so MAP gets 2.
  assertEquals("jobName-1234-map-2", streamAppDesc.getNextOpId(OpCode.MAP, null));
}
/** An environment variable SAMZA_FOO_BAR must override the config key foo.bar. */
@Test
public void testRewriteOverridesConfig() throws Exception {
  Config rewritten = rewriter.rewrite(
      new MapConfig(createMap("foo.bar", "a")),
      createMap("SAMZA_FOO_BAR", "b"));
  assertEquals("b", rewritten.get("foo.bar"));
}
// Serializing a remote table with only a read function configured must still emit
// the read-fn entry; the write function is intentionally left unset.
@Test public void testSerializeNullWriteFunction() { String tableId = "1"; RemoteTableDescriptor desc = new RemoteTableDescriptor(tableId) .withReadFunction(createMockTableReadFunction()); Map<String, String> tableConfig = desc.toConfig(new MapConfig()); assertExists(RemoteTableDescriptor.READ_FN, tableId, tableConfig); // NOTE(review): 4-arg assertEquals with a null expected value — presumably a
// local test helper asserting the WRITE_FN entry is absent/null; confirm its signature.
assertEquals(null, RemoteTableDescriptor.WRITE_FN, tableId, tableConfig); }