// Decorator method: forwards withValue to the wrapped Config delegate `c`.
// NOTE(review): the enclosing class header is outside this view — `c` is presumably the wrapped
// Config instance this class delegates to; confirm against the full class definition.
@Override public Config withValue(String path, ConfigValue value) { return c.withValue(path, value); } }
/**
 * Modify the configuration to set the metric reporting interval.
 *
 * @param config Input {@link Config}.
 * @param reportingInterval desired reporting interval.
 * @param reportingIntervalUnit {@link TimeUnit} of {@code reportingInterval}.
 * @return Modified {@link Config} with {@code REPORTING_INTERVAL} set to {@code "<seconds>S"}.
 * @throws IllegalArgumentException if the interval exceeds {@link Integer#MAX_VALUE} seconds.
 */
public static Config setReportingInterval(Config config, long reportingInterval, TimeUnit reportingIntervalUnit) {
  long seconds = TimeUnit.SECONDS.convert(reportingInterval, reportingIntervalUnit);
  if (seconds > Integer.MAX_VALUE) {
    // IllegalArgumentException (a RuntimeException subtype) is the idiomatic type for a bad
    // argument; existing callers catching RuntimeException still catch it.
    throw new IllegalArgumentException(
        String.format("Reporting interval is too long. Max: %d seconds.", Integer.MAX_VALUE));
  }
  // Stored as a duration string (e.g. "30S") that the config library can parse back.
  return config.withValue(REPORTING_INTERVAL, ConfigValueFactory.fromAnyRef(seconds + "S"));
}
/**
 * Modify the configuration to set the {@link ContextFilter} class.
 *
 * @param config Input {@link Config}.
 * @param klazz Class of desired {@link ContextFilter}.
 * @return Modified {@link Config} with {@code CONTEXT_FILTER_CLASS} set to the class's canonical name.
 */
public static Config setContextFilterClass(Config config, Class<? extends ContextFilter> klazz) {
  String filterClassName = klazz.getCanonicalName();
  return config.withValue(CONTEXT_FILTER_CLASS, ConfigValueFactory.fromAnyRef(filterClassName));
}
/**
 * Initialize the FlowCatalog.
 * @param sysConfig that must contain the fully qualified path of the flow template catalog
 * @throws IOException
 */
public FSFlowCatalog(Config sysConfig) throws IOException {
  // Re-key the template-catalog path under the generic job-config path key the superclass reads.
  // NOTE(review): Config.getValue throws if TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY is absent,
  // which is presumably the intended fail-fast behavior — confirm with callers.
  super(sysConfig.withValue(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY,
      sysConfig.getValue(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY)));
}
/**
 * Adds a list-valued property to the configuration under construction. If an origin
 * description was supplied to this builder, it is attached to the created value.
 *
 * @param name   config key under which to store the list.
 * @param values elements of the list.
 * @return this builder, for chaining.
 */
public ConfigBuilder addList(String name, Iterable<? extends Object> values) {
  if (this.originDestription.isPresent()) {
    this.currentConfig = this.currentConfig.withValue(name,
        ConfigValueFactory.fromIterable(values, this.originDestription.get()));
  } else {
    this.currentConfig = this.currentConfig.withValue(name, ConfigValueFactory.fromIterable(values));
  }
  return this;
}
/**
 * Initialize the JobCatalog, fetch all jobs in jobConfDirPath.
 * @param sysConfig system configuration for the catalog.
 * @throws IOException
 */
public NonObservingFSJobCatalog(Config sysConfig) throws IOException {
  // Force the file-monitor polling interval to the disabled sentinel so this catalog
  // never observes changes to the job config directory (hence "NonObserving").
  super(sysConfig.withValue(ConfigurationKeys.JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL_KEY,
      ConfigValueFactory.fromAnyRef(ConfigurationKeys.DISABLED_JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL)));
}
/**
 * Initialize the JobCatalog with instrumentation options; like the single-arg constructor,
 * disables config-file monitor polling so the catalog never observes directory changes.
 * @param sysConfig system configuration for the catalog.
 * @param parentMetricContext optional parent {@link MetricContext} for instrumentation.
 * @param instrumentationEnabled whether instrumentation is enabled.
 * @throws IOException
 */
public NonObservingFSJobCatalog(Config sysConfig, Optional<MetricContext> parentMetricContext,
    boolean instrumentationEnabled) throws IOException{
  super(sysConfig.withValue(ConfigurationKeys.JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL_KEY,
      ConfigValueFactory.fromAnyRef(ConfigurationKeys.DISABLED_JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL)),
      parentMetricContext, instrumentationEnabled);
}
/**
 * Helper that overrides the data.node.id property with a name derived from the node file path.
 *
 * @param nodeConfig   node config
 * @param nodeFilePath path of the node file
 * @return config with overridden data.node.id
 */
private Config getNodeConfigWithOverrides(Config nodeConfig, Path nodeFilePath) {
  // The node id is the name of the directory that contains the node file.
  String derivedNodeId = nodeFilePath.getParent().getName();
  return nodeConfig.withValue(FlowGraphConfigurationKeys.DATA_NODE_ID_KEY,
      ConfigValueFactory.fromAnyRef(derivedNodeId));
}
/**
 * Builds the effective state-store config: keys under INTERMEDIATE_STATE_STORE_PREFIX are
 * promoted (prefix stripped) and take precedence over the supplied root-dir / db-table
 * values, which in turn override anything already in {@code config}.
 *
 * @param config     base configuration.
 * @param rootDir    fallback value for the state-store root directory key.
 * @param dbTableKey fallback value for the state-store DB table key.
 * @return scoped config with the fallback chain applied.
 */
private static Config getStateStoreConfig(Config config, String rootDir, String dbTableKey) {
  Config fallbackConfig = ConfigFactory.empty()
      .withFallback(config)
      .withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, ConfigValueFactory.fromAnyRef(rootDir))
      .withValue(ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, ConfigValueFactory.fromAnyRef(dbTableKey));
  Config scopedConfig = ConfigFactory.empty();
  for (Map.Entry<String, ConfigValue> entry
      : config.withOnlyPath(ConfigurationKeys.INTERMEDIATE_STATE_STORE_PREFIX).entrySet()) {
    // BUG FIX: Config is immutable — withValue returns a NEW instance. The original code
    // discarded that return value, so scopedConfig always stayed empty and every
    // prefix-scoped override was silently dropped. Reassign to accumulate them.
    // NOTE(review): assumes INTERMEDIATE_STATE_STORE_PREFIX includes its trailing '.' so the
    // substring yields a valid path — confirm against the constant's definition.
    scopedConfig = scopedConfig.withValue(
        entry.getKey().substring(ConfigurationKeys.INTERMEDIATE_STATE_STORE_PREFIX.length()),
        entry.getValue());
  }
  return scopedConfig.withFallback(fallbackConfig);
} }
@Override public DatasetStateStore<JobState.DatasetState> createStateStore(Config config) { // dummy root dir for noop state store Config config2 = config.withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, ConfigValueFactory.fromAnyRef("")); return FsDatasetStateStore.createStateStore(config2, NoopDatasetStateStore.class.getName()); } }
/** Module defaults: caps the handlebars template cache ("hbs.cache") at 100 entries. */
@Override
public Config config() {
  Config defaults = ConfigFactory.empty(Hbs.class.getName());
  return defaults.withValue("hbs.cache", ConfigValueFactory.fromAnyRef("maximumSize=100"));
}
/**
 * Writes (or overwrites) a single key/value pair in the store metadata file. Existing
 * metadata, if any, is preserved apart from {@code key}.
 *
 * @param key   metadata key to set.
 * @param value metadata value to store.
 * @throws IOException on metadata read or write failure.
 */
private void addMetadata(String key, String value) throws IOException {
  // Start from the existing metadata when the file is present; otherwise from scratch.
  Config base = isStoreMetadataFilePresent() ? readMetadata() : ConfigFactory.empty();
  writeMetadata(base.withValue(key, ConfigValueFactory.fromAnyRef(value)));
}
/**
 * Persists the given config — augmented with the cluster work directory — to CLUSTER_CONF_PATH.
 *
 * @param config config to persist.
 * @return the augmented config that was written.
 * @throws IOException if writing the file fails.
 */
private Config saveConfigToFile(Config config) throws IOException {
  Config augmented =
      config.withValue(CLUSTER_WORK_DIR, ConfigValueFactory.fromAnyRef(this.appWorkPath.toString()));
  new ConfigUtils(new FileUtils()).saveConfigToFile(augmented, CLUSTER_CONF_PATH);
  return augmented;
}
/**
 * Builds a config pointing the job-conf source at a path on the local filesystem.
 *
 * @param jobConfZipUri local path of the zipped job configs.
 */
@Override
protected Config getConfig(String jobConfZipUri) {
  Config config = ConfigFactory.empty();
  config = config.withValue(GobblinAWSConfigurationKeys.JOB_CONF_SOURCE_FILE_FS_URI_KEY,
      ConfigValueFactory.fromAnyRef("file:///"));
  config = config.withValue(GobblinAWSConfigurationKeys.JOB_CONF_SOURCE_FILE_PATH_KEY,
      ConfigValueFactory.fromAnyRef(jobConfZipUri));
  return config;
} }
/** Starts each run from a clean checkpoint directory and a freshly built FSDagStateStore. */
@BeforeClass
public void setUp() throws IOException {
  this.checkpointDir = new File(dagStateStoreDir);
  // Wipe leftovers from a previous run so the store starts empty.
  FileUtils.deleteDirectory(this.checkpointDir);

  Config config = ConfigFactory.empty();
  config = config.withValue(DagManager.DAG_STATESTORE_DIR,
      ConfigValueFactory.fromAnyRef(this.dagStateStoreDir));
  this._dagStateStore = new FSDagStateStore(config);
}
/**
 * Builds a config pointing the job-conf source at the given S3 URI.
 *
 * @param jobConfZipUri S3 URI of the zipped job configs.
 */
@Override
protected Config getConfig(String jobConfZipUri) {
  Config config = ConfigFactory.empty();
  return config.withValue(GobblinAWSConfigurationKeys.JOB_CONF_S3_URI_KEY,
      ConfigValueFactory.fromAnyRef(jobConfZipUri));
} }
/** An unknown configurator type name in the config must make the loader constructor throw. */
@Test
public void testConfigureFromConfig() {
  final Config badTypeConfig = ConfigFactory.empty().withValue(
      HttpClientConfiguratorLoader.HTTP_CLIENT_CONFIGURATOR_TYPE_KEY,
      ConfigValueFactory.fromAnyRef("blah"));
  Assert.assertThrows(new ThrowingRunnable() {
    @Override
    public void run() throws Throwable {
      new HttpClientConfiguratorLoader(badTypeConfig);
    }
  });
}
/** getFactoryConfig returns empty unless keys under the factory CONFIG_PREFIX are present. */
@Test
public void testGetFactoryConfig() {
  FileBasedJobLockFactoryManager manager = new FileBasedJobLockFactoryManager();

  // No properties at all -> empty factory config.
  Config emptyConfig = ConfigFactory.empty();
  Assert.assertTrue(manager.getFactoryConfig(emptyConfig).isEmpty());

  // A property outside the factory prefix is ignored -> still empty.
  Config unrelatedConfig = emptyConfig.withValue("some.prop", ConfigValueFactory.fromAnyRef("test"));
  Assert.assertTrue(manager.getFactoryConfig(unrelatedConfig).isEmpty());

  // A property under the factory prefix is surfaced with the prefix stripped.
  String prefixedKey =
      FileBasedJobLockFactoryManager.CONFIG_PREFIX + "." + FileBasedJobLockFactory.LOCK_DIR_CONFIG;
  Config prefixedConfig = unrelatedConfig.withValue(prefixedKey, ConfigValueFactory.fromAnyRef("/tmp"));
  Config factoryConfig = manager.getFactoryConfig(prefixedConfig);
  Assert.assertEquals(factoryConfig.getString(FileBasedJobLockFactory.LOCK_DIR_CONFIG), "/tmp");
}
/**
 * Builds a SystemProperties instance for the given database version, optionally setting the
 * incompatible-database behavior.
 * NOTE(review): the original inline comment claimed "reset is true for tests", but the code
 * explicitly sets database.reset to false — corrected below; confirm intent with the author.
 */
private SystemProperties withConfig(int databaseVersion, Behavior behavior) {
  Config config = ConfigFactory.empty()
      // reset is disabled so the on-disk database survives for the version check under test
      .withValue("database.reset", ConfigValueFactory.fromAnyRef(false));
  if (behavior != null) {
    // Behavior enum is serialized lower-case, matching the expected config format.
    config = config.withValue("database.incompatibleDatabaseBehavior",
        ConfigValueFactory.fromAnyRef(behavior.toString().toLowerCase()));
  }
  SPO systemProperties = new SPO(config);
  systemProperties.setDataBaseDir(databaseDir);
  systemProperties.setDatabaseVersion(databaseVersion);
  return systemProperties;
}
@Test public void helper_shouldPutVersion_afterDatabaseReset() throws IOException { Config config = ConfigFactory.empty() .withValue("database.reset", ConfigValueFactory.fromAnyRef(true)); SPO systemProperties = new SPO(config); systemProperties.setDataBaseDir(databaseDir); systemProperties.setDatabaseVersion(33); final File testFile = createFile(); assertTrue(testFile.exists()); resetHelper.process(systemProperties); assertEquals(new Integer(33), resetHelper.getDatabaseVersion(versionFile)); assertFalse(testFile.exists()); // reset should have cleared file }