/**
 * Builds a {@link Configuration} by loading the YAML config file referenced on the command line.
 *
 * @param cmd parsed command-line options carrying the path to the data-feed config file
 * @return the populated configuration
 * @throws JobRuntimeException if the config file cannot be opened or read
 */
private static Configuration getConfiguration(@NonNull final JsonHoodieIngestionCommandLineOptions cmd) {
    final Configuration conf = new Configuration();
    try {
        final FileSystem fs = FSUtils.getFs(conf);
        final Path dataFeedConfFile = new Path(cmd.getConfFile());
        // SLF4J formats the argument itself; no explicit toString() needed.
        log.info("Loading configuration from {}", dataFeedConfFile);
        conf.loadYamlStream(fs.open(dataFeedConfFile), Optional.absent());
    } catch (IOException e) {
        final String errorMessage = String.format("Unable to find configuration for %s", cmd.getConfFile());
        // Log the cause too so the stack trace is preserved even if the rethrow is swallowed upstream.
        log.error(errorMessage, e);
        throw new JobRuntimeException(errorMessage, e);
    }
    return conf;
}
/** Requesting a scope that is not defined in the scope-aware config stream must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testConfigurationParseWithNonExistentScope() {
    // The constructor itself is expected to throw; nothing to assert afterwards.
    new Configuration(scopeAwareConfigInputStream, Optional.of("non-existent-scope"));
}
}
/** A scope given against a config file that lacks a scope-override key must fail fast. */
@Test(expected = IllegalStateException.class)
public void testConfigurationParseFailWithScopeButWithoutScopeOverrideKey() {
    // Return value intentionally discarded; the constructor is expected to throw.
    new Configuration(new File(CONFIG_YAML), Optional.of("non-existent-scope"));
}
/**
 * Builds a {@link KafkaSourceConfiguration} for tests with the mandatory
 * broker / topic / cluster / start-date properties populated.
 *
 * @param topicName     kafka topic to read
 * @param brokerAddress broker list for the consumer
 * @param startDate     ingestion start date
 * @return a configuration wired with the mandatory kafka properties
 */
public static KafkaSourceConfiguration getKafkaSourceConfiguration(@NotEmpty final String topicName,
        @NotEmpty final String brokerAddress, @NotEmpty final String startDate) {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(topicName));
    Preconditions.checkArgument(!Strings.isNullOrEmpty(brokerAddress));
    // startDate is annotated @NotEmpty like the others but was previously unchecked; validate it too.
    Preconditions.checkArgument(!Strings.isNullOrEmpty(startDate));
    final Configuration conf = new Configuration();
    KafkaTestHelper.setMandatoryConf(conf,
        Arrays.asList(KAFKA_BROKER_LIST, KAFKA_TOPIC_NAME, KAFKA_CLUSTER_NAME, KAFKA_START_DATE),
        Arrays.asList(brokerAddress, topicName, TEST_KAFKA_CLUSTER_NAME, startDate));
    return new KafkaSourceConfiguration(conf);
}
/** getSparkConf must succeed even when the config file defines no spark property overrides. */
@Test
public void testSparkConfOverrideDoesNotFailWithoutAnySparkConfDefinitions() {
    final Configuration configuration =
        new Configuration(new File(TestConfiguration.CONFIG_YAML), Optional.absent());
    SparkUtil.getSparkConf("fooApp", Optional.absent(), Arrays.asList(), configuration);
}
}
/** Creates a fresh HDFSMetadataManager rooted under the test metadata path before each test. */
@Before
public void setupTest() throws IOException {
    final Configuration conf = new Configuration();
    this.fileSystem = FSUtils.getFs(conf);
    final String metadataLocation = new Path(HDFSTestConstants.BASE_METADATA_PATH, JOB_NAME).toString();
    final AtomicBoolean canRun = new AtomicBoolean(true);
    this.metadataManager = new HDFSMetadataManager(this.fileSystem, metadataLocation, canRun);
}
/** Builds an HDFSSchemaService pointed at the bundled test schema directory. */
private HDFSSchemaService getHdfsSchemaService() {
    final Configuration serviceConf = new Configuration();
    serviceConf.setProperty(HDFSSchemaServiceConfiguration.PATH, "src/test/resources/schemas/schemasource");
    return new HDFSSchemaService(serviceConf);
}
}
/** The data center for a job should be read from the bundled datacenter resource file. */
@Test
public void testGetDataCenter() throws IOException {
    // Removed an unused Configuration instance the original created and never touched.
    final String dcFile = FileHelperUtil.getResourcePath(getClass(),
        new Path("datacenter", "datacenter").toString());
    final String dc = JobUtil.getDataCenterForJob(dcFile);
    Assert.assertEquals("test_dc", dc);
}
}
/** Initializes the ThreadPoolService with the given pool sizes for total, job-dag and action threads. */
private void initService(final int numThreads, final int numJobDagThreads, final int numActionsThreads) {
    final Configuration serviceConf = new Configuration();
    serviceConf.setProperty(ThreadPoolService.NUM_THREADS, String.valueOf(numThreads));
    serviceConf.setProperty(ThreadPoolService.JOB_DAG_THREADS, String.valueOf(numJobDagThreads));
    serviceConf.setProperty(ThreadPoolService.ACTIONS_THREADS, String.valueOf(numActionsThreads));
    ThreadPoolService.init(serviceConf);
}
/** When no group id is configured, the consumer params should carry the default group id. */
@Test
public void testGroupIdDefault() {
    final Configuration conf = new Configuration();
    KafkaTestHelper.setMandatoryConf(conf,
        Collections.singletonList(KAFKA_BROKER_LIST),
        Collections.singletonList(BROKER_LIST));
    final KafkaConfiguration kafkaConfiguration = new KafkaConfiguration(conf);
    Assert.assertEquals(DEFAULT_GROUP_ID, kafkaConfiguration.getKafkaParams().get(GROUP_ID));
}
/** enable.auto.commit is pinned by KafkaConfiguration; a user-supplied value must be ignored. */
@Test
public void testAutoCommitNotConfigurable() {
    final Configuration conf = new Configuration();
    KafkaTestHelper.setMandatoryConf(conf,
        Arrays.asList(KAFKA_BROKER_LIST, KAFKA_PROPERTY_PREFIX + ENABLE_AUTO_COMMIT),
        Arrays.asList(BROKER_LIST, "bar"));
    final KafkaConfiguration kafkaConfiguration = new KafkaConfiguration(conf);
    Assert.assertEquals(ENABLE_AUTO_COMMIT_VALUE, kafkaConfiguration.getKafkaParams().get(ENABLE_AUTO_COMMIT));
}
@BeforeClass public static void setUp() { final Configuration conf = new Configuration(); conf.setProperty(ThreadPoolService.NUM_THREADS, Integer.toString(NUM_THREADS)); conf.setProperty(ThreadPoolService.JOB_DAG_THREADS, Integer.toString(NUM_JOB_DAG_THREADS)); try { ThreadPoolService.init(conf); } catch (JobRuntimeException e) { // thread pool service already initialized } }
/** Wires an HDFS-backed JobManagerMetadataTracker against the test base path before each test. */
@Before
public void setupTest() throws IOException {
    config = new Configuration();
    config.setProperty(MetadataConstants.JOBMANAGER_METADATA_STORAGE, "hdfs");
    config.setProperty(MetadataConstants.JOBMANAGER_METADATA_HDFS_BASEPATH,
        HDFSTestConstants.JOBMANAGER_BASE_METADATA_BASEPATH);
    // Storage properties must be set before FSUtils.getFs reads the config.
    this.fileSystem = FSUtils.getFs(config);
    // Removed an unused AtomicBoolean local the original created and never passed anywhere.
    basePath = new Path(HDFSTestConstants.JOBMANAGER_BASE_METADATA_BASEPATH);
    this.tracker = Optional.of(new JobManagerMetadataTracker(config));
}
/** An explicitly configured group id must override the default group id. */
@Test
public void testGroupIdConfigurable() {
    final String expectedGroupId = "mygroup";
    final Configuration conf = new Configuration();
    KafkaTestHelper.setMandatoryConf(conf,
        Arrays.asList(KAFKA_BROKER_LIST, KAFKA_GROUP_ID),
        Arrays.asList(BROKER_LIST, expectedGroupId));
    final KafkaConfiguration kafkaConfiguration = new KafkaConfiguration(conf);
    Assert.assertEquals(expectedGroupId, kafkaConfiguration.getKafkaParams().get(GROUP_ID));
}
/** Scoped spark property overrides from the YAML config must flow into the resulting SparkConf. */
@Test
public void testSparkConfOverride() {
    final Configuration scopedConf = new Configuration(
        TestSparkUtil.class.getResourceAsStream("/configWithScopes.yaml"), Optional.of("incremental"));
    final SparkConf sparkConf =
        SparkUtil.getSparkConf("fooApp", Optional.absent(), Arrays.asList(), scopedConf);
    Assert.assertEquals("4g", sparkConf.get("spark.executor.memory"));
    Assert.assertEquals("4g", sparkConf.get("spark.driver.memory"));
    Assert.assertEquals("100s", sparkConf.get("spark.network.timeout"));
}
/** Constructing a HiveSourceConfiguration without HIVE_DATA_PATH must raise MissingPropertyException. */
@Test(expected = MissingPropertyException.class)
public void testMissingHiveDataPath() {
    final Configuration config = new Configuration();
    config.setProperty(HiveSourceConfiguration.JOB_NAME, JOB_NAME);
    // The constructor is expected to throw; the original assigned the result to an unused local.
    new HiveSourceConfiguration(config);
    Assert.fail("expected MissingPropertyException for missing hive data path");
}
/** Initializes the shared ThreadPoolService with a small pool before any test runs. */
@BeforeClass
public static void setupClass() {
    final Configuration poolConf = new Configuration();
    poolConf.setProperty(ThreadPoolService.NUM_THREADS, "4");
    poolConf.setProperty(ThreadPoolService.JOB_DAG_THREADS, "2");
    ThreadPoolService.init(poolConf);
}
/**
 * Builds a HiveSourceConfiguration with the given job name, data path and metadata path.
 *
 * @param jobName      name of the ingestion job
 * @param dataPath     location of the hive data to read
 * @param metadataPath base path for job metadata
 * @return the assembled hive source configuration
 */
public static HiveSourceConfiguration initializeConfig(final String jobName, final String dataPath,
        final String metadataPath) {
    final Configuration conf = new Configuration();
    conf.setProperty(HiveSourceConfiguration.JOB_NAME, jobName);
    conf.setProperty(HiveSourceConfiguration.HIVE_DATA_PATH, dataPath);
    conf.setProperty(HiveSourceConfiguration.BASE_METADATA_PATH, metadataPath);
    return new HiveSourceConfiguration(conf);
}
}
/** computeWorkUnits must raise JobRuntimeException when the configured directory does not exist. */
@Test(expected = JobRuntimeException.class)
public void computeWorkUnitsNoSuchDirectory() {
    final Configuration conf = new Configuration();
    conf.setProperty(FileSourceConfiguration.TYPE, "json");
    conf.setProperty(FileSourceConfiguration.SCHEMA, "{}");
    conf.setProperty(FileSourceConfiguration.DIRECTORY, "path/not/exist");
    final FileWorkUnitCalculator workUnitCalculator =
        new FileWorkUnitCalculator(new FileSourceConfiguration(conf));
    // The call is expected to throw; the original assigned the result to an unused local.
    workUnitCalculator.computeWorkUnits();
}
/** Prepares a HiveSourceConfiguration backed by fresh temp folders for data and metadata. */
@Before
public void setupTest() throws IOException {
    this.config = new Configuration();
    this.fileSystem = FSUtils.getFs(this.config);
    // Each test run gets isolated temp directories.
    this.dataPath = FileTestUtil.getTempFolder();
    this.metadataPath = FileTestUtil.getTempFolder();
    this.config.setProperty(HiveSourceConfiguration.JOB_NAME, JOB_NAME);
    this.config.setProperty(HiveSourceConfiguration.BASE_METADATA_PATH, this.metadataPath);
    this.config.setProperty(HiveSourceConfiguration.HIVE_DATA_PATH, this.dataPath);
    this.config.setProperty(HiveSourceConfiguration.PARTITION_TYPE, PartitionType.NORMAL.toString());
    this.hiveConfig = new HiveSourceConfiguration(this.config);
}