@Test public void testConfigsForInitialization() { //Test to make sure that configs listed in RpcConfiguration.HIVE_SPARK_RSC_CONFIGS which are passed // through HiveConf are included in the Spark configuration. HiveConf hiveConf = getHiveConf(); hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333"); hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS, "test-rpc-server-address"); Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf, null); assertEquals("49152-49222,49223,49224-49333", sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname)); assertEquals("test-rpc-server-address", sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname)); }
HiveConf hiveConf = getHiveConf();
@Test public void testGetHiveException() throws Exception { HiveConf conf = getHiveConf(); SparkSessionManager ssm = SparkSessionManagerImpl.getInstance(); SparkSessionImpl ss = (SparkSessionImpl) ssm.getSession(
/** Tests CLI scenario where we get a single session and use it multiple times. */ @Test public void testSingleSessionMultipleUse() throws Exception { HiveConf conf = getHiveConf(); SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance(); SparkSession sparkSession1 = sessionManager.getSession(null, conf, true); assertTrue(sparkSession1.isOpen()); SparkSession sparkSession2 = sessionManager.getSession(sparkSession1, conf, true); assertTrue(sparkSession1 == sparkSession2); // Same session object is expected. assertTrue(sparkSession2.isOpen()); sessionManager.shutdown(); sessionManager.closeSession(sparkSession1); }
/** * Test HIVE-16395 - by default we force cloning of Configurations for Spark jobs */ @Test public void testForceConfCloning() throws Exception { HiveConf conf = getHiveConf(); String sparkCloneConfiguration = HiveSparkClientFactory.SPARK_CLONE_CONFIGURATION; // Clear the value of sparkCloneConfiguration conf.unset(sparkCloneConfiguration); assertNull( "Could not clear " + sparkCloneConfiguration + " in HiveConf", conf.get(sparkCloneConfiguration)); // By default we should set sparkCloneConfiguration to true in the Spark config checkSparkConf(conf, sparkCloneConfiguration, "true"); // User can override value for sparkCloneConfiguration in Hive config to false conf.set(sparkCloneConfiguration, "false"); checkSparkConf(conf, sparkCloneConfiguration, "false"); // User can override value of sparkCloneConfiguration in Hive config to true conf.set(sparkCloneConfiguration, "true"); checkSparkConf(conf, sparkCloneConfiguration, "true"); }
/**
 * Verifies that the RPC server binds to a port inside the range configured via
 * {@code SPARK_RPC_SERVER_PORT}, and that a new Spark session can actually connect
 * through that custom port.
 */
@Test
public void testServerPortAssignment() throws Exception {
  HiveConf conf = getHiveConf();
  // Port spec mixes ranges and single ports; any port in [49152, 49333] is acceptable.
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);

  assertTrue("Port should be within configured port range:" + SparkClientFactory.getServerPort(),
      SparkClientFactory.getServerPort() >= 49152 && SparkClientFactory.getServerPort() <= 49333);

  //Verify that new spark session can be created to ensure that new SparkSession
  // is successfully able to connect to the RpcServer with custom port.
  try {
    testSessionManager.getSession(null, conf, true);
  } catch (HiveException e) {
    Assert.fail("Failed test to connect to the RpcServer with custom port");
  }
  testSessionManager.shutdown();
}

// Helper: asserts that the wrapped cause of a session failure maps to the expected
// Hive ErrorMsg. (Body continues beyond this view — left untouched.)
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {