// NOTE(review): this test body appears garbled/truncated in this chunk. The
// ssm.getSession( call is cut off mid-argument-list, the `Throwable e = ...`
// assignments that should precede each checkHiveException(...) call are missing,
// and stray string-concatenation fragments (`+ "..."`) from those missing
// statements remain inline. Do not build from this copy — restore the method
// body from the original file before compiling.
@Test public void testGetHiveException() throws Exception { HiveConf conf = getHiveConf(); SparkSessionManager ssm = SparkSessionManagerImpl.getInstance(); SparkSessionImpl ss = (SparkSessionImpl) ssm.getSession( checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_TIMEOUT); checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_INTERRUPTED); checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_TIMEOUT); checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_INVALID_QUEUE, "submitted by user hive to unknown queue: foo"); + "Queue root.foo is STOPPED. Cannot accept submission of application: " + "application_1508358311878_3369187"); checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_INVALID_QUEUE, "Queue root.foo is STOPPED"); + "Queue root.foo already has 10 applications, cannot accept submission of application: " + "application_1508358311878_3384544"); checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_QUEUE_FULL, "Queue root.foo already has 10 applications"); + "cluster! Please check the values of 'yarn.scheduler.maximum-allocation-mb' and/or " + "'yarn.nodemanager.resource.memory-mb'."); checkHiveException(ss, e, ErrorMsg.SPARK_CREATE_CLIENT_INVALID_RESOURCE_REQUEST, "Required executor memory (7168+10240 MB) is above the max threshold (16384 MB)");
/** * Test HIVE-16395 - by default we force cloning of Configurations for Spark jobs */ @Test public void testForceConfCloning() throws Exception { HiveConf conf = getHiveConf(); String sparkCloneConfiguration = HiveSparkClientFactory.SPARK_CLONE_CONFIGURATION; // Clear the value of sparkCloneConfiguration conf.unset(sparkCloneConfiguration); assertNull( "Could not clear " + sparkCloneConfiguration + " in HiveConf", conf.get(sparkCloneConfiguration)); // By default we should set sparkCloneConfiguration to true in the Spark config checkSparkConf(conf, sparkCloneConfiguration, "true"); // User can override value for sparkCloneConfiguration in Hive config to false conf.set(sparkCloneConfiguration, "false"); checkSparkConf(conf, sparkCloneConfiguration, "false"); // User can override value of sparkCloneConfiguration in Hive config to true conf.set(sparkCloneConfiguration, "true"); checkSparkConf(conf, sparkCloneConfiguration, "true"); }
@Test public void testConfigsForInitialization() { //Test to make sure that configs listed in RpcConfiguration.HIVE_SPARK_RSC_CONFIGS which are passed // through HiveConf are included in the Spark configuration. HiveConf hiveConf = getHiveConf(); hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333"); hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS, "test-rpc-server-address"); Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf, null); assertEquals("49152-49222,49223,49224-49333", sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname)); assertEquals("test-rpc-server-address", sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname)); }
/**
 * Convenience overload: asserts only the mapped {@link ErrorMsg}, without
 * matching any expected substring in the exception message.
 */
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
  checkHiveException(ss, e, expectedErrMsg, null);
}
// NOTE(review): orphaned statement — this looks like a fragment of a test method
// whose surrounding lines are missing from this chunk; verify against the
// original file, it will not compile standing alone at class scope like this.
HiveConf hiveConf = getHiveConf();
/** Tests CLI scenario where we get a single session and use it multiple times. */ @Test public void testSingleSessionMultipleUse() throws Exception { HiveConf conf = getHiveConf(); SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance(); SparkSession sparkSession1 = sessionManager.getSession(null, conf, true); assertTrue(sparkSession1.isOpen()); SparkSession sparkSession2 = sessionManager.getSession(sparkSession1, conf, true); assertTrue(sparkSession1 == sparkSession2); // Same session object is expected. assertTrue(sparkSession2.isOpen()); sessionManager.shutdown(); sessionManager.closeSession(sparkSession1); }
/**
 * Verifies that the RPC server binds to a port inside the configured
 * SPARK_RPC_SERVER_PORT range, and that a fresh session can actually connect
 * to the RpcServer on that custom port.
 */
@Test
public void testServerPortAssignment() throws Exception {
  HiveConf conf = getHiveConf();
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");

  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);

  // The assigned port must fall within the ranges configured above.
  assertTrue("Port should be within configured port range:" + SparkClientFactory.getServerPort(),
      SparkClientFactory.getServerPort() >= 49152 && SparkClientFactory.getServerPort() <= 49333);

  // Creating a session proves a new SparkSession can reach the RpcServer on the
  // custom port; a HiveException here means the connection failed.
  try {
    testSessionManager.getSession(null, conf, true);
  } catch (HiveException e) {
    Assert.fail("Failed test to connect to the RpcServer with custom port");
  }

  testSessionManager.shutdown();
}

// NOTE(review): declaration below is cut off at the end of this chunk (its body
// continues past the visible source) and appears to duplicate the existing
// two-arg checkHiveException overload — verify against the original file.
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {