@Override public void run() { try { if (instance != null) { instance.shutdown(); } } catch (Exception e) { // ignore } } });
@Override public void run() { try { if (instance != null) { instance.shutdown(); } } catch (Exception e) { // ignore } } });
// Best-effort stop of the shared Spark session pool during HiveServer2 shutdown;
// the exception is logged (not rethrown) so the rest of shutdown can proceed.
// NOTE(review): fragment — the enclosing try opens before this excerpt and the
// catch block closes after it.
SparkSessionManagerImpl.getInstance().shutdown(); } catch(Exception ex) { LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);
@Test
public void testServerPortAssignment() throws Exception {
  // Verifies that the Spark RPC server binds to a port inside the configured
  // SPARK_RPC_SERVER_PORT ranges, and that a new session can connect to it.
  HiveConf conf = getHiveConf();
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);
  try {
    // Read the assigned port once instead of querying it three times.
    int serverPort = SparkClientFactory.getServerPort();
    assertTrue("Port should be within configured port range:" + serverPort,
        serverPort >= 49152 && serverPort <= 49333);
    // Verify that a new SparkSession can successfully connect to the
    // RpcServer listening on the custom port.
    try {
      testSessionManager.getSession(null, conf, true);
    } catch (HiveException e) {
      Assert.fail("Failed test to connect to the RpcServer with custom port");
    }
  } finally {
    // Always tear down the singleton manager — previously a failed
    // assertTrue skipped shutdown() and leaked state into other tests.
    testSessionManager.shutdown();
  }
}

private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
@Override public void run() { try { if (instance != null) { instance.shutdown(); } } catch (Exception e) { // ignore } } });
// Stop the Spark session pool manager; log-and-continue on failure so that
// HiveServer2 shutdown is never aborted by a session-pool error.
// NOTE(review): fragment — enclosing try/catch braces lie outside this excerpt.
SparkSessionManagerImpl.getInstance().shutdown(); } catch(Exception ex) { LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);
// Best-effort teardown of the singleton Spark session pool at server stop;
// exceptions are logged with full stack trace via the SLF4J cause parameter.
// NOTE(review): fragment — enclosing try/catch braces lie outside this excerpt.
SparkSessionManagerImpl.getInstance().shutdown(); } catch(Exception ex) { LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);
// Shut down the shared Spark session pool as part of HiveServer2 stop; a
// failure is recorded in the log but deliberately not propagated.
// NOTE(review): fragment — enclosing try/catch braces lie outside this excerpt.
SparkSessionManagerImpl.getInstance().shutdown(); } catch(Exception ex) { LOG.error("Spark session pool manager failed to stop during HiveServer2 shutdown.", ex);