/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } createdSessions.add(sparkSession); return sparkSession; }
/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); createdSessions.add(existingSession); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(SessionState.get().getNewSparkSessionId()); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } createdSessions.add(sparkSession); return sparkSession; }
// Prepare the shared SparkSessionManagerImpl singleton using hiveConf.
// NOTE(review): fragment — the surrounding context is not visible here.
SparkSessionManagerImpl.getInstance().setup(hiveConf);
// Configure this session-manager instance (presumably HiveServer2-scoped,
// per its name — confirm against the declaring class) with hiveConf.
sessionManagerHS2.setup(hiveConf);
/**
 * Verifies that the RPC server binds to a port inside the configured
 * SPARK_RPC_SERVER_PORT ranges and that a new Spark session can then be
 * opened against that custom port.
 */
@Test
public void testServerPortAssignment() throws Exception {
  HiveConf conf = getHiveConf();
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);
  try {
    assertTrue("Port should be within configured port range:" + SparkClientFactory.getServerPort(),
        SparkClientFactory.getServerPort() >= 49152 && SparkClientFactory.getServerPort() <= 49333);
    // Verify that a new Spark session can be created to ensure that the new
    // SparkSession successfully connects to the RpcServer on the custom port.
    try {
      testSessionManager.getSession(null, conf, true);
    } catch (HiveException e) {
      Assert.fail("Failed test to connect to the RpcServer with custom port");
    }
  } finally {
    // Always shut the manager down so a failed assertion does not leak the
    // RPC server into subsequent tests (previously skipped on failure).
    testSessionManager.shutdown();
  }
}

private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
// Prepare the shared SparkSessionManagerImpl singleton using hiveConf.
// NOTE(review): fragment — the surrounding context is not visible here.
SparkSessionManagerImpl.getInstance().setup(hiveConf);
/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(); createdSessions.add(sparkSession); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } return sparkSession; }
// Prepare the shared SparkSessionManagerImpl singleton using hiveConf.
// NOTE(review): fragment — the surrounding context is not visible here.
SparkSessionManagerImpl.getInstance().setup(hiveConf);
// Prepare the shared SparkSessionManagerImpl singleton using hiveConf.
// NOTE(review): fragment — the surrounding context is not visible here.
SparkSessionManagerImpl.getInstance().setup(hiveConf);