// Shutdown-hook body (hook registration opens above this view): best-effort
// shutdown of the singleton manager. Exceptions are deliberately ignored
// because the JVM is already exiting and there is nothing useful to do.
@Override
public void run() {
  try {
    if (instance != null) {
      instance.shutdown();
    }
  } catch (Exception e) {
    // ignore
  }
} });
/**
 * Returns the process-wide singleton manager, creating it lazily on first
 * call. The {@code synchronized} modifier makes lazy creation race-free.
 *
 * @return the singleton {@link SparkSessionManagerImpl}
 * @throws HiveException if construction of the manager fails
 */
public static synchronized SparkSessionManagerImpl getInstance() throws HiveException {
  if (instance != null) {
    return instance;
  }
  instance = new SparkSessionManagerImpl();
  return instance;
}
/**
 * Creates a Spark session with an externally assigned session id.
 * The id is stored before {@code initErrorPatterns()} runs, in case the
 * pattern initialization reads instance state — keep this statement order.
 *
 * @param sessionId identifier for this session, supplied by the caller
 */
SparkSessionImpl(String sessionId) {
  this.sessionId = sessionId;
  initErrorPatterns();
}
/**
 * Verifies that the RPC server binds a port inside the configured
 * SPARK_RPC_SERVER_PORT range and that a new Spark session can still
 * connect to the RpcServer through that custom port.
 */
@Test
public void testServerPortAssignment() throws Exception {
  HiveConf conf = getHiveConf();
  // The range string mixes spans and single ports; all fall in [49152, 49333].
  conf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  SparkSessionManagerImpl testSessionManager = SparkSessionManagerImpl.getInstance();
  testSessionManager.setup(conf);
  assertTrue("Port should be within configured port range:" + SparkClientFactory.getServerPort(),
      SparkClientFactory.getServerPort() >= 49152 && SparkClientFactory.getServerPort() <= 49333);
  // Verify that a new spark session can be created to ensure that the new SparkSession
  // is successfully able to connect to the RpcServer with the custom port.
  try {
    testSessionManager.getSession(null, conf, true);
  } catch (HiveException e) {
    Assert.fail("Failed test to connect to the RpcServer with custom port");
  }
  testSessionManager.shutdown();
}

private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
/** Tests CLI scenario where we get a single session and use it multiple times. */ @Test public void testSingleSessionMultipleUse() throws Exception { HiveConf conf = getHiveConf(); SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance(); SparkSession sparkSession1 = sessionManager.getSession(null, conf, true); assertTrue(sparkSession1.isOpen()); SparkSession sparkSession2 = sessionManager.getSession(sparkSession1, conf, true); assertTrue(sparkSession1 == sparkSession2); // Same session object is expected. assertTrue(sparkSession2.isOpen()); sessionManager.shutdown(); sessionManager.closeSession(sparkSession1); }
/** * If the <i>existingSession</i> can be reused return it. * Otherwise * - close it and remove it from the list. * - create a new session and add it to the list. */ @Override public SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException { setup(conf); if (existingSession != null) { // Open the session if it is closed. if (!existingSession.isOpen() && doOpen) { existingSession.open(conf); createdSessions.add(existingSession); } return existingSession; } SparkSession sparkSession = new SparkSessionImpl(SessionState.get().getNewSparkSessionId()); if (doOpen) { sparkSession.open(conf); } if (LOG.isDebugEnabled()) { LOG.debug(String.format("New session (%s) is created.", sparkSession.getSessionId())); } createdSessions.add(sparkSession); return sparkSession; }
/**
 * Force a Spark config to be generated and check that a config value has the expected value
 * @param conf the Hive config to use as a base
 * @param paramName the Spark config name to check
 * @param expectedValue the expected value in the Spark config
 */
private void checkSparkConf(HiveConf conf, String paramName, String expectedValue) throws HiveException {
  SparkSessionManager sessionManager = SparkSessionManagerImpl.getInstance();
  try {
    SparkSessionImpl sparkSessionImpl = (SparkSessionImpl) sessionManager.getSession(null, conf, true);
    assertTrue(sparkSessionImpl.isOpen());
    HiveSparkClient hiveSparkClient = sparkSessionImpl.getHiveSparkClient();
    SparkConf sparkConf = hiveSparkClient.getSparkConf();
    String cloneConfig = sparkConf.get(paramName);
    sessionManager.closeSession(sparkSessionImpl);
    assertEquals(expectedValue, cloneConfig);
  } finally {
    // Fix: always shut the manager down. The original ran shutdown() after
    // the assertions, so a failing assertion leaked the RPC server into
    // every subsequent test.
    sessionManager.shutdown();
  }
}
/**
 * Closes the active Spark session, if any, through the session manager.
 * Failures are logged and swallowed; the local reference is always cleared.
 */
public void closeSparkSession() {
  if (sparkSession == null) {
    return;
  }
  try {
    SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
  } catch (Exception ex) {
    LOG.error("Error closing spark session.", ex);
  } finally {
    sparkSession = null;
  }
}
/**
 * Spark session ids are "&lt;hiveSessionId&gt;_&lt;counter&gt;"; the counter
 * increments per Hive session and resets when a new Hive session starts.
 */
@Test
public void testGetSessionId() throws HiveException {
  SessionState state = SessionState.start(SESSION_HIVE_CONF);
  SparkSessionManager manager = SparkSessionManagerImpl.getInstance();

  state.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(state.getSessionId() + "_0", state.getSparkSession().getSessionId());

  state.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(state.getSessionId() + "_1", state.getSparkSession().getSessionId());

  // Starting a fresh SessionState restarts the Spark session counter at zero.
  state = SessionState.start(SESSION_HIVE_CONF);
  state.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(state.getSessionId() + "_0", state.getSparkSession().getSessionId());
}
/**
 * Closes the given session and removes it from the set of sessions this
 * manager created. A null argument is a no-op.
 *
 * @param sparkSession the session to close; may be null
 * @throws HiveException if closing the session fails
 */
@Override
public void closeSession(SparkSession sparkSession) throws HiveException {
  if (sparkSession == null) {
    return;
  }
  // Parameterized logging replaces the isDebugEnabled() + String.format
  // pattern: the message is only formatted when DEBUG is actually enabled.
  LOG.debug("Closing Spark session ({}).", sparkSession.getSessionId());
  sparkSession.close();
  createdSessions.remove(sparkSession);
}
/**
 * Returns the scratch directory for this Spark session, creating it lazily
 * on first access.
 * This method is not thread safe.
 * @return Path to Spark session scratch directory.
 * @throws IOException if the scratch directory cannot be created.
 */
@Override
public Path getHDFSSessionDir() throws IOException {
  if (scratchDir != null) {
    return scratchDir;
  }
  scratchDir = createScratchDir();
  return scratchDir;
}
/** Creates a Spark session with a freshly generated session id. */
public SparkSessionImpl() {
  this.sessionId = makeSessionId();
}
/**
 * Closes the session: marks it closed, shuts down the Spark client, and
 * removes the session scratch directory.
 */
@Override
public void close() {
  isOpen = false;
  if (hiveSparkClient != null) {
    try {
      hiveSparkClient.close();
    } catch (IOException e) {
      LOG.error("Failed to close spark session (" + sessionId + ").", e);
    }
    // Fix: clean the scratch dir in its own try block. The original put
    // both calls in one block, so a throwing hiveSparkClient.close()
    // skipped cleanScratchDir() and leaked the scratch directory.
    try {
      cleanScratchDir();
    } catch (IOException e) {
      LOG.error("Failed to clean scratch dir for spark session (" + sessionId + ").", e);
    }
  }
  hiveSparkClient = null;
}
/**
 * Asserts that wrapping {@code e} through the session yields a HiveException
 * with the expected canonical error message and, when given, that the
 * exception message contains {@code expectedMatchedStr}.
 */
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg,
    String expectedMatchedStr) {
  HiveException wrapped = ss.getHiveException(e);
  assertEquals(expectedErrMsg, wrapped.getCanonicalErrorMsg());
  if (expectedMatchedStr == null) {
    return;
  }
  assertTrue(wrapped.getMessage().contains(expectedMatchedStr));
}
// Convenience overload: checks only the canonical error message, with no
// substring match against the exception text.
private void checkHiveException(SparkSessionImpl ss, Throwable e, ErrorMsg expectedErrMsg) {
  checkHiveException(ss, e, expectedErrMsg, null);
}
/**
 * Closes the active Spark session, if any, through the session manager.
 * Failures are logged and swallowed; the local reference is always cleared.
 */
public void closeSparkSession() {
  if (sparkSession == null) {
    return;
  }
  try {
    SparkSessionManagerImpl.getInstance().closeSession(sparkSession);
  } catch (Exception ex) {
    LOG.error("Error closing spark session.", ex);
  } finally {
    sparkSession = null;
  }
}
// Shutdown-hook body (hook registration opens above this view): best-effort
// shutdown of the singleton manager. Exceptions are deliberately ignored
// because the JVM is already exiting and there is nothing useful to do.
@Override
public void run() {
  try {
    if (instance != null) {
      instance.shutdown();
    }
  } catch (Exception e) {
    // ignore
  }
} });
/**
 * Lazily creates and returns the process-wide singleton manager; the
 * method-level lock serializes first-time construction across threads.
 *
 * @return the singleton {@link SparkSessionManagerImpl}
 * @throws HiveException if the manager cannot be constructed
 */
public static synchronized SparkSessionManagerImpl getInstance() throws HiveException {
  if (instance == null) {
    SparkSessionManagerImpl created = new SparkSessionManagerImpl();
    instance = created;
  }
  return instance;
}
/**
 * Returns the scratch directory for this Spark session, creating it on
 * first access. Uses double-checked locking on {@code dirLock} so the
 * common already-created case avoids taking the lock.
 * NOTE(review): double-checked locking is only fully safe under the Java
 * memory model if {@code scratchDir} is declared volatile — confirm the
 * field declaration.
 * @return Path to the Spark session scratch directory.
 * @throws IOException if the scratch directory cannot be created.
 */
@Override
public Path getHDFSSessionDir() throws IOException {
  if (scratchDir == null) {
    synchronized (dirLock) {
      if (scratchDir == null) {
        scratchDir = createScratchDir();
      }
    }
  }
  return scratchDir;
}