public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf, String sparkSessionId,
    String hiveSessionId) throws Exception {
  Map<String, String> sparkConf = initiateSparkConf(hiveconf, hiveSessionId);
  // Submit the Spark job through a local Spark context when the Spark master is in local mode;
  // otherwise submit it through a remote Spark context.
  String master = sparkConf.get("spark.master");
  if (master.equals("local") || master.startsWith("local[")) {
    // With a local Spark context, all user sessions share the same Spark context.
    return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf), hiveconf);
  } else {
    return new RemoteHiveSparkClient(hiveconf, sparkConf, sparkSessionId);
  }
}
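A minimal caller-side sketch of this factory method, assuming a configured HiveConf; the session id strings are hypothetical placeholders, and the try/finally close assumes HiveSparkClient is Closeable as in the Hive source tree (packages per ql/src/java, verify against your version):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.spark.HiveSparkClient;
import org.apache.hadoop.hive.ql.exec.spark.HiveSparkClientFactory;

public class CreateClientSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    conf.set("spark.master", "local[2]"); // local mode routes to the shared LocalHiveSparkClient

    // Session id values here are hypothetical; real callers derive them from the
    // SparkSession and the Hive SessionState.
    HiveSparkClient client = HiveSparkClientFactory.createHiveSparkClient(
        conf, "spark-session-0", "hive-session-0");
    try {
      // submit Hive-on-Spark work through the client here
    } finally {
      client.close(); // assumes HiveSparkClient extends Closeable
    }
  }
}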
@Override
public void open(HiveConf conf) throws HiveException {
  this.conf = conf;
  try {
    hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf);
  } catch (Throwable e) {
    throw new HiveException("Failed to create spark client.", e);
  }
  // Mark the session open only after the client has been created successfully;
  // setting the flag before the try block would leave a failed session marked open.
  isOpen = true;
}
@Override
public void setup(HiveConf hiveConf) throws HiveException {
  // Double-checked locking; 'inited' must be declared volatile for this to be safe.
  if (!inited) {
    synchronized (this) {
      if (!inited) {
        LOG.info("Setting up the session manager.");
        Map<String, String> conf = HiveSparkClientFactory.initiateSparkConf(hiveConf);
        try {
          SparkClientFactory.initialize(conf);
          inited = true;
        } catch (IOException e) {
          throw new HiveException("Error initializing SparkClientFactory", e);
        }
      }
    }
  }
}
RemoteHiveSparkClient(HiveConf hiveConf, Map<String, String> conf, String sessionId) throws Exception {
  this.hiveConf = hiveConf;
  sparkClientTimeout = hiveConf.getTimeVar(HiveConf.ConfVars.SPARK_CLIENT_FUTURE_TIMEOUT,
      TimeUnit.SECONDS);
  sparkConf = HiveSparkClientFactory.generateSparkConf(conf);
  this.conf = conf;
  this.sessionId = sessionId;
  createRemoteClient();
}
addCredentialProviderPassword(sparkConf, password);
@Override
public void setup(HiveConf hiveConf) throws HiveException {
  // Double-checked locking; 'inited' must be declared volatile for this to be safe.
  if (!inited) {
    synchronized (this) {
      if (!inited) {
        LOG.info("Setting up the session manager.");
        conf = hiveConf;
        startTimeoutThread();
        Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf, null);
        try {
          SparkClientFactory.initialize(sparkConf, hiveConf);
          inited = true;
        } catch (IOException e) {
          throw new HiveException("Error initializing SparkClientFactory", e);
        }
      }
    }
  }
}
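Both setup variants above rely on double-checked locking, which is only correct when the flag is volatile: the volatile write orders the flag update after the initialization work, so a reader that sees true also sees the initialized state. A minimal, self-contained sketch of the idiom (class and member names hypothetical):

public class OneTimeInit {
  // volatile is required: without it a second thread may observe
  // inited == true before the writes made by doExpensiveInit() are visible.
  private volatile boolean inited = false;

  public void setup() {
    if (!inited) {              // first check avoids locking on the hot path
      synchronized (this) {
        if (!inited) {          // second check under the lock
          doExpensiveInit();
          inited = true;        // publish only after init completes
        }
      }
    }
  }

  private void doExpensiveInit() {
    // expensive one-time work goes here
  }
}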
RemoteHiveSparkClient(HiveConf hiveConf, Map<String, String> conf) throws Exception {
  this.hiveConf = hiveConf;
  sparkClientTimeout = hiveConf.getTimeVar(HiveConf.ConfVars.SPARK_CLIENT_FUTURE_TIMEOUT,
      TimeUnit.SECONDS);
  sparkConf = HiveSparkClientFactory.generateSparkConf(conf);
  this.conf = conf;
  createRemoteClient();
}
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Exception {
  Map<String, String> sparkConf = initiateSparkConf(hiveconf);
  // Submit the Spark job through a local Spark context when the Spark master is in local mode;
  // otherwise submit it through a remote Spark context.
  String master = sparkConf.get("spark.master");
  if (master.equals("local") || master.startsWith("local[")) {
    // With a local Spark context, all user sessions share the same Spark context.
    return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf));
  } else {
    return new RemoteHiveSparkClient(hiveconf, sparkConf);
  }
}
@Test
public void testConfigsForInitialization() {
  // Test to make sure that configs listed in RpcConfiguration.HIVE_SPARK_RSC_CONFIGS which are
  // passed through HiveConf are included in the Spark configuration.
  HiveConf hiveConf = getHiveConf();
  hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333");
  hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS, "test-rpc-server-address");

  Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf, null);

  assertEquals("49152-49222,49223,49224-49333",
      sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname));
  assertEquals("test-rpc-server-address",
      sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname));
}
RemoteHiveSparkClient(HiveConf hiveConf, Map<String, String> conf) throws IOException, SparkException {
  this.hiveConf = hiveConf;
  sparkConf = HiveSparkClientFactory.generateSparkConf(conf);
  remoteClient = SparkClientFactory.createClient(conf, hiveConf);
  sparkClientTimeout = hiveConf.getTimeVar(HiveConf.ConfVars.SPARK_CLIENT_FUTURE_TIMEOUT,
      TimeUnit.SECONDS);
}
@Override
public void open(HiveConf conf) throws HiveException {
  closeLock.writeLock().lock();
  try {
    if (!isOpen) {
      LOG.info("Trying to open Hive on Spark session {}", sessionId);
      this.conf = conf;
      try {
        hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf, sessionId,
            SessionState.get().getSessionId());
        isOpen = true;
      } catch (Throwable e) {
        throw getHiveException(e);
      }
      LOG.info("Hive on Spark session {} successfully opened", sessionId);
    } else {
      LOG.info("Hive on Spark session {} is already opened", sessionId);
    }
  } finally {
    closeLock.writeLock().unlock();
  }
}
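A hedged sketch of how a caller might reach this open method through the session manager; the getSession/closeSession signatures are assumptions modeled on the Hive source tree, not a verified API, so check them against the actual SparkSessionManager interface:

// Hedged sketch; manager API assumed, verify before use.
SparkSessionManager manager = SparkSessionManagerImpl.getInstance();
SparkSession session = manager.getSession(null /* no existing session */, conf, true /* open now */);
try {
  // run Hive-on-Spark queries against the opened session
} finally {
  manager.closeSession(session);
}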
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws IOException, SparkException {
  Map<String, String> sparkConf = initiateSparkConf(hiveconf);
  // Submit the Spark job through a local Spark context when the Spark master is in local mode;
  // otherwise submit it through a remote Spark context.
  String master = sparkConf.get("spark.master");
  if (master.equals("local") || master.startsWith("local[")) {
    // With a local Spark context, all user sessions share the same Spark context.
    return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf));
  } else {
    return new RemoteHiveSparkClient(hiveconf, sparkConf);
  }
}
@Override
public void setup(HiveConf hiveConf) throws HiveException {
  // One-shot initialization guard; note that if initialize() throws, 'inited'
  // remains true and setup will not be retried.
  if (inited.compareAndSet(false, true)) {
    LOG.info("Setting up the session manager.");
    createdSessions = Collections.synchronizedSet(new HashSet<SparkSession>());
    Map<String, String> conf = HiveSparkClientFactory.initiateSparkConf(hiveConf);
    try {
      SparkClientFactory.initialize(conf);
    } catch (IOException e) {
      throw new HiveException("Error initializing SparkClientFactory", e);
    }
  }
}
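One trade-off of the compareAndSet guard above is that a failed SparkClientFactory.initialize permanently poisons the flag. A hedged sketch of a retry-friendly variant, a drop-in replacement for the method above under the same field declarations (this is an illustration, not the Hive implementation):

private final AtomicBoolean inited = new AtomicBoolean(false);

@Override
public void setup(HiveConf hiveConf) throws HiveException {
  if (inited.compareAndSet(false, true)) {
    boolean ok = false;
    try {
      SparkClientFactory.initialize(HiveSparkClientFactory.initiateSparkConf(hiveConf));
      ok = true;
    } catch (IOException e) {
      throw new HiveException("Error initializing SparkClientFactory", e);
    } finally {
      if (!ok) {
        inited.set(false); // allow a later setup() to retry after a failure
      }
    }
  }
  // Note: as in the original, a concurrent caller can return while another
  // thread is still initializing; callers must tolerate that window.
}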
@Override
public void open(HiveConf conf) throws HiveException {
  this.conf = conf;
  try {
    hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf);
  } catch (Exception e) {
    throw new HiveException("Failed to create spark client.", e);
  }
  // Mark the session open only after the client has been created successfully;
  // setting the flag before the try block would leave a failed session marked open.
  isOpen = true;
}