public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf, String sparkSessionId, String hiveSessionId) throws Exception { Map<String, String> sparkConf = initiateSparkConf(hiveconf, hiveSessionId); // Submit spark job through local spark context while spark master is local mode, otherwise submit // spark job through remote spark context. String master = sparkConf.get("spark.master"); if (master.equals("local") || master.startsWith("local[")) { // With local spark context, all user sessions share the same spark context. return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf), hiveconf); } else { return new RemoteHiveSparkClient(hiveconf, sparkConf, sparkSessionId); } }
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Exception { Map<String, String> sparkConf = initiateSparkConf(hiveconf); // Submit spark job through local spark context while spark master is local mode, otherwise submit // spark job through remote spark context. String master = sparkConf.get("spark.master"); if (master.equals("local") || master.startsWith("local[")) { // With local spark context, all user sessions share the same spark context. return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf)); } else { return new RemoteHiveSparkClient(hiveconf, sparkConf); } }
// Initializes the session manager exactly once: builds the Spark configuration
// from the Hive configuration and initializes the shared SparkClientFactory.
// Uses double-checked locking on 'inited'.
// NOTE(review): the unsynchronized first read of 'inited' is only safe under the
// Java Memory Model if the field is declared volatile — its declaration is not
// visible here; confirm.
@Override
public void setup(HiveConf hiveConf) throws HiveException {
  if (!inited) {
    synchronized (this) {
      if (!inited) {
        LOG.info("Setting up the session manager.");
        Map<String, String> conf = HiveSparkClientFactory.initiateSparkConf(hiveConf);
        try {
          SparkClientFactory.initialize(conf);
          // Only flip the flag after initialize() succeeds, so a failed setup
          // can be retried on the next call.
          inited = true;
        } catch (IOException e) {
          throw new HiveException("Error initializing SparkClientFactory", e);
        }
      }
    }
  }
}
// Initializes the session manager exactly once: records the Hive configuration,
// starts the session-timeout thread, builds the Spark configuration and
// initializes the shared SparkClientFactory. Uses double-checked locking on
// 'inited'.
// NOTE(review): the unsynchronized first read of 'inited' is only safe under the
// Java Memory Model if the field is declared volatile — its declaration is not
// visible here; confirm. Also note startTimeoutThread() runs even if the later
// SparkClientFactory.initialize() fails.
@Override
public void setup(HiveConf hiveConf) throws HiveException {
  if (!inited) {
    synchronized (this) {
      if (!inited) {
        LOG.info("Setting up the session manager.");
        conf = hiveConf;
        startTimeoutThread();
        // null hiveSessionId: this Spark conf is not tied to a single Hive session.
        Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf, null);
        try {
          SparkClientFactory.initialize(sparkConf, hiveConf);
          // Only flip the flag after initialize() succeeds, so a failed setup
          // can be retried on the next call.
          inited = true;
        } catch (IOException e) {
          throw new HiveException("Error initializing SparkClientFactory", e);
        }
      }
    }
  }
}
@Test public void testConfigsForInitialization() { //Test to make sure that configs listed in RpcConfiguration.HIVE_SPARK_RSC_CONFIGS which are passed // through HiveConf are included in the Spark configuration. HiveConf hiveConf = getHiveConf(); hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT, "49152-49222,49223,49224-49333"); hiveConf.setVar(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS, "test-rpc-server-address"); Map<String, String> sparkConf = HiveSparkClientFactory.initiateSparkConf(hiveConf, null); assertEquals("49152-49222,49223,49224-49333", sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_PORT.varname)); assertEquals("test-rpc-server-address", sparkConf.get(HiveConf.ConfVars.SPARK_RPC_SERVER_ADDRESS.varname)); }
/**
 * Initializes the session manager exactly once: creates the synchronized set
 * tracking created sessions, builds the Spark configuration from the Hive
 * configuration, and initializes the shared {@code SparkClientFactory}.
 *
 * <p>The winner of the {@code compareAndSet} performs initialization; every
 * other caller returns immediately.
 *
 * @param hiveConf the Hive configuration to derive the Spark configuration from
 * @throws HiveException if {@code SparkClientFactory} initialization fails
 */
@Override
public void setup(HiveConf hiveConf) throws HiveException {
  if (inited.compareAndSet(false, true)) {
    LOG.info("Setting up the session manager.");
    createdSessions = Collections.synchronizedSet(new HashSet<SparkSession>());
    Map<String, String> conf = HiveSparkClientFactory.initiateSparkConf(hiveConf);
    try {
      SparkClientFactory.initialize(conf);
    } catch (IOException e) {
      // Fix: reset the flag on failure. Previously 'inited' stayed true after a
      // failed initialize(), so every later setup() call was silently skipped
      // and the manager could never be initialized again.
      inited.set(false);
      throw new HiveException("Error initializing SparkClientFactory", e);
    }
  }
}
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws IOException, SparkException { Map<String, String> sparkConf = initiateSparkConf(hiveconf); // Submit spark job through local spark context while spark master is local mode, otherwise submit // spark job through remote spark context. String master = sparkConf.get("spark.master"); if (master.equals("local") || master.startsWith("local[")) { // With local spark context, all user sessions share the same spark context. return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf)); } else { return new RemoteHiveSparkClient(hiveconf, sparkConf); } }