/**
 * Returns the process-wide {@code LocalHiveSparkClient}, lazily creating it on first call.
 * All user sessions share this single local client; each call registers one more
 * active session on it. Synchronized so concurrent sessions cannot race the
 * lazy initialization or the session counter.
 *
 * @param sparkConf Spark configuration used only when the shared client must be created
 * @param hiveConf  Hive configuration used only when the shared client must be created
 * @return the shared local client instance
 * @throws FileNotFoundException if client creation fails to locate a required file
 * @throws MalformedURLException if client creation builds an invalid URL
 */
public static synchronized LocalHiveSparkClient getInstance(SparkConf sparkConf, HiveConf hiveConf)
    throws FileNotFoundException, MalformedURLException {
  LocalHiveSparkClient instance = client;
  if (instance == null) {
    instance = new LocalHiveSparkClient(sparkConf, hiveConf);
    client = instance;
  }
  // Track how many sessions are using the shared client.
  instance.activeSessions++;
  return instance;
}
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf, String sparkSessionId, String hiveSessionId) throws Exception { Map<String, String> sparkConf = initiateSparkConf(hiveconf, hiveSessionId); // Submit spark job through local spark context while spark master is local mode, otherwise submit // spark job through remote spark context. String master = sparkConf.get("spark.master"); if (master.equals("local") || master.startsWith("local[")) { // With local spark context, all user sessions share the same spark context. return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf), hiveconf); } else { return new RemoteHiveSparkClient(hiveconf, sparkConf, sparkSessionId); } }
// Register every jar the Spark job needs, in precedence order:
// the jar containing this class, Hive aux jars, session-reloadable aux jars,
// and jars added by the user during the session.
addJars((new JobConf(this.getClass())).getJar()); addJars(conf.getAuxJars()); addJars(SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars()); addJars(addedJars); // Let the work configure jobConf first, then read MR_JAR_PROPERTY from
// jobConf (not conf) so jars that configureJobConf just added are included.
work.configureJobConf(jobConf); addJars(jobConf.get(MR_JAR_PROPERTY)); // Also ship user-added files and archives alongside the jars.
addResources(addedFiles); addResources(addedArchives);
// Pull the HiveConf from the driver's execution context, make sure all
// locally-added jars/files/archives are visible to the Spark job, then build
// the JobConf the job will run with. Order matters: refreshLocalResources
// must run before the JobConf snapshot is taken from hiveConf.
Context ctx = driverContext.getCtx(); HiveConf hiveConf = (HiveConf) ctx.getConf(); refreshLocalResources(sparkWork, hiveConf); JobConf jobConf = new JobConf(hiveConf);
// Register every jar the Spark job needs, in precedence order:
// the jar containing this class, Hive aux jars, session-reloadable aux jars,
// and jars added by the user during the session.
addJars((new JobConf(this.getClass())).getJar());
addJars(conf.getAuxJars());
addJars(SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars());
addJars(addedJars);
work.configureJobConf(jobConf);
// FIX: read MR_JAR_PROPERTY from jobConf rather than conf —
// work.configureJobConf(jobConf) may have just added jars to jobConf, and
// reading the original HiveConf would silently drop them.
addJars(jobConf.get(MR_JAR_PROPERTY));
// Also ship user-added files and archives alongside the jars.
addResources(addedFiles);
addResources(addedArchives);
// Obtain the HiveConf from the driver context, refresh locally-added
// jars/files/archives so the Spark job can see them, then snapshot the
// configuration into a JobConf. refreshLocalResources must precede the
// JobConf construction so its changes are captured.
Context ctx = driverContext.getCtx(); HiveConf hiveConf = (HiveConf) ctx.getConf(); refreshLocalResources(sparkWork, hiveConf); JobConf jobConf = new JobConf(hiveConf);
// Register every jar the Spark job needs, in precedence order:
// the jar containing this class, Hive aux jars, and jars added by the user
// during the session.
addJars((new JobConf(this.getClass())).getJar());
addJars(HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
addJars(addedJars);
work.configureJobConf(jobConf);
// FIX: read MR_JAR_PROPERTY from jobConf rather than conf —
// work.configureJobConf(jobConf) may have just added jars to jobConf, and
// reading the original HiveConf would silently drop them.
addJars(jobConf.get(MR_JAR_PROPERTY));
// Also ship user-added files and archives alongside the jars.
addResources(addedFiles);
addResources(addedArchives);
/**
 * Returns the process-wide {@code LocalHiveSparkClient}, lazily creating it on
 * first call. Synchronized so concurrent callers cannot race the lazy
 * initialization; every caller receives the same shared instance.
 *
 * @param sparkConf Spark configuration used only when the shared client must be created
 * @return the shared local client instance
 */
public static synchronized LocalHiveSparkClient getInstance(SparkConf sparkConf) {
  LocalHiveSparkClient instance = client;
  if (instance == null) {
    instance = new LocalHiveSparkClient(sparkConf);
    client = instance;
  }
  return instance;
}
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws Exception { Map<String, String> sparkConf = initiateSparkConf(hiveconf); // Submit spark job through local spark context while spark master is local mode, otherwise submit // spark job through remote spark context. String master = sparkConf.get("spark.master"); if (master.equals("local") || master.startsWith("local[")) { // With local spark context, all user sessions share the same spark context. return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf)); } else { return new RemoteHiveSparkClient(hiveconf, sparkConf); } }
// Fetch the HiveConf from the driver's context, push locally-added
// jars/files/archives out so the Spark job can use them, then derive the
// JobConf. The refresh must happen before the JobConf is built from hiveConf
// so that any configuration changes it makes are included.
Context ctx = driverContext.getCtx(); HiveConf hiveConf = (HiveConf) ctx.getConf(); refreshLocalResources(sparkWork, hiveConf); JobConf jobConf = new JobConf(hiveConf);
/**
 * Lazily-initialized accessor for the single shared {@code LocalHiveSparkClient}.
 * The {@code synchronized} keyword guards the check-then-create sequence so
 * only one instance is ever constructed.
 *
 * @param sparkConf Spark configuration, consumed only on first construction
 * @return the shared local client
 */
public static synchronized LocalHiveSparkClient getInstance(SparkConf sparkConf) {
  if (client != null) {
    return client;
  }
  client = new LocalHiveSparkClient(sparkConf);
  return client;
}
public static HiveSparkClient createHiveSparkClient(HiveConf hiveconf) throws IOException, SparkException { Map<String, String> sparkConf = initiateSparkConf(hiveconf); // Submit spark job through local spark context while spark master is local mode, otherwise submit // spark job through remote spark context. String master = sparkConf.get("spark.master"); if (master.equals("local") || master.startsWith("local[")) { // With local spark context, all user sessions share the same spark context. return LocalHiveSparkClient.getInstance(generateSparkConf(sparkConf)); } else { return new RemoteHiveSparkClient(hiveconf, sparkConf); } }