@Override public SparkSession getOrCreate() { if (!options.containsKey("spark.master")) { // default spark_master option. return this.master("local[*]").getOrCreate(); } if (!options.containsKey("spark.driver.allowMultipleContexts")) { return this.config("spark.driver.allowMultipleContexts", "true").getOrCreate(); } UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("ubuntu")); // Set up spark context with given options. final SparkConf sparkConf = new SparkConf(); if (!options.containsKey("spark.app.name")) { try { // get and override configurations from JobLauncher. final Configuration configurations = JobLauncher.getBuiltJobConf(); final Injector injector = Tang.Factory.getTang().newInjector(configurations); options.put("spark.app.name", injector.getNamedInstance(JobConf.JobId.class)); } catch (final InjectionException e) { throw new RuntimeException(e); } } options.forEach(sparkConf::set); final SparkContext sparkContext = new org.apache.nemo.compiler.frontend.spark.core.SparkContext(sparkConf); super.sparkContext(sparkContext); return SparkSession.from(super.getOrCreate(), this.options); } }
/**
 * Spring bean producing the application's {@link SparkSession}.
 *
 * <p>Reuses the context from {@link #javaSparkContext()} rather than letting the
 * builder create a fresh one, and tags the session with a fixed application name.
 *
 * @return the shared (get-or-create) session instance.
 */
@Bean
public SparkSession sparkSession() {
  final SparkSession.Builder builder = SparkSession.builder()
      .sparkContext(javaSparkContext().sc())
      .appName("Java Spark SQL basic example");
  return builder.getOrCreate();
}
public static SparkSession getSparkSession(@NonNull final SparkConf sparkConf) { final SparkContext sc = SparkTestUtil.getSparkContext(sparkConf).sc(); final SparkSession session = org.apache.spark.sql.SparkSession.builder() .master("local[*]") .sparkContext(sc) .getOrCreate(); return session; } }