/**
 * Creates a local SparkSession, reserves a fresh temp path for datasource
 * output, and registers a 10-row JSON-backed temp view named "jsonTable".
 *
 * @throws IOException if the temp path cannot be canonicalized or the
 *     placeholder directory cannot be removed
 */
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
      .master("local[*]")
      .appName("testing")
      .getOrCreate();
  // Reserve a unique path name, then remove the placeholder directory so the
  // datasource under test can create it itself.
  path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists() && !path.delete()) {
    // The delete() result used to be silently ignored; a stale directory here
    // would make path-creation assertions pass or fail spuriously.
    throw new IOException("Unable to delete placeholder temp directory: " + path);
  }
  // Build rows {"a":0,"b":"str0"} .. {"a":9,"b":"str9"}.
  List<String> jsonObjects = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
  df = spark.read().json(ds);
  df.createOrReplaceTempView("jsonTable");
}
/**
 * Creates a local SparkSession, reserves a fresh temp path for datasource
 * output, and registers a 10-row JSON-backed temp view named "jsonTable".
 *
 * @throws IOException if the temp path cannot be canonicalized or the
 *     placeholder directory cannot be removed
 */
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
      .master("local[*]")
      .appName("testing")
      .getOrCreate();
  // Reserve a unique path name, then remove the placeholder directory so the
  // datasource under test can create it itself.
  path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists() && !path.delete()) {
    // The delete() result used to be silently ignored; a stale directory here
    // would make path-creation assertions pass or fail spuriously.
    throw new IOException("Unable to delete placeholder temp directory: " + path);
  }
  // Build rows {"a":0,"b":"str0"} .. {"a":9,"b":"str9"}.
  List<String> jsonObjects = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
  df = spark.read().json(ds);
  df.createOrReplaceTempView("jsonTable");
}
/**
 * Creates a local SparkSession, reserves a fresh temp path for datasource
 * output, and registers a 10-row JSON-backed temp view named "jsonTable".
 *
 * @throws IOException if the temp path cannot be canonicalized or the
 *     placeholder directory cannot be removed
 */
@Before
public void setUp() throws IOException {
  spark = SparkSession.builder()
      .master("local[*]")
      .appName("testing")
      .getOrCreate();
  // Reserve a unique path name, then remove the placeholder directory so the
  // datasource under test can create it itself.
  path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists() && !path.delete()) {
    // The delete() result used to be silently ignored; a stale directory here
    // would make path-creation assertions pass or fail spuriously.
    throw new IOException("Unable to delete placeholder temp directory: " + path);
  }
  // Build rows {"a":0,"b":"str0"} .. {"a":9,"b":"str9"}.
  List<String> jsonObjects = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
  df = spark.read().json(ds);
  df.createOrReplaceTempView("jsonTable");
}
/**
 * Builds a two-core local SparkSession driven by a ManualClock, then wraps it
 * in Java streaming contexts with a 1-second batch interval.
 */
@Before
public void setUp() {
  // ManualClock lets tests advance streaming time deterministically.
  SparkConf sparkConf =
      new SparkConf().set("spark.streaming.clock", "org.apache.spark.util.ManualClock");
  spark =
      SparkSession.builder()
          .master("local[2]")
          .appName("JavaStatistics")
          .config(sparkConf)
          .getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
  ssc = new JavaStreamingContext(jsc, new Duration(1000));
  // NOTE(review): relative "checkpoint" dir is created in the process CWD —
  // confirm it is cleaned up after the suite runs.
  ssc.checkpoint("checkpoint");
}
/**
 * Builds a two-core local SparkSession driven by a ManualClock, then wraps it
 * in Java streaming contexts with a 1-second batch interval.
 */
@Before
public void setUp() {
  // ManualClock lets tests advance streaming time deterministically.
  SparkConf sparkConf =
      new SparkConf().set("spark.streaming.clock", "org.apache.spark.util.ManualClock");
  spark =
      SparkSession.builder()
          .master("local[2]")
          .appName("JavaStatistics")
          .config(sparkConf)
          .getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
  ssc = new JavaStreamingContext(jsc, new Duration(1000));
  // NOTE(review): relative "checkpoint" dir is created in the process CWD —
  // confirm it is cleaned up after the suite runs.
  ssc.checkpoint("checkpoint");
}
/**
 * Demo: builds a {@code Dataset<String>} from a plain Java array and prints it.
 */
private void start() {
  SparkSession spark = SparkSession.builder()
      .appName("Array to Dataset<String>")
      .master("local")
      .getOrCreate();
  try {
    // Arrays.asList gives a fixed-size List view over the array, which is all
    // createDataset needs.
    String[] l = new String[] { "a", "b", "c", "d" };
    List<String> data = Arrays.asList(l);
    Dataset<String> df = spark.createDataset(data, Encoders.STRING());
    df.show();
  } finally {
    // Previously the session was never stopped, leaking the local Spark
    // context after start() returned.
    spark.stop();
  }
}
}
/**
 * Starts one shared local[2] SparkSession and JavaSparkContext for the whole
 * test class.
 */
@BeforeClass
public static void startSpark() {
  SparkSession session = SparkSession.builder().master("local[2]").getOrCreate();
  TestDataFrameWrites.spark = session;
  TestDataFrameWrites.sc = new JavaSparkContext(session.sparkContext());
}
/**
 * Spins up a two-core local SparkSession named after the concrete test class
 * and wraps its context in a JavaSparkContext.
 *
 * @throws IOException declared for overriding subclasses; not thrown here
 */
@Before
public void setUp() throws IOException {
  SparkSession.Builder builder = SparkSession.builder();
  builder.master("local[2]");
  builder.appName(getClass().getSimpleName());
  spark = builder.getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
/**
 * Spins up a two-core local SparkSession named after the concrete test class
 * and wraps its context in a JavaSparkContext.
 *
 * @throws IOException declared for overriding subclasses; not thrown here
 */
@Before
public void setUp() throws IOException {
  SparkSession.Builder builder = SparkSession.builder();
  builder.master("local[2]");
  builder.appName(getClass().getSimpleName());
  spark = builder.getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
public AgePredicterLocal(String pathToClassifyModel, String pathToRegressionModel) throws InvalidFormatException, IOException{ spark = SparkSession.builder().master("local").appName("AgePredict").getOrCreate(); classifyModel = new AgeClassifyModel(new File(pathToClassifyModel)); classify = new AgeClassifyME(classifyModel); model = AgePredictModel.readModel(new File(pathToRegressionModel)); }
@Before public void setUp() throws IOException { spark = SparkSession.builder() .master("local[2]") .appName(getClass().getSimpleName()) .getOrCreate(); jsc = new JavaSparkContext(spark.sparkContext()); }
// Returns the shared SparkSession, creating it on first call, with console
// progress bars disabled.
// NOTE(review): NUM_EXECUTORS is passed as the master URL, so it presumably
// holds a string like "local[N]" rather than a bare executor count — confirm
// against its declaration. UI_SHOW_CONSOLE_PROGRESS is presumably the
// "spark.ui.showConsoleProgress" key; verify.
public static SparkSession getSparkSession() { return SparkSession.builder().appName(APP_NAME).master(NUM_EXECUTORS) .config(UI_SHOW_CONSOLE_PROGRESS, false).getOrCreate(); }
/** Creates the local[*] SparkSession named "testing" plus its Java context wrapper. */
@Before
public void setUp() {
  SparkSession.Builder builder = SparkSession.builder().appName("testing");
  spark = builder.master("local[*]").getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
/** Creates (or reuses) a local[*] SparkSession named "testing" for each test. */
@Before
public void setUp() {
  spark = SparkSession.builder().appName("testing").master("local[*]").getOrCreate();
}
/** Creates (or reuses) a local[*] SparkSession named "testing" for each test. */
@Before
public void setUp() {
  spark = SparkSession.builder().appName("testing").master("local[*]").getOrCreate();
}
/** Creates (or reuses) a local[*] SparkSession named "testing" for each test. */
@Before
public void setUp() {
  spark = SparkSession.builder().appName("testing").master("local[*]").getOrCreate();
}
/** Creates the local[*] SparkSession named "testing" plus its Java context wrapper. */
@Before
public void setUp() {
  SparkSession.Builder builder = SparkSession.builder().appName("testing");
  spark = builder.master("local[*]").getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
/** Creates (or reuses) a local[*] SparkSession named "testing" for each test. */
@Before
public void setUp() {
  spark = SparkSession.builder().appName("testing").master("local[*]").getOrCreate();
}
/** Creates the local[*] SparkSession named "testing" plus its Java context wrapper. */
@Before
public void setUp() {
  SparkSession.Builder builder = SparkSession.builder().appName("testing");
  spark = builder.master("local[*]").getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
/** Creates (or reuses) a local[*] SparkSession named "testing" for each test. */
@Before
public void setUp() {
  spark = SparkSession.builder().appName("testing").master("local[*]").getOrCreate();
}