final String outputPath = args[1]; SparkSession spark = SparkSession.builder().appName("minmax").getOrCreate();
final String outputPath = args[1]; SparkSession spark = SparkSession.builder().appName("minmax").getOrCreate();
/**
 * Builds a local SparkSession and materializes ten small JSON rows
 * ({"a": i, "b": "str<i>"}) into a DataFrame registered as the
 * "jsonTable" temp view for the tests in this class.
 *
 * @throws IOException if the temp directory cannot be resolved or cleared
 */
@Before
public void setUp() throws IOException {
    spark = SparkSession.builder()
        .master("local[*]")
        .appName("testing")
        .getOrCreate();
    path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
    // Fail fast instead of ignoring File.delete()'s boolean result: a stale
    // directory left here would silently leak pre-existing state into the test.
    if (path.exists() && !path.delete()) {
        throw new IOException("Unable to delete temp path: " + path);
    }
    List<String> jsonObjects = new ArrayList<>(10);
    for (int i = 0; i < 10; i++) {
        jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
    }
    Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
    df = spark.read().json(ds);
    df.createOrReplaceTempView("jsonTable");
}
public AgePredicterLocal(String pathToClassifyModel, String pathToRegressionModel) throws InvalidFormatException, IOException{ spark = SparkSession.builder().master("local").appName("AgePredict").getOrCreate(); classifyModel = new AgeClassifyModel(new File(pathToClassifyModel)); classify = new AgeClassifyME(classifyModel); model = AgePredictModel.readModel(new File(pathToRegressionModel)); }
/**
 * Spins up a two-core local SparkSession named after the test class and
 * exposes its underlying context as a JavaSparkContext.
 *
 * @throws IOException declared for subclass/fixture compatibility
 */
@Before
public void setUp() throws IOException {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[2]")
        .appName(getClass().getSimpleName());
    spark = builder.getOrCreate();
    // Java-friendly wrapper over the session's SparkContext.
    jsc = new JavaSparkContext(spark.sparkContext());
}
.appName("knn2") .getOrCreate();
.appName("knn") .getOrCreate();
.appName("knn") .getOrCreate();
SparkSession spark = SparkSession .builder() .appName("SparkSQLRelativeFrequency") .config(sparkConf) .getOrCreate();
/**
 * Builds a local SparkSession and materializes ten small JSON rows
 * ({"a": i, "b": "str<i>"}) into a DataFrame registered as the
 * "jsonTable" temp view for the tests in this class.
 *
 * @throws IOException if the temp directory cannot be resolved or cleared
 */
@Before
public void setUp() throws IOException {
    spark = SparkSession.builder()
        .master("local[*]")
        .appName("testing")
        .getOrCreate();
    path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
    // Fail fast instead of ignoring File.delete()'s boolean result: a stale
    // directory left here would silently leak pre-existing state into the test.
    if (path.exists() && !path.delete()) {
        throw new IOException("Unable to delete temp path: " + path);
    }
    List<String> jsonObjects = new ArrayList<>(10);
    for (int i = 0; i < 10; i++) {
        jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
    }
    Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
    df = spark.read().json(ds);
    df.createOrReplaceTempView("jsonTable");
}
/**
 * Builds a local SparkSession and materializes ten small JSON rows
 * ({"a": i, "b": "str<i>"}) into a DataFrame registered as the
 * "jsonTable" temp view for the tests in this class.
 *
 * @throws IOException if the temp directory cannot be resolved or cleared
 */
@Before
public void setUp() throws IOException {
    spark = SparkSession.builder()
        .master("local[*]")
        .appName("testing")
        .getOrCreate();
    path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
    // Fail fast instead of ignoring File.delete()'s boolean result: a stale
    // directory left here would silently leak pre-existing state into the test.
    if (path.exists() && !path.delete()) {
        throw new IOException("Unable to delete temp path: " + path);
    }
    List<String> jsonObjects = new ArrayList<>(10);
    for (int i = 0; i < 10; i++) {
        jsonObjects.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
    }
    Dataset<String> ds = spark.createDataset(jsonObjects, Encoders.STRING());
    df = spark.read().json(ds);
    df.createOrReplaceTempView("jsonTable");
}
/**
 * Returns the shared SparkSession, creating it on first call.
 *
 * <p>NOTE(review): the master URL comes from a constant named
 * {@code NUM_EXECUTORS} — presumably a "local[N]"-style string; the name is
 * misleading for a master URL, verify against its declaration.
 *
 * @return the application-wide SparkSession
 */
public static SparkSession getSparkSession() {
    final SparkSession.Builder builder = SparkSession.builder()
        .appName(APP_NAME)
        .master(NUM_EXECUTORS)
        // Keep console output quiet: disable the inline progress bar.
        .config(UI_SHOW_CONSOLE_PROGRESS, false);
    return builder.getOrCreate();
}
/** Creates an all-core local SparkSession plus its JavaSparkContext wrapper. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
    // Java-friendly view of the session's underlying SparkContext.
    jsc = new JavaSparkContext(spark.sparkContext());
}
/** Creates an all-core local SparkSession for this test class. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
}
/** Creates an all-core local SparkSession for this test class. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
}
/** Creates an all-core local SparkSession for this test class. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
}
/** Creates an all-core local SparkSession plus its JavaSparkContext wrapper. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
    // Java-friendly view of the session's underlying SparkContext.
    jsc = new JavaSparkContext(spark.sparkContext());
}
/** Creates an all-core local SparkSession for this test class. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
}
/** Creates an all-core local SparkSession plus its JavaSparkContext wrapper. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
    // Java-friendly view of the session's underlying SparkContext.
    jsc = new JavaSparkContext(spark.sparkContext());
}
/** Creates an all-core local SparkSession for this test class. */
@Before
public void setUp() {
    final SparkSession.Builder builder = SparkSession.builder()
        .master("local[*]")
        .appName("testing");
    spark = builder.getOrCreate();
}