/** Verifies that every {@code load(...)} overload of the reader API accepts its arguments. */
@Test
public void testLoadAPI() {
  // Zero-path overload.
  spark.read().format("org.apache.spark.sql.test").load();
  // Single-path overload.
  spark.read().format("org.apache.spark.sql.test").load(input);
  // Varargs overload.
  spark.read().format("org.apache.spark.sql.test").load(input, input, input);
  // Explicit String[] overload.
  spark.read().format("org.apache.spark.sql.test").load(new String[]{input, input});
}
/** Exercises each reader {@code load} overload against the test data source. */
@Test
public void testLoadAPI() {
  // Hoist the data-source class name so each call reads the same.
  final String source = "org.apache.spark.sql.test";
  spark.read().format(source).load();
  spark.read().format(source).load(input);
  spark.read().format(source).load(input, input, input);
  spark.read().format(source).load(new String[]{input, input});
}
/**
 * Compile-time coverage of the {@code orc()} read overloads. Deliberately not
 * annotated with {@code @Test}: orc() cannot run without Hive classes on the
 * classpath, so this method only needs to compile.
 */
public void testOrcAPI() {
  // Zero-path, single-path, varargs, and explicit-array overloads.
  spark.read().schema(schema).orc();
  spark.read().schema(schema).orc(input);
  spark.read().schema(schema).orc(input, input, input);
  spark.read().schema(schema).orc(new String[]{input, input}).write().orc(output);
}
}
/**
 * This only tests whether the orc() API compiles; it is never executed as a
 * test because orc() requires Hive classes that are absent here.
 */
public void testOrcAPI() {
  spark.read().schema(schema).orc();                    // no path
  spark.read().schema(schema).orc(input);               // one path
  spark.read().schema(schema).orc(input, input, input); // varargs paths
  spark.read().schema(schema).orc(new String[]{input, input})
      .write().orc(output);                             // array paths, then write back
}
}
/**
 * Round-trips {@code df} through the JSON source using an explicit options map
 * (the {@code path} option) and checks the reloaded data matches the original.
 */
@Test
public void saveAndLoad() {
  Map<String, String> options = new HashMap<>();
  options.put("path", path.toString());
  // Write with ErrorIfExists so a stale file from a previous run fails loudly.
  df.write().mode(SaveMode.ErrorIfExists).format("json").options(options).save();
  Dataset<Row> loadedDF = spark.read().format("json").options(options).load();
  checkAnswer(loadedDF, df.collectAsList());
}
/** Saves {@code df} as JSON via an options-driven path, reloads it, and compares rows. */
@Test
public void saveAndLoad() {
  // Same map drives both the write and the read.
  Map<String, String> opts = new HashMap<>();
  opts.put("path", path.toString());
  df.write().mode(SaveMode.ErrorIfExists).format("json").options(opts).save();
  Dataset<Row> reloaded = spark.read().format("json").options(opts).load();
  checkAnswer(reloaded, df.collectAsList());
}
/** JSON save/load round-trip: options map supplies the path for both directions. */
@Test
public void saveAndLoad() {
  Map<String, String> options = new HashMap<>();
  options.put("path", path.toString());

  // Persist the frame, refusing to overwrite anything already at the path.
  df.write()
      .mode(SaveMode.ErrorIfExists)
      .format("json")
      .options(options)
      .save();

  // Read it back through the same options and verify content equality.
  Dataset<Row> result = spark.read().format("json").options(options).load();
  checkAnswer(result, df.collectAsList());
}
/** Checks that {@code format(...)} works on both the reader and the writer in one chain. */
@Test
public void testFormatAPI() {
  spark.read()
      .format("org.apache.spark.sql.test")
      .load()
      .write()
      .format("org.apache.spark.sql.test")
      .save();
}
/** Reads through the test source, then immediately writes through it again. */
@Test
public void testFormatAPI() {
  // Split the fluent chain: load first, then push the result back out.
  Dataset<Row> roundTrip = spark.read().format("org.apache.spark.sql.test").load();
  roundTrip.write().format("org.apache.spark.sql.test").save();
}