/**
 * Loads a {@link GeoWaveRDD} from the given data store using default RDD
 * options (no query constraints, default split counts).
 *
 * @param sc the active Spark context
 * @param storeOptions plugin options identifying the backing data store
 * @return the loaded RDD
 * @throws IOException if the underlying loader fails to read from the store
 */
public static GeoWaveRDD loadRDD(final SparkContext sc, final DataStorePluginOptions storeOptions)
    throws IOException {
  // Delegate with a freshly constructed, unconfigured options instance.
  return GeoWaveRDDLoader.loadRDD(sc, storeOptions, new RDDOptions());
}
private void loadStoresAndViews() throws IOException { final Collection<InputStoreInfo> addStores = inputStores.values(); for (final InputStoreInfo storeInfo : addStores) { final RDDOptions rddOpts = new RDDOptions(); rddOpts.setQuery(QueryBuilder.newBuilder().addTypeName(storeInfo.typeName).build()); storeInfo.rdd = GeoWaveRDDLoader.loadRDD(session.sparkContext(), storeInfo.storeOptions, rddOpts); // Create a DataFrame from the Left RDD final SimpleFeatureDataFrame dataFrame = new SimpleFeatureDataFrame(session); if (!dataFrame.init(storeInfo.storeOptions, storeInfo.typeName)) { LOGGER.error("Failed to initialize dataframe"); return; } LOGGER.debug(dataFrame.getSchema().json()); final Dataset<Row> dfTemp = dataFrame.getDataFrame(storeInfo.rdd); dfTemp.createOrReplaceTempView(storeInfo.viewName); } }
// Run Spark in local mode (single JVM) — presumably for a test or
// standalone tool; confirm against the surrounding context.
sparkConf.setMaster("local");
final JavaSparkContext context = new JavaSparkContext(sparkConf);
// Configure the GeoWave load: constrain results by the given query and set
// a lower bound on the number of input splits (read parallelism).
final RDDOptions rddOpts = new RDDOptions();
rddOpts.setQuery(QueryBuilder.newBuilder().constraints(query).build());
rddOpts.setMinSplits(minSplits);
// Load only features of the named adapter/type; partCount sets the minimum
// number of input splits for the load.
final RDDOptions rddOpts = new RDDOptions();
rddOpts.setQuery(QueryBuilder.newBuilder().addTypeName(adapterTypeName).build());
rddOpts.setMinSplits(partCount);
// Target the named index, then restrict results with the CQL filter string.
bldr.indexName(indexName);
RDDOptions rddOptions = new RDDOptions();
rddOptions.setQuery(bldr.constraints(bldr.constraintsFactory().cqlConstraints(cqlStr)).build());
try {
// K-means input load: only the split bounds are configured here — no query
// is set on these options in this snippet; confirm whether one is applied
// elsewhere before the load.
final RDDOptions kmeansOpts = new RDDOptions();
kmeansOpts.setMinSplits(minSplits);
kmeansOpts.setMaxSplits(maxSplits);
</kmeansOpts>
// Constrain by the given query and use a fixed minimum split count — per
// the constant's name, presumably tuned for count operations.
final RDDOptions queryOpts = new RDDOptions();
queryOpts.setQuery(QueryBuilder.newBuilder().constraints(query).build());
queryOpts.setMinSplits(DEFAULT_SPLITS_FOR_COUNT);