/** Reports whether the raw RDD backing this instance has been populated. */
public boolean isLoaded() {
  return getRawRDD() != null;
}
}
/**
 * Translates the SimpleFeature values of a loaded {@link GeoWaveRDD} and pushes them into
 * GeoWave under the given index.
 *
 * @param sc the active Spark context
 * @param index the GeoWave index to write under
 * @param outputStoreOptions plugin options describing the destination data store
 * @param adapter the feature adapter used to serialize the features
 * @param inputRDD a loaded RDD whose values are written; when not loaded, an error is
 *        logged and the method returns without writing
 * @throws IOException if the underlying write fails
 */
public static void writeRDDToGeoWave(
    SparkContext sc,
    Index index,
    DataStorePluginOptions outputStoreOptions,
    FeatureDataAdapter adapter,
    GeoWaveRDD inputRDD) throws IOException {
  if (!inputRDD.isLoaded()) {
    LOGGER.error("Must provide a loaded RDD.");
    return;
  }
  // Only the SimpleFeature values of the (key, feature) pairs are persisted.
  final JavaRDD<SimpleFeature> features = inputRDD.getRawRDD().values();
  writeToGeoWave(sc, index, outputStoreOptions, adapter, features);
}
/**
 * Translates the SimpleFeature values of a loaded {@link GeoWaveRDD} and pushes them into
 * GeoWave once per index in {@code indices}.
 *
 * @param sc the active Spark context
 * @param indices the GeoWave indices to write under; the RDD values are written once per
 *        index
 * @param outputStoreOptions plugin options describing the destination data store
 * @param adapter the feature adapter used to serialize the features
 * @param inputRDD a loaded RDD whose values are written; when not loaded, an error is
 *        logged and the method returns without writing
 * @throws IOException if any underlying write fails
 */
public static void writeRDDToGeoWave(
    SparkContext sc,
    Index[] indices,
    DataStorePluginOptions outputStoreOptions,
    FeatureDataAdapter adapter,
    GeoWaveRDD inputRDD) throws IOException {
  if (!inputRDD.isLoaded()) {
    LOGGER.error("Must provide a loaded RDD.");
    return;
  }
  // Enhanced for-each replaces the manual index loop; iteration order and per-index
  // behavior are unchanged, matching the single-index overload.
  for (final Index index : indices) {
    writeToGeoWave(sc, index, outputStoreOptions, adapter, inputRDD.getRawRDD().values());
  }
}
/**
 * Maps every feature of a loaded {@link GeoWaveRDD} to the centroid of its default
 * geometry.
 *
 * @param inputRDD a loaded RDD of SimpleFeature values
 * @return an RDD of centroid points, or {@code null} when the input RDD is not loaded
 *         (an error is logged in that case)
 */
public static JavaRDD<Point> rddFeatureCentroids(GeoWaveRDD inputRDD) {
  if (!inputRDD.isLoaded()) {
    LOGGER.error("Must provide a loaded RDD.");
    return null;
  }
  return inputRDD.getRawRDD().values().map(
      feature -> ((Geometry) feature.getDefaultGeometry()).getCentroid());
}
/**
 * Lazily builds, caches, and returns a Spark SQL DataFrame view of the given RDD's
 * SimpleFeature values, using the schema configured on this instance.
 *
 * NOTE(review): both rowRDD and dataFrame are cached on first use — a later call with a
 * DIFFERENT pairRDD still returns the DataFrame built from the first one; confirm callers
 * expect this.
 * NOTE(review): the lazy initialization is unsynchronized — presumably single-threaded
 * driver-side use; verify before sharing this object across threads.
 *
 * @param pairRDD a GeoWave RDD whose values are converted to Rows on first call
 * @return the cached DataFrame built from the first RDD passed in
 */
public Dataset<Row> getDataFrame(GeoWaveRDD pairRDD) {
  if (rowRDD == null) {
    // Convert each SimpleFeature value to a Row according to the configured schema.
    SimpleFeatureMapper mapper = new SimpleFeatureMapper(schema);
    rowRDD = pairRDD.getRawRDD().values().map(mapper);
  }
  if (dataFrame == null) {
    dataFrame = sparkSession.createDataFrame(rowRDD, schema);
  }
  return dataFrame;
}
// NOTE(review): fragment of a larger pipeline — re-keys each (GeoWaveInputKey,
// SimpleFeature) tuple by ByteArray via a PairFlatMapFunction; the anonymous class body
// continues beyond this view, so the mapping logic cannot be confirmed here.
geowaveRDD.getRawRDD().flatMapToPair( new PairFlatMapFunction<Tuple2<GeoWaveInputKey, SimpleFeature>, ByteArray, Tuple2<GeoWaveInputKey, SimpleFeature>>() {
// NOTE(review): fragment — the leading `return null;` presumably follows a failed
// isLoaded() check (mirroring rddFeatureCentroids); each feature's default-geometry
// centroid is mapped toward a Vector, but the lambda body continues beyond this view.
return null; JavaRDD<Vector> vectorRDD = inputRDD.getRawRDD().values().map(feature -> { Point centroid = ((Geometry) feature.getDefaultGeometry()).getCentroid();
// NOTE(review): fragment — configures the max input splits on the RDD options, loads a
// GeoWave RDD via GeoWaveRDDLoader, and unwraps the raw (key, feature) pair RDD.
rddOpts.setMaxSplits(maxSplits); final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd = GeoWaveRDDLoader.loadRDD(context.sc(), inputStoreOptions, rddOpts).getRawRDD();
// NOTE(review): fragment — counts the loaded raw RDD; an IOException during load is only
// logged as a warning (best-effort count), not rethrown. Confirm the caller tolerates a
// missing count in that case.
session.sparkContext(), pluginOptions, rddOptions).getRawRDD().count(); } catch (IOException e) { LOGGER.warn("Unable to load RDD", e);
// NOTE(review): fragment — drops tuples whose default geometry is null or whose internal
// envelope is empty before re-pairing; the flatMapToPair argument continues beyond this
// view.
geowaveRDD.getRawRDD().filter( t -> ((t._2.getDefaultGeometry() != null) && !((Geometry) t._2.getDefaultGeometry()).getEnvelopeInternal().isNull())).flatMapToPair(
// NOTE(review): fragment — caps the split count for a counting query, loads the GeoWave
// RDD, unwraps the raw pair RDD, and delegates the count to getCount for the store's
// type.
queryOpts.setMaxSplits(DEFAULT_SPLITS_FOR_COUNT); final GeoWaveRDD newRDD = GeoWaveRDDLoader.loadRDD(context, dataStore, queryOpts); final JavaPairRDD<GeoWaveInputKey, SimpleFeature> javaRdd = newRDD.getRawRDD(); final long count = getCount(javaRdd, dataStore.getType());
// NOTE(review): fragment with unbalanced braces as shown — interleaved pieces of a
// spatial-join step: left/right RDDs are filtered to non-null default geometries and
// mapped to Geometry values; result RDDs are then set either by subtractByKey against
// combinedResults (unmatched leftovers) or by join + mapToPair (matched features),
// cached in both branches. Confirm the exact branch structure against the full source —
// the bodies are truncated here.
if (commonRightExist) { commonRightRDD = rightRDD.getGeoWaveRDD().getRawRDD().filter( t -> t._2.getDefaultGeometry() != null).mapValues( (Function<SimpleFeature, Geometry>) t -> { if (commonLeftExist) { commonLeftRDD = leftRDD.getGeoWaveRDD().getRawRDD().filter( t -> t._2.getDefaultGeometry() != null).mapValues( (Function<SimpleFeature, Geometry>) t -> { setLeftResults( new GeoWaveRDD( leftRDD.getGeoWaveRDD().getRawRDD().subtractByKey(combinedResults).cache())); setRightResults( new GeoWaveRDD( rightRDD.getGeoWaveRDD().getRawRDD().subtractByKey(combinedResults).cache())); } else { setLeftResults( new GeoWaveRDD( leftRDD.getGeoWaveRDD().getRawRDD().join(combinedResults).mapToPair( t -> new Tuple2<>(t._1(), t._2._1())).cache())); setRightResults( new GeoWaveRDD( rightRDD.getGeoWaveRDD().getRawRDD().join(combinedResults).mapToPair( t -> new Tuple2<>(t._1(), t._2._1())).cache()));