// Persist both RDDs to the "java_api" keyspace using their pre-built row writers.
// NOTE(review): each saveToCassandra() is a blocking Spark action — these run sequentially.
javaFunctions(productsRDD).writerBuilder("java_api", "products", productWriter).saveToCassandra(); javaFunctions(salesRDD).writerBuilder("java_api", "sales", saleWriter).saveToCassandra();
/**
 * Writes the wrapped RDD/DStream to the given Cassandra table, saving only the
 * selected columns with the supplied row writer factory and the default
 * connector and write configuration.
 *
 * @deprecated this method will be removed in future release, please use {@link #writerBuilder(String, String,
 * com.datastax.spark.connector.writer.RowWriterFactory)}
 */
@Deprecated
public void saveToCassandra(String keyspace, String table, RowWriterFactory<T> rowWriterFactory, ColumnSelector columnNames) {
    WriterBuilder builder = new WriterBuilder(
            keyspace, table, rowWriterFactory, columnNames, defaultConnector(), defaultWriteConf());
    builder.saveToCassandra();
}
.withConstantTTL(120)//keeping data for 2 minutes .saveToCassandra();
// Persist the summaries RDD into java_api.summaries via the pre-built summary row writer.
javaFunctions(summariesRDD).writerBuilder("java_api", "summaries", summaryWriter).saveToCassandra();
/** * Method to get total traffic counts of different type of vehicles for each route. * * @param filteredIotDataStream IoT data stream */ public void processTotalTrafficData(JavaDStream<IoTData> filteredIotDataStream) { // We need to get count of vehicle group by routeId and vehicleType JavaPairDStream<AggregateKey, Long> countDStreamPair = filteredIotDataStream .mapToPair(iot -> new Tuple2<>(new AggregateKey(iot.getRouteId(), iot.getVehicleType()), 1L)) .reduceByKey((a, b) -> a + b); // Need to keep state for total count JavaMapWithStateDStream<AggregateKey, Long, Long, Tuple2<AggregateKey, Long>> countDStreamWithStatePair = countDStreamPair .mapWithState(StateSpec.function(totalSumFunc).timeout(Durations.seconds(3600)));//maintain state for one hour // Transform to dstream of TrafficData JavaDStream<Tuple2<AggregateKey, Long>> countDStream = countDStreamWithStatePair.map(tuple2 -> tuple2); JavaDStream<TotalTrafficData> trafficDStream = countDStream.map(totalTrafficDataFunc); // Map Cassandra table column Map<String, String> columnNameMappings = new HashMap<String, String>(); columnNameMappings.put("routeId", "routeid"); columnNameMappings.put("vehicleType", "vehicletype"); columnNameMappings.put("totalCount", "totalcount"); columnNameMappings.put("timeStamp", "timestamp"); columnNameMappings.put("recordDate", "recorddate"); // call CassandraStreamingJavaUtil function to save in DB javaFunctions(trafficDStream).writerBuilder("traffickeyspace", "total_traffic", CassandraJavaUtil.mapToRow(TotalTrafficData.class, columnNameMappings)).saveToCassandra(); }
return om.readValue(r.value(), Order.class); })) .writerBuilder("test", "orders", CassandraJavaUtil.mapToRow(Order.class)).saveToCassandra();
/**
 * Creates a writer builder object with specified parameters.
 *
 * <p>The returned builder is used to tune the write operation and finally push the data to
 * Cassandra. Out of the box it targets all columns and uses the default connector together with
 * the default Spark Cassandra Connector write configuration.</p>
 *
 * <p>An instance of {@link com.datastax.spark.connector.writer.RowWriterFactory} can be obtained
 * from the utility methods of {@link com.datastax.spark.connector.japi.CassandraJavaUtil}.</p>
 *
 * @param keyspaceName the target keyspace name
 * @param tableName the target table name
 * @param rowWriterFactory a row writer factory to be used to save objects from RDD or DStream
 *
 * @return an instance of {@link com.datastax.spark.connector.japi.RDDAndDStreamCommonJavaFunctions.WriterBuilder}
 *
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(com.datastax.spark.connector.mapper.ColumnMapper)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, java.util.Map)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, org.apache.commons.lang3.tuple.Pair[])
 */
public WriterBuilder writerBuilder(String keyspaceName, String tableName, RowWriterFactory<T> rowWriterFactory) {
    return new WriterBuilder(
            keyspaceName, tableName, rowWriterFactory, allColumns, defaultConnector(), defaultWriteConf());
}
/**
 * Creates a writer builder object with specified parameters.
 *
 * <p>Use the builder to configure the write operation and eventually persist the data to
 * Cassandra. By default it writes every column with the default connector and the default
 * Spark Cassandra Connector settings.</p>
 *
 * <p>Utility methods in {@link com.datastax.spark.connector.japi.CassandraJavaUtil} provide
 * ready-made {@link com.datastax.spark.connector.writer.RowWriterFactory} instances.</p>
 *
 * @param keyspaceName the target keyspace name
 * @param tableName the target table name
 * @param rowWriterFactory a row writer factory to be used to save objects from RDD or DStream
 *
 * @return an instance of {@link com.datastax.spark.connector.japi.RDDAndDStreamCommonJavaFunctions.WriterBuilder}
 *
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(com.datastax.spark.connector.mapper.ColumnMapper)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, java.util.Map)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, org.apache.commons.lang3.tuple.Pair[])
 */
public WriterBuilder writerBuilder(String keyspaceName, String tableName, RowWriterFactory<T> rowWriterFactory) {
    WriterBuilder builder = new WriterBuilder(
            keyspaceName, tableName, rowWriterFactory, allColumns, defaultConnector(), defaultWriteConf());
    return builder;
}
/**
 * Creates a writer builder object with specified parameters.
 *
 * <p>The builder configures the write operation before saving to Cassandra; its defaults are
 * all columns, the default connector, and the default Spark Cassandra Connector write
 * configuration.</p>
 *
 * <p>See {@link com.datastax.spark.connector.japi.CassandraJavaUtil} for helpers that produce a
 * {@link com.datastax.spark.connector.writer.RowWriterFactory}.</p>
 *
 * @param keyspaceName the target keyspace name
 * @param tableName the target table name
 * @param rowWriterFactory a row writer factory to be used to save objects from RDD or DStream
 *
 * @return an instance of {@link com.datastax.spark.connector.japi.RDDAndDStreamCommonJavaFunctions.WriterBuilder}
 *
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(com.datastax.spark.connector.mapper.ColumnMapper)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, java.util.Map)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, org.apache.commons.lang3.tuple.Pair[])
 */
public WriterBuilder writerBuilder(String keyspaceName, String tableName, RowWriterFactory<T> rowWriterFactory) {
    return new WriterBuilder(keyspaceName, tableName, rowWriterFactory,
            allColumns, defaultConnector(), defaultWriteConf());
}
/** * Method to get window traffic counts of different type of vehicles for each route. * Window duration = 30 seconds and Slide interval = 10 seconds * * @param filteredIotDataStream IoT data stream */ public void processWindowTrafficData(JavaDStream<IoTData> filteredIotDataStream) { // reduce by key and window (30 sec window and 10 sec slide). JavaPairDStream<AggregateKey, Long> countDStreamPair = filteredIotDataStream .mapToPair(iot -> new Tuple2<>(new AggregateKey(iot.getRouteId(), iot.getVehicleType()), 1L)) .reduceByKeyAndWindow((a, b) -> a + b, Durations.seconds(30), Durations.seconds(10)); // Transform to dstream of TrafficData JavaDStream<WindowTrafficData> trafficDStream = countDStreamPair.map(windowTrafficDataFunc); // Map Cassandra table column Map<String, String> columnNameMappings = new HashMap<String, String>(); columnNameMappings.put("routeId", "routeid"); columnNameMappings.put("vehicleType", "vehicletype"); columnNameMappings.put("totalCount", "totalcount"); columnNameMappings.put("timeStamp", "timestamp"); columnNameMappings.put("recordDate", "recorddate"); // call CassandraStreamingJavaUtil function to save in DB javaFunctions(trafficDStream).writerBuilder("traffickeyspace", "window_traffic", CassandraJavaUtil.mapToRow(WindowTrafficData.class, columnNameMappings)).saveToCassandra(); }
/**
 * Creates a writer builder object with specified parameters.
 *
 * <p>The builder configures how the data is written and, once configured, saves it to
 * Cassandra. Its defaults are: all columns, the default connector, and the default Spark
 * Cassandra Connector write parameters.</p>
 *
 * <p>To obtain a {@link com.datastax.spark.connector.writer.RowWriterFactory}, use one of the
 * utility methods in {@link com.datastax.spark.connector.japi.CassandraJavaUtil}.</p>
 *
 * @param keyspaceName the target keyspace name
 * @param tableName the target table name
 * @param rowWriterFactory a row writer factory to be used to save objects from RDD or DStream
 *
 * @return an instance of {@link com.datastax.spark.connector.japi.RDDAndDStreamCommonJavaFunctions.WriterBuilder}
 *
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(com.datastax.spark.connector.mapper.ColumnMapper)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, java.util.Map)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, org.apache.commons.lang3.tuple.Pair[])
 */
public WriterBuilder writerBuilder(String keyspaceName, String tableName, RowWriterFactory<T> rowWriterFactory) {
    ColumnSelector columns = allColumns;
    return new WriterBuilder(keyspaceName, tableName, rowWriterFactory, columns,
            defaultConnector(), defaultWriteConf());
}
/**
 * Creates a writer builder object with specified parameters.
 *
 * <p>A writer builder configures the parameters of the write operation and ultimately saves
 * the data to Cassandra. Freshly created builders save every column using the default
 * connector and the default Spark Cassandra Connector configuration.</p>
 *
 * <p>{@link com.datastax.spark.connector.japi.CassandraJavaUtil} offers utility methods for
 * building a {@link com.datastax.spark.connector.writer.RowWriterFactory}.</p>
 *
 * @param keyspaceName the target keyspace name
 * @param tableName the target table name
 * @param rowWriterFactory a row writer factory to be used to save objects from RDD or DStream
 *
 * @return an instance of {@link com.datastax.spark.connector.japi.RDDAndDStreamCommonJavaFunctions.WriterBuilder}
 *
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(com.datastax.spark.connector.mapper.ColumnMapper)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, java.util.Map)
 * @see com.datastax.spark.connector.japi.CassandraJavaUtil#mapToRow(Class, org.apache.commons.lang3.tuple.Pair[])
 */
public WriterBuilder writerBuilder(String keyspaceName, String tableName, RowWriterFactory<T> rowWriterFactory) {
    WriterBuilder result =
            new WriterBuilder(keyspaceName, tableName, rowWriterFactory, allColumns,
                    defaultConnector(), defaultWriteConf());
    return result;
}
/**
 * Writes the dependency links for the current {@code day} to the Cassandra
 * {@code dependencies} table in the configured keyspace.
 *
 * @param sc    Spark context used to create a one-element RDD
 * @param links dependency links to persist
 */
private void store(JavaSparkContext sc, List<Dependency> links) {
    CassandraDependencies row = new CassandraDependencies(links, day);
    // A single aggregated row per day, so a one-element RDD suffices.
    javaFunctions(sc.parallelize(Collections.singletonList(row)))
            .writerBuilder(keyspace, "dependencies", mapToRow(CassandraDependencies.class))
            .saveToCassandra();
}
/**
 * Saves the data to the specified Cassandra table, writing only the selected
 * columns and using the default connector and write configuration.
 *
 * @deprecated this method will be removed in future release, please use {@link #writerBuilder(String, String,
 * com.datastax.spark.connector.writer.RowWriterFactory)}
 */
@Deprecated
public void saveToCassandra(String keyspace, String table, RowWriterFactory<T> rowWriterFactory, ColumnSelector columnNames) {
    new WriterBuilder(keyspace, table, rowWriterFactory, columnNames,
            defaultConnector(), defaultWriteConf())
            .saveToCassandra();
}
/**
 * Writes the selected columns to the given Cassandra keyspace/table with the
 * default connector and write configuration.
 *
 * @deprecated this method will be removed in future release, please use {@link #writerBuilder(String, String,
 * com.datastax.spark.connector.writer.RowWriterFactory)}
 */
@Deprecated
public void saveToCassandra(String keyspace, String table, RowWriterFactory<T> rowWriterFactory, ColumnSelector columnNames) {
    WriterBuilder writer = new WriterBuilder(
            keyspace, table, rowWriterFactory, columnNames, defaultConnector(), defaultWriteConf());
    writer.saveToCassandra();
}
/**
 * Persists the data into {@code keyspace.table}, restricted to the given column
 * selection, via a builder created with default connector and write settings.
 *
 * @deprecated this method will be removed in future release, please use {@link #writerBuilder(String, String,
 * com.datastax.spark.connector.writer.RowWriterFactory)}
 */
@Deprecated
public void saveToCassandra(String keyspace, String table, RowWriterFactory<T> rowWriterFactory, ColumnSelector columnNames) {
    final WriterBuilder delegate =
            new WriterBuilder(keyspace, table, rowWriterFactory, columnNames,
                    defaultConnector(), defaultWriteConf());
    delegate.saveToCassandra();
}
/**
 * Legacy entry point that saves the selected columns to the given Cassandra
 * table using default connector and write configuration.
 *
 * @deprecated this method will be removed in future release, please use {@link #writerBuilder(String, String,
 * com.datastax.spark.connector.writer.RowWriterFactory)}
 */
@Deprecated
public void saveToCassandra(String keyspace, String table, RowWriterFactory<T> rowWriterFactory, ColumnSelector columnNames) {
    new WriterBuilder(
            keyspace,
            table,
            rowWriterFactory,
            columnNames,
            defaultConnector(),
            defaultWriteConf()
    ).saveToCassandra();
}
// Persists the given entities to Cassandra through the Spark connector.
// NOTE(review): raw types (List, Seq, ClassTag, JavaRDD) appear to be imposed by the
// overridden interface / Scala interop — confirm before adding generics.
@Override public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient) {
    try {
        // Convert the Java list to an immutable Scala Seq for SparkContext.parallelize.
        Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
        // ClassTag for the entity class, required by the Scala parallelize API.
        ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
        // Single-partition RDD: entity batches are expected to be small.
        JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
        // Write all mapped columns of the entity class to schema.tableName.
        CassandraJavaUtil.javaFunctions(personRDD)
                .writerBuilder(m.getSchema(), m.getTableName(), CassandraJavaUtil.mapToRow(m.getEntityClazz()))
                .saveToCassandra();
        return true;
    } catch (Exception e) {
        // Wrap and rethrow, preserving the original cause.
        throw new KunderaException("Cannot persist object(s)", e);
    }
}