/**
 * Registers the entity's Cassandra table as a temporary Spark SQL table so it can be queried
 * through the client's {@code SQLContext}.
 *
 * Reads the full Cassandra table (schema and table name taken from the entity metadata) into an
 * RDD of entity objects and registers the resulting DataFrame under the Cassandra table's own
 * name.
 *
 * NOTE(review): raw types ({@code Class}, {@code JavaRDD}) are used throughout — presumably the
 * connector API plus the dynamic entity class force this; confirm whether generics can be applied.
 *
 * @param m           metadata describing the entity (schema, table name, entity class)
 * @param sparkClient holder for the shared SparkContext and SQLContext
 */
@Override public void registerTable(EntityMetadata m, SparkClient sparkClient) {
    // Cassandra-aware Java facade over the shared Spark context.
    SparkContextJavaFunctions functions = CassandraJavaUtil.javaFunctions(sparkClient.sparkContext);
    Class clazz = m.getEntityClazz();
    // Map each Cassandra row directly onto an instance of the entity class.
    JavaRDD cassandraRowsRDD = functions.cassandraTable(m.getSchema(), m.getTableName(), CassandraJavaUtil.mapRowTo(clazz));
    // The temp table name mirrors the Cassandra table name, so SQL can address it directly.
    sparkClient.sqlContext.createDataFrame(cassandraRowsRDD, clazz).registerTempTable(m.getTableName());
}
/**
 * Persists the given list of entities to Cassandra by parallelizing them into an RDD and writing
 * via the Cassandra connector.
 *
 * @param m           metadata describing the entity (schema, table name, entity class)
 * @param sparkClient holder for the shared SparkContext
 * @return {@code true} on success (never returns {@code false}; failures throw)
 * @throws KunderaException wrapping any failure from the Spark/Cassandra layers
 */
@Override public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient) {
    try {
        // Convert the Java list to an immutable Scala Seq, as required by parallelize().
        Seq s = scala.collection.JavaConversions.asScalaBuffer(listEntity).toList();
        // ClassTag for the entity type — needed because parallelize() is generic on the Scala side.
        ClassTag tag = scala.reflect.ClassTag$.MODULE$.apply(m.getEntityClazz());
        // NOTE(review): numSlices is hard-coded to 1, so the whole batch is written from a
        // single partition — confirm this is intentional for small entity lists.
        JavaRDD personRDD = sparkClient.sparkContext.parallelize(s, 1, tag).toJavaRDD();
        // Map each entity's fields onto Cassandra columns and save.
        CassandraJavaUtil.javaFunctions(personRDD)
            .writerBuilder(m.getSchema(), m.getTableName(), CassandraJavaUtil.mapToRow(m.getEntityClazz()))
            .saveToCassandra();
        return true;
    } catch (Exception e) {
        // Broad catch at the persistence boundary; the original cause is preserved.
        throw new KunderaException("Cannot persist object(s)", e);
    }
}
/**
 * Builds a {@link RowWriterFactory} able to persist 5-element tuples, where each component
 * class drives the column mapping for its position.
 *
 * @return a factory writing {@code Tuple5} values column-by-column
 */
public static <A, B, C, D, E> RowWriterFactory<Tuple5<A, B, C, D, E>> mapTupleToRow(
        Class<A> c1,
        Class<B> c2,
        Class<C> c3,
        Class<D> c4,
        Class<E> c5) {
    return mapToRow(Tuple5.class, tuple5ColumnMapper(c1, c2, c3, c4, c5));
}
/**
 * Builds a {@link RowWriterFactory} able to persist 2-element tuples, where each component
 * class drives the column mapping for its position.
 *
 * @return a factory writing {@code Tuple2} values column-by-column
 */
public static <A, B> RowWriterFactory<Tuple2<A, B>> mapTupleToRow(
        Class<A> firstClass,
        Class<B> secondClass) {
    return mapToRow(Tuple2.class, tuple2ColumnMapper(firstClass, secondClass));
}
/**
 * Builds a {@link RowWriterFactory} able to persist 1-element tuples, with the component class
 * driving the single column mapping.
 *
 * @return a factory writing {@code Tuple1} values
 */
public static <A> RowWriterFactory<Tuple1<A>> mapTupleToRow(Class<A> componentClass) {
    return mapToRow(Tuple1.class, tuple1ColumnMapper(componentClass));
}
/**
 * Builds a {@link RowWriterFactory} able to persist 3-element tuples, where each component
 * class drives the column mapping for its position.
 *
 * @return a factory writing {@code Tuple3} values column-by-column
 */
public static <A, B, C> RowWriterFactory<Tuple3<A, B, C>> mapTupleToRow(
        Class<A> c1,
        Class<B> c2,
        Class<C> c3) {
    return mapToRow(Tuple3.class, tuple3ColumnMapper(c1, c2, c3));
}
/**
 * Wraps an existing {@link SparkContext} in its Java-friendly functions facade.
 *
 * @param sc the Spark context to wrap
 * @return a {@link SparkContextJavaFunctions} bound to {@code sc}
 */
public static SparkContextJavaFunctions javaFunctions(SparkContext sc) {
    return new SparkContextJavaFunctions(sc);
}
/**
 * Wraps each of the given column names in a selectable {@link ColumnRef}.
 *
 * @param columnNames the Cassandra column names to select
 * @return one {@code ColumnRef} per input name, in the same order
 */
public static ColumnRef[] toSelectableColumnRefs(String... columnNames) {
    ColumnName[] selected = new ColumnName[columnNames.length];
    int cursor = 0;
    for (String columnName : columnNames) {
        selected[cursor++] = column(columnName);
    }
    return selected;
}
/**
 * Wraps an existing {@link StreamingContext} in its Java-friendly functions facade.
 *
 * @param ssc the streaming context to wrap
 * @return a {@link StreamingContextJavaFunctions} bound to {@code ssc}
 */
public static StreamingContextJavaFunctions javaFunctions(StreamingContext ssc) {
    return new StreamingContextJavaFunctions(ssc);
}
/**
 * Wraps an existing {@link DStream} in its Java-friendly functions facade.
 *
 * @param stream the discretized stream to wrap
 * @return a {@link DStreamJavaFunctions} bound to {@code stream}
 */
public static <T> DStreamJavaFunctions<T> javaFunctions(DStream<T> stream) {
    return new DStreamJavaFunctions<>(stream);
}
/**
 * Wraps an existing {@link RDD} in its Java-friendly functions facade.
 *
 * @param sourceRdd the RDD to wrap
 * @return an {@link RDDJavaFunctions} bound to {@code sourceRdd}
 */
public static <T> RDDJavaFunctions<T> javaFunctions(RDD<T> sourceRdd) {
    return new RDDJavaFunctions<>(sourceRdd);
}
/**
 * Creates a writer factory for 5-tuples; the five classes determine how each tuple position is
 * mapped to a table column.
 *
 * @return a {@link RowWriterFactory} for {@code Tuple5} instances
 */
public static <A, B, C, D, E> RowWriterFactory<Tuple5<A, B, C, D, E>> mapTupleToRow(
        Class<A> first, Class<B> second, Class<C> third, Class<D> fourth, Class<E> fifth) {
    return mapToRow(Tuple5.class, tuple5ColumnMapper(first, second, third, fourth, fifth));
}
/**
 * Creates a writer factory for 2-tuples; the two classes determine how each tuple position is
 * mapped to a table column.
 *
 * @return a {@link RowWriterFactory} for {@code Tuple2} instances
 */
public static <A, B> RowWriterFactory<Tuple2<A, B>> mapTupleToRow(Class<A> first, Class<B> second) {
    return mapToRow(Tuple2.class, tuple2ColumnMapper(first, second));
}
/**
 * Creates a writer factory for 1-tuples; the class determines how the single tuple component is
 * mapped to a table column.
 *
 * @return a {@link RowWriterFactory} for {@code Tuple1} instances
 */
public static <A> RowWriterFactory<Tuple1<A>> mapTupleToRow(Class<A> first) {
    return mapToRow(Tuple1.class, tuple1ColumnMapper(first));
}
/**
 * Creates the Java functions facade for the given {@link SparkContext}.
 *
 * @param sparkCtx the Spark context to expose through Java-friendly helpers
 * @return a new {@link SparkContextJavaFunctions} wrapping {@code sparkCtx}
 */
public static SparkContextJavaFunctions javaFunctions(SparkContext sparkCtx) {
    return new SparkContextJavaFunctions(sparkCtx);
}
/**
 * Converts plain column names into selectable {@link ColumnRef} instances, preserving order.
 *
 * @param columnNames the Cassandra column names to select
 * @return the corresponding column references
 */
public static ColumnRef[] toSelectableColumnRefs(String... columnNames) {
    final ColumnName[] result = new ColumnName[columnNames.length];
    for (int idx = 0; idx < result.length; idx++) {
        result[idx] = column(columnNames[idx]);
    }
    return result;
}
/**
 * Creates the Java functions facade for the given {@link StreamingContext}.
 *
 * @param streamingCtx the streaming context to expose through Java-friendly helpers
 * @return a new {@link StreamingContextJavaFunctions} wrapping {@code streamingCtx}
 */
public static StreamingContextJavaFunctions javaFunctions(StreamingContext streamingCtx) {
    return new StreamingContextJavaFunctions(streamingCtx);
}
/**
 * Returns a {@link RowWriterFactory} for saving 5-element tuples; component classes select the
 * per-position column conversions.
 */
public static <A, B, C, D, E> RowWriterFactory<Tuple5<A, B, C, D, E>> mapTupleToRow(
        Class<A> t1Class,
        Class<B> t2Class,
        Class<C> t3Class,
        Class<D> t4Class,
        Class<E> t5Class) {
    return mapToRow(
            Tuple5.class,
            tuple5ColumnMapper(t1Class, t2Class, t3Class, t4Class, t5Class));
}
/**
 * Returns a {@link RowWriterFactory} for saving 2-element tuples; component classes select the
 * per-position column conversions.
 */
public static <A, B> RowWriterFactory<Tuple2<A, B>> mapTupleToRow(
        Class<A> t1Class,
        Class<B> t2Class) {
    return mapToRow(Tuple2.class, tuple2ColumnMapper(t1Class, t2Class));
}
/**
 * Produces a {@link RowWriterFactory} for pairs; the supplied classes describe the runtime types
 * of the two tuple slots so the correct column conversions are chosen.
 */
public static <A, B> RowWriterFactory<Tuple2<A, B>> mapTupleToRow(Class<A> leftClass, Class<B> rightClass) {
    return mapToRow(
            Tuple2.class,
            tuple2ColumnMapper(leftClass, rightClass));
}