/**
 * Gets the persistent object class.
 */
@Override
public Class<T> getPersistentClass() {
  return getDataStore().getPersistentClass();
}
@Override
public Class<T> getPersistentClass() {
  return getDataStore().getPersistentClass();
}
/**
 * Sets the output parameters for the conf that Spark will use.
 *
 * @param job the job to set the properties for
 * @param dataStore the datastore as the output
 * @return a populated output {@link org.apache.hadoop.conf.Configuration}
 */
public <K, V extends Persistent> Configuration generateOutputConf(Job job, DataStore<K, V> dataStore) {
  return generateOutputConf(job, dataStore.getClass(), dataStore.getKeyClass(),
      dataStore.getPersistentClass());
}
/**
 * Sets the output parameters for the job.
 *
 * @param job the job to set the properties for
 * @param dataStore the datastore as the output
 * @param reuseObjects whether to reuse objects in serialization
 * @param <K> key class for the {@link org.apache.gora.store.DataStore}
 * @param <V> value class for the {@link org.apache.gora.store.DataStore}
 */
public static <K, V extends Persistent> void setOutput(Job job, DataStore<K, V> dataStore,
    boolean reuseObjects) {
  setOutput(job, dataStore.getClass(), dataStore.getKeyClass(),
      dataStore.getPersistentClass(), reuseObjects);
}
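// Illustrative only: a minimal sketch of calling the static setOutput(...) above from a Hadoop
// MapReduce driver. It assumes the method lives in GoraOutputFormat (as in Gora) and that
// "outStore" is an already-created DataStore; the driver boilerplate is an assumption for the
// example, not part of the snippet.
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "gora-output-example");
// Records the datastore class together with its key and persistent classes in the job
// configuration so the output side can re-instantiate the store, reusing objects during
// serialization.
GoraOutputFormat.setOutput(job, outStore, true);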
AvroSerializer(CassandraClient cassandraClient, DataStore<K, T> dataStore, CassandraMapping mapping) {
  super(cassandraClient, dataStore.getKeyClass(), dataStore.getPersistentClass(), mapping);
  if (PersistentBase.class.isAssignableFrom(dataStore.getPersistentClass())) {
    persistentSchema = ((PersistentBase) dataStore.getBeanFactory().getCachedPersistent()).getSchema();
  } else {
    persistentSchema = null;
  }
  this.cassandraDataStore = dataStore;
  try {
    analyzePersistent();
  } catch (Exception e) {
    throw new RuntimeException("Error occurred while analyzing the persistent class: " + e.getMessage(), e);
  }
}
/**
 * Creates a job and sets the output parameters for the conf that Spark will use.
 *
 * @param dataStore the datastore as the output
 * @return a populated output {@link org.apache.hadoop.conf.Configuration}
 * @throws IOException if there is an error creating the configuration
 */
public <K, V extends Persistent> Configuration generateOutputConf(DataStore<K, V> dataStore) throws IOException {
  Configuration hadoopConf = new Configuration();
  GoraMapReduceUtils.setIOSerializations(hadoopConf, true);
  Job job = Job.getInstance(hadoopConf);
  return generateOutputConf(job, dataStore.getClass(), dataStore.getKeyClass(),
      dataStore.getPersistentClass());
}
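// Illustrative only: a hedged sketch of how the generateOutputConf(...) variants above are
// typically consumed from a Spark driver. "goraSparkEngine" (the instance owning the method),
// "resultRDD" (a JavaPairRDD<K, V> computed earlier) and "outStore" (an initialized output
// DataStore) are assumptions for the example, not part of the snippets.
Configuration outputConf = goraSparkEngine.generateOutputConf(outStore);
// Spark writes the key/value pairs through the Gora output format configured in outputConf.
resultRDD.saveAsNewAPIHadoopDataset(outputConf);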
AvroSerializer(CassandraClient cassandraClient, DataStore<K, T> dataStore, CassandraMapping mapping)
    throws GoraException {
  super(cassandraClient, dataStore.getKeyClass(), dataStore.getPersistentClass(), mapping);
  if (PersistentBase.class.isAssignableFrom(dataStore.getPersistentClass())) {
    persistentSchema = ((PersistentBase) dataStore.getBeanFactory().getCachedPersistent()).getSchema();
  } else {
    throw new GoraException("Unsupported persistent class; could not find the Avro schema.");
  }
  this.cassandraDataStore = dataStore;
  try {
    analyzePersistent();
  } catch (Exception e) {
    throw new GoraException("Error occurred while analyzing the persistent class: " + e.getMessage());
  }
}
/**
 * This method returns the Cassandra serializer according to the Cassandra serializer property.
 *
 * @param cc Cassandra Client
 * @param type Serialization type
 * @param dataStore Cassandra DataStore
 * @param mapping Cassandra Mapping
 * @param <K> key class
 * @param <T> persistent class
 * @return Serializer
 */
public static <K, T extends Persistent> CassandraSerializer getSerializer(CassandraClient cc, String type,
    final DataStore<K, T> dataStore, CassandraMapping mapping) {
  CassandraStore.SerializerType serType = type == null || type.isEmpty()
      ? CassandraStore.SerializerType.NATIVE
      : CassandraStore.SerializerType.valueOf(type.toUpperCase(Locale.ENGLISH));
  CassandraSerializer serializer;
  switch (serType) {
    case AVRO:
      serializer = new AvroSerializer(cc, dataStore, mapping);
      break;
    case NATIVE:
    default:
      serializer = new NativeSerializer(cc, dataStore.getKeyClass(), dataStore.getPersistentClass(), mapping);
  }
  return serializer;
}
/**
 * This method returns the Cassandra serializer according to the Cassandra serializer property.
 *
 * @param cc Cassandra Client
 * @param type Serialization type
 * @param dataStore Cassandra DataStore
 * @param mapping Cassandra Mapping
 * @param <K> key class
 * @param <T> persistent class
 * @return Serializer
 * @throws GoraException if the serializer cannot be created
 */
public static <K, T extends Persistent> CassandraSerializer getSerializer(CassandraClient cc, String type,
    final DataStore<K, T> dataStore, CassandraMapping mapping) throws GoraException {
  CassandraStore.SerializerType serType = type == null || type.isEmpty()
      ? CassandraStore.SerializerType.NATIVE
      : CassandraStore.SerializerType.valueOf(type.toUpperCase(Locale.ENGLISH));
  CassandraSerializer serializer;
  switch (serType) {
    case AVRO:
      serializer = new AvroSerializer(cc, dataStore, mapping);
      break;
    case NATIVE:
    default:
      serializer = new NativeSerializer(cc, dataStore.getKeyClass(), dataStore.getPersistentClass(), mapping);
  }
  return serializer;
}
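// Illustrative only: a minimal sketch of invoking the factory method above. The call is left
// unqualified because the enclosing class name is not part of the snippet; "client", "store" and
// "mapping" are assumed to be an already-initialized CassandraClient, DataStore and
// CassandraMapping. With the GoraException-throwing variant, the caller must declare or handle
// GoraException.
String serializerType = "AVRO"; // "NATIVE" selects the native serializer; null or "" also defaults to NATIVE per the code above
CassandraSerializer serializer = getSerializer(client, serializerType, store, mapping);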
public static <K, T extends Persistent> void testNewPersistent(DataStore<K, T> dataStore) throws Exception {
  T obj1 = dataStore.newPersistent();
  T obj2 = dataStore.newPersistent();

  assertNotNull(obj1);
  assertNotNull(obj2);
  assertEquals(dataStore.getPersistentClass(), obj1.getClass());
  assertFalse(obj1 == obj2);
}
c[0] = inStore.getPersistentClass();
sparkConf.registerKryoClasses(c);
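// Illustrative only: a hedged reconstruction of the context around the two lines above, showing
// why the persistent class is registered with Kryo (so Spark can serialize Gora records
// efficiently). The SparkConf setup and variable names are assumptions for the example.
SparkConf sparkConf = new SparkConf().setAppName("gora-spark-example");
sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
Class<?>[] c = new Class<?>[1];
c[0] = inStore.getPersistentClass(); // the persistent (Avro) class produced by the input store
sparkConf.registerKryoClasses(c);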
@Test
public void testGetClasses() throws GoraException {
  DataStore<?, ?> dataStore = DataStoreFactory.getDataStore(
      "org.apache.gora.mock.store.MockDataStore", String.class, MockPersistent.class, conf);
  assertNotNull(dataStore);
  assertEquals(String.class, dataStore.getKeyClass());
  assertEquals(MockPersistent.class, dataStore.getPersistentClass());
}