/**
 * Persists a list of entities through the Spark-Teradata client.
 * <p>
 * Entity persistence is not supported by this integration, so the call
 * always fails.
 *
 * @param listEntity entities to persist (ignored)
 * @param m metadata of the entity type (ignored)
 * @param sparkClient spark client handle (ignored)
 * @return never returns normally
 * @throws KunderaException always, to signal the unsupported operation
 */
@Override
public boolean persist(List listEntity, EntityMetadata m, SparkClient sparkClient)
{
    throw new KunderaException("Entity persistence in teradata is currently not supported. ");
}
/**
 * Verifies that merging an entity carrying a counter column is rejected.
 * <p>
 * Loads the previously persisted {@code Counters} row and attempts a merge;
 * the merge must fail with a {@code KunderaException} wrapping an
 * {@code UnsupportedOperationException}.
 */
public void mergeCounter()
{
    EntityManager em = emf.createEntityManager();
    // Fix: removed the dead "new Counters()" pre-assignment that was
    // immediately overwritten by em.find(...).
    Counters counters = em.find(Counters.class, id1);
    Assert.assertNotNull(counters);
    Assert.assertNotNull(counters.getCounter());
    try
    {
        em.merge(counters);
        // Fix: without this fail() the test silently passed when merge
        // did NOT throw; siblings in this suite use the same pattern.
        Assert.fail("Should have gone to catch block!");
    }
    catch (KunderaException ke)
    {
        Assert.assertEquals(
            "java.lang.UnsupportedOperationException: Merge is not permitted on counter column! ",
            ke.getMessage());
    }
    finally
    {
        em.close();
    }
}
/**
 * Creating an EntityManagerFactory with a null persistence-unit name must
 * fail fast with a {@code KunderaException} carrying a descriptive message.
 */
@Test
public void testCreateEMFWithNullPu()
{
    KunderaPersistence persistence = new KunderaPersistence();
    try
    {
        // Fix: dropped the unused local "emf"; the factory reference was
        // never read, only the thrown exception matters.
        persistence.createEntityManagerFactory(null, null);
        Assert.fail("Should have gone to catch block!");
    }
    catch (KunderaException kex)
    {
        Assert.assertEquals("Persistence unit name should not be null", kex.getMessage());
    }
}
/**
 * Saves a Spark DataFrame back to Teradata.
 * <p>
 * Writing DataFrames is not supported by this integration, so the call
 * always fails.
 *
 * @param dataFrame the frame to save (ignored)
 * @param entityClazz target entity class (ignored)
 * @param properties persistence properties (ignored)
 * @throws KunderaException always, to signal the unsupported operation
 */
@Override
public void saveDataFrame(DataFrame dataFrame, Class<?> entityClazz, Map<String, Object> properties)
{
    throw new KunderaException("Dataframe persistence in teradata is currently not supported. ");
}
/**
 * Exercises the {@code ReflectUtils} helpers: {@code classForName} (failure
 * and success paths), {@code isTransientOrStatic}, {@code hasInterface},
 * {@code hasSuperClass} and {@code stripEnhancerClass}.
 *
 * @throws NoSuchFieldException if the reflected field is missing
 * @throws SecurityException if reflective access is denied
 */
@Test
public void test() throws NoSuchFieldException, SecurityException
{
    try
    {
        // Fix: the unqualified name "Person" cannot be resolved; the call
        // must throw. The original stored the result in an unused local and
        // had no fail(), so the test silently passed if resolution succeeded.
        ReflectUtils.classForName("Person", this.getClass().getClassLoader());
        Assert.fail("Should have gone to catch block!");
    }
    catch (KunderaException cfex)
    {
        Assert.assertNotNull(cfex.getMessage());
    }

    Class clazz = ReflectUtils.classForName("com.impetus.kundera.query.Person",
        this.getClass().getClassLoader());
    Assert.assertNotNull(clazz);
    Assert.assertEquals(Person.class, clazz);

    Field field = Person.class.getDeclaredField("personName");
    Assert.assertFalse(ReflectUtils.isTransientOrStatic(field));

    Assert.assertFalse(ReflectUtils.hasInterface(Client.class, Person.class));
    Assert.assertTrue(ReflectUtils.hasInterface(Client.class, Client.class));
    Assert.assertTrue(ReflectUtils.hasInterface(Client.class, CoreTestClient.class));

    Assert.assertTrue(ReflectUtils.hasSuperClass(ClientBase.class, CoreTestClient.class));
    Assert.assertTrue(ReflectUtils.hasSuperClass(ClientBase.class, ClientBase.class));
    Assert.assertFalse(ReflectUtils.hasSuperClass(ClientBase.class, Client.class));

    Assert.assertEquals(ClientBase.class, ReflectUtils.stripEnhancerClass(ClientBase.class));
}
/**
 * Writes a Spark DataFrame back into MongoDB.
 * <p>
 * The write path is not implemented for this client, so the call always
 * fails.
 *
 * @param dataFrame the frame to save (ignored)
 * @param entityClazz target entity class (ignored)
 * @param properties persistence properties (ignored)
 * @throws KunderaException always, to signal the unsupported operation
 */
@Override
public void saveDataFrame(DataFrame dataFrame, Class<?> entityClazz, Map<String, Object> properties)
{
    throw new KunderaException("Saving data of DataFrame back to MongoDB is currently not supported. ");
}
/**
 * Verifies that merging an entity carrying a super-counter column is
 * rejected.
 * <p>
 * Loads the previously persisted {@code SuperCounters} row and attempts a
 * merge; the merge must fail with a {@code KunderaException} wrapping an
 * {@code UnsupportedOperationException}.
 */
private void mergeCounter()
{
    EntityManager em = emf.createEntityManager();
    // Fix: removed the dead "new SuperCounters()" pre-assignment that was
    // immediately overwritten by em.find(...).
    SuperCounters counter = em.find(SuperCounters.class, id1);
    Assert.assertNotNull(counter);
    Assert.assertNotNull(counter.getCounter());
    try
    {
        em.merge(counter);
        // Fix: without this fail() the test silently passed when merge
        // did NOT throw; siblings in this suite use the same pattern.
        Assert.fail("Should have gone to catch block!");
    }
    catch (KunderaException ke)
    {
        Assert.assertEquals(
            "java.lang.UnsupportedOperationException: Merge is not permitted on counter column! ",
            ke.getMessage());
    }
    finally
    {
        em.close();
    }
}
/**
 * Executes a Kundera search query.
 * <p>
 * Searching is not available for this client implementation.
 *
 * @param kunderaMetadata kundera metadata (ignored)
 * @param kunderaQuery parsed query (ignored)
 * @param persistenceDelegator persistence delegator (ignored)
 * @param m entity metadata (ignored)
 * @param firstResult pagination offset (ignored)
 * @param maxResults pagination limit (ignored)
 * @return never returns normally
 * @throws KunderaException always, to signal the unsupported operation
 */
@Override
public Map<String, Object> search(KunderaMetadata kunderaMetadata, KunderaQuery kunderaQuery,
    PersistenceDelegator persistenceDelegator, EntityMetadata m, int firstResult, int maxResults)
{
    throw new KunderaException("Unsupported Method");
}
}
/**
 * Persisting a {@code MappedPerson} (an entity based on a mapped
 * superclass) is expected to be rejected; the resulting
 * {@code KunderaException} must carry a non-null message.
 */
@Test
public void testEntityOperations()
{
    try
    {
        MappedPerson person = new MappedPerson();
        person.setId("dd");
        person.setFirstName("mapped");
        person.setLastName("superclass");
        em.persist(person);
        // persist() succeeding means the mapped-superclass check is broken.
        Assert.fail("Should have gone to catch block!");
    }
    catch (KunderaException kex)
    {
        Assert.assertNotNull(kex.getMessage());
    }
}
/**
 * Executes a Kundera search query.
 * <p>
 * Searching is not available for this client implementation.
 *
 * @param kunderaMetadata kundera metadata (ignored)
 * @param kunderaQuery parsed query (ignored)
 * @param persistenceDelegator persistence delegator (ignored)
 * @param m entity metadata (ignored)
 * @param firstResult pagination offset (ignored)
 * @param maxResults pagination limit (ignored)
 * @return never returns normally
 * @throws KunderaException always, to signal the unsupported operation
 */
@Override
public Map<String, Object> search(KunderaMetadata kunderaMetadata, KunderaQuery kunderaQuery,
    PersistenceDelegator persistenceDelegator, EntityMetadata m, int firstResult, int maxResults)
{
    throw new KunderaException("Unsupported Method");
}
}
/**
 * Populates entities along with their relations.
 * <p>
 * Relation traversal is not supported by kundera-spark, so the call always
 * fails.
 *
 * @param m entity metadata (ignored)
 * @param client client handle (ignored)
 * @return never returns normally
 * @throws KunderaException always, to signal the unsupported operation
 */
@Override
protected List recursivelyPopulateEntities(EntityMetadata m, Client client)
{
    throw new KunderaException("Query on entities having relations is currently not supported in kundera-spark.");
}
/**
 * Executes a CQL query with bind parameters.
 * <p>
 * Bind-parameter execution is not implemented for this client.
 *
 * @param clazz entity class (ignored)
 * @param relationalField relation column names (ignored)
 * @param isNative whether the query is native CQL (ignored)
 * @param cqlQuery the CQL text (ignored)
 * @param parameters bind parameters (ignored)
 * @return never returns normally
 * @throws KunderaException always, to signal the unimplemented operation
 */
public List executeQuery(Class clazz, List<String> relationalField, boolean isNative, String cqlQuery,
    final List<KunderaQuery.BindParameter> parameters)
{
    throw new KunderaException("not implemented");
}
/**
 * Executes a query with bind parameters on the given connection.
 * <p>
 * Bind-parameter execution is not implemented for this client.
 *
 * @param <T> the generic result type
 * @param query the query text (ignored)
 * @param connection the connection handle (ignored)
 * @param parameters bind parameters (ignored)
 * @return never returns normally
 * @throws KunderaException always, to signal the unimplemented operation
 */
public <T> T execute(final String query, final Object connection,
    final List<KunderaQuery.BindParameter> parameters)
{
    throw new KunderaException("not implemented");
}
/**
 * Derives the N1QL index name for a bucket by appending the Couchbase index
 * suffix and lower-casing the result.
 *
 * @param bucketName the bucket name; must not be null
 * @return the lower-cased index name for the bucket
 * @throws KunderaException if {@code bucketName} is null
 */
private String buildIndexName(String bucketName)
{
    if (bucketName != null)
    {
        return (bucketName + CouchbaseConstants.INDEX_SUFFIX).toLowerCase();
    }
    throw new KunderaException("Bucket Name can't be null!");
}
/**
 * Reads the configured HDFS input-file path from the EntityManager
 * properties.
 *
 * @param properties EntityManager creation properties
 * @return the configured input file path
 * @throws KunderaException if the property is absent or empty
 */
@Override
public String getInputFilePath(Map<String, Object> properties)
{
    String path = (String) properties.get(SparkPropertiesConstants.HDFS_INPUT_FILE_PATH);
    if (path != null && !path.isEmpty())
    {
        return path;
    }
    throw new KunderaException("Please set the path of inputfile while creating EntityManager using the property\""
        + SparkPropertiesConstants.HDFS_INPUT_FILE_PATH + "\".");
}
/**
 * Reads the configured HDFS output-file path from the EntityManager
 * properties.
 *
 * @param properties EntityManager creation properties
 * @return the configured output file path
 * @throws KunderaException if the property is absent or empty
 */
@Override
public String getOutputFilePath(Map<String, Object> properties)
{
    String path = (String) properties.get(SparkPropertiesConstants.HDFS_OUTPUT_FILE_PATH);
    if (path != null && !path.isEmpty())
    {
        return path;
    }
    throw new KunderaException("Please set the path of outputfile while creating EntityManager using the property\""
        + SparkPropertiesConstants.HDFS_OUTPUT_FILE_PATH + "\".");
}
/**
 * Resolves the Oracle NoSQL validation class registered for the given ID
 * type.
 *
 * @param type the ID type name to resolve
 * @return the mapped validation class name
 * @throws KunderaException if the type is not supported as an ID
 */
public static String getValidIdType(String type)
{
    // Fix: the original looked the type up twice (once for the null check,
    // once for the return); a single lookup is both cheaper and clearer.
    String validType = validationClassMapperforId.get(type);
    if (validType == null)
    {
        throw new KunderaException("ID of type: " + type + " is not supported for Kundera Oracle NOSQL.");
    }
    return validType;
}
}
/**
 * Reads the configured filesystem output-file path from the EntityManager
 * properties.
 *
 * @param properties EntityManager creation properties
 * @return the configured output file path
 * @throws KunderaException if the property is absent or empty
 */
@Override
public String getOutputFilePath(Map<String, Object> properties)
{
    String path = (String) properties.get(SparkPropertiesConstants.FS_OUTPUT_FILE_PATH);
    if (path != null && !path.isEmpty())
    {
        return path;
    }
    throw new KunderaException("Please set the path of outputfile while creating EntityManager using the property\""
        + SparkPropertiesConstants.FS_OUTPUT_FILE_PATH + "\".");
}
/**
 * Reads the configured filesystem input-file path from the EntityManager
 * properties.
 *
 * @param properties EntityManager creation properties
 * @return the configured input file path
 * @throws KunderaException if the property is absent or empty
 */
@Override
public String getInputFilePath(Map<String, Object> properties)
{
    String path = (String) properties.get(SparkPropertiesConstants.FS_INPUT_FILE_PATH);
    if (path != null && !path.isEmpty())
    {
        return path;
    }
    throw new KunderaException("Please set the path of inputfile while creating EntityManager using the property\""
        + SparkPropertiesConstants.FS_INPUT_FILE_PATH + "\".");
}
/**
 * Fires the post-phase listeners registered for the given entity event.
 * Any listener failure is rethrown wrapped in a {@code KunderaException}
 * with the original exception as its cause.
 *
 * @param metadata metadata of the affected entity
 * @param event the lifecycle event whose post counterpart is dispatched
 */
private void onPostEvent(EntityMetadata metadata, EntityEvent event)
{
    try
    {
        // Translate the event to its post-phase variant and notify listeners.
        this.eventDispatcher.fireEventListeners(metadata, this.data, EntityEvent.getPostEvent(event));
    }
    catch (Exception e)
    {
        throw new KunderaException(e);
    }
}