@Override protected boolean visitCollectionsBeforeSave( Object entity, Serializable id, Object[] values, Type[] types, EventSource source) { //TODO: we use two visitors here, inefficient! OnReplicateVisitor visitor = new OnReplicateVisitor( source, id, entity, false ); visitor.processEntityPropertyValues( values, types ); return super.visitCollectionsBeforeSave( entity, id, values, types, source ); }
private void justCascade(Map createCache, EventSource source, Object entity, EntityPersister persister) { //TODO: merge into one method! cascadeBeforeSave( source, persister, entity, createCache ); cascadeAfterSave( source, persister, entity, createCache ); }
/**
 * Processes the i-th property value and, when the visitor produced a
 * replacement, substitutes it in place and records that a substitution
 * occurred.
 */
@Override
void processValue(int i, Object[] values, Type[] types) {
	final Object replacement = processValue( values[i], types[i] );
	if ( replacement == null ) {
		return;
	}
	values[i] = replacement;
	substitute = true;
}
protected boolean visitCollectionsBeforeSave( Object entity, Serializable id, Object[] values, Type[] types, EventSource source) { WrapVisitor visitor = new WrapVisitor( source ); // substitutes into values by side-effect visitor.processEntityPropertyValues( values, types ); return visitor.isSubstitutionRequired(); }
private void saveTransientEntity( Object entity, String entityName, Serializable requestedId, EventSource source, Map copyCache) { //this bit is only *really* absolutely necessary for handling //requestedId, but is also good if we merge multiple object //graphs, since it helps ensure uniqueness if ( requestedId == null ) { saveWithGeneratedId( entity, entityName, copyCache, source, false ); } else { saveWithRequestedId( entity, requestedId, entityName, copyCache, source ); } }
protected void entityIsPersistent(MergeEvent event, Map copyCache) { LOG.trace( "Ignoring persistent instance" ); //TODO: check that entry.getIdentifier().equals(requestedId) final Object entity = event.getEntity(); final EventSource source = event.getSession(); final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); ( (MergeContext) copyCache ).put( entity, entity, true ); //before cascade! cascadeOnMerge( source, persister, entity, copyCache ); copyValues( persister, entity, entity, source, copyCache ); event.setResult( entity ); }
/**
 * Dispatches each included property value to {@code processValue()}.
 *
 * @param values the property values
 * @param types the corresponding property types
 * @throws HibernateException on error
 */
void processValues(Object[] values, Type[] types) throws HibernateException {
	for ( int index = 0; index < types.length; index++ ) {
		if ( includeProperty( values, index ) ) {
			processValue( index, values, types );
		}
	}
}
/**
 * Dispatches each included entity property value to {@code processValue()}.
 *
 * @param values the entity property values
 * @param types the corresponding property types
 * @throws HibernateException on error
 */
public void processEntityPropertyValues(Object[] values, Type[] types) throws HibernateException {
	for ( int index = 0; index < types.length; index++ ) {
		if ( includeEntityProperty( values, index ) ) {
			processValue( index, values, types );
		}
	}
}
/**
 * Saves the entity, using the identifier requested on the event when one
 * was supplied; otherwise falls back to the superclass (generated-id) path.
 *
 * @param event the save-or-update event
 * @return the entity identifier
 */
protected Serializable saveWithGeneratedOrRequestedId(SaveOrUpdateEvent event) {
	final Serializable requestedId = event.getRequestedId();
	if ( requestedId == null ) {
		return super.saveWithGeneratedOrRequestedId( event );
	}
	return saveWithRequestedId(
			event.getEntity(),
			requestedId,
			event.getEntityName(),
			null,
			event.getSession()
	);
}
/**
 * {@inheritDoc}
 * <p>
 * A {@code null} component is treated as a tuple of all-null subvalues so
 * its subtypes are still visited.
 */
@Override
Object processComponent(Object component, CompositeType componentType) throws HibernateException {
	final Type[] subtypes = componentType.getSubtypes();
	if ( component != null ) {
		super.processComponent( component, componentType );
	}
	else {
		processValues( new Object[subtypes.length], subtypes );
	}
	return null;
}
/**
 * Evicts the given collection, ignoring {@code null} collections.
 */
@Override
Object processCollection(Object collection, CollectionType type) throws HibernateException {
	if ( collection == null ) {
		return null;
	}
	evictCollection( collection, type );
	return null;
}
/**
 * A property is included only when it is versionable AND the base filter
 * accepts it (short-circuit order preserved: versionability is checked first).
 */
boolean includeEntityProperty(Object[] values, int i) {
	if ( !propertyVersionability[i] ) {
		return false;
	}
	return super.includeEntityProperty( values, i );
}
}
/**
 * Processes the i-th property value; any replacement produced by the
 * visitor is discarded (no in-place substitution in this variant).
 */
void processValue(int i, Object[] values, Type[] types) {
	final Object value = values[i];
	final Type type = types[i];
	processValue( value, type );
}
/**
 * Entity-level property inclusion simply defers to the plain property filter.
 */
boolean includeEntityProperty(Object[] values, int i) {
	return this.includeProperty( values, i );
}
/**
 * Resolves the natural id carried by the event and stores the resulting
 * entity identifier back onto the event.
 */
@Override
public void onResolveNaturalId(ResolveNaturalIdEvent event) throws HibernateException {
	event.setEntityId( resolveNaturalId( event ) );
}
/**
 * Handles the given delete event, starting from a fresh identity-based set.
 *
 * @param event the delete event to be handled
 * @throws HibernateException on error
 */
public void onDelete(DeleteEvent event) throws HibernateException {
	final IdentitySet freshSet = new IdentitySet();
	onDelete( event, freshSet );
}
/**
 * Exposes the merge map as the inverse of the {@code MergeContext} passed
 * through the event machinery.
 */
@Override
protected Map getMergeMap(Object anything) {
	final MergeContext mergeContext = (MergeContext) anything;
	return mergeContext.invertMap();
}
/** * Called when we have recognized an attempt to delete a detached entity. * <p/> * This is perfectly valid in Hibernate usage; JPA, however, forbids this. * Thus, this is a hook for HEM to affect this behavior. * * @param event The event. */ protected void performDetachedEntityDeletionCheck(DeleteEvent event) { if ( jpaBootstrap ) { disallowDeletionOfDetached( event ); } // ok in normal Hibernate usage to delete a detached entity; JPA however // forbids it, thus this is a hook for HEM to affect this behavior }
/**
 * Forwards the updated entity and its session to the post-update handler.
 */
@Override
public void onPostUpdate(PostUpdateEvent event) {
	handlePostUpdate( event.getEntity(), event.getSession() );
}
// Delegates to the default collection initialization, then records the
// event against this listener.
public void onInitializeCollection(InitializeCollectionEvent event) {
	super.onInitializeCollection( event );
	addEvent( event, this );
}

// NOTE(review): method body continues beyond this chunk — not visible here.
public void addEvent(AbstractCollectionEvent event, Listener listener) {