File | Doc | Category | Size | Date | Package
AbstractFlushingEventListener.java | API Doc | Hibernate 3.2.5 | 12330 bytes | Wed Apr 11 09:25:14 BST 2007 | org.hibernate.event.def

AbstractFlushingEventListener

public abstract class AbstractFlushingEventListener extends Object implements Serializable
A convenience base class for listeners whose functionality results in flushing.
author
Steve Ebersole

Fields Summary
private static final Log
log
Constructors Summary
Methods Summary
private voidcascadeOnFlush(org.hibernate.event.EventSource session, org.hibernate.persister.entity.EntityPersister persister, java.lang.Object object, java.lang.Object anything)

		session.getPersistenceContext().incrementCascadeLevel();
		try {
			new Cascade( getCascadingAction(), Cascade.BEFORE_FLUSH, session )
			.cascade( persister, object, anything );
		}
		finally {
			session.getPersistenceContext().decrementCascadeLevel();
		}
	
private voidflushCollections(org.hibernate.event.EventSource session)
process any unreferenced collections and then inspect all known collections, scheduling creates/removes/updates


		log.trace("Processing unreferenced collections");

		List list = IdentityMap.entries( session.getPersistenceContext().getCollectionEntries() );
		int size = list.size();
		for ( int i = 0; i < size; i++ ) {
			Map.Entry me = ( Map.Entry ) list.get( i );
			CollectionEntry ce = (CollectionEntry) me.getValue();
			if ( !ce.isReached() && !ce.isIgnore() ) {
				Collections.processUnreachableCollection( (PersistentCollection) me.getKey(), session );
			}
		}

		// Schedule updates to collections:

		log.trace( "Scheduling collection removes/(re)creates/updates" );

		list = IdentityMap.entries( session.getPersistenceContext().getCollectionEntries() );
		size = list.size();
		ActionQueue actionQueue = session.getActionQueue();
		for ( int i = 0; i < size; i++ ) {
			Map.Entry me = (Map.Entry) list.get(i);
			PersistentCollection coll = (PersistentCollection) me.getKey();
			CollectionEntry ce = (CollectionEntry) me.getValue();

			if ( ce.isDorecreate() ) {
				session.getInterceptor().onCollectionRecreate( coll, ce.getCurrentKey() );
				actionQueue.addAction(
						new CollectionRecreateAction( 
								coll, 
								ce.getCurrentPersister(), 
								ce.getCurrentKey(), 
								session 
							)
					);
			}
			if ( ce.isDoremove() ) {
				session.getInterceptor().onCollectionRemove( coll, ce.getLoadedKey() );
				actionQueue.addAction(
						new CollectionRemoveAction( 
								coll, 
								ce.getLoadedPersister(), 
								ce.getLoadedKey(), 
								ce.isSnapshotEmpty(coll), 
								session 
							)
					);
			}
			if ( ce.isDoupdate() ) {
				session.getInterceptor().onCollectionUpdate( coll, ce.getLoadedKey() );
				actionQueue.addAction(
						new CollectionUpdateAction( 
								coll, 
								ce.getLoadedPersister(), 
								ce.getLoadedKey(), 
								ce.isSnapshotEmpty(coll), 
								session 
							)
					);
			}

		}

		actionQueue.sortCollectionActions();
		
	
private voidflushEntities(org.hibernate.event.FlushEvent event)
1. detect any dirty entities 2. schedule any entity updates 3. search out any reachable collections


		log.trace("Flushing entities and processing referenced collections");

		// Among other things, updateReachables() will recursively load all
		// collections that are moving roles. This might cause entities to
		// be loaded.

		// So this needs to be safe from concurrent modification problems.
		// It is safe because of how IdentityMap implements entrySet()

		final EventSource source = event.getSession();
		
		final Map.Entry[] list = IdentityMap.concurrentEntries( source.getPersistenceContext().getEntityEntries() );
		final int size = list.length;
		for ( int i = 0; i < size; i++ ) {

			// Update the status of the object and if necessary, schedule an update

			Map.Entry me = list[i];
			EntityEntry entry = (EntityEntry) me.getValue();
			Status status = entry.getStatus();

			if ( status != Status.LOADING && status != Status.GONE ) {
				FlushEntityEvent entityEvent = new FlushEntityEvent( source, me.getKey(), entry );
				FlushEntityEventListener[] listeners = source.getListeners().getFlushEntityEventListeners();
				for ( int j = 0; j < listeners.length; j++ ) {
					listeners[j].onFlushEntity(entityEvent);
				}
			}
		}

		source.getActionQueue().sortActions();
	
protected voidflushEverythingToExecutions(org.hibernate.event.FlushEvent event)
Coordinates the processing necessary to get things ready for executions as db calls by preping the session caches and moving the appropriate entities and collections to their respective execution queues.

param
event The flush event.
throws
HibernateException Error flushing caches to execution queues.

	
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
	// Pre-flushing section
	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

	                                            	 
	      

		log.trace("flushing session");
		
		EventSource session = event.getSession();
		
		final PersistenceContext persistenceContext = session.getPersistenceContext();
		session.getInterceptor().preFlush( new LazyIterator( persistenceContext.getEntitiesByKey() ) );

		prepareEntityFlushes(session);
		// we could move this inside if we wanted to
		// tolerate collection initializations during
		// collection dirty checking:
		prepareCollectionFlushes(session);
		// now, any collections that are initialized
		// inside this block do not get updated - they
		// are ignored until the next flush
				
		persistenceContext.setFlushing(true);
		try {
			flushEntities(event);
			flushCollections(session);
		}
		finally {
			persistenceContext.setFlushing(false);
		}

		//some statistics
		if ( log.isDebugEnabled() ) {
			log.debug( "Flushed: " +
					session.getActionQueue().numberOfInsertions() + " insertions, " +
					session.getActionQueue().numberOfUpdates() + " updates, " +
					session.getActionQueue().numberOfDeletions() + " deletions to " +
					persistenceContext.getEntityEntries().size() + " objects"
				);
			log.debug( "Flushed: " +
					session.getActionQueue().numberOfCollectionCreations() + " (re)creations, " +
					session.getActionQueue().numberOfCollectionUpdates() + " updates, " +
					session.getActionQueue().numberOfCollectionRemovals() + " removals to " +
					persistenceContext.getCollectionEntries().size() + " collections"
				);
			new Printer( session.getFactory() ).toString( 
					persistenceContext.getEntitiesByKey().values().iterator(), 
					session.getEntityMode() 
				);
		}
	
protected java.lang.ObjectgetAnything()

 return null; 
protected org.hibernate.engine.CascadingActiongetCascadingAction()

		return CascadingAction.SAVE_UPDATE;
	
protected voidperformExecutions(org.hibernate.event.EventSource session)
Execute all SQL and second-level cache updates, in a special order so that foreign-key constraints cannot be violated:
  1. Inserts, in the order they were performed
  2. Updates
  3. Deletion of collection elements
  4. Insertion of collection elements
  5. Deletes, in the order they were performed


		log.trace("executing flush");

		try {
			session.getJDBCContext().getConnectionManager().flushBeginning();
			// we need to lock the collection caches before
			// executing entity inserts/updates in order to
			// account for bidi associations
			session.getActionQueue().prepareActions();
			session.getActionQueue().executeActions();
		}
		catch (HibernateException he) {
			log.error("Could not synchronize database state with session", he);
			throw he;
		}
		finally {
			session.getJDBCContext().getConnectionManager().flushEnding();
		}
	
protected voidpostFlush(org.hibernate.engine.SessionImplementor session)
1. Recreate the collection key -> collection map 2. rebuild the collection entries 3. call Interceptor.postFlush()


		log.trace( "post flush" );

		final PersistenceContext persistenceContext = session.getPersistenceContext();
		persistenceContext.getCollectionsByKey().clear();
		persistenceContext.getBatchFetchQueue()
				.clearSubselects(); //the database has changed now, so the subselect results need to be invalidated

		Iterator iter = persistenceContext.getCollectionEntries().entrySet().iterator();
		while ( iter.hasNext() ) {
			Map.Entry me = (Map.Entry) iter.next();
			CollectionEntry collectionEntry = (CollectionEntry) me.getValue();
			PersistentCollection persistentCollection = (PersistentCollection) me.getKey();
			collectionEntry.postFlush(persistentCollection);
			if ( collectionEntry.getLoadedPersister() == null ) {
				//if the collection is dereferenced, remove from the session cache
				//iter.remove(); //does not work, since the entrySet is not backed by the set
				persistenceContext.getCollectionEntries()
						.remove(persistentCollection);
			}
			else {
				//otherwise recreate the mapping between the collection and its key
				CollectionKey collectionKey = new CollectionKey( 
						collectionEntry.getLoadedPersister(), 
						collectionEntry.getLoadedKey(), 
						session.getEntityMode() 
					);
				persistenceContext.getCollectionsByKey()
						.put(collectionKey, persistentCollection);
			}
		}
		
		session.getInterceptor().postFlush( new LazyIterator( persistenceContext.getEntitiesByKey() ) );

	
private voidprepareCollectionFlushes(org.hibernate.engine.SessionImplementor session)
Initialize the flags of the CollectionEntry, including the dirty check.


		// Initialize dirty flags for arrays + collections with composite elements
		// and reset reached, doupdate, etc.
		
		log.debug("dirty checking collections");

		final List list = IdentityMap.entries( session.getPersistenceContext().getCollectionEntries() );
		final int size = list.size();
		for ( int i = 0; i < size; i++ ) {
			Map.Entry e = ( Map.Entry ) list.get( i );
			( (CollectionEntry) e.getValue() ).preFlush( (PersistentCollection) e.getKey() );
		}
	
private voidprepareEntityFlushes(org.hibernate.event.EventSource session)
process cascade save/update at the start of a flush to discover any newly referenced entity that must be passed to saveOrUpdate(), and also apply orphan delete

		
		log.debug("processing flush-time cascades");

		final Map.Entry[] list = IdentityMap.concurrentEntries( session.getPersistenceContext().getEntityEntries() );
		//safe from concurrent modification because of how entryList() is implemented on IdentityMap
		final int size = list.length;
		final Object anything = getAnything();
		for ( int i=0; i<size; i++ ) {
			Map.Entry me = list[i];
			EntityEntry entry = (EntityEntry) me.getValue();
			Status status = entry.getStatus();
			if ( status == Status.MANAGED || status == Status.SAVING ) {
				cascadeOnFlush( session, entry.getPersister(), me.getKey(), anything );
			}
		}