/**
 * 1. Recreate the collection key -> collection map
 * 2. rebuild the collection entries
 * 3. call Interceptor.postFlush()
 */
protected void postFlush(SessionImplementor session) throws HibernateException {
  LOG.trace("Post flush");

  final PersistenceContext persistenceContext = session.getPersistenceContext();
  persistenceContext.getCollectionsByKey().clear();

  // the database has changed now, so the subselect results need to be invalidated;
  // the batch fetching queues should also be cleared - especially the collection batch fetching one
  persistenceContext.getBatchFetchQueue().clear();

  for (Map.Entry<PersistentCollection, CollectionEntry> me :
      IdentityMap.concurrentEntries(persistenceContext.getCollectionEntries())) {
    CollectionEntry collectionEntry = me.getValue();
    PersistentCollection persistentCollection = me.getKey();
    collectionEntry.postFlush(persistentCollection);
    if (collectionEntry.getLoadedPersister() == null) {
      // if the collection is dereferenced, unset its session reference and remove from the session cache
      // iter.remove(); //does not work, since the entrySet is not backed by the set
      persistentCollection.unsetSession(session);
      persistenceContext.getCollectionEntries().remove(persistentCollection);
    } else {
      // otherwise recreate the mapping between the collection and its key
      CollectionKey collectionKey =
          new CollectionKey(collectionEntry.getLoadedPersister(), collectionEntry.getLoadedKey());
      persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
    }
  }
}
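// The Javadoc above lists "call Interceptor.postFlush()" as step 3, but postFlush() itself does not
// issue that call. A minimal sketch of where it would typically happen, assuming a separate hook
// (here named postPostFlush) that the caller invokes once postFlush(session) has rebuilt the
// collection mappings; the name and placement are illustrative assumptions, not this class's confirmed API:
protected void postPostFlush(SessionImplementor session) {
  // hand the flushed entities to the interceptor; Interceptor.postFlush expects an Iterator
  session.getInterceptor().postFlush(session.getPersistenceContext().getEntitiesByKey().values().iterator());
}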
/** Initialize the flags of the CollectionEntry, including the dirty check. */
private void prepareCollectionFlushes(PersistenceContext persistenceContext) throws HibernateException {
  // Initialize dirty flags for arrays + collections with composite elements
  // and reset reached, doupdate, etc.
  LOG.debug("Dirty checking collections");
  for (Map.Entry<PersistentCollection, CollectionEntry> entry :
      IdentityMap.concurrentEntries(
          (Map<PersistentCollection, CollectionEntry>) persistenceContext.getCollectionEntries())) {
    entry.getValue().preFlush(entry.getKey());
  }
}
/**
 * process cascade save/update at the start of a flush to discover any newly referenced entity
 * that must be passed to saveOrUpdate(), and also apply orphan delete
 */
private void prepareEntityFlushes(EventSource session) throws HibernateException {
  LOG.debugf("Processing flush-time cascades");

  final Map.Entry[] list =
      IdentityMap.concurrentEntries(session.getPersistenceContext().getEntityEntries());
  // safe from concurrent modification because of how entryList() is implemented on IdentityMap
  final int size = list.length;
  final Object anything = getAnything();
  for (int i = 0; i < size; i++) {
    Map.Entry me = list[i];
    EntityEntry entry = (EntityEntry) me.getValue();
    Status status = entry.getStatus();
    if (status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY) {
      cascadeOnFlush(session, entry.getPersister(), me.getKey(), anything);
    }
  }
}
/**
 * 1. detect any dirty entities
 * 2. schedule any entity updates
 * 3. search out any reachable collections
 */
private void flushEntities(FlushEvent event) throws HibernateException {
  LOG.trace("Flushing entities and processing referenced collections");

  // Among other things, updateReachables() will recursively load all
  // collections that are moving roles. This might cause entities to be loaded.
  // So this needs to be safe from concurrent modification problems.
  // It is safe because of how IdentityMap implements entrySet()
  final EventSource source = event.getSession();

  final Map.Entry[] list =
      IdentityMap.concurrentEntries(source.getPersistenceContext().getEntityEntries());
  final int size = list.length;
  for (int i = 0; i < size; i++) {
    // Update the status of the object and if necessary, schedule an update
    Map.Entry me = list[i];
    EntityEntry entry = (EntityEntry) me.getValue();
    Status status = entry.getStatus();

    if (status != Status.LOADING && status != Status.GONE) {
      final FlushEntityEvent entityEvent = new FlushEntityEvent(source, me.getKey(), entry);
      final EventListenerGroup<FlushEntityEventListener> listenerGroup =
          source
              .getFactory()
              .getServiceRegistry()
              .getService(EventListenerRegistry.class)
              .getEventListenerGroup(EventType.FLUSH_ENTITY);
      for (FlushEntityEventListener listener : listenerGroup.listeners()) {
        listener.onFlushEntity(entityEvent);
      }
    }
  }

  source.getActionQueue().sortActions();
}
/**
 * process any unreferenced collections and then inspect all known collections, scheduling
 * creates/removes/updates
 */
@SuppressWarnings("unchecked")
private int flushCollections(final EventSource session, final PersistenceContext persistenceContext)
    throws HibernateException {
  LOG.trace("Processing unreferenced collections");

  final Map.Entry<PersistentCollection, CollectionEntry>[] entries =
      IdentityMap.concurrentEntries(
          (Map<PersistentCollection, CollectionEntry>) persistenceContext.getCollectionEntries());
  final int count = entries.length;

  for (Map.Entry<PersistentCollection, CollectionEntry> me : entries) {
    CollectionEntry ce = me.getValue();
    if (!ce.isReached() && !ce.isIgnore()) {
      Collections.processUnreachableCollection(me.getKey(), session);
    }
  }

  // Schedule updates to collections:
  LOG.trace("Scheduling collection removes/(re)creates/updates");

  ActionQueue actionQueue = session.getActionQueue();
  for (Map.Entry<PersistentCollection, CollectionEntry> me :
      IdentityMap.concurrentEntries(
          (Map<PersistentCollection, CollectionEntry>) persistenceContext.getCollectionEntries())) {
    PersistentCollection coll = me.getKey();
    CollectionEntry ce = me.getValue();

    if (ce.isDorecreate()) {
      session.getInterceptor().onCollectionRecreate(coll, ce.getCurrentKey());
      actionQueue.addAction(
          new CollectionRecreateAction(coll, ce.getCurrentPersister(), ce.getCurrentKey(), session));
    }
    if (ce.isDoremove()) {
      session.getInterceptor().onCollectionRemove(coll, ce.getLoadedKey());
      actionQueue.addAction(
          new CollectionRemoveAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), ce.isSnapshotEmpty(coll), session));
    }
    if (ce.isDoupdate()) {
      session.getInterceptor().onCollectionUpdate(coll, ce.getLoadedKey());
      actionQueue.addAction(
          new CollectionUpdateAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), ce.isSnapshotEmpty(coll), session));
    }

    // todo : I'm not sure the !wasInitialized part should really be part of this check
    if (!coll.wasInitialized() && coll.hasQueuedOperations()) {
      actionQueue.addAction(
          new QueuedOperationCollectionAction(coll, ce.getLoadedPersister(), ce.getLoadedKey(), session));
    }
  }

  actionQueue.sortCollectionActions();

  return count;
}
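// A hedged sketch of how the helpers above would typically be wired together into a single flush
// cycle. The method name (flushEverythingToExecutions) and the exact ordering/flushing-flag handling
// are assumptions for illustration only; the helper signatures themselves come from the code above.
protected void flushEverythingToExecutions(FlushEvent event) throws HibernateException {
  final EventSource session = event.getSession();
  final PersistenceContext persistenceContext = session.getPersistenceContext();

  // 1. cascade save/update and orphan delete from the flush roots
  prepareEntityFlushes(session);
  // 2. initialize collection dirty flags before dirty checking
  prepareCollectionFlushes(persistenceContext);

  // 3. dirty-check entities and collections, scheduling the resulting actions;
  //    collections initialized while flushing are ignored until the next flush
  persistenceContext.setFlushing(true);
  try {
    flushEntities(event);
    flushCollections(session, persistenceContext);
  } finally {
    persistenceContext.setFlushing(false);
  }
  // the scheduled actions are executed afterwards, and postFlush(session) then rebuilds
  // the collection-key mappings (see postFlush above)
}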