/**
 * Process any unreferenced collections and then inspect all known collections,
 * scheduling creates/removes/updates.
 */
private void flushCollections(EventSource session) throws HibernateException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("Processing unreferenced collections");
  }

  List list = IdentityMap.entries(session.getPersistenceContext().getCollectionEntries());
  int size = list.size();
  for (int i = 0; i < size; i++) {
    Map.Entry me = (Map.Entry) list.get(i);
    CollectionEntry ce = (CollectionEntry) me.getValue();
    if (!ce.isReached() && !ce.isIgnore()) {
      Collections.processUnreachableCollection((PersistentCollection) me.getKey(), session);
    }
  }

  // Schedule updates to collections:
  if (LOG.isTraceEnabled()) {
    LOG.trace("Scheduling collection removes/(re)creates/updates");
  }

  list = IdentityMap.entries(session.getPersistenceContext().getCollectionEntries());
  size = list.size();
  ActionQueue actionQueue = session.getActionQueue();
  for (int i = 0; i < size; i++) {
    Map.Entry me = (Map.Entry) list.get(i);
    PersistentCollection coll = (PersistentCollection) me.getKey();
    CollectionEntry ce = (CollectionEntry) me.getValue();

    if (ce.isDorecreate()) {
      session.getInterceptor().onCollectionRecreate(coll, ce.getCurrentKey());
      actionQueue.addAction(
          new CollectionRecreateAction(coll, ce.getCurrentPersister(), ce.getCurrentKey(), session));
    }
    if (ce.isDoremove()) {
      session.getInterceptor().onCollectionRemove(coll, ce.getLoadedKey());
      actionQueue.addAction(
          new CollectionRemoveAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), ce.isSnapshotEmpty(coll), session));
    }
    if (ce.isDoupdate()) {
      session.getInterceptor().onCollectionUpdate(coll, ce.getLoadedKey());
      actionQueue.addAction(
          new CollectionUpdateAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), ce.isSnapshotEmpty(coll), session));
    }
  }

  actionQueue.sortCollectionActions();
}
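// Hedged usage sketch (Order/LineItem is a hypothetical mapping, not from this
// codebase): replacing the loaded collection reference leaves the old instance
// "unreached" during flush, so the method above hands it to
// Collections.processUnreachableCollection(), which schedules its removal.
private static void dereferenceExample(Session session, Long orderId) {
  Order order = (Order) session.get(Order.class, orderId);
  order.setLineItems(new ArrayList<LineItem>()); // old list is now unreferenced
  session.flush(); // old list: !isReached() && !isIgnore() -> removal scheduled
}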
/**
 * 1. Recreate the collection key -> collection map
 * 2. Rebuild the collection entries
 * 3. Call Interceptor.postFlush()
 */
protected void postFlush(SessionImplementor session) throws HibernateException {
  LOG.trace("Post flush");

  final PersistenceContext persistenceContext = session.getPersistenceContext();
  persistenceContext.getCollectionsByKey().clear();
  // the database has changed now, so the subselect results need to be invalidated
  persistenceContext.getBatchFetchQueue().clearSubselects();

  Iterator iter = persistenceContext.getCollectionEntries().entrySet().iterator();
  while (iter.hasNext()) {
    Map.Entry me = (Map.Entry) iter.next();
    CollectionEntry collectionEntry = (CollectionEntry) me.getValue();
    PersistentCollection persistentCollection = (PersistentCollection) me.getKey();
    collectionEntry.postFlush(persistentCollection);
    if (collectionEntry.getLoadedPersister() == null) {
      // if the collection is dereferenced, remove from the session cache
      // iter.remove(); //does not work, since the entrySet is not backed by the set
      persistenceContext.getCollectionEntries().remove(persistentCollection);
    } else {
      // otherwise recreate the mapping between the collection and its key
      CollectionKey collectionKey =
          new CollectionKey(collectionEntry.getLoadedPersister(), collectionEntry.getLoadedKey());
      persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
    }
  }

  session.getInterceptor().postFlush(new LazyIterator(persistenceContext.getEntitiesByKey()));
}
/**
 * Compile the query (generate the SQL).
 *
 * @throws org.hibernate.MappingException Indicates problems resolving things referenced in the
 *     query.
 * @throws org.hibernate.QueryException Generally some form of syntactic failure.
 */
private void compile() throws QueryException, MappingException {
  LOG.trace("Compiling query");
  try {
    ParserHelper.parse(
        new PreprocessingParser(tokenReplacements),
        queryString,
        ParserHelper.HQL_SEPARATORS,
        this);
    renderSQL();
  } catch (QueryException qe) {
    qe.setQueryString(queryString);
    throw qe;
  } catch (MappingException me) {
    throw me;
  } catch (Exception e) {
    // log once and rethrow; no need to also dump the stack trace to stderr
    LOG.debug("Unexpected query compilation problem", e);
    QueryException qe = new QueryException("Incorrect query syntax", e);
    qe.setQueryString(queryString);
    throw qe;
  }
  postInstantiate();
  compiled = true;
}
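// Hedged usage sketch: a syntactically broken HQL string surfaces from the
// compile() above as a QueryException that carries the offending query text,
// thanks to qe.setQueryString(queryString). (Cat is a hypothetical entity.)
private static void badHqlExample(Session session) {
  try {
    session.createQuery("frmo Cat c").list(); // typo: "frmo" instead of "from"
  } catch (QueryException qe) {
    // getQueryString() echoes the query that failed to compile
    System.err.println("Failed to compile: " + qe.getQueryString());
  }
}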
/**
 * Handle the given update event.
 *
 * @param event The update event to be handled.
 */
public void onSaveOrUpdate(SaveOrUpdateEvent event) {
  final SessionImplementor source = event.getSession();
  final Object object = event.getObject();
  final Serializable requestedId = event.getRequestedId();

  if (requestedId != null) {
    // assign the requested id to the proxy, *before* reassociating the proxy
    if (object instanceof HibernateProxy) {
      ((HibernateProxy) object).getHibernateLazyInitializer().setIdentifier(requestedId);
    }
  }

  // For an uninitialized proxy, noop; we don't even need to return an id, since it is never a save()
  if (reassociateIfUninitializedProxy(object, source)) {
    LOG.trace("Reassociated uninitialized proxy");
  } else {
    // initialize properties of the event:
    final Object entity = source.getPersistenceContext().unproxyAndReassociate(object);
    event.setEntity(entity);
    event.setEntry(source.getPersistenceContext().getEntry(entity));
    // return the id in the event object
    event.setResultId(performSaveOrUpdate(event));
  }
}
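// Hedged usage sketch (Cat is a hypothetical mapped entity): the listener
// above backs session.saveOrUpdate(), which saves transient instances and
// schedules updates for detached ones.
private static void saveOrUpdateExample(Session session) {
  Cat fritz = new Cat();        // transient: no persistence-context entry yet
  session.saveOrUpdate(fritz);  // routed to save processing
  session.evict(fritz);         // fritz is now detached
  fritz.setName("Fritz");
  session.saveOrUpdate(fritz);  // routed to update processing
}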
/**
 * 1. Recreate the collection key -> collection map
 * 2. Rebuild the collection entries
 * 3. Call Interceptor.postFlush()
 */
protected void postFlush(SessionImplementor session) throws HibernateException {
  LOG.trace("Post flush");

  final PersistenceContext persistenceContext = session.getPersistenceContext();
  persistenceContext.getCollectionsByKey().clear();

  // the database has changed now, so the subselect results need to be invalidated;
  // the batch fetching queues should also be cleared - especially the collection
  // batch fetching one
  persistenceContext.getBatchFetchQueue().clear();

  for (Map.Entry<PersistentCollection, CollectionEntry> me :
      IdentityMap.concurrentEntries(persistenceContext.getCollectionEntries())) {
    CollectionEntry collectionEntry = me.getValue();
    PersistentCollection persistentCollection = me.getKey();
    collectionEntry.postFlush(persistentCollection);
    if (collectionEntry.getLoadedPersister() == null) {
      // if the collection is dereferenced, unset its session reference and
      // remove from the session cache
      // iter.remove(); //does not work, since the entrySet is not backed by the set
      persistentCollection.unsetSession(session);
      persistenceContext.getCollectionEntries().remove(persistentCollection);
    } else {
      // otherwise recreate the mapping between the collection and its key
      CollectionKey collectionKey =
          new CollectionKey(collectionEntry.getLoadedPersister(), collectionEntry.getLoadedKey());
      persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
    }
  }
}
/**
 * Coordinates the processing necessary to get things ready for execution as database calls by
 * prepping the session caches and moving the appropriate entities and collections to their
 * respective execution queues.
 *
 * @param event The flush event.
 * @throws HibernateException Error flushing caches to execution queues.
 */
protected void flushEverythingToExecutions(FlushEvent event) throws HibernateException {
  LOG.trace("Flushing session");

  EventSource session = event.getSession();
  final PersistenceContext persistenceContext = session.getPersistenceContext();
  session.getInterceptor().preFlush(new LazyIterator(persistenceContext.getEntitiesByKey()));

  prepareEntityFlushes(session, persistenceContext);
  // we could move this inside if we wanted to
  // tolerate collection initializations during
  // collection dirty checking:
  prepareCollectionFlushes(persistenceContext);
  // now, any collections that are initialized
  // inside this block do not get updated - they
  // are ignored until the next flush

  persistenceContext.setFlushing(true);
  try {
    int entityCount = flushEntities(event, persistenceContext);
    int collectionCount = flushCollections(session, persistenceContext);

    event.setNumberOfEntitiesProcessed(entityCount);
    event.setNumberOfCollectionsProcessed(collectionCount);
  } finally {
    persistenceContext.setFlushing(false);
  }

  // some statistics
  logFlushResults(event);
}
public void injectSessionFactory(SessionFactoryImplementor factory) {
  if (this.factory != null) {
    LOG.scopingTypesToSessionFactoryAfterAlreadyScoped(this.factory, factory);
  } else {
    LOG.trace("Scoping types to session factory " + factory);
  }
  this.factory = factory;
}
protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException {
  LOG.trace("Ignoring persistent instance");

  EntityEntry entityEntry = event.getEntry();
  if (entityEntry == null) {
    throw new AssertionFailure("entity was transient or detached");
  }
  if (entityEntry.getStatus() == Status.DELETED) {
    throw new AssertionFailure("entity was deleted");
  }

  final SessionFactoryImplementor factory = event.getSession().getFactory();

  Serializable requestedId = event.getRequestedId();
  Serializable savedId;
  if (requestedId == null) {
    savedId = entityEntry.getId();
  } else {
    // renamed from the misleading 'isEqual': the flag is true when the
    // requested id does NOT match the id already associated with the entity
    final boolean isNotEqual =
        !entityEntry
            .getPersister()
            .getIdentifierType()
            .isEqual(requestedId, entityEntry.getId(), event.getSession().getEntityMode(), factory);
    if (isNotEqual) {
      throw new PersistentObjectException(
          "object passed to save() was already persistent: "
              + MessageHelper.infoString(entityEntry.getPersister(), requestedId, factory));
    }
    savedId = requestedId;
  }

  if (LOG.isTraceEnabled()) {
    LOG.trace(
        "Object already associated with session: "
            + MessageHelper.infoString(entityEntry.getPersister(), savedId, factory));
  }

  return savedId;
}
@Override
public <T> T delegateWork(WorkExecutorVisitable<T> work, boolean transacted)
    throws HibernateException {
  boolean wasAutoCommit = false;
  try {
    // todo : should we use a connection proxy here?
    Connection connection = connectionProvider().getConnection();
    try {
      if (transacted) {
        if (connection.getAutoCommit()) {
          wasAutoCommit = true;
          connection.setAutoCommit(false);
        }
      }

      T result = work.accept(new WorkExecutor<T>(), connection);

      if (transacted) {
        connection.commit();
      }

      return result;
    } catch (Exception e) {
      try {
        if (transacted && !connection.isClosed()) {
          connection.rollback();
        }
      } catch (Exception ignore) {
        LOG.unableToRollbackConnection(ignore);
      }

      if (e instanceof HibernateException) {
        throw (HibernateException) e;
      } else if (e instanceof SQLException) {
        throw sqlExceptionHelper().convert((SQLException) e, "error performing isolated work");
      } else {
        throw new HibernateException("error performing isolated work", e);
      }
    } finally {
      if (transacted && wasAutoCommit) {
        try {
          connection.setAutoCommit(true);
        } catch (Exception ignore) {
          LOG.trace("was unable to reset connection back to auto-commit");
        }
      }
      try {
        connectionProvider().closeConnection(connection);
      } catch (Exception ignore) {
        LOG.unableToReleaseIsolatedConnection(ignore);
      }
    }
  } catch (SQLException sqle) {
    throw sqlExceptionHelper().convert(sqle, "unable to obtain isolated JDBC connection");
  }
}
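// The same autocommit save/restore pattern as above, as a minimal standalone
// JDBC sketch (ds is an assumed javax.sql.DataSource; the work itself is
// elided): suspend auto-commit, commit or roll back, then restore the original
// mode before releasing the connection.
private static void isolatedWorkSketch(javax.sql.DataSource ds) throws SQLException {
  Connection connection = ds.getConnection();
  boolean wasAutoCommit = false;
  try {
    if (connection.getAutoCommit()) {
      wasAutoCommit = true;
      connection.setAutoCommit(false);
    }
    // ... perform the isolated work on 'connection' here ...
    connection.commit();
  } catch (SQLException e) {
    if (!connection.isClosed()) {
      connection.rollback();
    }
    throw e;
  } finally {
    if (wasAutoCommit) {
      connection.setAutoCommit(true); // restore the connection's original mode
    }
    connection.close();
  }
}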
/**
 * 1. Record the collection role that this collection is referenced by
 * 2. Decide if the collection needs deleting/creating/updating (but don't actually schedule the
 *    action yet)
 */
@SuppressWarnings({"JavaDoc"})
private static void prepareCollectionForUpdate(
    PersistentCollection collection, CollectionEntry entry, SessionFactoryImplementor factory) {
  if (entry.isProcessed()) {
    throw new AssertionFailure("collection was processed twice by flush()");
  }
  entry.setProcessed(true);

  final CollectionPersister loadedPersister = entry.getLoadedPersister();
  final CollectionPersister currentPersister = entry.getCurrentPersister();

  if (loadedPersister != null || currentPersister != null) {
    // it is or was referenced _somewhere_
    boolean ownerChanged =
        loadedPersister != currentPersister // if either its role changed,
            || !currentPersister
                .getKeyType()
                .isEqual(entry.getLoadedKey(), entry.getCurrentKey(), factory); // or its key changed

    if (ownerChanged) {
      // do a check
      final boolean orphanDeleteAndRoleChanged =
          loadedPersister != null && currentPersister != null && loadedPersister.hasOrphanDelete();
      if (orphanDeleteAndRoleChanged) {
        throw new HibernateException(
            "Don't change the reference to a collection with cascade=\"all-delete-orphan\": "
                + loadedPersister.getRole());
      }

      // do the work
      if (currentPersister != null) {
        entry.setDorecreate(true); // we will need to create new entries
      }
      if (loadedPersister != null) {
        entry.setDoremove(true); // we will need to remove ye olde entries
        if (entry.isDorecreate()) {
          LOG.trace("Forcing collection initialization");
          collection.forceInitialization();
        }
      }
    } else if (collection.isDirty()) {
      // the collection's elements have changed
      entry.setDoupdate(true);
    }
  }
}
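// Hedged sketch (hypothetical Parent/Child mapping with
// cascade="all-delete-orphan" on children): the ownerChanged branch above is
// what rejects swapping the collection *reference* instead of mutating the
// collection in place.
private static void orphanDeleteReferenceSwap(Session session, Long parentId) {
  Parent parent = (Parent) session.get(Parent.class, parentId);
  parent.getChildren().clear();             // OK: in-place mutation, same reference
  parent.setChildren(new HashSet<Child>()); // not OK: new reference -> ownerChanged
  session.flush(); // HibernateException: "Don't change the reference to a
                   // collection with cascade="all-delete-orphan": ..."
}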
/** {@inheritDoc} */
public final void bind(PreparedStatement st, J value, int index, WrapperOptions options)
    throws SQLException {
  if (value == null) {
    if (LOG.isTraceEnabled()) {
      LOG.trace(
          String.format(
              NULL_BIND_MSG_TEMPLATE,
              index,
              JdbcTypeNameMapper.getTypeName(sqlDescriptor.getSqlType())));
    }
    st.setNull(index, sqlDescriptor.getSqlType());
  } else {
    if (LOG.isTraceEnabled()) {
      LOG.trace(
          String.format(
              BIND_MSG_TEMPLATE,
              index,
              JdbcTypeNameMapper.getTypeName(sqlDescriptor.getSqlType()),
              getJavaDescriptor().extractLoggableRepresentation(value)));
    }
    doBind(st, value, index, options);
  }
}
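// The null/non-null split above mirrors what raw JDBC requires: a null
// parameter must go through setNull() with an explicit java.sql.Types code,
// while a value uses a typed setter. A minimal sketch (hypothetical CAT table):
private static void bindSketch(Connection connection, String name, long id) throws SQLException {
  PreparedStatement st = connection.prepareStatement("update CAT set NAME = ? where ID = ?");
  if (name == null) {
    st.setNull(1, Types.VARCHAR); // what bind() does via st.setNull(index, sqlType)
  } else {
    st.setString(1, name);        // what doBind() ultimately performs
  }
  st.setLong(2, id);
  st.executeUpdate();
  st.close();
}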
public Date seed(SessionImplementor session) {
  if (session == null) {
    LOG.trace("Incoming session was null; using current jvm time");
    return super.seed(session);
  }
  if (!session.getFactory().getDialect().supportsCurrentTimestampSelection()) {
    LOG.debug(
        "Falling back to vm-based timestamp, as dialect does not support current timestamp selection");
    return super.seed(session);
  }
  return getCurrentTimestamp(session);
}
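// The fallback logic above in isolation (hedged sketch; 'dialect' and
// getCurrentTimestamp() stand in for the surrounding class's members): prefer
// the database clock when the dialect can select it, else the JVM clock.
private Date seedSketch(SessionImplementor session, Dialect dialect) {
  return (session != null && dialect.supportsCurrentTimestampSelection())
      ? getCurrentTimestamp(session) // issues the dialect's current-timestamp select
      : new Date();                  // vm-based fallback
}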
/**
 * We encountered a delete request on a transient instance.
 *
 * <p>This is a deviation from historical Hibernate (pre-3.2) behavior to align with the JPA spec,
 * which states that transient entities can be passed to the remove operation, in which case
 * cascades still need to be performed.
 *
 * @param session The session which is the source of the event
 * @param entity The entity being delete processed
 * @param cascadeDeleteEnabled Is cascading of deletes enabled
 * @param persister The entity persister
 * @param transientEntities A cache of already visited transient entities (to avoid infinite
 *     recursion).
 */
protected void deleteTransientEntity(
    EventSource session,
    Object entity,
    boolean cascadeDeleteEnabled,
    EntityPersister persister,
    Set transientEntities) {
  LOG.handlingTransientEntity();
  if (transientEntities.contains(entity)) {
    LOG.trace("Already handled transient entity; skipping");
    return;
  }
  transientEntities.add(entity);
  cascadeBeforeDelete(session, persister, entity, null, transientEntities);
  cascadeAfterDelete(session, persister, entity, transientEntities);
}
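// Hedged sketch (hypothetical Parent/Child mapping with delete cascading on
// children): per the JPA alignment described above, passing a transient
// instance to delete() is not an error; the cascades still visit its
// associations, and the transientEntities set stops repeat visits.
private static void deleteTransientExample(Session session, Child persistentChild) {
  Parent neverSaved = new Parent();   // transient: no row, no persistence-context entry
  neverSaved.getChildren().add(persistentChild);
  session.delete(neverSaved);         // no delete for the parent itself,
                                      // but the cascade runs over its children
}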
/**
 * Execute all SQL and second-level cache updates, in a special order so that foreign-key
 * constraints cannot be violated:
 *
 * <ol>
 *   <li>Inserts, in the order they were performed
 *   <li>Updates
 *   <li>Deletion of collection elements
 *   <li>Insertion of collection elements
 *   <li>Deletes, in the order they were performed
 * </ol>
 */
protected void performExecutions(EventSource session) throws HibernateException {
  LOG.trace("Executing flush");
  try {
    session.getTransactionCoordinator().getJdbcCoordinator().flushBeginning();
    // we need to lock the collection caches before
    // executing entity inserts/updates in order to
    // account for bidi associations
    session.getActionQueue().prepareActions();
    session.getActionQueue().executeActions();
  } finally {
    session.getTransactionCoordinator().getJdbcCoordinator().flushEnding();
  }
}
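// Why the fixed ordering above matters, sketched with a hypothetical
// parent/child foreign key (CHILD.PARENT_ID references PARENT.ID): replaying
// inserts in the order they were performed guarantees the parent row exists
// before the child row that points at it.
private static void insertOrderingExample(Session session) {
  Parent parent = new Parent();
  Child child = new Child(parent);
  session.save(parent); // insert #1
  session.save(child);  // insert #2 - must execute after #1 at flush time
  session.flush();      // performExecutions() preserves that order
}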
private void logDirtyProperties(Serializable id, int[] dirtyProperties, EntityPersister persister) {
  if (LOG.isTraceEnabled() && dirtyProperties != null && dirtyProperties.length > 0) {
    final String[] allPropertyNames = persister.getPropertyNames();
    final String[] dirtyPropertyNames = new String[dirtyProperties.length];
    for (int i = 0; i < dirtyProperties.length; i++) {
      dirtyPropertyNames[i] = allPropertyNames[dirtyProperties[i]];
    }
    // use Arrays.toString(): concatenating the array directly would log its
    // identity hash (e.g. "[Ljava.lang.String;@1f2a..."), not its contents
    LOG.trace(
        "Found dirty properties ["
            + MessageHelper.infoString(persister.getEntityName(), id)
            + "] : "
            + Arrays.toString(dirtyPropertyNames));
  }
}
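// The index-to-name resolution above in isolation: dirtyProperties holds
// indexes into the persister's property-name array. A self-contained sketch
// with hard-coded data (requires java.util.Arrays):
private static void dirtyNameResolutionSketch() {
  String[] allPropertyNames = {"name", "birthdate", "weight"};
  int[] dirtyProperties = {0, 2}; // as produced by dirty checking
  String[] dirtyPropertyNames = new String[dirtyProperties.length];
  for (int i = 0; i < dirtyProperties.length; i++) {
    dirtyPropertyNames[i] = allPropertyNames[dirtyProperties[i]];
  }
  System.out.println(Arrays.toString(dirtyPropertyNames)); // -> [name, weight]
}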
@Override
public void releaseResources() {
  log.trace("Releasing JDBC resources");

  for (Map.Entry<Statement, Set<ResultSet>> entry : xref.entrySet()) {
    if (entry.getValue() != null) {
      closeAll(entry.getValue());
    }
    close(entry.getKey());
  }
  xref.clear();

  closeAll(unassociatedResultSets);

  if (blobs != null) {
    for (Blob blob : blobs) {
      try {
        blob.free();
      } catch (SQLException e) {
        log.debugf("Unable to free JDBC Blob reference [%s]", e.getMessage());
      }
    }
    blobs.clear();
  }
  if (clobs != null) {
    for (Clob clob : clobs) {
      try {
        clob.free();
      } catch (SQLException e) {
        log.debugf("Unable to free JDBC Clob reference [%s]", e.getMessage());
      }
    }
    clobs.clear();
  }
  if (nclobs != null) {
    for (NClob nclob : nclobs) {
      try {
        nclob.free();
      } catch (SQLException e) {
        log.debugf("Unable to free JDBC NClob reference [%s]", e.getMessage());
      }
    }
    nclobs.clear();
  }
}
/**
 * The given save-update event named a detached entity.
 *
 * <p>Here, we will perform the update processing.
 *
 * @param event The update event to be handled.
 */
protected void entityIsDetached(SaveOrUpdateEvent event) {
  LOG.trace("Updating detached instance");

  if (event.getSession().getPersistenceContext().isEntryFor(event.getEntity())) {
    // TODO: assertion only, could be optimized away
    throw new AssertionFailure("entity was persistent");
  }

  Object entity = event.getEntity();
  EntityPersister persister = event.getSession().getEntityPersister(event.getEntityName(), entity);

  event.setRequestedId(getUpdateId(entity, persister, event.getRequestedId(), event.getSession()));

  performUpdate(event, entity, persister);
}
/**
 * 1. Detect any dirty entities
 * 2. Schedule any entity updates
 * 3. Search out any reachable collections
 */
private int flushEntities(final FlushEvent event, final PersistenceContext persistenceContext)
    throws HibernateException {
  LOG.trace("Flushing entities and processing referenced collections");

  final EventSource source = event.getSession();
  final Iterable<FlushEntityEventListener> flushListeners =
      source
          .getFactory()
          .getServiceRegistry()
          .getService(EventListenerRegistry.class)
          .getEventListenerGroup(EventType.FLUSH_ENTITY)
          .listeners();

  // Among other things, updateReachables() will recursively load all
  // collections that are moving roles. This might cause entities to
  // be loaded.
  // So this needs to be safe from concurrent modification problems.
  final Map.Entry<Object, EntityEntry>[] entityEntries =
      persistenceContext.reentrantSafeEntityEntries();
  final int count = entityEntries.length;

  for (Map.Entry<Object, EntityEntry> me : entityEntries) {
    // Update the status of the object and if necessary, schedule an update
    EntityEntry entry = me.getValue();
    Status status = entry.getStatus();

    if (status != Status.LOADING && status != Status.GONE) {
      final FlushEntityEvent entityEvent = new FlushEntityEvent(source, me.getKey(), entry);
      for (FlushEntityEventListener listener : flushListeners) {
        listener.onFlushEntity(entityEvent);
      }
    }
  }

  source.getActionQueue().sortActions();

  return count;
}
private <R extends Service> R initializeService(ServiceBinding<R> serviceBinding) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("Initializing service [role=" + serviceBinding.getServiceRole().getName() + "]");
  }

  // PHASE 1 : create service
  R service = createService(serviceBinding);
  if (service == null) {
    return null;
  }

  // PHASE 2 : configure service (***potentially recursive***)
  configureService(service);

  // PHASE 3 : Start service
  startService(serviceBinding);

  return service;
}
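// The three-phase lifecycle above as a generic sketch (the LifecycleSketch
// interface and initializeSketch() helper are illustrative, not Hibernate
// API): create the instance, configure it (which may recursively initialize
// the services it depends on), then start it.
interface LifecycleSketch {
  void configure(); // may trigger initialization of other services
  void start();
}

private static <T extends LifecycleSketch> T initializeSketch(
    java.util.function.Supplier<T> factory) {
  T service = factory.get(); // PHASE 1 : create
  if (service == null) {
    return null;
  }
  service.configure();       // PHASE 2 : configure (potentially recursive)
  service.start();           // PHASE 3 : start
  return service;
}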
/**
 * Execute all SQL (and second-level cache updates) in a special order so that foreign-key
 * constraints cannot be violated:
 *
 * <ol>
 *   <li>Inserts, in the order they were performed
 *   <li>Updates
 *   <li>Deletion of collection elements
 *   <li>Insertion of collection elements
 *   <li>Deletes, in the order they were performed
 * </ol>
 *
 * @param session The session being flushed
 */
protected void performExecutions(EventSource session) {
  LOG.trace("Executing flush");

  // IMPL NOTE : here we alter the flushing flag of the persistence context to allow
  // during-flush callbacks more leniency in regards to initializing proxies and
  // lazy collections during their processing.
  // For more information, see HHH-2763
  try {
    session.getJdbcCoordinator().flushBeginning();
    session.getPersistenceContext().setFlushing(true);
    // we need to lock the collection caches before executing entity inserts/updates
    // in order to account for bi-directional associations
    session.getActionQueue().prepareActions();
    session.getActionQueue().executeActions();
  } finally {
    session.getPersistenceContext().setFlushing(false);
    session.getJdbcCoordinator().flushEnding();
  }
}
/**
 * The given save-update event named a transient entity.
 *
 * <p>Here, we will perform the save processing.
 *
 * @param event The save event to be handled.
 * @return The entity's identifier after saving.
 */
protected Serializable entityIsTransient(SaveOrUpdateEvent event) {
  LOG.trace("Saving transient instance");

  final EventSource source = event.getSession();

  EntityEntry entityEntry = event.getEntry();
  if (entityEntry != null) {
    if (entityEntry.getStatus() == Status.DELETED) {
      source.forceFlush(entityEntry);
    } else {
      throw new AssertionFailure("entity was persistent");
    }
  }

  Serializable id = saveWithGeneratedOrRequestedId(event);

  source.getPersistenceContext().reassociateProxy(event.getObject(), id);

  return id;
}
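// Hedged usage sketch: the forceFlush() branch above covers "resurrecting" an
// entity that was deleted earlier in the same session - the pending delete is
// flushed first, so the subsequent save produces a fresh row. (Cat is a
// hypothetical mapped entity.)
private static void resurrectExample(Session session, Cat cat) {
  session.delete(cat); // entry status becomes DELETED
  session.save(cat);   // entityIsTransient(): forceFlush(entry), then save
}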
/**
 * 1. Detect any dirty entities
 * 2. Schedule any entity updates
 * 3. Search out any reachable collections
 */
private void flushEntities(FlushEvent event) throws HibernateException {
  LOG.trace("Flushing entities and processing referenced collections");

  // Among other things, updateReachables() will recursively load all
  // collections that are moving roles. This might cause entities to
  // be loaded.
  // So this needs to be safe from concurrent modification problems.
  // It is safe because of how IdentityMap implements entrySet()
  final EventSource source = event.getSession();

  final Map.Entry[] list =
      IdentityMap.concurrentEntries(source.getPersistenceContext().getEntityEntries());
  final int size = list.length;
  for (int i = 0; i < size; i++) {
    // Update the status of the object and if necessary, schedule an update
    Map.Entry me = list[i];
    EntityEntry entry = (EntityEntry) me.getValue();
    Status status = entry.getStatus();

    if (status != Status.LOADING && status != Status.GONE) {
      final FlushEntityEvent entityEvent = new FlushEntityEvent(source, me.getKey(), entry);
      final EventListenerGroup<FlushEntityEventListener> listenerGroup =
          source
              .getFactory()
              .getServiceRegistry()
              .getService(EventListenerRegistry.class)
              .getEventListenerGroup(EventType.FLUSH_ENTITY);
      for (FlushEntityEventListener listener : listenerGroup.listeners()) {
        listener.onFlushEntity(entityEvent);
      }
    }
  }

  source.getActionQueue().sortActions();
}
protected void performUpdate(SaveOrUpdateEvent event, Object entity, EntityPersister persister)
    throws HibernateException {
  if (!persister.isMutable()) {
    LOG.trace("Immutable instance passed to performUpdate()");
  }

  if (LOG.isTraceEnabled()) {
    LOG.trace(
        "Updating "
            + MessageHelper.infoString(
                persister, event.getRequestedId(), event.getSession().getFactory()));
  }

  final EventSource source = event.getSession();
  final EntityKey key = source.generateEntityKey(event.getRequestedId(), persister);

  source.getPersistenceContext().checkUniqueness(key, entity);

  if (invokeUpdateLifecycle(entity, persister, source)) {
    reassociate(event, event.getObject(), event.getRequestedId(), persister);
    return;
  }

  // this is a transient object with existing persistent state not loaded by the session
  new OnUpdateVisitor(source, event.getRequestedId(), entity).process(entity, persister);

  // TODO: put this stuff back in to read snapshot from
  // the second-level cache (needs some extra work)
  /*Object[] cachedState = null;
  if ( persister.hasCache() ) {
      CacheEntry entry = (CacheEntry) persister.getCache()
              .get( event.getRequestedId(), source.getTimestamp() );
      cachedState = entry==null ? null : entry.getState();
      //TODO: half-assemble this stuff
  }*/

  source
      .getPersistenceContext()
      .addEntity(
          entity,
          (persister.isMutable() ? Status.MANAGED : Status.READ_ONLY),
          null, // cachedState,
          key,
          persister.getVersion(entity, source.getEntityMode()),
          LockMode.NONE,
          true,
          persister,
          false,
          true // assume true, since we don't really know, and it doesn't matter
          );

  persister.afterReassociate(entity, source);

  if (LOG.isTraceEnabled()) {
    LOG.trace(
        "Updating "
            + MessageHelper.infoString(persister, event.getRequestedId(), source.getFactory()));
  }

  cascadeOnUpdate(event, persister, entity);
}
/**
 * Process any unreferenced collections and then inspect all known collections, scheduling
 * creates/removes/updates.
 */
@SuppressWarnings("unchecked")
private int flushCollections(final EventSource session, final PersistenceContext persistenceContext)
    throws HibernateException {
  LOG.trace("Processing unreferenced collections");

  final Map.Entry<PersistentCollection, CollectionEntry>[] entries =
      IdentityMap.concurrentEntries(
          (Map<PersistentCollection, CollectionEntry>) persistenceContext.getCollectionEntries());

  final int count = entries.length;

  for (Map.Entry<PersistentCollection, CollectionEntry> me : entries) {
    CollectionEntry ce = me.getValue();
    if (!ce.isReached() && !ce.isIgnore()) {
      Collections.processUnreachableCollection(me.getKey(), session);
    }
  }

  // Schedule updates to collections:
  LOG.trace("Scheduling collection removes/(re)creates/updates");

  ActionQueue actionQueue = session.getActionQueue();
  for (Map.Entry<PersistentCollection, CollectionEntry> me :
      IdentityMap.concurrentEntries(
          (Map<PersistentCollection, CollectionEntry>) persistenceContext.getCollectionEntries())) {
    PersistentCollection coll = me.getKey();
    CollectionEntry ce = me.getValue();

    if (ce.isDorecreate()) {
      session.getInterceptor().onCollectionRecreate(coll, ce.getCurrentKey());
      actionQueue.addAction(
          new CollectionRecreateAction(coll, ce.getCurrentPersister(), ce.getCurrentKey(), session));
    }
    if (ce.isDoremove()) {
      session.getInterceptor().onCollectionRemove(coll, ce.getLoadedKey());
      actionQueue.addAction(
          new CollectionRemoveAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), ce.isSnapshotEmpty(coll), session));
    }
    if (ce.isDoupdate()) {
      session.getInterceptor().onCollectionUpdate(coll, ce.getLoadedKey());
      actionQueue.addAction(
          new CollectionUpdateAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), ce.isSnapshotEmpty(coll), session));
    }
    // todo : I'm not sure the !wasInitialized part should really be part of this check
    if (!coll.wasInitialized() && coll.hasQueuedOperations()) {
      actionQueue.addAction(
          new QueuedOperationCollectionAction(
              coll, ce.getLoadedPersister(), ce.getLoadedKey(), session));
    }
  }

  actionQueue.sortCollectionActions();

  return count;
}
/**
 * Handle the given delete event. This is the cascaded form.
 *
 * @param event The delete event.
 * @param transientEntities The cache of entities already deleted
 * @throws HibernateException
 */
public void onDelete(DeleteEvent event, Set transientEntities) throws HibernateException {
  final EventSource source = event.getSession();
  final PersistenceContext persistenceContext = source.getPersistenceContext();
  Object entity = persistenceContext.unproxyAndReassociate(event.getObject());

  EntityEntry entityEntry = persistenceContext.getEntry(entity);
  final EntityPersister persister;
  final Serializable id;
  final Object version;

  if (entityEntry == null) {
    LOG.trace("Entity was not persistent in delete processing");

    persister = source.getEntityPersister(event.getEntityName(), entity);

    if (ForeignKeys.isTransient(persister.getEntityName(), entity, null, source)) {
      deleteTransientEntity(
          source, entity, event.isCascadeDeleteEnabled(), persister, transientEntities);
      // EARLY EXIT!!!
      return;
    }
    performDetachedEntityDeletionCheck(event);

    id = persister.getIdentifier(entity, source);
    if (id == null) {
      throw new TransientObjectException(
          "the detached instance passed to delete() had a null identifier");
    }

    final EntityKey key = source.generateEntityKey(id, persister);

    persistenceContext.checkUniqueness(key, entity);

    new OnUpdateVisitor(source, id, entity).process(entity, persister);

    version = persister.getVersion(entity);

    entityEntry =
        persistenceContext.addEntity(
            entity,
            (persister.isMutable() ? Status.MANAGED : Status.READ_ONLY),
            persister.getPropertyValues(entity),
            key,
            version,
            LockMode.NONE,
            true,
            persister,
            false);
  } else {
    LOG.trace("Deleting a persistent instance");

    if (entityEntry.getStatus() == Status.DELETED || entityEntry.getStatus() == Status.GONE) {
      LOG.trace("Object was already deleted");
      return;
    }
    persister = entityEntry.getPersister();
    id = entityEntry.getId();
    version = entityEntry.getVersion();
  }

  /*if ( !persister.isMutable() ) {
      throw new HibernateException(
              "attempted to delete an object of immutable class: " +
              MessageHelper.infoString(persister)
      );
  }*/

  if (invokeDeleteLifecycle(source, entity, persister)) {
    return;
  }

  deleteEntity(
      source,
      entity,
      entityEntry,
      event.isCascadeDeleteEnabled(),
      event.isOrphanRemovalBeforeUpdates(),
      persister,
      transientEntities);

  if (source.getFactory().getSettings().isIdentifierRollbackEnabled()) {
    persister.resetIdentifier(entity, id, version, source);
  }
}
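// Hedged sketch of the identifier-rollback branch at the end of onDelete():
// with hibernate.use_identifier_rollback=true, deleting and flushing resets
// the identifier to its unsaved value (null here, assuming a hypothetical Cat
// entity with a nullable Long id), so the instance can be treated as
// transient again.
private static void identifierRollbackExample(Session session, Cat cat) {
  session.delete(cat);
  session.flush();
  assert cat.getId() == null; // persister.resetIdentifier(...) cleared it
}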
private boolean scheduleUpdate(final FlushEntityEvent event) {
  final EntityEntry entry = event.getEntityEntry();
  final EventSource session = event.getSession();
  final Object entity = event.getEntity();
  final Status status = entry.getStatus();
  final EntityMode entityMode = session.getEntityMode();
  final EntityPersister persister = entry.getPersister();
  final Object[] values = event.getPropertyValues();

  if (LOG.isTraceEnabled()) {
    if (status == Status.DELETED) {
      if (!persister.isMutable()) {
        LOG.trace(
            "Updating immutable, deleted entity: "
                + MessageHelper.infoString(persister, entry.getId(), session.getFactory()));
      } else if (!entry.isModifiableEntity()) {
        LOG.trace(
            "Updating non-modifiable, deleted entity: "
                + MessageHelper.infoString(persister, entry.getId(), session.getFactory()));
      } else {
        LOG.trace(
            "Updating deleted entity: "
                + MessageHelper.infoString(persister, entry.getId(), session.getFactory()));
      }
    } else {
      LOG.trace(
          "Updating entity: "
              + MessageHelper.infoString(persister, entry.getId(), session.getFactory()));
    }
  }

  final boolean intercepted = !entry.isBeingReplicated() && handleInterception(event);

  // increment the version number (if necessary)
  final Object nextVersion = getNextVersion(event);

  // if it was dirtied by a collection only
  int[] dirtyProperties = event.getDirtyProperties();
  if (event.isDirtyCheckPossible() && dirtyProperties == null) {
    if (!intercepted && !event.hasDirtyCollection()) {
      throw new AssertionFailure("dirty, but no dirty properties");
    }
    dirtyProperties = ArrayHelper.EMPTY_INT_ARRAY;
  }

  // check nullability but do not doAfterTransactionCompletion command execute
  // we'll use scheduled updates for that.
  new Nullability(session).checkNullability(values, persister, true);

  // schedule the update
  // note that we intentionally do _not_ pass in currentPersistentState!
  session
      .getActionQueue()
      .addAction(
          new EntityUpdateAction(
              entry.getId(),
              values,
              dirtyProperties,
              event.hasDirtyCollection(),
              (status == Status.DELETED && !entry.isModifiableEntity()
                  ? persister.getPropertyValues(entity, entityMode)
                  : entry.getLoadedState()),
              entry.getVersion(),
              nextVersion,
              entity,
              entry.getRowId(),
              persister,
              session));

  return intercepted;
}