Example no. 1
0
  private static void processDereferencedCollection(
      PersistentCollection coll, SessionImplementor session) {
    final PersistenceContext persistenceContext = session.getPersistenceContext();
    CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
    final CollectionPersister loadedPersister = entry.getLoadedPersister();

    if (LOG.isDebugEnabled() && loadedPersister != null) {
      LOG.debugf(
          "Collection dereferenced: %s",
          MessageHelper.collectionInfoString(
              loadedPersister, entry.getLoadedKey(), session.getFactory()));
    }

    // orphan-delete check: dereferencing the collection is only legal if the owner itself is being deleted
    boolean hasOrphanDelete = loadedPersister != null && loadedPersister.hasOrphanDelete();
    if (hasOrphanDelete) {
      Serializable ownerId =
          loadedPersister.getOwnerEntityPersister().getIdentifier(coll.getOwner(), session);
      if (ownerId == null) {
        // the owning entity may have been deleted and its identifier unset due to
        // identifier-rollback; in which case, try to look up its identifier from
        // the persistence context
        if (session.getFactory().getSettings().isIdentifierRollbackEnabled()) {
          EntityEntry ownerEntry = persistenceContext.getEntry(coll.getOwner());
          if (ownerEntry != null) {
            ownerId = ownerEntry.getId();
          }
        }
        if (ownerId == null) {
          throw new AssertionFailure(
              "Unable to determine collection owner identifier for orphan-delete processing");
        }
      }
      EntityKey key = session.generateEntityKey(ownerId, loadedPersister.getOwnerEntityPersister());
      Object owner = persistenceContext.getEntity(key);
      if (owner == null) {
        throw new AssertionFailure(
            "collection owner not associated with session: " + loadedPersister.getRole());
      }
      EntityEntry e = persistenceContext.getEntry(owner);
      // only collections belonging to deleted entities are allowed to be dereferenced in the case
      // of orphan delete
      if (e != null && e.getStatus() != Status.DELETED && e.getStatus() != Status.GONE) {
        throw new HibernateException(
            "A collection with cascade=\"all-delete-orphan\" was no longer referenced by the owning entity instance: "
                + loadedPersister.getRole());
      }
    }

    // do the work
    entry.setCurrentPersister(null);
    entry.setCurrentKey(null);
    prepareCollectionForUpdate(coll, entry, session.getFactory());
  }
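A minimal sketch of how the orphan-delete branch above typically fires (the Parent/Child entities are hypothetical, for illustration only): replacing an all-delete-orphan collection reference on a managed, non-deleted owner leads the flush to call processDereferencedCollection() and raise the HibernateException shown above.

import java.util.HashSet;
import java.util.Set;
import javax.persistence.*;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@Entity
class Parent {
  @Id @GeneratedValue Long id;
  @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true)
  Set<Child> children = new HashSet<>();
}

@Entity
class Child {
  @Id @GeneratedValue Long id;
}

class DereferenceExample {
  static void run(SessionFactory sessionFactory, Long parentId) {
    Session session = sessionFactory.openSession();
    session.beginTransaction();
    Parent parent = (Parent) session.get(Parent.class, parentId);
    // Dereferencing the original collection instance (instead of clearing it)
    // is what the orphan-delete check above rejects for a non-deleted owner.
    parent.children = new HashSet<>();
    session.getTransaction().commit(); // expected to fail with the HibernateException above
    session.close();
  }
}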
 private void checkEmptyAuditSessionCache(Session session, String... auditEntityNames) {
   List<String> entityNames = Arrays.asList(auditEntityNames);
   PersistenceContext persistenceContext = ((SessionImplementor) session).getPersistenceContext();
   for (Object entry : persistenceContext.getEntityEntries().values()) {
     EntityEntry entityEntry = (EntityEntry) entry;
     if (entityNames.contains(entityEntry.getEntityName())) {
        Assert.fail(
            "Audit data shall not be stored in the session level cache. This causes performance issues.");
     }
     Assert.assertFalse(
         "Revision entity shall not be stored in the session level cache. This causes performance issues.",
         DefaultRevisionEntity.class.getName().equals(entityEntry.getEntityName()));
   }
 }
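A hypothetical usage sketch, meant to live in the same test class as the helper above (openSession() and the audited Customer entity are assumptions for illustration): load audit data through Envers, then assert that neither the audit rows nor the revision entity leaked into the session-level cache.

import org.hibernate.Session;
import org.hibernate.envers.AuditReader;
import org.hibernate.envers.AuditReaderFactory;
import org.junit.Test;

  @Test
  public void auditQueriesDoNotPopulateSessionCache() {
    Session session = openSession();
    session.beginTransaction();
    AuditReader auditReader = AuditReaderFactory.get(session);
    auditReader.find(Customer.class, 1L, 1); // first revision of the hypothetical audited Customer
    // Envers' default audit entity name is the entity name plus the "_AUD" suffix.
    checkEmptyAuditSessionCache(session, Customer.class.getName() + "_AUD");
    session.getTransaction().commit();
    session.close();
  }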
  @Override
  public void doBeforeTransactionCompletion(SessionImplementor session) {
    final EntityPersister persister = entry.getPersister();

    final Object latestVersion = persister.getCurrentVersion(entry.getId(), session);
    if (!entry.getVersion().equals(latestVersion)) {
      throw new OptimisticLockException(
          object,
          "Newer version ["
              + latestVersion
              + "] of entity ["
              + MessageHelper.infoString(entry.getEntityName(), entry.getId())
              + "] found in database");
    }
  }
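A minimal sketch of what drives the before-completion check above (the versioned Account entity is a hypothetical example): requesting LockMode.OPTIMISTIC on a versioned entity registers a verify-version process that re-reads the current version just before the transaction completes and throws OptimisticLockException if another transaction has bumped it.

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Version;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@Entity
class Account {
  @Id Long id;
  @Version long version;
  long balance;
}

class OptimisticVerifyExample {
  static void read(SessionFactory sessionFactory, Long accountId) {
    Session session = sessionFactory.openSession();
    session.beginTransaction();
    Account account = (Account) session.get(Account.class, accountId);
    // Ask Hibernate to verify the version again just before commit.
    session.buildLockRequest(new LockOptions(LockMode.OPTIMISTIC)).lock(account);
    // ... read account.balance; if a concurrent transaction updated the row,
    // commit fails with org.hibernate.OptimisticLockException.
    session.getTransaction().commit();
    session.close();
  }
}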
  /**
   * process cascade save/update at the start of a flush to discover any newly referenced entity
   * that must be passed to saveOrUpdate(), and also apply orphan delete
   */
  private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext)
      throws HibernateException {

    LOG.debug("Processing flush-time cascades");

    final Object anything = getAnything();
    // safe from concurrent modification because of how reentrantSafeEntityEntries() is
    // implemented on the persistence context
    for (Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries()) {
      EntityEntry entry = me.getValue();
      Status status = entry.getStatus();
      if (status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY) {
        cascadeOnFlush(session, entry.getPersister(), me.getKey(), anything);
      }
    }
  }
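A minimal sketch of the situation the flush-time cascade above is meant to catch (the PurchaseOrder/OrderLine entities are hypothetical): attaching a new, transient child to an already-managed parent and flushing lets cascadeOnFlush() discover the new reference and schedule its insert, without an explicit save() for the child.

import java.util.ArrayList;
import java.util.List;
import javax.persistence.*;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@Entity
class PurchaseOrder {
  @Id @GeneratedValue Long id;
  @OneToMany(cascade = CascadeType.ALL)
  List<OrderLine> lines = new ArrayList<>();
}

@Entity
class OrderLine {
  @Id @GeneratedValue Long id;
  String product;
}

class FlushCascadeExample {
  static void addLine(SessionFactory sessionFactory, Long orderId) {
    Session session = sessionFactory.openSession();
    session.beginTransaction();
    PurchaseOrder order = (PurchaseOrder) session.get(PurchaseOrder.class, orderId);
    OrderLine line = new OrderLine();
    line.product = "coffee";
    order.lines.add(line);   // transient entity now reachable from a managed one
    session.flush();         // prepareEntityFlushes() cascades the save to the new line
    session.getTransaction().commit();
    session.close();
  }
}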
  /**
   * process cascade save/update at the start of a flush to discover any newly referenced entity
   * that must be passed to saveOrUpdate(), and also apply orphan delete
   */
  private void prepareEntityFlushes(EventSource session) throws HibernateException {

    LOG.debug("Processing flush-time cascades");

    final Map.Entry[] list =
        IdentityMap.concurrentEntries(session.getPersistenceContext().getEntityEntries());
    // safe from concurrent modification because of how concurrentEntries() is implemented on IdentityMap
    final int size = list.length;
    final Object anything = getAnything();
    for (int i = 0; i < size; i++) {
      Map.Entry me = list[i];
      EntityEntry entry = (EntityEntry) me.getValue();
      Status status = entry.getStatus();
      if (status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY) {
        cascadeOnFlush(session, entry.getPersister(), me.getKey(), anything);
      }
    }
  }
  /**
   * 1. detect any dirty entities
   * 2. schedule any entity updates
   * 3. search out any reachable collections
   */
  private int flushEntities(final FlushEvent event, final PersistenceContext persistenceContext)
      throws HibernateException {

    LOG.trace("Flushing entities and processing referenced collections");

    final EventSource source = event.getSession();
    final Iterable<FlushEntityEventListener> flushListeners =
        source
            .getFactory()
            .getServiceRegistry()
            .getService(EventListenerRegistry.class)
            .getEventListenerGroup(EventType.FLUSH_ENTITY)
            .listeners();

    // Among other things, updateReachables() will recursively load all
    // collections that are moving roles. This might cause entities to
    // be loaded.

    // So this needs to be safe from concurrent modification problems.

    final Map.Entry<Object, EntityEntry>[] entityEntries =
        persistenceContext.reentrantSafeEntityEntries();
    final int count = entityEntries.length;

    for (Map.Entry<Object, EntityEntry> me : entityEntries) {

      // Update the status of the object and if necessary, schedule an update

      EntityEntry entry = me.getValue();
      Status status = entry.getStatus();

      if (status != Status.LOADING && status != Status.GONE) {
        final FlushEntityEvent entityEvent = new FlushEntityEvent(source, me.getKey(), entry);
        for (FlushEntityEventListener listener : flushListeners) {
          listener.onFlushEntity(entityEvent);
        }
      }
    }

    source.getActionQueue().sortActions();

    return count;
  }
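A sketch of a custom listener plugged into the same FLUSH_ENTITY group that flushEntities() iterates above. The logging body is illustrative only; the registration assumes access to the SessionFactoryImplementor's service registry, as in the code above.

import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.FlushEntityEvent;
import org.hibernate.event.spi.FlushEntityEventListener;

class AuditingFlushEntityListener implements FlushEntityEventListener {
  @Override
  public void onFlushEntity(FlushEntityEvent event) throws HibernateException {
    // Called once per flushed entity, after the built-in dirty-checking listener.
    System.out.println("Flushing " + event.getEntityEntry().getEntityName());
  }

  static void register(SessionFactory sessionFactory) {
    ((SessionFactoryImplementor) sessionFactory)
        .getServiceRegistry()
        .getService(EventListenerRegistry.class)
        .appendListeners(EventType.FLUSH_ENTITY, new AuditingFlushEntityListener());
  }
}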
  /**
   * 1. detect any dirty entities
   * 2. schedule any entity updates
   * 3. search out any reachable collections
   */
  private void flushEntities(FlushEvent event) throws HibernateException {

    LOG.trace("Flushing entities and processing referenced collections");

    // Among other things, updateReachables() will recursively load all
    // collections that are moving roles. This might cause entities to
    // be loaded.

    // So this needs to be safe from concurrent modification problems.
    // It is safe because of how IdentityMap implements concurrentEntries()

    final EventSource source = event.getSession();

    final Map.Entry[] list =
        IdentityMap.concurrentEntries(source.getPersistenceContext().getEntityEntries());
    final int size = list.length;
    for (int i = 0; i < size; i++) {

      // Update the status of the object and if necessary, schedule an update

      Map.Entry me = list[i];
      EntityEntry entry = (EntityEntry) me.getValue();
      Status status = entry.getStatus();

      if (status != Status.LOADING && status != Status.GONE) {
        final FlushEntityEvent entityEvent = new FlushEntityEvent(source, me.getKey(), entry);
        final EventListenerGroup<FlushEntityEventListener> listenerGroup =
            source
                .getFactory()
                .getServiceRegistry()
                .getService(EventListenerRegistry.class)
                .getEventListenerGroup(EventType.FLUSH_ENTITY);
        for (FlushEntityEventListener listener : listenerGroup.listeners()) {
          listener.onFlushEntity(entityEvent);
        }
      }
    }

    source.getActionQueue().sortActions();
  }
  /**
   * Determine whether the entity is persistent, detached, or transient
   *
   * @param entity The entity to check
   * @param entityName The name of the entity
   * @param entry The entity's entry in the persistence context
   * @param source The originating session.
   * @return The state.
   */
  protected EntityState getEntityState(
      Object entity,
      String entityName,
      EntityEntry entry, // pass this as an argument only to avoid double looking
      SessionImplementor source) {

    final boolean traceEnabled = LOG.isTraceEnabled();
    if (entry != null) { // the object is persistent

      // the entity is associated with the session, so check its status
      if (entry.getStatus() != Status.DELETED) {
        // do nothing for persistent instances
        if (traceEnabled) {
          LOG.tracev("Persistent instance of: {0}", getLoggableName(entityName, entity));
        }
        return EntityState.PERSISTENT;
      }
      // i.e. entry's status == DELETED
      if (traceEnabled) {
        LOG.tracev("Deleted instance of: {0}", getLoggableName(entityName, entity));
      }
      return EntityState.DELETED;
    }
    // the object is transient or detached

    // the entity is not associated with the session, so
    // try interceptor and unsaved-value

    if (ForeignKeys.isTransient(entityName, entity, getAssumedUnsaved(), source)) {
      if (traceEnabled) {
        LOG.tracev("Transient instance of: {0}", getLoggableName(entityName, entity));
      }
      return EntityState.TRANSIENT;
    }
    if (traceEnabled) {
      LOG.tracev("Detached instance of: {0}", getLoggableName(entityName, entity));
    }
    return EntityState.DETACHED;
  }
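A minimal sketch of the three states that getEntityState() distinguishes (the Book entity is hypothetical): a new instance is TRANSIENT, an instance loaded in an open session is PERSISTENT, and the same instance after the session closes is DETACHED, so saveOrUpdate() turns into an update.

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@Entity
class Book {
  @Id @GeneratedValue Long id;
  String title;
}

class EntityStateExample {
  static void demo(SessionFactory sessionFactory) {
    Book book = new Book();               // TRANSIENT: no id, not associated with a session
    book.title = "Hibernate in Practice";

    Session session = sessionFactory.openSession();
    session.beginTransaction();
    session.save(book);                    // now PERSISTENT in this session
    session.getTransaction().commit();
    session.close();                       // book becomes DETACHED

    Session second = sessionFactory.openSession();
    second.beginTransaction();
    book.title = "Hibernate in Practice, 2nd ed.";
    second.saveOrUpdate(book);             // classified DETACHED -> scheduled as an update
    second.getTransaction().commit();
    second.close();
  }
}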
  @Override
  public void execute() throws HibernateException {
    final Serializable id = getId();
    final EntityPersister persister = getPersister();
    final SessionImplementor session = getSession();
    final Object instance = getInstance();

    final boolean veto = preUpdate();

    final SessionFactoryImplementor factory = session.getFactory();
    Object previousVersion = this.previousVersion;
    if (persister.isVersionPropertyGenerated()) {
      // we need to grab the version value from the entity, otherwise
      // we have issues with generated-version entities that may have
      // multiple actions queued during the same flush
      previousVersion = persister.getVersion(instance);
    }

    final Object ck;
    if (persister.hasCache()) {
      final EntityRegionAccessStrategy cache = persister.getCacheAccessStrategy();
      ck = cache.generateCacheKey(id, persister, factory, session.getTenantIdentifier());
      lock = cache.lockItem(session, ck, previousVersion);
    } else {
      ck = null;
    }

    if (!veto) {
      persister.update(
          id,
          state,
          dirtyFields,
          hasDirtyCollection,
          previousState,
          previousVersion,
          instance,
          rowId,
          session);
    }

    final EntityEntry entry = session.getPersistenceContext().getEntry(instance);
    if (entry == null) {
      throw new AssertionFailure("possible nonthreadsafe access to session");
    }

    if (entry.getStatus() == Status.MANAGED || persister.isVersionPropertyGenerated()) {
      // get the updated snapshot of the entity state by cloning current state;
      // it is safe to copy in place, since by this time no one else should have
      // a reference to the array
      TypeHelper.deepCopy(
          state, persister.getPropertyTypes(), persister.getPropertyCheckability(), state, session);
      if (persister.hasUpdateGeneratedProperties()) {
        // this entity defines property generation, so process those generated
        // values...
        persister.processUpdateGeneratedProperties(id, instance, state, session);
        if (persister.isVersionPropertyGenerated()) {
          nextVersion = Versioning.getVersion(state, persister);
        }
      }
      // have the entity entry perform its post-update processing, passing it the
      // updated state and the new version (if there is one).
      entry.postUpdate(instance, state, nextVersion);
    }

    if (persister.hasCache()) {
      if (persister.isCacheInvalidationRequired() || entry.getStatus() != Status.MANAGED) {
        persister.getCacheAccessStrategy().remove(session, ck);
      } else {
        // TODO: inefficient if that cache is just going to ignore the updated state!
        final CacheEntry ce = persister.buildCacheEntry(instance, state, nextVersion, getSession());
        cacheEntry = persister.getCacheEntryStructure().structure(ce);

        final boolean put = cacheUpdate(persister, previousVersion, ck);
        if (put && factory.getStatistics().isStatisticsEnabled()) {
          factory
              .getStatisticsImplementor()
              .secondLevelCachePut(getPersister().getCacheAccessStrategy().getRegion().getName());
        }
      }
    }

    session
        .getPersistenceContext()
        .getNaturalIdHelper()
        .manageSharedNaturalIdCrossReference(
            persister, id, state, previousNaturalIdValues, CachedNaturalIdValueSource.UPDATE);

    postUpdate();

    if (factory.getStatistics().isStatisticsEnabled() && !veto) {
      factory.getStatisticsImplementor().updateEntity(getPersister().getEntityName());
    }
  }
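A minimal sketch of a mapping that exercises the lockItem()/cacheUpdate() path of the update action above (the Product entity and cache-provider configuration are assumptions): a versioned entity with a read-write second-level cache. Statistics can confirm the second-level cache put.

import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Version;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;

@Entity
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
class Product {
  @Id Long id;
  @Version long version;
  String name;
}

class CachedUpdateExample {
  static void rename(SessionFactory sessionFactory, Long productId) {
    sessionFactory.getStatistics().setStatisticsEnabled(true);
    Session session = sessionFactory.openSession();
    session.beginTransaction();
    Product product = (Product) session.get(Product.class, productId);
    product.name = "renamed";
    session.getTransaction().commit(); // flush runs the update action shown above

    long cachePuts = sessionFactory.getStatistics().getSecondLevelCachePutCount();
    System.out.println("second-level cache puts: " + cachePuts);
    session.close();
  }
}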
  @Override
  public void delete(Serializable id, Object version, Object object, SessionImplementor session)
      throws HibernateException {
    final int span = getTableSpan();
    if (span > 1) {
      throw new HibernateException(
          "Hibernate OGM does not yet support entities spanning multiple tables");
    }
    final EntityMetamodel entityMetamodel = getEntityMetamodel();
    boolean isImpliedOptimisticLocking = !entityMetamodel.isVersioned() && isAllOrDirtyOptLocking();
    Object[] loadedState = null;
    if (isImpliedOptimisticLocking) {
      // need to treat this as if it were optimistic-lock="all" (dirty does *not* make sense);
      // first we need to locate the "loaded" state
      //
      // Note, it could potentially be a proxy, so perform the lookup the safe way...
      org.hibernate.engine.spi.EntityKey key = session.generateEntityKey(id, this);
      Object entity = session.getPersistenceContext().getEntity(key);
      if (entity != null) {
        EntityEntry entry = session.getPersistenceContext().getEntry(entity);
        loadedState = entry.getLoadedState();
      }
    }

    final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session);
    final Tuple resultset = gridDialect.getTuple(key, this.getTupleContext());
    final SessionFactoryImplementor factory = getFactory();
    if (isImpliedOptimisticLocking && loadedState != null) {
      // we need to utilize dynamic delete statements
      for (int j = span - 1; j >= 0; j--) {
        boolean[] versionability = getPropertyVersionability();

        // TODO do a diff on the properties value from resultset
        GridType[] types = gridPropertyTypes;

        for (int i = 0; i < entityMetamodel.getPropertySpan(); i++) {
          boolean include = isPropertyOfTable(i, j) && versionability[i];
          if (include) {
            final GridType type = types[i];
            final Object snapshotValue =
                type.nullSafeGet(resultset, getPropertyColumnNames(i), session, object);
            // TODO support other entity modes
            if (!type.isEqual(loadedState[i], snapshotValue, factory)) {
              if (factory.getStatistics().isStatisticsEnabled()) {
                factory.getStatisticsImplementor().optimisticFailure(getEntityName());
              }
              throw new StaleObjectStateException(getEntityName(), id);
            }
          }
        }
      }
    } else {
      if (entityMetamodel.isVersioned()) {
        checkVersionAndRaiseSOSE(id, version, session, resultset);
      }
    }

    for (int j = span - 1; j >= 0; j--) {
      if (isInverseTable(j)) {
        return;
      }
      if (log.isTraceEnabled()) {
        log.trace("Deleting entity: " + MessageHelper.infoString(this, id, factory));
        if (j == 0 && isVersioned()) {
          log.trace("Version: " + version);
        }
      }

      // delete association information
      // needs to be executed before the tuple removal because the AtomicMap in ISPN is cleared upon
      // removal
      new EntityDehydrator()
          .gridDialect(gridDialect)
          .gridPropertyTypes(gridPropertyTypes)
          .gridIdentifierType(gridIdentifierType)
          .id(id)
          .persister(this)
          .resultset(resultset)
          .session(session)
          .tableIndex(j)
          .onlyRemovePropertyMetadata()
          .dehydrate();

      gridDialect.removeTuple(key, getTupleContext());
    }
  }
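A sketch of the kind of mapping that makes isImpliedOptimisticLocking true in the delete path above (the LegacyRecord entity is hypothetical): an unversioned entity with optimistic locking over all properties, so the loaded state is compared field by field against the stored tuple before the row is removed.

import javax.persistence.Entity;
import javax.persistence.Id;
import org.hibernate.annotations.DynamicUpdate;
import org.hibernate.annotations.OptimisticLockType;
import org.hibernate.annotations.OptimisticLocking;

@Entity
@OptimisticLocking(type = OptimisticLockType.ALL)
@DynamicUpdate
class LegacyRecord {
  @Id Long id;
  String code;
  String description;
}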
 // TODO make AbstractEntityPersister#isModifiableEntity protected instead
 private boolean isModifiableEntity(EntityEntry entry) {
   return (entry == null ? isMutable() : entry.isModifiableEntity());
 }
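A short sketch of the kind of mapping this helper is concerned with (the CountryCode entity is a hypothetical piece of reference data): for an @Immutable mapping, isMutable() is false, so isModifiableEntity() above reports the entity as non-modifiable even before it has an entry in the persistence context.

import javax.persistence.Entity;
import javax.persistence.Id;
import org.hibernate.annotations.Immutable;

@Entity
@Immutable
class CountryCode {
  @Id String isoCode;
  String displayName;
}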
Example no. 12
0
  @Override
  public void execute() throws HibernateException {
    nullifyTransientReferencesIfNotAlready();

    final EntityPersister persister = getPersister();
    final SessionImplementor session = getSession();
    final Object instance = getInstance();
    final Serializable id = getId();

    final boolean veto = preInsert();

    // Don't need to lock the cache here, since if someone
    // else inserted the same pk first, the insert would fail

    if (!veto) {

      persister.insert(id, getState(), instance, session);

      final EntityEntry entry = session.getPersistenceContext().getEntry(instance);
      if (entry == null) {
        throw new AssertionFailure("possible non-threadsafe access to session");
      }

      entry.postInsert(getState());

      if (persister.hasInsertGeneratedProperties()) {
        persister.processInsertGeneratedProperties(id, instance, getState(), session);
        if (persister.isVersionPropertyGenerated()) {
          version = Versioning.getVersion(getState(), persister);
        }
        entry.postUpdate(instance, getState(), version);
      }

      getSession().getPersistenceContext().registerInsertedKey(getPersister(), getId());
    }

    final SessionFactoryImplementor factory = getSession().getFactory();

    if (isCachePutEnabled(persister, session)) {
      final CacheEntry ce = persister.buildCacheEntry(instance, getState(), version, session);
      cacheEntry = persister.getCacheEntryStructure().structure(ce);
      final CacheKey ck =
          session.generateCacheKey(
              id, persister.getIdentifierType(), persister.getRootEntityName());
      final boolean put = persister.getCacheAccessStrategy().insert(ck, cacheEntry, version);

      if (put && factory.getStatistics().isStatisticsEnabled()) {
        factory
            .getStatisticsImplementor()
            .secondLevelCachePut(getPersister().getCacheAccessStrategy().getRegion().getName());
      }
    }

    handleNaturalIdPostSaveNotifications(id);

    postInsert();

    if (factory.getStatistics().isStatisticsEnabled() && !veto) {
      factory.getStatisticsImplementor().insertEntity(getPersister().getEntityName());
    }

    markExecuted();
  }
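A minimal sketch of what makes hasInsertGeneratedProperties() true in the insert action above (the AuditedDocument entity is hypothetical, and the createdAt column is assumed to be filled by a database default or trigger): the insert action re-reads the generated value right after persister.insert().

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.annotations.Generated;
import org.hibernate.annotations.GenerationTime;

@Entity
class AuditedDocument {
  @Id @GeneratedValue Long id;
  String title;

  @Generated(GenerationTime.INSERT)
  @Column(insertable = false, updatable = false)
  java.sql.Timestamp createdAt; // value assumed to be produced by the database on insert
}

class InsertGeneratedExample {
  static void create(SessionFactory sessionFactory) {
    Session session = sessionFactory.openSession();
    session.beginTransaction();
    AuditedDocument doc = new AuditedDocument();
    doc.title = "manual";
    session.save(doc);
    session.getTransaction().commit(); // flush runs the insert action shown above
    System.out.println("database-assigned createdAt: " + doc.createdAt);
    session.close();
  }
}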
  /**
   * Handle the given delete event. This is the cascaded form.
   *
   * @param event The delete event.
   * @param transientEntities The cache of entities already deleted
   * @throws HibernateException
   */
  public void onDelete(DeleteEvent event, Set transientEntities) throws HibernateException {

    final EventSource source = event.getSession();

    final PersistenceContext persistenceContext = source.getPersistenceContext();
    Object entity = persistenceContext.unproxyAndReassociate(event.getObject());

    EntityEntry entityEntry = persistenceContext.getEntry(entity);
    final EntityPersister persister;
    final Serializable id;
    final Object version;

    if (entityEntry == null) {
      LOG.trace("Entity was not persistent in delete processing");

      persister = source.getEntityPersister(event.getEntityName(), entity);

      if (ForeignKeys.isTransient(persister.getEntityName(), entity, null, source)) {
        deleteTransientEntity(
            source, entity, event.isCascadeDeleteEnabled(), persister, transientEntities);
        // EARLY EXIT!!!
        return;
      }
      performDetachedEntityDeletionCheck(event);

      id = persister.getIdentifier(entity, source);

      if (id == null) {
        throw new TransientObjectException(
            "the detached instance passed to delete() had a null identifier");
      }

      final EntityKey key = source.generateEntityKey(id, persister);

      persistenceContext.checkUniqueness(key, entity);

      new OnUpdateVisitor(source, id, entity).process(entity, persister);

      version = persister.getVersion(entity);

      entityEntry =
          persistenceContext.addEntity(
              entity,
              (persister.isMutable() ? Status.MANAGED : Status.READ_ONLY),
              persister.getPropertyValues(entity),
              key,
              version,
              LockMode.NONE,
              true,
              persister,
              false);
    } else {
      LOG.trace("Deleting a persistent instance");

      if (entityEntry.getStatus() == Status.DELETED || entityEntry.getStatus() == Status.GONE) {
        LOG.trace("Object was already deleted");
        return;
      }
      persister = entityEntry.getPersister();
      id = entityEntry.getId();
      version = entityEntry.getVersion();
    }

    /*if ( !persister.isMutable() ) {
    	throw new HibernateException(
    			"attempted to delete an object of immutable class: " +
    			MessageHelper.infoString(persister)
    		);
    }*/

    if (invokeDeleteLifecycle(source, entity, persister)) {
      return;
    }

    deleteEntity(
        source,
        entity,
        entityEntry,
        event.isCascadeDeleteEnabled(),
        event.isOrphanRemovalBeforeUpdates(),
        persister,
        transientEntities);

    if (source.getFactory().getSettings().isIdentifierRollbackEnabled()) {
      persister.resetIdentifier(entity, id, version, source);
    }
  }
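A sketch of the identifier-rollback branch at the end of onDelete() (the Ticket entity and the bootstrap code are assumptions): with hibernate.use_identifier_rollback enabled, resetIdentifier() clears the generated id on the deleted instance, which is also the situation the identifier-rollback lookup in processDereferencedCollection() above compensates for.

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;

@Entity
class Ticket {
  @Id @GeneratedValue Long id;
  String subject;
}

class IdentifierRollbackExample {
  static SessionFactory buildFactory() {
    Configuration cfg = new Configuration();
    cfg.setProperty(AvailableSettings.USE_IDENTIFIER_ROLLBACK, "true");
    cfg.addAnnotatedClass(Ticket.class);
    return cfg.buildSessionFactory();
  }

  static void deleteAndObserve(SessionFactory sessionFactory, Long ticketId) {
    Session session = sessionFactory.openSession();
    session.beginTransaction();
    Ticket ticket = (Ticket) session.get(Ticket.class, ticketId);
    session.delete(ticket);
    session.getTransaction().commit();
    System.out.println("id after delete: " + ticket.id); // reset when identifier rollback is enabled
    session.close();
  }
}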
  /**
   * Perform the entity deletion. Well, as with most operations, does not really perform it; just
   * schedules an action/execution with the {@link org.hibernate.engine.spi.ActionQueue} for
   * execution during flush.
   *
   * @param session The originating session
   * @param entity The entity to delete
   * @param entityEntry The entity's entry in the {@link PersistenceContext}
   * @param isCascadeDeleteEnabled Is delete cascading enabled?
   * @param isOrphanRemovalBeforeUpdates Should the orphan removal be scheduled before any entity updates?
   * @param persister The entity persister.
   * @param transientEntities A cache of already deleted entities.
   */
  protected final void deleteEntity(
      final EventSource session,
      final Object entity,
      final EntityEntry entityEntry,
      final boolean isCascadeDeleteEnabled,
      final boolean isOrphanRemovalBeforeUpdates,
      final EntityPersister persister,
      final Set transientEntities) {

    if (LOG.isTraceEnabled()) {
      LOG.tracev(
          "Deleting {0}",
          MessageHelper.infoString(persister, entityEntry.getId(), session.getFactory()));
    }

    final PersistenceContext persistenceContext = session.getPersistenceContext();
    final Type[] propTypes = persister.getPropertyTypes();
    final Object version = entityEntry.getVersion();

    final Object[] currentState;
    if (entityEntry.getLoadedState() == null) {
      // ie. the entity came in from update()
      currentState = persister.getPropertyValues(entity);
    } else {
      currentState = entityEntry.getLoadedState();
    }

    final Object[] deletedState = createDeletedState(persister, currentState, session);
    entityEntry.setDeletedState(deletedState);

    session
        .getInterceptor()
        .onDelete(
            entity, entityEntry.getId(), deletedState, persister.getPropertyNames(), propTypes);

    // before any callbacks, etc, so subdeletions see that this deletion happened first
    persistenceContext.setEntryStatus(entityEntry, Status.DELETED);
    final EntityKey key = session.generateEntityKey(entityEntry.getId(), persister);

    cascadeBeforeDelete(session, persister, entity, entityEntry, transientEntities);

    new ForeignKeys.Nullifier(entity, true, false, session)
        .nullifyTransientReferences(entityEntry.getDeletedState(), propTypes);
    new Nullability(session).checkNullability(entityEntry.getDeletedState(), persister, true);
    persistenceContext.getNullifiableEntityKeys().add(key);

    if (isOrphanRemovalBeforeUpdates) {
      // TODO: The removeOrphan concept is a temporary "hack" for HHH-6484. This should be
      // removed once action/task ordering is improved.
      session
          .getActionQueue()
          .addAction(
              new OrphanRemovalAction(
                  entityEntry.getId(),
                  deletedState,
                  version,
                  entity,
                  persister,
                  isCascadeDeleteEnabled,
                  session));
    } else {
      // Ensures that containing deletions happen before sub-deletions
      session
          .getActionQueue()
          .addAction(
              new EntityDeleteAction(
                  entityEntry.getId(),
                  deletedState,
                  version,
                  entity,
                  persister,
                  isCascadeDeleteEnabled,
                  session));
    }

    cascadeAfterDelete(session, persister, entity, transientEntities);

    // the entry will be removed after the flush, and will no longer
    // override the stale snapshot
    // This is now handled by removeEntity() in EntityDeleteAction
    // persistenceContext.removeDatabaseSnapshot(key);
  }
  /**
   * Performs all the actual work needed to save an entity (well to get the save moved to the
   * execution queue).
   *
   * @param entity The entity to be saved
   * @param key The id to be used for saving the entity (or null, in the case of identity columns)
   * @param persister The entity's persister instance.
   * @param useIdentityColumn Should an identity column be used for id generation?
   * @param anything Generally cascade-specific information.
   * @param source The session which is the source of the current event.
   * @param requiresImmediateIdAccess Is access to the identifier required immediately after the
   *     completion of the save? persist(), for example, does not require this...
   * @return The id used to save the entity; may be null depending on the type of id generator used
   *     and the requiresImmediateIdAccess value
   */
  protected Serializable performSaveOrReplicate(
      Object entity,
      EntityKey key,
      EntityPersister persister,
      boolean useIdentityColumn,
      Object anything,
      EventSource source,
      boolean requiresImmediateIdAccess) {

    Serializable id = key == null ? null : key.getIdentifier();

    boolean inTxn = source.isTransactionInProgress();
    boolean shouldDelayIdentityInserts = !inTxn && !requiresImmediateIdAccess;

    // Put a placeholder in entries, so we don't recurse back and try to save() the
    // same object again. QUESTION: should this be done before onSave() is called?
    // likewise, should it be done before onUpdate()?
    EntityEntry original =
        source
            .getPersistenceContext()
            .addEntry(
                entity,
                Status.SAVING,
                null,
                null,
                id,
                null,
                LockMode.WRITE,
                useIdentityColumn,
                persister,
                false,
                false);

    cascadeBeforeSave(source, persister, entity, anything);

    Object[] values = persister.getPropertyValuesToInsert(entity, getMergeMap(anything), source);
    Type[] types = persister.getPropertyTypes();

    boolean substitute = substituteValuesIfNecessary(entity, id, values, persister, source);

    if (persister.hasCollections()) {
      substitute = substitute || visitCollectionsBeforeSave(entity, id, values, types, source);
    }

    if (substitute) {
      persister.setPropertyValues(entity, values);
    }

    TypeHelper.deepCopy(values, types, persister.getPropertyUpdateability(), values, source);

    AbstractEntityInsertAction insert =
        addInsertAction(
            values, id, entity, persister, useIdentityColumn, source, shouldDelayIdentityInserts);

    // postpone initializing id in case the insert has non-nullable transient dependencies
    // that are not resolved until cascadeAfterSave() is executed
    cascadeAfterSave(source, persister, entity, anything);
    if (useIdentityColumn && insert.isEarlyInsert()) {
      if (!EntityIdentityInsertAction.class.isInstance(insert)) {
        throw new IllegalStateException(
            "Insert should be using an identity column, but action is of unexpected type: "
                + insert.getClass().getName());
      }
      id = ((EntityIdentityInsertAction) insert).getGeneratedId();

      insert.handleNaturalIdPostSaveNotifications(id);
    }

    EntityEntry newEntry = source.getPersistenceContext().getEntry(entity);

    if (newEntry != original) {
      EntityEntryExtraState extraState = newEntry.getExtraState(EntityEntryExtraState.class);
      if (extraState == null) {
        newEntry.addExtraState(original.getExtraState(EntityEntryExtraState.class));
      }
    }

    return id;
  }
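A sketch of how the id generator affects the useIdentityColumn / isEarlyInsert() handling above (both entities are hypothetical): an IDENTITY-generated id is only known once the row is inserted, so the insert may run eagerly to obtain it; a SEQUENCE-generated id can be fetched up front and the insert stays queued until flush.

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@Entity
class IdentityBackedEvent {
  @Id @GeneratedValue(strategy = GenerationType.IDENTITY)
  Long id;
}

@Entity
class SequenceBackedEvent {
  @Id @GeneratedValue(strategy = GenerationType.SEQUENCE)
  Long id;
}

class IdGenerationExample {
  static void compare(SessionFactory sessionFactory) {
    Session session = sessionFactory.openSession();
    session.beginTransaction();

    IdentityBackedEvent byIdentity = new IdentityBackedEvent();
    session.save(byIdentity);              // may hit the database immediately to obtain the id
    System.out.println("identity id: " + byIdentity.id);

    SequenceBackedEvent bySequence = new SequenceBackedEvent();
    session.save(bySequence);              // id fetched from the sequence, insert stays queued
    System.out.println("sequence id: " + bySequence.id);

    session.getTransaction().commit();     // queued inserts execute at flush
    session.close();
  }
}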