/**
   * Coordinates the processing necessary to get things ready for execution as database calls by
   * prepping the session caches and moving the appropriate entities and collections to their
   * respective execution queues.
   *
   * @param event The flush event.
   * @throws HibernateException Error flushing caches to execution queues.
   */
  protected void flushEverythingToExecutions(FlushEvent event) throws HibernateException {

    LOG.trace("Flushing session");

    EventSource session = event.getSession();

    final PersistenceContext persistenceContext = session.getPersistenceContext();
    session.getInterceptor().preFlush(new LazyIterator(persistenceContext.getEntitiesByKey()));

    prepareEntityFlushes(session, persistenceContext);
    // we could move this inside if we wanted to
    // tolerate collection initializations during
    // collection dirty checking:
    prepareCollectionFlushes(persistenceContext);
    // now, any collections that are initialized
    // inside this block do not get updated - they
    // are ignored until the next flush

    persistenceContext.setFlushing(true);
    try {
      int entityCount = flushEntities(event, persistenceContext);
      int collectionCount = flushCollections(session, persistenceContext);

      event.setNumberOfEntitiesProcessed(entityCount);
      event.setNumberOfCollectionsProcessed(collectionCount);
    } finally {
      persistenceContext.setFlushing(false);
    }

    // some statistics
    logFlushResults(event);
  }
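  // The preFlush(...) call above hands the interceptor a lazy view of every entity in the
  // persistence context. A minimal sketch of hooking into that lifecycle with Hibernate's
  // org.hibernate.EmptyInterceptor; the class name and logging are illustrative, not part of
  // the listener above.
  public static class LoggingFlushInterceptor extends EmptyInterceptor {
    @Override
    public void preFlush(Iterator entities) {
      // invoked by flushEverythingToExecutions() before dirty checking begins
      while (entities.hasNext()) {
        System.out.println("about to flush: " + entities.next());
      }
    }

    @Override
    public void postFlush(Iterator entities) {
      // invoked after postFlush(SessionImplementor) rebuilds the collection mappings
    }
  }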
  @Override
  public void initializeObjects(
      List<EntityInfo> entityInfos,
      LinkedHashMap<EntityInfoLoadKey, Object> idToObjectMap,
      ObjectInitializationContext objectInitializationContext) {
    // Do not call isTimeOut here, as the caller might be the last (and biggest) batch on the list.
    final int numberOfObjectsToInitialize = entityInfos.size();

    if (numberOfObjectsToInitialize == 0) {
      if (log.isTraceEnabled()) {
        log.tracef("No object to initialize");
      }
      return;
    }

    SessionImplementor sessionImplementor =
        (SessionImplementor) objectInitializationContext.getSession();
    String entityName =
        objectInitializationContext
            .getSession()
            .getSessionFactory()
            .getClassMetadata(objectInitializationContext.getEntityType())
            .getEntityName();
    EntityPersister persister = sessionImplementor.getFactory().getEntityPersister(entityName);
    PersistenceContext persistenceContext = sessionImplementor.getPersistenceContext();

    // check the persistence context
    List<EntityInfo> remainingEntityInfos = new ArrayList<>(numberOfObjectsToInitialize);
    for (EntityInfo entityInfo : entityInfos) {
      if (ObjectLoaderHelper.areDocIdAndEntityIdIdentical(
          entityInfo, objectInitializationContext.getSession())) {
        EntityKey entityKey = sessionImplementor.generateEntityKey(entityInfo.getId(), persister);
        Object o = persistenceContext.getEntity(entityKey);
        if (o == null) {
          remainingEntityInfos.add(entityInfo);
        } else {
          EntityInfoLoadKey key = new EntityInfoLoadKey(entityInfo.getClazz(), entityInfo.getId());
          idToObjectMap.put(key, o);
        }
      } else {
        // if the document id != entity id, we can't use a persistence-context lookup
        remainingEntityInfos.add(entityInfo);
      }
    }

    // delegate initialization for only the remaining entity infos
    final int remainingSize = remainingEntityInfos.size();
    if (log.isTraceEnabled()) {
      log.tracef(
          "Initialized %d objects out of %d in the persistence context",
          (Integer) (numberOfObjectsToInitialize - remainingSize),
          (Integer) numberOfObjectsToInitialize);
    }

    if (remainingSize > 0) {
      delegate.initializeObjects(remainingEntityInfos, idToObjectMap, objectInitializationContext);
    }
  }
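  // The loop above probes the session's first-level cache so that already-managed instances
  // never reach the delegate loader. The same lookup in isolation, as a hedged sketch; the
  // entityName and id parameters are hypothetical and the calls mirror those used above.
  private static Object lookUpInFirstLevelCache(
      SessionImplementor si, String entityName, Serializable id) {
    EntityPersister persister = si.getFactory().getEntityPersister(entityName);
    EntityKey key = si.generateEntityKey(id, persister);
    return si.getPersistenceContext().getEntity(key); // null => not in the session cache
  }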
  private static void processDereferencedCollection(
      PersistentCollection coll, SessionImplementor session) {
    final PersistenceContext persistenceContext = session.getPersistenceContext();
    CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
    final CollectionPersister loadedPersister = entry.getLoadedPersister();

    if (LOG.isDebugEnabled() && loadedPersister != null) {
      LOG.debugf(
          "Collection dereferenced: %s",
          MessageHelper.collectionInfoString(
              loadedPersister, entry.getLoadedKey(), session.getFactory()));
    }

    // do a check
    boolean hasOrphanDelete = loadedPersister != null && loadedPersister.hasOrphanDelete();
    if (hasOrphanDelete) {
      Serializable ownerId =
          loadedPersister.getOwnerEntityPersister().getIdentifier(coll.getOwner(), session);
      if (ownerId == null) {
        // the owning entity may have been deleted and its identifier unset due to
        // identifier-rollback; in which case, try to look up its identifier from
        // the persistence context
        if (session.getFactory().getSettings().isIdentifierRollbackEnabled()) {
          EntityEntry ownerEntry = persistenceContext.getEntry(coll.getOwner());
          if (ownerEntry != null) {
            ownerId = ownerEntry.getId();
          }
        }
        if (ownerId == null) {
          throw new AssertionFailure(
              "Unable to determine collection owner identifier for orphan-delete processing");
        }
      }
      EntityKey key = session.generateEntityKey(ownerId, loadedPersister.getOwnerEntityPersister());
      Object owner = persistenceContext.getEntity(key);
      if (owner == null) {
        throw new AssertionFailure(
            "collection owner not associated with session: " + loadedPersister.getRole());
      }
      EntityEntry e = persistenceContext.getEntry(owner);
      // only collections belonging to deleted entities are allowed to be dereferenced in the case
      // of orphan delete
      if (e != null && e.getStatus() != Status.DELETED && e.getStatus() != Status.GONE) {
        throw new HibernateException(
            "A collection with cascade=\"all-delete-orphan\" was no longer referenced by the owning entity instance: "
                + loadedPersister.getRole());
      }
    }

    // do the work
    entry.setCurrentPersister(null);
    entry.setCurrentKey(null);
    prepareCollectionForUpdate(coll, entry, session.getFactory());
  }
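  // The HibernateException above is the one users hit when an all-delete-orphan collection is
  // replaced instead of mutated. A hedged illustration; Parent/Child and their mapping with
  // cascade="all-delete-orphan" are hypothetical.
  private static void illustrateOrphanDereference(Session session, Serializable parentId) {
    Parent p = (Parent) session.get(Parent.class, parentId);
    p.setChildren(new HashSet<Child>()); // dereferences the managed PersistentCollection
    session.flush();                     // -> HibernateException raised by the check above
    // the safe alternative keeps the managed instance: p.getChildren().clear();
  }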
 private void checkEmptyAuditSessionCache(Session session, String... auditEntityNames) {
   List<String> entityNames = Arrays.asList(auditEntityNames);
   PersistenceContext persistenceContext = ((SessionImplementor) session).getPersistenceContext();
   for (Object entry : persistenceContext.getEntityEntries().values()) {
     EntityEntry entityEntry = (EntityEntry) entry;
     if (entityNames.contains(entityEntry.getEntityName())) {
       assert false
           : "Audit data shall not be stored in the session level cache. This causes performance issues.";
     }
     Assert.assertFalse(
         "Revision entity shall not be stored in the session level cache. This causes performance issues.",
         DefaultRevisionEntity.class.getName().equals(entityEntry.getEntityName()));
   }
 }
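 // A hedged usage example for the assertion helper above; the audit entity name is a
 // placeholder for whatever *_AUD entities the test maps.
 private void exampleAuditCacheCheck(Session session) {
   session.getTransaction().commit();
   checkEmptyAuditSessionCache(session, "com.example.Customer_AUD");
 }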
  /**
   * 1. Recreate the collection key -> collection map
   * 2. Rebuild the collection entries
   * 3. Call Interceptor.postFlush()
   */
  protected void postFlush(SessionImplementor session) throws HibernateException {

    LOG.trace("Post flush");

    final PersistenceContext persistenceContext = session.getPersistenceContext();
    persistenceContext.getCollectionsByKey().clear();
    persistenceContext
        .getBatchFetchQueue()
        .clearSubselects(); // the database has changed now, so the subselect results need to be
                            // invalidated

    Iterator iter = persistenceContext.getCollectionEntries().entrySet().iterator();
    while (iter.hasNext()) {
      Map.Entry me = (Map.Entry) iter.next();
      CollectionEntry collectionEntry = (CollectionEntry) me.getValue();
      PersistentCollection persistentCollection = (PersistentCollection) me.getKey();
      collectionEntry.postFlush(persistentCollection);
      if (collectionEntry.getLoadedPersister() == null) {
        // if the collection is dereferenced, remove from the session cache
        // iter.remove(); //does not work, since the entrySet is not backed by the set
        persistenceContext.getCollectionEntries().remove(persistentCollection);
      } else {
        // otherwise recreate the mapping between the collection and its key
        CollectionKey collectionKey =
            new CollectionKey(collectionEntry.getLoadedPersister(), collectionEntry.getLoadedKey());
        persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
      }
    }

    session.getInterceptor().postFlush(new LazyIterator(persistenceContext.getEntitiesByKey()));
  }
  private static void processNeverReferencedCollection(
      PersistentCollection coll, SessionImplementor session) throws HibernateException {

    final PersistenceContext persistenceContext = session.getPersistenceContext();
    CollectionEntry entry = persistenceContext.getCollectionEntry(coll);

    if (LOG.isDebugEnabled()) {
      LOG.debugf(
          "Found collection with unloaded owner: %s",
          MessageHelper.collectionInfoString(
              entry.getLoadedPersister(), entry.getLoadedKey(), session.getFactory()));
    }

    entry.setCurrentPersister(entry.getLoadedPersister());
    entry.setCurrentKey(entry.getLoadedKey());

    prepareCollectionForUpdate(coll, entry, session.getFactory());
  }
  /**
   * 1. Recreate the collection key -> collection map
   * 2. Rebuild the collection entries
   * 3. Call Interceptor.postFlush()
   */
  protected void postFlush(SessionImplementor session) throws HibernateException {

    LOG.trace("Post flush");

    final PersistenceContext persistenceContext = session.getPersistenceContext();
    persistenceContext.getCollectionsByKey().clear();

    // the database has changed now, so the subselect results need to be invalidated
    // the batch fetching queues should also be cleared - especially the collection batch fetching
    // one
    persistenceContext.getBatchFetchQueue().clear();

    for (Map.Entry<PersistentCollection, CollectionEntry> me :
        IdentityMap.concurrentEntries(persistenceContext.getCollectionEntries())) {
      CollectionEntry collectionEntry = me.getValue();
      PersistentCollection persistentCollection = me.getKey();
      collectionEntry.postFlush(persistentCollection);
      if (collectionEntry.getLoadedPersister() == null) {
        // if the collection is dereferenced, unset its session reference and remove from the
        // session cache
        // iter.remove(); //does not work, since the entrySet is not backed by the set
        persistentCollection.unsetSession(session);
        persistenceContext.getCollectionEntries().remove(persistentCollection);
      } else {
        // otherwise recreate the mapping between the collection and its key
        CollectionKey collectionKey =
            new CollectionKey(collectionEntry.getLoadedPersister(), collectionEntry.getLoadedKey());
        persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
      }
    }
  }
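  // Both postFlush variants remove entries from getCollectionEntries() while looping, which is
  // only safe because IdentityMap.concurrentEntries() iterates over a snapshot rather than a
  // live view (the inline comment about iter.remove() records exactly that pitfall). The same
  // pattern with plain JDK types, as a sketch:
  private static void removeWhileIterating(java.util.IdentityHashMap<Object, Object> map) {
    @SuppressWarnings("unchecked")
    Map.Entry<Object, Object>[] snapshot = map.entrySet().toArray(new Map.Entry[0]);
    for (Map.Entry<Object, Object> e : snapshot) {
      if (e.getValue() == null) {
        map.remove(e.getKey()); // safe: we iterate the snapshot, not the live entry set
      }
    }
  }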
 @SuppressWarnings(value = {"unchecked"})
 private void logFlushResults(FlushEvent event) {
   if (!LOG.isDebugEnabled()) {
     return;
   }
   final EventSource session = event.getSession();
   final PersistenceContext persistenceContext = session.getPersistenceContext();
   LOG.debugf(
       "Flushed: %s insertions, %s updates, %s deletions to %s objects",
       session.getActionQueue().numberOfInsertions(),
       session.getActionQueue().numberOfUpdates(),
       session.getActionQueue().numberOfDeletions(),
       persistenceContext.getNumberOfManagedEntities());
   LOG.debugf(
       "Flushed: %s (re)creations, %s updates, %s removals to %s collections",
       session.getActionQueue().numberOfCollectionCreations(),
       session.getActionQueue().numberOfCollectionUpdates(),
       session.getActionQueue().numberOfCollectionRemovals(),
       persistenceContext.getCollectionEntries().size());
   new EntityPrinter(session.getFactory())
       .toString(persistenceContext.getEntitiesByKey().entrySet());
 }
  /** Initialize the flags of the CollectionEntry, including the dirty check. */
  private void prepareCollectionFlushes(PersistenceContext persistenceContext)
      throws HibernateException {

    // Initialize dirty flags for arrays + collections with composite elements
    // and reset reached, doupdate, etc.

    LOG.debug("Dirty checking collections");

    for (Map.Entry<PersistentCollection, CollectionEntry> entry :
        IdentityMap.concurrentEntries(
            (Map<PersistentCollection, CollectionEntry>)
                persistenceContext.getCollectionEntries())) {
      entry.getValue().preFlush(entry.getKey());
    }
  }
  /**
   * process cascade save/update at the start of a flush to discover any newly referenced entity
   * that must be passed to saveOrUpdate(), and also apply orphan delete
   */
  private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext)
      throws HibernateException {

    LOG.debug("Processing flush-time cascades");

    final Object anything = getAnything();
    // safe from concurrent modification because of how concurrentEntries() is implemented on
    // IdentityMap
    for (Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries()) {
      // for ( Map.Entry me : IdentityMap.concurrentEntries( persistenceContext.getEntityEntries() ) ) {
      EntityEntry entry = (EntityEntry) me.getValue();
      Status status = entry.getStatus();
      if (status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY) {
        cascadeOnFlush(session, entry.getPersister(), me.getKey(), anything);
      }
    }
  }
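  // A hedged illustration of what this flush-time cascade discovers; Parent/Child and their
  // cascade="save-update" mapping are hypothetical.
  private static void illustrateFlushTimeCascade(EventSource session, Serializable parentId) {
    Parent p = (Parent) session.get(Parent.class, parentId);
    p.getChildren().add(new Child()); // transient instance, never passed to save()
    session.flush();                  // prepareEntityFlushes() cascades the insert here
  }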
  /**
   * 1. Detect any dirty entities
   * 2. Schedule any entity updates
   * 3. Search out any reachable collections
   */
  private int flushEntities(final FlushEvent event, final PersistenceContext persistenceContext)
      throws HibernateException {

    LOG.trace("Flushing entities and processing referenced collections");

    final EventSource source = event.getSession();
    final Iterable<FlushEntityEventListener> flushListeners =
        source
            .getFactory()
            .getServiceRegistry()
            .getService(EventListenerRegistry.class)
            .getEventListenerGroup(EventType.FLUSH_ENTITY)
            .listeners();

    // Among other things, updateReachables() will recursively load all
    // collections that are moving roles. This might cause entities to
    // be loaded.

    // So this needs to be safe from concurrent modification problems.

    final Map.Entry<Object, EntityEntry>[] entityEntries =
        persistenceContext.reentrantSafeEntityEntries();
    final int count = entityEntries.length;

    for (Map.Entry<Object, EntityEntry> me : entityEntries) {

      // Update the status of the object and if necessary, schedule an update

      EntityEntry entry = me.getValue();
      Status status = entry.getStatus();

      if (status != Status.LOADING && status != Status.GONE) {
        final FlushEntityEvent entityEvent = new FlushEntityEvent(source, me.getKey(), entry);
        for (FlushEntityEventListener listener : flushListeners) {
          listener.onFlushEntity(entityEvent);
        }
      }
    }

    source.getActionQueue().sortActions();

    return count;
  }
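  // The listener group consulted above is extensible. A sketch of appending a custom
  // FLUSH_ENTITY listener, typically done from an org.hibernate.integrator.spi.Integrator;
  // MyFlushEntityListener is hypothetical and must implement FlushEntityEventListener.
  private static void registerExtraFlushListener(SessionFactoryImplementor factory) {
    factory
        .getServiceRegistry()
        .getService(EventListenerRegistry.class)
        .getEventListenerGroup(EventType.FLUSH_ENTITY)
        .appendListener(new MyFlushEntityListener());
  }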
  /**
   * process any unreferenced collections and then inspect all known collections, scheduling
   * creates/removes/updates
   */
  @SuppressWarnings("unchecked")
  private int flushCollections(
      final EventSource session, final PersistenceContext persistenceContext)
      throws HibernateException {
    LOG.trace("Processing unreferenced collections");

    final Map.Entry<PersistentCollection, CollectionEntry>[] entries =
        IdentityMap.concurrentEntries(
            (Map<PersistentCollection, CollectionEntry>) persistenceContext.getCollectionEntries());

    final int count = entries.length;

    for (Map.Entry<PersistentCollection, CollectionEntry> me : entries) {
      CollectionEntry ce = me.getValue();
      if (!ce.isReached() && !ce.isIgnore()) {
        Collections.processUnreachableCollection(me.getKey(), session);
      }
    }

    // Schedule updates to collections:

    LOG.trace("Scheduling collection removes/(re)creates/updates");

    ActionQueue actionQueue = session.getActionQueue();
    for (Map.Entry<PersistentCollection, CollectionEntry> me :
        IdentityMap.concurrentEntries(
            (Map<PersistentCollection, CollectionEntry>)
                persistenceContext.getCollectionEntries())) {
      PersistentCollection coll = me.getKey();
      CollectionEntry ce = me.getValue();

      if (ce.isDorecreate()) {
        session.getInterceptor().onCollectionRecreate(coll, ce.getCurrentKey());
        actionQueue.addAction(
            new CollectionRecreateAction(
                coll, ce.getCurrentPersister(), ce.getCurrentKey(), session));
      }
      if (ce.isDoremove()) {
        session.getInterceptor().onCollectionRemove(coll, ce.getLoadedKey());
        actionQueue.addAction(
            new CollectionRemoveAction(
                coll,
                ce.getLoadedPersister(),
                ce.getLoadedKey(),
                ce.isSnapshotEmpty(coll),
                session));
      }
      if (ce.isDoupdate()) {
        session.getInterceptor().onCollectionUpdate(coll, ce.getLoadedKey());
        actionQueue.addAction(
            new CollectionUpdateAction(
                coll,
                ce.getLoadedPersister(),
                ce.getLoadedKey(),
                ce.isSnapshotEmpty(coll),
                session));
      }

      // todo : I'm not sure the !wasInitialized part should really be part of this check
      if (!coll.wasInitialized() && coll.hasQueuedOperations()) {
        actionQueue.addAction(
            new QueuedOperationCollectionAction(
                coll, ce.getLoadedPersister(), ce.getLoadedKey(), session));
      }
    }

    actionQueue.sortCollectionActions();

    return count;
  }
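  // A hedged map from user-level operations to the actions scheduled above; Order/OrderLine
  // are hypothetical mappings, and the exact outcome depends on cascade settings.
  private static void illustrateCollectionActions(
      EventSource session, Order order, OrderLine line) {
    order.getLines().add(line);                 // marks doupdate -> CollectionUpdateAction
    order.setLines(new ArrayList<OrderLine>()); // old role removed, new role (re)created
    session.delete(order);                      // doremove -> CollectionRemoveAction at flush
  }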
  /**
   * Handle the given delete event. This is the cascaded form.
   *
   * @param event The delete event.
   * @param transientEntities The cache of entities already deleted
   * @throws HibernateException Indicates a problem performing the delete.
   */
  public void onDelete(DeleteEvent event, Set transientEntities) throws HibernateException {

    final EventSource source = event.getSession();

    final PersistenceContext persistenceContext = source.getPersistenceContext();
    Object entity = persistenceContext.unproxyAndReassociate(event.getObject());

    EntityEntry entityEntry = persistenceContext.getEntry(entity);
    final EntityPersister persister;
    final Serializable id;
    final Object version;

    if (entityEntry == null) {
      LOG.trace("Entity was not persistent in delete processing");

      persister = source.getEntityPersister(event.getEntityName(), entity);

      if (ForeignKeys.isTransient(persister.getEntityName(), entity, null, source)) {
        deleteTransientEntity(
            source, entity, event.isCascadeDeleteEnabled(), persister, transientEntities);
        // EARLY EXIT!!!
        return;
      }
      performDetachedEntityDeletionCheck(event);

      id = persister.getIdentifier(entity, source);

      if (id == null) {
        throw new TransientObjectException(
            "the detached instance passed to delete() had a null identifier");
      }

      final EntityKey key = source.generateEntityKey(id, persister);

      persistenceContext.checkUniqueness(key, entity);

      new OnUpdateVisitor(source, id, entity).process(entity, persister);

      version = persister.getVersion(entity);

      entityEntry =
          persistenceContext.addEntity(
              entity,
              (persister.isMutable() ? Status.MANAGED : Status.READ_ONLY),
              persister.getPropertyValues(entity),
              key,
              version,
              LockMode.NONE,
              true,
              persister,
              false);
    } else {
      LOG.trace("Deleting a persistent instance");

      if (entityEntry.getStatus() == Status.DELETED || entityEntry.getStatus() == Status.GONE) {
        LOG.trace("Object was already deleted");
        return;
      }
      persister = entityEntry.getPersister();
      id = entityEntry.getId();
      version = entityEntry.getVersion();
    }

    /*
    if ( !persister.isMutable() ) {
      throw new HibernateException(
          "attempted to delete an object of immutable class: " + MessageHelper.infoString( persister ) );
    }
    */

    if (invokeDeleteLifecycle(source, entity, persister)) {
      return;
    }

    deleteEntity(
        source,
        entity,
        entityEntry,
        event.isCascadeDeleteEnabled(),
        event.isOrphanRemovalBeforeUpdates(),
        persister,
        transientEntities);

    if (source.getFactory().getSettings().isIdentifierRollbackEnabled()) {
      persister.resetIdentifier(entity, id, version, source);
    }
  }
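  // A hedged sketch of the detached branch above; Customer is hypothetical. Deleting an
  // instance the session never loaded takes the entityEntry == null path, re-associates the
  // entity, and schedules the delete at flush.
  private static void illustrateDetachedDelete(Session session) {
    Customer c = new Customer();
    c.setId(42L);      // detached reference carrying its identifier
    session.delete(c); // entityEntry == null -> detached-deletion checks run above
    // a null identifier here would raise the TransientObjectException seen above
  }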
  /**
   * Perform the entity deletion. Well, as with most operations, does not really perform it; just
   * schedules an action/execution with the {@link org.hibernate.engine.spi.ActionQueue} for
   * execution during flush.
   *
   * @param session The originating session
   * @param entity The entity to delete
   * @param entityEntry The entity's entry in the {@link PersistenceContext}
   * @param isCascadeDeleteEnabled Is delete cascading enabled?
   * @param persister The entity persister.
   * @param transientEntities A cache of already deleted entities.
   */
  protected final void deleteEntity(
      final EventSource session,
      final Object entity,
      final EntityEntry entityEntry,
      final boolean isCascadeDeleteEnabled,
      final boolean isOrphanRemovalBeforeUpdates,
      final EntityPersister persister,
      final Set transientEntities) {

    if (LOG.isTraceEnabled()) {
      LOG.tracev(
          "Deleting {0}",
          MessageHelper.infoString(persister, entityEntry.getId(), session.getFactory()));
    }

    final PersistenceContext persistenceContext = session.getPersistenceContext();
    final Type[] propTypes = persister.getPropertyTypes();
    final Object version = entityEntry.getVersion();

    final Object[] currentState;
    if (entityEntry.getLoadedState() == null) {
      // ie. the entity came in from update()
      currentState = persister.getPropertyValues(entity);
    } else {
      currentState = entityEntry.getLoadedState();
    }

    final Object[] deletedState = createDeletedState(persister, currentState, session);
    entityEntry.setDeletedState(deletedState);

    session
        .getInterceptor()
        .onDelete(
            entity, entityEntry.getId(), deletedState, persister.getPropertyNames(), propTypes);

    // before any callbacks, etc, so subdeletions see that this deletion happened first
    persistenceContext.setEntryStatus(entityEntry, Status.DELETED);
    final EntityKey key = session.generateEntityKey(entityEntry.getId(), persister);

    cascadeBeforeDelete(session, persister, entity, entityEntry, transientEntities);

    new ForeignKeys.Nullifier(entity, true, false, session)
        .nullifyTransientReferences(entityEntry.getDeletedState(), propTypes);
    new Nullability(session).checkNullability(entityEntry.getDeletedState(), persister, true);
    persistenceContext.getNullifiableEntityKeys().add(key);

    if (isOrphanRemovalBeforeUpdates) {
      // TODO: The removeOrphan concept is a temporary "hack" for HHH-6484.  This should be removed
      // once action/task
      // ordering is improved.
      session
          .getActionQueue()
          .addAction(
              new OrphanRemovalAction(
                  entityEntry.getId(),
                  deletedState,
                  version,
                  entity,
                  persister,
                  isCascadeDeleteEnabled,
                  session));
    } else {
      // Ensures that containing deletions happen before sub-deletions
      session
          .getActionQueue()
          .addAction(
              new EntityDeleteAction(
                  entityEntry.getId(),
                  deletedState,
                  version,
                  entity,
                  persister,
                  isCascadeDeleteEnabled,
                  session));
    }

    cascadeAfterDelete(session, persister, entity, transientEntities);

    // the entry will be removed after the flush, and will no longer
    // override the stale snapshot
    // This is now handled by removeEntity() in EntityDeleteAction
    // persistenceContext.removeDatabaseSnapshot(key);
  }
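  // Both onDelete() and processDereferencedCollection() consult isIdentifierRollbackEnabled().
  // The switch behind it is Hibernate's hibernate.use_identifier_rollback property; a hedged
  // configuration sketch:
  private static java.util.Properties identifierRollbackSettings() {
    java.util.Properties props = new java.util.Properties();
    // when true, persister.resetIdentifier(...) clears a deleted entity's identifier
    // (and version) after the delete executes
    props.setProperty("hibernate.use_identifier_rollback", "true");
    return props;
  }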