public void noCascade( EventSource session, Object child, Object parent, EntityPersister persister, int propertyIndex) { if (child == null) { return; } Type type = persister.getPropertyTypes()[propertyIndex]; if (type.isEntityType()) { String childEntityName = ((EntityType) type).getAssociatedEntityName(session.getFactory()); if (!isInManagedState(child, session) && !(child instanceof HibernateProxy) // a proxy cannot be transient and it breaks // ForeignKeys.isTransient && ForeignKeys.isTransient(childEntityName, child, null, session)) { String parentEntiytName = persister.getEntityName(); String propertyName = persister.getPropertyNames()[propertyIndex]; throw new TransientObjectException( "object references an unsaved transient instance - " + "save the transient instance before flushing: " + parentEntiytName + "." + propertyName + " -> " + childEntityName); } } }
/**
 * Verifies that no immutable (non-updateable) natural-identifier property of the entity has
 * been altered, throwing a {@link HibernateException} if one has.
 *
 * @param persister the entity's persister
 * @param entry the entity's persistence-context entry
 * @param current the entity's current property values
 * @param loaded the entity's loaded property values, or {@code null} if no loaded state is
 *     available (e.g. the entity was re-attached via update())
 * @param entityMode the entity representation mode
 * @param session the originating session
 */
private void checkNaturalId(
    EntityPersister persister,
    EntityEntry entry,
    Object[] current,
    Object[] loaded,
    EntityMode entityMode,
    SessionImplementor session) {
  // Only entities with a natural identifier need checking; READ_ONLY entities are skipped.
  if (persister.hasNaturalIdentifier() && entry.getStatus() != Status.READ_ONLY) {
    // Lazily fetched natural-id snapshot, indexed by natural-id position (not property index).
    Object[] snapshot = null;
    Type[] types = persister.getPropertyTypes();
    int[] props = persister.getNaturalIdentifierProperties();
    boolean[] updateable = persister.getPropertyUpdateability();
    for (int i = 0; i < props.length; i++) {
      int prop = props[i];
      // Mutable natural-id properties may legitimately change; only immutable ones are checked.
      if (!updateable[prop]) {
        Object loadedVal;
        if (loaded == null) {
          // No loaded state available; fall back to the persistence-context snapshot.
          if (snapshot == null) {
            snapshot = session.getPersistenceContext().getNaturalIdSnapshot(entry.getId(), persister);
          }
          // NOTE: the snapshot is indexed by natural-id position i, whereas the loaded
          // state (below) is indexed by the property index prop.
          loadedVal = snapshot[i];
        } else {
          loadedVal = loaded[prop];
        }
        if (!types[prop].isEqual(current[prop], loadedVal, entityMode)) {
          throw new HibernateException(
              "immutable natural identifier of an instance of "
                  + persister.getEntityName()
                  + " was altered");
        }
      }
    }
  }
}
/** * Flushes a single entity's state to the database, by scheduling an update action, if necessary */ public void onFlushEntity(FlushEntityEvent event) throws HibernateException { final Object entity = event.getEntity(); final EntityEntry entry = event.getEntityEntry(); final EventSource session = event.getSession(); final EntityPersister persister = entry.getPersister(); final Status status = entry.getStatus(); final EntityMode entityMode = session.getEntityMode(); final Type[] types = persister.getPropertyTypes(); final boolean mightBeDirty = entry.requiresDirtyCheck(entity); final Object[] values = getValues(entity, entry, entityMode, mightBeDirty, session); event.setPropertyValues(values); // TODO: avoid this for non-new instances where mightBeDirty==false boolean substitute = wrapCollections(session, persister, types, values); if (isUpdateNecessary(event, mightBeDirty)) { substitute = scheduleUpdate(event) || substitute; } if (status != Status.DELETED) { // now update the object .. has to be outside the main if block above (because of collections) if (substitute) persister.setPropertyValues(entity, values, entityMode); // Search for collections by reachability, updating their role. // We don't want to touch collections reachable from a deleted object if (persister.hasCollections()) { new FlushVisitor(session, entity).processEntityPropertyValues(values, types); } } }
private Object[] createDeletedState( EntityPersister persister, Object[] currentState, EventSource session) { Type[] propTypes = persister.getPropertyTypes(); final Object[] deletedState = new Object[propTypes.length]; // TypeFactory.deepCopy( currentState, propTypes, persister.getPropertyUpdateability(), // deletedState, session ); boolean[] copyability = new boolean[propTypes.length]; java.util.Arrays.fill(copyability, true); TypeHelper.deepCopy(currentState, propTypes, copyability, deletedState, session); return deletedState; }
/**
 * Constructs a new key for caching natural-identifier resolutions in the second-level cache.
 * Note that an entity name should always be the root entity name, not a subclass entity name.
 *
 * <p>The hash code is computed eagerly from the entity name, tenant id, and the natural-id
 * values; the string representation is built lazily on first use.
 *
 * @param naturalIdValues The naturalIdValues associated with the cached data
 * @param persister The persister for the entity
 * @param session The originating session
 */
public NaturalIdCacheKey(
    final Object[] naturalIdValues, final EntityPersister persister, final SessionImplementor session) {
  this.entityName = persister.getRootEntityName();
  this.tenantId = session.getTenantIdentifier();
  this.naturalIdValues = new Serializable[naturalIdValues.length];
  final SessionFactoryImplementor factory = session.getFactory();
  final int[] naturalIdPropertyIndexes = persister.getNaturalIdentifierProperties();
  final Type[] propertyTypes = persister.getPropertyTypes();
  // Standard 31-based hash accumulation over entity name, tenant id, and each value.
  final int prime = 31;
  int result = 1;
  result = prime * result + ((this.entityName == null) ? 0 : this.entityName.hashCode());
  result = prime * result + ((this.tenantId == null) ? 0 : this.tenantId.hashCode());
  for (int i = 0; i < naturalIdValues.length; i++) {
    // naturalIdPropertyIndexes maps natural-id position i to the entity property index.
    final Type type = propertyTypes[naturalIdPropertyIndexes[i]];
    final Object value = naturalIdValues[i];
    result = prime * result + (value != null ? type.getHashCode(value, factory) : 0);
    // Store the disassembled (cacheable, serializable) form of each value.
    this.naturalIdValues[i] = type.disassemble(value, session, null);
  }
  this.hashCode = result;
  // toString is deferred: it is only needed for logging/debugging, so build it lazily.
  this.toString =
      new ValueHolder<String>(
          new ValueHolder.DeferredInitializer<String>() {
            @Override
            public String initialize() {
              // Complex toString is needed as naturalIds for entities are not simply based on
              // a single value like primary keys; the only sane way to differentiate the keys
              // is to include the disassembled values in the string.
              final StringBuilder toStringBuilder =
                  new StringBuilder(entityName).append("##NaturalId[");
              for (int i = 0; i < naturalIdValues.length; i++) {
                toStringBuilder.append(naturalIdValues[i]);
                if (i + 1 < naturalIdValues.length) {
                  toStringBuilder.append(", ");
                }
              }
              toStringBuilder.append("]");
              return toStringBuilder.toString();
            }
          });
}
/**
 * Determines whether the entity owns any dirty collections, recording the result on the event.
 *
 * @param event the flush event for the entity
 * @param persister the entity's persister
 * @param status the entity's current status
 * @return {@code true} if a dirty collection was found
 */
private boolean hasDirtyCollections(
    FlushEntityEvent event, EntityPersister persister, Status status) {
  if (!isCollectionDirtyCheckNecessary(persister, status)) {
    return false;
  }
  final DirtyCollectionSearchVisitor searchVisitor =
      new DirtyCollectionSearchVisitor(event.getSession(), persister.getPropertyVersionability());
  searchVisitor.processEntityPropertyValues(event.getPropertyValues(), persister.getPropertyTypes());
  final boolean dirtyCollectionFound = searchVisitor.wasDirtyCollectionFound();
  event.setHasDirtyCollection(dirtyCollectionFound);
  return dirtyCollectionFound;
}
/**
 * Fires the JPA PreUpdate entity callback (for non-deleted entities) before delegating to the
 * superclass interceptor handling.
 *
 * @return {@code true} if either the callback or the superclass interceptor dirtied the state
 */
@Override
protected boolean invokeInterceptor(
    SessionImplementor session,
    Object entity,
    EntityEntry entry,
    Object[] values,
    EntityPersister persister) {
  // The PreUpdate callback may mutate the entity; if it reports a change, the mutated
  // state is copied back into the values array. Deleted entities get no callback.
  boolean callbackDirtiedState = false;
  if (entry.getStatus() != Status.DELETED && callbackHandler.preUpdate(entity)) {
    callbackDirtiedState = copyState(entity, persister.getPropertyTypes(), values, session.getFactory());
  }
  // The superclass interceptor always runs, regardless of the callback outcome.
  final boolean interceptorDirtiedState =
      super.invokeInterceptor(session, entity, entry, values, persister);
  return interceptorDirtiedState || callbackDirtiedState;
}
/**
 * Delegates the dirty-state decision for the flushing entity to the session's interceptor.
 *
 * @return the interceptor's verdict: {@code true} if it modified the property values
 */
protected boolean invokeInterceptor(
    SessionImplementor session,
    Object entity,
    EntityEntry entry,
    final Object[] values,
    EntityPersister persister) {
  final Serializable entityId = entry.getId();
  final Object[] loadedState = entry.getLoadedState();
  return session
      .getInterceptor()
      .onFlushDirty(
          entity,
          entityId,
          values,
          loadedState,
          persister.getPropertyNames(),
          persister.getPropertyTypes());
}
/** * Perform any property value substitution that is necessary (interceptor callback, version * initialization...) * * @param entity The entity * @param id The entity identifier * @param values The snapshot entity state * @param persister The entity persister * @param source The originating session * @return True if the snapshot state changed such that reinjection of the values into the entity * is required. */ protected boolean substituteValuesIfNecessary( Object entity, Serializable id, Object[] values, EntityPersister persister, SessionImplementor source) { boolean substitute = source .getInterceptor() .onSave(entity, id, values, persister.getPropertyNames(), persister.getPropertyTypes()); // keep the existing version number in the case of replicate! if (persister.isVersioned()) { substitute = Versioning.seedVersion( values, persister.getVersionProperty(), persister.getVersionType(), source) || substitute; } return substitute; }
/**
 * Reconstructs a fully-initialized entity instance from a second-level cache entry, registers
 * it with the persistence context, and fires PostLoad listeners.
 *
 * @param entry the cached entity state
 * @param id the entity identifier
 * @param persister the persister the load was requested against (may be a superclass of the
 *     cached subclass)
 * @param event the originating load event
 * @return the assembled, managed entity instance
 * @throws HibernateException on assembly failure
 */
private Object assembleCacheEntry(
    final CacheEntry entry, final Serializable id, final EntityPersister persister, final LoadEvent event)
    throws HibernateException {
  final Object optionalObject = event.getInstanceToLoad();
  final EventSource session = event.getSession();
  final SessionFactoryImplementor factory = session.getFactory();
  if (log.isTraceEnabled()) {
    log.trace(
        "assembling entity from second-level cache: "
            + MessageHelper.infoString(persister, id, factory));
  }
  // The cache records the concrete subclass; use its persister for assembly.
  EntityPersister subclassPersister = factory.getEntityPersister(entry.getSubclass());
  Object result =
      optionalObject == null ? session.instantiate(subclassPersister, id) : optionalObject;
  // make it circular-reference safe: register the (still uninitialized) instance before
  // assembling its state, so self/cyclic references resolve to this instance
  TwoPhaseLoad.addUninitializedCachedEntity(
      new EntityKey(id, subclassPersister, session.getEntityMode()),
      result,
      subclassPersister,
      LockMode.NONE,
      entry.areLazyPropertiesUnfetched(),
      entry.getVersion(),
      session);
  Type[] types = subclassPersister.getPropertyTypes();
  // initializes result by side-effect
  Object[] values =
      entry.assemble(result, id, subclassPersister, session.getInterceptor(), session);
  TypeFactory.deepCopy(values, types, subclassPersister.getPropertyUpdateability(), values, session);
  Object version = Versioning.getVersion(values, subclassPersister);
  if (log.isTraceEnabled()) log.trace("Cached Version: " + version);
  final PersistenceContext persistenceContext = session.getPersistenceContext();
  persistenceContext.addEntry(
      result,
      Status.MANAGED,
      values,
      null,
      id,
      version,
      LockMode.NONE,
      true,
      subclassPersister,
      false,
      entry.areLazyPropertiesUnfetched());
  subclassPersister.afterInitialize(result, entry.areLazyPropertiesUnfetched(), session);
  persistenceContext.initializeNonLazyCollections();
  // upgrade the lock if necessary:
  // lock(result, lockMode);

  // PostLoad is needed for EJB3
  // TODO: reuse the PostLoadEvent...
  PostLoadEvent postLoadEvent =
      new PostLoadEvent(session).setEntity(result).setId(id).setPersister(persister);
  PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners();
  for (int i = 0; i < listeners.length; i++) {
    listeners[i].onPostLoad(postLoadEvent);
  }
  return result;
}
/**
 * Executes the scheduled entity update: fires pre-update callbacks, locks and refreshes the
 * second-level cache entry, performs the SQL update (unless vetoed), re-snapshots the entity
 * state, and updates statistics.
 *
 * @throws HibernateException on any failure during the update
 */
@Override
public void execute() throws HibernateException {
  final Serializable id = getId();
  final EntityPersister persister = getPersister();
  final SessionImplementor session = getSession();
  final Object instance = getInstance();
  // preUpdate() may veto the actual SQL update (listeners/interceptors still run).
  final boolean veto = preUpdate();
  final SessionFactoryImplementor factory = session.getFactory();
  Object previousVersion = this.previousVersion;
  if (persister.isVersionPropertyGenerated()) {
    // we need to grab the version value from the entity, otherwise
    // we have issues with generated-version entities that may have
    // multiple actions queued during the same flush
    previousVersion = persister.getVersion(instance);
  }
  // Lock the second-level cache entry for the duration of the update.
  final Object ck;
  if (persister.hasCache()) {
    final EntityRegionAccessStrategy cache = persister.getCacheAccessStrategy();
    ck = cache.generateCacheKey(id, persister, factory, session.getTenantIdentifier());
    lock = cache.lockItem(session, ck, previousVersion);
  } else {
    ck = null;
  }
  if (!veto) {
    persister.update(
        id,
        state,
        dirtyFields,
        hasDirtyCollection,
        previousState,
        previousVersion,
        instance,
        rowId,
        session);
  }
  final EntityEntry entry = session.getPersistenceContext().getEntry(instance);
  if (entry == null) {
    throw new AssertionFailure("possible nonthreadsafe access to session");
  }
  if (entry.getStatus() == Status.MANAGED || persister.isVersionPropertyGenerated()) {
    // get the updated snapshot of the entity state by cloning current state;
    // it is safe to copy in place, since by this time no-one else (should have)
    // has a reference to the array
    TypeHelper.deepCopy(
        state, persister.getPropertyTypes(), persister.getPropertyCheckability(), state, session);
    if (persister.hasUpdateGeneratedProperties()) {
      // this entity defines property generation, so process those generated values...
      persister.processUpdateGeneratedProperties(id, instance, state, session);
      if (persister.isVersionPropertyGenerated()) {
        nextVersion = Versioning.getVersion(state, persister);
      }
    }
    // have the entity entry doAfterTransactionCompletion post-update processing, passing it
    // the update state and the new version (if one).
    entry.postUpdate(instance, state, nextVersion);
  }
  if (persister.hasCache()) {
    if (persister.isCacheInvalidationRequired() || entry.getStatus() != Status.MANAGED) {
      persister.getCacheAccessStrategy().remove(session, ck);
    } else {
      // TODO: inefficient if that cache is just going to ignore the updated state!
      final CacheEntry ce = persister.buildCacheEntry(instance, state, nextVersion, getSession());
      cacheEntry = persister.getCacheEntryStructure().structure(ce);
      final boolean put = cacheUpdate(persister, previousVersion, ck);
      if (put && factory.getStatistics().isStatisticsEnabled()) {
        factory
            .getStatisticsImplementor()
            .secondLevelCachePut(getPersister().getCacheAccessStrategy().getRegion().getName());
      }
    }
  }
  // Keep the shared natural-id cross-reference in sync with the updated state.
  session
      .getPersistenceContext()
      .getNaturalIdHelper()
      .manageSharedNaturalIdCrossReference(
          persister, id, state, previousNaturalIdValues, CachedNaturalIdValueSource.UPDATE);
  postUpdate();
  if (factory.getStatistics().isStatisticsEnabled() && !veto) {
    factory.getStatisticsImplementor().updateEntity(getPersister().getEntityName());
  }
}
/**
 * Perform the entity deletion. Well, as with most operations, does not really perform it; just
 * schedules an action/execution with the {@link org.hibernate.engine.spi.ActionQueue} for
 * execution during flush.
 *
 * @param session The originating session
 * @param entity The entity to delete
 * @param entityEntry The entity's entry in the {@link PersistenceContext}
 * @param isCascadeDeleteEnabled Is delete cascading enabled?
 * @param isOrphanRemovalBeforeUpdates Should the orphan-removal action be ordered before
 *     updates (HHH-6484 workaround)?
 * @param persister The entity persister.
 * @param transientEntities A cache of already deleted entities.
 */
protected final void deleteEntity(
    final EventSource session,
    final Object entity,
    final EntityEntry entityEntry,
    final boolean isCascadeDeleteEnabled,
    final boolean isOrphanRemovalBeforeUpdates,
    final EntityPersister persister,
    final Set transientEntities) {
  if (LOG.isTraceEnabled()) {
    LOG.tracev(
        "Deleting {0}",
        MessageHelper.infoString(persister, entityEntry.getId(), session.getFactory()));
  }
  final PersistenceContext persistenceContext = session.getPersistenceContext();
  final Type[] propTypes = persister.getPropertyTypes();
  final Object version = entityEntry.getVersion();
  final Object[] currentState;
  if (entityEntry.getLoadedState() == null) {
    // ie. the entity came in from update()
    currentState = persister.getPropertyValues(entity);
  } else {
    currentState = entityEntry.getLoadedState();
  }
  final Object[] deletedState = createDeletedState(persister, currentState, session);
  entityEntry.setDeletedState(deletedState);
  session
      .getInterceptor()
      .onDelete(
          entity, entityEntry.getId(), deletedState, persister.getPropertyNames(), propTypes);
  // before any callbacks, etc, so subdeletions see that this deletion happened first
  persistenceContext.setEntryStatus(entityEntry, Status.DELETED);
  final EntityKey key = session.generateEntityKey(entityEntry.getId(), persister);
  cascadeBeforeDelete(session, persister, entity, entityEntry, transientEntities);
  // Null out references to transient entities in the deleted-state snapshot, then verify
  // nullability constraints on the result.
  new ForeignKeys.Nullifier(entity, true, false, session)
      .nullifyTransientReferences(entityEntry.getDeletedState(), propTypes);
  new Nullability(session).checkNullability(entityEntry.getDeletedState(), persister, true);
  persistenceContext.getNullifiableEntityKeys().add(key);
  if (isOrphanRemovalBeforeUpdates) {
    // TODO: The removeOrphan concept is a temporary "hack" for HHH-6484. This should be removed
    // once action/task ordering is improved.
    session
        .getActionQueue()
        .addAction(
            new OrphanRemovalAction(
                entityEntry.getId(),
                deletedState,
                version,
                entity,
                persister,
                isCascadeDeleteEnabled,
                session));
  } else {
    // Ensures that containing deletions happen before sub-deletions
    session
        .getActionQueue()
        .addAction(
            new EntityDeleteAction(
                entityEntry.getId(),
                deletedState,
                version,
                entity,
                persister,
                isCascadeDeleteEnabled,
                session));
  }
  cascadeAfterDelete(session, persister, entity, transientEntities);
  // the entry will be removed after the flush, and will no longer
  // override the stale snapshot
  // This is now handled by removeEntity() in EntityDeleteAction
  // persistenceContext.removeDatabaseSnapshot(key);
}
/**
 * Performs all the actual work needed to save an entity (well to get the save moved to the
 * execution queue).
 *
 * @param entity The entity to be saved
 * @param key The id to be used for saving the entity (or null, in the case of identity columns)
 * @param persister The entity's persister instance.
 * @param useIdentityColumn Should an identity column be used for id generation?
 * @param anything Generally cascade-specific information.
 * @param source The session which is the source of the current event.
 * @param requiresImmediateIdAccess Is access to the identifier required immediately after the
 *     completion of the save? persist(), for example, does not require this...
 * @return The id used to save the entity; may be null depending on the type of id generator used
 *     and the requiresImmediateIdAccess value
 */
protected Serializable performSaveOrReplicate(
    Object entity,
    EntityKey key,
    EntityPersister persister,
    boolean useIdentityColumn,
    Object anything,
    EventSource source,
    boolean requiresImmediateIdAccess) {
  Serializable id = key == null ? null : key.getIdentifier();
  boolean inTxn = source.isTransactionInProgress();
  // Identity inserts may be delayed only when no one needs the generated id right away.
  boolean shouldDelayIdentityInserts = !inTxn && !requiresImmediateIdAccess;
  // Put a placeholder in entries, so we don't recurse back and try to save() the
  // same object again. QUESTION: should this be done before onSave() is called?
  // likewise, should it be done before onUpdate()?
  EntityEntry original =
      source
          .getPersistenceContext()
          .addEntry(
              entity,
              Status.SAVING,
              null,
              null,
              id,
              null,
              LockMode.WRITE,
              useIdentityColumn,
              persister,
              false,
              false);
  cascadeBeforeSave(source, persister, entity, anything);
  Object[] values = persister.getPropertyValuesToInsert(entity, getMergeMap(anything), source);
  Type[] types = persister.getPropertyTypes();
  // Interceptor/version substitution may modify the values array; reinject if so.
  boolean substitute = substituteValuesIfNecessary(entity, id, values, persister, source);
  if (persister.hasCollections()) {
    substitute = substitute || visitCollectionsBeforeSave(entity, id, values, types, source);
  }
  if (substitute) {
    persister.setPropertyValues(entity, values);
  }
  TypeHelper.deepCopy(values, types, persister.getPropertyUpdateability(), values, source);
  AbstractEntityInsertAction insert =
      addInsertAction(values, id, entity, persister, useIdentityColumn, source, shouldDelayIdentityInserts);
  // postpone initializing id in case the insert has non-nullable transient dependencies
  // that are not resolved until cascadeAfterSave() is executed
  cascadeAfterSave(source, persister, entity, anything);
  if (useIdentityColumn && insert.isEarlyInsert()) {
    if (!EntityIdentityInsertAction.class.isInstance(insert)) {
      throw new IllegalStateException(
          "Insert should be using an identity column, but action is of unexpected type: "
              + insert.getClass().getName());
    }
    id = ((EntityIdentityInsertAction) insert).getGeneratedId();
    insert.handleNaturalIdPostSaveNotifications(id);
  }
  // Cascades may have replaced the entry; carry over any extra state from the original.
  EntityEntry newEntry = source.getPersistenceContext().getEntry(entity);
  if (newEntry != original) {
    EntityEntryExtraState extraState = newEntry.getExtraState(EntityEntryExtraState.class);
    if (extraState == null) {
      newEntry.addExtraState(original.getExtraState(EntityEntryExtraState.class));
    }
  }
  return id;
}
/**
 * Perform the second step of 2-phase load. Fully initialize the entity instance.
 *
 * <p>After processing a JDBC result set, we "resolve" all the associations between the entities
 * which were instantiated and had their state "hydrated" into an array
 *
 * @param entity the entity being initialized
 * @param readOnly whether the entity should be marked read-only (no snapshot taken)
 * @param session the originating session
 * @param preLoadEvent reusable pre-load event fired before property injection
 * @param postLoadEvent reusable post-load event fired after initialization
 * @throws HibernateException on resolution or caching failure
 */
public static void initializeEntity(
    final Object entity,
    final boolean readOnly,
    final SessionImplementor session,
    final PreLoadEvent preLoadEvent,
    final PostLoadEvent postLoadEvent)
    throws HibernateException {
  // TODO: Should this be an InitializeEntityEventListener??? (watch out for performance!)
  final PersistenceContext persistenceContext = session.getPersistenceContext();
  EntityEntry entityEntry = persistenceContext.getEntry(entity);
  if (entityEntry == null) {
    throw new AssertionFailure("possible non-threadsafe access to the session");
  }
  EntityPersister persister = entityEntry.getPersister();
  Serializable id = entityEntry.getId();
  Object[] hydratedState = entityEntry.getLoadedState();
  if (log.isDebugEnabled())
    log.debug(
        "resolving associations for "
            + MessageHelper.infoString(persister, id, session.getFactory()));
  Type[] types = persister.getPropertyTypes();
  // Resolve each hydrated value (e.g. association ids -> entity references), skipping
  // unfetched lazy properties and unknown backrefs.
  for (int i = 0; i < hydratedState.length; i++) {
    final Object value = hydratedState[i];
    if (value != LazyPropertyInitializer.UNFETCHED_PROPERTY
        && value != BackrefPropertyAccessor.UNKNOWN) {
      hydratedState[i] = types[i].resolve(value, session, entity);
    }
  }
  // Must occur after resolving identifiers!
  if (session.isEventSource()) {
    preLoadEvent.setEntity(entity).setState(hydratedState).setId(id).setPersister(persister);
    PreLoadEventListener[] listeners = session.getListeners().getPreLoadEventListeners();
    for (int i = 0; i < listeners.length; i++) {
      listeners[i].onPreLoad(preLoadEvent);
    }
  }
  persister.setPropertyValues(entity, hydratedState, session.getEntityMode());
  final SessionFactoryImplementor factory = session.getFactory();
  if (persister.hasCache() && session.getCacheMode().isPutEnabled()) {
    if (log.isDebugEnabled())
      log.debug(
          "adding entity to second-level cache: "
              + MessageHelper.infoString(persister, id, session.getFactory()));
    Object version = Versioning.getVersion(hydratedState, persister);
    CacheEntry entry =
        new CacheEntry(
            hydratedState,
            persister,
            entityEntry.isLoadedWithLazyPropertiesUnfetched(),
            version,
            session,
            entity);
    CacheKey cacheKey =
        new CacheKey(
            id,
            persister.getIdentifierType(),
            persister.getRootEntityName(),
            session.getEntityMode(),
            session.getFactory());
    boolean put =
        persister
            .getCache()
            .put(
                cacheKey,
                persister.getCacheEntryStructure().structure(entry),
                session.getTimestamp(),
                version,
                persister.isVersioned() ? persister.getVersionType().getComparator() : null,
                // we could use persister.hasLazyProperties() instead of true
                useMinimalPuts(session, entityEntry));
    if (put && factory.getStatistics().isStatisticsEnabled()) {
      factory
          .getStatisticsImplementor()
          .secondLevelCachePut(persister.getCache().getRegionName());
    }
  }
  if (readOnly || !persister.isMutable()) {
    // no need to take a snapshot - this is a
    // performance optimization, but not really
    // important, except for entities with huge
    // mutable property values
    persistenceContext.setEntryStatus(entityEntry, Status.READ_ONLY);
  } else {
    // take a snapshot
    TypeFactory.deepCopy(
        hydratedState,
        persister.getPropertyTypes(),
        persister.getPropertyUpdateability(),
        hydratedState, // after setting values to object, entityMode
        session);
    persistenceContext.setEntryStatus(entityEntry, Status.MANAGED);
  }
  persister.afterInitialize(entity, entityEntry.isLoadedWithLazyPropertiesUnfetched(), session);
  if (session.isEventSource()) {
    postLoadEvent.setEntity(entity).setId(id).setPersister(persister);
    PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners();
    for (int i = 0; i < listeners.length; i++) {
      listeners[i].onPostLoad(postLoadEvent);
    }
  }
  if (log.isDebugEnabled())
    log.debug(
        "done materializing entity "
            + MessageHelper.infoString(persister, id, session.getFactory()));
  if (factory.getStatistics().isStatisticsEnabled()) {
    factory.getStatisticsImplementor().loadEntity(persister.getEntityName());
  }
}
/**
 * Perform a dirty check, and attach the results to the event.
 *
 * <p>Tries, in order: the interceptor's findDirty verdict; a check against the usual loaded
 * snapshot; for non-modifiable DELETED entities, a check of deleted state against current
 * state; otherwise a check against a database snapshot.
 *
 * @param event the flush event whose entity is being dirty-checked
 * @throws HibernateException on snapshot retrieval or comparison failure
 */
protected void dirtyCheck(FlushEntityEvent event) throws HibernateException {
  final Object entity = event.getEntity();
  final Object[] values = event.getPropertyValues();
  final SessionImplementor session = event.getSession();
  final EntityEntry entry = event.getEntityEntry();
  final EntityPersister persister = entry.getPersister();
  final Serializable id = entry.getId();
  final Object[] loadedState = entry.getLoadedState();
  // Give the interceptor first crack at the dirty check; null means "not handled".
  int[] dirtyProperties =
      session
          .getInterceptor()
          .findDirty(
              entity,
              id,
              values,
              loadedState,
              persister.getPropertyNames(),
              persister.getPropertyTypes());
  event.setDatabaseSnapshot(null);
  final boolean interceptorHandledDirtyCheck;
  boolean cannotDirtyCheck;
  if (dirtyProperties == null) {
    // Interceptor returned null, so do the dirtycheck ourself, if possible
    interceptorHandledDirtyCheck = false;
    cannotDirtyCheck = loadedState == null; // object loaded by update()
    if (!cannotDirtyCheck) {
      // dirty check against the usual snapshot of the entity
      dirtyProperties = persister.findDirty(values, loadedState, entity, session);
    } else if (entry.getStatus() == Status.DELETED && !event.getEntityEntry().isModifiableEntity()) {
      // A non-modifiable (e.g., read-only or immutable) entity needs to have
      // references to transient entities set to null before being deleted. No other
      // fields should be updated.
      if (values != entry.getDeletedState()) {
        throw new IllegalStateException(
            "Entity has status Status.DELETED but values != entry.getDeletedState");
      }
      // Even if loadedState == null, we can dirty-check by comparing currentState and
      // entry.getDeletedState() because the only fields to be updated are those that
      // refer to transient entities that are being set to null.
      // - currentState contains the entity's current property values.
      // - entry.getDeletedState() contains the entity's current property values with
      //   references to transient entities set to null.
      // - dirtyProperties will only contain properties that refer to transient entities
      final Object[] currentState =
          persister.getPropertyValues(event.getEntity(), event.getSession().getEntityMode());
      dirtyProperties = persister.findDirty(entry.getDeletedState(), currentState, entity, session);
      cannotDirtyCheck = false;
    } else {
      // dirty check against the database snapshot, if possible/necessary
      final Object[] databaseSnapshot = getDatabaseSnapshot(session, persister, id);
      if (databaseSnapshot != null) {
        dirtyProperties = persister.findModified(databaseSnapshot, values, entity, session);
        cannotDirtyCheck = false;
        event.setDatabaseSnapshot(databaseSnapshot);
      }
    }
  } else {
    // the Interceptor handled the dirty checking
    cannotDirtyCheck = false;
    interceptorHandledDirtyCheck = true;
  }
  logDirtyProperties(id, dirtyProperties, persister);
  event.setDirtyProperties(dirtyProperties);
  event.setDirtyCheckHandledByInterceptor(interceptorHandledDirtyCheck);
  event.setDirtyCheckPossible(!cannotDirtyCheck);
}