/**
 * Coordinates the efforts to resolve an entity identifier from its natural-id values. First, an
 * attempt is made to resolve it from the session-level cache. If not found there, an attempt is
 * made to locate it in the second-level cache. Lastly, an attempt is made to resolve it directly
 * from the datasource.
 *
 * @param event The natural-id resolve event
 * @return The resolved entity identifier, or null.
 */
protected Serializable resolveNaturalId(final ResolveNaturalIdEvent event) {
  final EntityPersister persister = event.getEntityPersister();

  final boolean traceEnabled = LOG.isTraceEnabled();
  if (traceEnabled) {
    LOG.tracev(
        "Attempting to resolve: {0}",
        MessageHelper.infoString(
            persister, event.getNaturalIdValues(), event.getSession().getFactory()));
  }

  Serializable entityId = resolveFromCache(event);
  if (entityId != null) {
    if (traceEnabled) {
      LOG.tracev(
          "Resolved object in cache: {0}",
          MessageHelper.infoString(
              persister, event.getNaturalIdValues(), event.getSession().getFactory()));
    }
    return entityId;
  }

  if (traceEnabled) {
    LOG.tracev(
        "Object not resolved in any cache: {0}",
        MessageHelper.infoString(
            persister, event.getNaturalIdValues(), event.getSession().getFactory()));
  }

  return loadFromDatasource(event);
}
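// A minimal sketch of how this resolution path is typically reached from application code,
// via the natural-id load API. The User entity and its "username" natural-id property are
// hypothetical; only the Session API calls themselves are real.
User user = (User) session.byNaturalId(User.class)
    .using("username", "jdoe")
    .load();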
@Override
public void initialize(Serializable id, SessionImplementor session) throws HibernateException {
  if (log.isDebugEnabled()) {
    log.debugf(
        "Loading collection: %s",
        MessageHelper.collectionInfoString(collectionPersister, id, getFactory()));
  }

  final Serializable[] ids = new Serializable[] {id};
  try {
    final QueryParameters qp = new QueryParameters();
    qp.setPositionalParameterTypes(new Type[] {collectionPersister.getKeyType()});
    qp.setPositionalParameterValues(ids);
    qp.setCollectionKeys(ids);

    executeLoad(session, qp, staticLoadQuery, true, null);
  } catch (SQLException sqle) {
    throw getFactory()
        .getSQLExceptionHelper()
        .convert(
            sqle,
            "could not initialize a collection: "
                + MessageHelper.collectionInfoString(collectionPersister, id, getFactory()),
            staticLoadQuery.getSqlStatement());
  }

  log.debug("Done loading collection");
}
public final Serializable performInsert(
    String insertSQL, SessionImplementor session, Binder binder) {
  try {
    // prepare and execute the insert
    PreparedStatement insert =
        session
            .getTransactionCoordinator()
            .getJdbcCoordinator()
            .getStatementPreparer()
            .prepareStatement(insertSQL, PreparedStatement.NO_GENERATED_KEYS);
    try {
      binder.bindValues(insert);
      insert.executeUpdate();
    } finally {
      insert.close();
    }
  } catch (SQLException sqle) {
    throw session
        .getFactory()
        .getSQLExceptionHelper()
        .convert(sqle, "could not insert: " + MessageHelper.infoString(persister), insertSQL);
  }

  final String selectSQL = getSelectSQL();

  try {
    // fetch the generated id in a separate query
    PreparedStatement idSelect =
        session
            .getTransactionCoordinator()
            .getJdbcCoordinator()
            .getStatementPreparer()
            .prepareStatement(selectSQL, false);
    try {
      bindParameters(session, idSelect, binder.getEntity());
      ResultSet rs = idSelect.executeQuery();
      try {
        return getResult(session, rs, binder.getEntity());
      } finally {
        rs.close();
      }
    } finally {
      idSelect.close();
    }
  } catch (SQLException sqle) {
    throw session
        .getFactory()
        .getSQLExceptionHelper()
        .convert(
            sqle,
            "could not retrieve generated id after insert: " + MessageHelper.infoString(persister),
            insertSQL);
  }
}
/**
 * Coordinates the efforts to load a given entity. First, an attempt is made to load the entity
 * from the session-level cache. If not found there, an attempt is made to locate it in the
 * second-level cache. Lastly, an attempt is made to load it directly from the datasource.
 *
 * @return The loaded entity, or null.
 * @throws HibernateException
 */
protected Object doLoad(
    final LoadEvent event,
    final EntityPersister persister,
    final EntityKey keyToLoad,
    final LoadEventListener.LoadType options)
    throws HibernateException {

  if (log.isTraceEnabled()) {
    log.trace(
        "attempting to resolve: "
            + MessageHelper.infoString(
                persister, event.getEntityId(), event.getSession().getFactory()));
  }

  Object entity = loadFromSessionCache(event, keyToLoad, options);
  if (entity != null) {
    if (log.isTraceEnabled()) {
      log.trace(
          "resolved object in session cache: "
              + MessageHelper.infoString(
                  persister, event.getEntityId(), event.getSession().getFactory()));
    }
    return entity;
  }

  // Entity not found in session; before going any further, see if we
  // already determined that this entity does not exist
  /*if ( event.getSession().getPersistenceContext().isNonExistant(keyToLoad) ) {
      if ( log.isTraceEnabled() ) log.trace("entity does not exist");
      return null;
  }*/

  entity = loadFromSecondLevelCache(event, persister, options);
  if (entity != null) {
    if (log.isTraceEnabled()) {
      log.trace(
          "resolved object in second-level cache: "
              + MessageHelper.infoString(
                  persister, event.getEntityId(), event.getSession().getFactory()));
    }
    return entity;
  }

  if (log.isTraceEnabled()) {
    log.trace(
        "object not resolved in any cache: "
            + MessageHelper.infoString(
                persister, event.getEntityId(), event.getSession().getFactory()));
  }

  return loadFromDatasource(event, persister, keyToLoad, options);
}
@Override
public void insert(Serializable id, Object[] fields, Object object, SessionImplementor session)
    throws HibernateException {

  final int span = getTableSpan();
  // insert operations are always dynamic in OGM
  boolean[] propertiesToInsert = getPropertiesToInsert(fields);
  for (int j = 0; j < span; j++) {
    if (isInverseTable(j)) {
      return;
    }

    // note: it is conceptually possible that a UserType could map null to
    // a non-null value, so the following is arguable:
    if (isNullableTable(j) && isAllNull(fields, j)) {
      return;
    }

    if (log.isTraceEnabled()) {
      log.trace("Inserting entity: " + MessageHelper.infoString(this, id, getFactory()));
      if (j == 0 && isVersioned()) {
        log.trace("Version: " + Versioning.getVersion(fields, this));
      }
    }

    final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session);
    Tuple resultset = gridDialect.getTuple(key, this.getTupleContext());
    // add the discriminator
    if (j == 0) {
      if (resultset != null) {
        throw new HibernateException(
            "trying to insert an already existing entity: "
                + MessageHelper.infoString(this, id, getFactory()));
      }
      if (discriminator.isNeeded()) {
        resultset = createNewResultSetIfNull(key, resultset, id, session);
        resultset.put(getDiscriminatorColumnName(), getDiscriminatorValue());
      }
    }
    resultset = createNewResultSetIfNull(key, resultset, id, session);

    // dehydrate
    dehydrate(
        resultset, fields, propertiesToInsert, getPropertyColumnInsertable(), j, id, session);
    gridDialect.updateTuple(resultset, key, getTupleContext());
  }
}
/** This snapshot is meant to be used when updating data. */
@Override
public Object[] getDatabaseSnapshot(Serializable id, SessionImplementor session)
    throws HibernateException {

  if (log.isTraceEnabled()) {
    log.trace(
        "Getting current persistent state for: "
            + MessageHelper.infoString(this, id, getFactory()));
  }

  // snapshot is a Map in the end
  final Tuple resultset = getResultsetById(id, session);

  // if there is no resulting row, return null
  if (resultset == null || resultset.getSnapshot().isEmpty()) {
    return null;
  }

  // otherwise return the "hydrated" state (ie. associations are not resolved)
  GridType[] types = gridPropertyTypes;
  Object[] values = new Object[types.length];
  boolean[] includeProperty = getPropertyUpdateability();
  for (int i = 0; i < types.length; i++) {
    if (includeProperty[i]) {
      values[i] =
          types[i].hydrate(resultset, getPropertyAliases("", i), session, null); // null owner ok??
    }
  }
  return values;
}
@Override
public Object initializeLazyProperty(String fieldName, Object entity, SessionImplementor session)
    throws HibernateException {

  final Serializable id = session.getContextEntityIdentifier(entity);
  final EntityEntry entry = session.getPersistenceContext().getEntry(entity);
  if (entry == null) {
    throw new HibernateException("entity is not associated with the session: " + id);
  }

  if (log.isTraceEnabled()) {
    log.trace(
        "initializing lazy properties of: "
            + MessageHelper.infoString(this, id, getFactory())
            + ", field access: "
            + fieldName);
  }

  if (hasCache()) {
    CacheKey cacheKey = session.generateCacheKey(id, getIdentifierType(), getEntityName());
    Object ce = getCacheAccessStrategy().get(cacheKey, session.getTimestamp());
    if (ce != null) {
      CacheEntry cacheEntry = (CacheEntry) getCacheEntryStructure().destructure(ce, getFactory());
      if (!cacheEntry.areLazyPropertiesUnfetched()) {
        // note early exit here:
        return initializeLazyPropertiesFromCache(fieldName, entity, session, entry, cacheEntry);
      }
    }
  }

  return initializeLazyPropertiesFromDatastore(fieldName, entity, session, id, entry);
}
/**
 * Performs the load of an entity.
 *
 * @return The loaded entity.
 * @throws HibernateException
 */
protected Object load(
    final LoadEvent event,
    final EntityPersister persister,
    final EntityKey keyToLoad,
    final LoadEventListener.LoadType options)
    throws HibernateException {

  if (event.getInstanceToLoad() != null) {
    if (event.getSession().getPersistenceContext().getEntry(event.getInstanceToLoad()) != null) {
      throw new PersistentObjectException(
          "attempted to load into an instance that was already associated with the session: "
              + MessageHelper.infoString(
                  persister, event.getEntityId(), event.getSession().getFactory()));
    }
    persister.setIdentifier(
        event.getInstanceToLoad(), event.getEntityId(), event.getSession().getEntityMode());
  }

  Object entity = doLoad(event, persister, keyToLoad, options);

  boolean isOptionalInstance = event.getInstanceToLoad() != null;

  if (!options.isAllowNulls() || isOptionalInstance) {
    ObjectNotFoundException.throwIfNull(entity, event.getEntityId(), event.getEntityClassName());
  }

  if (isOptionalInstance && entity != event.getInstanceToLoad()) {
    throw new NonUniqueObjectException(event.getEntityId(), event.getEntityClassName());
  }

  return entity;
}
/**
 * Based on configured options, will either return a pre-existing proxy, generate a new proxy, or
 * perform an actual load.
 *
 * @return The result of the proxy/load operation.
 * @throws HibernateException
 */
protected Object proxyOrLoad(
    final LoadEvent event,
    final EntityPersister persister,
    final EntityKey keyToLoad,
    final LoadEventListener.LoadType options)
    throws HibernateException {

  if (log.isTraceEnabled()) {
    log.trace(
        "loading entity: "
            + MessageHelper.infoString(
                persister, event.getEntityId(), event.getSession().getFactory()));
  }

  if (!persister.hasProxy()) {
    // this class has no proxies (so do a shortcut)
    return load(event, persister, keyToLoad, options);
  } else {
    final PersistenceContext persistenceContext = event.getSession().getPersistenceContext();

    // look for a proxy
    Object proxy = persistenceContext.getProxy(keyToLoad);
    if (proxy != null) {
      return returnNarrowedProxy(event, persister, keyToLoad, options, persistenceContext, proxy);
    } else {
      if (options.isAllowProxyCreation()) {
        return createProxyIfNecessary(event, persister, keyToLoad, options, persistenceContext);
      } else {
        // return a newly loaded object
        return load(event, persister, keyToLoad, options);
      }
    }
  }
}
/**
 * Initialize the role of the collection.
 *
 * @param collection The collection to be updated by reachability.
 * @param type The type of the collection.
 * @param entity The owner of the collection.
 * @param session The session from which this request originates
 */
public static void processReachableCollection(
    PersistentCollection collection,
    CollectionType type,
    Object entity,
    SessionImplementor session) {

  collection.setOwner(entity);

  CollectionEntry ce = session.getPersistenceContext().getCollectionEntry(collection);

  if (ce == null) {
    // refer to comment in StatefulPersistenceContext.addCollection()
    throw new HibernateException(
        "Found two representations of same collection: " + type.getRole());
  }

  // The CollectionEntry.isReached() stuff is just to detect any silly users
  // who set up circular or shared references between/to collections.
  if (ce.isReached()) {
    // We've been here before
    throw new HibernateException("Found shared references to a collection: " + type.getRole());
  }
  ce.setReached(true);

  SessionFactoryImplementor factory = session.getFactory();
  CollectionPersister persister = factory.getCollectionPersister(type.getRole());
  ce.setCurrentPersister(persister);
  ce.setCurrentKey(type.getKeyOfOwner(entity, session)); // TODO: better to pass the id in as an argument?

  if (LOG.isDebugEnabled()) {
    if (collection.wasInitialized()) {
      LOG.debugf(
          "Collection found: %s, was: %s (initialized)",
          MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory),
          MessageHelper.collectionInfoString(ce.getLoadedPersister(), ce.getLoadedKey(), factory));
    } else {
      LOG.debugf(
          "Collection found: %s, was: %s (uninitialized)",
          MessageHelper.collectionInfoString(persister, ce.getCurrentKey(), factory),
          MessageHelper.collectionInfoString(ce.getLoadedPersister(), ce.getLoadedKey(), factory));
    }
  }

  prepareCollectionForUpdate(collection, ce, factory);
}
@Override
public void lock(
    Serializable id, Object version, Object object, int timeout, SessionImplementor session) {

  final String sql = determineSql(timeout);
  SessionFactoryImplementor factory = session.getFactory();
  try {
    try {
      PreparedStatement st =
          session
              .getTransactionCoordinator()
              .getJdbcCoordinator()
              .getStatementPreparer()
              .prepareStatement(sql);
      try {
        getLockable().getIdentifierType().nullSafeSet(st, id, 1, session);
        if (getLockable().isVersioned()) {
          getLockable()
              .getVersionType()
              .nullSafeSet(
                  st,
                  version,
                  getLockable().getIdentifierType().getColumnSpan(factory) + 1,
                  session);
        }

        ResultSet rs =
            session
                .getTransactionCoordinator()
                .getJdbcCoordinator()
                .getResultSetReturn()
                .extract(st);
        try {
          if (!rs.next()) {
            if (factory.getStatistics().isStatisticsEnabled()) {
              factory.getStatisticsImplementor().optimisticFailure(getLockable().getEntityName());
            }
            throw new StaleObjectStateException(getLockable().getEntityName(), id);
          }
        } finally {
          session.getTransactionCoordinator().getJdbcCoordinator().release(rs, st);
        }
      } finally {
        session.getTransactionCoordinator().getJdbcCoordinator().release(st);
      }
    } catch (SQLException e) {
      throw session
          .getFactory()
          .getSQLExceptionHelper()
          .convert(
              e,
              "could not lock: "
                  + MessageHelper.infoString(getLockable(), id, session.getFactory()),
              sql);
    }
  } catch (JDBCException e) {
    throw new PessimisticEntityLockException(object, "could not obtain pessimistic lock", e);
  }
}
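// A hedged sketch of one way such a locking strategy is reached from application code:
// requesting a pessimistic lock when loading. The Order entity is hypothetical; the
// Session.get(Class, Serializable, LockOptions) overload and LockMode are standard API.
Order order =
    (Order) session.get(Order.class, 42L, new LockOptions(LockMode.PESSIMISTIC_WRITE));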
protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException {
  LOG.trace("Ignoring persistent instance");

  EntityEntry entityEntry = event.getEntry();
  if (entityEntry == null) {
    throw new AssertionFailure("entity was transient or detached");
  } else {

    if (entityEntry.getStatus() == Status.DELETED) {
      throw new AssertionFailure("entity was deleted");
    }

    final SessionFactoryImplementor factory = event.getSession().getFactory();

    Serializable requestedId = event.getRequestedId();

    Serializable savedId;
    if (requestedId == null) {
      savedId = entityEntry.getId();
    } else {

      final boolean isEqual =
          !entityEntry
              .getPersister()
              .getIdentifierType()
              .isEqual(
                  requestedId, entityEntry.getId(), event.getSession().getEntityMode(), factory);

      if (isEqual) {
        throw new PersistentObjectException(
            "object passed to save() was already persistent: "
                + MessageHelper.infoString(entityEntry.getPersister(), requestedId, factory));
      }

      savedId = requestedId;
    }

    if (LOG.isTraceEnabled()) {
      LOG.trace(
          "Object already associated with session: "
              + MessageHelper.infoString(entityEntry.getPersister(), savedId, factory));
    }

    return savedId;
  }
}
public void evictEntity(String entityName, Serializable identifier) {
  EntityPersister p = getEntityPersister(entityName);
  if (p.hasCache()) {
    if (log.isDebugEnabled()) {
      log.debug(
          "evicting second-level cache: "
              + MessageHelper.infoString(p, identifier, SessionFactoryImpl.this));
    }
    p.getCacheAccessStrategy().evict(buildCacheKey(identifier, p));
  }
}
private static void processDereferencedCollection(
    PersistentCollection coll, SessionImplementor session) {
  final PersistenceContext persistenceContext = session.getPersistenceContext();
  CollectionEntry entry = persistenceContext.getCollectionEntry(coll);
  final CollectionPersister loadedPersister = entry.getLoadedPersister();

  if (LOG.isDebugEnabled() && loadedPersister != null) {
    LOG.debugf(
        "Collection dereferenced: %s",
        MessageHelper.collectionInfoString(
            loadedPersister, entry.getLoadedKey(), session.getFactory()));
  }

  // do a check
  boolean hasOrphanDelete = loadedPersister != null && loadedPersister.hasOrphanDelete();
  if (hasOrphanDelete) {
    Serializable ownerId =
        loadedPersister.getOwnerEntityPersister().getIdentifier(coll.getOwner(), session);
    if (ownerId == null) {
      // the owning entity may have been deleted and its identifier unset due to
      // identifier-rollback; in which case, try to look up its identifier from
      // the persistence context
      if (session.getFactory().getSettings().isIdentifierRollbackEnabled()) {
        EntityEntry ownerEntry = persistenceContext.getEntry(coll.getOwner());
        if (ownerEntry != null) {
          ownerId = ownerEntry.getId();
        }
      }
      if (ownerId == null) {
        throw new AssertionFailure(
            "Unable to determine collection owner identifier for orphan-delete processing");
      }
    }
    EntityKey key = session.generateEntityKey(ownerId, loadedPersister.getOwnerEntityPersister());
    Object owner = persistenceContext.getEntity(key);
    if (owner == null) {
      throw new AssertionFailure(
          "collection owner not associated with session: " + loadedPersister.getRole());
    }
    EntityEntry e = persistenceContext.getEntry(owner);
    // only collections belonging to deleted entities are allowed to be dereferenced in the case
    // of orphan delete
    if (e != null && e.getStatus() != Status.DELETED && e.getStatus() != Status.GONE) {
      throw new HibernateException(
          "A collection with cascade=\"all-delete-orphan\" was no longer referenced by the owning entity instance: "
              + loadedPersister.getRole());
    }
  }

  // do the work
  entry.setCurrentPersister(null);
  entry.setCurrentKey(null);
  prepareCollectionForUpdate(coll, entry, session.getFactory());
}
public void evictCollection(String role, Serializable ownerIdentifier) {
  CollectionPersister p = getCollectionPersister(role);
  if (p.hasCache()) {
    if (log.isDebugEnabled()) {
      log.debug(
          "evicting second-level cache: "
              + MessageHelper.collectionInfoString(p, ownerIdentifier, SessionFactoryImpl.this));
    }
    CacheKey cacheKey = buildCacheKey(ownerIdentifier, p);
    p.getCacheAccessStrategy().evict(cacheKey);
  }
}
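// A short sketch of how these two evictions are typically requested from application code,
// through the SessionFactory's Cache API. The entity name and collection role are made up;
// the Cache.evictEntity / evictCollection methods themselves are standard API.
sessionFactory.getCache().evictEntity("com.example.Order", 42L);
sessionFactory.getCache().evictCollection("com.example.Order.lineItems", 42L);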
/** Retrieve the version number */
@Override
public Object getCurrentVersion(Serializable id, SessionImplementor session)
    throws HibernateException {

  if (log.isTraceEnabled()) {
    log.trace("Getting version: " + MessageHelper.infoString(this, id, getFactory()));
  }

  final Tuple resultset = getResultsetById(id, session);

  if (resultset == null) {
    return null;
  } else {
    return gridVersionType.nullSafeGet(resultset, getVersionColumnName(), session, null);
  }
}
@Override
public void doBeforeTransactionCompletion(SessionImplementor session) {
  final EntityPersister persister = entry.getPersister();
  final Object latestVersion = persister.getCurrentVersion(entry.getId(), session);
  if (!entry.getVersion().equals(latestVersion)) {
    throw new OptimisticLockException(
        object,
        "Newer version ["
            + latestVersion
            + "] of entity ["
            + MessageHelper.infoString(entry.getEntityName(), entry.getId())
            + "] found in database");
  }
}
private void logDirtyProperties(
    Serializable id, int[] dirtyProperties, EntityPersister persister) {
  if (LOG.isTraceEnabled() && dirtyProperties != null && dirtyProperties.length > 0) {
    final String[] allPropertyNames = persister.getPropertyNames();
    final String[] dirtyPropertyNames = new String[dirtyProperties.length];
    for (int i = 0; i < dirtyProperties.length; i++) {
      dirtyPropertyNames[i] = allPropertyNames[dirtyProperties[i]];
    }
    // render the property names rather than the array's default toString()
    LOG.trace(
        "Found dirty properties ["
            + MessageHelper.infoString(persister.getEntityName(), id)
            + "] : "
            + java.util.Arrays.toString(dirtyPropertyNames));
  }
}
/**
 * Unmarshall the fields of a persistent instance from a result set, without resolving
 * associations or collections. Question: should this really be here, or should it be sent back
 * to Loader?
 */
public Object[] hydrate(
    final Tuple resultset,
    final Serializable id,
    final Object object,
    final Loadable rootLoadable,
    // We probably don't need suffixedColumns, use column names instead
    // final String[][] suffixedPropertyColumns,
    final boolean allProperties,
    final SessionImplementor session)
    throws HibernateException {

  if (log.isTraceEnabled()) {
    log.trace("Hydrating entity: " + MessageHelper.infoString(this, id, getFactory()));
  }

  final OgmEntityPersister rootPersister = (OgmEntityPersister) rootLoadable;

  final boolean hasDeferred = rootPersister.hasSequentialSelect();
  boolean sequentialSelectEmpty = false;
  if (hasDeferred) {
    // note: today we don't have sequential select in OGM
    // check AbstractEntityPersister#hydrate for the detail
  }

  final String[] propNames = getPropertyNames();
  final Type[] types = getPropertyTypes();
  final Object[] values = new Object[types.length];
  final boolean[] laziness = getPropertyLaziness();
  final String[] propSubclassNames = getSubclassPropertySubclassNameClosure();
  final boolean[] propertySelectable = getPropertySelectable();

  for (int i = 0; i < types.length; i++) {
    values[i] =
        hydrateValue(
            resultset,
            session,
            object,
            i,
            propertySelectable,
            allProperties,
            laziness,
            hasDeferred,
            rootPersister,
            propNames,
            propSubclassNames,
            sequentialSelectEmpty);
  }
  return values;
}
private static void processNeverReferencedCollection(
    PersistentCollection coll, SessionImplementor session) throws HibernateException {
  final PersistenceContext persistenceContext = session.getPersistenceContext();
  CollectionEntry entry = persistenceContext.getCollectionEntry(coll);

  if (LOG.isDebugEnabled()) {
    LOG.debugf(
        "Found collection with unloaded owner: %s",
        MessageHelper.collectionInfoString(
            entry.getLoadedPersister(), entry.getLoadedKey(), session.getFactory()));
  }

  entry.setCurrentPersister(entry.getLoadedPersister());
  entry.setCurrentKey(entry.getLoadedKey());

  prepareCollectionForUpdate(coll, entry, session.getFactory());
}
/** @see LockingStrategy#lock */
public void lock(
    Serializable id, Object version, Object object, int timeout, SessionImplementor session)
    throws StaleObjectStateException, JDBCException {
  if (!lockable.isVersioned()) {
    throw new HibernateException(
        "write locks via update not supported for non-versioned entities ["
            + lockable.getEntityName()
            + "]");
  }
  // todo : should we additionally check the current isolation mode explicitly?
  SessionFactoryImplementor factory = session.getFactory();
  try {
    PreparedStatement st = session.getBatcher().prepareSelectStatement(sql);
    try {
      lockable.getVersionType().nullSafeSet(st, version, 1, session);
      int offset = 2;

      lockable.getIdentifierType().nullSafeSet(st, id, offset, session);
      offset += lockable.getIdentifierType().getColumnSpan(factory);

      if (lockable.isVersioned()) {
        lockable.getVersionType().nullSafeSet(st, version, offset, session);
      }

      int affected = st.executeUpdate();
      if (affected < 0) {
        factory.getStatisticsImplementor().optimisticFailure(lockable.getEntityName());
        throw new StaleObjectStateException(lockable.getEntityName(), id);
      }
    } finally {
      session.getBatcher().closeStatement(st);
    }
  } catch (SQLException sqle) {
    throw session
        .getFactory()
        .getSQLExceptionHelper()
        .convert(
            sqle,
            "could not lock: " + MessageHelper.infoString(lockable, id, session.getFactory()),
            sql);
  }
}
private void evictCollection(PersistentCollection collection) {
  CollectionEntry ce =
      (CollectionEntry)
          getSession().getPersistenceContext().getCollectionEntries().remove(collection);
  if (log.isDebugEnabled()) {
    log.debug(
        "evicting collection: "
            + MessageHelper.collectionInfoString(
                ce.getLoadedPersister(), ce.getLoadedKey(), getSession().getFactory()));
  }
  if (ce.getLoadedPersister() != null && ce.getLoadedKey() != null) {
    // TODO: is this 100% correct?
    getSession()
        .getPersistenceContext()
        .getCollectionsByKey()
        .remove(
            new CollectionKey(
                ce.getLoadedPersister(), ce.getLoadedKey(), getSession().getEntityMode()));
  }
}
@Override
public Object forceVersionIncrement(
    Serializable id, Object currentVersion, SessionImplementor session) {
  if (!isVersioned()) {
    throw new AssertionFailure("cannot force version increment on non-versioned entity");
  }

  if (isVersionPropertyGenerated()) {
    // the difficulty here is exactly what do we update in order to
    // force the version to be incremented in the db...
    throw new HibernateException(
        "LockMode.FORCE is currently not supported for generated version properties");
  }

  Object nextVersion = getVersionType().next(currentVersion, session);
  if (log.isTraceEnabled()) {
    log.trace(
        "Forcing version increment ["
            + MessageHelper.infoString(this, id, getFactory())
            + "; "
            + getVersionType().toLoggableString(currentVersion, getFactory())
            + " -> "
            + getVersionType().toLoggableString(nextVersion, getFactory())
            + "]");
  }

  /*
   * We get the value from the grid and compare the version values before putting the next version in
   * Contrary to the database version, there is
   * TODO should we use cache.replace() it seems more expensive to pass the resultset around "just" the atomicity of the operation
   */
  final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session);
  final Tuple resultset = gridDialect.getTuple(key, getTupleContext());
  checkVersionAndRaiseSOSE(id, currentVersion, session, resultset);
  gridVersionType.nullSafeSet(
      resultset, nextVersion, new String[] {getVersionColumnName()}, session);
  gridDialect.updateTuple(resultset, key, getTupleContext());
  return nextVersion;
}
/** @see LockingStrategy#lock */
public void lock(Serializable id, Object version, Object object, SessionImplementor session)
    throws StaleObjectStateException, JDBCException {

  SessionFactoryImplementor factory = session.getFactory();
  try {
    PreparedStatement st = session.getBatcher().prepareSelectStatement(sql);
    try {
      lockable.getIdentifierType().nullSafeSet(st, id, 1, session);
      if (lockable.isVersioned()) {
        lockable
            .getVersionType()
            .nullSafeSet(
                st, version, lockable.getIdentifierType().getColumnSpan(factory) + 1, session);
      }

      ResultSet rs = st.executeQuery();
      try {
        if (!rs.next()) {
          if (factory.getStatistics().isStatisticsEnabled()) {
            factory.getStatisticsImplementor().optimisticFailure(lockable.getEntityName());
          }
          throw new StaleObjectStateException(lockable.getEntityName(), id);
        }
      } finally {
        rs.close();
      }
    } finally {
      session.getBatcher().closeStatement(st);
    }
  } catch (SQLException sqle) {
    throw JDBCExceptionHelper.convert(
        session.getFactory().getSQLExceptionConverter(),
        sqle,
        "could not lock: " + MessageHelper.infoString(lockable, id, session.getFactory()),
        sql);
  }
}
/**
 * Prepares the save call by checking the session caches for a pre-existing entity and performing
 * any lifecycle callbacks.
 *
 * @param entity The entity to be saved.
 * @param id The id by which to save the entity.
 * @param persister The entity's persister instance.
 * @param useIdentityColumn Is an identity column being used?
 * @param anything Generally cascade-specific information.
 * @param source The session from which the event originated.
 * @param requiresImmediateIdAccess does the event context require access to the identifier
 *     immediately after execution of this method (if not, post-insert style id generators may be
 *     postponed if we are outside a transaction).
 * @return The id used to save the entity; may be null depending on the type of id generator used
 *     and the requiresImmediateIdAccess value
 */
protected Serializable performSave(
    Object entity,
    Serializable id,
    EntityPersister persister,
    boolean useIdentityColumn,
    Object anything,
    EventSource source,
    boolean requiresImmediateIdAccess) {

  if (LOG.isTraceEnabled()) {
    LOG.tracev("Saving {0}", MessageHelper.infoString(persister, id, source.getFactory()));
  }

  final EntityKey key;
  if (!useIdentityColumn) {
    key = source.generateEntityKey(id, persister);
    Object old = source.getPersistenceContext().getEntity(key);
    if (old != null) {
      if (source.getPersistenceContext().getEntry(old).getStatus() == Status.DELETED) {
        source.forceFlush(source.getPersistenceContext().getEntry(old));
      } else {
        throw new NonUniqueObjectException(id, persister.getEntityName());
      }
    }
    persister.setIdentifier(entity, id, source);
  } else {
    key = null;
  }

  if (invokeSaveLifecycle(entity, persister, source)) {
    return id; // EARLY EXIT
  }

  return performSaveOrReplicate(
      entity, key, persister, useIdentityColumn, anything, source, requiresImmediateIdAccess);
}
private Object assembleCacheEntry(
    final CacheEntry entry,
    final Serializable id,
    final EntityPersister persister,
    final LoadEvent event) throws HibernateException {

  final Object optionalObject = event.getInstanceToLoad();
  final EventSource session = event.getSession();
  final SessionFactoryImplementor factory = session.getFactory();

  if (log.isTraceEnabled()) {
    log.trace(
        "assembling entity from second-level cache: "
            + MessageHelper.infoString(persister, id, factory));
  }

  EntityPersister subclassPersister = factory.getEntityPersister(entry.getSubclass());
  Object result =
      optionalObject == null ? session.instantiate(subclassPersister, id) : optionalObject;

  // make it circular-reference safe
  TwoPhaseLoad.addUninitializedCachedEntity(
      new EntityKey(id, subclassPersister, session.getEntityMode()),
      result,
      subclassPersister,
      LockMode.NONE,
      entry.areLazyPropertiesUnfetched(),
      entry.getVersion(),
      session);

  Type[] types = subclassPersister.getPropertyTypes();
  // initializes result by side-effect
  Object[] values =
      entry.assemble(result, id, subclassPersister, session.getInterceptor(), session);
  TypeFactory.deepCopy(
      values, types, subclassPersister.getPropertyUpdateability(), values, session);

  Object version = Versioning.getVersion(values, subclassPersister);
  if (log.isTraceEnabled()) {
    log.trace("Cached Version: " + version);
  }

  final PersistenceContext persistenceContext = session.getPersistenceContext();
  persistenceContext.addEntry(
      result,
      Status.MANAGED,
      values,
      null,
      id,
      version,
      LockMode.NONE,
      true,
      subclassPersister,
      false,
      entry.areLazyPropertiesUnfetched());
  subclassPersister.afterInitialize(result, entry.areLazyPropertiesUnfetched(), session);
  persistenceContext.initializeNonLazyCollections();
  // upgrade the lock if necessary:
  // lock(result, lockMode);

  // PostLoad is needed for EJB3
  // TODO: reuse the PostLoadEvent...
  PostLoadEvent postLoadEvent =
      new PostLoadEvent(session).setEntity(result).setId(id).setPersister(persister);
  PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners();
  for (int i = 0; i < listeners.length; i++) {
    listeners[i].onPostLoad(postLoadEvent);
  }

  return result;
}
protected int doUpdateRows(
    Serializable id, PersistentCollection collection, SessionImplementor session)
    throws HibernateException {

  if (ArrayHelper.isAllFalse(elementColumnIsSettable)) {
    return 0;
  }

  try {
    PreparedStatement st = null;
    boolean callable = isUpdateCallable();
    Iterator entries = collection.entries(this);
    try {
      int i = 0;
      int count = 0;
      while (entries.hasNext()) {
        int offset = 1;
        Object entry = entries.next();
        if (collection.needsUpdating(entry, i, elementType)) {
          if (st == null) {
            if (callable) {
              CallableStatement callstatement =
                  session.getBatcher().prepareBatchCallableStatement(getSQLUpdateRowString());
              // TODO: should we require users to return number of update rows?
              // (we cant make it return this without changing collectionpersister interface)
              callstatement.registerOutParameter(offset++, Types.NUMERIC);
              st = callstatement;
            } else {
              st = session.getBatcher().prepareBatchStatement(getSQLUpdateRowString());
            }
          }
          int loc = writeElement(st, collection.getElement(entry), offset, session);
          if (hasIdentifier) {
            loc = writeIdentifier(st, collection.getIdentifier(entry, i), loc, session);
          } else {
            loc = writeKey(st, id, loc, session);
            if (hasIndex && !indexContainsFormula) {
              loc = writeIndexToWhere(st, collection.getIndex(entry, i, this), loc, session);
            } else {
              loc = writeElementToWhere(st, collection.getSnapshotElement(entry, i), loc, session);
            }
          }
          session.getBatcher().addToBatch(1);
          count++;
        }
        i++;
      }
      return count;
    } catch (SQLException sqle) {
      session.getBatcher().abortBatch(sqle);
      throw sqle;
    }
  } catch (SQLException sqle) {
    throw JDBCExceptionHelper.convert(
        getSQLExceptionConverter(),
        sqle,
        "could not update collection rows: "
            + MessageHelper.collectionInfoString(this, id, getFactory()),
        getSQLUpdateRowString());
  }
}
@Override
public String toString() {
  return StringHelper.unqualify(getClass().getName())
      + MessageHelper.infoString(collectionRole, key);
}
public String getMessage() {
  return super.getMessage() + ": " + MessageHelper.infoString(entityName, identifier);
}
public String toString() {
  return "CollectionKey"
      + MessageHelper.collectionInfoString(factory.getCollectionPersister(role), key, factory);
}
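// Every snippet above builds its log and error text through MessageHelper. As a rough sketch
// of the rendered format (the entity name is made up, and the exact rendering is an assumption,
// not a guarantee), the String overload produces bracketed "name#id" strings:
String msg = MessageHelper.infoString("com.example.Order", 42L);
// msg is expected to look roughly like "[com.example.Order#42]"; collectionInfoString renders
// the collection role and owner id in the same bracketed style.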