/**
 * Add an (initialized) collection that was created by another session and passed into update()
 * (i.e. one with a snapshot and existing state on the database).
 */
public void addInitializedDetachedCollection(
    CollectionPersister collectionPersister, PersistentCollection collection)
    throws HibernateException {
  if (collection.isUnreferenced()) {
    // treat it just like a new collection
    addCollection(collection, collectionPersister);
  } else {
    CollectionEntry ce = new CollectionEntry(collection, session.getFactory());
    addCollection(collection, ce, collection.getKey());
  }
}
public void evictCollection(Object value, CollectionType type) {
  final Object pc;
  if (type.hasHolder(getSession().getEntityMode())) {
    pc = getSession().getPersistenceContext().removeCollectionHolder(value);
  } else if (value instanceof PersistentCollection) {
    pc = value;
  } else {
    return; // EARLY EXIT!
  }

  PersistentCollection collection = (PersistentCollection) pc;
  if (collection.unsetSession(getSession())) {
    evictCollection(collection);
  }
}
/** Add a collection to the cache, with a given collection entry. */
private void addCollection(PersistentCollection coll, CollectionEntry entry, Serializable key) {
  collectionEntries.put(coll, entry);
  CollectionKey collectionKey =
      new CollectionKey(entry.getLoadedPersister(), key, session.getEntityMode());
  PersistentCollection old = (PersistentCollection) collectionsByKey.put(collectionKey, coll);
  if (old != null) {
    if (old == coll) {
      throw new AssertionFailure("bug adding collection twice");
    }
    // or should it actually throw an exception?
    old.unsetSession(session);
    collectionEntries.remove(old);
    // watch out for a case where old is still referenced
    // somewhere in the object graph! (which is a user error)
  }
}
/**
 * Overrides the default behaviour and first checks if a PersistentCollection instance has been
 * initialised using the wasInitialized() method before cascading.
 *
 * @param errors The Spring Errors instance
 * @param bean The BeanWrapper for the bean
 * @param persistentProperty The GrailsDomainClassProperty instance
 * @param propertyName The name of the property
 * @see org.hibernate.collection.PersistentCollection#wasInitialized()
 */
protected void cascadeValidationToMany(
    Errors errors,
    BeanWrapper bean,
    GrailsDomainClassProperty persistentProperty,
    String propertyName) {
  Object collection = bean.getPropertyValue(propertyName);
  if (collection != null) {
    if (collection instanceof PersistentCollection) {
      PersistentCollection persistentCollection = (PersistentCollection) collection;
      if (persistentCollection.wasInitialized()) {
        super.cascadeValidationToMany(errors, bean, persistentProperty, propertyName);
      }
    } else {
      super.cascadeValidationToMany(errors, bean, persistentProperty, propertyName);
    }
  }
}
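// The guard above only cascades validation into the association when the collection wrapper
// reports wasInitialized() == true, so validating an object never forces a lazy load. Outside a
// validator subclass, the same guard can be expressed against the public Hibernate API; a minimal
// sketch is shown below (the helper name ifInitialized is ours, not a Grails or Hibernate method).
import java.util.Collection;
import org.hibernate.Hibernate;

public class LazyGuardSketch {
  /** Returns the collection only if it can be touched without triggering a lazy load. */
  static <T> Collection<T> ifInitialized(Collection<T> association) {
    // Hibernate.isInitialized() handles plain collections, proxies, and persistent wrappers.
    return (association != null && Hibernate.isInitialized(association)) ? association : null;
  }
}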
/**
 * Get the collection entry for a collection passed to filter, which might be a collection
 * wrapper, an array, or an unwrapped collection. Return null if there is no entry.
 */
public CollectionEntry getCollectionEntryOrNull(Object collection) {
  PersistentCollection coll;
  if (collection instanceof PersistentCollection) {
    coll = (PersistentCollection) collection;
    // if (collection==null) throw new TransientObjectException("Collection was not yet persistent");
  } else {
    coll = getCollectionHolder(collection);
    if (coll == null) {
      // it might be an unwrapped collection reference!
      // try to find a wrapper (slowish)
      Iterator wrappers = IdentityMap.keyIterator(collectionEntries);
      while (wrappers.hasNext()) {
        PersistentCollection pc = (PersistentCollection) wrappers.next();
        if (pc.isWrapper(collection)) {
          coll = pc;
          break;
        }
      }
    }
  }
  return (coll == null) ? null : getCollectionEntry(coll);
}
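// Note: collectionEntries is keyed by object identity (it is built with Hibernate's IdentityMap,
// as the deserialize() code further down shows), not by equals()/hashCode(), which is why the
// fallback above iterates the keys rather than doing a plain lookup. A minimal JDK-only sketch of
// identity-keyed versus equality-keyed maps, with hypothetical class and variable names, follows.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;

public class IdentityKeyedRegistryDemo {
  public static void main(String[] args) {
    // Two lists that are equal by equals() but are distinct instances.
    List<String> a = new ArrayList<>(List.of("x"));
    List<String> b = new ArrayList<>(List.of("x"));

    // An identity-keyed map keeps one entry per instance, the way collectionEntries keeps one
    // entry per collection wrapper...
    Map<Object, String> byIdentity = new IdentityHashMap<>();
    byIdentity.put(a, "entry for a");
    byIdentity.put(b, "entry for b");
    System.out.println(byIdentity.size()); // prints 2

    // ...whereas an equality-keyed map collapses equal-but-distinct instances into one entry.
    Map<Object, String> byEquality = new HashMap<>();
    byEquality.put(a, "entry for a");
    byEquality.put(b, "entry for b");
    System.out.println(byEquality.size()); // prints 1
  }
}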
protected int doUpdateRows(
    Serializable id, PersistentCollection collection, SessionImplementor session)
    throws HibernateException {

  if (ArrayHelper.isAllFalse(elementColumnIsSettable)) {
    return 0;
  }

  try {
    PreparedStatement st = null;
    boolean callable = isUpdateCallable();
    Iterator entries = collection.entries(this);
    try {
      int i = 0;
      int count = 0;
      while (entries.hasNext()) {
        int offset = 1;
        Object entry = entries.next();
        if (collection.needsUpdating(entry, i, elementType)) {
          if (st == null) {
            if (callable) {
              CallableStatement callstatement =
                  session.getBatcher().prepareBatchCallableStatement(getSQLUpdateRowString());
              // TODO: should we require users to return the number of updated rows?
              // (we can't make it return this without changing the CollectionPersister interface)
              callstatement.registerOutParameter(offset++, Types.NUMERIC);
              st = callstatement;
            } else {
              st = session.getBatcher().prepareBatchStatement(getSQLUpdateRowString());
            }
          }

          int loc = writeElement(st, collection.getElement(entry), offset, session);
          if (hasIdentifier) {
            loc = writeIdentifier(st, collection.getIdentifier(entry, i), loc, session);
          } else {
            loc = writeKey(st, id, loc, session);
            if (hasIndex && !indexContainsFormula) {
              loc = writeIndexToWhere(st, collection.getIndex(entry, i, this), loc, session);
            } else {
              loc = writeElementToWhere(st, collection.getSnapshotElement(entry, i), loc, session);
            }
          }
          session.getBatcher().addToBatch(1);
          count++;
        }
        i++;
      }
      return count;
    } catch (SQLException sqle) {
      session.getBatcher().abortBatch(sqle);
      throw sqle;
    }
  } catch (SQLException sqle) {
    throw JDBCExceptionHelper.convert(
        getSQLExceptionConverter(),
        sqle,
        "could not update collection rows: "
            + MessageHelper.collectionInfoString(this, id, getFactory()),
        getSQLUpdateRowString());
  }
}
protected int doUpdateRows(
    Serializable id, PersistentCollection collection, SessionImplementor session)
    throws HibernateException {

  // we finish all the "removes" first to take care of possible unique
  // constraints and so that we can take better advantage of batching
  try {
    int count = 0;

    if (isRowDeleteEnabled()) {
      boolean useBatch = true;
      PreparedStatement st = null;
      // update removed rows fks to null
      try {
        int i = 0;
        Iterator entries = collection.entries(this);
        int offset = 1;
        Expectation expectation = Expectations.NONE;
        while (entries.hasNext()) {
          Object entry = entries.next();
          if (collection.needsUpdating(entry, i, elementType)) {
            // will still be issued when it used to be null
            if (st == null) {
              String sql = getSQLDeleteRowString();
              if (isDeleteCallable()) {
                expectation = Expectations.appropriateExpectation(getDeleteCheckStyle());
                useBatch = expectation.canBeBatched();
                st =
                    useBatch
                        ? session.getBatcher().prepareBatchCallableStatement(sql)
                        : session.getBatcher().prepareCallableStatement(sql);
                offset += expectation.prepare(st);
              } else {
                st = session.getBatcher().prepareBatchStatement(getSQLDeleteRowString());
              }
            }
            int loc = writeKey(st, id, offset, session);
            writeElementToWhere(st, collection.getSnapshotElement(entry, i), loc, session);
            if (useBatch) {
              session.getBatcher().addToBatch(expectation);
            } else {
              expectation.verifyOutcome(st.executeUpdate(), st, -1);
            }
            count++;
          }
          i++;
        }
      } catch (SQLException sqle) {
        if (useBatch) {
          session.getBatcher().abortBatch(sqle);
        }
        throw sqle;
      } finally {
        if (!useBatch) {
          session.getBatcher().closeStatement(st);
        }
      }
    }

    if (isRowInsertEnabled()) {
      Expectation expectation = Expectations.appropriateExpectation(getInsertCheckStyle());
      boolean callable = isInsertCallable();
      boolean useBatch = expectation.canBeBatched();
      String sql = getSQLInsertRowString();
      PreparedStatement st = null;
      // now update all changed or added rows fks
      try {
        int i = 0;
        Iterator entries = collection.entries(this);
        while (entries.hasNext()) {
          Object entry = entries.next();
          int offset = 1;
          if (collection.needsUpdating(entry, i, elementType)) {
            if (useBatch) {
              if (st == null) {
                if (callable) {
                  st = session.getBatcher().prepareBatchCallableStatement(sql);
                } else {
                  st = session.getBatcher().prepareBatchStatement(sql);
                }
              }
            } else {
              if (callable) {
                st = session.getBatcher().prepareCallableStatement(sql);
              } else {
                st = session.getBatcher().prepareStatement(sql);
              }
            }
            offset += expectation.prepare(st);

            int loc = writeKey(st, id, offset, session);
            if (hasIndex && !indexContainsFormula) {
              loc = writeIndexToWhere(st, collection.getIndex(entry, i, this), loc, session);
            }
            writeElementToWhere(st, collection.getElement(entry), loc, session);

            if (useBatch) {
              session.getBatcher().addToBatch(expectation);
            } else {
              expectation.verifyOutcome(st.executeUpdate(), st, -1);
            }
            count++;
          }
          i++;
        }
      } catch (SQLException sqle) {
        if (useBatch) {
          session.getBatcher().abortBatch(sqle);
        }
        throw sqle;
      } finally {
        if (!useBatch) {
          session.getBatcher().closeStatement(st);
        }
      }
    }

    return count;
  } catch (SQLException sqle) {
    throw getFactory()
        .getSQLExceptionHelper()
        .convert(
            sqle,
            "could not update collection rows: "
                + MessageHelper.collectionInfoString(this, id, getFactory()),
            getSQLInsertRowString());
  }
}
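// Both doUpdateRows variants above follow the same JDBC shape: prepare the row statement once,
// bind parameters per entry, and either queue the row into a batch or execute it immediately.
// Below is a minimal plain-JDBC sketch of that prepare-once / add-to-batch pattern; the table and
// column names are hypothetical, and Hibernate's Batcher/Expectation machinery is left out.
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class RowBatchSketch {
  /** Nulls out the foreign key of removed child rows in one batched round trip. */
  static int clearChildFks(Connection con, long parentId, List<Long> removedChildIds)
      throws SQLException {
    String sql = "update child set parent_id = null where parent_id = ? and id = ?";
    try (PreparedStatement st = con.prepareStatement(sql)) {
      for (Long childId : removedChildIds) {
        st.setLong(1, parentId); // analogous to writeKey(...)
        st.setLong(2, childId);  // analogous to writeElementToWhere(...)
        st.addBatch();           // analogous to batcher.addToBatch(expectation)
      }
      int[] results = st.executeBatch();
      int count = 0;
      for (int n : results) {
        count += Math.max(n, 0); // SUCCESS_NO_INFO is negative; count it as zero here
      }
      return count;
    }
  }
}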
/**
 * Register a <tt>PersistentCollection</tt> object for an array. Associates a holder with an
 * array - MUST be called after loading array, since the array instance is not created until
 * endLoad().
 */
public void addCollectionHolder(PersistentCollection holder) {
  // TODO: refactor + make this method private
  arrayHolders.put(holder.getValue(), holder);
}
/** Add a detached uninitialized collection. */
public void addUninitializedDetachedCollection(
    CollectionPersister persister, PersistentCollection collection) {
  CollectionEntry ce = new CollectionEntry(persister, collection.getKey());
  addCollection(collection, ce, collection.getKey());
}
public static StatefulPersistenceContext deserialize(
    ObjectInputStream ois, SessionImplementor session) throws IOException, ClassNotFoundException {
  log.trace("deserializing persistent-context");
  StatefulPersistenceContext rtn = new StatefulPersistenceContext(session);

  // during deserialization, we need to reconnect all proxies and
  // collections to this session, as well as the EntityEntry and
  // CollectionEntry instances; these associations are transient
  // because serialization is used for different things.

  try {
    // todo : we can actually just determine this from the incoming EntityEntry-s
    rtn.hasNonReadOnlyEntities = ois.readBoolean();

    int count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] entitiesByKey entries");
    rtn.entitiesByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      rtn.entitiesByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] entitiesByUniqueKey entries");
    rtn.entitiesByUniqueKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      rtn.entitiesByUniqueKey.put(EntityUniqueKey.deserialize(ois, session), ois.readObject());
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] proxiesByKey entries");
    rtn.proxiesByKey =
        new ReferenceMap(
            ReferenceMap.HARD,
            ReferenceMap.WEAK,
            count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count,
            .75f);
    for (int i = 0; i < count; i++) {
      EntityKey ek = EntityKey.deserialize(ois, session);
      Object proxy = ois.readObject();
      if (proxy instanceof HibernateProxy) {
        ((HibernateProxy) proxy).getHibernateLazyInitializer().setSession(session);
        rtn.proxiesByKey.put(ek, proxy);
      } else {
        // otherwise, the proxy was pruned during the serialization process
        log.trace("encountered pruned proxy");
      }
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] entitySnapshotsByKey entries");
    rtn.entitySnapshotsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      rtn.entitySnapshotsByKey.put(EntityKey.deserialize(ois, session), ois.readObject());
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] entityEntries entries");
    rtn.entityEntries =
        IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      Object entity = ois.readObject();
      EntityEntry entry = EntityEntry.deserialize(ois, session);
      rtn.entityEntries.put(entity, entry);
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] collectionsByKey entries");
    rtn.collectionsByKey = new HashMap(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      rtn.collectionsByKey.put(CollectionKey.deserialize(ois, session), ois.readObject());
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] collectionEntries entries");
    rtn.collectionEntries =
        IdentityMap.instantiateSequenced(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      final PersistentCollection pc = (PersistentCollection) ois.readObject();
      final CollectionEntry ce = CollectionEntry.deserialize(ois, session);
      pc.setCurrentSession(session);
      rtn.collectionEntries.put(pc, ce);
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] arrayHolders entries");
    rtn.arrayHolders = IdentityMap.instantiate(count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count);
    for (int i = 0; i < count; i++) {
      rtn.arrayHolders.put(ois.readObject(), ois.readObject());
    }

    count = ois.readInt();
    log.trace("starting deserialization of [" + count + "] nullifiableEntityKeys entries");
    rtn.nullifiableEntityKeys = new HashSet();
    for (int i = 0; i < count; i++) {
      rtn.nullifiableEntityKeys.add(EntityKey.deserialize(ois, session));
    }
  } catch (HibernateException he) {
    throw new InvalidObjectException(he.getMessage());
  }

  return rtn;
}
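// Each map above is read back as an int count followed by that many (key, value) pairs, with the
// count also used as a sizing hint. A minimal JDK-only sketch of the same count-prefixed wire
// format is shown below; the class and method names are hypothetical and plain String keys stand
// in for Hibernate's EntityKey/CollectionKey types.
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;

public final class CountPrefixedMapFormat {
  static void write(ObjectOutputStream oos, Map<String, Object> map) throws IOException {
    oos.writeInt(map.size()); // the count prefix the reader loops on
    for (Map.Entry<String, Object> e : map.entrySet()) {
      oos.writeObject(e.getKey());
      oos.writeObject(e.getValue());
    }
  }

  static Map<String, Object> read(ObjectInputStream ois)
      throws IOException, ClassNotFoundException {
    int count = ois.readInt();
    // size the map up front, mirroring the INIT_COLL_SIZE sizing hint used above
    Map<String, Object> map = new HashMap<>(Math.max(count, 16));
    for (int i = 0; i < count; i++) {
      String key = (String) ois.readObject();
      map.put(key, ois.readObject());
    }
    return map;
  }
}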