/**
 * Loads the appropriate {@link DatastoreProvider}. Driven by the {@link #DATASTORE_PROVIDER}
 * property which can receive
 *
 * <ul>
 * <li>a {@code DatastoreProvider} instance
 * <li>a {@code DatastoreProvider} class
 * <li>a string representing the {@code DatastoreProvider} class
 * <li>a string representing one of the datastore provider shortcuts
 * </ul>
 *
 * If the property is not set, Infinispan is used by default.
 *
 * <p>This is a {@link SessionFactoryServiceInitiator} because a {@code DatastoreProvider} can be a
 * {@link org.hibernate.ogm.datastore.StartStoppable} service - the {@link
 * org.hibernate.service.spi.SessionFactoryServiceRegistry} calls {@code StartStoppable} and passes
 * the {@link org.hibernate.SessionFactory}
 *
 * @author Emmanuel Bernard <*****@*****.**>
 * @author Davide D'Alto <*****@*****.**>
 */
public final class DatastoreProviderInitiator implements SessionFactoryServiceInitiator<DatastoreProvider> {

    public static final String DATASTORE_PROVIDER = "hibernate.ogm.datastore.provider";
    public static final DatastoreProviderInitiator INSTANCE = new DatastoreProviderInitiator();

    private static final Log log = LoggerFactory.make();
    private static final String DEFAULT_DATASTORE_PROVIDER = "infinispan";

    @Override
    public Class<DatastoreProvider> getServiceInitiated() {
        return DatastoreProvider.class;
    }

    @Override
    public DatastoreProvider initiateService(
            SessionFactoryImplementor sessionFactory, Configuration configuration, ServiceRegistryImplementor registry) {
        ConfigurationPropertyReader propertyReader =
                new ConfigurationPropertyReader(configuration, registry.getService(ClassLoaderService.class));

        DatastoreProvider datastoreProvider = propertyReader
                .property(DATASTORE_PROVIDER, DatastoreProvider.class)
                .withDefaultImplementation(DEFAULT_DATASTORE_PROVIDER)
                .withShortNameResolver(DatastoreProviderShortNameResolver.INSTANCE)
                .getValue();

        log.useDatastoreProvider(datastoreProvider.getClass().getName());
        return datastoreProvider;
    }

    @Override
    public DatastoreProvider initiateService(
            SessionFactoryImplementor sessionFactory, MetadataImplementor metadata, ServiceRegistryImplementor registry) {
        throw new UnsupportedOperationException(
                "Cannot create " + DatastoreProvider.class.getName() + " service using metadata");
    }

    private static class DatastoreProviderShortNameResolver implements ShortNameResolver {

        private static final DatastoreProviderShortNameResolver INSTANCE = new DatastoreProviderShortNameResolver();

        @Override
        public boolean isShortName(String name) {
            return AvailableDatastoreProvider.isShortName(name);
        }

        @Override
        public String resolve(String shortName) {
            return AvailableDatastoreProvider.byShortName(shortName).getDatastoreProviderClassName();
        }
    }
}
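For illustration, a sketch of how the property consumed by this initiator might be set programmatically. The shortcut value "map", the example class name and the use of OgmConfiguration are assumptions made for the example only; any DatastoreProvider instance, class, or fully-qualified class name would work as described in the Javadoc above.

// Hypothetical configuration snippet, shown only to illustrate the property handled by the initiator above.
OgmConfiguration cfg = new OgmConfiguration();

// a short name, resolved through DatastoreProviderShortNameResolver ("map" is assumed to be a registered shortcut)
cfg.setProperty( DatastoreProviderInitiator.DATASTORE_PROVIDER, "map" );

// alternatively a fully-qualified DatastoreProvider class name could be passed:
// cfg.setProperty( DatastoreProviderInitiator.DATASTORE_PROVIDER, "com.example.MyDatastoreProvider" );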
/**
 * @author Sanne Grinovero <[email protected]> (C) 2011 Red Hat Inc.
 * @author Emmanuel Bernard <[email protected]>
 */
public class MapPessimisticWriteLockingStrategy implements LockingStrategy {

    private static final Log log = LoggerFactory.make();

    protected final Lockable lockable;
    protected final LockMode lockMode;
    protected final GridType identifierGridType;

    private volatile MapDatastoreProvider provider;

    public MapPessimisticWriteLockingStrategy(Lockable lockable, LockMode lockMode) {
        this.lockable = lockable;
        this.lockMode = lockMode;
        TypeTranslator typeTranslator =
                lockable.getFactory().getServiceRegistry().getService(TypeTranslator.class);
        this.identifierGridType = typeTranslator.getType(lockable.getIdentifierType());
    }

    @Override
    public void lock(Serializable id, Object version, Object object, int timeout, SessionImplementor session)
            throws StaleObjectStateException, JDBCException {
        MapDatastoreProvider dataStore = getProvider(session);
        EntityKey key = EntityKeyBuilder.fromData(
                ((OgmEntityPersister) lockable).getRootEntityKeyMetadata(), identifierGridType, id, session);

        dataStore.writeLock(key, timeout);
        // FIXME check the version number as well and raise an optimistic lock exception if there is an
        // issue JPA 2 spec: 3.4.4.2
        // (Comment by Emmanuel)
    }

    protected final MapDatastoreProvider getProvider(SessionImplementor session) {
        if (provider == null) {
            DatastoreProvider service =
                    session.getFactory().getServiceRegistry().getService(DatastoreProvider.class);
            if (service instanceof MapDatastoreProvider) {
                provider = (MapDatastoreProvider) service;
            } else {
                log.unexpectedDatastoreProvider(service.getClass(), MapDatastoreProvider.class);
            }
        }
        return provider;
    }
}
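To show where this strategy typically comes from, here is a rough sketch of a getLockingStrategy implementation of the kind OgmEntityPersister#generateLocker delegates to further below; the dialect shape and the single handled lock mode are assumptions, not code taken from this file.

// Hypothetical GridDialect excerpt, for illustration only.
@Override
public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) {
    if ( lockMode == LockMode.PESSIMISTIC_WRITE ) {
        return new MapPessimisticWriteLockingStrategy( lockable, lockMode );
    }
    // other lock modes would map to their own strategies in a real dialect
    return null;
}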
/**
 * A queue of {@link Operation}s.
 *
 * <p>It keeps track of the elements that are going to be affected by an {@link
 * UpdateTupleOperation}. The queue can be closed; in that case it will throw an exception when
 * trying to add or poll an operation.
 *
 * @author Guillaume Scheibel <*****@*****.**>
 * @author Davide D'Alto <*****@*****.**>
 */
public class OperationsQueue {

    /** A queue that is always closed. */
    public static final OperationsQueue CLOSED_QUEUE =
            new OperationsQueue() {
                @Override
                public boolean isClosed() {
                    return true;
                }
            };

    private static final Log log = LoggerFactory.make();

    private final Queue<Operation> operations = new LinkedList<Operation>();
    private final Set<EntityKey> entityKeys = new HashSet<EntityKey>();
    private boolean closed = false;

    public void add(UpdateTupleOperation operation) {
        validate();
        entityKeys.add(operation.getEntityKey());
        addOperation(operation);
    }

    public void add(Operation operation) {
        validate();
        addOperation(operation);
    }

    private void validate() {
        if (isClosed()) {
            throw log.closedOperationQueue();
        }
    }

    private void addOperation(Operation operation) {
        log.debug("Add batched operation " + operation);
        operations.add(operation);
    }

    public Operation poll() {
        validate();
        Operation operation = operations.poll();
        if (operation instanceof UpdateTupleOperation) {
            // keep the key set in sync with the queue: the set holds EntityKeys, not Operations
            entityKeys.remove(((UpdateTupleOperation) operation).getEntityKey());
        }
        return operation;
    }

    public void close() {
        entityKeys.clear();
        operations.clear();
        closed = true;
    }

    public boolean isClosed() {
        return closed;
    }

    /**
     * @param key the {@link EntityKey} that identifies the element
     * @return true if an {@link UpdateTupleOperation} is bound to the key, false otherwise
     */
    public boolean contains(EntityKey key) {
        return entityKeys.contains(key);
    }

    /** @return the length of the queue */
    public int size() {
        return operations.size();
    }
}
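A short usage sketch of the queue contract described in the Javadoc above; updateTupleOperation and someOtherOperation are placeholders for operations built elsewhere by a dialect, not real variables from this codebase.

// Illustrative only: draining the queue as a batching dialect might do.
OperationsQueue queue = new OperationsQueue();

queue.add( updateTupleOperation );   // also records updateTupleOperation.getEntityKey()
queue.add( someOtherOperation );     // plain operations are queued without key tracking

boolean pending = queue.contains( updateTupleOperation.getEntityKey() );   // true until polled

while ( queue.size() > 0 ) {
    Operation next = queue.poll();   // FIFO order
    // ... execute 'next' against the datastore ...
}

queue.close();   // any further add() or poll() now throws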
/** * Basic functionality for persisting an entity using OGM. TODO most of the non persister code SIC * comes from {@link org.hibernate.persister.entity.UnionSubclassEntityPersister} * * @see javax.persistence.InheritanceType * @author Emmanuel Bernard * @author Davide D'Alto */ public abstract class OgmEntityPersister extends AbstractEntityPersister implements EntityPersister { private static final int TABLE_SPAN = 1; private static final Log log = LoggerFactory.make(); private final EntityDiscriminator discriminator; private final String tableName; private final String[] constraintOrderedTableNames; private final String[][] constraintOrderedKeyColumnNames; private final String[] spaces; private final String[] subclassSpaces; private final GridType[] gridPropertyTypes; private final GridType gridVersionType; private final GridType gridIdentifierType; private final String jpaEntityName; private final TupleContext tupleContext; // service references private final GridDialect gridDialect; private final EntityKeyMetadata entityKeyMetadata; private final Map<String, AssociationKeyMetadata> associationKeyMetadataPerPropertyName; private final OptionsService optionsService; OgmEntityPersister( final PersistentClass persistentClass, final EntityRegionAccessStrategy cacheAccessStrategy, final NaturalIdRegionAccessStrategy naturalIdRegionAccessStrategy, final SessionFactoryImplementor factory, final Mapping mapping, final EntityDiscriminator discriminator) throws HibernateException { super(persistentClass, cacheAccessStrategy, naturalIdRegionAccessStrategy, factory); if (log.isTraceEnabled()) { log.tracef("Creating OgmEntityPersister for %s", persistentClass.getClassName()); } ServiceRegistryImplementor serviceRegistry = factory.getServiceRegistry(); this.gridDialect = serviceRegistry.getService(GridDialect.class); this.optionsService = serviceRegistry.getService(OptionsService.class); tableName = persistentClass .getTable() .getQualifiedName( factory.getDialect(), factory.getSettings().getDefaultCatalogName(), factory.getSettings().getDefaultSchemaName()); this.discriminator = discriminator; // SPACES // TODO: i'm not sure, but perhaps we should exclude // abstract denormalized tables? 
int spacesSize = 1 + persistentClass.getSynchronizedTables().size(); spaces = new String[spacesSize]; spaces[0] = tableName; @SuppressWarnings("unchecked") Iterator<String> syncTablesIter = persistentClass.getSynchronizedTables().iterator(); for (int i = 1; i < spacesSize; i++) { spaces[i] = syncTablesIter.next(); } HashSet<String> subclassTables = new HashSet<String>(); Iterator<Table> tableIter = persistentClass.getSubclassTableClosureIterator(); while (tableIter.hasNext()) { Table table = tableIter.next(); subclassTables.add( table.getQualifiedName( factory.getDialect(), factory.getSettings().getDefaultCatalogName(), factory.getSettings().getDefaultSchemaName())); } subclassSpaces = ArrayHelper.toStringArray(subclassTables); if (isMultiTable()) { int idColumnSpan = getIdentifierColumnSpan(); ArrayList<String> tableNames = new ArrayList<String>(); ArrayList<String[]> keyColumns = new ArrayList<String[]>(); if (!isAbstract()) { tableNames.add(tableName); keyColumns.add(getIdentifierColumnNames()); } @SuppressWarnings("unchecked") Iterator<Table> iter = persistentClass.getSubclassTableClosureIterator(); while (iter.hasNext()) { Table tab = iter.next(); if (!tab.isAbstractUnionTable()) { String tableName = tab.getQualifiedName( factory.getDialect(), factory.getSettings().getDefaultCatalogName(), factory.getSettings().getDefaultSchemaName()); tableNames.add(tableName); String[] key = new String[idColumnSpan]; @SuppressWarnings("unchecked") Iterator<Column> citer = tab.getPrimaryKey().getColumnIterator(); for (int k = 0; k < idColumnSpan; k++) { key[k] = citer.next().getQuotedName(factory.getDialect()); } keyColumns.add(key); } } constraintOrderedTableNames = ArrayHelper.toStringArray(tableNames); constraintOrderedKeyColumnNames = ArrayHelper.to2DStringArray(keyColumns); } else { constraintOrderedTableNames = new String[] {tableName}; constraintOrderedKeyColumnNames = new String[][] {getIdentifierColumnNames()}; } initPropertyPaths(mapping); // Grid related metadata TypeTranslator typeTranslator = serviceRegistry.getService(TypeTranslator.class); final Type[] types = getPropertyTypes(); final int length = types.length; gridPropertyTypes = new GridType[length]; for (int index = 0; index < length; index++) { gridPropertyTypes[index] = typeTranslator.getType(types[index]); } gridVersionType = typeTranslator.getType(getVersionType()); gridIdentifierType = typeTranslator.getType(getIdentifierType()); List<String> columnNames = new ArrayList<String>(); for (int propertyCount = 0; propertyCount < this.getPropertySpan(); propertyCount++) { String[] property = this.getPropertyColumnNames(propertyCount); for (int columnCount = 0; columnCount < property.length; columnCount++) { columnNames.add(property[columnCount]); } } if (discriminator.getColumnName() != null) { columnNames.add(discriminator.getColumnName()); } this.tupleContext = new TupleContext(columnNames, optionsService.context().getEntityOptions(getMappedClass())); jpaEntityName = persistentClass.getJpaEntityName(); entityKeyMetadata = new EntityKeyMetadata(getTableName(), getIdentifierColumnNames()); // load unique key association key metadata associationKeyMetadataPerPropertyName = new HashMap<String, AssociationKeyMetadata>(); initAssociationKeyMetadata(); initCustomSQLStrings(); } // Required to avoid null pointer errors when super.postInstantiate() is called private void initCustomSQLStrings() { customSQLInsert = new String[TABLE_SPAN]; customSQLUpdate = new String[TABLE_SPAN]; customSQLDelete = new String[TABLE_SPAN]; } private void 
initAssociationKeyMetadata() { for (int index = 0; index < getPropertySpan(); index++) { final Type uniqueKeyType = getPropertyTypes()[index]; if (uniqueKeyType.isEntityType()) { String[] propertyColumnNames = getPropertyColumnNames(index); AssociationKeyMetadata metadata = new AssociationKeyMetadata(getTableName(), propertyColumnNames); metadata.setRowKeyColumnNames( buildRowKeyColumnNamesForStarToOne(this, propertyColumnNames)); associationKeyMetadataPerPropertyName.put(getPropertyNames()[index], metadata); } } } @Override protected void createUniqueKeyLoaders() throws MappingException { // Avoid the execution of super.createUniqueLoaders() } @Override protected void doPostInstantiate() {} public GridType getGridIdentifierType() { return gridIdentifierType; } public EntityKeyMetadata getEntityKeyMetadata() { return entityKeyMetadata; } public EntityKeyMetadata getRootEntityKeyMetadata() { // we only support single table and table per concrete class strategies // in this case the root to lock to is the entity itself // see its use in read locking strategy. return entityKeyMetadata; } /** This snapshot is meant to be used when updating data. */ @Override public Object[] getDatabaseSnapshot(Serializable id, SessionImplementor session) throws HibernateException { if (log.isTraceEnabled()) { log.trace( "Getting current persistent state for: " + MessageHelper.infoString(this, id, getFactory())); } // snapshot is a Map in the end final Tuple resultset = getResultsetById(id, session); // if there is no resulting row, return null if (resultset == null || resultset.getSnapshot().isEmpty()) { return null; } // otherwise return the "hydrated" state (ie. associations are not resolved) GridType[] types = gridPropertyTypes; Object[] values = new Object[types.length]; boolean[] includeProperty = getPropertyUpdateability(); for (int i = 0; i < types.length; i++) { if (includeProperty[i]) { values[i] = types[i].hydrate( resultset, getPropertyAliases("", i), session, null); // null owner ok?? } } return values; } private Tuple getResultsetById(Serializable id, SessionImplementor session) { final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session); final Tuple resultset = gridDialect.getTuple(key, this.getTupleContext()); return resultset; } @Override public Object initializeLazyProperty(String fieldName, Object entity, SessionImplementor session) throws HibernateException { final Serializable id = session.getContextEntityIdentifier(entity); final EntityEntry entry = session.getPersistenceContext().getEntry(entity); if (entry == null) { throw new HibernateException("entity is not associated with the session: " + id); } if (log.isTraceEnabled()) { log.trace( "initializing lazy properties of: " + MessageHelper.infoString(this, id, getFactory()) + ", field access: " + fieldName); } if (hasCache()) { CacheKey cacheKey = session.generateCacheKey(id, getIdentifierType(), getEntityName()); Object ce = getCacheAccessStrategy().get(cacheKey, session.getTimestamp()); if (ce != null) { CacheEntry cacheEntry = (CacheEntry) getCacheEntryStructure().destructure(ce, getFactory()); if (!cacheEntry.areLazyPropertiesUnfetched()) { // note early exit here: return initializeLazyPropertiesFromCache(fieldName, entity, session, entry, cacheEntry); } } } return initializeLazyPropertiesFromDatastore(fieldName, entity, session, id, entry); } // FIXME cache should use Core Types or Grid Types? // Make superclasses method protected?? 
private Object initializeLazyPropertiesFromCache( final String fieldName, final Object entity, final SessionImplementor session, final EntityEntry entry, final CacheEntry cacheEntry) { throw new NotSupportedException("OGM-9", "Lazy properties not supported in OGM"); } private Object initializeLazyPropertiesFromDatastore( final String fieldName, final Object entity, final SessionImplementor session, final Serializable id, final EntityEntry entry) { throw new NotSupportedException("OGM-9", "Lazy properties not supported in OGM"); } /** Retrieve the version number */ @Override public Object getCurrentVersion(Serializable id, SessionImplementor session) throws HibernateException { if (log.isTraceEnabled()) { log.trace("Getting version: " + MessageHelper.infoString(this, id, getFactory())); } final Tuple resultset = getResultsetById(id, session); if (resultset == null) { return null; } else { return gridVersionType.nullSafeGet(resultset, getVersionColumnName(), session, null); } } @Override public Object forceVersionIncrement( Serializable id, Object currentVersion, SessionImplementor session) { if (!isVersioned()) { throw new AssertionFailure("cannot force version increment on non-versioned entity"); } if (isVersionPropertyGenerated()) { // the difficulty here is exactly what do we update in order to // force the version to be incremented in the db... throw new HibernateException( "LockMode.FORCE is currently not supported for generated version properties"); } Object nextVersion = getVersionType().next(currentVersion, session); if (log.isTraceEnabled()) { log.trace( "Forcing version increment [" + MessageHelper.infoString(this, id, getFactory()) + "; " + getVersionType().toLoggableString(currentVersion, getFactory()) + " -> " + getVersionType().toLoggableString(nextVersion, getFactory()) + "]"); } /* * We get the value from the grid and compare the version values before putting the next version in * Contrary to the database version, there is * TODO should we use cache.replace() it seems more expensive to pass the resultset around "just" the atomicity of the operation */ final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session); final Tuple resultset = gridDialect.getTuple(key, getTupleContext()); checkVersionAndRaiseSOSE(id, currentVersion, session, resultset); gridVersionType.nullSafeSet( resultset, nextVersion, new String[] {getVersionColumnName()}, session); gridDialect.updateTuple(resultset, key, getTupleContext()); return nextVersion; } @Override public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) { return new DynamicFilterAliasGenerator(new String[] {tableName}, rootAlias); } // TODO move that code to the EntityLoader as it is in AbstractEntityPersister? 
@Override public Object loadByUniqueKey(String propertyName, Object uniqueKey, SessionImplementor session) throws HibernateException { // we get the property type for an associated entity final int propertyIndex = getPropertyIndex(propertyName); final GridType gridUniqueKeyType = getUniqueKeyTypeFromAssociatedEntity(propertyIndex, propertyName); // get the associated property index (to get its column names) // find the ids per unique property name AssociationKeyMetadata associationKeyMetadata = associationKeyMetadataPerPropertyName.get(propertyName); if (associationKeyMetadata == null) { throw new AssertionFailure("loadByUniqueKey on a non EntityType:" + propertyName); } AssociationPersister associationPersister = new AssociationPersister(getMappedClass()) .gridDialect(gridDialect) .key(uniqueKey) .keyGridType(gridUniqueKeyType) // does not set .collectionPersister as it does not make sense here for an entity .associationKeyMetadata(associationKeyMetadata) .session(session) .propertyType(getPropertyTypes()[propertyIndex]); final Association ids = associationPersister.getAssociationOrNull(); if (ids == null || ids.size() == 0) { return null; } else if (ids.size() == 1) { // EntityLoader#loadByUniqueKey uses a null object and LockMode.NONE // there is only one element in the list, so get the first Tuple tuple = ids.get(ids.getKeys().iterator().next()); final Serializable id = (Serializable) getGridIdentifierType().nullSafeGet(tuple, getIdentifierColumnNames(), session, null); return load(id, null, LockMode.NONE, session); } else { throw new AssertionFailure( "Loading by unique key but finding several matches: table:" + getTableName() + " property: " + propertyName + " value: " + uniqueKey); } } private GridType getUniqueKeyTypeFromAssociatedEntity(int propertyIndex, String propertyName) { GridType gridUniqueKeyType; // get the unique key type and if it's an entity type, get it's // identifier type final Type uniqueKeyType = getPropertyTypes()[propertyIndex]; if (uniqueKeyType.isEntityType()) { // we run under the assumption that we are fully in an OGM world EntityType entityType = (EntityType) uniqueKeyType; final OgmEntityPersister entityPersister = (OgmEntityPersister) entityType.getAssociatedJoinable(getFactory()); gridUniqueKeyType = entityPersister.getGridIdentifierType(); } else { throw new AssertionFailure("loadByUniqueKey on a non EntityType:" + propertyName); } return gridUniqueKeyType; } @Override protected void createLoaders() { Map<Object, Object> loaders = getLoaders(); loaders.put(LockMode.NONE, createEntityLoader(LockMode.NONE)); UniqueEntityLoader readLoader = createEntityLoader(LockMode.READ); loaders.put(LockMode.READ, readLoader); // TODO: inexact, what we really need to know is: are any outer joins used? boolean disableForUpdate = getSubclassTableSpan() > 1 && hasSubclasses() && !getFactory().getDialect().supportsOuterJoinForUpdate(); loaders.put( LockMode.UPGRADE, disableForUpdate ? readLoader : createEntityLoader(LockMode.UPGRADE)); loaders.put( LockMode.UPGRADE_NOWAIT, disableForUpdate ? readLoader : createEntityLoader(LockMode.UPGRADE_NOWAIT)); loaders.put(LockMode.FORCE, disableForUpdate ? readLoader : createEntityLoader(LockMode.FORCE)); loaders.put( LockMode.PESSIMISTIC_READ, disableForUpdate ? readLoader : createEntityLoader(LockMode.PESSIMISTIC_READ)); loaders.put( LockMode.PESSIMISTIC_WRITE, disableForUpdate ? readLoader : createEntityLoader(LockMode.PESSIMISTIC_WRITE)); loaders.put( LockMode.PESSIMISTIC_FORCE_INCREMENT, disableForUpdate ? 
readLoader : createEntityLoader(LockMode.PESSIMISTIC_FORCE_INCREMENT)); loaders.put(LockMode.OPTIMISTIC, createEntityLoader(LockMode.OPTIMISTIC)); loaders.put( LockMode.OPTIMISTIC_FORCE_INCREMENT, createEntityLoader(LockMode.OPTIMISTIC_FORCE_INCREMENT)); // FIXME handle cascading merge and refresh loaders.put( "merge", createEntityLoader(LockMode.READ) // new CascadeEntityLoader( this, CascadingAction.MERGE, getFactory() ) ); loaders.put( "refresh", createEntityLoader(LockMode.READ) // new CascadeEntityLoader( this, CascadingAction.REFRESH, getFactory() ) ); } @Override protected UniqueEntityLoader createEntityLoader( LockMode lockMode, LoadQueryInfluencers loadQueryInfluencers) throws MappingException { // FIXME add support to lock mode and loadQueryInfluencers return new OgmLoader(new OgmEntityPersister[] {this}); } @Override protected UniqueEntityLoader createEntityLoader( LockOptions lockOptions, LoadQueryInfluencers loadQueryInfluencers) throws MappingException { // FIXME add support to lock options and loadQueryInfluencers return new OgmLoader(new OgmEntityPersister[] {this}); } @Override protected UniqueEntityLoader createEntityLoader(LockMode lockMode) throws MappingException { return createEntityLoader(lockMode, LoadQueryInfluencers.NONE); } // TODO verify what to do with #check: Expectation seems to be very JDBC centric /** * Unmarshall the fields of a persistent instance from a result set, without resolving * associations or collections. Question: should this really be here, or should it be sent back to * Loader? */ public Object[] hydrate( final Tuple resultset, final Serializable id, final Object object, final Loadable rootLoadable, // We probably don't need suffixedColumns, use column names instead // final String[][] suffixedPropertyColumns, final boolean allProperties, final SessionImplementor session) throws HibernateException { if (log.isTraceEnabled()) { log.trace("Hydrating entity: " + MessageHelper.infoString(this, id, getFactory())); } final OgmEntityPersister rootPersister = (OgmEntityPersister) rootLoadable; final boolean hasDeferred = rootPersister.hasSequentialSelect(); boolean sequentialSelectEmpty = false; if (hasDeferred) { // note: today we don't have sequential select in OGM // check AbstractEntityPersister#hydrate for the detail } final String[] propNames = getPropertyNames(); final Type[] types = getPropertyTypes(); final Object[] values = new Object[types.length]; final boolean[] laziness = getPropertyLaziness(); final String[] propSubclassNames = getSubclassPropertySubclassNameClosure(); final boolean[] propertySelectable = getPropertySelectable(); for (int i = 0; i < types.length; i++) { values[i] = hydrateValue( resultset, session, object, i, propertySelectable, allProperties, laziness, hasDeferred, rootPersister, propNames, propSubclassNames, sequentialSelectEmpty); } return values; } private Object hydrateValue( Tuple resultset, SessionImplementor session, Object object, int index, boolean[] propertySelectable, boolean allProperties, boolean[] laziness, boolean hasDeferred, OgmEntityPersister rootPersister, String[] propNames, String[] propSubclassNames, boolean sequentialSelectEmpty) { Object value; if (!propertySelectable[index]) { value = BackrefPropertyAccessor.UNKNOWN; } else if (allProperties || !laziness[index]) { // decide which ResultSet to get the property value from: final boolean propertyIsDeferred = hasDeferred && rootPersister.isSubclassPropertyDeferred( propNames[index], propSubclassNames[index]); if (propertyIsDeferred && 
sequentialSelectEmpty) { value = null; } else { // FIXME We don't handle deferred property yet // final ResultSet propertyResultSet = propertyIsDeferred ? sequentialResultSet : rs; GridType[] gridTypes = gridPropertyTypes; final String[] cols; if (propertyIsDeferred) { cols = getPropertyAliases("", index); } else { // TODO What to do? // : suffixedPropertyColumns[i]; cols = getPropertyAliases("", index); } value = gridTypes[index].hydrate(resultset, cols, session, object); // null owner ok?? } } else { value = LazyPropertyInitializer.UNFETCHED_PROPERTY; } return value; } @Override public String[] getPropertyAliases(String suffix, int i) { // TODO do something about suffixes return getPropertyColumnNames(i); } @Override protected boolean useInsertSelectIdentity() { return false; } @Override protected Serializable insert( final Object[] fields, final boolean[] notNull, String sql, final Object object, final SessionImplementor session) throws HibernateException { throw new HibernateException("Cannot use a database generator with OGM"); } @Override protected LockingStrategy generateLocker(LockMode lockMode) { return gridDialect.getLockingStrategy(this, lockMode); } /** Update an object */ @Override public void update( final Serializable id, final Object[] fields, final int[] dirtyFields, final boolean hasDirtyCollection, final Object[] oldFields, final Object oldVersion, final Object object, final Object rowId, final SessionImplementor session) throws HibernateException { // note: dirtyFields==null means we had no snapshot, and we couldn't get one using // select-before-update // oldFields==null just means we had no snapshot to begin with (we might have used // select-before-update to get the dirtyFields) // TODO support "multi table" entities final boolean[] tableUpdateNeeded = getTableUpdateNeeded(dirtyFields, hasDirtyCollection); final int span = getTableSpan(); final boolean[] propsToUpdate; EntityEntry entry = session.getPersistenceContext().getEntry(object); // Ensure that an immutable or non-modifiable entity is not being updated unless it is // in the process of being deleted. if (entry == null && !isMutable()) { throw new IllegalStateException("Updating immutable entity that is not in session yet!"); } // we always use a dynamicUpdate model for Infinispan if (( // getEntityMetamodel().isDynamicUpdate() && dirtyFields != null)) { propsToUpdate = getPropertiesToUpdate(dirtyFields, hasDirtyCollection); // don't need to check laziness (dirty checking algorithm handles that) } else if (!isModifiableEntity(entry)) { // TODO does that apply to OGM? // We need to generate UPDATE SQL when a non-modifiable entity (e.g., read-only or immutable) // needs: // - to have references to transient entities set to null before being deleted // - to have version incremented do to a "dirty" association // If dirtyFields == null, then that means that there are no dirty properties to // to be updated; an empty array for the dirty fields needs to be passed to // getPropertiesToUpdate() instead of null. propsToUpdate = getPropertiesToUpdate( (dirtyFields == null ? 
ArrayHelper.EMPTY_INT_ARRAY : dirtyFields), hasDirtyCollection); // don't need to check laziness (dirty checking algorithm handles that) } else { // For the case of dynamic-update="false", or no snapshot, we update all properties // TODO handle lazy propsToUpdate = getPropertyUpdateability(object); } final SessionFactoryImplementor factory = getFactory(); if (log.isTraceEnabled()) { log.trace("Updating entity: " + MessageHelper.infoString(this, id, factory)); if (isVersioned()) { log.trace( "Existing version: " + oldVersion + " -> New version: " + fields[getVersionProperty()]); } } for (int j = 0; j < span; j++) { // Now update only the tables with dirty properties (and the table with the version number) if (tableUpdateNeeded[j]) { final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session); Tuple resultset = gridDialect.getTuple(key, this.getTupleContext()); final boolean useVersion = j == 0 && isVersioned(); resultset = createNewResultSetIfNull(key, resultset, id, session); final EntityMetamodel entityMetamodel = getEntityMetamodel(); // Write any appropriate versioning conditional parameters if (useVersion && entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.VERSION) { if (checkVersion(propsToUpdate)) { checkVersionAndRaiseSOSE(id, oldVersion, session, resultset); } } else if (isAllOrDirtyOptLocking() && oldFields != null) { boolean[] versionability = getPropertyVersionability(); // TODO: is this really necessary???? boolean[] includeOldField = entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.ALL ? getPropertyUpdateability() : propsToUpdate; // TODO do a diff on the properties value from resultset and the dirty value GridType[] types = gridPropertyTypes; for (int i = 0; i < entityMetamodel.getPropertySpan(); i++) { boolean include = includeOldField[i] && isPropertyOfTable(i, j) && versionability[i]; // TODO: is this really necessary???? if (include) { final GridType type = types[i]; // FIXME what do do with settable? 
boolean[] settable = type.toColumnNullness(oldFields[i], factory); final Object snapshotValue = type.nullSafeGet(resultset, getPropertyColumnNames(i), session, object); comparePropertyAndRaiseSOSE( id, oldFields[i], factory, !type.isEqual(oldFields, snapshotValue, factory)); } } } // dehydrate dehydrate(resultset, fields, propsToUpdate, getPropertyColumnUpdateable(), j, id, session); gridDialect.updateTuple(resultset, key, getTupleContext()); } } } // Copied from AbstractEntityPersister private boolean isAllOrDirtyOptLocking() { EntityMetamodel entityMetamodel = getEntityMetamodel(); return entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.DIRTY || entityMetamodel.getOptimisticLockStyle() == OptimisticLockStyle.ALL; } private void comparePropertyAndRaiseSOSE( Serializable id, Object oldField, SessionFactoryImplementor factory, boolean b) { // TODO support other entity modes if (b) { if (factory.getStatistics().isStatisticsEnabled()) { factory.getStatisticsImplementor().optimisticFailure(getEntityName()); } throw new StaleObjectStateException(getEntityName(), id); } } public void checkVersionAndRaiseSOSE( Serializable id, Object oldVersion, SessionImplementor session, Tuple resultset) { final Object resultSetVersion = gridVersionType.nullSafeGet(resultset, getVersionColumnName(), session, null); final SessionFactoryImplementor factory = getFactory(); if (!gridVersionType.isEqual(oldVersion, resultSetVersion, factory)) { if (factory.getStatistics().isStatisticsEnabled()) { factory.getStatisticsImplementor().optimisticFailure(getEntityName()); } throw new StaleObjectStateException(getEntityName(), id); } } private void dehydrate( Tuple resultset, final Object[] fields, boolean[] includeProperties, boolean[][] includeColumns, int tableIndex, Serializable id, SessionImplementor session) { new EntityDehydrator() .fields(fields) .gridPropertyTypes(gridPropertyTypes) .gridIdentifierType(gridIdentifierType) .id(id) .includeColumns(includeColumns) .includeProperties(includeProperties) .persister(this) .resultset(resultset) .session(session) .tableIndex(tableIndex) .gridDialect(gridDialect) .dehydrate(); } // TODO copy of AbstractEntityPersister#checkVersion due to visibility private boolean checkVersion(final boolean[] includeProperty) { return includeProperty[getVersionProperty()] || getEntityMetamodel().isVersionGenerated(); } // TODO make AbstractEntityPersister#isModifiableEntity protected instead private boolean isModifiableEntity(EntityEntry entry) { return (entry == null ? 
isMutable() : entry.isModifiableEntity()); } @Override public Serializable insert(Object[] fields, Object object, SessionImplementor session) throws HibernateException { throw new HibernateException( "Identifier values generated by the database are not supported in Hibernate OGM"); } @Override public void insert(Serializable id, Object[] fields, Object object, SessionImplementor session) throws HibernateException { final int span = getTableSpan(); // insert operations are always dynamic in OGM boolean[] propertiesToInsert = getPropertiesToInsert(fields); for (int j = 0; j < span; j++) { if (isInverseTable(j)) { return; } // note: it is conceptually possible that a UserType could map null to // a non-null value, so the following is arguable: if (isNullableTable(j) && isAllNull(fields, j)) { return; } if (log.isTraceEnabled()) { log.trace("Inserting entity: " + MessageHelper.infoString(this, id, getFactory())); if (j == 0 && isVersioned()) { log.trace("Version: " + Versioning.getVersion(fields, this)); } } final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session); Tuple resultset = gridDialect.getTuple(key, this.getTupleContext()); // add the discriminator if (j == 0) { if (resultset != null) { throw new HibernateException( "trying to insert an already existing entity: " + MessageHelper.infoString(this, id, getFactory())); } if (discriminator.isNeeded()) { resultset = createNewResultSetIfNull(key, resultset, id, session); resultset.put(getDiscriminatorColumnName(), getDiscriminatorValue()); } } resultset = createNewResultSetIfNull(key, resultset, id, session); // dehydrate dehydrate( resultset, fields, propertiesToInsert, getPropertyColumnInsertable(), j, id, session); gridDialect.updateTuple(resultset, key, getTupleContext()); } } @Override public String getDiscriminatorColumnName() { return discriminator.getColumnName(); } @Override protected String getDiscriminatorAlias() { return discriminator.getAlias(); } private Tuple createNewResultSetIfNull( EntityKey key, Tuple resultset, Serializable id, SessionImplementor session) { if (resultset == null) { resultset = gridDialect.createTuple(key, getTupleContext()); gridIdentifierType.nullSafeSet(resultset, id, getIdentifierColumnNames(), session); } return resultset; } // TODO AbstractEntityPersister#isAllNull copied because of visibility private boolean isAllNull(Object[] array, int tableNumber) { for (int i = 0; i < array.length; i++) { if (isPropertyOfTable(i, tableNumber) && array[i] != null) { return false; } } return true; } @Override public void delete(Serializable id, Object version, Object object, SessionImplementor session) throws HibernateException { final int span = getTableSpan(); if (span > 1) { throw new HibernateException( "Hibernate OGM does not yet support entities spanning multiple tables"); } final EntityMetamodel entityMetamodel = getEntityMetamodel(); boolean isImpliedOptimisticLocking = !entityMetamodel.isVersioned() && isAllOrDirtyOptLocking(); Object[] loadedState = null; if (isImpliedOptimisticLocking) { // need to treat this as if it where optimistic-lock="all" (dirty does *not* make sense); // first we need to locate the "loaded" state // // Note, it potentially could be a proxy, so doAfterTransactionCompletion the location the // safe way... 
org.hibernate.engine.spi.EntityKey key = session.generateEntityKey(id, this); Object entity = session.getPersistenceContext().getEntity(key); if (entity != null) { EntityEntry entry = session.getPersistenceContext().getEntry(entity); loadedState = entry.getLoadedState(); } } final EntityKey key = EntityKeyBuilder.fromPersister(this, id, session); final Tuple resultset = gridDialect.getTuple(key, this.getTupleContext()); final SessionFactoryImplementor factory = getFactory(); if (isImpliedOptimisticLocking && loadedState != null) { // we need to utilize dynamic delete statements for (int j = span - 1; j >= 0; j--) { boolean[] versionability = getPropertyVersionability(); // TODO do a diff on the properties value from resultset GridType[] types = gridPropertyTypes; for (int i = 0; i < entityMetamodel.getPropertySpan(); i++) { boolean include = isPropertyOfTable(i, j) && versionability[i]; if (include) { final GridType type = types[i]; final Object snapshotValue = type.nullSafeGet(resultset, getPropertyColumnNames(i), session, object); // TODO support other entity modes if (!type.isEqual(loadedState[i], snapshotValue, factory)) { if (factory.getStatistics().isStatisticsEnabled()) { factory.getStatisticsImplementor().optimisticFailure(getEntityName()); } throw new StaleObjectStateException(getEntityName(), id); } } } } } else { if (entityMetamodel.isVersioned()) { checkVersionAndRaiseSOSE(id, version, session, resultset); } } for (int j = span - 1; j >= 0; j--) { if (isInverseTable(j)) { return; } if (log.isTraceEnabled()) { log.trace("Deleting entity: " + MessageHelper.infoString(this, id, factory)); if (j == 0 && isVersioned()) { log.trace("Version: " + version); } } // delete association information // needs to be executed before the tuple removal because the AtomicMap in ISPN is cleared upon // removal new EntityDehydrator() .gridDialect(gridDialect) .gridPropertyTypes(gridPropertyTypes) .gridIdentifierType(gridIdentifierType) .id(id) .persister(this) .resultset(resultset) .session(session) .tableIndex(j) .onlyRemovePropertyMetadata() .dehydrate(); gridDialect.removeTuple(key, getTupleContext()); } } @Override protected int[] getSubclassColumnTableNumberClosure() { return new int[getSubclassColumnClosure().length]; } @Override protected int[] getSubclassFormulaTableNumberClosure() { return new int[getSubclassFormulaClosure().length]; } @Override public String getDiscriminatorSQLValue() { return discriminator.getSqlValue(); } @Override public String[] getConstraintOrderedTableNameClosure() { return constraintOrderedTableNames; } @Override public String[][] getContraintOrderedTableKeyColumnClosure() { return constraintOrderedKeyColumnNames; } @Override public String getSubclassTableName(int j) { if (j != 0) { throw new AssertionFailure("only one table"); } return tableName; } @Override protected String[] getSubclassTableKeyColumns(int j) { if (j != 0) { throw new AssertionFailure("only one table"); } return getIdentifierColumnNames(); } @Override protected boolean isClassOrSuperclassTable(int j) { if (j != 0) { throw new AssertionFailure("only one table"); } return true; } @Override protected int getSubclassTableSpan() { return 1; } @Override protected int getTableSpan() { return TABLE_SPAN; } @Override protected boolean isTableCascadeDeleteEnabled(int j) { return false; } @Override protected String getTableName(int j) { return tableName; } @Override protected String[] getKeyColumns(int j) { return getIdentifierColumnNames(); } @Override protected boolean isPropertyOfTable(int property, int 
j) { return true; } @Override protected int[] getPropertyTableNumbersInSelect() { return new int[getPropertySpan()]; } @Override protected int[] getPropertyTableNumbers() { return new int[getPropertySpan()]; } @Override protected int getSubclassPropertyTableNumber(int i) { return 0; } @Override protected String filterFragment(String alias) throws MappingException { // TODO support filter in OGM??? How??? return ""; // return hasWhere() ? // " and " + getSQLWhereString(alias) : // ""; } @Override public String getSubclassPropertyTableName(int i) { return getTableName(); // ie. the subquery! yuck! } // FIXME useful? @Override public String fromTableFragment(String alias) { return getTableName() + ' ' + alias; } @Override public String getPropertyTableName(String propertyName) { // TODO: check this.... return getTableName(); } @Override public String getTableName() { // FIXME it should be the subquery ie include the subclasses as well in SQL (union logic) return tableName; } @Override public Type getDiscriminatorType() { return IntegerType.INSTANCE; } @Override public Object getDiscriminatorValue() { return discriminator.getValue(); } @Override public String getSubclassForDiscriminatorValue(Object value) { return discriminator.provideClassByValue(value); } @Override public Serializable[] getPropertySpaces() { return spaces; } public TupleContext getTupleContext() { return tupleContext; } public String getJpaEntityName() { return jpaEntityName; } @Override public void processInsertGeneratedProperties( Serializable id, Object entity, Object[] state, SessionImplementor session) { if (!hasUpdateGeneratedProperties()) { throw new AssertionFailure("no insert-generated properties"); } processGeneratedProperties(id, entity, state, session, GenerationTiming.INSERT); } @Override public void processUpdateGeneratedProperties( Serializable id, Object entity, Object[] state, SessionImplementor session) { if (!hasUpdateGeneratedProperties()) { throw new AssertionFailure("no update-generated properties"); } processGeneratedProperties(id, entity, state, session, GenerationTiming.ALWAYS); } /** * Re-reads the given entity, refreshing any properties updated on the server-side during insert * or update. */ private void processGeneratedProperties( Serializable id, Object entity, Object[] state, SessionImplementor session, GenerationTiming matchTiming) { Tuple tuple = getResultsetById(id, session); if (tuple == null || tuple.getSnapshot().isEmpty()) { throw log.couldNotRetrieveEntityForRetrievalOfGeneratedProperties(getEntityName(), id); } int propertyIndex = -1; for (NonIdentifierAttribute attribute : getEntityMetamodel().getProperties()) { propertyIndex++; final ValueGeneration valueGeneration = attribute.getValueGenerationStrategy(); if (isReadRequired(valueGeneration, matchTiming)) { Object hydratedState = gridPropertyTypes[propertyIndex].hydrate( tuple, getPropertyAliases("", propertyIndex), session, entity); state[propertyIndex] = gridPropertyTypes[propertyIndex].resolve(hydratedState, session, entity); setPropertyValue(entity, propertyIndex, state[propertyIndex]); } } } /** * Whether the given value generation strategy requires to read the value from the database or * not. 
*/ private boolean isReadRequired(ValueGeneration valueGeneration, GenerationTiming matchTiming) { return valueGeneration != null && valueGeneration.getValueGenerator() == null && timingsMatch(valueGeneration.getGenerationTiming(), matchTiming); } private boolean timingsMatch(GenerationTiming timing, GenerationTiming matchTiming) { return (matchTiming == GenerationTiming.INSERT && timing.includesInsert()) || (matchTiming == GenerationTiming.ALWAYS && timing.includesUpdate()); } }
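To make the class-level description of the persister concrete, the following is a minimal, hypothetical entity of the kind OgmEntityPersister manages: a single logical table with an identifier and a version column. It is plain JPA, chosen for illustration, and is not part of the OGM code above.

// Hypothetical mapped entity, shown only to illustrate the persister's responsibilities.
@Entity
public class Animal {

    @Id
    private Long id;          // handled through gridIdentifierType and the EntityKeyMetadata

    @Version
    private Integer revision; // compared by checkVersionAndRaiseSOSE on update and delete

    private String name;      // becomes one of the column names collected into the TupleContext

    // constructors, getters and setters omitted
}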
/** * Extension point allowing any NoSQL native query with named and positional parameters to be * executed by OGM on the corresponding backend, returning managed entities, collections and simple * scalar values. * * @author Davide D'Alto <[email protected]> */ public class BackendCustomQuery<T extends Serializable> implements CustomQuery, Serializable { private static final Log LOG = LoggerFactory.make(); private final String queryString; private final T queryObject; private final Set<String> querySpaces; private final List<Return> customQueryReturns; private final EntityMetadataInformation singleEntityMetadataInformation; public BackendCustomQuery( String queryString, T query, NativeSQLQueryReturn[] queryReturns, Set<String> querySpaces, SessionFactoryImplementor factory) throws HibernateException { LOG.tracev("Starting processing of NoSQL query [{0}]", queryString); this.queryString = queryString; this.queryObject = query; SQLQueryReturnProcessor processor = new SQLQueryReturnProcessor(queryReturns, factory); processor.process(); customQueryReturns = Collections.unmodifiableList(processor.generateCustomReturns(false)); if (querySpaces != null) { this.querySpaces = Collections.<String>unmodifiableSet(querySpaces); } else { this.querySpaces = Collections.emptySet(); } this.singleEntityMetadataInformation = determineSingleEntityMetadataInformation(factory, customQueryReturns); } private EntityMetadataInformation determineSingleEntityMetadataInformation( SessionFactoryImplementor sessionFactory, List<Return> customQueryReturns) { EntityMetadataInformation metadataInformation = null; for (Return queryReturn : customQueryReturns) { if (queryReturn instanceof RootReturn) { if (metadataInformation != null) { return null; } RootReturn rootReturn = (RootReturn) queryReturn; OgmEntityPersister persister = (OgmEntityPersister) sessionFactory.getEntityPersister(rootReturn.getEntityName()); metadataInformation = new EntityMetadataInformation( persister.getEntityKeyMetadata(), rootReturn.getEntityName()); } } return metadataInformation; } /** @deprecated Use {@link #getQueryString()} instead. */ @Override @Deprecated public String getSQL() { return getQueryString(); } public String getQueryString() { return queryString; } /** * Returns an object-based representation of this query, if present. * * @return an object-based representation of this query, or {@code null} if this is a string-based * query. */ public T getQueryObject() { return queryObject; } @Override public Set<String> getQuerySpaces() { return querySpaces; } @Override public Map<?, ?> getNamedParameterBindPoints() { // TODO: Should this actually be something more sensible? return Collections.emptyMap(); } @Override public List<Return> getCustomQueryReturns() { return customQueryReturns; } /** * Returns the {@link EntityMetadataInformation} of the entity type selected by this query. * * @return the {@link EntityMetadataInformation} of the entity type selected by this query or * {@code null} in case this query does not select exactly one entity type (e.g. in case of * scalar values or joins (if supported in future revisions)). */ public EntityMetadataInformation getSingleEntityMetadataInformationOrNull() { return singleEntityMetadataInformation; } @Override public String toString() { return queryObject != null ? queryObject.toString() : queryString; } }
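As an illustration of the kind of query this class ends up wrapping, a native query can be created through OgmSession#createNativeQuery (shown further below in OgmSessionImpl). The MongoDB-flavoured query string, the Poem entity and the ogmSession variable are assumptions chosen purely for the example.

// Illustrative only: a backend-native query that is represented internally by a BackendCustomQuery.
NoSQLQuery query = ogmSession.createNativeQuery( "{ $query : { author : 'Oscar Wilde' } }" );
query.addEntity( Poem.class );   // registers a root return, so a single entity type is selected

List<?> poems = query.list();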
/** * A JUnit 4 runner for OGM tests. Based on a given set of entities, it manages a session factory, * which is used throughout all test methods of the given test class. * * <p>The entities of the test are to be returned by a parameterless method annotated with {@link * TestEntities} in form of a {@code Class<?>[]}. * * <p>The used session factory can be obtained by annotating a field of type {@link SessionFactory} * with the {@link TestSessionFactory} annotation. The runner will inject the factory in this field * then. Depending on the {@link TestSessionFactory#scope() } setting, either the same session * factory instance will be used for all test methods of a given test class or a new session factory * will be created and injected for each individual test method. * * <p>Finally the {@link Configuration} used for bootstrapping the factory can optionally be * modified by annotating a configuration method with the {@link SessionFactoryConfiguration} a * shown in the example below. * * <p>Usage example: * * <pre>{@code * @RunWith(OgmTestRunner.class) * public class AnimalFarmTest { * * @TestSessionFactory * public SessionFactory sessions; * * @Test * public void shouldCountAnimals() throws Exception { * Session session = sessions.openSession(); * ... * session.close(); * } * * @SessionFactoryConfiguration * public static void configure(Configuration cfg) { * cfg.setProperty( * Environment.MONGODB_ASSOCIATIONS_STORE, * AssociationStorage.COLLECTION.name() * ); * } * * @TestEntities * public Class<?>[] getTestEntities() { * return new Class<?>[]{ PolarBear.class, Giraffe.class }; * } * } * }</pre> * * @see OgmTestCase Base class for tests which is configured with this runner for ease of use * @author Gunnar Morling */ public class OgmTestRunner extends GridDialectSkippableTestRunner { private static final Log LOG = LoggerFactory.make(); private final Set<Field> testScopedFactoryFields; private final Set<Field> testMethodScopedFactoryFields; private SessionFactory testScopedSessionFactory; private SessionFactory testMethodScopedSessionFactory; public OgmTestRunner(Class<?> klass) throws InitializationError { super(klass); testScopedFactoryFields = getTestFactoryFields(getTestClass(), Scope.TEST_CLASS); testMethodScopedFactoryFields = getTestFactoryFields(getTestClass(), Scope.TEST_METHOD); } private static Set<Field> getTestFactoryFields( TestClass testClass, TestSessionFactory.Scope scope) { Set<Field> testFactoryFields = new HashSet<Field>(); for (FrameworkField frameworkField : testClass.getAnnotatedFields(TestSessionFactory.class)) { Field field = frameworkField.getField(); if (scope == field.getAnnotation(TestSessionFactory.class).scope()) { field.setAccessible(true); testFactoryFields.add(field); } } return testFactoryFields; } @Override public void run(RunNotifier notifier) { if (isTestScopedSessionFactoryRequired()) { testScopedSessionFactory = buildSessionFactory(); injectSessionFactory(null, testScopedFactoryFields, testScopedSessionFactory); } try { super.run(notifier); } finally { if (testScopedSessionFactory != null) { cleanUpPendingTransactionIfRequired(); TestHelper.dropSchemaAndDatabase(testScopedSessionFactory); testScopedSessionFactory.close(); } } } @Override protected void runChild(FrameworkMethod method, RunNotifier notifier) { // create test method scoped SF if required; it will be injected in createTest() if (isTestMethodScopedSessionFactoryRequired(method)) { testMethodScopedSessionFactory = buildSessionFactory(); } try { super.runChild(method, notifier); } finally { if 
(testMethodScopedSessionFactory != null) { testMethodScopedSessionFactory.close(); } } } private boolean isTestScopedSessionFactoryRequired() { return !isTestClassSkipped() && !areAllTestMethodsSkipped(); } private boolean isTestMethodScopedSessionFactoryRequired(FrameworkMethod method) { return !testMethodScopedFactoryFields.isEmpty() && !super.isTestMethodSkipped(method); } private void cleanUpPendingTransactionIfRequired() { TransactionManager transactionManager = ((SessionFactoryImplementor) testScopedSessionFactory) .getServiceRegistry() .getService(JtaPlatform.class) .retrieveTransactionManager(); try { if (transactionManager != null && transactionManager.getTransaction() != null) { LOG.warn( "The test started a transaction but failed to commit it or roll it back. Going to roll it back."); transactionManager.rollback(); } } catch (Exception e) { throw new RuntimeException(e); } } protected SessionFactory buildSessionFactory() { Class<?>[] classes = getConfiguredEntityTypes(); Configuration configuration = createConfiguration(classes); return configuration.buildSessionFactory(); } private Class<?>[] getConfiguredEntityTypes() { for (FrameworkMethod frameworkMethod : getTestClass().getAnnotatedMethods(TestEntities.class)) { Method method = frameworkMethod.getMethod(); method.setAccessible(true); if (method.getReturnType() != Class[].class || method.getParameterTypes().length > 0) { throw new IllegalStateException( "Method annotated with " + TestEntities.class.getSimpleName() + " must have no parameters and must return Class<?>[]."); } try { return (Class<?>[]) method.invoke(super.createTest()); } catch (Exception e) { throw new RuntimeException(e); } } throw new IllegalStateException( "The entities of the test must be retrievable via a parameterless method which is annotated with " + TestEntities.class.getSimpleName() + " and returns Class<?>[]."); } private Configuration createConfiguration(Class<?>[] entityTypes) { if (entityTypes == null || entityTypes.length == 0) { throw new IllegalArgumentException("Define at least a single annotated entity"); } OgmConfiguration configuration = TestHelper.getDefaultTestConfiguration(entityTypes); invokeTestConfigurationMethod(configuration); return configuration; } private void invokeTestConfigurationMethod(OgmConfiguration configuration) { try { for (FrameworkMethod frameworkMethod : getTestClass().getAnnotatedMethods(SessionFactoryConfiguration.class)) { Method method = frameworkMethod.getMethod(); method.setAccessible(true); method.invoke(super.createTest(), configuration); } } catch (Exception e) { throw new RuntimeException(e); } } @Override protected Object createTest() throws Exception { Object test = super.createTest(); // inject SFs as per given scopes if (!testScopedFactoryFields.isEmpty()) { injectSessionFactory(test, testScopedFactoryFields, testScopedSessionFactory); } if (!testMethodScopedFactoryFields.isEmpty()) { injectSessionFactory(test, testMethodScopedFactoryFields, testMethodScopedSessionFactory); } return test; } private void injectSessionFactory( Object test, Iterable<Field> fields, SessionFactory sessionFactory) { for (Field field : fields) { try { if ((test == null && Modifier.isStatic(field.getModifiers())) || (test != null && !Modifier.isStatic(field.getModifiers()))) { field.set(test, sessionFactory); } } catch (Exception e) { throw new RuntimeException("Can't inject session factory into field " + field); } } } }
/** * @author Emmanuel Bernard <*****@*****.**> * @author Sanne Grinovero <*****@*****.**> */ public class TestHelper { private static final Log log = LoggerFactory.make(); private static final TestableGridDialect helper = createStoreSpecificHelper(); static { // set 5 hours timeout on transactions: enough for debug, but not too high in case of CI // problems. TxControl.setDefaultTimeout(60 * 60 * 2); } private TestHelper() {} public static long getNumberOfEntities(EntityManager em) { return getNumberOfEntities(em.unwrap(Session.class)); } private static TestableGridDialect createStoreSpecificHelper() { for (GridDialectType gridType : GridDialectType.values()) { Class<?> classForName = gridType.loadTestableGridDialectClass(); if (classForName != null) { try { TestableGridDialect attempt = (TestableGridDialect) classForName.newInstance(); log.debugf("Using TestGridDialect %s", classForName); return attempt; } catch (Exception e) { // but other errors are not expected: log.errorf(e, "Could not load TestGridDialect by name from %s", gridType); } } } return new org.hibernate.ogm.utils.HashMapTestHelper(); } public static GridDialectType getCurrentDialectType() { return GridDialectType.valueFromHelperClass(helper.getClass()); } public static long getNumberOfEntities(Session session) { return getNumberOfEntities(session.getSessionFactory()); } public static long getNumberOfEntities(SessionFactory sessionFactory) { return helper.getNumberOfEntities(sessionFactory); } public static Map<String, Object> extractEntityTuple( SessionFactory sessionFactory, EntityKey key) { return helper.extractEntityTuple(sessionFactory, key); } public static long getNumberOfAssociations(SessionFactory sessionFactory) { return helper.getNumberOfAssociations(sessionFactory); } /** * Returns the number of associations of the given type. * * <p>Optional operation which only is supported for document datastores. */ public static long getNumberOfAssociations( SessionFactory sessionFactory, AssociationStorageType type) { return helper.getNumberOfAssociations(sessionFactory, type); } /** * Returns the number of collections stored embedded within an entity structure in the datastore * * <p>Optional operation which only is supported for document datastores. 
*/ public static long getNumberOfEmbeddedCollections(SessionFactory sessionFactory) { return helper.getNumberOEmbeddedCollections(sessionFactory); } public static boolean backendSupportsTransactions() { return helper.backendSupportsTransactions(); } @SuppressWarnings("unchecked") public static <T> T get(Session session, Class<T> clazz, Serializable id) { return (T) session.get(clazz, id); } public static void dropSchemaAndDatabase(Session session) { if (session != null) { dropSchemaAndDatabase(session.getSessionFactory()); } } public static void dropSchemaAndDatabase(EntityManagerFactory emf) { dropSchemaAndDatabase(((HibernateEntityManagerFactory) emf).getSessionFactory()); } public static void dropSchemaAndDatabase(SessionFactory sessionFactory) { // if the factory is closed, we don't have access to the service registry if (sessionFactory != null && !sessionFactory.isClosed()) { try { helper.dropSchemaAndDatabase(sessionFactory); } catch (Exception e) { log.warn("Exception while dropping schema and database in test", e); } } } public static Map<String, String> getEnvironmentProperties() { // TODO hibernate.properties is ignored due to HHH-8635, thus explicitly load its properties Map<String, String> properties = getHibernateProperties(); Map<String, String> environmentProperties = helper.getEnvironmentProperties(); if (environmentProperties != null) { properties.putAll(environmentProperties); } return properties; } private static Map<String, String> getHibernateProperties() { InputStream hibernatePropertiesStream = null; Map<String, String> properties = new HashMap<String, String>(); try { hibernatePropertiesStream = TestHelper.class.getResourceAsStream("/hibernate.properties"); Properties hibernateProperties = new Properties(); hibernateProperties.load(hibernatePropertiesStream); for (Entry<Object, Object> property : hibernateProperties.entrySet()) { properties.put(property.getKey().toString(), property.getValue().toString()); } return properties; } catch (Exception e) { throw new RuntimeException(e); } finally { closeQuietly(hibernatePropertiesStream); } } private static void closeQuietly(InputStream stream) { if (stream != null) { try { stream.close(); } catch (IOException e) { // ignore } } } public static void checkCleanCache(SessionFactory sessionFactory) { assertThat(getNumberOfEntities(sessionFactory)).as("Entity cache should be empty").isEqualTo(0); assertThat(getNumberOfAssociations(sessionFactory)) .as("Association cache should be empty") .isEqualTo(0); } /** * Provides a default {@link OgmConfiguration} for tests, using the given set of annotated entity * types. * * @param entityTypes the entity types for which to build a configuration * @return a default configuration based on the given types */ public static OgmConfiguration getDefaultTestConfiguration(Class<?>... entityTypes) { OgmConfiguration configuration = new OgmConfiguration(); // by default use the new id generator scheme... 
configuration.setProperty(Configuration.USE_NEW_ID_GENERATOR_MAPPINGS, "true"); for (Map.Entry<String, String> entry : TestHelper.getEnvironmentProperties().entrySet()) { configuration.setProperty(entry.getKey(), entry.getValue()); } configuration.setProperty(Environment.HBM2DDL_AUTO, "none"); // volatile indexes for Hibernate Search (if used) configuration.setProperty("hibernate.search.default.directory_provider", "ram"); // disable warnings about unspecified Lucene version configuration.setProperty("hibernate.search.lucene_version", "LUCENE_35"); for (Class<?> aClass : entityTypes) { configuration.addAnnotatedClass(aClass); } return configuration; } /** * Returns a {@link GlobalContext} for configuring the current datastore. * * @param configuration the target the configuration will be applied to * @return a context object for configuring the current datastore. */ public static GlobalContext<?, ?> configureDatastore(OgmConfiguration configuration) { return helper.configureDatastore(configuration); } }
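As orientation for readers of the test utility above, the following sketch shows how a test might drive TestHelper end to end. It is a minimal, illustrative example only: the Hypothesis entity, its field values, the call to buildSessionFactory() directly on the OgmConfiguration, and the import locations (org.hibernate.ogm.utils.TestHelper, org.hibernate.ogm.cfg.OgmConfiguration) are assumptions rather than something confirmed by the sources in this document.

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.ogm.cfg.OgmConfiguration;
import org.hibernate.ogm.utils.TestHelper;

/**
 * Illustrative sketch only; not part of the test suite. Shows the intended call sequence
 * around TestHelper: build a pre-configured OgmConfiguration, run some work, verify the
 * datastore contents through the backend-specific helper, then clean up.
 */
public class TestHelperUsageExample {

	// hypothetical entity used only for this example
	@Entity
	public static class Hypothesis {
		@Id
		private String id;
		private String description;

		Hypothesis() {
		}

		Hypothesis(String id, String description) {
			this.id = id;
			this.description = description;
		}
	}

	public static void main(String[] args) {
		// configuration pre-populated with the datastore-specific environment properties
		OgmConfiguration configuration = TestHelper.getDefaultTestConfiguration( Hypothesis.class );
		SessionFactory sessionFactory = configuration.buildSessionFactory();
		try {
			Session session = sessionFactory.openSession();
			Transaction transaction = session.beginTransaction();
			session.persist( new Hypothesis( "h-1", "tuples end up in the grid" ) );
			transaction.commit();
			session.close();

			// counts are resolved through the TestableGridDialect selected at class-load time
			assert TestHelper.getNumberOfEntities( sessionFactory ) == 1;
		}
		finally {
			// drop whatever the test created and release the factory
			TestHelper.dropSchemaAndDatabase( sessionFactory );
			sessionFactory.close();
		}
	}
}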
/** * An OGM specific session implementation which delegates most of the work to the underlying * Hibernate ORM {@code Session}, except for queries, which are redirected to the OGM engine. * * @author Emmanuel Bernard <[email protected]> */ public class OgmSessionImpl extends SessionDelegatorBaseImpl implements OgmSession, EventSource { private static final Log log = LoggerFactory.make(); private final EventSource delegate; private final OgmSessionFactoryImpl factory; public OgmSessionImpl(OgmSessionFactory factory, EventSource delegate) { super(delegate, delegate); this.delegate = delegate; this.factory = (OgmSessionFactoryImpl) factory; } // Overridden methods @Override public SessionFactoryImplementor getFactory() { return factory; } @Override public OgmSessionFactory getSessionFactory() { return factory; } @Override public Criteria createCriteria(Class persistentClass) { // TODO plug the Lucene engine throw new NotSupportedException("OGM-23", "Criteria queries are not supported yet"); } @Override public Criteria createCriteria(Class persistentClass, String alias) { // TODO plug the Lucene engine throw new NotSupportedException("OGM-23", "Criteria queries are not supported yet"); } @Override public Criteria createCriteria(String entityName) { // TODO plug the Lucene engine throw new NotSupportedException("OGM-23", "Criteria queries are not supported yet"); } @Override public Criteria createCriteria(String entityName, String alias) { // TODO plug the Lucene engine throw new NotSupportedException("OGM-23", "Criteria queries are not supported yet"); } @Override public Query createQuery(NamedQueryDefinition namedQueryDefinition) { errorIfClosed(); checkTransactionSynchStatus(); String queryString = namedQueryDefinition.getQueryString(); Query query = createQuery(queryString); query.setComment("named HQL/JP-QL query " + namedQueryDefinition.getName()); query.setFlushMode(namedQueryDefinition.getFlushMode()); return query; } @Override public NoSQLQuery createSQLQuery(String queryString) throws HibernateException { return createNativeQuery(queryString); } @Override public SQLQuery createSQLQuery(NamedSQLQueryDefinition namedQueryDefinition) { return createNativeQuery(namedQueryDefinition.getQuery()); } @Override public NoSQLQuery createNativeQuery(String nativeQuery) { errorIfClosed(); checkTransactionSynchStatus(); return new NoSQLQueryImpl( nativeQuery, this, factory.getQueryPlanCache().getSQLParameterMetadata(nativeQuery)); } @Override public Query createFilter(Object collection, String queryString) throws HibernateException { // TODO plug the Lucene engine throw new NotSupportedException("OGM-24", "filters are not supported yet"); } @Override public Filter enableFilter(String filterName) { throw new NotSupportedException("OGM-25", "filters are not supported yet"); } @Override public Filter getEnabledFilter(String filterName) { return delegate.getEnabledFilter(filterName); } @Override public void disableFilter(String filterName) { throw new NotSupportedException("OGM-25", "filters are not supported yet"); } @Override public void doWork(Work work) throws HibernateException { throw new IllegalStateException("Hibernate OGM does not support SQL Connections hence no Work"); } @Override public <T> T doReturningWork(ReturningWork<T> work) throws HibernateException { return delegate.doReturningWork(work); } @Override public ProcedureCall getNamedProcedureCall(String name) { throw new NotSupportedException("OGM-359", "Stored procedures are not supported yet"); } @Override public ProcedureCall 
createStoredProcedureCall(String procedureName) { throw new NotSupportedException("OGM-359", "Stored procedures are not supported yet"); } @Override public ProcedureCall createStoredProcedureCall(String procedureName, Class... resultClasses) { throw new NotSupportedException("OGM-359", "Stored procedures are not supported yet"); } @Override public ProcedureCall createStoredProcedureCall( String procedureName, String... resultSetMappings) { throw new NotSupportedException("OGM-359", "Stored procedures are not supported yet"); } // Event Source methods @Override public ActionQueue getActionQueue() { return delegate.getActionQueue(); } @Override public Object instantiate(EntityPersister persister, Serializable id) throws HibernateException { return delegate.instantiate(persister, id); } @Override public void forceFlush(EntityEntry e) throws HibernateException { delegate.forceFlush(e); } @Override public void merge(String entityName, Object object, Map copiedAlready) throws HibernateException { delegate.merge(entityName, object, copiedAlready); } @Override public void persist(String entityName, Object object, Map createdAlready) throws HibernateException { delegate.persist(entityName, object, createdAlready); } @Override public void persistOnFlush(String entityName, Object object, Map copiedAlready) { delegate.persistOnFlush(entityName, object, copiedAlready); } @Override public void refresh(String entityName, Object object, Map refreshedAlready) throws HibernateException { delegate.refresh(entityName, object, refreshedAlready); } @Override public void delete( String entityName, Object child, boolean isCascadeDeleteEnabled, Set transientEntities) { delegate.delete(entityName, child, isCascadeDeleteEnabled, transientEntities); } @Override public JdbcConnectionAccess getJdbcConnectionAccess() { return delegate.getJdbcConnectionAccess(); } @Override public EntityKey generateEntityKey(Serializable id, EntityPersister persister) { return delegate.generateEntityKey(id, persister); } @Override public List<?> listCustomQuery(CustomQuery customQuery, QueryParameters queryParameters) throws HibernateException { errorIfClosed(); checkTransactionSynchStatus(); if (log.isTraceEnabled()) { log.tracev("NoSQL query: {0}", customQuery.getSQL()); } BackendCustomLoader loader = new BackendCustomLoader((BackendCustomQuery<?>) customQuery, getFactory()); autoFlushIfRequired(loader.getQuerySpaces()); return loader.list(getDelegate(), queryParameters); } /** * detect in-memory changes, determine if the changes are to tables named in the query and, if so, * complete execution the flush * * <p>NOTE: Copied as-is from {@link SessionImpl}. We need it here as {@link * #listCustomQuery(CustomQuery, QueryParameters)} needs to be customized (which makes use of auto * flushes) to work with our custom loaders. 
*/ private boolean autoFlushIfRequired(Set<String> querySpaces) throws HibernateException { if (!isTransactionInProgress()) { // do not auto-flush while outside a transaction return false; } AutoFlushEvent event = new AutoFlushEvent(querySpaces, getDelegate()); for (AutoFlushEventListener listener : listeners(EventType.AUTO_FLUSH)) { listener.onAutoFlush(event); } return event.isFlushRequired(); } private <T> Iterable<T> listeners(EventType<T> type) { return eventListenerGroup(type).listeners(); } private <T> EventListenerGroup<T> eventListenerGroup(EventType<T> type) { return factory .getServiceRegistry() .getService(EventListenerRegistry.class) .getEventListenerGroup(type); } @Override public ScrollableResults scrollCustomQuery( CustomQuery customQuery, QueryParameters queryParameters) throws HibernateException { return delegate.scrollCustomQuery(customQuery, queryParameters); } @Override public List<?> list(NativeSQLQuerySpecification spec, QueryParameters queryParameters) throws HibernateException { return listCustomQuery( factory.getQueryPlanCache().getNativeSQLQueryPlan(spec).getCustomQuery(), queryParameters); } @Override public ScrollableResults scroll(NativeSQLQuerySpecification spec, QueryParameters queryParameters) throws HibernateException { return delegate.scroll(spec, queryParameters); } // SessionImplementor methods @Override public Query getNamedQuery(String name) { errorIfClosed(); checkTransactionSynchStatus(); NamedQueryDefinition namedQuery = factory.getNamedQuery(name); // ORM looks for native queries when no HQL definition is found, we do the same here. if (namedQuery == null) { return getNamedSQLQuery(name); } return createQuery(namedQuery); } @Override public Query getNamedSQLQuery(String queryName) { errorIfClosed(); checkTransactionSynchStatus(); NamedSQLQueryDefinition nsqlqd = findNamedNativeQuery(queryName); Query query = new NoSQLQueryImpl( nsqlqd, this, factory.getQueryPlanCache().getSQLParameterMetadata(nsqlqd.getQuery())); query.setComment("named native query " + queryName); return query; } private NamedSQLQueryDefinition findNamedNativeQuery(String queryName) { NamedSQLQueryDefinition nsqlqd = factory.getNamedSQLQuery(queryName); if (nsqlqd == null) { throw new MappingException("Named native query not found: " + queryName); } return nsqlqd; } @Override public SharedSessionBuilder sessionWithOptions() { return new OgmSharedSessionBuilderDelegator(delegate.sessionWithOptions(), factory); } // Copied from org.hibernate.internal.AbstractSessionImpl.errorIfClosed() to mimic same behaviour protected void errorIfClosed() { if (delegate.isClosed()) { throw new SessionException("Session is closed!"); } } // Copied from org.hibernate.internal.SessionImpl.checkTransactionSynchStatus() to mimic same // behaviour private void checkTransactionSynchStatus() { pulseTransactionCoordinator(); delayedAfterCompletion(); } // Copied from org.hibernate.internal.SessionImpl.pulseTransactionCoordinator() to mimic same // behaviour private void pulseTransactionCoordinator() { if (!isClosed()) { delegate.getTransactionCoordinator().pulse(); } } // Copied from org.hibernate.internal.SessionImpl.delayedAfterCompletion() to mimic same behaviour private void delayedAfterCompletion() { if (delegate.getTransactionCoordinator() instanceof JtaTransactionCoordinatorImpl) { ((JtaTransactionCoordinatorImpl) delegate.getTransactionCoordinator()) .getSynchronizationCallbackCoordinator() .processAnyDelayedAfterCompletion(); } } public <G extends GlobalContext<?, ?>, D extends 
DatastoreConfiguration<G>> G configureDatastore( Class<D> datastoreType) { throw new UnsupportedOperationException( "OGM-343 Session specific options are not currently supported"); } @Override public void removeOrphanBeforeUpdates(String entityName, Object child) { delegate.removeOrphanBeforeUpdates(entityName, child); } /** * Returns the underlying ORM session to which most work is delegated. * * @return the underlying session */ public EventSource getDelegate() { return delegate; } @Override public NaturalIdLoadAccess byNaturalId(Class entityClass) { throw new UnsupportedOperationException("OGM-589 - Natural id look-ups are not yet supported"); } @Override public NaturalIdLoadAccess byNaturalId(String entityName) { throw new UnsupportedOperationException("OGM-589 - Natural id look-ups are not yet supported"); } @Override public SimpleNaturalIdLoadAccess bySimpleNaturalId(Class entityClass) { throw new UnsupportedOperationException("OGM-589 - Natural id look-ups are not yet supported"); } @Override public SimpleNaturalIdLoadAccess bySimpleNaturalId(String entityName) { throw new UnsupportedOperationException("OGM-589 - Natural id look-ups are not yet supported"); } }
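To make the query-redirection behaviour above concrete, here is a minimal, hypothetical sketch of issuing a native query through an OgmSession. The package names in the imports, the assumption that the OGM session factory exposes openSession(), and the backend-specific query string are illustrative; only createNativeQuery(String) returning a NoSQLQuery is taken from the class above.

import java.util.List;

import org.hibernate.Transaction;
import org.hibernate.ogm.OgmSession;
import org.hibernate.ogm.OgmSessionFactory;
import org.hibernate.ogm.query.NoSQLQuery;

/**
 * Illustrative sketch only. Issues a backend-native query through an OgmSession;
 * the query syntax is whatever the configured datastore dialect understands.
 */
public class OgmSessionUsageExample {

	public static List<?> findByNativeQuery(
			OgmSessionFactory sessionFactory, String nativeQuery, Class<?> entityType) {
		// the cast is defensive: depending on the OGM version the factory may already
		// declare OgmSession as the return type of openSession()
		OgmSession session = (OgmSession) sessionFactory.openSession();
		Transaction transaction = session.beginTransaction();
		try {
			// createNativeQuery returns a NoSQLQuery (see OgmSessionImpl above); the string is
			// handed to the datastore dialect, so its syntax is backend specific
			NoSQLQuery query = session.createNativeQuery( nativeQuery );
			query.addEntity( entityType );
			List<?> results = query.list();
			transaction.commit();
			return results;
		}
		catch (RuntimeException e) {
			transaction.rollback();
			throw e;
		}
		finally {
			session.close();
		}
	}
}

Under the assumptions above, executing such a query is expected to flow through list(NativeSQLQuerySpecification, QueryParameters) and listCustomQuery(...) in OgmSessionImpl, i.e. through the BackendCustomLoader rather than through JDBC.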