/** Method to delete the object from the datastore. */
  protected void internalDeletePersistent() {
    if (isDeleting()) {
      throw new NucleusUserException(Localiser.msg("026008"));
    }

    activity = ActivityState.DELETING;
    try {
      if (dirty) {
        clearDirtyFlags();

        // Clear the PM's knowledge of our being dirty. This calls flush() which does nothing
        myEC.flushInternal(false);
      }

      if (!isEmbedded()) {
        // Nothing to delete if embedded
        getStoreManager().getPersistenceHandler().deleteObject(this);
      }

      preDeleteLoadedFields = null;
    } finally {
      activity = ActivityState.NONE;
    }
  }
  /**
   * Convenience method to load fields from the datastore. Note that if fieldNumbers is null/empty
   * we should still call the persistence handler, since the version field may need loading.
   *
   * @param fieldNumbers The field numbers.
   */
  protected void loadFieldsFromDatastore(int[] fieldNumbers) {
    if (myLC.isNew() && myLC.isPersistent() && !isFlushedNew()) {
      // Not yet flushed new persistent object to datastore so no point in "loading"
      return;
    }

    // TODO Merge this into fetch object handler
    if ((flags & FLAG_NEED_INHERITANCE_VALIDATION) != 0) {
      String className =
          getStoreManager().getClassNameForObjectID(myID, myEC.getClassLoaderResolver(), myEC);
      if (!getObject().getClass().getName().equals(className)) {
        myEC.removeObjectFromLevel1Cache(myID);
        myEC.removeObjectFromLevel2Cache(myID);
        throw new NucleusObjectNotFoundException(
            "Object with id "
                + myID
                + " was created without validating of type "
                + getObject().getClass().getName()
                + " but is actually of type "
                + className);
      }
      flags &= ~FLAG_NEED_INHERITANCE_VALIDATION;
    }

    // TODO If the field has "loadFetchGroup" defined, then add it to the fetch plan etc
    getStoreManager().getPersistenceHandler().fetchObject(this, fieldNumbers);
  }
  /**
   * Method to check if an element is already persistent, or is managed by a different
   * ExecutionContext. If not persistent, this will persist it.
   *
   * @param ec execution context
   * @param element The element
   * @param fieldValues any initial field values to use if persisting the element
   * @return Whether the element was persisted during this call
   */
  protected boolean validateElementForWriting(
      ExecutionContext ec, Object element, FieldValues fieldValues) {
    // TODO Pass in cascade flag and if element not present then throw exception
    // Check the element type for this collection
    if (!elementIsPersistentInterface
        && !validateElementType(ec.getClassLoaderResolver(), element)) {
      throw new ClassCastException(
          Localiser.msg(
              "056033",
              element.getClass().getName(),
              ownerMemberMetaData.getFullFieldName(),
              elementType));
    }

    boolean persisted = false;
    if (elementsAreEmbedded || elementsAreSerialised) {
      // Element is embedded/serialised so has no id
    } else {
      ObjectProvider elementSM = ec.findObjectProvider(element);
      if (elementSM != null && elementSM.isEmbedded()) {
        // Element is already with ObjectProvider and is embedded in another field!
        throw new NucleusUserException(
            Localiser.msg("056028", ownerMemberMetaData.getFullFieldName(), element));
      }

      persisted = SCOUtils.validateObjectForWriting(ec, element, fieldValues);
    }
    return persisted;
  }
  /**
   * Constructor, using the ObjectProvider of the "owner" and the member metadata.
   *
   * @param op The owner ObjectProvider
   * @param mmd Metadata for the member
   */
  public Map(ObjectProvider op, AbstractMemberMetaData mmd) {
    super(op, mmd);

    // Set up our "delegate"
    this.delegate = new java.util.HashMap();

    ExecutionContext ec = ownerOP.getExecutionContext();
    allowNulls = SCOUtils.allowNullsInContainer(allowNulls, mmd);
    useCache = SCOUtils.useContainerCache(ownerOP, mmd);

    if (!SCOUtils.mapHasSerialisedKeysAndValues(mmd)
        && mmd.getPersistenceModifier() == FieldPersistenceModifier.PERSISTENT) {
      ClassLoaderResolver clr = ec.getClassLoaderResolver();
      this.backingStore =
          (MapStore)
              ((BackedSCOStoreManager) ownerOP.getStoreManager())
                  .getBackingStoreForField(clr, mmd, java.util.Map.class);
    }

    if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
      NucleusLogger.PERSISTENCE.debug(
          SCOUtils.getContainerInfoMessage(
              ownerOP,
              ownerMmd.getName(),
              this,
              useCache,
              allowNulls,
              SCOUtils.useCachedLazyLoading(ownerOP, ownerMmd)));
    }
  }
  /**
   * Constructor, using the ObjectProvider of the "owner" and the member metadata.
   *
   * @param op The owner ObjectProvider
   * @param mmd Metadata for the member
   */
  public ArrayList(ObjectProvider op, AbstractMemberMetaData mmd) {
    super(op, mmd);

    // Set up our delegate
    this.delegate = new java.util.ArrayList();

    ExecutionContext ec = op.getExecutionContext();
    allowNulls = SCOUtils.allowNullsInContainer(allowNulls, mmd);
    queued = ec.isDelayDatastoreOperationsEnabled();
    useCache = SCOUtils.useContainerCache(op, mmd);

    if (!SCOUtils.collectionHasSerialisedElements(mmd)
        && mmd.getPersistenceModifier() == FieldPersistenceModifier.PERSISTENT) {
      ClassLoaderResolver clr = ec.getClassLoaderResolver();
      this.backingStore =
          (ListStore)
              ((BackedSCOStoreManager) op.getStoreManager())
                  .getBackingStoreForField(clr, mmd, java.util.ArrayList.class);
    }

    if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
      NucleusLogger.PERSISTENCE.debug(
          SCOUtils.getContainerInfoMessage(
              op,
              ownerMmd.getName(),
              this,
              useCache,
              queued,
              allowNulls,
              SCOUtils.useCachedLazyLoading(op, ownerMmd)));
    }
  }
  @Override
  public NucleusConnection getNucleusConnection(ExecutionContext ec) {
    ConnectionFactory cf = connectionMgr.lookupConnectionFactory(primaryConnectionFactoryName);

    final ManagedConnection mc;
    final boolean enlisted;
    enlisted = ec.getTransaction().isActive();
    mc =
        cf.getConnection(
            enlisted ? ec : null,
            ec.getTransaction(),
            null); // Will throw exception if already locked

    // Lock the connection now that it is in use by the user
    mc.lock();

    Runnable closeRunnable =
        new Runnable() {
          public void run() {
            // Unlock the connection now that the user has finished with it
            mc.unlock();
            if (!enlisted) {
              // TODO Anything to do here?
            }
          }
        };
    return new NucleusConnectionImpl(mc.getConnection(), closeRunnable);
  }
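  // Usage sketch (not part of the original source): how a caller typically borrows the native
  // connection and releases the lock taken above. "storeMgr" is a placeholder for the StoreManager
  // shown here; the cast to java.sql.Connection assumes an RDBMS datastore.
  private static void nucleusConnectionUsageExample(StoreManager storeMgr, ExecutionContext ec) {
    NucleusConnection nconn = storeMgr.getNucleusConnection(ec);
    try {
      java.sql.Connection jdbcConn = (java.sql.Connection) nconn.getNativeConnection();
      // ... issue native SQL directly on jdbcConn ...
    } finally {
      nconn.close(); // runs the closeRunnable above, unlocking the ManagedConnection
    }
  }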
 public FieldManager getFieldManagerForResultProcessing(
     ObjectProvider op, Object resultSet, StatementClassMapping resultMappings) {
   ExecutionContext ec = op.getExecutionContext();
   Class<?> cls =
       ec.getClassLoaderResolver().classForName(op.getClassMetaData().getFullClassName());
   Object internalKey = EntityUtils.idToInternalKey(ec, cls, resultSet, true);
   // Need to provide this to the field manager in the form of the pk
   // of the type: Key, Long, encoded String, or unencoded String
   return new KeyOnlyFieldManager(internalKey);
 }
  public Object getObject(ExecutionContext ec, ResultSet resultSet, int[] exprIndex) {
    if (exprIndex == null) {
      return null;
    }

    Object datastoreValue = getDatastoreMapping(0).getObject(resultSet, exprIndex[0]);
    if (datastoreValue == null) {
      return null;
    }

    if (datastoreValue instanceof String) {
      TypeConverter conv =
          ec.getNucleusContext()
              .getTypeManager()
              .getTypeConverterForType(LocalTime.class, String.class);
      if (conv != null) {
        return conv.toMemberType(datastoreValue);
      } else {
        throw new NucleusUserException("This type doesn't support persistence as a String");
      }
    } else if (datastoreValue instanceof Time) {
      Time time = (Time) datastoreValue;
      Calendar cal = Calendar.getInstance();
      cal.setTime(time);
      LocalTime localTime =
          LocalTime.of(
              cal.get(Calendar.HOUR_OF_DAY),
              cal.get(Calendar.MINUTE),
              cal.get(Calendar.SECOND),
              cal.get(Calendar.MILLISECOND) * 1000000);
      return localTime;
    } else {
      return null;
    }
  }
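  // Illustration (not part of the original source) of the conversion used in the Time branch
  // above: Calendar exposes milliseconds while LocalTime.of expects nanoseconds, hence the
  // multiplication by 1,000,000.
  private static LocalTime localTimeFromMillisExample() {
    int millis = 250;
    return LocalTime.of(14, 30, 15, millis * 1000000); // 14:30:15.250
  }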
  /**
   * Method to find an object value generator based on its name. Caches the generators once
   * generated.
   *
   * @param ec ExecutionContext
   * @param genName The generator name
   * @return The value generator (if any)
   * @throws NucleusException if no generator of that name is found
   */
  protected static ObjectValueGenerator getObjectValueGenerator(
      ExecutionContext ec, String genName) {
    if (!objectValGenerators.isEmpty()) {
      ObjectValueGenerator valGen = objectValGenerators.get(genName);
      if (valGen != null) {
        return valGen;
      }
    }

    try {
      ObjectValueGenerator valGen =
          (ObjectValueGenerator)
              ec.getNucleusContext()
                  .getPluginManager()
                  .createExecutableExtension(
                      "org.datanucleus.store_objectvaluegenerator",
                      new String[] {"name"},
                      new String[] {genName},
                      "class-name",
                      null,
                      null);
      objectValGenerators.put(genName, valGen);
      return valGen;
    } catch (Exception e) {
      NucleusLogger.VALUEGENERATION.info(
          "Exception thrown generating value using objectvaluegenerator " + genName, e);
      throw new NucleusException("Exception thrown generating value for object", e);
    }
  }
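  // Caller sketch (not part of the original source; "my-generator" is an illustrative plugin name).
  // A second lookup with the same name is served from the objectValGenerators cache rather than
  // creating the extension again.
  private static void objectValueGeneratorLookupExample(ExecutionContext ec) {
    ObjectValueGenerator first = getObjectValueGenerator(ec, "my-generator");
    ObjectValueGenerator second = getObjectValueGenerator(ec, "my-generator");
    assert first == second; // cached instance is reused
  }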
  /**
   * Method to check if an element is already persistent or is persistent but managed by a different
   * ExecutionContext.
   *
   * @param op The ObjectProvider of this owner
   * @param element The element
   * @return Whether it is valid for reading.
   */
  protected boolean validateElementForReading(ObjectProvider op, Object element) {
    if (!validateElementType(op.getExecutionContext().getClassLoaderResolver(), element)) {
      return false;
    }

    if (element != null && !elementsAreEmbedded && !elementsAreSerialised) {
      ExecutionContext ec = op.getExecutionContext();
      if ((!ec.getApiAdapter().isPersistent(element)
              || ec != ec.getApiAdapter().getExecutionContext(element))
          && !ec.getApiAdapter().isDetached(element)) {
        return false;
      }
    }

    return true;
  }
  /**
   * Convenience method to retrieve field values from an L2 cached object if they are loaded in that
   * object. If the object is not in the L2 cache then just returns, and similarly if the required
   * fields aren't available.
   *
   * @param fieldNumbers Numbers of fields to load from the L2 cache
   * @return The fields that couldn't be loaded
   */
  protected int[] loadFieldsFromLevel2Cache(int[] fieldNumbers) {
    // Only continue if there are fields, and not being deleted/flushed etc
    if (fieldNumbers == null
        || fieldNumbers.length == 0
        || myEC.isFlushing()
        || myLC.isDeleted()
        || isDeleting()
        || getExecutionContext().getTransaction().isCommitting()) {
      return fieldNumbers;
    }
    // TODO Drop this check when we're confident that this doesn't affect some use-cases
    if (!myEC.getNucleusContext()
        .getConfiguration()
        .getBooleanProperty(PropertyNames.PROPERTY_CACHE_L2_LOADFIELDS, true)) {
      return fieldNumbers;
    }

    Level2Cache l2cache = myEC.getNucleusContext().getLevel2Cache();
    if (l2cache != null && myEC.getNucleusContext().isClassCacheable(cmd)) {
      CachedPC<T> cachedPC = l2cache.get(myID);
      if (cachedPC != null) {
        int[] cacheFieldsToLoad =
            ClassUtils.getFlagsSetTo(cachedPC.getLoadedFields(), fieldNumbers, true);
        if (cacheFieldsToLoad != null && cacheFieldsToLoad.length > 0) {
          if (NucleusLogger.CACHE.isDebugEnabled()) {
            NucleusLogger.CACHE.debug(
                Localiser.msg(
                    "026034",
                    StringUtils.toJVMIDString(getObject()),
                    myID,
                    StringUtils.intArrayToString(cacheFieldsToLoad)));
          }

          L2CacheRetrieveFieldManager l2RetFM = new L2CacheRetrieveFieldManager(this, cachedPC);
          this.replaceFields(cacheFieldsToLoad, l2RetFM);
          int[] fieldsNotLoaded = l2RetFM.getFieldsNotLoaded();
          if (fieldsNotLoaded != null) {
            for (int i = 0; i < fieldsNotLoaded.length; i++) {
              loadedFields[fieldsNotLoaded[i]] = false;
            }
          }
        }
      }
    }

    return ClassUtils.getFlagsSetTo(loadedFields, fieldNumbers, false);
  }
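  // Reference sketch (not part of the original source) of the semantics assumed for
  // ClassUtils.getFlagsSetTo(flags, indices, state) as used above: return the members of "indices"
  // whose flag equals "state", or null when none match.
  private static int[] flagsSetToSketch(boolean[] flags, int[] indices, boolean state) {
    java.util.List<Integer> matching = new java.util.ArrayList<>();
    for (int index : indices) {
      if (flags[index] == state) {
        matching.add(index);
      }
    }
    if (matching.isEmpty()) {
      return null;
    }
    int[] result = new int[matching.size()];
    for (int i = 0; i < result.length; i++) {
      result[i] = matching.get(i);
    }
    return result;
  }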
  /**
   * Method to execute a PreparedStatement query, and return the ResultSet. Prints logging
   * information about timings.
   *
   * @param ec The ExecutionContext
   * @param conn The connection (required since the one on PreparedStatement is not always the same
   *     so we can't use it)
   * @param stmt The statement text
   * @param ps The Prepared Statement
   * @return The ResultSet from the query
   * @throws SQLException Thrown if an error occurs
   */
  public ResultSet executeStatementQuery(
      ExecutionContext ec, ManagedConnection conn, String stmt, PreparedStatement ps)
      throws SQLException {
    if (supportsBatching) {
      ConnectionStatementState state = getConnectionStatementState(conn);
      if (state != null) {
        if (state.processable) {
          // Current batch statement is processable now so let's just process it before processing
          // our query
          processConnectionStatement(conn);
        } else {
          // Current wait statement is not processable now so leave it in wait state
          if (NucleusLogger.DATASTORE_RETRIEVE.isDebugEnabled()) {
            NucleusLogger.DATASTORE_RETRIEVE.debug(LOCALISER.msg("052106", state.stmtText, stmt));
          }
        }
      }
    }

    // Execute this query
    long startTime = System.currentTimeMillis();
    if (NucleusLogger.DATASTORE_NATIVE.isDebugEnabled()) {
      if (ps instanceof ParamLoggingPreparedStatement) {
        NucleusLogger.DATASTORE_NATIVE.debug(
            ((ParamLoggingPreparedStatement) ps).getStatementWithParamsReplaced());
      } else {
        NucleusLogger.DATASTORE_NATIVE.debug(stmt);
      }
    }

    ResultSet rs = ps.executeQuery();
    if (ec != null && ec.getStatistics() != null) {
      // Add to statistics
      ec.getStatistics().incrementNumReads();
    }

    ps.clearBatch();
    if (NucleusLogger.DATASTORE_RETRIEVE.isDebugEnabled()) {
      NucleusLogger.DATASTORE_RETRIEVE.debug(
          LOCALISER.msg("045000", (System.currentTimeMillis() - startTime)));
    }

    return rs;
  }
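  // Caller sketch (not part of the original source), written as if alongside the SQLController
  // methods above and mirroring the pattern used by getClassNameForIdUsingDiscriminator further
  // below: the caller owns the ResultSet and PreparedStatement lifecycle.
  private void executeStatementQueryUsageExample(
      ExecutionContext ec, ManagedConnection mconn, String sqlText, PreparedStatement ps)
      throws SQLException {
    try {
      ResultSet rs = executeStatementQuery(ec, mconn, sqlText, ps);
      try {
        while (rs.next()) {
          // ... read the selected columns ...
        }
      } finally {
        rs.close();
      }
    } finally {
      closeStatement(mconn, ps);
    }
  }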
  /**
   * Convenience method to update the Level2 cached version of this object if it is cacheable and
   * has not been modified during this transaction.
   *
   * @param fieldNumbers Numbers of fields to update in L2 cached object
   */
  protected void updateLevel2CacheForFields(int[] fieldNumbers) {
    String updateMode = (String) myEC.getProperty(PropertyNames.PROPERTY_CACHE_L2_UPDATE_MODE);
    if (updateMode != null && updateMode.equalsIgnoreCase("commit-only")) {
      return;
    }
    if (fieldNumbers == null || fieldNumbers.length == 0) {
      return;
    }

    Level2Cache l2cache = myEC.getNucleusContext().getLevel2Cache();
    if (l2cache != null
        && myEC.getNucleusContext().isClassCacheable(cmd)
        && !myEC.isObjectModifiedInTransaction(myID)) {
      CachedPC<T> cachedPC = l2cache.get(myID);
      if (cachedPC != null) {
        // This originally just updated the L2 cache for fields where the L2 cache didn't have a
        // value for that field, like this
        /*
        int[] cacheFieldsToLoad = ClassUtils.getFlagsSetTo(cachedPC.getLoadedFields(), fieldNumbers, false);
        if (cacheFieldsToLoad == null || cacheFieldsToLoad.length == 0)
        {
            return;
        }
        */
        int[] cacheFieldsToLoad = fieldNumbers;
        CachedPC copyCachedPC = cachedPC.getCopy();
        if (NucleusLogger.CACHE.isDebugEnabled()) {
          NucleusLogger.CACHE.debug(
              Localiser.msg(
                  "026033",
                  StringUtils.toJVMIDString(getObject()),
                  myID,
                  StringUtils.intArrayToString(cacheFieldsToLoad)));
        }

        provideFields(cacheFieldsToLoad, new L2CachePopulateFieldManager(this, copyCachedPC));

        // Replace the current L2 cached object with this one
        myEC.getNucleusContext().getLevel2Cache().put(getInternalObjectId(), copyCachedPC);
      }
    }
  }
  /** Method to change the object state to write-field. */
  protected void transitionWriteField() {
    try {
      if (myEC.getMultithreaded()) {
        myEC.getLock().lock();
        lock.lock();
      }

      preStateChange();
      try {
        myLC = myLC.transitionWriteField(this);
      } finally {
        postStateChange();
      }
    } finally {
      if (myEC.getMultithreaded()) {
        lock.unlock();
        myEC.getLock().unlock();
      }
    }
  }
  /**
   * Method to execute a PreparedStatement (using PreparedStatement.execute()). Prints logging
   * information about timings.
   *
   * @param ec Execution Context
   * @param conn The connection (required since the one on PreparedStatement is not always the same
   *     so we can't use it)
   * @param stmt The statement text
   * @param ps The Prepared Statement
   * @return The return value of PreparedStatement.execute() (true if the first result is a ResultSet)
   * @throws SQLException Thrown if an error occurs
   */
  public boolean executeStatement(
      ExecutionContext ec, ManagedConnection conn, String stmt, PreparedStatement ps)
      throws SQLException {
    if (supportsBatching) {
      // Check for a waiting batched statement that is ready for processing
      ConnectionStatementState state = getConnectionStatementState(conn);
      if (state != null && state.processable) {
        // Process the batch statement before returning our new query statement
        processConnectionStatement(conn);
      }
    }

    // Process the normal execute statement
    long startTime = System.currentTimeMillis();
    if (NucleusLogger.DATASTORE_NATIVE.isDebugEnabled()) {
      if (ps instanceof ParamLoggingPreparedStatement) {
        NucleusLogger.DATASTORE_NATIVE.debug(
            ((ParamLoggingPreparedStatement) ps).getStatementWithParamsReplaced());
      } else {
        NucleusLogger.DATASTORE_NATIVE.debug(stmt);
      }
    }

    boolean flag = ps.execute();
    if (ec != null && ec.getStatistics() != null) {
      // Add to statistics
      ec.getStatistics().incrementNumWrites();
    }

    ps.clearBatch();
    if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
      NucleusLogger.DATASTORE_PERSIST.debug(
          LOCALISER.msg(
              "045002",
              "" + (System.currentTimeMillis() - startTime),
              StringUtils.toJVMIDString(ps)));
    }

    return flag;
  }
 private static void addColumnsToScanForEmbeddedMember(
     Scan scan, List<AbstractMemberMetaData> embMmds, Table table, ExecutionContext ec) {
   AbstractMemberMetaData lastMmd = embMmds.get(embMmds.size() - 1);
   ClassLoaderResolver clr = ec.getClassLoaderResolver();
   AbstractClassMetaData embCmd =
       ec.getMetaDataManager().getMetaDataForClass(lastMmd.getTypeName(), clr);
   int[] embMmdPosns = embCmd.getAllMemberPositions();
   for (int i = 0; i < embMmdPosns.length; i++) {
     AbstractMemberMetaData embMmd = embCmd.getMetaDataForManagedMemberAtAbsolutePosition(i);
     List<AbstractMemberMetaData> subEmbMmds = new ArrayList<AbstractMemberMetaData>(embMmds);
     subEmbMmds.add(embMmd);
     RelationType relationType = embMmd.getRelationType(clr);
     MemberColumnMapping mapping = table.getMemberColumnMappingForEmbeddedMember(subEmbMmds);
     if (RelationType.isRelationSingleValued(relationType)) {
       addColumnsToScanForEmbeddedMember(scan, subEmbMmds, table, ec);
     } else {
       String familyName = HBaseUtils.getFamilyNameForColumn(mapping.getColumn(0));
       String qualifName = HBaseUtils.getQualifierNameForColumn(mapping.getColumn(0));
       scan.addColumn(familyName.getBytes(), qualifName.getBytes());
     }
   }
 }
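 // Usage sketch (not part of the original source): feeding the populated Scan to the HBase client.
 // "htable" is a placeholder for the HBase Table handle (HTableInterface on older client versions);
 // how it is obtained, and the embMmds/table arguments, come from the caller.
 private static void scanEmbeddedMembersExample(
     org.apache.hadoop.hbase.client.Table htable,
     List<AbstractMemberMetaData> embMmds,
     Table table,
     ExecutionContext ec)
     throws java.io.IOException {
   Scan scan = new Scan();
   addColumnsToScanForEmbeddedMember(scan, embMmds, table, ec);
   try (org.apache.hadoop.hbase.client.ResultScanner scanner = htable.getScanner(scan)) {
     for (org.apache.hadoop.hbase.client.Result row : scanner) {
       // ... map the returned cells back onto the embedded members ...
     }
   }
 }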
  /** Method to change the object state to evicted. */
  public void evict() {
    if (myLC != myEC.getNucleusContext().getApiAdapter().getLifeCycleState(LifeCycleState.P_CLEAN)
        && myLC
            != myEC.getNucleusContext()
                .getApiAdapter()
                .getLifeCycleState(LifeCycleState.P_NONTRANS)) {
      return;
    }

    preStateChange();
    try {
      try {
        getCallbackHandler().preClear(getObject());

        getCallbackHandler().postClear(getObject());
      } finally {
        myLC = myLC.transitionEvict(this);
      }
    } finally {
      postStateChange();
    }
  }
 /**
  * This method is invoked just before a rollback is performed in a Transaction involving the
  * persistable object managed by this StateManager.
  *
  * @param tx The transaction
  */
 public void preRollback(org.datanucleus.Transaction tx) {
   preStateChange();
   try {
     myEC.clearDirty(this);
     myLC = myLC.transitionRollback(this, tx);
     if (transactionalVersion != myVersion) {
       transactionalVersion = myVersion;
     }
     this.lockMode = LockManager.LOCK_MODE_NONE;
   } finally {
     postStateChange();
   }
 }
  /**
   * Method to change the object state to read-field.
   *
   * @param isLoaded if the field was previously loaded
   */
  protected void transitionReadField(boolean isLoaded) {
    try {
      if (myEC.getMultithreaded()) {
        myEC.getLock().lock();
        lock.lock();
      }

      if (myLC == null) {
        return;
      }
      preStateChange();
      try {
        myLC = myLC.transitionReadField(this, isLoaded);
      } finally {
        postStateChange();
      }
    } finally {
      if (myEC.getMultithreaded()) {
        lock.unlock();
        myEC.getLock().unlock();
      }
    }
  }
 // TODO Remove this when we support subclasses from a query
 @Override
 public Extent getExtent(ExecutionContext ec, Class c, boolean subclasses) {
   AbstractClassMetaData cmd =
       getMetaDataManager().getMetaDataForClass(c, ec.getClassLoaderResolver());
   validateMetaDataForClass(cmd);
   if (!cmd.isRequiresExtent()) {
     throw new NoExtentException(c.getName());
   }
   if (!getBooleanProperty(GET_EXTENT_CAN_RETURN_SUBCLASSES_PROPERTY, false)) {
     subclasses = false;
   }
   // In order to avoid breaking existing apps I'm hard-coding subclasses to
   // be false.  This breaks spec compliance since the no-arg overload of
   // PersistenceManager.getExtent() is supposed to return subclasses.
   return new DefaultCandidateExtent(ec, c, subclasses, cmd);
 }
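 // How this surfaces at the JDO API level (sketch, not part of the original source; MyEntity is a
 // placeholder persistence-capable class and "pm" an open PersistenceManager).
 private static void extentUsageExample(javax.jdo.PersistenceManager pm) {
   javax.jdo.Extent<MyEntity> extent = pm.getExtent(MyEntity.class, true);
   try {
     for (MyEntity e : extent) {
       // ... visit each persistent instance (subclasses only if the property above allows it) ...
     }
   } finally {
     extent.closeAll();
   }
 }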
 /** Method to (re)connect the provider to the specified ExecutionContext and class metadata. */
 public void connect(ExecutionContext ec, AbstractClassMetaData cmd) {
   int fieldCount = cmd.getMemberCount();
   this.cmd = cmd;
   this.dirtyFields = new boolean[fieldCount];
   this.loadedFields = new boolean[fieldCount];
   this.dirty = false;
   this.myEC = ec;
   this.myFP = myEC.getFetchPlan().getFetchPlanForClass(cmd);
   this.lock = new ReentrantLock();
   this.lockMode = LockManager.LOCK_MODE_NONE;
   this.savedFlags = 0;
   this.savedLoadedFields = null;
   this.objectType = 0;
   this.activity = ActivityState.NONE;
   this.myVersion = null;
   this.transactionalVersion = null;
   this.persistenceFlags = 0;
 }
  /**
   * Convenience method that takes a result set that contains a discriminator column and returns the
   * class name that it represents.
   *
   * @param discrimMapping Mapping for the discriminator column
   * @param dismd Metadata for the discriminator
   * @param rs The result set
   * @param ec execution context
   * @return The class name for the object represented in the current row
   */
  public static String getClassNameFromDiscriminatorResultSetRow(
      JavaTypeMapping discrimMapping,
      DiscriminatorMetaData dismd,
      ResultSet rs,
      ExecutionContext ec) {
    String rowClassName = null;

    if (discrimMapping != null && dismd.getStrategy() != DiscriminatorStrategy.NONE) {
      try {
        String discriminatorColName =
            discrimMapping.getDatastoreMapping(0).getColumn().getIdentifier().getIdentifierName();
        String discriminatorValue = rs.getString(discriminatorColName);
        rowClassName =
            ec.getMetaDataManager().getClassNameFromDiscriminatorValue(discriminatorValue, dismd);
      } catch (SQLException e) {
        // discriminator column doesn't exist with this name
      }
    }
    return rowClassName;
  }
 public void setObject(ExecutionContext ec, PreparedStatement ps, int[] exprIndex, Object value) {
   if (value == null) {
     getDatastoreMapping(0).setObject(ps, exprIndex[0], null);
   } else if (datastoreMappings != null
       && datastoreMappings.length > 0
       && datastoreMappings[0].isStringBased()) {
     TypeConverter conv =
         ec.getNucleusContext()
             .getTypeManager()
             .getTypeConverterForType(LocalTime.class, String.class);
     if (conv != null) {
       Object obj = conv.toDatastoreType(value);
       getDatastoreMapping(0).setObject(ps, exprIndex[0], obj);
     } else {
       throw new NucleusUserException("This type doesn't support persistence as a String");
     }
   } else {
     LocalTime val = (LocalTime) value;
     Calendar cal = Calendar.getInstance();
     cal.set(0, 0, 0, val.getHour(), val.getMinute(), val.getSecond());
     getDatastoreMapping(0).setObject(ps, exprIndex[0], cal);
   }
 }
  /*
   * (non-Javadoc)
   * @see org.datanucleus.store.mapping.JavaTypeMapping#getObject(org.datanucleus.ExecutionContext, java.lang.Object, int[])
   */
  public Object getObject(ExecutionContext ec, ResultSet resultSet, int[] exprIndex) {
    try {
      // Check for null entries
      if (getDatastoreMapping(0).getObject(resultSet, exprIndex[0]) == null) {
        return null;
      }
    } catch (Exception e) {
      // Do nothing
    }

    if (singleColumn) {
      Timestamp ts = (Timestamp) getDatastoreMapping(0).getObject(resultSet, exprIndex[0]);
      GregorianCalendar cal = new GregorianCalendar();
      cal.setTimeInMillis(ts.getTime());

      String timezoneID =
          ec.getNucleusContext()
              .getPersistenceConfiguration()
              .getStringProperty("datanucleus.ServerTimeZoneID");
      if (timezoneID != null) {
        // Apply server timezone ID since we don't know what it was upon persistence
        cal.setTimeZone(TimeZone.getTimeZone(timezoneID));
      }
      return cal;
    } else {
      // (Timestamp millisecs, Timezone) implementation
      long millisecs = getDatastoreMapping(0).getLong(resultSet, exprIndex[0]);

      GregorianCalendar cal = new GregorianCalendar();
      cal.setTime(new Date(millisecs));
      String timezoneId = getDatastoreMapping(1).getString(resultSet, exprIndex[1]);
      if (timezoneId != null) {
        cal.setTimeZone(TimeZone.getTimeZone(timezoneId));
      }
      return cal;
    }
  }
 /**
  * Accessor for the current DatastoreService for this ExecutionContext. Each PM/EM has its own
  * DatastoreService.
  *
  * @param ec ExecutionContext
  * @return The DatastoreService
  */
 public DatastoreService getDatastoreService(ExecutionContext ec) {
   ManagedConnection mconn = ec.getStoreManager().getConnection(ec);
   return ((EmulatedXAResource) mconn.getXAResource()).getDatastoreService();
 }
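 // Usage sketch (not part of the original source; App Engine low-level datastore API assumed).
 // "key" is a placeholder com.google.appengine.api.datastore.Key supplied by the caller.
 private void datastoreServiceUsageExample(
     ExecutionContext ec, com.google.appengine.api.datastore.Key key) {
   DatastoreService ds = getDatastoreService(ec);
   try {
     com.google.appengine.api.datastore.Entity entity = ds.get(key);
     // ... read entity.getProperties() ...
   } catch (com.google.appengine.api.datastore.EntityNotFoundException enfe) {
     // No entity is stored under this key
   }
 }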
 /**
  * Accessor for the current DatastoreTransaction for this ExecutionContext. Each PM/EM has its own
  * DatastoreService, and consequently can have a current DatastoreTransaction.
  *
  * @param ec ExecutionContext
  * @return The DatastoreTransaction if active, or null
  */
 public DatastoreTransaction getDatastoreTransaction(ExecutionContext ec) {
   ManagedConnection mconn = ec.getStoreManager().getConnection(ec);
   return ((EmulatedXAResource) mconn.getXAResource()).getCurrentTransaction();
 }
 public boolean useBackedSCOWrapperForMember(AbstractMemberMetaData mmd, ExecutionContext ec) {
   // Use backed SCO wrapper on relation field (to support legacy), and use simple SCO wrapper on
   // others
   return mmd.getRelationType(ec.getClassLoaderResolver()) != RelationType.NONE;
 }
  /**
   * Method to initialise the SCO from an existing value.
   *
   * @param m The object to set from
   * @param forInsert Whether the object needs inserting in the datastore with this value
   * @param forUpdate Whether to update the datastore with this value
   */
  public synchronized void initialise(java.util.Map m, boolean forInsert, boolean forUpdate) {
    if (m != null) {
      // Check for the case of serialised maps, and assign ObjectProviders to any PC keys/values
      // without one
      if (SCOUtils.mapHasSerialisedKeysAndValues(ownerMmd)
          && (ownerMmd.getMap().keyIsPersistent() || ownerMmd.getMap().valueIsPersistent())) {
        ExecutionContext ec = ownerOP.getExecutionContext();
        Iterator iter = m.entrySet().iterator();
        while (iter.hasNext()) {
          Map.Entry entry = (Map.Entry) iter.next();
          Object key = entry.getKey();
          Object value = entry.getValue();
          if (ownerMmd.getMap().keyIsPersistent()) {
            ObjectProvider objSM = ec.findObjectProvider(key);
            if (objSM == null) {
              objSM =
                  ec.getNucleusContext()
                      .getObjectProviderFactory()
                      .newForEmbedded(ec, key, false, ownerOP, ownerMmd.getAbsoluteFieldNumber());
            }
          }
          if (ownerMmd.getMap().valueIsPersistent()) {
            ObjectProvider objSM = ec.findObjectProvider(value);
            if (objSM == null) {
              objSM =
                  ec.getNucleusContext()
                      .getObjectProviderFactory()
                      .newForEmbedded(ec, value, false, ownerOP, ownerMmd.getAbsoluteFieldNumber());
            }
          }
        }
      }

      if (backingStore != null && useCache && !isCacheLoaded) {
        // Mark as loaded
        isCacheLoaded = true;
      }

      if (forInsert) {
        if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
          NucleusLogger.PERSISTENCE.debug(
              Localiser.msg(
                  "023007", ownerOP.getObjectAsPrintable(), ownerMmd.getName(), "" + m.size()));
        }

        makeDirty();
        if (useCache) {
          // Make sure we have all values loaded (e.g. if in optimistic tx and we put new entry)
          loadFromStore();
        }
        if (backingStore != null) {
          if (SCOUtils.useQueuedUpdate(ownerOP)) {
            Iterator iter = m.entrySet().iterator();
            while (iter.hasNext()) {
              Map.Entry entry = (Map.Entry) iter.next();
              ownerOP
                  .getExecutionContext()
                  .addOperationToQueue(
                      new MapPutOperation(ownerOP, backingStore, entry.getKey(), entry.getValue()));
            }
          } else {
            backingStore.putAll(ownerOP, m);
          }
        }
        delegate.putAll(m);
      } else if (forUpdate) {
        if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
          NucleusLogger.PERSISTENCE.debug(
              Localiser.msg(
                  "023008", ownerOP.getObjectAsPrintable(), ownerMmd.getName(), "" + m.size()));
        }

        // TODO This is clear+putAll. Improve it to work out what is changed
        delegate.clear();
        if (backingStore != null) {
          if (SCOUtils.useQueuedUpdate(ownerOP)) {
            // If not yet flushed to store then no need to add to queue (since will be handled via
            // insert)
            if (ownerOP.isFlushedToDatastore() || !ownerOP.getLifecycleState().isNew()) {
              ownerOP
                  .getExecutionContext()
                  .addOperationToQueue(new MapClearOperation(ownerOP, backingStore));
            }
          } else {
            backingStore.clear(ownerOP);
          }
        }
        if (useCache) {
          // Make sure we have all values loaded (e.g. if in optimistic tx and we put new entry)
          loadFromStore();
        }

        if (backingStore != null) {
          if (SCOUtils.useQueuedUpdate(ownerOP)) {
            // If not yet flushed to store then no need to add to queue (since will be handled via
            // insert)
            if (ownerOP.isFlushedToDatastore() || !ownerOP.getLifecycleState().isNew()) {
              Iterator iter = m.entrySet().iterator();
              while (iter.hasNext()) {
                Map.Entry entry = (Map.Entry) iter.next();
                ownerOP
                    .getExecutionContext()
                    .addOperationToQueue(
                        new MapPutOperation(
                            ownerOP, backingStore, entry.getKey(), entry.getValue()));
              }
            }
          } else {
            backingStore.putAll(ownerOP, m);
          }
        }
        delegate.putAll(m);
        makeDirty();
      } else {
        if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
          NucleusLogger.PERSISTENCE.debug(
              Localiser.msg(
                  "023007", ownerOP.getObjectAsPrintable(), ownerMmd.getName(), "" + m.size()));
        }
        delegate.clear();
        delegate.putAll(m);
      }
    }
  }
  /**
   * Utility that does a discriminator candidate query for the specified candidate and subclasses
   * and returns the class name of the instance that has the specified identity (if any).
   *
   * @param storeMgr RDBMS StoreManager
   * @param ec execution context
   * @param id The id
   * @param cmd Metadata for the root candidate class
   * @return Name of the class with this identity (or null if none found)
   */
  public static String getClassNameForIdUsingDiscriminator(
      RDBMSStoreManager storeMgr, ExecutionContext ec, Object id, AbstractClassMetaData cmd) {
    // Check for input error
    if (cmd == null || id == null) {
      return null;
    }

    SQLExpressionFactory exprFactory = storeMgr.getSQLExpressionFactory();
    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    DatastoreClass primaryTable = storeMgr.getDatastoreClass(cmd.getFullClassName(), clr);

    // Form the query to find which one of these classes has the instance with this id
    DiscriminatorStatementGenerator stmtGen =
        new DiscriminatorStatementGenerator(
            storeMgr, clr, clr.classForName(cmd.getFullClassName()), true, null, null);
    stmtGen.setOption(SelectStatementGenerator.OPTION_RESTRICT_DISCRIM);
    SelectStatement sqlStmt = stmtGen.getStatement();

    // Select the discriminator
    JavaTypeMapping discrimMapping = primaryTable.getDiscriminatorMapping(true);
    SQLTable discrimSqlTbl =
        SQLStatementHelper.getSQLTableForMappingOfTable(
            sqlStmt, sqlStmt.getPrimaryTable(), discrimMapping);
    sqlStmt.select(discrimSqlTbl, discrimMapping, null);

    // Restrict to this id
    JavaTypeMapping idMapping = primaryTable.getIdMapping();
    JavaTypeMapping idParamMapping = new PersistableIdMapping((PersistableMapping) idMapping);
    SQLExpression sqlFldExpr =
        exprFactory.newExpression(sqlStmt, sqlStmt.getPrimaryTable(), idMapping);
    SQLExpression sqlFldVal = exprFactory.newLiteralParameter(sqlStmt, idParamMapping, id, "ID");
    sqlStmt.whereAnd(sqlFldExpr.eq(sqlFldVal), true);

    // Perform the query
    try {
      ManagedConnection mconn = storeMgr.getConnection(ec);
      SQLController sqlControl = storeMgr.getSQLController();
      if (ec.getSerializeReadForClass(cmd.getFullClassName())) {
        sqlStmt.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, true);
      }

      try {
        PreparedStatement ps =
            SQLStatementHelper.getPreparedStatementForSQLStatement(sqlStmt, ec, mconn, null, null);
        String statement = sqlStmt.getSQLText().toSQL();
        try {
          ResultSet rs = sqlControl.executeStatementQuery(ec, mconn, statement, ps);
          try {
            if (rs != null) {
              while (rs.next()) {
                DiscriminatorMetaData dismd = discrimMapping.getTable().getDiscriminatorMetaData();
                return RDBMSQueryUtils.getClassNameFromDiscriminatorResultSetRow(
                    discrimMapping, dismd, rs, ec);
              }
            }
          } finally {
            rs.close();
          }
        } finally {
          sqlControl.closeStatement(mconn, ps);
        }
      } finally {
        mconn.release();
      }
    } catch (SQLException sqe) {
      NucleusLogger.DATASTORE.error("Exception thrown on querying of discriminator for id", sqe);
      throw new NucleusDataStoreException(sqe.toString(), sqe);
    }

    return null;
  }
  /**
   * Utility that does a union candidate query for the specified candidate(s) and subclasses and
   * returns the class name of the instance that has the specified identity (if any).
   *
   * @param storeMgr RDBMS StoreManager
   * @param ec execution context
   * @param id The id
   * @param rootCmds Metadata for the classes at the root
   * @return Name of the class with this identity (or null if none found)
   */
  public static String getClassNameForIdUsingUnion(
      RDBMSStoreManager storeMgr,
      ExecutionContext ec,
      Object id,
      List<AbstractClassMetaData> rootCmds) {
    // Check for input error
    if (rootCmds == null || rootCmds.isEmpty() || id == null) {
      return null;
    }

    SQLExpressionFactory exprFactory = storeMgr.getSQLExpressionFactory();
    ClassLoaderResolver clr = ec.getClassLoaderResolver();

    // Form a query UNIONing all possible root candidates (and their subclasses)
    Iterator<AbstractClassMetaData> rootCmdIter = rootCmds.iterator();

    AbstractClassMetaData sampleCmd =
        null; // Metadata for sample class in the tree so we can check if needs locking
    SelectStatement sqlStmtMain = null;
    while (rootCmdIter.hasNext()) {
      AbstractClassMetaData rootCmd = rootCmdIter.next();
      DatastoreClass rootTbl = storeMgr.getDatastoreClass(rootCmd.getFullClassName(), clr);
      if (rootTbl == null) {
        // Class must be using "subclass-table" (no table of its own) so find where it is
        AbstractClassMetaData[] subcmds = storeMgr.getClassesManagingTableForClass(rootCmd, clr);
        if (subcmds == null || subcmds.length == 0) {
          // No table for this class so ignore
        } else {
          for (int i = 0; i < subcmds.length; i++) {
            UnionStatementGenerator stmtGen =
                new UnionStatementGenerator(
                    storeMgr,
                    clr,
                    clr.classForName(subcmds[i].getFullClassName()),
                    true,
                    null,
                    null);
            stmtGen.setOption(SelectStatementGenerator.OPTION_SELECT_NUCLEUS_TYPE);
            if (sqlStmtMain == null) {
              sampleCmd = subcmds[i];
              sqlStmtMain = stmtGen.getStatement();

              // WHERE (object id) = ?
              JavaTypeMapping idMapping = sqlStmtMain.getPrimaryTable().getTable().getIdMapping();
              JavaTypeMapping idParamMapping =
                  new PersistableIdMapping((PersistableMapping) idMapping);
              SQLExpression fieldExpr =
                  exprFactory.newExpression(sqlStmtMain, sqlStmtMain.getPrimaryTable(), idMapping);
              SQLExpression fieldVal =
                  exprFactory.newLiteralParameter(sqlStmtMain, idParamMapping, id, "ID");
              sqlStmtMain.whereAnd(fieldExpr.eq(fieldVal), true);
            } else {
              SelectStatement sqlStmt = stmtGen.getStatement();

              // WHERE (object id) = ?
              JavaTypeMapping idMapping = sqlStmt.getPrimaryTable().getTable().getIdMapping();
              JavaTypeMapping idParamMapping =
                  new PersistableIdMapping((PersistableMapping) idMapping);
              SQLExpression fieldExpr =
                  exprFactory.newExpression(sqlStmt, sqlStmt.getPrimaryTable(), idMapping);
              SQLExpression fieldVal =
                  exprFactory.newLiteralParameter(sqlStmt, idParamMapping, id, "ID");
              sqlStmt.whereAnd(fieldExpr.eq(fieldVal), true);

              sqlStmtMain.union(sqlStmt);
            }
          }
        }
      } else {
        UnionStatementGenerator stmtGen =
            new UnionStatementGenerator(
                storeMgr, clr, clr.classForName(rootCmd.getFullClassName()), true, null, null);
        stmtGen.setOption(SelectStatementGenerator.OPTION_SELECT_NUCLEUS_TYPE);
        if (sqlStmtMain == null) {
          sampleCmd = rootCmd;
          sqlStmtMain = stmtGen.getStatement();

          // WHERE (object id) = ?
          JavaTypeMapping idMapping = sqlStmtMain.getPrimaryTable().getTable().getIdMapping();
          JavaTypeMapping idParamMapping = new PersistableIdMapping((PersistableMapping) idMapping);
          SQLExpression fieldExpr =
              exprFactory.newExpression(sqlStmtMain, sqlStmtMain.getPrimaryTable(), idMapping);
          SQLExpression fieldVal =
              exprFactory.newLiteralParameter(sqlStmtMain, idParamMapping, id, "ID");
          sqlStmtMain.whereAnd(fieldExpr.eq(fieldVal), true);
        } else {
          SelectStatement sqlStmt = stmtGen.getStatement();

          // WHERE (object id) = ?
          JavaTypeMapping idMapping = sqlStmt.getPrimaryTable().getTable().getIdMapping();
          JavaTypeMapping idParamMapping = new PersistableIdMapping((PersistableMapping) idMapping);
          SQLExpression fieldExpr =
              exprFactory.newExpression(sqlStmt, sqlStmt.getPrimaryTable(), idMapping);
          SQLExpression fieldVal =
              exprFactory.newLiteralParameter(sqlStmt, idParamMapping, id, "ID");
          sqlStmt.whereAnd(fieldExpr.eq(fieldVal), true);

          sqlStmtMain.union(sqlStmt);
        }
      }
    }

    // Perform the query
    try {
      ManagedConnection mconn = storeMgr.getConnection(ec);
      SQLController sqlControl = storeMgr.getSQLController();
      if (ec.getSerializeReadForClass(sampleCmd.getFullClassName())) {
        sqlStmtMain.addExtension(SQLStatement.EXTENSION_LOCK_FOR_UPDATE, true);
      }

      try {
        PreparedStatement ps =
            SQLStatementHelper.getPreparedStatementForSQLStatement(
                sqlStmtMain, ec, mconn, null, null);
        String statement = sqlStmtMain.getSQLText().toSQL();
        try {
          ResultSet rs = sqlControl.executeStatementQuery(ec, mconn, statement, ps);
          try {
            if (rs != null) {
              while (rs.next()) {
                try {
                  return rs.getString(UnionStatementGenerator.NUC_TYPE_COLUMN).trim();
                } catch (SQLException sqle) {
                  // Ignore and check the next row
                }
              }
            }
          } finally {
            rs.close();
          }
        } finally {
          sqlControl.closeStatement(mconn, ps);
        }
      } finally {
        mconn.release();
      }
    } catch (SQLException sqe) {
      NucleusLogger.DATASTORE.error("Exception with UNION statement", sqe);
      throw new NucleusDataStoreException(sqe.toString(), sqe);
    }

    return null;
  }
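  // Caller sketch (not part of the original source): one possible way to combine the two utilities
  // above when resolving the concrete class for a datastore identity. storeMgr, ec, id, rootCmd and
  // rootCmds are supplied by the caller.
  private static String resolveClassNameForIdExample(
      RDBMSStoreManager storeMgr,
      ExecutionContext ec,
      Object id,
      AbstractClassMetaData rootCmd,
      List<AbstractClassMetaData> rootCmds) {
    String className = getClassNameForIdUsingDiscriminator(storeMgr, ec, id, rootCmd);
    if (className == null) {
      className = getClassNameForIdUsingUnion(storeMgr, ec, id, rootCmds);
    }
    return className;
  }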