Code Example #1
 /**
  * Method to return a string form of this object, for debugging convenience.
  *
  * @return The String form
  */
 public String toString() {
   return StringUtils.toJVMIDString(this)
       + " [conn="
       + StringUtils.toJVMIDString(conn)
       + ", commitOnRelease="
       + commitOnRelease
       + ", closeOnRelease="
       + closeOnRelease
       + ", closeOnTxnEnd="
       + closeAfterTransactionEnd()
       + "]";
 }
Code Example #2
  /**
   * Utility to take a ResultSet and return a ResultObjectFactory for extracting the results,
   * assuming that no candidate class is supplied. The QueryResult will return either a result class
   * type, or Object/Object[] depending on whether a ResultClass has been defined.
   *
   * @param storeMgr RDBMS StoreManager
   * @param rs The ResultSet
   * @param resultClass Result class if required (or null)
   * @return The query ResultObjectFactory
   */
  public static ResultObjectFactory getResultObjectFactoryForNoCandidateClass(
      RDBMSStoreManager storeMgr, ResultSet rs, Class resultClass) {
    // No candidate class, so use resultClass or Object/Object[]
    Class requiredResultClass = resultClass;
    int numberOfColumns = 0;
    String[] resultFieldNames = null;
    try {
      ResultSetMetaData rsmd = rs.getMetaData();
      numberOfColumns = rsmd.getColumnCount();
      if (requiredResultClass == null) {
        if (numberOfColumns == 1) {
          requiredResultClass = Object.class;
        } else {
          requiredResultClass = Object[].class;
        }
      }

      // Generate names to use for the fields based on the column names
      resultFieldNames = new String[numberOfColumns];
      for (int i = 0; i < numberOfColumns; i++) {
        // Use "label" (i.e. SQL alias) if specified, otherwise "name"
        String colName = rsmd.getColumnName(i + 1);
        String colLabel = rsmd.getColumnLabel(i + 1);
        if (StringUtils.isWhitespace(colLabel)) {
          resultFieldNames[i] = colName;
        } else {
          resultFieldNames[i] = colLabel;
        }
      }
    } catch (SQLException sqe) {
      // Ignore and fall back to whatever was determined before the failure
    }

    return new ResultClassROF(storeMgr, requiredResultClass, resultFieldNames);
  }
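
The label-versus-name fallback above is plain JDBC: ResultSetMetaData exposes both the SQL alias (the "label") and the underlying column name. Below is a minimal, standalone sketch of the same rule using only java.sql; the in-memory H2 URL and the PERSON table are illustrative assumptions, not part of the code above.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnNameSketch {
  /** Returns one name per column, preferring the SQL alias (label) over the raw column name. */
  public static String[] fieldNamesFor(ResultSet rs) throws SQLException {
    ResultSetMetaData rsmd = rs.getMetaData();
    String[] names = new String[rsmd.getColumnCount()];
    for (int i = 0; i < names.length; i++) {
      String label = rsmd.getColumnLabel(i + 1); // alias if one was given in the SELECT
      String name = rsmd.getColumnName(i + 1);   // underlying column name
      names[i] = (label == null || label.trim().isEmpty()) ? name : label;
    }
    return names;
  }

  public static void main(String[] args) throws SQLException {
    // Assumes an H2 driver on the classpath; the URL is purely illustrative
    try (Connection c = DriverManager.getConnection("jdbc:h2:mem:demo");
         Statement s = c.createStatement()) {
      s.execute("CREATE TABLE PERSON (ID INT, FULL_NAME VARCHAR(50))");
      try (ResultSet rs = s.executeQuery("SELECT ID, FULL_NAME AS NAME FROM PERSON")) {
        for (String n : fieldNamesFor(rs)) {
          System.out.println(n); // expected to prefer the alias NAME over FULL_NAME
        }
      }
    }
  }
}
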
Code Example #3
 public String toString() {
   return "StateManager[pc="
       + StringUtils.toJVMIDString(getObject())
       + ", lifecycle="
       + myLC
       + "]";
 }
Code Example #4
  /**
   * Constructs a Microsoft SQL Server adapter based on the given JDBC metadata.
   *
   * @param metadata the database metadata.
   */
  public MSSQLServerAdapter(DatabaseMetaData metadata) {
    super(metadata);

    reservedKeywords.addAll(StringUtils.convertCommaSeparatedStringToSet(MSSQL_RESERVED_WORDS));

    supportedOptions.add(IDENTITY_COLUMNS);
    supportedOptions.add(LOCK_OPTION_PLACED_AFTER_FROM);
    supportedOptions.add(LOCK_OPTION_PLACED_WITHIN_JOIN);
    supportedOptions.add(ANALYSIS_METHODS);
    supportedOptions.add(STORED_PROCEDURES);

    supportedOptions.remove(BOOLEAN_COMPARISON);
    supportedOptions.remove(DEFERRED_CONSTRAINTS);
    supportedOptions.remove(FK_DELETE_ACTION_DEFAULT);
    supportedOptions.remove(FK_DELETE_ACTION_RESTRICT);
    supportedOptions.remove(FK_DELETE_ACTION_NULL);
    supportedOptions.remove(FK_UPDATE_ACTION_DEFAULT);
    supportedOptions.remove(FK_UPDATE_ACTION_RESTRICT);
    supportedOptions.remove(FK_UPDATE_ACTION_NULL);

    if (datastoreMajorVersion >= 11) {
      // SQLServer 2012+ support these features
      supportedOptions.add(SEQUENCES);
      supportedOptions.add(ORDERBY_NULLS_DIRECTIVES);
    }

    if (datastoreMajorVersion >= 12) {
      // SQLServer 2014+ support these features (what about earlier?)
      supportedOptions.add(OPERATOR_BITWISE_AND);
      supportedOptions.add(OPERATOR_BITWISE_OR);
      supportedOptions.add(OPERATOR_BITWISE_XOR);
    }
  }
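
The version checks in this constructor depend on the major version reported by the JDBC driver (SQL Server 2012 reports 11, SQL Server 2014 reports 12). A small sketch of how that value can be obtained through the standard DatabaseMetaData API; the helper class name is illustrative.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;

public class DatastoreVersionSketch {
  /** Reads the database major version that feature checks like the ones above are based on. */
  public static int majorVersionOf(Connection conn) throws SQLException {
    DatabaseMetaData md = conn.getMetaData();
    int major = md.getDatabaseMajorVersion(); // e.g. 11 = SQL Server 2012, 12 = SQL Server 2014
    System.out.println(md.getDatabaseProductName() + " major version " + major);
    return major;
  }
}
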
Code Example #5
  /**
   * Convenience method to retrieve field values from an L2 cached object if they are loaded in that
   * object. If the object is not in the L2 cache then this just returns, and similarly if the required
   * fields aren't available.
   *
   * @param fieldNumbers Numbers of fields to load from the L2 cache
   * @return The fields that couldn't be loaded
   */
  protected int[] loadFieldsFromLevel2Cache(int[] fieldNumbers) {
    // Only continue if there are fields, and not being deleted/flushed etc
    if (fieldNumbers == null
        || fieldNumbers.length == 0
        || myEC.isFlushing()
        || myLC.isDeleted()
        || isDeleting()
        || getExecutionContext().getTransaction().isCommitting()) {
      return fieldNumbers;
    }
    // TODO Drop this check when we're confident that this doesn't affect some use-cases
    if (!myEC.getNucleusContext()
        .getConfiguration()
        .getBooleanProperty(PropertyNames.PROPERTY_CACHE_L2_LOADFIELDS, true)) {
      return fieldNumbers;
    }

    Level2Cache l2cache = myEC.getNucleusContext().getLevel2Cache();
    if (l2cache != null && myEC.getNucleusContext().isClassCacheable(cmd)) {
      CachedPC<T> cachedPC = l2cache.get(myID);
      if (cachedPC != null) {
        int[] cacheFieldsToLoad =
            ClassUtils.getFlagsSetTo(cachedPC.getLoadedFields(), fieldNumbers, true);
        if (cacheFieldsToLoad != null && cacheFieldsToLoad.length > 0) {
          if (NucleusLogger.CACHE.isDebugEnabled()) {
            NucleusLogger.CACHE.debug(
                Localiser.msg(
                    "026034",
                    StringUtils.toJVMIDString(getObject()),
                    myID,
                    StringUtils.intArrayToString(cacheFieldsToLoad)));
          }

          L2CacheRetrieveFieldManager l2RetFM = new L2CacheRetrieveFieldManager(this, cachedPC);
          this.replaceFields(cacheFieldsToLoad, l2RetFM);
          int[] fieldsNotLoaded = l2RetFM.getFieldsNotLoaded();
          if (fieldsNotLoaded != null) {
            for (int i = 0; i < fieldsNotLoaded.length; i++) {
              loadedFields[fieldsNotLoaded[i]] = false;
            }
          }
        }
      }
    }

    return ClassUtils.getFlagsSetTo(loadedFields, fieldNumbers, false);
  }
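
ClassUtils.getFlagsSetTo(...) is used twice above to narrow a set of field numbers by the loaded-flags array. The following is a hedged, standalone equivalent of the contract as it is used here (returning null when nothing matches); it is not the DataNucleus implementation itself.

import java.util.ArrayList;
import java.util.List;

public class FlagFilterSketch {
  /**
   * Returns the members of fieldNumbers whose flag equals the requested state,
   * or null when none match (mirroring how the callers above test the result).
   */
  public static int[] flagsSetTo(boolean[] flags, int[] fieldNumbers, boolean state) {
    List<Integer> matching = new ArrayList<>();
    for (int fieldNumber : fieldNumbers) {
      if (flags[fieldNumber] == state) {
        matching.add(fieldNumber);
      }
    }
    if (matching.isEmpty()) {
      return null;
    }
    int[] result = new int[matching.size()];
    for (int i = 0; i < result.length; i++) {
      result[i] = matching.get(i);
    }
    return result;
  }
}
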
Code Example #6
 public VersionMetaData setStrategy(String strategy) {
   if (StringUtils.isWhitespace(strategy)
       || VersionStrategy.getVersionStrategy(strategy) == null) {
     throw new RuntimeException(LOCALISER.msg("044156"));
   }
   this.versionStrategy = VersionStrategy.getVersionStrategy(strategy);
   return this;
 }
Code Example #7
 public SequenceMetaData setAllocationSize(String allocationSize) {
   if (!StringUtils.isWhitespace(allocationSize)) {
     try {
       this.allocationSize = Integer.parseInt(allocationSize);
      } catch (NumberFormatException nfe) {
        // Ignore unparsable values and keep the existing allocation size
      }
   }
   return this;
 }
Code Example #8
 public SequenceMetaData setInitialValue(String initialValue) {
   if (!StringUtils.isWhitespace(initialValue)) {
     try {
       this.initialValue = Integer.parseInt(initialValue);
      } catch (NumberFormatException nfe) {
        // Ignore unparsable values and keep the existing initial value
      }
   }
   return this;
 }
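
Both setters above share a deliberately lenient pattern: blank input is ignored and unparsable numbers are silently dropped, so metadata parsing never fails on a malformed attribute. A standalone sketch of that pattern; the class name, field, and default value are illustrative.

public class LenientIntSetterSketch {
  private int allocationSize = 50; // illustrative default

  /** Applies the value only when it is non-blank and a valid integer; otherwise keeps the current value. */
  public LenientIntSetterSketch setAllocationSize(String value) {
    if (value != null && !value.trim().isEmpty()) {
      try {
        this.allocationSize = Integer.parseInt(value.trim());
      } catch (NumberFormatException nfe) {
        // Ignore malformed input and keep the previous value
      }
    }
    return this; // builder-style chaining, as in the metadata setters above
  }
}
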
Code Example #9
  /**
   * Convenience method to create a new PreparedStatement for a query.
   *
   * @param conn The Connection to use for the statement
   * @param stmtText Statement text
   * @param resultSetType Type of result set
   * @param resultSetConcurrency Concurrency for the result set
   * @return The PreparedStatement
   * @throws SQLException thrown if an error occurs creating the statement
   */
  public PreparedStatement getStatementForQuery(
      ManagedConnection conn, String stmtText, String resultSetType, String resultSetConcurrency)
      throws SQLException {
    Connection c = (Connection) conn.getConnection();
    if (supportsBatching) {
      // Check for a waiting batched statement that is ready for processing
      ConnectionStatementState state = getConnectionStatementState(conn);
      if (state != null && state.processable) {
        // Process the batch statement before returning our new query statement
        processConnectionStatement(conn);
      }
    }

    // Create a new PreparedStatement for this query
    PreparedStatement ps = null;
    if (resultSetType != null || resultSetConcurrency != null) {
      int rsTypeValue = ResultSet.TYPE_FORWARD_ONLY;
      if (resultSetType != null) {
        if (resultSetType.equals("scroll-sensitive")) {
          rsTypeValue = ResultSet.TYPE_SCROLL_SENSITIVE;
        } else if (resultSetType.equals("scroll-insensitive")) {
          rsTypeValue = ResultSet.TYPE_SCROLL_INSENSITIVE;
        }
      }

      int rsConcurrencyValue = ResultSet.CONCUR_READ_ONLY;
      if (resultSetConcurrency != null && resultSetConcurrency.equals("updateable")) {
        rsConcurrencyValue = ResultSet.CONCUR_UPDATABLE;
      }
      ps = c.prepareStatement(stmtText, rsTypeValue, rsConcurrencyValue);
      // In case using statement caching and given one with batched statements left hanging (C3P0)
      ps.clearBatch();
    } else {
      ps = c.prepareStatement(stmtText);
      // In case using statement caching and given one with batched statements left hanging (C3P0)
      ps.clearBatch();
    }

    if (queryTimeout > 0) {
      // Apply any query timeout
      ps.setQueryTimeout(queryTimeout / 1000); // queryTimeout is in milliseconds
    }
    if (NucleusLogger.DATASTORE.isDebugEnabled()) {
      NucleusLogger.DATASTORE.debug(LOCALISER.msg("052110", StringUtils.toJVMIDString(ps)));
    }

    if (!jdbcStatements) {
      // Wrap with our parameter logger
      ps = new ParamLoggingPreparedStatement(ps, stmtText);
      ((ParamLoggingPreparedStatement) ps).setParamsInAngleBrackets(paramValuesInBrackets);
    }

    return ps;
  }
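
The option strings "scroll-sensitive", "scroll-insensitive" and "updateable" map directly onto the standard java.sql.ResultSet constants. A minimal sketch of that mapping against plain JDBC, assuming nothing beyond the java.sql API; the helper class name is illustrative.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ScrollableStatementSketch {
  /** Prepares a statement with the requested scroll type and concurrency, defaulting to forward-only/read-only. */
  public static PreparedStatement prepare(
      Connection c, String sql, String resultSetType, String resultSetConcurrency)
      throws SQLException {
    int type = ResultSet.TYPE_FORWARD_ONLY;
    if ("scroll-sensitive".equals(resultSetType)) {
      type = ResultSet.TYPE_SCROLL_SENSITIVE;
    } else if ("scroll-insensitive".equals(resultSetType)) {
      type = ResultSet.TYPE_SCROLL_INSENSITIVE;
    }
    int concurrency =
        "updateable".equals(resultSetConcurrency) ? ResultSet.CONCUR_UPDATABLE : ResultSet.CONCUR_READ_ONLY;
    return c.prepareStatement(sql, type, concurrency);
  }
}
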
Code Example #10
 public String toString() {
   return "StmtState : stmt="
       + StringUtils.toJVMIDString(stmt)
       + " sql="
       + stmtText
       + " batch="
       + batchSize
       + " closeOnProcess="
       + closeStatementOnProcess;
 }
Code Example #11
  /**
   * Convenience method to update a Level2 cached version of this object if cacheable and has not
   * been modified during this transaction.
   *
   * @param fieldNumbers Numbers of fields to update in L2 cached object
   */
  protected void updateLevel2CacheForFields(int[] fieldNumbers) {
    String updateMode = (String) myEC.getProperty(PropertyNames.PROPERTY_CACHE_L2_UPDATE_MODE);
    if (updateMode != null && updateMode.equalsIgnoreCase("commit-only")) {
      return;
    }
    if (fieldNumbers == null || fieldNumbers.length == 0) {
      return;
    }

    Level2Cache l2cache = myEC.getNucleusContext().getLevel2Cache();
    if (l2cache != null
        && myEC.getNucleusContext().isClassCacheable(cmd)
        && !myEC.isObjectModifiedInTransaction(myID)) {
      CachedPC<T> cachedPC = l2cache.get(myID);
      if (cachedPC != null) {
        // This originally just updated the L2 cache for fields where the L2 cache didn't have a
        // value for that field, like this
        /*
        int[] cacheFieldsToLoad = ClassUtils.getFlagsSetTo(cachedPC.getLoadedFields(), fieldNumbers, false);
        if (cacheFieldsToLoad == null || cacheFieldsToLoad.length == 0)
        {
            return;
        }
        */
        int[] cacheFieldsToLoad = fieldNumbers;
        CachedPC copyCachedPC = cachedPC.getCopy();
        if (NucleusLogger.CACHE.isDebugEnabled()) {
          NucleusLogger.CACHE.debug(
              Localiser.msg(
                  "026033",
                  StringUtils.toJVMIDString(getObject()),
                  myID,
                  StringUtils.intArrayToString(cacheFieldsToLoad)));
        }

        provideFields(cacheFieldsToLoad, new L2CachePopulateFieldManager(this, copyCachedPC));

        // Replace the current L2 cached object with this one
        myEC.getNucleusContext().getLevel2Cache().put(getInternalObjectId(), copyCachedPC);
      }
    }
  }
Code Example #12
  /**
   * Convenience method to process the currently waiting statement for the passed Connection. Only
   * processes the statement if it is in processable state.
   *
   * @param conn The connection
   * @return The return codes from the statement batch
   * @throws SQLException if an error occurs processing the batch
   */
  protected int[] processConnectionStatement(ManagedConnection conn) throws SQLException {
    ConnectionStatementState state = getConnectionStatementState(conn);
    if (state == null || !state.processable) {
      return null;
    }

    long startTime = System.currentTimeMillis();
    if (NucleusLogger.DATASTORE_NATIVE.isDebugEnabled()) {
      if (state.stmt instanceof ParamLoggingPreparedStatement) {
        NucleusLogger.DATASTORE_NATIVE.debug(
            ((ParamLoggingPreparedStatement) state.stmt).getStatementWithParamsReplaced());
      } else {
        NucleusLogger.DATASTORE_NATIVE.debug(state.stmtText);
      }
    }

    int[] ind = state.stmt.executeBatch();
    state.stmt.clearBatch();

    if (NucleusLogger.DATASTORE.isDebugEnabled()) {
      NucleusLogger.DATASTORE.debug(
          LOCALISER.msg(
              "045001",
              "" + (System.currentTimeMillis() - startTime),
              StringUtils.intArrayToString(ind),
              StringUtils.toJVMIDString(state.stmt)));
    }

    // Remove the current connection statement
    removeConnectionStatementState(conn);

    // Close the statement if it is registered for closing after processing
    if (state.closeStatementOnProcess) {
      state.stmt.close();
    }

    return ind;
  }
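
Stripped of the connection-state bookkeeping, the core of this method is the standard JDBC batching sequence: addBatch per parameter set, then executeBatch, then clearBatch. A minimal sketch with plain java.sql; the PERSON table and INSERT text are illustrative assumptions.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class BatchExecuteSketch {
  /** Inserts all names in one round trip and returns the per-statement update counts. */
  public static int[] insertAll(Connection c, String[] names) throws SQLException {
    try (PreparedStatement ps = c.prepareStatement("INSERT INTO PERSON (NAME) VALUES (?)")) {
      for (String name : names) {
        ps.setString(1, name);
        ps.addBatch(); // queue this parameter set
      }
      int[] counts = ps.executeBatch(); // send the whole batch to the datastore
      ps.clearBatch();                  // reset for any further use of the statement
      return counts;
    }
  }
}
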
Code Example #13
  static {
    try {
      Class.forName(driverName);
      monitorConfig.loadMonitorParam();
      QUERY_LOG_TABLE_NAME = monitorConfig.getQueryLogResultTable();
      if (StringUtils.isEmpty(QUERY_LOG_TABLE_NAME)) {
        logger.error(
            "table name not defined, please set param [query.log.parse.result.table] in kylin.properties");
      }

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Code Example #14
 protected void setUp() throws Exception {
   super.setUp();
   if (runTestsForDatastore()) {
     PersistenceManager pm = pmf.getPersistenceManager();
     Transaction tx = pm.currentTransaction();
     try {
       tx.begin();
       pm.newQuery(SamplePoint.class).deletePersistentAll();
       pm.newQuery(SampleLineString.class).deletePersistentAll();
       pm.newQuery(SamplePolygon.class).deletePersistentAll();
       pm.newQuery(SampleGeometryCollection.class).deletePersistentAll();
     } finally {
       tx.commit();
     }
     Connection sqlConn = null;
     try {
       tx.begin();
       sqlConn = (Connection) pm.getDataStoreConnection();
       String connUrl = pmf.getConnectionURL().toLowerCase();
       String fileName = "sample_jts_postgis.sql";
       if (connUrl.contains("mysql") || connUrl.contains("mariadb")) {
         fileName = "sample_jts_mysql.sql";
       }
       File file =
           new File(
               PgGeometryMySQLTest.class
                   .getResource("/org/datanucleus/samples/data/" + fileName)
                   .toURI());
       String s = "";
       InputStream is = new FileInputStream(file);
       int c;
       while ((c = is.read()) != -1) {
         s += (char) c;
       }
       String ss[] = StringUtils.split(s, ";");
       for (int i = 0; i < ss.length; i++) {
         sqlConn.createStatement().execute(ss[i]);
       }
       is.close();
     } finally {
       sqlConn.close();
       tx.commit();
     }
   }
 }
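
The fixture load above amounts to "split the script on ';' and execute each piece". A compact standalone version of that step using java.nio for the file read; the script path parameter is an assumption, and no attempt is made to handle ';' inside string literals.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

public class SqlScriptRunnerSketch {
  /** Executes each ';'-separated statement of the script on the given connection. */
  public static void runScript(Connection conn, String scriptPath) throws IOException, SQLException {
    String script = new String(Files.readAllBytes(Paths.get(scriptPath)), StandardCharsets.UTF_8);
    try (Statement stmt = conn.createStatement()) {
      for (String sql : script.split(";")) {
        if (!sql.trim().isEmpty()) {
          stmt.execute(sql);
        }
      }
    }
  }
}
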
Code Example #15
  /**
   * Initialisation method. This should be called AFTER using the populate method if you are going
   * to use populate. It creates the internal convenience arrays etc needed for normal operation.
   *
   * @param clr ClassLoaderResolver to use for any loading operations
   * @param mmgr MetaData manager
   */
  public void initialise(ClassLoaderResolver clr, MetaDataManager mmgr) {
    if (hasExtension("field-name")) {
      // User has provided extension "field-name" meaning that we store the version in the column
      // for the specified field (like in JPA)
      String val = getValueForExtension("field-name");
      if (!StringUtils.isWhitespace(val)) {
        this.fieldName = val;
        this.columnName = null;
      }
    }

    if (fieldName == null) {
      // Cater for user specifying column name, or column
      if (columnMetaData == null && columnName != null) {
        columnMetaData = new ColumnMetaData();
        columnMetaData.setName(columnName);
        columnMetaData.parent = this;
      }

      // Interpret the "indexed" value to create our IndexMetaData where it wasn't specified that
      // way
      if (indexMetaData == null
          && columnMetaData != null
          && indexed != null
          && indexed != IndexedValue.FALSE) {
        indexMetaData = new IndexMetaData();
        indexMetaData.setUnique(indexed == IndexedValue.UNIQUE);
        indexMetaData.addColumn(columnMetaData.getName());
        indexMetaData.parent = this;
      }
    } else {
      if (getParent() instanceof AbstractClassMetaData) {
        AbstractMemberMetaData vermmd =
            ((AbstractClassMetaData) getParent()).getMetaDataForMember(fieldName);
        if (vermmd != null && java.util.Date.class.isAssignableFrom(vermmd.getType())) {
          NucleusLogger.GENERAL.debug(
              "Setting version-strategy of field "
                  + vermmd.getFullFieldName()
                  + " to DATE_TIME since is Date-based");
          versionStrategy = VersionStrategy.DATE_TIME;
        }
      }
    }
  }
Code Example #16
  /**
   * Method to execute a PreparedStatement (using PreparedStatement.execute()). Prints logging
   * information about timings.
   *
   * @param ec Execution Context
   * @param conn The connection (required since the one on PreparedStatement is not always the same
   *     so we can't use it)
   * @param stmt The statement text
   * @param ps The Prepared Statement
   * @return Whether the first result is a ResultSet (as per PreparedStatement.execute)
   * @throws SQLException Thrown if an error occurs
   */
  public boolean executeStatement(
      ExecutionContext ec, ManagedConnection conn, String stmt, PreparedStatement ps)
      throws SQLException {
    if (supportsBatching) {
      // Check for a waiting batched statement that is ready for processing
      ConnectionStatementState state = getConnectionStatementState(conn);
      if (state != null && state.processable) {
        // Process the batch statement before returning our new query statement
        processConnectionStatement(conn);
      }
    }

    // Process the normal execute statement
    long startTime = System.currentTimeMillis();
    if (NucleusLogger.DATASTORE_NATIVE.isDebugEnabled()) {
      if (ps instanceof ParamLoggingPreparedStatement) {
        NucleusLogger.DATASTORE_NATIVE.debug(
            ((ParamLoggingPreparedStatement) ps).getStatementWithParamsReplaced());
      } else {
        NucleusLogger.DATASTORE_NATIVE.debug(stmt);
      }
    }

    boolean flag = ps.execute();
    if (ec != null && ec.getStatistics() != null) {
      // Add to statistics
      ec.getStatistics().incrementNumWrites();
    }

    ps.clearBatch();
    if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
      NucleusLogger.DATASTORE_PERSIST.debug(
          LOCALISER.msg(
              "045002",
              "" + (System.currentTimeMillis() - startTime),
              StringUtils.toJVMIDString(ps)));
    }

    return flag;
  }
Code Example #17
 /**
  * Convenience method to close a PreparedStatement. If the statement is currently being used as a
  * batch, this will register it for closing when the batch is executed.
  *
  * @param conn The Connection
  * @param ps The PreparedStatement
  * @throws SQLException if an error occurs closing the statement
  */
 public void closeStatement(ManagedConnection conn, PreparedStatement ps) throws SQLException {
   ConnectionStatementState state = getConnectionStatementState(conn);
   if (state != null && state.stmt == ps) {
     // Statement to be closed is the current batch, so register it for closing when it gets
     // processed
     state.closeStatementOnProcess = true;
   } else {
     try {
       if (NucleusLogger.DATASTORE.isDebugEnabled()) {
         NucleusLogger.DATASTORE.debug(LOCALISER.msg("052110", StringUtils.toJVMIDString(ps)));
       }
       ps.close();
     } catch (SQLException sqle) {
       // workaround for DBCP bug: even though PreparedStatement.close()
       // is defined as having no effect if already closed, DBCP
       // will throw SQLException
       if (!sqle.getMessage().equals("Already closed")) {
         throw sqle;
       }
     }
   }
 }
Code Example #18
 public SequenceMetaData setDatastoreSequence(String datastoreSequence) {
   this.datastoreSequence =
       (StringUtils.isWhitespace(datastoreSequence) ? null : datastoreSequence);
   return this;
 }
Code Example #19
 public VersionMetaData setColumnName(String columnName) {
   this.columnName = (StringUtils.isWhitespace(columnName) ? null : columnName);
   return this;
 }
Code Example #20
  /**
   * Convenience method to create a new PreparedStatement for an update.
   *
   * @param conn The Connection to use for the statement
   * @param stmtText Statement text
   * @param batchable Whether this statement is batchable; if not, any waiting batched statement is
   *     processed before this statement is returned
   * @param getGeneratedKeysFlag whether to request getGeneratedKeys for this statement
   * @return The PreparedStatement
   * @throws SQLException thrown if an error occurs creating the statement
   */
  public PreparedStatement getStatementForUpdate(
      ManagedConnection conn, String stmtText, boolean batchable, boolean getGeneratedKeysFlag)
      throws SQLException {
    Connection c = (Connection) conn.getConnection();
    if (supportsBatching) {
      ConnectionStatementState state = getConnectionStatementState(conn);
      if (state != null) {
        if (state.processable) {
          // We have a batchable statement in the queue that could be processed now if necessary
          if (!batchable) {
            // This new statement isn't batchable so process the existing one before returning our
            // new statement
            processConnectionStatement(conn);
          } else {
            // Check if we could batch onto this existing statement
            if (state.stmtText.equals(stmtText)) {
              // We can batch onto this statement
              if (maxBatchSize == -1 || state.batchSize < maxBatchSize) {
                state.batchSize++;
                // Have to wait until this new part is added before the batch is processable again
                state.processable = false;
                if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
                  NucleusLogger.DATASTORE_PERSIST.debug(
                      LOCALISER.msg("052100", stmtText, "" + state.batchSize));
                }
                return state.stmt;
              } else {
                // Reached max batch size so process it now and start again for this one
                if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
                  NucleusLogger.DATASTORE_PERSIST.debug(LOCALISER.msg("052101", state.stmtText));
                }
                processConnectionStatement(conn);
              }
            } else {
              // We can't batch using the current batch statement so process it first and return our
              // new one
              processConnectionStatement(conn);
            }
          }
        } else {
          if (batchable) {
            // The current statement is being batched so we can't batch this one since we can't
            // process the current statement now
            if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
              NucleusLogger.DATASTORE_PERSIST.debug(
                  LOCALISER.msg("052102", state.stmtText, stmtText));
            }
            batchable = false;
          }
        }
      }
    }

    PreparedStatement ps =
        (getGeneratedKeysFlag
            ? c.prepareStatement(stmtText, Statement.RETURN_GENERATED_KEYS)
            : c.prepareStatement(stmtText));
    // In case using statement caching and given one with batched statements left hanging (C3P0)
    ps.clearBatch();
    if (!jdbcStatements) {
      // Wrap with our parameter logger
      ps = new ParamLoggingPreparedStatement(ps, stmtText);
      ((ParamLoggingPreparedStatement) ps).setParamsInAngleBrackets(paramValuesInBrackets);
    }
    if (NucleusLogger.DATASTORE.isDebugEnabled()) {
      NucleusLogger.DATASTORE.debug(LOCALISER.msg("052109", ps, StringUtils.toJVMIDString(c)));
    }

    if (batchable && supportsBatching) {
      // This statement is batchable so save it as the current batchable
      if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
        NucleusLogger.DATASTORE_PERSIST.debug(LOCALISER.msg("052103", stmtText));
      }
      ConnectionStatementState state = new ConnectionStatementState();
      state.stmt = ps;
      state.stmtText = stmtText;
      state.batchSize = 1;
      setConnectionStatementState(conn, state);
    }

    return ps;
  }
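
The key decision in this method is that an update is only appended to the pending batch when its SQL text matches the statement already queued; otherwise the queued batch is flushed first. A simplified, standalone sketch of that decision; the class and field names are illustrative and deliberately omit the max-batch-size and logging concerns handled above.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class BatchingWriterSketch {
  private PreparedStatement pendingStmt; // currently batched statement, if any
  private String pendingSql;

  /** Returns a statement for the SQL, reusing the pending batch when the text matches. */
  public PreparedStatement statementFor(Connection c, String sql) throws SQLException {
    if (pendingStmt != null && pendingSql.equals(sql)) {
      return pendingStmt; // batch another parameter set onto the same statement
    }
    flush(); // different SQL: execute what is queued before starting a new statement
    pendingStmt = c.prepareStatement(sql);
    pendingSql = sql;
    return pendingStmt;
  }

  /** Executes and discards any pending batch. */
  public void flush() throws SQLException {
    if (pendingStmt != null) {
      pendingStmt.executeBatch();
      pendingStmt.close();
      pendingStmt = null;
      pendingSql = null;
    }
  }
}
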
Code Example #21
  /**
   * Method to populate any defaults, and check the validity of the MetaData.
   *
   * @param clr ClassLoaderResolver to use for loading any key/value types
   * @param primary the primary ClassLoader to use (or null)
   * @param mmgr MetaData manager
   */
  public void populate(ClassLoaderResolver clr, ClassLoader primary, MetaDataManager mmgr) {
    AbstractMemberMetaData mmd = (AbstractMemberMetaData) parent;
    if (!StringUtils.isWhitespace(key.type) && key.type.indexOf(',') > 0) {
      throw new InvalidMetaDataException(LOCALISER, "044143", mmd.getName(), mmd.getClassName());
    }
    if (!StringUtils.isWhitespace(value.type) && value.type.indexOf(',') > 0) {
      throw new InvalidMetaDataException(LOCALISER, "044144", mmd.getName(), mmd.getClassName());
    }

    ApiAdapter api = mmgr.getApiAdapter();

    // Make sure the type in "key", "value" is set
    key.populate(
        ((AbstractMemberMetaData) parent).getAbstractClassMetaData().getPackageName(),
        clr,
        primary,
        mmgr);
    value.populate(
        ((AbstractMemberMetaData) parent).getAbstractClassMetaData().getPackageName(),
        clr,
        primary,
        mmgr);

    // Check the field type and see if it is castable to a Map
    Class field_type = getMemberMetaData().getType();
    if (!java.util.Map.class.isAssignableFrom(field_type)) {
      throw new InvalidMetaDataException(
          LOCALISER, "044145", getFieldName(), getMemberMetaData().getClassName(false));
    }

    if (java.util.Properties.class.isAssignableFrom(field_type)) {
      // Properties defaults to <String, String>
      if (key.type == null) {
        key.type = String.class.getName();
      }
      if (value.type == null) {
        value.type = String.class.getName();
      }
    }

    // "key-type"
    if (key.type == null) {
      throw new InvalidMetaDataException(
          LOCALISER, "044146", getFieldName(), getMemberMetaData().getClassName(false));
    }

    // Check that the key type exists
    Class keyTypeClass = null;
    try {
      keyTypeClass = clr.classForName(key.type, primary);
    } catch (ClassNotResolvedException cnre) {
      try {
        // Maybe the user specified a java.lang class without fully-qualifying it
        // This is beyond the scope of the JDO spec which expects java.lang cases to be
        // fully-qualified
        keyTypeClass = clr.classForName(ClassUtils.getJavaLangClassForType(key.type), primary);
      } catch (ClassNotResolvedException cnre2) {
        throw new InvalidMetaDataException(
            LOCALISER, "044147", getFieldName(), getMemberMetaData().getClassName(false), key.type);
      }
    }

    if (!keyTypeClass.getName().equals(key.type)) {
      // The value-type has been resolved from what was specified in the MetaData - update to the
      // fully-qualified name
      NucleusLogger.METADATA.info(
          LOCALISER.msg(
              "044148",
              getFieldName(),
              getMemberMetaData().getClassName(false),
              key.type,
              keyTypeClass.getName()));
      key.type = keyTypeClass.getName();
    }

    // "embedded-key"
    if (key.embedded == null) {
      // Assign default for "embedded-key" based on 18.13.2 of JDO 2 spec
      if (mmgr.getNucleusContext().getTypeManager().isDefaultEmbeddedType(keyTypeClass)) {
        key.embedded = Boolean.TRUE;
      } else if (api.isPersistable(keyTypeClass)
          || Object.class.isAssignableFrom(keyTypeClass)
          || keyTypeClass.isInterface()) {
        key.embedded = Boolean.FALSE;
      } else {
        key.embedded = Boolean.TRUE;
      }
    }
    if (Boolean.FALSE.equals(key.embedded)) {
      // If the user has set a non-PC/non-Interface as not embedded, correct it since not supported.
      // Note : this fails when using in the enhancer since not yet PC
      if (!api.isPersistable(keyTypeClass)
          && !keyTypeClass.isInterface()
          && keyTypeClass != java.lang.Object.class) {
        key.embedded = Boolean.TRUE;
      }
    }
    KeyMetaData keymd = ((AbstractMemberMetaData) parent).getKeyMetaData();
    if (keymd != null && keymd.getEmbeddedMetaData() != null) {
      // If the user has specified <embedded>, set to true
      key.embedded = Boolean.TRUE;
    }

    // "value-type"
    if (value.type == null) {
      throw new InvalidMetaDataException(
          LOCALISER, "044149", getFieldName(), getMemberMetaData().getClassName(false));
    }

    // Check that the value-type exists
    Class valueTypeClass = null;
    try {
      valueTypeClass = clr.classForName(value.type);
    } catch (ClassNotResolvedException cnre) {
      try {
        // Maybe the user specified a java.lang class without fully-qualifying it
        // This is beyond the scope of the JDO spec which expects java.lang cases to be
        // fully-qualified
        valueTypeClass = clr.classForName(ClassUtils.getJavaLangClassForType(value.type));
      } catch (ClassNotResolvedException cnre2) {
        throw new InvalidMetaDataException(
            LOCALISER,
            "044150",
            getFieldName(),
            getMemberMetaData().getClassName(false),
            value.type);
      }
    }

    if (!valueTypeClass.getName().equals(value.type)) {
      // The value-type has been resolved from what was specified in the MetaData - update to the
      // fully-qualified name
      NucleusLogger.METADATA.info(
          LOCALISER.msg(
              "044151",
              getFieldName(),
              getMemberMetaData().getClassName(false),
              value.type,
              valueTypeClass.getName()));
      value.type = valueTypeClass.getName();
    }

    // "embedded-value"
    if (value.embedded == null) {
      // Assign default for "embedded-value" based on 18.13.2 of JDO 2 spec
      if (mmgr.getNucleusContext().getTypeManager().isDefaultEmbeddedType(valueTypeClass)) {
        value.embedded = Boolean.TRUE;
      } else if (api.isPersistable(valueTypeClass)
          || Object.class.isAssignableFrom(valueTypeClass)
          || valueTypeClass.isInterface()) {
        value.embedded = Boolean.FALSE;
      } else {
        value.embedded = Boolean.TRUE;
      }
    }
    if (value.embedded == Boolean.FALSE) {
      // If the user has set a non-PC/non-Interface as not embedded, correct it since not supported.
      // Note : this fails when using in the enhancer since not yet PC
      if (!api.isPersistable(valueTypeClass)
          && !valueTypeClass.isInterface()
          && valueTypeClass != java.lang.Object.class) {
        value.embedded = Boolean.TRUE;
      }
    }
    ValueMetaData valuemd = ((AbstractMemberMetaData) parent).getValueMetaData();
    if (valuemd != null && valuemd.getEmbeddedMetaData() != null) {
      // If the user has specified <embedded>, set to true
      value.embedded = Boolean.TRUE;
    }

    key.classMetaData = mmgr.getMetaDataForClassInternal(keyTypeClass, clr);
    value.classMetaData = mmgr.getMetaDataForClassInternal(valueTypeClass, clr);

    // Cater for Key with mapped-by needing to be PK (for JPA)
    if (keymd != null
        && keymd.mappedBy != null
        && keymd.mappedBy.equals("#PK")) // Special value set by JPAMetaDataHandler
    {
      // Need to set the mapped-by of <key> to be the PK of the <value>
      if (value.classMetaData.getNoOfPrimaryKeyMembers() != 1) {
        // TODO Localise this
        throw new NucleusUserException(
            "DataNucleus does not support use of <map-key> with no name field when the"
                + " value class has a composite primary key");
      }
      int[] valuePkFieldNums = value.classMetaData.getPKMemberPositions();
      keymd.mappedBy =
          value.classMetaData.getMetaDataForManagedMemberAtAbsolutePosition(valuePkFieldNums[0])
              .name;
    }

    // Make sure anything in the superclass is populated too
    super.populate(clr, primary, mmgr);

    setPopulated();
  }
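
When a key or value type fails to resolve, the code above retries via ClassUtils.getJavaLangClassForType, i.e. it assumes the user wrote an unqualified java.lang name such as "String". A hedged, standalone equivalent of that fallback (not the DataNucleus helper itself):

public class JavaLangFallbackSketch {
  /** Tries the name as given, then as java.lang.&lt;Name&gt;; throws if neither resolves. */
  public static Class<?> resolve(String typeName, ClassLoader loader) throws ClassNotFoundException {
    try {
      return Class.forName(typeName, false, loader);
    } catch (ClassNotFoundException first) {
      // Assume an unqualified java.lang type such as "String" or "Integer"
      return Class.forName("java.lang." + typeName, false, loader);
    }
  }
}
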
Code Example #22
  /**
   * Method to execute a PreparedStatement update. Prints logging information about timings.
   *
   * @param ec Execution Context
   * @param conn The connection (required since the one on PreparedStatement is not always the same
   *     so we can't use it)
   * @param stmt The statement text
   * @param ps The Prepared Statement
   * @param processNow Whether to process this statement now (only applies if the statement is batched)
   * @return The number of rows affected (as per PreparedStatement.executeUpdate)
   * @throws SQLException Thrown if an error occurs
   */
  public int[] executeStatementUpdate(
      ExecutionContext ec,
      ManagedConnection conn,
      String stmt,
      PreparedStatement ps,
      boolean processNow)
      throws SQLException {
    ConnectionStatementState state = getConnectionStatementState(conn);
    if (state != null) {
      if (state.stmt == ps) {
        // Mark as processable
        if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
          NucleusLogger.DATASTORE_PERSIST.debug(
              LOCALISER.msg("052104", state.stmtText, "" + state.batchSize));
        }
        state.processable = true;
        state.stmt.addBatch();

        if (processNow) {
          // Process the batch now
          // User method has requested execution so they can close it themselves now
          state.closeStatementOnProcess = false;
          return processConnectionStatement(conn);
        } else {
          // Leave processing til later
          return null;
        }
      } else {
        // There is a waiting batch but it is a different statement, so process that one now since
        // we need our statement executing
        processConnectionStatement(conn);
      }
    }

    // Process the normal update statement
    long startTime = System.currentTimeMillis();
    if (NucleusLogger.DATASTORE_NATIVE.isDebugEnabled()) {
      if (ps instanceof ParamLoggingPreparedStatement) {
        NucleusLogger.DATASTORE_NATIVE.debug(
            ((ParamLoggingPreparedStatement) ps).getStatementWithParamsReplaced());
      } else {
        NucleusLogger.DATASTORE_NATIVE.debug(stmt);
      }
    }

    int ind = ps.executeUpdate();
    if (ec != null && ec.getStatistics() != null) {
      // Add to statistics
      ec.getStatistics().incrementNumWrites();
    }

    ps.clearBatch();
    if (NucleusLogger.DATASTORE_PERSIST.isDebugEnabled()) {
      NucleusLogger.DATASTORE_PERSIST.debug(
          LOCALISER.msg(
              "045001",
              "" + (System.currentTimeMillis() - startTime),
              "" + ind,
              StringUtils.toJVMIDString(ps)));
    }

    return new int[] {ind};
  }
Code Example #23
 public String getObjectAsPrintable() {
   return StringUtils.toJVMIDString(getObject());
 }
Code Example #24
File: SchemaHandlerTest.java  Project: hopecee/texsts
  /** Test of the retrieval of columns. */
  public void testColumnRetrieval() {
    addClassesToSchema(new Class[] {SchemaClass1.class, SchemaClass2.class});

    PersistenceManager pm = pmf.getPersistenceManager();
    RDBMSStoreManager databaseMgr = (RDBMSStoreManager) storeMgr;
    StoreSchemaHandler handler = databaseMgr.getSchemaHandler();
    ClassLoaderResolver clr = storeMgr.getNucleusContext().getClassLoaderResolver(null);
    Connection con =
        (Connection)
            databaseMgr
                .getConnection(((JDOPersistenceManager) pm).getExecutionContext())
                .getConnection();

    // Retrieve and check the table for SchemaClass1
    DatastoreClass table1 = databaseMgr.getDatastoreClass(SchemaClass1.class.getName(), clr);
    RDBMSTableInfo tableInfo1 =
        (RDBMSTableInfo) handler.getSchemaData(con, "columns", new Object[] {table1});
    assertEquals(
        "Number of columns for table " + table1 + " is wrong", 4, tableInfo1.getNumberOfChildren());
    Iterator colsIter = tableInfo1.getChildren().iterator();
    Collection colNamesPresent = new HashSet();
    colNamesPresent.add("TABLE1_ID1");
    colNamesPresent.add("TABLE1_ID2");
    colNamesPresent.add("NAME");
    colNamesPresent.add("OTHER_ID");
    while (colsIter.hasNext()) {
      RDBMSColumnInfo colInfo = (RDBMSColumnInfo) colsIter.next();
      if (colInfo.getColumnName().equals("TABLE1_ID1")) {
        colNamesPresent.remove(colInfo.getColumnName());
      }
      if (colInfo.getColumnName().equals("TABLE1_ID2")) {
        colNamesPresent.remove(colInfo.getColumnName());
      }
      if (colInfo.getColumnName().equals("NAME")) {
        colNamesPresent.remove(colInfo.getColumnName());
      }
      if (colInfo.getColumnName().equals("OTHER_ID")) {
        colNamesPresent.remove(colInfo.getColumnName());
      }
    }
    assertTrue(
        "Some columns expected were not present in the datastore table : "
            + StringUtils.collectionToString(colNamesPresent),
        colNamesPresent.size() == 0);

    // Retrieve and check the table for SchemaClass2
    DatastoreClass table2 = databaseMgr.getDatastoreClass(SchemaClass2.class.getName(), clr);
    RDBMSTableInfo tableInfo2 =
        (RDBMSTableInfo) handler.getSchemaData(con, "columns", new Object[] {table2});
    assertEquals(
        "Number of columns for table " + table2 + " is wrong", 3, tableInfo2.getNumberOfChildren());
    colsIter = tableInfo2.getChildren().iterator();
    colNamesPresent.clear();
    colNamesPresent.add("TABLE2_ID");
    colNamesPresent.add("NAME");
    colNamesPresent.add("VALUE");
    while (colsIter.hasNext()) {
      RDBMSColumnInfo colInfo = (RDBMSColumnInfo) colsIter.next();
      if (colInfo.getColumnName().equals("TABLE2_ID")) {
        colNamesPresent.remove(colInfo.getColumnName());
      }
      if (colInfo.getColumnName().equals("NAME")) {
        colNamesPresent.remove(colInfo.getColumnName());
        assertEquals(
            "Column " + colInfo.getColumnName() + " has an incorrect length",
            20,
            colInfo.getColumnSize());
      }
      if (colInfo.getColumnName().equals("VALUE")) {
        colNamesPresent.remove(colInfo.getColumnName());
      }
    }
    assertTrue(
        "Some columns expected were not present in the datastore table : "
            + StringUtils.collectionToString(colNamesPresent),
        colNamesPresent.size() == 0);

    // Now check retrieval of a column for a table
    RDBMSColumnInfo colInfo =
        (RDBMSColumnInfo) handler.getSchemaData(con, "column", new Object[] {table2, "VALUE"});
    assertNotNull("Column VALUE for table " + table2 + " was not found", colInfo);
    assertEquals("Column name is wrong", "VALUE", colInfo.getColumnName());
  }
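
The same kind of column information the test checks through the store's schema handler is also available directly from JDBC via DatabaseMetaData.getColumns. A sketch that lists column names and sizes for a table; catalog and schema are passed as null for simplicity, and the helper class name is illustrative.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;

public class ColumnInfoSketch {
  /** Returns column name to size for the given table, as reported by the JDBC driver. */
  public static Map<String, Integer> columnsOf(Connection conn, String tableName) throws SQLException {
    Map<String, Integer> columns = new LinkedHashMap<>();
    DatabaseMetaData md = conn.getMetaData();
    try (ResultSet rs = md.getColumns(null, null, tableName, null)) {
      while (rs.next()) {
        columns.put(rs.getString("COLUMN_NAME"), rs.getInt("COLUMN_SIZE"));
      }
    }
    return columns;
  }
}
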
Code Example #25
  /** Method called at the end of the class. */
  public void visitEnd() {
    AbstractClassMetaData cmd = enhancer.getClassMetaData();
    if (cmd.getPersistenceModifier() == ClassPersistenceModifier.PERSISTENCE_CAPABLE) {
      // Add any new fields
      List fields = enhancer.getFieldsList();
      Iterator fieldsIter = fields.iterator();
      while (fieldsIter.hasNext()) {
        ClassField field = (ClassField) fieldsIter.next();
        if (field.getName().equals(enhancer.getNamer().getDetachedStateFieldName())
            && hasDetachedState) {
          // No need to add this field since exists
          continue;
        }

        if (DataNucleusEnhancer.LOGGER.isDebugEnabled()) {
          DataNucleusEnhancer.LOGGER.debug(
              Localiser.msg("005021", ((Class) field.getType()).getName() + " " + field.getName()));
        }
        cv.visitField(
            field.getAccess(),
            field.getName(),
            Type.getDescriptor((Class) field.getType()),
            null,
            null);
      }

      if (!hasStaticInitialisation) {
        // Add a static initialisation block for the class since nothing added yet
        InitClass method = InitClass.getInstance(enhancer);
        method.initialise(cv);
        method.execute();
        method.close();
      }

      if (!hasDefaultConstructor
          && enhancer.hasOption(ClassEnhancer.OPTION_GENERATE_DEFAULT_CONSTRUCTOR)) {
        // Add a default constructor
        DefaultConstructor ctr = DefaultConstructor.getInstance(enhancer);
        ctr.initialise(cv);
        ctr.execute();
        ctr.close();
      }

      // Add any new methods
      List methods = enhancer.getMethodsList();
      Iterator<ClassMethod> methodsIter = methods.iterator();
      while (methodsIter.hasNext()) {
        ClassMethod method = methodsIter.next();
        method.initialise(cv);
        method.execute();
        method.close();
      }

      if (Serializable.class.isAssignableFrom(enhancer.getClassBeingEnhanced())) {
        // Class is Serializable
        if (!hasSerialVersionUID) {
          // Needs "serialVersionUID" field
          Long uid = null;
          try {
            uid =
                (Long)
                    AccessController.doPrivileged(
                        new PrivilegedAction() {
                          public Object run() {
                            return Long.valueOf(
                                ObjectStreamClass.lookup(enhancer.getClassBeingEnhanced())
                                    .getSerialVersionUID());
                          }
                        });
          } catch (Throwable e) {
            DataNucleusEnhancer.LOGGER.warn(StringUtils.getStringFromStackTrace(e));
          }
          ClassField cf =
              new ClassField(
                  enhancer,
                  enhancer.getNamer().getSerialVersionUidFieldName(),
                  Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL,
                  long.class,
                  uid);
          if (DataNucleusEnhancer.LOGGER.isDebugEnabled()) {
            DataNucleusEnhancer.LOGGER.debug(
                Localiser.msg("005021", ((Class) cf.getType()).getName() + " " + cf.getName()));
          }
          cv.visitField(
              cf.getAccess(),
              cf.getName(),
              Type.getDescriptor((Class) cf.getType()),
              null,
              cf.getInitialValue());
        }

        // The dnPreSerialize method needs to be called only once for a persistent instance. The
        // writeObject method in the least-derived pc class that implements Serializable in the
        // inheritance hierarchy needs to be modified or generated to call it.
        if (cmd.getSuperAbstractClassMetaData() == null && !hasWriteObject) {
          // User hasn't provided their own writeObject, so provide the default but with a call to
          // dnPreSerialize first
          ClassMethod method = WriteObject.getInstance(enhancer);
          method.initialise(cv);
          method.execute();
          method.close();
        }
      }

      // Add dnGetXXX, dnSetXXX for each of the (managed) fields/properties
      AbstractMemberMetaData[] fmds = cmd.getManagedMembers();
      for (int i = 0; i < fmds.length; i++) {
        if (fmds[i].getPersistenceModifier() == FieldPersistenceModifier.NONE) {
          // Field/Property is not persistent so ignore
          continue;
        }

        byte persistenceFlags = fmds[i].getPersistenceFlags();
        ClassMethod getMethod = null;
        ClassMethod setMethod = null;
        if (fmds[i] instanceof PropertyMetaData) {
          // dnGetXXX, dnSetXXX for property are generated when processing existing getXXX, setXXX
          // methods
        } else {
          // Generate dnGetXXX, dnSetXXX for field
          if ((persistenceFlags & Persistable.MEDIATE_READ) == Persistable.MEDIATE_READ) {
            getMethod = new GetViaMediate(enhancer, fmds[i]);
          } else if ((persistenceFlags & Persistable.CHECK_READ) == Persistable.CHECK_READ) {
            getMethod = new GetViaCheck(enhancer, fmds[i]);
          } else {
            getMethod = new GetNormal(enhancer, fmds[i]);
          }

          if ((persistenceFlags & Persistable.MEDIATE_WRITE) == Persistable.MEDIATE_WRITE) {
            setMethod = new SetViaMediate(enhancer, fmds[i]);
          } else if ((persistenceFlags & Persistable.CHECK_WRITE) == Persistable.CHECK_WRITE) {
            setMethod = new SetViaCheck(enhancer, fmds[i]);
          } else {
            setMethod = new SetNormal(enhancer, fmds[i]);
          }
        }

        if (getMethod != null) {
          getMethod.initialise(cv);
          getMethod.execute();
          getMethod.close();
        }
        if (setMethod != null) {
          setMethod.initialise(cv);
          setMethod.execute();
          setMethod.close();
        }
      }
    }
    cv.visitEnd();
  }
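
The serialVersionUID the enhancer injects is the default value the JVM would compute for the class, obtained above through ObjectStreamClass inside a privileged action. A small standalone sketch of that lookup; the Sample class is illustrative.

import java.io.ObjectStreamClass;
import java.io.Serializable;

public class SerialVersionUidSketch {
  static class Sample implements Serializable {
    String name;
    int count;
  }

  /** Returns the default serialVersionUID the JVM computes for a Serializable class. */
  public static long defaultSerialVersionUid(Class<?> cls) {
    ObjectStreamClass osc = ObjectStreamClass.lookup(cls);
    return osc.getSerialVersionUID();
  }

  public static void main(String[] args) {
    System.out.println(defaultSerialVersionUid(Sample.class));
  }
}
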
Code Example #26
  /**
   * Utility to create the application identity columns and mapping. Uses the id mapping of the
   * specified class table and copies the mappings and columns, whilst retaining the passed
   * preferences for column namings. This is used to copy the PK mappings of a superclass table so
   * we have the same PK.
   *
   * @param columnContainer The container of column MetaData with any namings
   * @param refTable The table that we use as reference
   * @param clr The ClassLoaderResolver
   * @param cmd The ClassMetaData
   */
  final void addApplicationIdUsingClassTableId(
      ColumnMetaDataContainer columnContainer,
      DatastoreClass refTable,
      ClassLoaderResolver clr,
      AbstractClassMetaData cmd) {
    ColumnMetaData[] userdefinedCols = null;
    int nextUserdefinedCol = 0;
    if (columnContainer != null) {
      userdefinedCols = columnContainer.getColumnMetaData();
    }

    pkMappings = new JavaTypeMapping[cmd.getPKMemberPositions().length];
    for (int i = 0; i < cmd.getPKMemberPositions().length; i++) {
      AbstractMemberMetaData mmd =
          cmd.getMetaDataForManagedMemberAtAbsolutePosition(cmd.getPKMemberPositions()[i]);
      JavaTypeMapping mapping = refTable.getMemberMapping(mmd);
      if (mapping == null) {
        // probably due to invalid metadata defined by the user
        throw new NucleusUserException(
            "Cannot find mapping for field "
                + mmd.getFullFieldName()
                + " in table "
                + refTable.toString()
                + " "
                + StringUtils.collectionToString(refTable.getColumns()));
      }

      JavaTypeMapping masterMapping =
          storeMgr.getMappingManager().getMapping(clr.classForName(mapping.getType()));
      masterMapping.setMemberMetaData(mmd); // Update field info in mapping
      masterMapping.setTable(this);
      pkMappings[i] = masterMapping;

      // Loop through each id column in the reference table and add the same here
      // applying the required names from the columnContainer
      for (int j = 0; j < mapping.getNumberOfDatastoreMappings(); j++) {
        JavaTypeMapping m = masterMapping;
        Column refColumn = mapping.getDatastoreMapping(j).getColumn();
        if (mapping instanceof PersistableMapping) {
          m =
              storeMgr
                  .getMappingManager()
                  .getMapping(clr.classForName(refColumn.getJavaTypeMapping().getType()));
          ((PersistableMapping) masterMapping).addJavaTypeMapping(m);
        }

        ColumnMetaData userdefinedColumn = null;
        if (userdefinedCols != null) {
          for (int k = 0; k < userdefinedCols.length; k++) {
            if (refColumn.getIdentifier().toString().equals(userdefinedCols[k].getTarget())) {
              userdefinedColumn = userdefinedCols[k];
              break;
            }
          }
          if (userdefinedColumn == null && nextUserdefinedCol < userdefinedCols.length) {
            userdefinedColumn = userdefinedCols[nextUserdefinedCol++];
          }
        }

        // Add this application identity column
        Column idColumn = null;
        if (userdefinedColumn != null) {
          // User has provided a name for this column
          // Currently we only use the column namings from the user's definition but we could easily
          // take more of their details.
          idColumn =
              addColumn(
                  refColumn.getStoredJavaType(),
                  storeMgr
                      .getIdentifierFactory()
                      .newIdentifier(IdentifierType.COLUMN, userdefinedColumn.getName()),
                  m,
                  refColumn.getColumnMetaData());
        } else {
          // No name provided so take same as superclass
          idColumn =
              addColumn(
                  refColumn.getStoredJavaType(),
                  refColumn.getIdentifier(),
                  m,
                  refColumn.getColumnMetaData());
        }
        if (mapping.getDatastoreMapping(j).getColumn().getColumnMetaData() != null) {
          refColumn.copyConfigurationTo(idColumn);
        }
        idColumn.setPrimaryKey();

        // Set the column type based on the field.getType()
        getStoreManager()
            .getMappingManager()
            .createDatastoreMapping(m, idColumn, refColumn.getJavaTypeMapping().getType());
      }

      // Update highest field number if this is higher
      int absoluteFieldNumber = mmd.getAbsoluteFieldNumber();
      if (absoluteFieldNumber > highestMemberNumber) {
        highestMemberNumber = absoluteFieldNumber;
      }
    }
  }
Code Example #27
 public SequenceMetaData setName(String name) {
   this.name = (StringUtils.isWhitespace(name) ? this.name : name);
   return this;
 }
Code Example #28
  /**
   * Method to return a statement selecting the candidate table(s) required to cover all possible
   * types for this candidate's inheritance strategy.
   *
   * @param storeMgr RDBMS StoreManager
   * @param parentStmt Parent statement (if there is one)
   * @param cmd Metadata for the class
   * @param clsMapping Mapping for the results of the statement
   * @param ec ExecutionContext
   * @param candidateCls Candidate class
   * @param subclasses Whether to create a statement for subclasses of the candidate too
   * @param result The result clause
   * @param candidateAlias alias for the candidate (if any)
   * @param candidateTableGroupName TableGroup name for the candidate (if any)
   * @return The SQLStatement
   * @throws NucleusException if there are no tables for concrete classes in this query (hence would
   *     return null)
   */
  public static SQLStatement getStatementForCandidates(
      RDBMSStoreManager storeMgr,
      SQLStatement parentStmt,
      AbstractClassMetaData cmd,
      StatementClassMapping clsMapping,
      ExecutionContext ec,
      Class candidateCls,
      boolean subclasses,
      String result,
      String candidateAlias,
      String candidateTableGroupName) {
    SQLStatement stmt = null;

    DatastoreIdentifier candidateAliasId = null;
    if (candidateAlias != null) {
      candidateAliasId = storeMgr.getIdentifierFactory().newTableIdentifier(candidateAlias);
    }

    ClassLoaderResolver clr = ec.getClassLoaderResolver();
    List<DatastoreClass> candidateTables = new ArrayList<DatastoreClass>();
    if (cmd.getInheritanceMetaData().getStrategy() == InheritanceStrategy.COMPLETE_TABLE) {
      DatastoreClass candidateTable = storeMgr.getDatastoreClass(cmd.getFullClassName(), clr);
      if (candidateTable != null) {
        candidateTables.add(candidateTable);
      }
      if (subclasses) {
        Collection<String> subclassNames =
            storeMgr.getSubClassesForClass(cmd.getFullClassName(), subclasses, clr);
        if (subclassNames != null) {
          Iterator<String> subclassIter = subclassNames.iterator();
          while (subclassIter.hasNext()) {
            String subclassName = subclassIter.next();
            DatastoreClass tbl = storeMgr.getDatastoreClass(subclassName, clr);
            if (tbl != null) {
              candidateTables.add(tbl);
            }
          }
        }
      }

      Iterator<DatastoreClass> iter = candidateTables.iterator();
      int maxClassNameLength = cmd.getFullClassName().length();
      while (iter.hasNext()) {
        DatastoreClass cls = iter.next();
        String className = cls.getType();
        if (className.length() > maxClassNameLength) {
          maxClassNameLength = className.length();
        }
      }

      iter = candidateTables.iterator();
      while (iter.hasNext()) {
        DatastoreClass cls = iter.next();

        SQLStatement tblStmt =
            new SQLStatement(parentStmt, storeMgr, cls, candidateAliasId, candidateTableGroupName);
        tblStmt.setClassLoaderResolver(clr);
        tblStmt.setCandidateClassName(cls.getType());

        // Add SELECT of dummy column accessible as "NUCLEUS_TYPE" containing the classname
        JavaTypeMapping m = storeMgr.getMappingManager().getMapping(String.class);
        String nuctypeName = cls.getType();
        if (maxClassNameLength > nuctypeName.length()) {
          nuctypeName = StringUtils.leftAlignedPaddedString(nuctypeName, maxClassNameLength);
        }
        StringLiteral lit = new StringLiteral(tblStmt, m, nuctypeName, null);
        tblStmt.select(lit, UnionStatementGenerator.NUC_TYPE_COLUMN);

        if (stmt == null) {
          stmt = tblStmt;
        } else {
          stmt.union(tblStmt);
        }
      }
      if (clsMapping != null) {
        clsMapping.setNucleusTypeColumnName(UnionStatementGenerator.NUC_TYPE_COLUMN);
      }
    } else {
      // "new-table", "superclass-table", "subclass-table"
      List<Class> candidateClasses = new ArrayList<Class>();
      if (ClassUtils.isReferenceType(candidateCls)) {
        // Persistent interface, so find all persistent implementations
        String[] clsNames =
            storeMgr
                .getNucleusContext()
                .getMetaDataManager()
                .getClassesImplementingInterface(candidateCls.getName(), clr);
        for (int i = 0; i < clsNames.length; i++) {
          Class cls = clr.classForName(clsNames[i]);
          DatastoreClass table = storeMgr.getDatastoreClass(clsNames[i], clr);
          candidateClasses.add(cls);
          candidateTables.add(table);
          AbstractClassMetaData implCmd =
              storeMgr.getNucleusContext().getMetaDataManager().getMetaDataForClass(cls, clr);
          if (implCmd.getIdentityType() != cmd.getIdentityType()) {
            throw new NucleusUserException(
                "You are querying an interface ("
                    + cmd.getFullClassName()
                    + ") "
                    + "yet one of its implementations ("
                    + implCmd.getFullClassName()
                    + ") "
                    + " uses a different identity type!");
          } else if (cmd.getIdentityType() == IdentityType.APPLICATION) {
            if (cmd.getPKMemberPositions().length != implCmd.getPKMemberPositions().length) {
              throw new NucleusUserException(
                  "You are querying an interface ("
                      + cmd.getFullClassName()
                      + ") "
                      + "yet one of its implementations ("
                      + implCmd.getFullClassName()
                      + ") "
                      + " has a different number of PK members!");
            }
          }
        }
      } else {
        DatastoreClass candidateTable = storeMgr.getDatastoreClass(cmd.getFullClassName(), clr);
        if (candidateTable != null) {
          // Candidate has own table
          candidateClasses.add(candidateCls);
          candidateTables.add(candidateTable);
        } else {
          // Candidate stored in subclass tables
          AbstractClassMetaData[] cmds = storeMgr.getClassesManagingTableForClass(cmd, clr);
          if (cmds != null && cmds.length > 0) {
            for (int i = 0; i < cmds.length; i++) {
              DatastoreClass table = storeMgr.getDatastoreClass(cmds[i].getFullClassName(), clr);
              Class cls = clr.classForName(cmds[i].getFullClassName());
              candidateClasses.add(cls);
              candidateTables.add(table);
            }
          } else {
            throw new UnsupportedOperationException(
                "No tables for query of " + cmd.getFullClassName());
          }
        }
      }

      for (int i = 0; i < candidateTables.size(); i++) {
        DatastoreClass tbl = candidateTables.get(i);
        Class cls = candidateClasses.get(i);
        StatementGenerator stmtGen = null;
        if (tbl.getDiscriminatorMapping(true) != null
            || QueryUtils.resultHasOnlyAggregates(result)) {
          // Either has a discriminator, or only selecting aggregates so need single select
          stmtGen =
              new DiscriminatorStatementGenerator(
                  storeMgr, clr, cls, subclasses, candidateAliasId, candidateTableGroupName);
          stmtGen.setOption(StatementGenerator.OPTION_RESTRICT_DISCRIM);
        } else {
          stmtGen =
              new UnionStatementGenerator(
                  storeMgr, clr, cls, subclasses, candidateAliasId, candidateTableGroupName);
          if (result == null) {
            // Returning one row per candidate so include distinguisher column
            stmtGen.setOption(StatementGenerator.OPTION_SELECT_NUCLEUS_TYPE);
            clsMapping.setNucleusTypeColumnName(UnionStatementGenerator.NUC_TYPE_COLUMN);
          }
        }
        stmtGen.setParentStatement(parentStmt);
        SQLStatement tblStmt = stmtGen.getStatement();

        if (stmt == null) {
          stmt = tblStmt;
        } else {
          stmt.union(tblStmt);
        }
      }
    }

    return stmt;
  }
Code Example #29
 public SequenceMetaData setFactoryClass(String factoryClass) {
   this.factoryClass = (StringUtils.isWhitespace(factoryClass) ? null : factoryClass);
   return this;
 }
Code Example #30
  /**
   * Method to populate any defaults, and check the validity of the MetaData.
   *
   * @param clr ClassLoaderResolver to use for any loading operations
   * @param primary the primary ClassLoader to use (or null)
   * @param mmgr MetaData manager
   */
  public void populate(ClassLoaderResolver clr, ClassLoader primary, MetaDataManager mmgr) {
    AbstractMemberMetaData mmd = (AbstractMemberMetaData) parent;
    if (!StringUtils.isWhitespace(element.type) && element.type.indexOf(',') > 0) {
      throw new InvalidMetaDataException(LOCALISER, "044131", mmd.getName(), mmd.getClassName());
    }

    // Make sure the type in "element" is set
    element.populate(
        ((AbstractMemberMetaData) parent).getAbstractClassMetaData().getPackageName(),
        clr,
        primary,
        mmgr);

    // Check the field type and see if it is castable to a Collection
    Class field_type = getMemberMetaData().getType();
    if (!java.util.Collection.class.isAssignableFrom(field_type)) {
      throw new InvalidMetaDataException(
          LOCALISER, "044132", getFieldName(), getMemberMetaData().getClassName(false));
    }

    // "element-type"
    if (element.type == null) {
      throw new InvalidMetaDataException(
          LOCALISER, "044133", getFieldName(), getMemberMetaData().getClassName(false));
    }

    // Check that the element type exists
    Class elementTypeClass = null;
    try {
      elementTypeClass = clr.classForName(element.type, primary);
    } catch (ClassNotResolvedException cnre) {
      throw new InvalidMetaDataException(
          LOCALISER,
          "044134",
          getFieldName(),
          getMemberMetaData().getClassName(false),
          element.type);
    }

    if (!elementTypeClass.getName().equals(element.type)) {
      // The element-type has been resolved from what was specified in the MetaData - update to the
      // fully-qualified name
      NucleusLogger.METADATA.info(
          LOCALISER.msg(
              "044135",
              getFieldName(),
              getMemberMetaData().getClassName(false),
              element.type,
              elementTypeClass.getName()));
      element.type = elementTypeClass.getName();
    }

    // "embedded-element"
    ApiAdapter api = mmgr.getApiAdapter();
    if (element.embedded == null) {
      // Assign default for "embedded-element" based on 18.13.1 of JDO 2 spec
      // Note : this fails when using in the enhancer since not yet PC
      if (mmgr.getNucleusContext().getTypeManager().isDefaultEmbeddedType(elementTypeClass)) {
        element.embedded = Boolean.TRUE;
      } else if (api.isPersistable(elementTypeClass)
          || Object.class.isAssignableFrom(elementTypeClass)
          || elementTypeClass.isInterface()) {
        element.embedded = Boolean.FALSE;
      } else {
        element.embedded = Boolean.TRUE;
      }
    }
    if (Boolean.FALSE.equals(element.embedded)) {
      // If the user has set a non-PC/non-Interface as not embedded, correct it since not supported.
      // Note : this fails when using in the enhancer since not yet PC
      if (!api.isPersistable(elementTypeClass)
          && !elementTypeClass.isInterface()
          && elementTypeClass != java.lang.Object.class) {
        element.embedded = Boolean.TRUE;
      }
    }

    ElementMetaData elemmd = ((AbstractMemberMetaData) parent).getElementMetaData();
    if (elemmd != null && elemmd.getEmbeddedMetaData() != null) {
      element.embedded = Boolean.TRUE;
    }

    if (Boolean.TRUE.equals(element.dependent)) {
      // If the user has set a non-PC/non-reference as dependent, correct it since not valid.
      // Note : this fails when using in the enhancer since not yet PC
      if (!api.isPersistable(elementTypeClass)
          && !elementTypeClass.isInterface()
          && elementTypeClass != java.lang.Object.class) {
        element.dependent = Boolean.FALSE;
      }
    }

    // Keep a reference to the MetaData for the element
    element.classMetaData = mmgr.getMetaDataForClassInternal(elementTypeClass, clr);

    // Make sure anything in the superclass is populated too
    super.populate(clr, primary, mmgr);

    setPopulated();
  }