/** @since 3.0 */
  @Override
  protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
    // handle CAY-588 - get connection that is separate from the connection in the
    // current transaction.

    // TODO (andrus, 7/6/2006) Note that this will still work in a pool with a single
    // connection, as PK generator is invoked early in the transaction, before the
    // connection is grabbed for commit... So maybe promote this to other adapters in
    // 3.0?

    Transaction transaction = Transaction.getThreadTransaction();
    Transaction.bindThreadTransaction(null);

    try {

      Connection connection = node.getDataSource().getConnection();
      try {
        CallableStatement statement = connection.prepareCall("{call auto_pk_for_table(?, ?)}");
        try {
          statement.setString(1, entity.getName());
          statement.setInt(2, super.getPkCacheSize());

          // can't use "executeQuery"
          // per http://jtds.sourceforge.net/faq.html#expectingResultSet
          statement.execute();
          if (statement.getMoreResults()) {
            ResultSet rs = statement.getResultSet();

            try {
              if (rs.next()) {
                return rs.getLong(1);
              } else {
                throw new CayenneRuntimeException(
                    "Error generating pk for DbEntity " + entity.getName());
              }
            } finally {
              rs.close();
            }
          } else {
            throw new CayenneRuntimeException(
                "Error generating pk for DbEntity "
                    + entity.getName()
                    + ", no result set from stored procedure.");
          }
        } finally {
          statement.close();
        }
      } finally {
        connection.close();
      }
    } finally {
      Transaction.bindThreadTransaction(transaction);
    }
  }
  /**
   * Generates a unique and non-repeating primary key for the specified DbEntity.
   *
   * <p>This implementation is naive, as it does not lock the database rows between the select and
   * the subsequent update. Adapter-specific implementations are more robust.
   *
   * @since 3.0
   */
  public Object generatePk(DataNode node, DbAttribute pk) throws Exception {

    DbEntity entity = (DbEntity) pk.getEntity();

    // binary PKs are generated in memory as pseudo-unique byte sequences; all other types go
    // through the database-backed range generator below
    switch (pk.getType()) {
      case Types.BINARY:
      case Types.VARBINARY:
        return IDUtil.pseudoUniqueSecureByteSequence(pk.getMaxLength());
    }

    // an entity-level DbKeyGenerator may override the adapter-wide pkCacheSize
    DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
    long cacheSize;
    if (pkGenerator != null && pkGenerator.getKeyCacheSize() != null) {
      cacheSize = pkGenerator.getKeyCacheSize().intValue();
    } else {
      cacheSize = pkCacheSize;
    }

    long value;

    // if no caching, always generate fresh
    if (cacheSize <= 1) {
      value = longPkFromDatabase(node, entity);
    } else {
      synchronized (pkCache) {
        LongPkRange r = pkCache.get(entity.getName());

        if (r == null) {
          // create an exhausted range, so that the first request refills it from the database
          r = new LongPkRange(1L, 0L);
          pkCache.put(entity.getName(), r);
        }

        // when the cached range is used up, reserve a fresh block of keys from the database
        if (r.isExhausted()) {
          long val = longPkFromDatabase(node, entity);
          r.reset(val, val + cacheSize - 1);
        }

        value = r.getNextPrimaryKey();
      }
    }

    if (pk.getType() == Types.BIGINT) {
      return Long.valueOf(value);
    } else {
      // leaving it up to the user to ensure that the PK value does not exceed Integer.MAX_VALUE...
      return Integer.valueOf((int) value);
    }
  }
  /**
   * Performs primary key generation ignoring cache. Generates a range of primary keys as specified
   * by "pkCacheSize" bean property.
   *
   * <p>This method is called internally from "generatePk", and the generated range of key values
   * is then cached for performance. Subclasses that implement different primary key generation
   * solutions should override this method, not "generatePk".
   *
   * @since 3.0
   */
  protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
    String select =
        "SELECT #result('NEXT_ID' 'long' 'NEXT_ID') "
            + "FROM AUTO_PK_SUPPORT "
            + "WHERE TABLE_NAME = '"
            + entity.getName()
            + '\'';

    // run queries via DataNode to utilize its transactional behavior
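    // pkUpdateString(..) is assumed to build the matching UPDATE that advances NEXT_ID, so that
    // the next caller reserves a fresh range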
    List<Query> queries = new ArrayList<Query>(2);
    queries.add(new SQLTemplate(entity, select));
    queries.add(new SQLTemplate(entity, pkUpdateString(entity.getName())));

    PkRetrieveProcessor observer = new PkRetrieveProcessor(entity.getName());
    node.performQueries(queries, observer);
    return observer.getId();
  }
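  /**
   * Returns the SQL statements needed to initialize primary key support for the given entities:
   * the AUTO_PK_SUPPORT creation statement, a delete clearing any existing rows for these
   * entities, and one statement per entity creating its row.
   */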
  public List<String> createAutoPkStatements(List<DbEntity> dbEntities) {
    List<String> list = new ArrayList<String>(dbEntities.size() + 2);

    list.add(pkTableCreateString());
    list.add(pkDeleteString(dbEntities));

    for (DbEntity ent : dbEntities) {
      list.add(pkCreateString(ent.getName()));
    }

    return list;
  }
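  /** Builds a DELETE statement removing the AUTO_PK_SUPPORT rows of all the given entities. */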
  protected String pkDeleteString(List<DbEntity> dbEntities) {
    StringBuilder buf = new StringBuilder();
    buf.append("DELETE FROM AUTO_PK_SUPPORT WHERE TABLE_NAME IN (");
    int len = dbEntities.size();
    for (int i = 0; i < len; i++) {
      if (i > 0) {
        buf.append(", ");
      }
      DbEntity ent = dbEntities.get(i);
      buf.append('\'').append(ent.getName()).append('\'');
    }
    buf.append(')');
    return buf.toString();
  }
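  /**
   * Creates the AUTO_PK_SUPPORT table if it is missing and repopulates its rows for the given
   * entities.
   */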
  public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception {
    // create the AUTO_PK_SUPPORT table if it does not exist yet
    if (!autoPkTableExists(node)) {
      runUpdate(node, pkTableCreateString());
    }

    // delete any existing pk entries
    runUpdate(node, pkDeleteString(dbEntities));

    // insert all needed entries
    for (DbEntity ent : dbEntities) {
      runUpdate(node, pkCreateString(ent.getName()));
    }
  }
  @Override
  public boolean supportsFKConstraints(DbEntity entity) {
    // MySQL supports FK constraints, but there are problems deleting objects from such
    // tables...
    return !NO_CONSTRAINTS_TABLES.contains(entity.getName());
  }
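  /**
   * Checks that the merger can create two related tables with an FK constraint in the database,
   * and can later drop the corresponding relationship and FK column from the model.
   */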
  public void testForeignKey() throws Exception {
    dropTableIfPresent("NEW_TABLE");
    dropTableIfPresent("NEW_TABLE2");

    // with the leftover tables dropped, the model and the database should be in sync (no merge
    // tokens in either direction)
    assertTokensAndExecute(0, 0);

    DbEntity dbEntity1 = new DbEntity("NEW_TABLE");

    DbAttribute e1col1 = new DbAttribute("ID", Types.INTEGER, dbEntity1);
    e1col1.setMandatory(true);
    e1col1.setPrimaryKey(true);
    dbEntity1.addAttribute(e1col1);

    DbAttribute e1col2 = new DbAttribute("NAME", Types.VARCHAR, dbEntity1);
    e1col2.setMaxLength(10);
    e1col2.setMandatory(false);
    dbEntity1.addAttribute(e1col2);

    map.addDbEntity(dbEntity1);

    DbEntity dbEntity2 = new DbEntity("NEW_TABLE2");
    DbAttribute e2col1 = new DbAttribute("ID", Types.INTEGER, dbEntity2);
    e2col1.setMandatory(true);
    e2col1.setPrimaryKey(true);
    dbEntity2.addAttribute(e2col1);
    DbAttribute e2col2 = new DbAttribute("FK", Types.INTEGER, dbEntity2);
    dbEntity2.addAttribute(e2col2);
    DbAttribute e2col3 = new DbAttribute("NAME", Types.VARCHAR, dbEntity2);
    e2col3.setMaxLength(10);
    dbEntity2.addAttribute(e2col3);

    map.addDbEntity(dbEntity2);

    // create db relationships
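    // NEW_TABLE2.FK is the FK column pointing at NEW_TABLE.ID; the two relationships below are
    // reverses of each other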
    DbRelationship rel1To2 = new DbRelationship("rel1To2");
    rel1To2.setSourceEntity(dbEntity1);
    rel1To2.setTargetEntity(dbEntity2);
    rel1To2.setToMany(true);
    rel1To2.addJoin(new DbJoin(rel1To2, e1col1.getName(), e2col2.getName()));
    dbEntity1.addRelationship(rel1To2);
    DbRelationship rel2To1 = new DbRelationship("rel2To1");
    rel2To1.setSourceEntity(dbEntity2);
    rel2To1.setTargetEntity(dbEntity1);
    rel2To1.setToMany(false);
    rel2To1.addJoin(new DbJoin(rel2To1, e2col2.getName(), e1col1.getName()));
    dbEntity2.addRelationship(rel2To1);
    assertSame(rel1To2, rel2To1.getReverseRelationship());
    assertSame(rel2To1, rel1To2.getReverseRelationship());

    // the first merge pushes the new schema to the database; a second run should find nothing
    // left to merge
    assertTokensAndExecute(4, 0);
    assertTokensAndExecute(0, 0);

    // create ObjEntities
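    // ObjEntities are mapped so that the to-model merge tokens exercised below update the object
    // layer as well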
    ObjEntity objEntity1 = new ObjEntity("NewTable");
    objEntity1.setDbEntity(dbEntity1);
    ObjAttribute oatr1 = new ObjAttribute("name");
    oatr1.setDbAttributePath(e1col2.getName());
    oatr1.setType("java.lang.String");
    objEntity1.addAttribute(oatr1);
    map.addObjEntity(objEntity1);
    ObjEntity objEntity2 = new ObjEntity("NewTable2");
    objEntity2.setDbEntity(dbEntity2);
    ObjAttribute o2a1 = new ObjAttribute("name");
    o2a1.setDbAttributePath(e2col3.getName());
    o2a1.setType("java.lang.String");
    objEntity2.addAttribute(o2a1);
    map.addObjEntity(objEntity2);

    // create ObjRelationships
    assertEquals(0, objEntity1.getRelationships().size());
    assertEquals(0, objEntity2.getRelationships().size());
    ObjRelationship objRel1To2 = new ObjRelationship("objRel1To2");
    objRel1To2.addDbRelationship(rel1To2);
    objRel1To2.setSourceEntity(objEntity1);
    objRel1To2.setTargetEntity(objEntity2);
    objEntity1.addRelationship(objRel1To2);
    ObjRelationship objRel2To1 = new ObjRelationship("objRel2To1");
    objRel2To1.addDbRelationship(rel2To1);
    objRel2To1.setSourceEntity(objEntity2);
    objRel2To1.setTargetEntity(objEntity1);
    objEntity2.addRelationship(objRel2To1);
    assertEquals(1, objEntity1.getRelationships().size());
    assertEquals(1, objEntity2.getRelationships().size());
    assertSame(objRel1To2, objRel2To1.getReverseRelationship());
    assertSame(objRel2To1, objRel1To2.getReverseRelationship());

    // remove the relationships and the FK column from the model, merge the change to the DB, then
    // restore the model
    dbEntity2.removeRelationship(rel2To1.getName());
    dbEntity1.removeRelationship(rel1To2.getName());
    dbEntity2.removeAttribute(e2col2.getName());
    List<MergerToken> tokens = createMergeTokens();
    assertTokens(tokens, 2, 1);
    for (MergerToken token : tokens) {
      if (token.getDirection().isToDb()) {
        execute(token);
      }
    }
    assertTokensAndExecute(0, 0);
    dbEntity2.addRelationship(rel2To1);
    dbEntity1.addRelationship(rel1To2);
    dbEntity2.addAttribute(e2col2);

    // try to use the merger to remove the relationship and the FK column from the model
    tokens = createMergeTokens();
    assertTokens(tokens, 2, 0);
    // TODO: reversing the following two tokens should also reverse the order
    MergerToken token0 = tokens.get(0).createReverse(mergerFactory());
    MergerToken token1 = tokens.get(1).createReverse(mergerFactory());
    if (!(token0 instanceof DropRelationshipToModel && token1 instanceof DropColumnToModel
        || token1 instanceof DropRelationshipToModel && token0 instanceof DropColumnToModel)) {
      fail("Expected a DropRelationshipToModel and a DropColumnToModel token, got "
          + token0 + " and " + token1);
    }
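    // applying the reversed tokens drops the relationship and the FK column from the model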
    execute(token0);
    execute(token1);

    // check after merging
    assertNull(dbEntity2.getAttribute(e2col2.getName()));
    assertEquals(0, dbEntity1.getRelationships().size());
    assertEquals(0, dbEntity2.getRelationships().size());
    assertEquals(0, objEntity1.getRelationships().size());
    assertEquals(0, objEntity2.getRelationships().size());

    // clean up
    dbEntity1.removeRelationship(rel1To2.getName());
    dbEntity2.removeRelationship(rel2To1.getName());
    map.removeObjEntity(objEntity1.getName(), true);
    map.removeDbEntity(dbEntity1.getName(), true);
    map.removeObjEntity(objEntity2.getName(), true);
    map.removeDbEntity(dbEntity2.getName(), true);
    resolver.refreshMappingCache();
    assertNull(map.getObjEntity(objEntity1.getName()));
    assertNull(map.getDbEntity(dbEntity1.getName()));
    assertNull(map.getObjEntity(objEntity2.getName()));
    assertNull(map.getDbEntity(dbEntity2.getName()));
    assertFalse(map.getDbEntities().contains(dbEntity1));
    assertFalse(map.getDbEntities().contains(dbEntity2));

    assertTokensAndExecute(2, 0);
    assertTokensAndExecute(0, 0);
  }