/**
   * Determine the name of the sequence (or table if this resolves to a physical table) to use.
   *
   * <p>Called during {@link #configure configuration}.
   *
   * @param params The params supplied in the generator config (plus some standard useful extras).
   * @param dialect The dialect in effect
   * @param jdbcEnv The JdbcEnvironment
   * @return The sequence name
   */
  @SuppressWarnings("UnusedParameters")
  protected QualifiedName determineSequenceName(
      Properties params, Dialect dialect, JdbcEnvironment jdbcEnv) {
    final String sequencePerEntitySuffix =
        ConfigurationHelper.getString(
            CONFIG_SEQUENCE_PER_ENTITY_SUFFIX, params, DEF_SEQUENCE_SUFFIX);
    // JPA_ENTITY_NAME value honors <class ... entity-name="..."> (HBM) and @Entity#name (JPA)
    // overrides.
    final String defaultSequenceName =
        ConfigurationHelper.getBoolean(CONFIG_PREFER_SEQUENCE_PER_ENTITY, params, false)
            ? params.getProperty(JPA_ENTITY_NAME) + sequencePerEntitySuffix
            : DEF_SEQUENCE_NAME;

    final String sequenceName =
        ConfigurationHelper.getString(SEQUENCE_PARAM, params, defaultSequenceName);
    if (sequenceName.contains(".")) {
      return QualifiedNameParser.INSTANCE.parse(sequenceName);
    } else {
      // todo : need to incorporate implicit catalog and schema names
      final Identifier catalog =
          jdbcEnv
              .getIdentifierHelper()
              .toIdentifier(ConfigurationHelper.getString(CATALOG, params));
      final Identifier schema =
          jdbcEnv.getIdentifierHelper().toIdentifier(ConfigurationHelper.getString(SCHEMA, params));
      return new QualifiedNameParser.NameParts(
          catalog, schema, jdbcEnv.getIdentifierHelper().toIdentifier(sequenceName));
    }
  }
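
// Hedged usage sketch (not part of the original source): how the parameters read above typically
// reach this generator from a mapping. The keys "sequence_name", "increment_size" and "optimizer"
// are SequenceStyleGenerator's standard parameter names; the entity name, generator name and
// sequence name below are illustrative assumptions. Assumes javax.persistence and
// org.hibernate.annotations imports.
@Entity
public class Order {
  @Id
  @GeneratedValue(generator = "order_seq_gen")
  @GenericGenerator(
      name = "order_seq_gen",
      strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
      parameters = {
        @Parameter(name = "sequence_name", value = "ORDER_SEQ"),
        @Parameter(name = "increment_size", value = "20"),
        @Parameter(name = "optimizer", value = "pooled")
      })
  private Long id;
}
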
 public static String getConfigFilePath(Properties props) {
   String configResourcePath =
       ConfigurationHelper.getString(CacheEnvironment.CONFIG_FILE_PATH_LEGACY, props, null);
   if (StringHelper.isEmpty(configResourcePath)) {
     configResourcePath =
         ConfigurationHelper.getString(CacheEnvironment.CONFIG_FILE_PATH, props, null);
   }
   return configResourcePath;
 }
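
// Hedged usage sketch: the legacy key is consulted first and wins when both are set; the file
// name used here is an illustrative assumption.
Properties props = new Properties();
props.setProperty(CacheEnvironment.CONFIG_FILE_PATH, "ehcache-custom.xml");
String path = getConfigFilePath(props); // "ehcache-custom.xml", since the legacy key is not set
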
 /**
  * Determine the optimizer to use.
  *
  * <p>Called during {@link #configure configuration}.
  *
  * @param params The params supplied in the generator config (plus some standard useful extras).
  * @param incrementSize The {@link #determineIncrementSize determined increment size}
  * @return The optimizer strategy (name)
  */
 protected String determineOptimizationStrategy(Properties params, int incrementSize) {
   // if the increment size is greater than one, we prefer pooled optimization; but we first
   // need to see if the user prefers POOL or POOL_LO...
   final String defaultPooledOptimizerStrategy =
       ConfigurationHelper.getBoolean(Environment.PREFER_POOLED_VALUES_LO, params, false)
           ? StandardOptimizerDescriptor.POOLED_LO.getExternalName()
           : StandardOptimizerDescriptor.POOLED.getExternalName();
   final String defaultOptimizerStrategy =
       incrementSize <= 1
           ? StandardOptimizerDescriptor.NONE.getExternalName()
           : defaultPooledOptimizerStrategy;
   return ConfigurationHelper.getString(OPT_PARAM, params, defaultOptimizerStrategy);
 }
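
// Hedged illustration of the defaulting above, assuming the standard optimizer external names
// ("none", "pooled", "pooled-lo") and calling the method from within the same class:
Properties params = new Properties();
String strategyNone = determineOptimizationStrategy(params, 1);    // "none"   (incrementSize <= 1)
String strategyPooled = determineOptimizationStrategy(params, 20); // "pooled" (pooled optimization preferred)
params.setProperty(Environment.PREFER_POOLED_VALUES_LO, "true");
String strategyPooledLo = determineOptimizationStrategy(params, 20); // "pooled-lo"
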
  /**
   * Pool configuration.
   *
   * @param props the Hibernate and BoneCP configuration properties
   * @throws HibernateException if the connection pool cannot be configured or created
   */
  public void configure(Properties props) throws HibernateException {
    try {
      this.config = new BoneCPConfig(props);

      // old hibernate config
      String url = props.getProperty(CONFIG_CONNECTION_URL);
      String username = props.getProperty(CONFIG_CONNECTION_USERNAME);
      String password = props.getProperty(CONFIG_CONNECTION_PASSWORD);
      String driver = props.getProperty(CONFIG_CONNECTION_DRIVER_CLASS);
      if (url == null) {
        url = props.getProperty(CONFIG_CONNECTION_URL_ALTERNATE);
      }
      if (username == null) {
        username = props.getProperty(CONFIG_CONNECTION_USERNAME_ALTERNATE);
      }
      if (password == null) {
        password = props.getProperty(CONFIG_CONNECTION_PASSWORD_ALTERNATE);
      }
      if (driver == null) {
        driver = props.getProperty(CONFIG_CONNECTION_DRIVER_CLASS_ALTERNATE);
      }

      if (url != null) {
        this.config.setJdbcUrl(url);
      }
      if (username != null) {
        this.config.setUsername(username);
      }
      if (password != null) {
        this.config.setPassword(password);
      }

      // Remember Isolation level
      this.isolation = ConfigurationHelper.getInteger(AvailableSettings.ISOLATION, props);
      this.autocommit = ConfigurationHelper.getBoolean(AvailableSettings.AUTOCOMMIT, props);

      logger.debug(this.config.toString());

      if (driver != null && !driver.trim().equals("")) {
        loadClass(driver);
      }
      if (this.config.getConnectionHookClassName() != null) {
        Object hookClass = loadClass(this.config.getConnectionHookClassName()).newInstance();
        this.config.setConnectionHook((ConnectionHook) hookClass);
      }
      // create the connection pool
      this.pool = createPool(this.config);
    } catch (Exception e) {
      throw new HibernateException(e);
    }
  }
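
// Hedged usage sketch: minimal properties for this provider. The hibernate.connection.* keys are
// the usual values behind the CONFIG_CONNECTION_* constants read above; the provider class name,
// JDBC URL and credentials are illustrative assumptions.
Properties props = new Properties();
props.setProperty("hibernate.connection.url", "jdbc:h2:mem:test");
props.setProperty("hibernate.connection.username", "sa");
props.setProperty("hibernate.connection.password", "");
props.setProperty("hibernate.connection.driver_class", "org.h2.Driver");
props.setProperty("hibernate.connection.isolation", "2"); // READ_COMMITTED
BoneCPConnectionProvider provider = new BoneCPConnectionProvider(); // assumed provider class
provider.configure(props); // builds the BoneCPConfig, records isolation/autocommit, creates the pool
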
 // todo remove this once we move to new metamodel
 public static RegionFactory createRegionFactory(Properties properties, boolean cachingEnabled) {
   // todo : REMOVE!  THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which
   // will be going away
   String regionFactoryClassName =
       RegionFactoryInitiator.mapLegacyNames(
           ConfigurationHelper.getString(
               AvailableSettings.CACHE_REGION_FACTORY, properties, null));
   if (regionFactoryClassName == null) {
     regionFactoryClassName = DEF_CACHE_REG_FACTORY;
   }
   LOG.debugf("Cache region factory : %s", regionFactoryClassName);
   try {
     try {
       return (RegionFactory)
           org.hibernate.internal.util.ReflectHelper.classForName(regionFactoryClassName)
               .getConstructor(Properties.class)
               .newInstance(properties);
     } catch (NoSuchMethodException e) {
       // no constructor accepting Properties found, try no arg constructor
       LOG.debugf(
           "%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor.",
           regionFactoryClassName);
       return (RegionFactory)
           org.hibernate.internal.util.ReflectHelper.classForName(regionFactoryClassName)
               .newInstance();
     }
   } catch (Exception e) {
     throw new HibernateException(
         "could not instantiate RegionFactory [" + regionFactoryClassName + "]", e);
   }
 }
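
// Hedged generic sketch of the fallback used above: prefer a (Properties) constructor, otherwise
// fall back to the no-arg constructor. The helper name is an illustrative assumption.
static <T> T instantiateWithOptionalProperties(Class<T> clazz, Properties properties) throws Exception {
  try {
    return clazz.getConstructor(Properties.class).newInstance(properties);
  } catch (NoSuchMethodException e) {
    return clazz.getDeclaredConstructor().newInstance();
  }
}
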
 public static ServiceRegistryImpl buildServiceRegistry(Map serviceRegistryConfig) {
   Properties properties = new Properties();
   properties.putAll(serviceRegistryConfig);
   Environment.verifyProperties(properties);
   ConfigurationHelper.resolvePlaceHolders(properties);
   return new ServiceRegistryImpl(properties);
 }
   /**
    * Constructor. Initializes the per-tenant ComboPooledDataSource instances based on
    * application.properties.
    *
    * @throws PropertyVetoException if the underlying pooled data source rejects a configured property
    */
  public DatabaseMultiTenantConnectionProvider() throws PropertyVetoException {
    logger.info("Initializing Connection Pool!");

    if (tenants == null) tenants = new ArrayList<String>();
    tenants.add(CurrentTenantResolver.DEFAULT_TENANT_ID);
    Resource resource = new ClassPathResource("/application.properties");
    try {
      props = PropertiesLoaderUtils.loadProperties(resource);
      String t = ConfigurationHelper.getString("tenants", props);

      if (t != null) {
        for (String tenant : t.split(",")) {
          tenants.add(tenant);
        }
      }

    } catch (IOException e) {
       logger.error("Could not load tenant configuration from application.properties", e);
    }

    for (String tenant : tenants) {
      connProviderMap.put(tenant, initPoolConnection(tenant));
    }

    logger.info("Connection Pool initialised!");
  }
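
// Hedged example of the application.properties consumed above (tenant identifiers are
// illustrative assumptions):
//
//   tenants=tenantA,tenantB
//
// The default tenant from CurrentTenantResolver.DEFAULT_TENANT_ID is registered first, then one
// pooled provider is created per listed tenant via initPoolConnection(tenant).
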
Example #8
  static {
    try {
      Map<Object, Object> properties = getEntityManagerProperties();

      Environment.verifyProperties(properties);
      ConfigurationHelper.resolvePlaceHolders(properties);
      ServiceRegistry reg =
          new ServiceRegistryBuilder().applySettings(properties).buildServiceRegistry();

      DbOpenHelper openHelper =
          new DbOpenHelper(reg) {

            @Override
            public void onCreate() throws Exception {
              executeAllScript(0);
            }

            @Override
            public void onUpgrade(int oldVersion, int newVersion) throws Exception {
              // TODO Auto-generated method stub
            }
          };
      openHelper.open();

      emfInstance =
          Persistence.createEntityManagerFactory(
              "transactions-optional", getEntityManagerProperties());

    } catch (Throwable t) {
      log.error("Error creating EMF", t);
    }
  }
  private Configuration configureTransactionManager(
      Configuration regionOverrides, String templateCacheName, Properties properties) {
    // Get existing configuration to verify whether a tm was configured or not.
    Configuration templateConfig =
        manager.defineConfiguration(templateCacheName, new Configuration());
    String ispnTmLookupClassName = templateConfig.getTransactionManagerLookupClass();
    String hbTmLookupClassName =
        org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup.class.getName();
    if (ispnTmLookupClassName != null && !ispnTmLookupClassName.equals(hbTmLookupClassName)) {
      log.debug(
          "Infinispan is configured ["
              + ispnTmLookupClassName
              + "] with a different transaction manager lookup "
              + "class than Hibernate ["
              + hbTmLookupClassName
              + "]");
    } else {
      regionOverrides.setTransactionManagerLookup(transactionManagerlookup);
    }

    String useSyncProp =
        ConfigurationHelper.extractPropertyValue(INFINISPAN_USE_SYNCHRONIZATION_PROP, properties);
    boolean useSync =
        useSyncProp == null ? DEF_USE_SYNCHRONIZATION : Boolean.parseBoolean(useSyncProp);
    regionOverrides.fluent().transaction().useSynchronization(useSync);

    return regionOverrides;
  }
 private static RegionFactory createRegionFactory(
     Properties properties, boolean cachingEnabled, ServiceRegistry serviceRegistry) {
   String regionFactoryClassName =
       RegionFactoryInitiator.mapLegacyNames(
           ConfigurationHelper.getString(
               AvailableSettings.CACHE_REGION_FACTORY, properties, null));
   if (regionFactoryClassName == null || !cachingEnabled) {
     regionFactoryClassName = DEF_CACHE_REG_FACTORY;
   }
   LOG.debugf("Cache region factory : %s", regionFactoryClassName);
   try {
     try {
       return (RegionFactory)
           serviceRegistry
               .getService(ClassLoaderService.class)
               .classForName(regionFactoryClassName)
               .getConstructor(Properties.class)
               .newInstance(properties);
     } catch (NoSuchMethodException e) {
       // no constructor accepting Properties found, try no arg constructor
       LOG.debugf(
           "%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor.",
           regionFactoryClassName);
       return (RegionFactory)
           serviceRegistry
               .getService(ClassLoaderService.class)
               .classForName(regionFactoryClassName)
               .newInstance();
     }
   } catch (Exception e) {
     throw new HibernateException(
         "could not instantiate RegionFactory [" + regionFactoryClassName + "]", e);
   }
 }
  @Configuration
  public Option[] config() throws Exception {
    final Properties paxExamEnvironment = loadPaxExamEnvironmentProperties();

    final boolean debug =
        ConfigurationHelper.getBoolean(
            "org.hibernate.testing.osgi.paxExam.debug", Environment.getProperties(), DEBUG);

    return options(
        when(debug).useOptions(debugConfiguration("5005", true)),
        karafDistributionConfiguration()
            .frameworkUrl(
                paxExamEnvironment.getProperty("org.ops4j.pax.exam.container.karaf.distroUrl"))
            .karafVersion(
                paxExamEnvironment.getProperty("org.ops4j.pax.exam.container.karaf.version"))
            .name("Apache Karaf")
            .unpackDirectory(
                new File(
                    paxExamEnvironment.getProperty("org.ops4j.pax.exam.container.karaf.unpackDir")))
            .useDeployFolder(false),
        editConfigurationFileExtend(
            "etc/org.ops4j.pax.url.mvn.cfg",
            "org.ops4j.pax.url.mvn.repositories",
            "https://repository.jboss.org/nexus/content/groups/public/"),
        configureConsole().ignoreLocalConsole().ignoreRemoteShell(),
        when(debug).useOptions(keepRuntimeFolder()),
        logLevel(LogLevelOption.LogLevel.INFO),
        features(featureXmlUrl(paxExamEnvironment), "hibernate-orm"),
        features(testingFeatureXmlUrl(), "hibernate-osgi-testing"));
  }
 private TypeOverrides overrideStatisticsIfPresent(TypeOverrides override, Properties properties) {
   String globalStats =
       ConfigurationHelper.extractPropertyValue(INFINISPAN_GLOBAL_STATISTICS_PROP, properties);
   if (globalStats != null) {
     override.setExposeStatistics(Boolean.parseBoolean(globalStats));
   }
   return override;
 }
 protected JpaMetaModelPopulationSetting determineJpaMetaModelPopulationSetting(
     Configuration cfg) {
   String setting =
       ConfigurationHelper.getString(
           AvailableSettings.JPA_METAMODEL_POPULATION, cfg.getProperties(), null);
   if (setting == null) {
     setting =
         ConfigurationHelper.getString(
             AvailableSettings.JPA_METAMODEL_GENERATION, cfg.getProperties(), null);
     if (setting != null) {
       log.infof(
           "Encountered deprecated setting [%s], use [%s] instead",
           AvailableSettings.JPA_METAMODEL_GENERATION, AvailableSettings.JPA_METAMODEL_POPULATION);
     }
   }
   return JpaMetaModelPopulationSetting.parse(setting);
 }
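
// Hedged usage sketch: prefer the non-deprecated key. Accepted values are those understood by
// JpaMetaModelPopulationSetting.parse; "enabled" is assumed here for illustration.
Configuration cfg = new Configuration();
cfg.setProperty(AvailableSettings.JPA_METAMODEL_POPULATION, "enabled");
JpaMetaModelPopulationSetting setting = determineJpaMetaModelPopulationSetting(cfg);
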
Example #14
  @Override
  public void setParameterValues(Properties parameters) {
    final ParameterType reader = (ParameterType) parameters.get(PARAMETER_TYPE);

    // IMPL NOTE : be protective about not setting enumValueMapper (i.e. calling
    // treatAsNamed/treatAsOrdinal)
    // in cases where we do not have enough information.  In such cases we do additional checks
    // as part of nullSafeGet/nullSafeSet to query against the JDBC metadata to make the
    // determination.

    if (reader != null) {
      enumClass = reader.getReturnedClass().asSubclass(Enum.class);

      final boolean isOrdinal;
      final javax.persistence.EnumType enumType = getEnumType(reader);
      if (enumType == null) {
        isOrdinal = true;
      } else if (javax.persistence.EnumType.ORDINAL.equals(enumType)) {
        isOrdinal = true;
      } else if (javax.persistence.EnumType.STRING.equals(enumType)) {
        isOrdinal = false;
      } else {
        throw new AssertionFailure("Unknown EnumType: " + enumType);
      }

      if (isOrdinal) {
        treatAsOrdinal();
      } else {
        treatAsNamed();
      }
      sqlType = enumValueMapper.getSqlType();
    } else {
      String enumClassName = (String) parameters.get(ENUM);
      try {
        enumClass =
            ReflectHelper.classForName(enumClassName, this.getClass()).asSubclass(Enum.class);
      } catch (ClassNotFoundException exception) {
        throw new HibernateException("Enum class not found", exception);
      }

      final Object useNamedSetting = parameters.get(NAMED);
      if (useNamedSetting != null) {
        final boolean useNamed = ConfigurationHelper.getBoolean(NAMED, parameters);
        if (useNamed) {
          treatAsNamed();
        } else {
          treatAsOrdinal();
        }
        sqlType = enumValueMapper.getSqlType();
      }
    }

    final String type = (String) parameters.get(TYPE);
    if (type != null) {
      sqlType = Integer.decode(type);
    }
  }
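
// Hedged usage sketch for the branch above where no ParameterType is available: configure the
// type purely from string parameters using the ENUM and NAMED keys consulted by the method. The
// enum class used here is an illustrative assumption, and the instantiation assumes this snippet
// belongs to org.hibernate.type.EnumType.
Properties parameters = new Properties();
parameters.setProperty(ENUM, "java.time.DayOfWeek");
parameters.setProperty(NAMED, "true"); // map by enum name rather than ordinal
EnumType enumType = new EnumType();
enumType.setParameterValues(parameters); // resolves the enum class and selects the name-based mapper
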
 protected EmbeddedCacheManager createCacheManager(Properties properties) throws CacheException {
   try {
     String configLoc =
         ConfigurationHelper.getString(
             INFINISPAN_CONFIG_RESOURCE_PROP, properties, DEF_INFINISPAN_CONFIG_RESOURCE);
     EmbeddedCacheManager manager = new DefaultCacheManager(configLoc, false);
     String globalStats =
         ConfigurationHelper.extractPropertyValue(INFINISPAN_GLOBAL_STATISTICS_PROP, properties);
     if (globalStats != null) {
       manager
           .getGlobalConfiguration()
           .setExposeGlobalJmxStatistics(Boolean.parseBoolean(globalStats));
     }
     manager.start();
     return manager;
   } catch (IOException e) {
     throw new CacheException("Unable to create default cache manager", e);
   }
 }
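
// Hedged usage sketch: pointing the region factory at a custom Infinispan configuration resource
// via the property read above (the resource name is an illustrative assumption, and the call
// assumes an instance of the surrounding factory class).
Properties properties = new Properties();
properties.setProperty(INFINISPAN_CONFIG_RESOURCE_PROP, "my-infinispan.xml");
EmbeddedCacheManager manager = createCacheManager(properties);
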
  @Override
  public void configure(Type type, Properties params, ServiceRegistry serviceRegistry)
      throws MappingException {
    final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService(JdbcEnvironment.class);
    final Dialect dialect = jdbcEnvironment.getDialect();

    this.identifierType = type;
    boolean forceTableUse = ConfigurationHelper.getBoolean(FORCE_TBL_PARAM, params, false);

    final QualifiedName sequenceName = determineSequenceName(params, dialect, jdbcEnvironment);

    final int initialValue = determineInitialValue(params);
    int incrementSize = determineIncrementSize(params);

    final String optimizationStrategy = determineOptimizationStrategy(params, incrementSize);
    incrementSize = determineAdjustedIncrementSize(optimizationStrategy, incrementSize);

    if (dialect.supportsSequences() && !forceTableUse) {
      if (!dialect.supportsPooledSequences()
          && OptimizerFactory.isPooledOptimizer(optimizationStrategy)) {
        forceTableUse = true;
        LOG.forcingTableUse();
      }
    }

    this.databaseStructure =
        buildDatabaseStructure(
            type,
            params,
            jdbcEnvironment,
            forceTableUse,
            sequenceName,
            initialValue,
            incrementSize);
    this.optimizer =
        OptimizerFactory.buildOptimizer(
            optimizationStrategy,
            identifierType.getReturnedClass(),
            incrementSize,
            ConfigurationHelper.getInt(INITIAL_PARAM, params, -1));
    this.databaseStructure.prepare(optimizer);
  }
  @Override
  public void configure(
      Type type, Properties params, Dialect dialect, ClassLoaderService classLoaderService)
      throws MappingException {
    this.identifierType = type;
    boolean forceTableUse = ConfigurationHelper.getBoolean(FORCE_TBL_PARAM, params, false);

    final ObjectName qualifiedSequenceName = determineSequenceName(params, dialect);
    final String sequenceNameText = qualifiedSequenceName.toText(dialect);

    final int initialValue = determineInitialValue(params);
    int incrementSize = determineIncrementSize(params);

    final String optimizationStrategy = determineOptimizationStrategy(params, incrementSize);
    incrementSize = determineAdjustedIncrementSize(optimizationStrategy, incrementSize);

    if (dialect.supportsSequences() && !forceTableUse) {
      if (!dialect.supportsPooledSequences()
          && OptimizerFactory.isPooledOptimizer(optimizationStrategy)) {
        forceTableUse = true;
        LOG.forcingTableUse();
      }
    }

    this.databaseStructure =
        buildDatabaseStructure(
            type,
            params,
            dialect,
            forceTableUse,
            qualifiedSequenceName,
            initialValue,
            incrementSize);
    this.optimizer =
        OptimizerFactory.buildOptimizer(
            optimizationStrategy,
            identifierType.getReturnedClass(),
            incrementSize,
            ConfigurationHelper.getInt(INITIAL_PARAM, params, -1),
            classLoaderService);
    this.databaseStructure.prepare(optimizer);
  }
Example #18
 public static int getLockTimeoutInMillis(Properties props) {
   int timeout = -1;
   try {
     timeout = ConfigurationHelper.getInt(LOCK_TIMEOUT, props, -1);
   } catch (Exception ignored) {
   }
   if (timeout < 0) {
     timeout = MAXIMUM_LOCK_TIMEOUT;
   }
   return timeout;
 }
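
// Hedged usage sketch: a missing, negative, or unparsable LOCK_TIMEOUT falls back to
// MAXIMUM_LOCK_TIMEOUT; the value below is an illustrative assumption.
Properties props = new Properties();
props.setProperty(LOCK_TIMEOUT, "10000");
int lockTimeoutMillis = getLockTimeoutInMillis(props); // 10000
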
  protected StandardServiceRegistryImpl buildServiceRegistry(Configuration configuration) {
    Properties properties = new Properties();
    properties.putAll(configuration.getProperties());
    Environment.verifyProperties(properties);
    ConfigurationHelper.resolvePlaceHolders(properties);

    final BootstrapServiceRegistry bootstrapServiceRegistry = generateBootstrapRegistry(properties);
    ServiceRegistryBuilder registryBuilder =
        new ServiceRegistryBuilder(bootstrapServiceRegistry).applySettings(properties);
    prepareBasicRegistryBuilder(registryBuilder);
    return (StandardServiceRegistryImpl) registryBuilder.buildServiceRegistry();
  }
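
// Hedged usage sketch of the registry built above, as called from a subclass of this test base.
// The SessionFactory handling reflects typical Hibernate 4.x usage and is an assumption, not part
// of the original snippet.
Configuration configuration = new Configuration().configure();
StandardServiceRegistryImpl serviceRegistry = buildServiceRegistry(configuration);
try {
  SessionFactory sessionFactory = configuration.buildSessionFactory(serviceRegistry);
  // ... exercise the SessionFactory ...
  sessionFactory.close();
} finally {
  serviceRegistry.destroy();
}
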
 private void dissectProperty(int prefixLoc, String key, Properties properties) {
   TypeOverrides cfgOverride = null;
   int suffixLoc = -1;
   if (!key.equals(INFINISPAN_CONFIG_RESOURCE_PROP)
       && (suffixLoc = key.indexOf(CONFIG_SUFFIX)) != -1) {
     cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
     cfgOverride.setCacheName(ConfigurationHelper.extractPropertyValue(key, properties));
   } else if ((suffixLoc = key.indexOf(STRATEGY_SUFFIX)) != -1) {
     cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
     cfgOverride.setEvictionStrategy(ConfigurationHelper.extractPropertyValue(key, properties));
   } else if ((suffixLoc = key.indexOf(WAKE_UP_INTERVAL_SUFFIX)) != -1) {
     cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
     cfgOverride.setEvictionWakeUpInterval(
         Long.parseLong(ConfigurationHelper.extractPropertyValue(key, properties)));
   } else if ((suffixLoc = key.indexOf(MAX_ENTRIES_SUFFIX)) != -1) {
     cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
     cfgOverride.setEvictionMaxEntries(ConfigurationHelper.getInt(key, properties, -1));
   } else if ((suffixLoc = key.indexOf(LIFESPAN_SUFFIX)) != -1) {
     cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
     cfgOverride.setExpirationLifespan(
         Long.parseLong(ConfigurationHelper.extractPropertyValue(key, properties)));
   } else if ((suffixLoc = key.indexOf(MAX_IDLE_SUFFIX)) != -1) {
     cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
     cfgOverride.setExpirationMaxIdle(
         Long.parseLong(ConfigurationHelper.extractPropertyValue(key, properties)));
   }
    //      else if ((suffixLoc = key.indexOf(STATISTICS_SUFFIX)) != -1) {
    //         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
    //         cfgOverride.setExposeStatistics(
    //             Boolean.parseBoolean(PropertiesHelper.extractPropertyValue(key, properties)));
    //      }
 }
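
// Hedged illustration of the property shapes dissected above. The region name and values are
// illustrative assumptions, and the exact key spellings are assumptions based on the suffix
// constants tested in the method:
//
//   hibernate.cache.infinispan.entity.cfg = my-entity-cache            -> setCacheName(...)
//   hibernate.cache.infinispan.entity.eviction.strategy = LRU          -> setEvictionStrategy(...)
//   hibernate.cache.infinispan.entity.eviction.max_entries = 5000      -> setEvictionMaxEntries(...)
//   hibernate.cache.infinispan.entity.expiration.lifespan = 60000      -> setExpirationLifespan(...)
//   hibernate.cache.infinispan.entity.expiration.max_idle = 30000      -> setExpirationMaxIdle(...)
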
  public static ServiceRegistry getDefaultServiceRegistry(Configuration configuration) {
    ServiceRegistry registry = cache.get(configuration);
    if (registry == null) {
      Properties properties = new Properties();
      properties.putAll(configuration.getProperties());
      Environment.verifyProperties(properties);
      ConfigurationHelper.resolvePlaceHolders(properties);
      registry =
          new org.hibernate.service.ServiceRegistryBuilder(properties).buildServiceRegistry();
      cache.put(configuration, registry);
    }

    return registry;
  }
  /**
   * Determine the name of the sequence (or table if this resolves to a physical table) to use.
   *
   * <p>Called during {@link #configure configuration}.
   *
   * @param params The params supplied in the generator config (plus some standard useful extras).
   * @param dialect The dialect in effect
   * @return The sequence name
   */
  protected ObjectName determineSequenceName(Properties params, Dialect dialect) {
    String sequencePerEntitySuffix =
        ConfigurationHelper.getString(
            CONFIG_SEQUENCE_PER_ENTITY_SUFFIX, params, DEF_SEQUENCE_SUFFIX);
    // JPA_ENTITY_NAME value honors <class ... entity-name="..."> (HBM) and @Entity#name (JPA)
    // overrides.
    String sequenceName =
        ConfigurationHelper.getBoolean(CONFIG_PREFER_SEQUENCE_PER_ENTITY, params, false)
            ? params.getProperty(JPA_ENTITY_NAME) + sequencePerEntitySuffix
            : DEF_SEQUENCE_NAME;
    final ObjectNameNormalizer normalizer =
        (ObjectNameNormalizer) params.get(IDENTIFIER_NORMALIZER);
    sequenceName =
        normalizer.normalizeIdentifierQuoting(
            ConfigurationHelper.getString(SEQUENCE_PARAM, params, sequenceName));
    if (sequenceName.indexOf('.') < 0) {
      final String schemaName = normalizer.normalizeIdentifierQuoting(params.getProperty(SCHEMA));
      final String catalogName = normalizer.normalizeIdentifierQuoting(params.getProperty(CATALOG));

      return new ObjectName(catalogName, schemaName, sequenceName);
    } else {
      return ObjectName.parse(sequenceName);
    }
  }
Example #23
  /**
   * Create a schema exporter for the given Configuration, using the supplied connection for
   * connectivity.
   *
   * @param configuration The configuration to use.
   * @param connection The JDBC connection to use.
   * @throws HibernateException Indicates problem preparing for schema export.
   */
  public SchemaExport(Configuration configuration, Connection connection)
      throws HibernateException {
    this.connectionHelper = new SuppliedConnectionHelper(connection);

    this.sqlStatementLogger = new SqlStatementLogger(false, true);
    this.formatter = FormatStyle.DDL.getFormatter();
    this.sqlExceptionHelper = new SqlExceptionHelper();

    this.importFiles =
        ConfigurationHelper.getString(
            AvailableSettings.HBM2DDL_IMPORT_FILES,
            configuration.getProperties(),
            DEFAULT_IMPORT_FILE);

    final Dialect dialect = Dialect.getDialect(configuration.getProperties());
    this.dropSQL = configuration.generateDropSchemaScript(dialect);
    this.createSQL = configuration.generateSchemaCreationScript(dialect);
  }
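
// Hedged usage sketch of a SchemaExport built as above (assumes a Configuration and an open JDBC
// Connection are already available): print the generated create script without executing it.
// create(script, export) and setDelimiter are standard SchemaExport operations in this Hibernate
// generation.
SchemaExport schemaExport = new SchemaExport(configuration, connection);
schemaExport.setDelimiter(";");
schemaExport.create(true, false); // script to stdout, do not export to the database
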
 protected QueryTranslatorFactory createQueryTranslatorFactory(
     Properties properties, ServiceRegistry serviceRegistry) {
   String className =
       ConfigurationHelper.getString(
           AvailableSettings.QUERY_TRANSLATOR,
           properties,
           "org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory");
   LOG.debugf("Query translator: %s", className);
   try {
     return (QueryTranslatorFactory)
         serviceRegistry
             .getService(ClassLoaderService.class)
             .classForName(className)
             .newInstance();
   } catch (Exception e) {
     throw new HibernateException("could not instantiate QueryTranslatorFactory: " + className, e);
   }
 }
 protected QueryCacheFactory createQueryCacheFactory(
     Properties properties, ServiceRegistry serviceRegistry) {
   String queryCacheFactoryClassName =
       ConfigurationHelper.getString(
           AvailableSettings.QUERY_CACHE_FACTORY,
           properties,
           StandardQueryCacheFactory.class.getName());
   LOG.debugf("Query cache factory: %s", queryCacheFactoryClassName);
   try {
     return (QueryCacheFactory)
         serviceRegistry
             .getService(ClassLoaderService.class)
             .classForName(queryCacheFactoryClassName)
             .newInstance();
   } catch (Exception e) {
     throw new HibernateException(
         "could not instantiate QueryCacheFactory: " + queryCacheFactoryClassName, e);
   }
 }
Example #26
  public SchemaExport(ServiceRegistry serviceRegistry, Configuration configuration) {
    this.connectionHelper =
        new SuppliedConnectionProviderConnectionHelper(
            serviceRegistry.getService(ConnectionProvider.class));
    this.sqlStatementLogger =
        serviceRegistry.getService(JdbcServices.class).getSqlStatementLogger();
    this.formatter =
        (sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE).getFormatter();
    this.sqlExceptionHelper =
        serviceRegistry.getService(JdbcServices.class).getSqlExceptionHelper();

    this.importFiles =
        ConfigurationHelper.getString(
            AvailableSettings.HBM2DDL_IMPORT_FILES,
            configuration.getProperties(),
            DEFAULT_IMPORT_FILE);

    final Dialect dialect = serviceRegistry.getService(JdbcServices.class).getDialect();
    this.dropSQL = configuration.generateDropSchemaScript(dialect);
    this.createSQL = configuration.generateSchemaCreationScript(dialect);
  }
Example #27
  /**
   * Create a schema exporter for the given Configuration, with the given database connection
   * properties.
   *
   * @param configuration The configuration from which to build a schema export.
   * @param properties The properties from which to configure connectivity etc.
   * @throws HibernateException Indicates problem preparing for schema export.
   * @deprecated properties may be specified via the Configuration object
   */
  @Deprecated
  public SchemaExport(Configuration configuration, Properties properties)
      throws HibernateException {
    final Dialect dialect = Dialect.getDialect(properties);

    Properties props = new Properties();
    props.putAll(dialect.getDefaultProperties());
    props.putAll(properties);
    this.connectionHelper = new ManagedProviderConnectionHelper(props);

    this.sqlStatementLogger = new SqlStatementLogger(false, true);
    this.formatter = FormatStyle.DDL.getFormatter();
    this.sqlExceptionHelper = new SqlExceptionHelper();

    this.importFiles =
        ConfigurationHelper.getString(
            AvailableSettings.HBM2DDL_IMPORT_FILES, properties, DEFAULT_IMPORT_FILE);

    this.dropSQL = configuration.generateDropSchemaScript(dialect);
    this.createSQL = configuration.generateSchemaCreationScript(dialect);
  }
Example #28
  public SchemaExport(MetadataImplementor metadata) {
    ServiceRegistry serviceRegistry = metadata.getServiceRegistry();
    this.connectionHelper =
        new SuppliedConnectionProviderConnectionHelper(
            serviceRegistry.getService(ConnectionProvider.class));
    JdbcServices jdbcServices = serviceRegistry.getService(JdbcServices.class);
    this.sqlStatementLogger = jdbcServices.getSqlStatementLogger();
    this.formatter =
        (sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE).getFormatter();
    this.sqlExceptionHelper = jdbcServices.getSqlExceptionHelper();

    this.importFiles =
        ConfigurationHelper.getString(
            AvailableSettings.HBM2DDL_IMPORT_FILES,
            serviceRegistry.getService(ConfigurationService.class).getSettings(),
            DEFAULT_IMPORT_FILE);

    final Dialect dialect = jdbcServices.getDialect();
    this.dropSQL = metadata.getDatabase().generateDropSchemaScript(dialect);
    this.createSQL = metadata.getDatabase().generateSchemaCreationScript(dialect);
  }
 /**
  * Determine the optimizer to use.
  *
  * <p>Called during {@link #configure configuration}.
  *
  * @param params The params supplied in the generator config (plus some standard useful extras).
  * @param incrementSize The {@link #determineIncrementSize determined increment size}
  * @return The optimizer strategy (name)
  */
 protected String determineOptimizationStrategy(Properties params, int incrementSize) {
   return ConfigurationHelper.getString(
       OPT_PARAM, params, OptimizerFactory.determineImplicitOptimizerName(incrementSize, params));
 }
 /**
  * Determine the increment size to be applied. The exact implications of this value depends on the
  * {@link #getOptimizer() optimizer} being used.
  *
  * <p>Called during {@link #configure configuration}.
  *
  * @param params The params supplied in the generator config (plus some standard useful extras).
  * @return The increment size
  */
 protected int determineIncrementSize(Properties params) {
   return ConfigurationHelper.getInt(INCREMENT_PARAM, params, DEFAULT_INCREMENT_SIZE);
 }