/**
 * Builds the CREATE DDL for this temporary table:
 * {@code <create-temp-table> <name> (col type [null|not null], ...) <postfix>}.
 *
 * @param dialect dialect supplying the create string, type names and postfix
 * @param mapping mapping used to resolve column SQL types
 * @return the complete CREATE statement text
 */
public String sqlTemporaryTableCreateString(Dialect dialect, Mapping mapping) throws HibernateException {
  StringBuilder ddl = new StringBuilder(dialect.getCreateTemporaryTableString());
  ddl.append(' ').append(name).append(" (");
  Iterator itr = getColumnIterator();
  while (itr.hasNext()) {
    final Column col = (Column) itr.next();
    ddl.append(col.getQuotedName(dialect)).append(' ').append(col.getSqlType(dialect, mapping));
    // Nullable columns get the dialect's null fragment; others are forced NOT NULL.
    ddl.append(col.isNullable() ? dialect.getNullColumnString() : " not null");
    if (itr.hasNext()) {
      ddl.append(", ");
    }
  }
  ddl.append(") ").append(dialect.getCreateTemporaryTablePostfix());
  return ddl.toString();
}
/**
 * Determine if the given name is quoted. It is considered quoted if it:
 *
 * <ol>
 *   <li>starts AND ends with backticks (`), or
 *   <li>starts AND ends with double quotes ("), or
 *   <li>starts with the dialect-specified {@link org.hibernate.dialect.Dialect#openQuote()
 *       open-quote} AND ends with the dialect-specified {@link
 *       org.hibernate.dialect.Dialect#closeQuote() close-quote}
 * </ol>
 *
 * @param name The name to check
 * @param dialect The dialect (to determine the "real" quoting chars).
 * @return True if quoted, false otherwise
 */
public static boolean isQuoted(String name, Dialect dialect) {
  if (name == null || name.isEmpty()) {
    return false;
  }
  final char first = name.charAt(0);
  final char last = name.charAt(name.length() - 1);
  // Dialect quote chars are consulted last (short-circuit keeps prior behavior).
  return (first == '`' && last == '`')
      || (first == '"' && last == '"')
      || (first == dialect.openQuote() && last == dialect.closeQuote());
}
/**
 * Generates the DROP TABLE statement for this table, placing the "if exists"
 * clause before or after the table name as the dialect requires.
 */
public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
  StringBuilder sql = new StringBuilder("drop table ");
  if (dialect.supportsIfExistsBeforeTableName()) {
    sql.append("if exists ");
  }
  sql.append(getQualifiedName(dialect, defaultCatalog, defaultSchema));
  sql.append(dialect.getCascadeConstraintsString());
  if (dialect.supportsIfExistsAfterTableName()) {
    sql.append(" if exists");
  }
  return sql.toString();
}
/**
 * Reads the database's current timestamp using the dialect's
 * timestamp-select string, executed via either a callable or a prepared
 * statement depending on what the dialect declares.
 */
private Date getCurrentTimestamp(SessionImplementor session) {
  Dialect dialect = session.getFactory().getDialect();
  String selectSql = dialect.getCurrentTimestampSelectString();
  return dialect.isCurrentTimestampSelectStringCallable()
      ? useCallableStatement(selectSql, session)
      : usePreparedStatement(selectSql, session);
}
/**
 * Renders this "like" restriction as SQL, wrapping the column in the
 * dialect's lower-case function for case-insensitive matching and appending
 * an ESCAPE clause when an escape character was configured.
 *
 * @throws HibernateException if the property maps to more than one column
 */
public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException {
  Dialect dialect = criteriaQuery.getFactory().getDialect();
  String[] columns = criteriaQuery.getColumnsUsingProjection(criteria, propertyName);
  if (columns.length != 1) {
    throw new HibernateException("Like may only be used with single-column properties");
  }
  StringBuilder fragment = new StringBuilder();
  if (ignoreCase) {
    fragment.append(dialect.getLowercaseFunction()).append('(').append(columns[0]).append(')');
  } else {
    fragment.append(columns[0]);
  }
  fragment.append(" like ?");
  if (escapeChar != null) {
    fragment.append(" escape '").append(escapeChar).append('\'');
  }
  return fragment.toString();
}
/**
 * Creates the schema updater: resolves the dialect from the connection
 * properties and builds a connection provider from the merged properties.
 */
public JbpmSchemaUpdate(Configuration cfg, Properties connectionProperties) throws HibernateException {
  this.configuration = cfg;
  this.dialect = Dialect.getDialect(connectionProperties);
  // Dialect defaults go in first so caller-supplied properties win on conflict.
  Properties merged = new Properties();
  merged.putAll(dialect.getDefaultProperties());
  merged.putAll(connectionProperties);
  this.connectionProvider = ConnectionProviderFactory.newConnectionProvider(merged);
  this.exceptions = new ArrayList();
}
public void configure(Type type, Properties params, Dialect dialect) throws MappingException { tableName = PropertiesHelper.getString(ID_TABLE, params, DEFAULT_TABLE); pkColumnName = PropertiesHelper.getString(PK_COLUMN_NAME, params, DEFAULT_PK_COLUMN); valueColumnName = PropertiesHelper.getString(VALUE_COLUMN_NAME, params, DEFAULT_VALUE_COLUMN); String schemaName = params.getProperty(SCHEMA); String catalogName = params.getProperty(CATALOG); keySize = PropertiesHelper.getInt(PK_LENGTH_NAME, params, DEFAULT_PK_LENGTH); String keyValue = PropertiesHelper.getString(PK_VALUE_NAME, params, params.getProperty(TABLE)); if (tableName.indexOf('.') < 0) { tableName = Table.qualify(catalogName, schemaName, tableName); } query = "select " + valueColumnName + " from " + dialect.appendLockHint(LockMode.UPGRADE, tableName) + " where " + pkColumnName + " = '" + keyValue + "'" + dialect.getForUpdateString(); update = "update " + tableName + " set " + valueColumnName + " = ? where " + valueColumnName + " = ? and " + pkColumnName + " = '" + keyValue + "'"; insert = "insert into " + tableName + "(" + pkColumnName + ", " + valueColumnName + ") " + "values('" + keyValue + "', ?)"; // hilo config maxLo = PropertiesHelper.getInt(MAX_LO, params, Short.MAX_VALUE); lo = maxLo + 1; // so we "clock over" on the first invocation returnClass = type.getReturnedClass(); }
/** Produces the DROP statement for the generator table, honoring dialect "if exists" placement. */
public String[] sqlDropStrings(Dialect dialect) throws HibernateException {
  StringBuilder sql = new StringBuilder("drop table ");
  if (dialect.supportsIfExistsBeforeTableName()) {
    sql.append("if exists ");
  }
  sql.append(tableName).append(dialect.getCascadeConstraintsString());
  if (dialect.supportsIfExistsAfterTableName()) {
    sql.append(" if exists");
  }
  return new String[] {sql.toString()};
}
/**
 * Resolves the dialect-specific SQL type name for this column, caching the
 * result in {@code sqlType} after the first lookup.
 */
public String getSqlType(Dialect dialect, Mapping mapping) throws HibernateException {
  if (sqlType != null) {
    return sqlType;
  }
  sqlType = dialect.getTypeName(getSqlTypeCode(mapping), getLength(), getPrecision(), getScale());
  return sqlType;
}
/**
 * Captures the configuration, resolves the dialect and its extension, and
 * starts with empty element and statement lists.
 */
public MigrationBuilder(Configuration config) {
  this.config = config;
  this.dialect = Dialect.getDialect(config.getProperties());
  this.dialectExtension = DialectExtensionSelector.select(this.dialect);
  this.statements = new ArrayList<String>();
  this.elements = new ArrayList<MigrationElement>();
}
/**
 * Resolves the configured Hibernate dialect, falling back to a bare anonymous
 * {@link Dialect} with default behavior when resolution fails (e.g. no
 * dialect has been configured).
 */
private static Dialect determineDialect() {
  try {
    return Dialect.getDialect();
  } catch (Exception ignored) {
    // Deliberate best-effort fallback; resolution failure is non-fatal here.
    return new Dialect() {};
  }
}
/**
 * Produces "comment on table" / "comment on column" DDL for this table and
 * its columns, when the dialect supports COMMENT ON.
 *
 * <p>Fix: single quotes inside comment text are doubled (standard SQL string
 * escaping), so a comment such as {@code user's data} no longer yields
 * malformed DDL.
 *
 * @return iterator over the generated comment statements (empty when the
 *     dialect lacks COMMENT ON support)
 */
public Iterator sqlCommentStrings(Dialect dialect, String defaultCatalog, String defaultSchema) {
  List comments = new ArrayList();
  if (dialect.supportsCommentOn()) {
    String tableName = getQualifiedName(dialect, defaultCatalog, defaultSchema);
    if (comment != null) {
      // Escape embedded quotes; otherwise the generated statement breaks.
      comments.add("comment on table " + tableName + " is '" + comment.replace("'", "''") + "'");
    }
    Iterator iter = getColumnIterator();
    while (iter.hasNext()) {
      Column column = (Column) iter.next();
      String columnComment = column.getComment();
      if (columnComment != null) {
        comments.add(
            "comment on column "
                + tableName
                + '.'
                + column.getQuotedName(dialect)
                + " is '"
                + columnComment.replace("'", "''")
                + "'");
      }
    }
  }
  return comments.iterator();
}
/**
 * "update" CLI command: generates schema-update DDL for the given persistence
 * unit against a live JDBC connection, then writes it to stdout or a file.
 *
 * <p>Fix: the JDBC {@link Connection} was previously never closed (resource
 * leak); it is now released in a {@code finally} block.
 *
 * <p>Expected args: {@code update unitName jdbcUrl jdbcUsername jdbcPassword [filename]}
 */
private static void updateCommand(String[] args) {
  if (args.length < 5) {
    System.out.println("Expected unitName jdbcUrl jdbcUsername jdbcPassword [filename]");
    return;
  }
  String unitName = args[1];
  String url = args[2];
  String username = args[3];
  String password = args[4];
  String filename = args.length > 5 ? args[5] : null;

  Configuration configuration = HibernateDDL.getConfiguration(unitName);
  configuration.buildMappings();
  AuditConfiguration.getFor(configuration);
  Dialect dialect = Dialect.getDialect(configuration.getProperties());

  Connection conn = null;
  try {
    conn = DriverManager.getConnection(url, username, password);
    DatabaseMetadata meta = new DatabaseMetadata(conn, dialect, configuration, true);
    List<SchemaUpdateScript> updateScriptList =
        configuration.generateSchemaUpdateScriptList(dialect, meta);
    String[] updateSQL = SchemaUpdateScript.toStringArray(updateScriptList);
    HibernateDDL.stringToStream(updateSQL, filename);
  } catch (SQLException e) {
    e.printStackTrace();
  } finally {
    // Always release the connection, even when DDL generation fails.
    if (conn != null) {
      try {
        conn.close();
      } catch (SQLException ignored) {
        // best-effort close; nothing useful to do here
      }
    }
  }
}
/**
 * Produces "comment on table" / "comment on column" DDL for this table and
 * its columns, when the dialect supports COMMENT ON.
 *
 * <p>Fix: single quotes inside comment text are doubled (standard SQL string
 * escaping), so a comment such as {@code user's data} no longer yields
 * malformed DDL.
 *
 * @return iterator over the generated comment statements (empty when the
 *     dialect lacks COMMENT ON support)
 */
public Iterator sqlCommentStrings(Dialect dialect, String defaultCatalog, String defaultSchema) {
  List comments = new ArrayList();
  if (dialect.supportsCommentOn()) {
    String tableName = getQualifiedName(dialect, defaultCatalog, defaultSchema);
    if (comment != null) {
      // Escape embedded quotes; otherwise the generated statement breaks.
      StringBuffer buf =
          new StringBuffer()
              .append("comment on table ")
              .append(tableName)
              .append(" is '")
              .append(comment.replace("'", "''"))
              .append("'");
      comments.add(buf.toString());
    }
    Iterator iter = getColumnIterator();
    while (iter.hasNext()) {
      Column column = (Column) iter.next();
      String columnComment = column.getComment();
      if (columnComment != null) {
        StringBuffer buf =
            new StringBuffer()
                .append("comment on column ")
                .append(tableName)
                .append('.')
                .append(column.getQuotedName(dialect))
                .append(" is '")
                .append(columnComment.replace("'", "''"))
                .append("'");
        comments.add(buf.toString());
      }
    }
  }
  return comments.iterator();
}
/**
 * Builds a fresh SessionFactory for the given mapping resource names, closing
 * any previously built factory first. Applies extra properties, optional
 * schema re-creation, the subclass {@code configure} hook, and (when set) the
 * cache concurrency strategy to eligible class and collection mappings.
 *
 * @param files mapping resource names; entries not starting with "net/" are
 *     prefixed with {@code getBaseForMappings()} (note: mutates the array in place)
 * @throws Exception any configuration/build failure, after printing its stack trace
 */
private void buildSessionFactory(String[] files) throws Exception {
  // Tear down any factory left over from a previous run.
  if (getSessions() != null) getSessions().close();
  try {
    setCfg(new Configuration());
    cfg.addProperties(getExtraProperties());
    if (recreateSchema()) {
      // Drop and re-create the schema around the factory's lifecycle.
      cfg.setProperty(Environment.HBM2DDL_AUTO, "create-drop");
    }
    for (int i = 0; i < files.length; i++) {
      // Resources outside the "net/" namespace are resolved relative to the mapping base.
      if (!files[i].startsWith("net/")) files[i] = getBaseForMappings() + files[i];
      getCfg().addResource(files[i], TestCase.class.getClassLoader());
    }
    setDialect(Dialect.getDialect());
    configure(cfg); // subclass hook for extra configuration
    if (getCacheConcurrencyStrategy() != null) {
      Iterator iter = cfg.getClassMappings();
      while (iter.hasNext()) {
        PersistentClass clazz = (PersistentClass) iter.next();
        Iterator props = clazz.getPropertyClosureIterator();
        // Skip caching for entities with LOB properties (blob/clob by type
        // name or by java.sql.Blob/Clob class name).
        boolean hasLob = false;
        while (props.hasNext()) {
          Property prop = (Property) props.next();
          if (prop.getValue().isSimpleValue()) {
            String type = ((SimpleValue) prop.getValue()).getTypeName();
            if ("blob".equals(type) || "clob".equals(type)) hasLob = true;
            if (Blob.class.getName().equals(type) || Clob.class.getName().equals(type)) hasLob = true;
          }
        }
        // Only cache root classes (not inherited ones), and only when the
        // subclass allows overriding.
        if (!hasLob && !clazz.isInherited() && overrideCacheStrategy()) {
          cfg.setCacheConcurrencyStrategy(clazz.getEntityName(), getCacheConcurrencyStrategy());
        }
      }
      // Collections get the strategy unconditionally.
      iter = cfg.getCollectionMappings();
      while (iter.hasNext()) {
        Collection coll = (Collection) iter.next();
        cfg.setCollectionCacheConcurrencyStrategy(coll.getRole(), getCacheConcurrencyStrategy());
      }
    }
    setSessions(getCfg().buildSessionFactory(/*new TestInterceptor()*/ ));
    afterSessionFactoryBuilt(); // subclass hook after successful build
  } catch (Exception e) {
    // Print for test diagnostics, then propagate.
    e.printStackTrace();
    throw e;
  }
}
/**
 * Generates the CREATE TABLE DDL for the key/value generator table:
 * a VARCHAR key column plus an INTEGER value column.
 */
public String[] sqlCreateStrings(Dialect dialect) throws HibernateException {
  String ddl =
      dialect.getCreateTableString()
          + ' '
          + tableName
          + " ( "
          + pkColumnName
          + ' '
          + dialect.getTypeName(Types.VARCHAR, keySize, 0, 0)
          + ", "
          + valueColumnName
          + ' '
          + dialect.getTypeName(Types.INTEGER)
          + " ) ";
  return new String[] {ddl};
}
/**
 * Decides whether the (lower-cased) token is a function name or SQL keyword
 * rather than a plain identifier.
 */
private static boolean isFunctionOrKeyword(
    String lcToken, String nextToken, Dialect dialect, SQLFunctionRegistry functionRegistry) {
  // A token immediately followed by "(" is always treated as a function call.
  if ("(".equals(nextToken)) {
    return true;
  }
  return KEYWORDS.contains(lcToken)
      || isFunction(lcToken, nextToken, functionRegistry)
      || dialect.getKeywords().contains(lcToken)
      || FUNCTION_KEYWORDS.contains(lcToken);
}
/**
 * Expands dynamic filter parameter references (":filterName.paramName") inside
 * the given SQL fragment into JDBC "?" placeholders, registering a
 * {@link DynamicFilterParameterSpecification} for each, and writes the
 * resulting text back into the container. Fragments with no filter or
 * collection-filter parameters are left untouched.
 *
 * @param sqlFragment the SQL text to scan for filter parameter references
 * @param container receives the rewritten text and embedded parameter specs
 * @param walker provides enabled filters, the session factory, and dialect
 */
public static void processDynamicFilterParameters(
    final String sqlFragment, final ParameterContainer container, final HqlSqlWalker walker) {
  // Fast path: nothing to substitute.
  if (walker.getEnabledFilters().isEmpty()
      && (!hasDynamicFilterParam(sqlFragment))
      && (!(hasCollectionFilterParam(sqlFragment)))) {
    return;
  }
  Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect();
  // Tokenize on HQL separators plus the dialect's quote characters, keeping
  // the delimiters so the fragment can be reassembled verbatim.
  String symbols =
      new StringBuffer()
          .append(ParserHelper.HQL_SEPARATORS)
          .append(dialect.openQuote())
          .append(dialect.closeQuote())
          .toString();
  StringTokenizer tokens = new StringTokenizer(sqlFragment, symbols, true);
  StringBuffer result = new StringBuffer();
  while (tokens.hasMoreTokens()) {
    final String token = tokens.nextToken();
    if (token.startsWith(ParserHelper.HQL_VARIABLE_PREFIX)) {
      // Token is ":filterName.paramName" — strip the prefix and split.
      final String filterParameterName = token.substring(1);
      final String[] parts = LoadQueryInfluencers.parseFilterParameterName(filterParameterName);
      final FilterImpl filter = (FilterImpl) walker.getEnabledFilters().get(parts[0]);
      final Object value = filter.getParameter(parts[1]);
      final Type type = filter.getFilterDefinition().getParameterType(parts[1]);
      // One "?" per column of the parameter's type...
      final String typeBindFragment =
          StringHelper.join(
              ",",
              ArrayHelper.fillArray(
                  "?", type.getColumnSpan(walker.getSessionFactoryHelper().getFactory())));
      // ...repeated once per element when the bound value is a collection.
      final String bindFragment =
          (value != null && Collection.class.isInstance(value))
              ? StringHelper.join(
                  ",", ArrayHelper.fillArray(typeBindFragment, ((Collection) value).size()))
              : typeBindFragment;
      result.append(bindFragment);
      container.addEmbeddedParameter(
          new DynamicFilterParameterSpecification(parts[0], parts[1], type));
    } else {
      // Non-parameter token (including delimiters): copy through unchanged.
      result.append(token);
    }
  }
  container.setText(result.toString());
}
/**
 * Builds the paged query SQL for the given criteria object.
 *
 * @param obj criteria object used to build the base (unpaged) query
 * @param pageIndex 1-based page index
 * @param pageSize number of rows per page
 * @return the dialect-wrapped paged SQL, or the unpaged SQL when the dialect
 *     does not support physical paging
 */
public String queryPagedSql(Object obj, int pageIndex, int pageSize) {
  String querySql = querySql(obj);
  int offset = pageSize * (pageIndex - 1);
  // NOTE(review): "limit" here is the absolute index of the page's last row,
  // not a row count — confirm this matches the dialect's getLimitString contract.
  int limit = pageSize * pageIndex;
  final boolean isSupportsVariableLimit = dialect.supportsVariableLimit(); // supports physical paging
  final boolean isSupportsLimit = dialect.supportsLimit(); // supports limit
  final boolean isSupportsLimitOffset = dialect.supportsLimitOffset(); // supports offset
  // If physical paging is unavailable, return the unpaged SQL as-is.
  if (!isSupportsVariableLimit || (!isSupportsLimit && !isSupportsLimitOffset)) {
    return querySql;
  } else {
    // Paging is supported: let the dialect wrap the query.
    String limitSql = dialect.getLimitString(querySql, offset, limit);
    return limitSql;
  }
}
private static BasicTypeRegistry handleTypes(MetadataBuildingOptions options) { final ClassLoaderService classLoaderService = options.getServiceRegistry().getService(ClassLoaderService.class); // ultimately this needs to change a little bit to account for HHH-7792 final BasicTypeRegistry basicTypeRegistry = new BasicTypeRegistry(); final TypeContributions typeContributions = new TypeContributions() { public void contributeType(BasicType type) { basicTypeRegistry.register(type); } public void contributeType(BasicType type, String... keys) { basicTypeRegistry.register(type, keys); } public void contributeType(UserType type, String... keys) { basicTypeRegistry.register(type, keys); } public void contributeType(CompositeUserType type, String... keys) { basicTypeRegistry.register(type, keys); } }; // add Dialect contributed types final Dialect dialect = options.getServiceRegistry().getService(JdbcServices.class).getDialect(); dialect.contributeTypes(typeContributions, options.getServiceRegistry()); // add TypeContributor contributed types. for (TypeContributor contributor : classLoaderService.loadJavaServices(TypeContributor.class)) { contributor.contribute(typeContributions, options.getServiceRegistry()); } // add explicit application registered types for (BasicTypeRegistration basicTypeRegistration : options.getBasicTypeRegistrations()) { basicTypeRegistry.register( basicTypeRegistration.getBasicType(), basicTypeRegistration.getRegistrationKeys()); } return basicTypeRegistry; }
public void visitGenerators(Configuration cfg, IssueCollector collector) { Iterator iter = iterateGenerators(cfg); Set sequences = Collections.EMPTY_SET; if (dialect.supportsSequences()) { sequences = reader.readSequences(dialect.getQuerySequencesString()); } // TODO: move this check into something that could check per class or collection instead. while (iter.hasNext()) { PersistentIdentifierGenerator generator = (PersistentIdentifierGenerator) iter.next(); Object key = generator.generatorKey(); if (!isSequence(key, sequences) && !isTable(key)) { collector.reportIssue( new Issue( "MISSING_ID_GENERATOR", Issue.HIGH_PRIORITY, "Missing sequence or table: " + key)); } } }
/**
 * Creates the delegate: caches the dialect's "select next sequence value"
 * fragment for the given sequence and validates the entity's key shape.
 *
 * <p>Fix: removed the duplicated word ("with with") in the exception message.
 *
 * @throws HibernateException if the root table key spans multiple columns,
 *     which this generator cannot support
 */
public Delegate(PostInsertIdentityPersister persister, Dialect dialect, String sequenceName) {
  super(persister);
  this.dialect = dialect;
  this.sequenceNextValFragment = dialect.getSelectSequenceNextValString(sequenceName);
  this.keyColumns = getPersister().getRootTableKeyColumnNames();
  if (keyColumns.length > 1) {
    throw new HibernateException(
        "sequence-identity generator cannot be used with multi-column keys");
  }
}
/**
 * For any column name, generate an alias that is unique to that column name,
 * and also 10 characters or less in length.
 */
public String getAlias(Dialect dialect) {
  final String suffix = Integer.toString(uniqueInteger) + '_';
  String alias = name;

  // Trim trailing non-letter characters; names with no letters at all fall
  // back to a generic base.
  final int lastLetter = StringHelper.lastIndexOfLetter(name);
  if (lastLetter == -1) {
    alias = "column";
  } else if (lastLetter < name.length() - 1) {
    alias = name.substring(0, lastLetter + 1);
  }

  // Leave room for the uniquing suffix within the dialect's alias limit.
  final int maxLength = dialect.getMaxAliasLength();
  if (alias.length() > maxLength) {
    alias = alias.substring(0, maxLength - suffix.length());
  }

  // The raw name may be used directly only when nothing was trimmed, it is
  // not quoted, and it does not collide with the "rowid" pseudo-column.
  final boolean useRawName = name.equals(alias) && !quoted && !name.toLowerCase().equals("rowid");
  return useRawName ? alias : alias + suffix;
}
/**
 * Configures this generator: resolves the sequence name, initial value and
 * increment size, chooses an optimizer, and falls back to a table structure
 * when a pooled optimizer is requested but the dialect lacks pooled sequences.
 */
@Override
public void configure(
    Type type, Properties params, Dialect dialect, ClassLoaderService classLoaderService)
    throws MappingException {
  this.identifierType = type;
  boolean forceTableUse = ConfigurationHelper.getBoolean(FORCE_TBL_PARAM, params, false);

  final ObjectName qualifiedSequenceName = determineSequenceName(params, dialect);
  final String sequenceNameText = qualifiedSequenceName.toText(dialect); // NOTE(review): appears unused here
  final int initialValue = determineInitialValue(params);
  int incrementSize = determineIncrementSize(params);
  final String optimizationStrategy = determineOptimizationStrategy(params, incrementSize);
  incrementSize = determineAdjustedIncrementSize(optimizationStrategy, incrementSize);

  // Pooled optimizers require pooled sequence support; degrade to a table otherwise.
  if (dialect.supportsSequences()
      && !forceTableUse
      && !dialect.supportsPooledSequences()
      && OptimizerFactory.isPooledOptimizer(optimizationStrategy)) {
    forceTableUse = true;
    LOG.forcingTableUse();
  }

  this.databaseStructure =
      buildDatabaseStructure(
          type, params, dialect, forceTableUse, qualifiedSequenceName, initialValue, incrementSize);
  this.optimizer =
      OptimizerFactory.buildOptimizer(
          optimizationStrategy,
          identifierType.getReturnedClass(),
          incrementSize,
          ConfigurationHelper.getInt(INITIAL_PARAM, params, -1),
          classLoaderService);
  this.databaseStructure.prepare(optimizer);
}
/**
 * Configures this generator from the service registry's JDBC environment:
 * resolves the sequence name, initial value and increment size, chooses an
 * optimizer, and falls back to a table structure when a pooled optimizer is
 * requested but the dialect lacks pooled sequences.
 */
@Override
public void configure(Type type, Properties params, ServiceRegistry serviceRegistry)
    throws MappingException {
  final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService(JdbcEnvironment.class);
  final Dialect dialect = jdbcEnvironment.getDialect();
  this.identifierType = type;
  boolean forceTableUse = ConfigurationHelper.getBoolean(FORCE_TBL_PARAM, params, false);

  final QualifiedName sequenceName = determineSequenceName(params, dialect, jdbcEnvironment);
  final int initialValue = determineInitialValue(params);
  int incrementSize = determineIncrementSize(params);
  final String optimizationStrategy = determineOptimizationStrategy(params, incrementSize);
  incrementSize = determineAdjustedIncrementSize(optimizationStrategy, incrementSize);

  // Pooled optimizers require pooled sequence support; degrade to a table otherwise.
  if (dialect.supportsSequences()
      && !forceTableUse
      && !dialect.supportsPooledSequences()
      && OptimizerFactory.isPooledOptimizer(optimizationStrategy)) {
    forceTableUse = true;
    LOG.forcingTableUse();
  }

  this.databaseStructure =
      buildDatabaseStructure(
          type, params, jdbcEnvironment, forceTableUse, sequenceName, initialValue, incrementSize);
  this.optimizer =
      OptimizerFactory.buildOptimizer(
          optimizationStrategy,
          identifierType.getReturnedClass(),
          incrementSize,
          ConfigurationHelper.getInt(INITIAL_PARAM, params, -1));
  this.databaseStructure.prepare(optimizer);
}
/**
 * Create a schema exporter for the given Configuration, with the given database connection
 * properties.
 *
 * @param configuration The configuration from which to build a schema export.
 * @param properties The properties from which to configure connectivity etc.
 * @throws HibernateException Indicates problem preparing for schema export.
 * @deprecated properties may be specified via the Configuration object
 */
@Deprecated
public SchemaExport(Configuration configuration, Properties properties) throws HibernateException {
  final Dialect dialect = Dialect.getDialect(properties);

  // Dialect defaults first, so caller-supplied properties override them.
  final Properties props = new Properties();
  props.putAll(dialect.getDefaultProperties());
  props.putAll(properties);

  this.connectionHelper = new ManagedProviderConnectionHelper(props);
  this.sqlStatementLogger = new SqlStatementLogger(false, true);
  this.formatter = FormatStyle.DDL.getFormatter();
  this.sqlExceptionHelper = new SqlExceptionHelper();
  this.importFiles =
      ConfigurationHelper.getString(
          AvailableSettings.HBM2DDL_IMPORT_FILES, properties, DEFAULT_IMPORT_FILE);
  this.dropSQL = configuration.generateDropSchemaScript(dialect);
  this.createSQL = configuration.generateSchemaCreationScript(dialect);
}
public IdentitySchema(Configuration configuration) { this.configuration = configuration; this.properties = configuration.getProperties(); this.dialect = Dialect.getDialect(properties); try { // get the mapping field via reflection :-( Field mappingField = Configuration.class.getDeclaredField("mapping"); mappingField.setAccessible(true); this.mapping = (Mapping) mappingField.get(configuration); } catch (Exception e) { throw new RuntimeException("couldn't get the hibernate mapping", e); } }
/**
 * Applies the requested pessimistic-lock options to the generated SQL.
 * Returns the SQL unchanged when no locking is requested; otherwise copies
 * the lock options (translating user aliases to generated SQL aliases),
 * collects key columns per entity when the dialect needs explicit
 * "for update of" columns, and delegates the rewrite to the dialect.
 *
 * @param sql the SQL to decorate with lock clauses
 * @param parameters carries the LockOptions to apply
 * @param dialect dialect performing the actual SQL rewrite
 * @param afterLoadActions unused here; part of the overridden signature
 * @return the (possibly) lock-decorated SQL
 */
@Override
protected String applyLocks(
    String sql,
    QueryParameters parameters,
    Dialect dialect,
    List<AfterLoadAction> afterLoadActions)
    throws QueryException {
  // can't cache this stuff either (per-invocation)
  final LockOptions lockOptions = parameters.getLockOptions();
  final String result;
  if (lockOptions == null
      || (lockOptions.getLockMode() == LockMode.NONE && lockOptions.getAliasLockCount() == 0)) {
    // No locking requested: return the SQL untouched.
    return sql;
  } else {
    // Copy the options so the caller's LockOptions are not mutated.
    LockOptions locks = new LockOptions();
    locks.setLockMode(lockOptions.getLockMode());
    locks.setTimeOut(lockOptions.getTimeOut());
    locks.setScope(lockOptions.getScope());
    // Translate user-visible aliases to the generated SQL aliases.
    Iterator iter = lockOptions.getAliasLockIterator();
    while (iter.hasNext()) {
      Map.Entry me = (Map.Entry) iter.next();
      locks.setAliasSpecificLockMode(
          getAliasName((String) me.getKey()), (LockMode) me.getValue());
    }
    // Some dialects need explicit "for update of <columns>"; collect the
    // identifier columns per entity alias for those.
    Map keyColumnNames = null;
    if (dialect.forUpdateOfColumns()) {
      keyColumnNames = new HashMap();
      for (int i = 0; i < names.length; i++) {
        keyColumnNames.put(names[i], persisters[i].getIdentifierColumnNames());
      }
    }
    result = dialect.applyLocksToSql(sql, locks, keyColumnNames);
  }
  logQuery(queryString, result);
  return result;
}
protected String applyLocks(String sql, Map lockModes, Dialect dialect) throws QueryException { // can't cache this stuff either (per-invocation) final String result; if (lockModes == null || lockModes.size() == 0) { result = sql; } else { Map aliasedLockModes = new HashMap(); Iterator iter = lockModes.entrySet().iterator(); while (iter.hasNext()) { Map.Entry me = (Map.Entry) iter.next(); aliasedLockModes.put(getAliasName((String) me.getKey()), me.getValue()); } Map keyColumnNames = null; if (dialect.forUpdateOfColumns()) { keyColumnNames = new HashMap(); for (int i = 0; i < names.length; i++) { keyColumnNames.put(names[i], persisters[i].getIdentifierColumnNames()); } } result = dialect.applyLocksToSql(sql, aliasedLockModes, keyColumnNames); } logQuery(queryString, result); return result; }
public static String expandBatchIdPlaceholder( String sql, Serializable[] ids, String alias, String[] keyColumnNames, Dialect dialect) { if (keyColumnNames.length == 1) { // non-composite return StringHelper.replace(sql, BATCH_ID_PLACEHOLDER, repeat("?", ids.length, ",")); } else { // composite if (dialect.supportsRowValueConstructorSyntaxInInList()) { final String tuple = "(" + StringHelper.repeat("?", keyColumnNames.length, ",") + ")"; return StringHelper.replace(sql, BATCH_ID_PLACEHOLDER, repeat(tuple, ids.length, ",")); } else { final String keyCheck = "(" + joinWithQualifierAndSuffix(keyColumnNames, alias, " = ?", " and ") + ")"; return replace(sql, BATCH_ID_PLACEHOLDER, repeat(keyCheck, ids.length, " or ")); } } }