/**
 * Generates the SQL statements that insert a new KIM attribute definition row.
 *
 * <p>An id is first allocated from the {@code krim_attr_defn_id_s} sequence table (insert a
 * row, then read back {@code max(id)}), and that id is used for the insert into
 * {@code krim_attr_defn_t}.
 *
 * @param database the target {@link liquibase.database.Database} for the generated statements
 * @return an array containing the single insert statement
 */
public SqlStatement[] generateStatements(Database database) {
  // Allocates the next id from the sequence table and reads it back.
  final SqlStatement nextIdStatement =
      new RuntimeStatement() {
        public Sql[] generate(Database database) {
          return new Sql[] {
            new UnparsedSql("insert into krim_attr_defn_id_s values(null)"),
            new UnparsedSql("select max(id) from krim_attr_defn_id_s")
          };
        }
      };
  final InsertStatement insert =
      new InsertStatement(database.getDefaultSchemaName(), "krim_attr_defn_t");
  try {
    final BigInteger attributeId =
        (BigInteger)
            ExecutorService.getInstance()
                .getExecutor(database)
                .queryForObject(nextIdStatement, BigInteger.class);
    insert.addColumnValue("KIM_ATTR_DEFN_ID", attributeId);
    insert.addColumnValue("nmspc_cd", getNamespace());
    insert.addColumnValue("NM", getName());
    insert.addColumnValue("LBL", getLabel());
    insert.addColumnValue("actv_ind", getActive());
    insert.addColumnValue("CMPNT_NM", getComponent());
    insert.addColumnValue("ver_nbr", 1);
    // NOTE(review): "sys_guid()" is passed as a plain string value — confirm InsertStatement
    // renders it as a function call rather than a quoted literal.
    insert.addColumnValue("obj_id", "sys_guid()");
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return new SqlStatement[] {insert};
}
@Test public void accepts() throws DatabaseException { ArrayList<RanChangeSet> ranChanges = new ArrayList<RanChangeSet>(); ranChanges.add( new RanChangeSet( "path/changelog", "1", "testAuthor", CheckSum.parse("12345"), new Date(), null, null)); ranChanges.add( new RanChangeSet( "path/changelog", "2", "testAuthor", CheckSum.parse("12345"), new Date(), null, null)); Database database = createMock(Database.class); expect(database.getRanChangeSetList()).andReturn(ranChanges); expect(database.getDatabaseChangeLogTableName()).andReturn("DATABASECHANGELOG").anyTimes(); expect(database.getDefaultSchemaName()).andReturn(null).anyTimes(); Executor template = createMock(Executor.class); expect(template.update(isA(UpdateStatement.class))).andReturn(1).anyTimes(); // template.comment("Lock Database"); // expectLastCall(); replay(database); replay(template); ExecutorService.getInstance().setExecutor(database, template); ShouldRunChangeSetFilter filter = new ShouldRunChangeSetFilter(database); // everything same assertFalse( filter.accepts( new ChangeSet("1", "testAuthor", false, false, "path/changelog", null, null))); // alwaysRun assertTrue( filter.accepts( new ChangeSet("1", "testAuthor", true, false, "path/changelog", null, null))); // run on change assertTrue( filter.accepts( new ChangeSet("1", "testAuthor", false, true, "path/changelog", null, null))); // different id assertTrue( filter.accepts( new ChangeSet("3", "testAuthor", false, false, "path/changelog", null, null))); // different author assertTrue( filter.accepts( new ChangeSet("1", "otherAuthor", false, false, "path/changelog", null, null))); // different path assertTrue( filter.accepts( new ChangeSet("1", "testAuthor", false, false, "other/changelog", null, null))); }
/**
 * Builds the statements that rename a table.
 *
 * @param database the database the statements will run against
 * @return the rename statement, followed by a reorganize statement when the target is DB2
 * @throws UnsupportedChangeException if the change cannot be generated for this database
 */
public SqlStatement[] generateStatements(Database database) throws UnsupportedChangeException {
  final String schema =
      getSchemaName() != null ? getSchemaName() : database.getDefaultSchemaName();
  final List<SqlStatement> result = new ArrayList<SqlStatement>();
  result.add(new RenameTableStatement(schema, getOldTableName(), getNewTableName()));
  // DB2 additionally gets a REORG of the renamed table.
  if (database instanceof DB2Database) {
    result.add(new ReorganizeTableStatement(schema, getNewTableName()));
  }
  return result.toArray(new SqlStatement[result.size()]);
}
/**
 * Builds the statement that enables the named constraint on the target table.
 *
 * @param database used to resolve the default schema when none is configured
 * @return a single enable-constraint statement, with the configured tablespace applied
 */
@Override
public SqlStatement[] generateStatements(Database database) {
  String schema = getSchemaName();
  if (schema == null) {
    schema = database.getDefaultSchemaName();
  }
  EnableConstraintStatement enable =
      new EnableConstraintStatement(getTableName(), schema, getConstraintName());
  enable.setTablespace(getTablespace());
  return new SqlStatement[] {enable};
}
/**
 * Builds the statement that drops the target table.
 *
 * @param database used to resolve the default schema when none is configured
 * @return a single drop-table statement
 */
public SqlStatement[] generateStatements(Database database) {
  // An unset cascadeConstraints flag is treated as false.
  final boolean cascade = Boolean.TRUE.equals(isCascadeConstraints());
  final String schema =
      getSchemaName() != null ? getSchemaName() : database.getDefaultSchemaName();
  return new SqlStatement[] {new DropTableStatement(schema, getTableName(), cascade)};
}
/**
 * Builds the statements that add a NOT NULL constraint to the target column.
 *
 * <p>When a {@code defaultNullValue} is configured, existing NULLs are first backfilled with it
 * so the constraint can be applied.
 *
 * @param database used to resolve the default schema when none is configured
 * @return the optional backfill update followed by the add-not-null statement
 */
public SqlStatement[] generateStatements(Database database) {
  final String schema =
      getSchemaName() != null ? getSchemaName() : database.getDefaultSchemaName();
  final List<SqlStatement> result = new ArrayList<SqlStatement>();
  if (defaultNullValue != null) {
    // Replace existing NULLs before the constraint is added.
    result.add(
        new UpdateStatement(schema, getTableName())
            .addNewColumnValue(getColumnName(), defaultNullValue)
            .setWhereClause(getColumnName() + " IS NULL"));
  }
  result.add(
      new AddNotNullConstraintStatement(
          schema, getTableName(), getColumnName(), getConstraintName()));
  return result.toArray(new SqlStatement[result.size()]);
}
/**
 * Precondition check that the configured view exists.
 *
 * @throws PreconditionFailedException if the view does not exist
 * @throws PreconditionErrorException if the existence check itself fails
 */
public void check(Database database, DatabaseChangeLog changeLog, ChangeSet changeSet)
    throws PreconditionFailedException, PreconditionErrorException {
  try {
    String schema = getSchemaName();
    if (schema == null) {
      schema = (database == null) ? null : database.getDefaultSchemaName();
    }
    boolean viewExists =
        DatabaseSnapshotGeneratorFactory.getInstance()
            .getGenerator(database)
            .hasView(schema, getViewName(), database);
    if (!viewExists) {
      throw new PreconditionFailedException(
          "View " + database.escapeTableName(schema, getViewName()) + " does not exist",
          changeLog,
          this);
    }
  } catch (PreconditionFailedException e) {
    // Propagate the failure unchanged; only wrap unexpected errors below.
    throw e;
  } catch (Exception e) {
    throw new PreconditionErrorException(e, changeLog, this);
  }
}
@Override public Sql[] generateSql( CreateTempTableStatementVertica statement, Database database, SqlGeneratorChain sqlGeneratorChain) { StringBuilder sql = new StringBuilder(); sql.append("CREATE "); if (statement.getIsGlobal() != null) sql.append(" GLOBAL "); else sql.append(" LOCAL "); sql.append(" TEMPORARY TABLE "); if (statement.getSchemaName() != null) sql.append(statement.getSchemaName()).append("."); else sql.append(database.getDefaultSchemaName()).append("."); if (statement.getTableName() != null) { sql.append(statement.getTableName()).append(" "); } boolean isSinglePrimaryKeyColumn = statement.getPrimaryKeyConstraint() != null && statement.getPrimaryKeyConstraint().getColumns().size() == 1; boolean isPrimaryKeyAutoIncrement = false; sql.append("( "); Iterator<String> columnIterator = statement.getColumns().iterator(); List<String> primaryKeyColumns = new LinkedList<String>(); while (columnIterator.hasNext()) { String column = columnIterator.next(); DatabaseDataType columnType = statement.getColumnTypes().get(column).toDatabaseDataType(database); sql.append( database.escapeColumnName( statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), column)); // This is the difference between vertica & other RDBMS - the encoding part. 
AutoIncrementConstraint autoIncrementConstraint = null; for (AutoIncrementConstraint currentAutoIncrementConstraint : statement.getAutoIncrementConstraints()) { if (column.equals(currentAutoIncrementConstraint.getColumnName())) { autoIncrementConstraint = currentAutoIncrementConstraint; break; } } boolean isAutoIncrementColumn = autoIncrementConstraint != null; boolean isPrimaryKeyColumn = statement.getPrimaryKeyConstraint() != null && statement.getPrimaryKeyConstraint().getColumns().contains(column); isPrimaryKeyAutoIncrement = isPrimaryKeyAutoIncrement || isPrimaryKeyColumn && isAutoIncrementColumn; if (isPrimaryKeyColumn) { primaryKeyColumns.add(column); } if (!isAutoIncrementColumn) { sql.append(" ").append(columnType); } // for the serial data type in postgres, there should be no default value if (!columnType.isAutoIncrement() && statement.getDefaultValue(column) != null) { Object defaultValue = statement.getDefaultValue(column); sql.append(" DEFAULT "); sql.append(statement.getColumnTypes().get(column).objectToSql(defaultValue, database)); } // TODO: Change this - vertica supports both auto incremental & identity fields. 
if (isAutoIncrementColumn) { // TODO: check if database supports auto increment on non primary key column if (database.supportsAutoIncrement()) { String autoIncrementClause = database.getAutoIncrementClause( autoIncrementConstraint.getStartWith(), autoIncrementConstraint.getIncrementBy()); if (!"".equals(autoIncrementClause)) { sql.append(" ").append(autoIncrementClause); } if (database instanceof PostgresDatabase && autoIncrementConstraint.getStartWith() != null) { String sequenceName = statement.getTableName() + "_" + column + "_seq"; // additionalSql.add(new UnparsedSql("alter sequence // "+database.escapeSequenceName(statement.getCatalogName(), statement.getSchemaName(), // sequenceName)+" start with "+autoIncrementConstraint.getStartWith(), new // Sequence().setName(sequenceName).setSchema(statement.getCatalogName(), // statement.getSchemaName()))); } } else { LogFactory.getLogger() .warning( database.getShortName() + " does not support autoincrement columns as request for " + (database.escapeTableName( statement.getCatalogName(), statement.getSchemaName(), statement.getTableName()))); } } if (isPrimaryKeyColumn) { String pkName = StringUtils.trimToNull(statement.getPrimaryKeyConstraint().getConstraintName()); if (pkName != null) { sql.append(" CONSTRAINT "); sql.append(database.escapeConstraintName(pkName)); } sql.append(" PRIMARY KEY "); } if (statement.getNotNullColumns().contains(column)) { sql.append(" NOT NULL"); } if (statement.getColumnEncoding(column) != null) { sql.append(" ENCODING ").append(statement.getColumnEncoding(column)); } if (statement.getColumnAccessrank(column) != null) sql.append(" ACCESSRANK ").append(statement.getColumnAccessrank(column)); if (columnIterator.hasNext()) { sql.append(", "); } } sql.append(" )"); sql.append(" ON COMMIT "); if (statement.getIsPreserve()) sql.append(" PRESERVE "); else sql.append(" DELETE"); sql.append(" ROWS "); if (statement.getOrderby() != null) sql.append(" ORDER BY ").append(statement.getOrderby()); if 
(statement.getSegmentation() != null) { Segmentation seg = statement.getSegmentation(); if (seg.getUnsegmented() == true) { sql.append(" UNSEGMENTED "); } else { sql.append(" SEGMENTED BY "); sql.append(seg.getExpression()); } if (seg.getAllNodes()) { sql.append(" ALL NODES "); } else { sql.append(" NODES ").append(seg.getNodes()); if (seg.getOffset() != null) sql.append(" OFFSET ").append(seg.getOffset().toString()); } } if (statement.getKsafe() != null) sql.append(" KSAFE ").append(statement.getKsafe()); if (statement.getPartitionby() != null) sql.append(" PARTITION BY ").append(statement.getPartitionby()); System.out.println(sql.toString()); return new Sql[] {new UnparsedSql(sql.toString())}; }