Commit 3f1f125

Merge branch 'master' into dependabot/maven/org.apache.maven.plugins-maven-shade-plugin-3.4.0

nvoxland committed Sep 21, 2022
2 parents aaafaea + 8e1a3fb commit 3f1f125
Showing 34 changed files with 521 additions and 47 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/build.yml
@@ -20,6 +20,8 @@ on:

jobs:
check_build_safety:
permissions:
contents: none
name: Check if Build should be done
runs-on: ubuntu-latest
steps:
2 changes: 2 additions & 0 deletions .github/workflows/create-release.yml
@@ -96,6 +96,8 @@ jobs:
(cd download/repo/liquibase-pro && git push -f origin v${{ needs.setup.outputs.version }})
build-installers:
permissions:
contents: write # for softprops/action-gh-release to create GitHub release
needs: [ setup, reversion ]
name: Build Installers
runs-on: macos-latest #needs macos for apple notarization
4 changes: 2 additions & 2 deletions .github/workflows/release-published.yml
@@ -42,7 +42,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download release assets
uses: robinraju/release-downloader@v1.4
uses: robinraju/release-downloader@v1.5
with:
repository: "liquibase/liquibase"
tag: "${{ needs.setup.outputs.tag }}"
@@ -175,7 +175,7 @@ jobs:
contents: read
steps:
- name: Download release javadocs
uses: robinraju/release-downloader@v1.4
uses: robinraju/release-downloader@v1.5
with:
repository: "liquibase/liquibase"
tag: "${{ needs.setup.outputs.tag }}"
5 changes: 4 additions & 1 deletion .github/workflows/snyk.yml
@@ -8,6 +8,9 @@ on:
- cron: "5 6 * * *"
workflow_dispatch:

permissions:
contents: read

jobs:
security-scan:
# This workflow only runs on the main liquibase repo, not in forks
@@ -39,7 +42,7 @@ jobs:
## This builds and installs the sub-modules so they are available. The liquibase-core:test module has to be installed manually since it wasn't coming along with the regular mvn install
- name: Install modules
run: |
mvn -B -pl '!liquibase-dist' test-compile install -DskipTests=true
mvn -B test-compile install -DskipTests=true
mvn -B org.apache.maven.plugins:maven-install-plugin:3.0.0-M1:install-file -Dfile=liquibase-core/target/liquibase-core-0-SNAPSHOT-tests.jar -Dpackaging=jar -Dclassifier=tests -DgroupId=org.liquibase -DartifactId=liquibase-core
## snyk monitor requires --all-projects because otherwise it only reports on the dependencies of one of the sub-modules. It would be nice if we could have one snyk project which included all the sub-modules in it, but that doesn't seem possible at this point
2 changes: 1 addition & 1 deletion LICENSE.txt
@@ -187,7 +187,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]
Copyright 2022 Liquibase Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
2 changes: 1 addition & 1 deletion liquibase-core/pom.xml
@@ -111,7 +111,7 @@
<dependency>
<groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>5.6</version>
<version>5.7.0</version>
</dependency>
</dependencies>

6 changes: 3 additions & 3 deletions liquibase-core/src/main/java/liquibase/Scope.java
@@ -262,7 +262,7 @@ public boolean has(Enum key) {
}


public <T> T get(Enum key, Class<T> type) {
public synchronized <T> T get(Enum key, Class<T> type) {
return get(key.name(), type);
}

@@ -288,7 +288,7 @@ public <T> T get(String key, Class<T> type) {
* If the value is not defined, the passed defaultValue is returned.
* The value is converted to the given type if necessary using {@link liquibase.util.ObjectUtil#convert(Object, Class)}.
*/
public <T> T get(String key, T defaultValue) {
public synchronized <T> T get(String key, T defaultValue) {
Class type;
if (defaultValue == null) {
type = Object.class;
@@ -307,7 +307,7 @@ public <T> T get(String key, T defaultValue) {
* Looks up the singleton object of the given type. If the singleton has not been created yet, it will be instantiated.
* The singleton is a singleton based on the root scope and the same object will be returned for all child scopes of the root.
*/
public <T extends SingletonObject> T getSingleton(Class<T> type) {
public synchronized <T extends SingletonObject> T getSingleton(Class<T> type) {
if (getParent() != null) {
return getParent().getSingleton(type);
}
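For context, the accessors made synchronized above are the ordinary scope-lookup entry points described by their javadoc: get(String key, T defaultValue) falls back to the supplied default when the key is not defined, get(Enum key, Class<T> type) delegates to get(key.name(), type), and getSingleton(Class) always resolves against the root scope so every child scope shares the same instance. A minimal usage sketch under those assumptions follows; the key name, default value, and ExampleKeys enum are purely illustrative:

import liquibase.Scope;

public class ScopeLookupExample {

    // Hypothetical enum used only to demonstrate the Enum-keyed overload.
    private enum ExampleKeys { exampleKey }

    public static void main(String[] args) {
        Scope scope = Scope.getCurrentScope();

        // get(String key, T defaultValue): "exampleKey" is not defined in this scope,
        // so the supplied default is returned (converted to the default's type if needed).
        String fromDefault = scope.get("exampleKey", "fallback-value");

        // get(Enum key, Class<T> type): delegates to get(key.name(), type);
        // returns null here because nothing was stored under "exampleKey".
        String typed = scope.get(ExampleKeys.exampleKey, String.class);

        System.out.println(fromDefault + " / " + typed);
    }
}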
@@ -0,0 +1,42 @@
package liquibase;

/**
* An alternative to {@link SingletonScopeManager} which manages a separate Scope per thread.<br><br>
* Integrations that would prefer to use this scope manager can call <pre>Scope.setScopeManager(new ThreadLocalScopeManager())</pre>.
* <br><br>
 * The value of Scope.getCurrentScope() at the time of the ThreadLocalScopeManager's creation will be the basis of all scopes created after setScopeManager() is changed,
* so you will generally want to setScopeManager as soon as possible.
*/
@SuppressWarnings("java:S5164")
public class ThreadLocalScopeManager extends ScopeManager {

private final Scope rootScope;
private final ThreadLocal<Scope> threadLocalScopes = new ThreadLocal<>();

public ThreadLocalScopeManager() {
this.rootScope = Scope.getCurrentScope();
}

@Override
public synchronized Scope getCurrentScope() {
Scope current = threadLocalScopes.get();

if (current == null) {
threadLocalScopes.set(rootScope);
current = rootScope;
}

return current;
}

@Override
protected synchronized void setCurrentScope(Scope scope) {
this.threadLocalScopes.set(scope);
}
@Override
protected Scope init(Scope scope) throws Exception {
return rootScope;
}


}
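As the javadoc above notes, an integration opts in by calling Scope.setScopeManager(new ThreadLocalScopeManager()) as early as possible, so that the scope current at construction time becomes the root every per-thread scope is based on. A small bootstrap sketch under that assumption (the worker threads and their output are illustrative):

import liquibase.Scope;
import liquibase.ThreadLocalScopeManager;

public class ThreadLocalScopeBootstrap {

    public static void main(String[] args) throws InterruptedException {
        // Install as early as possible: the Scope.getCurrentScope() captured inside the
        // ThreadLocalScopeManager constructor becomes the root for all later thread scopes.
        Scope.setScopeManager(new ThreadLocalScopeManager());

        Runnable worker = () -> {
            // Each thread now resolves its own current scope, seeded from the root above.
            System.out.println(Thread.currentThread().getName() + " -> " + Scope.getCurrentScope());
        };

        Thread first = new Thread(worker, "worker-1");
        Thread second = new Thread(worker, "worker-2");
        first.start();
        second.start();
        first.join();
        second.join();
    }
}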
@@ -214,6 +214,7 @@ protected Change[] createInverses() {
List<Change> inverses = new ArrayList<>();

DropColumnChange inverse = new DropColumnChange();
inverse.setCatalogName(getCatalogName());
inverse.setSchemaName(getSchemaName());
inverse.setTableName(getTableName());

@@ -81,7 +81,7 @@ public void setMinValue(BigInteger minValue) {
this.minValue = minValue;
}

@DatabaseChangeProperty(description = "Does the sequence need to be guaranteed to be genererated inm the order of request?")
@DatabaseChangeProperty(description = "Does the sequence need to be guaranteed to be generated in the order of request?")
public Boolean isOrdered() {
return ordered;
}
@@ -616,7 +616,7 @@ public ExecType execute(DatabaseChangeLog databaseChangeLog, ChangeExecListener
execType = ExecType.MARK_RAN;
skipChange = true;

log.info("Marking ChangeSet: " + toString() + " ran despite precondition failure due to onFail='MARK_RAN': " + message);
log.info("Marking ChangeSet: \"" + toString() + "\" as ran despite precondition failure due to onFail='MARK_RAN': " + message);
} else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.WARN)) {
execType = null; //already warned
} else {
@@ -10,6 +10,7 @@
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.core.Schema;
import liquibase.util.StringUtil;
import org.apache.commons.lang3.StringUtils;

public class DatabaseUtils {
/**
@@ -48,13 +49,15 @@ public static void initializeDatabase(String defaultCatalogName, String defaultS
finalSearchPath = defaultSchemaName;
}

//If existing search path entries are not quoted, quote them. Some databases do not show them as quoted even though they need to be (like $user or case sensitive schemas)
finalSearchPath += ", " + StringUtil.join(StringUtil.splitAndTrim(searchPath, ","), ",", (StringUtil.StringUtilFormatter<String>) obj -> {
if (obj.startsWith("\"")) {
return obj;
}
return ((PostgresDatabase) database).quoteObject(obj, Schema.class);
});
if (StringUtils.isNotBlank(searchPath)) {
//If existing search path entries are not quoted, quote them. Some databases do not show them as quoted even though they need to be (like $user or case sensitive schemas)
finalSearchPath += ", " + StringUtil.join(StringUtil.splitAndTrim(searchPath, ","), ",", (StringUtil.StringUtilFormatter<String>) obj -> {
if (obj.startsWith("\"")) {
return obj;
}
return ((PostgresDatabase) database).quoteObject(obj, Schema.class);
});
}

executor.execute(new RawSqlStatement("SET SEARCH_PATH TO " + finalSearchPath));
}
@@ -482,7 +482,7 @@ public void releaseSavepoint(Savepoint savepoint) throws DatabaseException {
@Override
public void rollback() throws DatabaseException {
try {
if (!con.getAutoCommit() && !con.isClosed()) {
if (!con.isClosed() && !con.getAutoCommit()) {
con.rollback();
}
} catch (SQLException e) {
@@ -1,17 +1,17 @@
package liquibase.datatype.core;

import liquibase.GlobalConfiguration;
import liquibase.change.core.LoadDataChange;
import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
import liquibase.exception.DatabaseException;
import liquibase.statement.DatabaseFunction;

import java.util.Locale;

@DataTypeInfo(name="smallint", aliases = {"java.sql.Types.SMALLINT", "int2"}, minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
@DataTypeInfo(name="smallint", aliases = {"java.sql.Types.SMALLINT", "int2", "smallserial"}, minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class SmallIntType extends LiquibaseDataType {

private boolean autoIncrement;
@@ -52,6 +52,12 @@ public DatabaseDataType toDatabaseDataType(Database database) {
if (((PostgresDatabase) database).useSerialDatatypes()) {
return new DatabaseDataType("SMALLSERIAL");
}
} else {
if (GlobalConfiguration.CONVERT_DATA_TYPES.getCurrentValue() || this.getRawDefinition() == null) {
return new DatabaseDataType("SMALLINT");
} else {
return new DatabaseDataType(this.getRawDefinition());
}
}
return new DatabaseDataType("SMALLINT"); //always smallint regardless of parameters passed
}
@@ -79,4 +85,13 @@ public String objectToSql(Object value, Database database) {
public LoadDataChange.LOAD_DATA_TYPE getLoadTypeName() {
return LoadDataChange.LOAD_DATA_TYPE.NUMERIC;
}

@Override
public void finishInitialization(String originalDefinition) {
super.finishInitialization(originalDefinition);

if (originalDefinition.toLowerCase(Locale.US).contains("serial")) {
autoIncrement = true;
}
}
}
@@ -24,6 +24,12 @@
@SuppressWarnings({"unchecked"})
public class ColumnMapRowMapper implements RowMapper {

private final boolean caseSensitiveDatabase;

public ColumnMapRowMapper(boolean caseSensitiveDatabase) {
this.caseSensitiveDatabase = caseSensitiveDatabase;
}

@Override
public Object mapRow(ResultSet rs, int rowNum) throws SQLException {
ResultSetMetaData rsmd = rs.getMetaData();
@@ -55,6 +61,9 @@ protected Map createColumnMap(int columnCount) {
* @see java.sql.ResultSetMetaData#getColumnName
*/
protected String getColumnKey(String columnName) {
if (this.caseSensitiveDatabase) {
return columnName;
}
return columnName.toUpperCase(Locale.US);
}

@@ -153,7 +153,7 @@ public void execute(final SqlStatement sql, final List<SqlVisitor> sqlVisitors)
}
if (sql instanceof CompoundStatement) {
if (database instanceof Db2zDatabase) {
executeDb2ZosComplexStatement(sql);
executeDb2ZosComplexStatement(sql, sqlVisitors);
return;
}
}
@@ -319,7 +319,7 @@ public SqlStatement getStatement() {
* @see ColumnMapRowMapper
*/
protected RowMapper getColumnMapRowMapper() {
return new ColumnMapRowMapper();
return new ColumnMapRowMapper(database.isCaseSensitive());
}

/**
@@ -338,20 +338,27 @@ public void comment(String message) throws DatabaseException {
Scope.getCurrentScope().getLog(getClass()).fine(message);
}

private void executeDb2ZosComplexStatement(SqlStatement sqlStatement) throws DatabaseException {
private void executeDb2ZosComplexStatement(final SqlStatement sqlStatement, final List<SqlVisitor> sqlVisitors) throws DatabaseException {
DatabaseConnection con = database.getConnection();

if (con instanceof OfflineConnection) {
throw new DatabaseException("Cannot execute commands against an offline database");
}
Sql[] sqls = SqlGeneratorFactory.getInstance().generateSql(sqlStatement, database);
for (Sql sql : sqls) {
String stmtText = sql.toSql();
if (sqlVisitors != null) {
for (SqlVisitor visitor : sqlVisitors) {
stmtText = visitor.modifySql(stmtText, database);
}
}

try {
if (sql instanceof CallableSql) {
CallableStatement call = null;
ResultSet resultSet = null;
try {
call = ((JdbcConnection) con).getUnderlyingConnection().prepareCall(sql.toSql());
call = ((JdbcConnection) con).getUnderlyingConnection().prepareCall(stmtText);
resultSet = call.executeQuery();
checkCallStatus(resultSet, ((CallableSql) sql).getExpectedStatus());
} finally {
@@ -361,11 +368,11 @@ private void executeDb2ZosComplexStatement(SqlStatement sqlStatement) throws Dat
Statement stmt = null;
try {
if (sqlStatement instanceof CompoundStatement) {
stmt = ((JdbcConnection) con).getUnderlyingConnection().prepareStatement(sql.toSql());
stmt = ((JdbcConnection) con).getUnderlyingConnection().prepareStatement(stmtText);
((PreparedStatement)stmt).execute();
} else {
stmt = ((JdbcConnection) con).getUnderlyingConnection().createStatement();
stmt.execute(sql.toSql());
stmt.execute(stmtText);
}
con.commit();
} finally {
@@ -1035,7 +1035,7 @@ protected List<CachedRow> extract(ResultSet resultSet, boolean informixIndexTrim
List<Map> result;

try {
result = (List<Map>) new RowMapperNotNullConstraintsResultSetExtractor(new ColumnMapRowMapper() {
result = (List<Map>) new RowMapperNotNullConstraintsResultSetExtractor(new ColumnMapRowMapper(database.isCaseSensitive()) {
@Override
protected Object getColumnValue(ResultSet rs, int index) throws SQLException {
Object value = super.getColumnValue(rs, index);
@@ -314,7 +314,7 @@ protected List<CachedRow> extract(ResultSet resultSet, final boolean informixInd
List<Map> result;
List<CachedRow> returnList = new ArrayList<>();
try {
result = (List<Map>) new RowMapperResultSetExtractor(new ColumnMapRowMapper() {
result = (List<Map>) new RowMapperResultSetExtractor(new ColumnMapRowMapper(database.isCaseSensitive()) {
@Override
protected Object getColumnValue(ResultSet rs, int index) throws SQLException {
Object value = super.getColumnValue(rs, index);
@@ -194,6 +194,10 @@ protected String getSelectSequenceSql(Schema schema, Database database) {
* 12cR1: http://docs.oracle.com/database/121/SQLRF/statements_6017.htm
* 11gR2: http://docs.oracle.com/cd/E11882_01/server.112/e41084/statements_6015.htm
*/
String catalogName = schema.getCatalogName();
if (catalogName == null || catalogName.isEmpty()) {
catalogName = database.getDefaultCatalogName();
}
return "SELECT sequence_name, \n" +
"CASE WHEN increment_by > 0 \n" +
" THEN CASE WHEN min_value=1 THEN NULL ELSE min_value END\n" +
@@ -208,7 +212,7 @@
"CASE WHEN order_flag = 'N' THEN NULL ELSE order_flag END AS is_ordered, \n" +
"LAST_NUMBER as START_VALUE, \n" +
"CASE WHEN cache_size = 20 THEN NULL ELSE cache_size END AS cache_size \n" +
"FROM ALL_SEQUENCES WHERE SEQUENCE_OWNER = '" + schema.getCatalogName() + "'";
"FROM ALL_SEQUENCES WHERE SEQUENCE_OWNER = '" + catalogName + "'";
} else if (database instanceof PostgresDatabase) {
int version = 9;
try {
@@ -82,7 +82,7 @@ protected Collection<SqlGenerator> getGenerators() {
return generators;
}

public SortedSet<SqlGenerator> getGenerators(SqlStatement statement, Database database) {
public synchronized SortedSet<SqlGenerator> getGenerators(SqlStatement statement, Database database) {
String databaseName = null;
if (database == null) {
databaseName = "NULL";
