Fix to ensure metadata returned follows JDBC data type specs (#2326)
barryw-mssql committed Mar 5, 2024
1 parent dc191db · commit eae6d7b
Showing 4 changed files with 189 additions and 14 deletions.
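For context, the JDBC DatabaseMetaData.getColumns contract fixes the Java type of each metadata column: DATA_TYPE, COLUMN_SIZE, NULLABLE and similar columns are int, SOURCE_DATA_TYPE is short, and columns such as REMARKS and IS_NULLABLE are String. The sketch below is not part of the commit; the server URL, credentials, and table name are placeholders. It shows the kind of check this change makes hold on Azure Synapse / DW connections as well:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;

public class GetColumnsTypeCheck {
    public static void main(String[] args) throws Exception {
        // Placeholder connection string; adjust host, database and credentials.
        String url = "jdbc:sqlserver://localhost;databaseName=tempdb;encrypt=true;trustServerCertificate=true";
        try (Connection conn = DriverManager.getConnection(url, "user", "password");
                ResultSet rs = conn.getMetaData().getColumns(null, null, "MyTable", null)) {
            ResultSetMetaData md = rs.getMetaData();
            for (int i = 1; i <= md.getColumnCount(); i++) {
                // With this fix, DATA_TYPE reports java.lang.Integer and
                // SOURCE_DATA_TYPE reports java.lang.Short, matching the JDBC spec.
                System.out.println(md.getColumnLabel(i) + " -> " + md.getColumnClassName(i));
            }
        }
    }
}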
@@ -258,11 +258,17 @@ private void checkClosed() throws SQLServerException {
private static final String IS_AUTOINCREMENT = "IS_AUTOINCREMENT";
private static final String ACTIVITY_ID = " ActivityId: ";

private static final String NVARCHAR = JDBCType.NVARCHAR.name();
private static final String VARCHAR = JDBCType.VARCHAR.name();
private static final String INTEGER = JDBCType.INTEGER.name();
private static final String SMALLINT = JDBCType.SMALLINT.name();

private static final String SQL_KEYWORDS = createSqlKeyWords();

// Use LinkedHashMap so elements are retrieved in the order they were inserted
/** getColumns columns */
private LinkedHashMap<Integer, String> getColumnsDWColumns = null;
private LinkedHashMap<Integer, String> getTypesDWColumns = null;
/** getImportedKeys columns */
private volatile LinkedHashMap<Integer, String> getImportedKeysDWColumns;
private static final Lock LOCK = new ReentrantLock();
@@ -630,10 +636,13 @@ public java.sql.ResultSet getColumns(String catalog, String schema, String table

+ "INSERT INTO @mssqljdbc_temp_sp_columns_result EXEC sp_columns_100 ?,?,?,?,?,?;"

+ "SELECT TABLE_QUALIFIER AS TABLE_CAT, TABLE_OWNER AS TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, DATA_TYPE,"
+ "TYPE_NAME, PRECISION AS COLUMN_SIZE, LENGTH AS BUFFER_LENGTH, SCALE AS DECIMAL_DIGITS, RADIX AS NUM_PREC_RADIX,"
+ "NULLABLE, REMARKS, COLUMN_DEF, SQL_DATA_TYPE, SQL_DATETIME_SUB, CHAR_OCTET_LENGTH, ORDINAL_POSITION, IS_NULLABLE,"
+ "NULL AS SCOPE_CATALOG, NULL AS SCOPE_SCHEMA, NULL AS SCOPE_TABLE, SS_DATA_TYPE AS SOURCE_DATA_TYPE,"
+ "SELECT TABLE_QUALIFIER AS TABLE_CAT, TABLE_OWNER AS TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, "
+ "CAST(DATA_TYPE AS INT) AS DATA_TYPE,TYPE_NAME, PRECISION AS COLUMN_SIZE, LENGTH AS BUFFER_LENGTH, "
+ "CAST(SCALE AS INT) AS DECIMAL_DIGITS, CAST(RADIX AS INT) AS NUM_PREC_RADIX,CAST(NULLABLE AS INT) AS NULLABLE, "
+ "CAST(REMARKS AS VARCHAR) AS REMARKS, COLUMN_DEF, CAST(SQL_DATA_TYPE AS INT) AS SQL_DATA_TYPE, "
+ "CAST(SQL_DATETIME_SUB AS INT) AS SQL_DATETIME_SUB, CHAR_OCTET_LENGTH, ORDINAL_POSITION, IS_NULLABLE,"
+ "CAST(NULL AS VARCHAR) AS SCOPE_CATALOG, CAST(NULL AS VARCHAR) AS SCOPE_SCHEMA, CAST(NULL AS VARCHAR) AS SCOPE_TABLE, "
+ "CAST(SS_DATA_TYPE AS SMALLINT) AS SOURCE_DATA_TYPE, "
+ "CASE SS_IS_IDENTITY WHEN 0 THEN 'NO' WHEN 1 THEN 'YES' WHEN '' THEN '' END AS IS_AUTOINCREMENT,"
+ "CASE SS_IS_COMPUTED WHEN 0 THEN 'NO' WHEN 1 THEN 'YES' WHEN '' THEN '' END AS IS_GENERATEDCOLUMN, "
+ "SS_IS_SPARSE, SS_IS_COLUMN_SET, SS_UDT_CATALOG_NAME, SS_UDT_SCHEMA_NAME, SS_UDT_ASSEMBLY_TYPE_NAME,"
@@ -721,6 +730,53 @@ public java.sql.ResultSet getColumns(String catalog, String schema, String table
getColumnsDWColumns.put(27, SS_XML_SCHEMACOLLECTION_SCHEMA_NAME);
getColumnsDWColumns.put(28, SS_XML_SCHEMACOLLECTION_NAME);
}
if (null == getTypesDWColumns) {
getTypesDWColumns = new LinkedHashMap<>();
getTypesDWColumns.put(1, NVARCHAR); // TABLE_CAT
getTypesDWColumns.put(2, NVARCHAR); // TABLE_SCHEM
getTypesDWColumns.put(3, NVARCHAR); // TABLE_NAME
getTypesDWColumns.put(4, NVARCHAR); // COLUMN_NAME
getTypesDWColumns.put(5, INTEGER); // DATA_TYPE
getTypesDWColumns.put(6, NVARCHAR); // TYPE_NAME
getTypesDWColumns.put(7, INTEGER); // COLUMN_SIZE
getTypesDWColumns.put(8, INTEGER); // BUFFER_LENGTH
getTypesDWColumns.put(9, INTEGER); // DECIMAL_DIGITS
getTypesDWColumns.put(10, INTEGER); // NUM_PREC_RADIX
getTypesDWColumns.put(11, INTEGER); // NULLABLE
getTypesDWColumns.put(12, VARCHAR); // REMARKS
getTypesDWColumns.put(13, NVARCHAR); // COLUMN_DEF
getTypesDWColumns.put(14, INTEGER); // SQL_DATA_TYPE
getTypesDWColumns.put(15, INTEGER); // SQL_DATETIME_SUB
getTypesDWColumns.put(16, INTEGER); // CHAR_OCTET_LENGTH
getTypesDWColumns.put(17, INTEGER); // ORDINAL_POSITION
getTypesDWColumns.put(18, VARCHAR); // IS_NULLABLE
/*
* Use negative value keys to indicate that this column doesn't exist in SQL Server and should just
* be queried as 'NULL'
*/
getTypesDWColumns.put(-1, VARCHAR); // SCOPE_CATALOG
getTypesDWColumns.put(-2, VARCHAR); // SCOPE_SCHEMA
getTypesDWColumns.put(-3, VARCHAR); // SCOPE_TABLE
getTypesDWColumns.put(29, SMALLINT); // SOURCE_DATA_TYPE
getTypesDWColumns.put(22, VARCHAR); // IS_AUTOINCREMENT
getTypesDWColumns.put(21, VARCHAR); // IS_GENERATEDCOLUMN
getTypesDWColumns.put(19, SMALLINT); // SS_IS_SPARSE
getTypesDWColumns.put(20, SMALLINT); // SS_IS_COLUMN_SET
getTypesDWColumns.put(23, NVARCHAR); // SS_UDT_CATALOG_NAME
getTypesDWColumns.put(24, NVARCHAR); // SS_UDT_SCHEMA_NAME
getTypesDWColumns.put(25, NVARCHAR); // SS_UDT_ASSEMBLY_TYPE_NAME
getTypesDWColumns.put(26, NVARCHAR); // SS_XML_SCHEMACOLLECTION_CATALOG_NAME
getTypesDWColumns.put(27, NVARCHAR); // SS_XML_SCHEMACOLLECTION_SCHEMA_NAME
getTypesDWColumns.put(28, NVARCHAR); // SS_XML_SCHEMACOLLECTION_NAME
}

// Ensure there is a data type for every metadata column
if (getColumnsDWColumns.size() != getTypesDWColumns.size()) {
MessageFormat form = new MessageFormat(
SQLServerException.getErrString("R_colCountNotMatchColTypeCount"));
Object[] msgArgs = {getColumnsDWColumns.size(), getTypesDWColumns.size()};
throw new IllegalArgumentException(form.format(msgArgs));
}
} finally {
LOCK.unlock();
}
@@ -744,7 +800,7 @@ public java.sql.ResultSet getColumns(String catalog, String schema, String table
if (!isFirstRow) {
azureDwSelectBuilder.append(" UNION ALL ");
}
azureDwSelectBuilder.append(generateAzureDWSelect(rs, getColumnsDWColumns));
azureDwSelectBuilder.append(generateAzureDWSelect(rs, getColumnsDWColumns, getTypesDWColumns));
isFirstRow = false;
}

@@ -780,28 +836,41 @@ public java.sql.ResultSet getColumns(String catalog, String schema, String table
}
}

private String generateAzureDWSelect(ResultSet rs, Map<Integer, String> columns) throws SQLException {
private String generateAzureDWSelect(ResultSet rs, Map<Integer, String> columns,
Map<Integer, String> types) throws SQLException {
StringBuilder sb = new StringBuilder("SELECT ");

for (Entry<Integer, String> p : columns.entrySet()) {
String dataType = types.get(p.getKey());

// Verify there is a valid column entry in the Data Type lookup map
if (dataType == null) {
MessageFormat form = new MessageFormat(SQLServerException.getErrString("R_invalidArgument"));
Object[] msgArgs = {p.getKey()};
throw new SQLServerException(null, form.format(msgArgs), null, 0, true);
}

sb.append("CAST(");
if (p.getKey() < 0) {
sb.append("NULL");
sb.append("NULL AS " + dataType);
} else {
Object o = rs.getObject(p.getKey());
if (null == o) {
sb.append("NULL");
sb.append("NULL AS " + dataType);
} else if (o instanceof Number) {
if (IS_AUTOINCREMENT.equalsIgnoreCase(p.getValue())
|| IS_GENERATEDCOLUMN.equalsIgnoreCase(p.getValue())) {
sb.append("'").append(Util.escapeSingleQuotes(Util.zeroOneToYesNo(((Number) o).intValue())))
.append("'");
.append("' AS ").append(dataType);
} else {
sb.append(o.toString());
sb.append(o.toString()).append(" AS ").append(dataType);
}
} else {
sb.append("'").append(Util.escapeSingleQuotes(o.toString())).append("'");
sb.append("'").append(Util.escapeSingleQuotes(o.toString())).append("' AS ").append(dataType)
.append("(").append(Integer.toString(o.toString().length())).append(")");
}
}
sb.append(" AS ").append(p.getValue()).append(",");
sb.append(") AS ").append(p.getValue()).append(",");
}
sb.setLength(sb.length() - 1);
return sb.toString();
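With the new type map in place, each row that generateAzureDWSelect synthesizes for the Azure DW fallback is emitted as a SELECT of CAST expressions, so the UNION ALL result set carries explicit column types instead of letting every constant default to a string. Roughly (the values, lengths, and column subset below are illustrative only, not produced by the commit), a generated row now looks like this:

// Illustrative fragment of one synthesized row; real rows include every metadata column.
String exampleRow = "SELECT "
        + "CAST('dbo' AS NVARCHAR(3)) AS TABLE_SCHEM, "   // string value: length appended
        + "CAST(12 AS INTEGER) AS DATA_TYPE, "            // numeric value: plain CAST
        + "CAST('NO' AS VARCHAR) AS IS_AUTOINCREMENT, "   // 0/1 flags rendered as 'NO'/'YES'
        + "CAST(NULL AS VARCHAR) AS SCOPE_CATALOG";       // columns SQL Server does not return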
@@ -545,7 +545,8 @@ protected Object[][] getContents() {
{"R_ManagedIdentityTokenAcquisitionFail", "Failed to acquire managed identity token. Request for the token succeeded, but no token was returned. The token is null."},
{"R_AmbiguousRowUpdate", "Failed to execute updateRow(). The update is attempting an ambiguous update on tables \"{0}\" and \"{1}\". Ensure all columns being updated prior to the updateRow() call belong to the same table."},
{"R_InvalidSqlQuery", "Invalid SQL Query: {0}"},
{"R_InvalidScale", "Scale of input value is larger than the maximum allowed by SQL Server."}
{"R_InvalidScale", "Scale of input value is larger than the maximum allowed by SQL Server."},
{"R_colCountNotMatchColTypeCount", "Number of provided columns {0} does not match the column data types definition {1}."},
};
}
// @formatter:on
3 changes: 2 additions & 1 deletion src/test/java/com/microsoft/sqlserver/jdbc/TestResource.java
@@ -212,5 +212,6 @@ protected Object[][] getContents() {
{"R_failedFedauth", "Failed to acquire fedauth token: "},
{"R_noLoginModulesConfiguredForJdbcDriver",
"javax.security.auth.login.LoginException (No LoginModules configured for SQLJDBCDriver)"},
{"R_unexpectedThreadCount", "Thread count is higher than expected."}};
{"R_unexpectedThreadCount", "Thread count is higher than expected."},
{"R_expectedClassDoesNotMatchActualClass", "Expected column class {0} does not match actual column class {1} for column {2}."}};
}
@@ -63,6 +63,38 @@ public class DatabaseMetaDataTest extends AbstractTest {
private static final String functionName = RandomUtil.getIdentifier("DBMetadataFunction");
private static Map<Integer, String> getColumnsDWColumns = null;
private static Map<Integer, String> getImportedKeysDWColumns = null;
private static final String TABLE_CAT = "TABLE_CAT";
private static final String TABLE_SCHEM = "TABLE_SCHEM";
private static final String TABLE_NAME = "TABLE_NAME";
private static final String COLUMN_NAME = "COLUMN_NAME";
private static final String DATA_TYPE = "DATA_TYPE";
private static final String TYPE_NAME = "TYPE_NAME";
private static final String COLUMN_SIZE = "COLUMN_SIZE";
private static final String BUFFER_LENGTH = "BUFFER_LENGTH";
private static final String DECIMAL_DIGITS = "DECIMAL_DIGITS";
private static final String NUM_PREC_RADIX = "NUM_PREC_RADIX";
private static final String NULLABLE = "NULLABLE";
private static final String REMARKS = "REMARKS";
private static final String COLUMN_DEF = "COLUMN_DEF";
private static final String SQL_DATA_TYPE = "SQL_DATA_TYPE";
private static final String SQL_DATETIME_SUB = "SQL_DATETIME_SUB";
private static final String CHAR_OCTET_LENGTH = "CHAR_OCTET_LENGTH";
private static final String ORDINAL_POSITION = "ORDINAL_POSITION";
private static final String IS_NULLABLE = "IS_NULLABLE";
private static final String SCOPE_CATALOG = "SCOPE_CATALOG";
private static final String SCOPE_SCHEMA = "SCOPE_SCHEMA";
private static final String SCOPE_TABLE = "SCOPE_TABLE";
private static final String SOURCE_DATA_TYPE = "SOURCE_DATA_TYPE";
private static final String IS_AUTOINCREMENT = "IS_AUTOINCREMENT";
private static final String IS_GENERATEDCOLUMN = "IS_GENERATEDCOLUMN";
private static final String SS_IS_SPARSE = "SS_IS_SPARSE";
private static final String SS_IS_COLUMN_SET = "SS_IS_COLUMN_SET";
private static final String SS_UDT_CATALOG_NAME = "SS_UDT_CATALOG_NAME";
private static final String SS_UDT_SCHEMA_NAME = "SS_UDT_SCHEMA_NAME";
private static final String SS_UDT_ASSEMBLY_TYPE_NAME = "SS_UDT_ASSEMBLY_TYPE_NAME";
private static final String SS_XML_SCHEMACOLLECTION_CATALOG_NAME = "SS_XML_SCHEMACOLLECTION_CATALOG_NAME";
private static final String SS_XML_SCHEMACOLLECTION_SCHEMA_NAME = "SS_XML_SCHEMACOLLECTION_SCHEMA_NAME";
private static final String SS_XML_SCHEMACOLLECTION_NAME = "SS_XML_SCHEMACOLLECTION_NAME";

/**
* Verify DatabaseMetaData#isWrapperFor and DatabaseMetaData#unwrap.
@@ -887,6 +919,78 @@ public void testGetImportedKeysDW() throws SQLException {
}
}

/**
* Validates that the column metadata returned by getColumns() uses the data types defined by the JDBC spec.
* Refer to the <a href="https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html#getColumns-java.lang.String-java.lang.String-java.lang.String-java.lang.String-">DatabaseMetaData#getColumns specification</a>.
*
* @throws SQLException
*/
@Test
public void testValidateColumnMetadata() throws SQLException {
Map<String, Class<?>> getColumnMetaDataClass = new LinkedHashMap<>();

getColumnMetaDataClass.put(TABLE_CAT, String.class);
getColumnMetaDataClass.put(TABLE_SCHEM, String.class);
getColumnMetaDataClass.put(TABLE_NAME, String.class);
getColumnMetaDataClass.put(COLUMN_NAME, String.class);
getColumnMetaDataClass.put(DATA_TYPE, Integer.class);
getColumnMetaDataClass.put(TYPE_NAME, String.class);
getColumnMetaDataClass.put(COLUMN_SIZE, Integer.class);
getColumnMetaDataClass.put(BUFFER_LENGTH, Integer.class); // Not used
getColumnMetaDataClass.put(DECIMAL_DIGITS, Integer.class);
getColumnMetaDataClass.put(NUM_PREC_RADIX, Integer.class);
getColumnMetaDataClass.put(NULLABLE, Integer.class);
getColumnMetaDataClass.put(REMARKS, String.class);
getColumnMetaDataClass.put(COLUMN_DEF, String.class);
getColumnMetaDataClass.put(SQL_DATA_TYPE, Integer.class);
getColumnMetaDataClass.put(SQL_DATETIME_SUB, Integer.class);
getColumnMetaDataClass.put(CHAR_OCTET_LENGTH, Integer.class);
getColumnMetaDataClass.put(ORDINAL_POSITION, Integer.class);
getColumnMetaDataClass.put(IS_NULLABLE, String.class);
getColumnMetaDataClass.put(SCOPE_CATALOG, String.class);
getColumnMetaDataClass.put(SCOPE_SCHEMA, String.class);
getColumnMetaDataClass.put(SCOPE_TABLE, String.class);
getColumnMetaDataClass.put(SOURCE_DATA_TYPE, Short.class);
getColumnMetaDataClass.put(IS_AUTOINCREMENT, String.class);
getColumnMetaDataClass.put(IS_GENERATEDCOLUMN, String.class);
getColumnMetaDataClass.put(SS_IS_SPARSE, Short.class);
getColumnMetaDataClass.put(SS_IS_COLUMN_SET, Short.class);
getColumnMetaDataClass.put(SS_UDT_CATALOG_NAME, String.class);
getColumnMetaDataClass.put(SS_UDT_SCHEMA_NAME, String.class);
getColumnMetaDataClass.put(SS_UDT_ASSEMBLY_TYPE_NAME, String.class);
getColumnMetaDataClass.put(SS_XML_SCHEMACOLLECTION_CATALOG_NAME, String.class);
getColumnMetaDataClass.put(SS_XML_SCHEMACOLLECTION_SCHEMA_NAME, String.class);
getColumnMetaDataClass.put(SS_XML_SCHEMACOLLECTION_NAME, String.class);

try (Connection conn = getConnection()) {
ResultSetMetaData metadata = conn.getMetaData().getColumns(null, null, tableName, null).getMetaData();

// Ensure that there is an expected class for every column in the metadata result set
assertEquals(metadata.getColumnCount(), getColumnMetaDataClass.size());

for (int i = 1; i <= metadata.getColumnCount(); i++) {
String columnLabel = metadata.getColumnLabel(i);
String columnClassName = metadata.getColumnClassName(i);
Class<?> expectedClass = getColumnMetaDataClass.get(columnLabel);

// Ensure the metadata column is in the metadata column class map
if (expectedClass == null) {
MessageFormat form1 = new MessageFormat(TestResource.getResource("R_objectNullOrEmpty"));
Object[] msgArgs1 = {"expected metadata column class for column " + columnLabel};
fail(form1.format(msgArgs1));
}

// Ensure the actual and expected column metadata types match
if (!columnClassName.equals(expectedClass.getName())) {
MessageFormat form1 = new MessageFormat(
TestResource.getResource("R_expectedClassDoesNotMatchActualClass"));
Object[] msgArgs1 = {expectedClass.getName(), columnClassName, columnLabel};
fail(form1.format(msgArgs1));
}
}
}
}

@BeforeAll
public static void setupTable() throws Exception {
setConnection();
