Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
### Updated

### Fixed
- Fix `PreparedStatement.getMetaData()` crash (`IllegalArgumentException`) for SQL type aliases (VARCHAR, INTEGER, NUMERIC, DEC, REAL, NVARCHAR, NCHAR) returned by DESCRIBE QUERY

---
*Note: When making changes, please add your change under the appropriate section
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -449,18 +449,12 @@ public DatabricksResultSetMetaData(
String columnName = columnNames.get(i);
String columnTypeText = columnDataTypes.get(i);

ColumnInfoTypeName columnTypeName;
if (columnTypeText.equalsIgnoreCase(TIMESTAMP_NTZ)) {
columnTypeName = ColumnInfoTypeName.TIMESTAMP;
String baseTypeName = metadataResultSetBuilder.stripBaseTypeName(columnTypeText);
ColumnInfoTypeName columnTypeName = DatabricksTypeUtil.getColumnInfoType(baseTypeName);

// Normalize columnTypeText for types that have a canonical display name
if (baseTypeName.equals(TIMESTAMP_NTZ)) {
columnTypeText = TIMESTAMP;
} else if (columnTypeText.equalsIgnoreCase(VARIANT)) {
columnTypeName = ColumnInfoTypeName.STRING;
columnTypeText = VARIANT;
} else if (columnTypeText.toUpperCase().startsWith(INTERVAL)) {
columnTypeName = ColumnInfoTypeName.INTERVAL;
} else {
columnTypeName =
ColumnInfoTypeName.valueOf(metadataResultSetBuilder.stripBaseTypeName(columnTypeText));
}

int columnType = DatabricksTypeUtil.getColumnType(columnTypeName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,13 @@ public class DatabricksTypeUtil {
public static final String GEOMETRY = "GEOMETRY";
public static final String GEOGRAPHY = "GEOGRAPHY";
public static final String INTERVAL = "INTERVAL";
public static final String VARCHAR = "VARCHAR";
public static final String NVARCHAR = "NVARCHAR";
public static final String NCHAR = "NCHAR";
public static final String INTEGER = "INTEGER";
public static final String NUMERIC = "NUMERIC";
public static final String DEC = "DEC";
public static final String REAL = "REAL";
public static final String GEOMETRY_CLASS_NAME = "com.databricks.jdbc.api.IGeometry";
public static final String GEOGRAPHY_CLASS_NAME = "com.databricks.jdbc.api.IGeography";
public static final String MEASURE = "measure";
Expand All @@ -73,13 +80,22 @@ public class DatabricksTypeUtil {
ColumnInfoTypeName.BYTE,
ColumnInfoTypeName.BIGINT));

// only used for PreparedStatement
/**
* Maps a SQL type name (as returned by DESCRIBE QUERY or schema metadata) to the corresponding
* {@link ColumnInfoTypeName}. Handles canonical names, Databricks aliases, and standard SQL
* aliases (VARCHAR, INTEGER, NUMERIC, DEC, REAL, NVARCHAR, NCHAR). Returns {@link
* ColumnInfoTypeName#USER_DEFINED_TYPE} for unrecognized types.
*/
public static ColumnInfoTypeName getColumnInfoType(String typeName) {
switch (typeName) {
case DatabricksTypeUtil.CHAR:
case DatabricksTypeUtil.NCHAR:
case DatabricksTypeUtil.STRING:
return ColumnInfoTypeName.STRING; // both char, string passed as STRING param
case DatabricksTypeUtil.VARCHAR:
case DatabricksTypeUtil.NVARCHAR:
return ColumnInfoTypeName.STRING;
case DatabricksTypeUtil.DATE:
return ColumnInfoTypeName.DATE;
case DatabricksTypeUtil.TIMESTAMP:
case DatabricksTypeUtil.TIMESTAMP_NTZ:
return ColumnInfoTypeName.TIMESTAMP;
Expand All @@ -91,11 +107,13 @@ public static ColumnInfoTypeName getColumnInfoType(String typeName) {
case DatabricksTypeUtil.BYTE:
return ColumnInfoTypeName.BYTE;
case DatabricksTypeUtil.INT:
case DatabricksTypeUtil.INTEGER:
return ColumnInfoTypeName.INT;
case DatabricksTypeUtil.BIGINT:
case DatabricksTypeUtil.LONG:
return ColumnInfoTypeName.LONG;
case DatabricksTypeUtil.FLOAT:
case DatabricksTypeUtil.REAL:
return ColumnInfoTypeName.FLOAT;
case DatabricksTypeUtil.DOUBLE:
return ColumnInfoTypeName.DOUBLE;
Expand All @@ -104,6 +122,8 @@ public static ColumnInfoTypeName getColumnInfoType(String typeName) {
case DatabricksTypeUtil.BOOLEAN:
return ColumnInfoTypeName.BOOLEAN;
case DatabricksTypeUtil.DECIMAL:
case DatabricksTypeUtil.NUMERIC:
case DatabricksTypeUtil.DEC:
return ColumnInfoTypeName.DECIMAL;
case DatabricksTypeUtil.STRUCT:
return ColumnInfoTypeName.STRUCT;
Expand All @@ -116,6 +136,16 @@ public static ColumnInfoTypeName getColumnInfoType(String typeName) {
return ColumnInfoTypeName.MAP;
case DatabricksTypeUtil.INTERVAL:
return ColumnInfoTypeName.INTERVAL;
case DatabricksTypeUtil.VARIANT:
return ColumnInfoTypeName.STRING;
case DatabricksTypeUtil.GEOMETRY:
return ColumnInfoTypeName.GEOMETRY;
case DatabricksTypeUtil.GEOGRAPHY:
return ColumnInfoTypeName.GEOGRAPHY;
}
// Handle INTERVAL sub-types like "INTERVAL DAY TO SECOND"
if (typeName.startsWith(DatabricksTypeUtil.INTERVAL)) {
return ColumnInfoTypeName.INTERVAL;
}
return ColumnInfoTypeName.USER_DEFINED_TYPE;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,13 @@ public void testDatabricksResultSetMetaDataInitialization_DescribeQuery() throws
{"col_array", "array<int>", "ARRAY", Types.ARRAY, 255, 0},
{"col_map", "map<string,string>", "MAP", Types.VARCHAR, 255, 0},
{"col_variant", "variant", "VARIANT", Types.VARCHAR, 255, 0},
{"col_geography", "geography", "GEOGRAPHY", Types.OTHER, 255, 0},
{"col_geometry", "geometry", "GEOMETRY", Types.OTHER, 255, 0},
{"col_bigint", "bigint", "BIGINT", Types.BIGINT, 19, 0},
{"col_smallint", "smallint", "SMALLINT", Types.SMALLINT, 5, 0},
{"col_tinyint", "tinyint", "TINYINT", Types.TINYINT, 3, 0},
{"col_varchar", "varchar", "VARCHAR", Types.VARCHAR, 255, 0},
{"col_integer", "integer", "INTEGER", Types.INTEGER, 10, 0},
{"col_interval", "interval", "INTERVAL", Types.VARCHAR, 255, 0},
{"col_interval_second", "interval second", "INTERVAL SECOND", Types.VARCHAR, 255, 0},
{"col_interval_minute", "interval minute", "INTERVAL MINUTE", Types.VARCHAR, 255, 0},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -263,29 +263,43 @@ void testInferDatabricksType() {
@ParameterizedTest
@CsvSource({
"STRING, STRING",
"DATE, TIMESTAMP",
"DATE, DATE",
"TIMESTAMP, TIMESTAMP",
"TIMESTAMP_NTZ, TIMESTAMP",
"SHORT, SHORT",
"SMALLINT, SHORT",
"TINYINT, TINYINT",
"BYTE, BYTE",
"INT, INT",
"INTEGER, INT",
"BIGINT, LONG",
"LONG, LONG",
"FLOAT, FLOAT",
"REAL, FLOAT",
"DOUBLE, DOUBLE",
"BINARY, BINARY",
"BOOLEAN, BOOLEAN",
"DECIMAL, DECIMAL",
"NUMERIC, DECIMAL",
"DEC, DECIMAL",
"STRUCT, STRUCT",
"ARRAY, ARRAY",
"VOID, NULL",
"NULL, NULL",
"MAP, MAP",
"CHAR, STRING",
"VARCHAR, STRING",
"NVARCHAR, STRING",
"NCHAR, STRING",
"INTERVAL, INTERVAL",
"UNKNOWN, USER_DEFINED_TYPE"
"VARIANT, STRING",
"GEOMETRY, GEOMETRY",
"GEOGRAPHY, GEOGRAPHY",
"UNKNOWN, USER_DEFINED_TYPE",
// Lowercase inputs fall through to USER_DEFINED_TYPE (getColumnInfoType expects uppercase)
"string, USER_DEFINED_TYPE",
"int, USER_DEFINED_TYPE",
"varchar, USER_DEFINED_TYPE"
})
public void testGetColumnInfoType(String inputTypeName, String expectedTypeName) {
assertEquals(
Expand All @@ -296,6 +310,46 @@ public void testGetColumnInfoType(String inputTypeName, String expectedTypeName)
inputTypeName, expectedTypeName, DatabricksTypeUtil.getColumnInfoType(inputTypeName)));
}

@ParameterizedTest
@CsvSource({
  "INTERVAL DAY TO SECOND, INTERVAL",
  "INTERVAL YEAR TO MONTH, INTERVAL",
  "INTERVAL DAY TO HOUR, INTERVAL",
  "INTERVAL DAY TO MINUTE, INTERVAL",
  "INTERVAL HOUR TO MINUTE, INTERVAL",
  "INTERVAL HOUR TO SECOND, INTERVAL",
  "INTERVAL MINUTE TO SECOND, INTERVAL"
})
public void testGetColumnInfoTypeIntervalSubTypes(String inputTypeName, String expectedTypeName) {
  // Qualified INTERVAL forms (e.g. "INTERVAL DAY TO SECOND") should all resolve to the
  // INTERVAL enum constant via the startsWith fallback in getColumnInfoType.
  ColumnInfoTypeName actual = DatabricksTypeUtil.getColumnInfoType(inputTypeName);
  ColumnInfoTypeName expected = ColumnInfoTypeName.valueOf(expectedTypeName);
  String failureDetail =
      String.format(
          "inputType : %s, output should have been %s. But was %s",
          inputTypeName, expectedTypeName, DatabricksTypeUtil.getColumnInfoType(inputTypeName));
  assertEquals(expected, actual, failureDetail);
}

@ParameterizedTest
@CsvSource({
  "VARIANT, STRING, " + Types.VARCHAR,
  "TIMESTAMP, TIMESTAMP, " + Types.TIMESTAMP,
  "TIMESTAMP_NTZ, TIMESTAMP, " + Types.TIMESTAMP,
  "GEOGRAPHY, GEOGRAPHY, " + Types.OTHER,
  "GEOMETRY, GEOMETRY, " + Types.OTHER,
})
public void testGetColumnInfoTypeToJdbcType(
    String inputTypeName, String expectedEnumName, int expectedJdbcType) {
  // End-to-end mapping check: SQL type text -> ColumnInfoTypeName -> JDBC type code.
  ColumnInfoTypeName resolved = DatabricksTypeUtil.getColumnInfoType(inputTypeName);
  ColumnInfoTypeName expectedEnum = ColumnInfoTypeName.valueOf(expectedEnumName);
  assertEquals(expectedEnum, resolved, "Enum mapping mismatch for " + inputTypeName);

  int actualJdbcType = DatabricksTypeUtil.getColumnType(resolved);
  assertEquals(expectedJdbcType, actualJdbcType, "JDBC type code mismatch for " + inputTypeName);
}

@Test
void testGetScale() {
assertEquals(0, DatabricksTypeUtil.getScale(Types.DOUBLE));
Expand Down