diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index dc1c4e294..6f9b31715 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -18,6 +18,7 @@
 - Fixed `getColumnClassName()` returning null for VARIANT columns in SEA mode by adding VARIANT to the type system.
 - Fixed `getColumns()` returning `DATA_TYPE=0` (NULL) for GEOMETRY/GEOGRAPHY columns in Thrift mode. Now returns `Types.VARCHAR` (12) when geospatial is disabled and `Types.OTHER` (1111) when enabled, consistent with SEA mode.
 - Fixed `getCrossReference()` returning 0 rows when parent args are passed in uppercase. The client-side filter used case-sensitive comparison against server-returned lowercase names.
+- Fixed `getSchemas()` in SEA mode throwing a `DatabricksException` when the catalog parameter is a wildcard pattern (e.g., `%`, `my_%`) or a nonexistent literal. Wildcard patterns are now expanded client-side by matching against the catalog list, and literal nonexistent catalogs return an empty result set per the JDBC spec.
 
 ---
 
 *Note: When making changes, please add your change under the appropriate section
diff --git a/src/main/java/com/databricks/jdbc/common/util/WildcardUtil.java b/src/main/java/com/databricks/jdbc/common/util/WildcardUtil.java
index 140e1cab4..f097b3d6c 100644
--- a/src/main/java/com/databricks/jdbc/common/util/WildcardUtil.java
+++ b/src/main/java/com/databricks/jdbc/common/util/WildcardUtil.java
@@ -92,6 +92,42 @@ public static String stripJdbcEscapes(String value) {
     return builder.toString();
   }
 
+  /**
+   * Returns true if the string is a JDBC search pattern containing unescaped wildcard characters
+   * ({@code %} or {@code _}). Escaped wildcards ({@code \%}, {@code \_}) are treated as literals
+   * and do not cause this method to return true.
+   *
+   * @param s the string to check
+   * @return true if the string contains at least one unescaped {@code %} or {@code _}
+   */
+  public static boolean isJdbcPattern(String s) {
+    if (s == null) {
+      return false;
+    }
+    for (int i = 0; i < s.length(); i++) {
+      char ch = s.charAt(i);
+      if (ch == '\\' && i + 1 < s.length()) {
+        i++; // skip the escaped character
+        continue;
+      }
+      if (ch == '%' || ch == '_') {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Returns true if the JDBC catalog pattern matches all catalogs — that is, it is {@code null}
+   * or the bare {@code %} pattern, the only two match-all forms this method recognizes.
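+   *
+   * <p>Illustrative examples: {@code isMatchAllCatalogPattern(null)} and {@code
+   * isMatchAllCatalogPattern("%")} return {@code true}, while {@code
+   * isMatchAllCatalogPattern("main")} and {@code isMatchAllCatalogPattern("main%")} return
+   * {@code false} (a partial pattern is handled by client-side expansion instead).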
+   *
+   * @param catalog the catalog pattern to check
+   * @return true if the pattern matches all catalogs
+   */
+  public static boolean isMatchAllCatalogPattern(String catalog) {
+    return catalog == null || "%".equals(catalog);
+  }
+
   public static String jdbcPatternToHive(String pattern) {
     if (pattern == null) {
       return null;
diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java
index a4ee7f889..ffe50fc4c 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java
@@ -24,6 +24,7 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -97,6 +98,26 @@ public DatabricksResultSet listSchemas(
           com.databricks.jdbc.common.CommandName.LIST_SCHEMAS);
     }
 
+    // Per JDBC spec, a catalog value of "%" means "match any catalog name" — treat it
+    // the same as null (list schemas across all catalogs). This prevents the driver from
+    // generating invalid SQL such as SHOW SCHEMAS IN `%` which would throw a server error.
+    if (WildcardUtil.isMatchAllCatalogPattern(catalog)) {
+      LOGGER.debug(
+          "Catalog pattern '{}' matches all catalogs; listing schemas across all catalogs.",
+          catalog);
+      catalog = null;
+    }
+
+    // If the catalog is a JDBC pattern (contains unescaped % or _), expand it client-side
+    // by listing all catalogs and filtering to those that match the pattern, then fetching
+    // schemas per matching catalog. This avoids passing a pattern as a SQL identifier.
+    if (WildcardUtil.isJdbcPattern(catalog)) {
+      LOGGER.debug(
+          "Catalog '{}' is a JDBC pattern; expanding client-side across matching catalogs.",
+          catalog);
+      return fetchSchemasMatchingCatalogPattern(session, catalog, schemaNamePattern);
+    }
+
     CommandBuilder commandBuilder =
         new CommandBuilder(catalog, session).setSchemaPattern(schemaNamePattern);
     String SQL = commandBuilder.getSQLString(CommandName.LIST_SCHEMAS);
@@ -625,6 +646,125 @@ private DatabricksResultSet fetchSchemasAcrossCatalogs(
         com.databricks.jdbc.common.CommandName.LIST_SCHEMAS);
   }
 
+  /**
+   * Fetches schemas from all catalogs whose names match the given JDBC catalog pattern. The
+   * pattern may contain unescaped {@code %} (matches any sequence of characters) and {@code _}
+   * (matches any single character) wildcards, per the JDBC spec. Catalogs not matching the
+   * pattern are skipped, and server-side errors for individual catalogs are swallowed (logged as
+   * warnings) so that a single unreachable catalog does not abort the entire request.
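+   *
+   * <p>Illustrative example (hypothetical catalog names): given catalogs {@code main}, {@code
+   * main_backup}, and {@code samples}, the pattern {@code main%} fetches schemas from {@code
+   * main} and {@code main_backup} only, and a failure listing {@code main_backup} would be
+   * logged and skipped rather than failing the whole call.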
+   *
+   * @param session the current session
+   * @param catalogPattern a JDBC search pattern for catalog names (must not be null)
+   * @param schemaNamePattern a JDBC search pattern for schema names, or null for all schemas
+   * @return a result set containing TABLE_SCHEM and TABLE_CATALOG columns
+   */
+  private DatabricksResultSet fetchSchemasMatchingCatalogPattern(
+      IDatabricksSession session, String catalogPattern, String schemaNamePattern)
+      throws SQLException {
+    List<String> matchingCatalogs = new ArrayList<>();
+    try (ResultSet catalogs = session.getDatabricksMetadataClient().listCatalogs(session)) {
+      while (catalogs.next()) {
+        String c = catalogs.getString(1);
+        if (c != null && !c.isEmpty() && jdbcPatternMatches(catalogPattern, c)) {
+          matchingCatalogs.add(c);
+        }
+      }
+    }
+
+    // Process matching catalogs in parallel, gathering schema information
+    List<List<String>> schemaRows =
+        JdbcThreadUtils.parallelFlatMap(
+            matchingCatalogs,
+            session.getConnectionContext(),
+            DEFAULT_MAX_THREADS_METADATA_FETCH,
+            TASK_TIMEOUT_METADATA_FETCH_SEC,
+            c -> {
+              List<List<String>> rows = new ArrayList<>();
+              try (ResultSet catalogSchemas =
+                  session
+                      .getDatabricksMetadataClient()
+                      .listSchemas(session, c, schemaNamePattern)) {
+                while (catalogSchemas.next()) {
+                  List<String> schemaRow = new ArrayList<>();
+                  schemaRow.add(catalogSchemas.getString(1)); // TABLE_SCHEM
+                  schemaRow.add(catalogSchemas.getString(2)); // TABLE_CATALOG
+                  rows.add(schemaRow);
+                }
+              } catch (SQLException e) {
+                LOGGER.warn("Error fetching schemas for catalog '{}': {}", c, e.getMessage());
+              }
+              return rows;
+            },
+            getOrCreateMetadataThreadPool());
+
+    return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
+        SCHEMA_COLUMNS,
+        schemaRows,
+        METADATA_STATEMENT_ID,
+        com.databricks.jdbc.common.CommandName.LIST_SCHEMAS);
+  }
+
+  /**
+   * Returns true if the given catalog name matches the JDBC search pattern. The pattern follows
+   * JDBC wildcard rules: {@code %} matches any sequence of characters, {@code _} matches any
+   * single character, and {@code \} escapes the next character. Matching is case-insensitive to
+   * align with Unity Catalog's case-folding behaviour.
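+   *
+   * <p>Illustrative examples: {@code jdbcPatternMatches("m%", "Main")} and {@code
+   * jdbcPatternMatches("m_in", "MAIN")} return {@code true}; {@code
+   * jdbcPatternMatches("my\\_cat", "mycat")} returns {@code false}, since the escaped underscore
+   * matches only a literal {@code _}.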
+   *
+   * @param pattern the JDBC search pattern (must not be null)
+   * @param name the catalog name to match against
+   * @return true if {@code name} matches {@code pattern}
+   */
+  static boolean jdbcPatternMatches(String pattern, String name) {
+    return jdbcPatternMatchesRecursive(pattern, 0, name.toLowerCase(Locale.ROOT), 0);
+  }
+
+  private static boolean jdbcPatternMatchesRecursive(
+      String pattern, int pi, String name, int ni) {
+    while (pi < pattern.length()) {
+      char pc = pattern.charAt(pi);
+      if (pc == '\\' && pi + 1 < pattern.length()) {
+        // Escaped literal — must match exactly (case-insensitive)
+        char literal = Character.toLowerCase(pattern.charAt(pi + 1));
+        if (ni >= name.length() || name.charAt(ni) != literal) {
+          return false;
+        }
+        pi += 2;
+        ni++;
+      } else if (pc == '%') {
+        // Skip consecutive '%' characters
+        while (pi < pattern.length() && pattern.charAt(pi) == '%') {
+          pi++;
+        }
+        // '%' at end of pattern matches everything remaining
+        if (pi == pattern.length()) {
+          return true;
+        }
+        // Try matching the rest of the pattern at every position in name
+        for (int i = ni; i <= name.length(); i++) {
+          if (jdbcPatternMatchesRecursive(pattern, pi, name, i)) {
+            return true;
+          }
+        }
+        return false;
+      } else if (pc == '_') {
+        // Matches any single character
+        if (ni >= name.length()) {
+          return false;
+        }
+        pi++;
+        ni++;
+      } else {
+        // Literal character (case-insensitive)
+        if (ni >= name.length() || Character.toLowerCase(pc) != name.charAt(ni)) {
+          return false;
+        }
+        pi++;
+        ni++;
+      }
+    }
+    return ni == name.length();
+  }
+
   private DatabricksResultSet fetchColumnsAcrossCatalogs(
       IDatabricksSession session,
       String schemaNamePattern,
diff --git a/src/test/java/com/databricks/jdbc/common/util/WildcardUtilTest.java b/src/test/java/com/databricks/jdbc/common/util/WildcardUtilTest.java
index bcca0b422..165150e42 100644
--- a/src/test/java/com/databricks/jdbc/common/util/WildcardUtilTest.java
+++ b/src/test/java/com/databricks/jdbc/common/util/WildcardUtilTest.java
@@ -114,4 +114,42 @@ private static Stream<Arguments> stripJdbcEscapesPatterns() {
   void testStripJdbcEscapes(String input, String expected, String errorMessage) {
     assertEquals(expected, WildcardUtil.stripJdbcEscapes(input), errorMessage);
   }
+
+  private static Stream<Arguments> isJdbcPatternCases() {
+    return Stream.of(
+        Arguments.of(null, false, "null is not a pattern"),
+        Arguments.of("", false, "empty string is not a pattern"),
+        Arguments.of("simple", false, "plain literal is not a pattern"),
+        Arguments.of("%", true, "bare % is a pattern"),
+        Arguments.of("_", true, "bare _ is a pattern"),
+        Arguments.of("cat%", true, "trailing % is a pattern"),
+        Arguments.of("%log%", true, "leading and trailing % is a pattern"),
+        Arguments.of("my_cat", true, "underscore is a pattern"),
+        Arguments.of("\\%", false, "escaped % is NOT a pattern"),
+        Arguments.of("\\_", false, "escaped _ is NOT a pattern"),
+        Arguments.of("cat\\_main", false, "escaped underscore in literal is not a pattern"),
+        Arguments.of("cat\\_main%", true, "escaped underscore but bare % makes it a pattern"));
+  }
+
+  @ParameterizedTest
+  @MethodSource("isJdbcPatternCases")
+  void testIsJdbcPattern(String input, boolean expected, String message) {
+    assertEquals(expected, WildcardUtil.isJdbcPattern(input), message);
+  }
+
+  private static Stream<Arguments> isMatchAllCatalogPatternCases() {
+    return Stream.of(
+        Arguments.of(null, true, "null matches all"),
+        Arguments.of("%", true, "% matches all"),
+        Arguments.of("", false, "empty string does not match all"),
+        Arguments.of("main", false, "literal catalog does not match all"),
"literal catalog does not match all"), + Arguments.of("main%", false, "partial pattern does not match all"), + Arguments.of("%main%", false, "partial pattern does not match all")); + } + + @ParameterizedTest + @MethodSource("isMatchAllCatalogPatternCases") + void testIsMatchAllCatalogPattern(String input, boolean expected, String message) { + assertEquals(expected, WildcardUtil.isMatchAllCatalogPattern(input), message); + } } diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java index c36408a2a..c96c72578 100644 --- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java +++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java @@ -22,6 +22,7 @@ import com.databricks.jdbc.common.IDatabricksComputeResource; import com.databricks.jdbc.common.MetadataOperationType; import com.databricks.jdbc.common.StatementType; +import com.databricks.jdbc.dbclient.IDatabricksMetadataClient; import com.databricks.jdbc.dbclient.impl.common.CrossReferenceKeysDatabricksResultSetAdapter; import com.databricks.jdbc.dbclient.impl.common.ImportedKeysDatabricksResultSetAdapter; import com.databricks.jdbc.exception.DatabricksSQLException; @@ -1575,4 +1576,283 @@ void testListProcedureColumns( assertEquals( 1, ((DatabricksResultSetMetaData) actualResult.getMetaData()).getTotalRows(), description); } + + // ==================== jdbcPatternMatches tests (PECO-3017) ==================== + + private static Stream jdbcPatternMatchesCases() { + return Stream.of( + // Exact matches + Arguments.of("%", "main", true, "% matches any catalog"), + Arguments.of("%", "hive_metastore", true, "% matches any catalog with underscore"), + Arguments.of("main", "main", true, "exact match"), + Arguments.of("main", "MAIN", true, "case-insensitive exact match"), + Arguments.of("MAIN", "main", true, "case-insensitive (reversed)"), + Arguments.of("main", "other", false, "no match"), + // % wildcard + Arguments.of("m%", "main", true, "prefix % match"), + Arguments.of("m%", "meta", true, "prefix % matches another catalog"), + Arguments.of("m%", "other", false, "prefix % no match"), + Arguments.of("%ain", "main", true, "suffix % match"), + Arguments.of("%ain", "chain", true, "suffix % matches another ending"), + Arguments.of("%ain", "main_ext", false, "suffix % no match"), + Arguments.of("%cat%", "my_catalog", true, "infix % match"), + Arguments.of("%cat%", "catalog", true, "infix % match at start"), + Arguments.of("%cat%", "mycat", true, "infix % match at end"), + Arguments.of("%cat%", "other", false, "infix % no match"), + // _ wildcard + Arguments.of("_ain", "main", true, "_ matches single char"), + Arguments.of("_ain", "rain", true, "_ matches another char"), + Arguments.of("_ain", "in", false, "_ requires exactly one char"), + Arguments.of("m__n", "main", true, "__ matches two chars"), + Arguments.of("m__n", "mn", false, "__ requires two chars"), + // Escaped wildcards + Arguments.of("\\%", "%", true, "escaped % matches literal %"), + Arguments.of("\\%", "main", false, "escaped % does not match normal string"), + Arguments.of("\\_", "_", true, "escaped _ matches literal _"), + Arguments.of("\\_", "a", false, "escaped _ does not match other char"), + Arguments.of("my\\_catalog", "my_catalog", true, "escaped _ in middle matches literal"), + Arguments.of("my\\_catalog", "myzalog", false, "escaped _ does not act as 
wildcard"), + // Edge cases + Arguments.of("", "", true, "empty matches empty"), + Arguments.of("", "main", false, "empty does not match non-empty"), + Arguments.of("main%", "main", true, "trailing % matches empty suffix")); + } + + @ParameterizedTest + @MethodSource("jdbcPatternMatchesCases") + void testJdbcPatternMatches( + String pattern, String name, boolean expected, String description) { + assertEquals( + expected, + DatabricksMetadataQueryClient.jdbcPatternMatches(pattern, name), + description); + } + + // ==================== listSchemas wildcard catalog tests (PECO-3017) ==================== + + /** + * Tests that listSchemas with "%" catalog pattern calls SHOW SCHEMAS IN ALL CATALOGS instead of + * the invalid SHOW SCHEMAS IN `%`. This is the core PECO-3017 fix. + */ + @Test + void testListSchemasWithPercentCatalog_treatsAsMatchAll() throws SQLException { + when(session.getComputeResource()).thenReturn(mockedComputeResource); + IDatabricksConnectionContext mockContext = mock(IDatabricksConnectionContext.class); + when(mockContext.getEnableMultipleCatalogSupport()).thenReturn(true); + when(mockClient.getConnectionContext()).thenReturn(mockContext); + + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + // "%" should be treated as null → SHOW SCHEMAS IN ALL CATALOGS + String expectedSQL = "SHOW SCHEMAS IN ALL CATALOGS"; + when(mockClient.executeStatement( + eq(expectedSQL), + eq(mockedComputeResource), + any(), + eq(StatementType.METADATA), + eq(session), + any(), + eq(MetadataOperationType.GET_SCHEMAS))) + .thenReturn(mockedResultSet); + + when(mockedResultSet.next()).thenReturn(true, false); + when(mockedResultSet.getObject("databaseName")).thenReturn("default"); + doReturn(2).when(mockedMetaData).getColumnCount(); + doReturn(SCHEMA_COLUMN.getResultSetColumnName()).when(mockedMetaData).getColumnName(1); + doReturn(CATALOG_COLUMN.getResultSetColumnName()).when(mockedMetaData).getColumnName(2); + when(mockedResultSet.getMetaData()).thenReturn(mockedMetaData); + when(mockedResultSet.findColumn(CATALOG_RESULT_COLUMN.getResultSetColumnName())) + .thenThrow(DatabricksSQLException.class); + + DatabricksResultSet result = metadataClient.listSchemas(session, "%", null); + + assertNotNull(result); + assertEquals(StatementState.SUCCEEDED, result.getStatementStatus().getState()); + assertEquals(1, ((DatabricksResultSetMetaData) result.getMetaData()).getTotalRows()); + // No call should have been made for SHOW SCHEMAS IN `%` + verify(mockClient, never()) + .executeStatement( + eq("SHOW SCHEMAS IN `%`"), + any(), + any(), + any(), + any(), + any(), + any()); + } + + /** + * Tests that listSchemas with a partial catalog pattern (e.g., "main%") expands client-side + * by listing catalogs and filtering, rather than generating invalid SQL. 
+ */ + @Test + void testListSchemasWithPartialCatalogPattern_expandsClientSide() throws SQLException { + when(session.getComputeResource()).thenReturn(mockedComputeResource); + IDatabricksConnectionContext mockContext = mock(IDatabricksConnectionContext.class); + when(mockContext.getEnableMultipleCatalogSupport()).thenReturn(true); + when(mockClient.getConnectionContext()).thenReturn(mockContext); + + IDatabricksSession mockSessionLocal = mock(IDatabricksSession.class); + IDatabricksMetadataClient mockMetadataClient = mock(IDatabricksMetadataClient.class); + when(mockSessionLocal.getComputeResource()).thenReturn(mockedComputeResource); + when(mockSessionLocal.getDatabricksMetadataClient()).thenReturn(mockMetadataClient); + when(mockSessionLocal.getConnectionContext()).thenReturn(mockContext); + + // The catalog list: "main" matches "main%", "other" does not + DatabricksResultSet catalogResultSet = mock(DatabricksResultSet.class); + when(catalogResultSet.next()).thenReturn(true, true, false); + when(catalogResultSet.getString(1)).thenReturn("main", "other"); + when(mockMetadataClient.listCatalogs(mockSessionLocal)).thenReturn(catalogResultSet); + + // listSchemas for "main" returns one schema + DatabricksResultSet schemaResultSet = mock(DatabricksResultSet.class); + when(schemaResultSet.next()).thenReturn(true, false); + when(schemaResultSet.getString(1)).thenReturn("default"); + when(schemaResultSet.getString(2)).thenReturn("main"); + when(mockMetadataClient.listSchemas(mockSessionLocal, "main", null)) + .thenReturn(schemaResultSet); + + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + DatabricksResultSet result = metadataClient.listSchemas(mockSessionLocal, "main%", null); + + assertNotNull(result); + assertEquals(StatementState.SUCCEEDED, result.getStatementStatus().getState()); + // Only "main" catalog matched the pattern, so 1 schema row expected + assertEquals(1, ((DatabricksResultSetMetaData) result.getMetaData()).getTotalRows()); + // "other" catalog was skipped — no listSchemas call for it + verify(mockMetadataClient, never()).listSchemas(mockSessionLocal, "other", null); + } + + /** + * Tests that listSchemas with an underscore wildcard catalog pattern (e.g., "m_in") expands + * client-side. The "_" wildcard must not be treated as a literal SQL character. 
+ */ + @Test + void testListSchemasWithUnderscoreCatalogPattern_expandsClientSide() throws SQLException { + IDatabricksConnectionContext mockContext = mock(IDatabricksConnectionContext.class); + when(mockContext.getEnableMultipleCatalogSupport()).thenReturn(true); + when(mockClient.getConnectionContext()).thenReturn(mockContext); + + IDatabricksSession mockSessionLocal = mock(IDatabricksSession.class); + IDatabricksMetadataClient mockMetadataClient = mock(IDatabricksMetadataClient.class); + when(mockSessionLocal.getConnectionContext()).thenReturn(mockContext); + when(mockSessionLocal.getDatabricksMetadataClient()).thenReturn(mockMetadataClient); + + // Catalog list: "main" matches "m_in", "meta" does not + DatabricksResultSet catalogResultSet = mock(DatabricksResultSet.class); + when(catalogResultSet.next()).thenReturn(true, true, false); + when(catalogResultSet.getString(1)).thenReturn("main", "meta"); + when(mockMetadataClient.listCatalogs(mockSessionLocal)).thenReturn(catalogResultSet); + + DatabricksResultSet schemaResultSet = mock(DatabricksResultSet.class); + when(schemaResultSet.next()).thenReturn(false); + when(mockMetadataClient.listSchemas(mockSessionLocal, "main", null)) + .thenReturn(schemaResultSet); + + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + DatabricksResultSet result = metadataClient.listSchemas(mockSessionLocal, "m_in", null); + + assertNotNull(result); + // "meta" did not match "m_in" + verify(mockMetadataClient, never()).listSchemas(mockSessionLocal, "meta", null); + } + + /** + * Tests that listSchemas with a nonexistent literal catalog returns an empty result set rather + * than throwing an exception. Per JDBC spec, metadata methods should return empty result sets + * for non-existent objects. + */ + @Test + void testListSchemasWithNonexistentLiteralCatalog_returnsEmpty() throws SQLException { + when(session.getComputeResource()).thenReturn(mockedComputeResource); + IDatabricksConnectionContext mockContext = mock(IDatabricksConnectionContext.class); + when(mockContext.getEnableMultipleCatalogSupport()).thenReturn(true); + when(mockClient.getConnectionContext()).thenReturn(mockContext); + + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + // Server throws object-not-found for a nonexistent catalog + DatabricksSQLException notFound = + new DatabricksSQLException( + "[NO_SUCH_CATALOG_EXCEPTION] Catalog 'nonexistent' not found", "42704"); + when(mockClient.executeStatement( + eq("SHOW SCHEMAS IN `nonexistent`"), + eq(mockedComputeResource), + any(), + eq(StatementType.METADATA), + eq(session), + any(), + eq(MetadataOperationType.GET_SCHEMAS))) + .thenThrow(notFound); + + DatabricksResultSet result = metadataClient.listSchemas(session, "nonexistent", null); + + assertNotNull(result); + assertFalse(result.next(), "Should return empty result set for nonexistent catalog"); + } + + /** + * Tests that listSchemas with a nonexistent catalog identified by NO_SUCH_CATALOG_EXCEPTION in + * the error message (with null SQL state) returns an empty result set. 
+ */ + @Test + void testListSchemasNonexistentCatalog_nullSqlState_returnsEmpty() throws SQLException { + when(session.getComputeResource()).thenReturn(mockedComputeResource); + IDatabricksConnectionContext mockContext = mock(IDatabricksConnectionContext.class); + when(mockContext.getEnableMultipleCatalogSupport()).thenReturn(true); + when(mockClient.getConnectionContext()).thenReturn(mockContext); + + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + + DatabricksSQLException notFound = + new DatabricksSQLException( + "[NO_SUCH_CATALOG_EXCEPTION] Catalog 'ghost' not found", + (String) null); // null SQL state + when(mockClient.executeStatement( + eq("SHOW SCHEMAS IN `ghost`"), + eq(mockedComputeResource), + any(), + eq(StatementType.METADATA), + eq(session), + any(), + eq(MetadataOperationType.GET_SCHEMAS))) + .thenThrow(notFound); + + DatabricksResultSet result = metadataClient.listSchemas(session, "ghost", null); + + assertNotNull(result); + assertFalse(result.next(), "Should return empty result set even with null SQL state"); + } + + /** + * Tests that listSchemas with a catalog pattern where no catalogs match returns an empty result + * set without making any schema calls. + */ + @Test + void testListSchemasWithPatternMatchingNoCatalogs_returnsEmpty() throws SQLException { + IDatabricksConnectionContext mockContext = mock(IDatabricksConnectionContext.class); + when(mockContext.getEnableMultipleCatalogSupport()).thenReturn(true); + when(mockClient.getConnectionContext()).thenReturn(mockContext); + + IDatabricksSession mockSessionLocal = mock(IDatabricksSession.class); + IDatabricksMetadataClient mockMetadataClient = mock(IDatabricksMetadataClient.class); + when(mockSessionLocal.getConnectionContext()).thenReturn(mockContext); + when(mockSessionLocal.getDatabricksMetadataClient()).thenReturn(mockMetadataClient); + + // Catalog list has no entries matching "xyz%" + DatabricksResultSet catalogResultSet = mock(DatabricksResultSet.class); + when(catalogResultSet.next()).thenReturn(true, true, false); + when(catalogResultSet.getString(1)).thenReturn("main", "other"); + when(mockMetadataClient.listCatalogs(mockSessionLocal)).thenReturn(catalogResultSet); + + DatabricksMetadataQueryClient metadataClient = new DatabricksMetadataQueryClient(mockClient); + DatabricksResultSet result = metadataClient.listSchemas(mockSessionLocal, "xyz%", null); + + assertNotNull(result); + assertEquals(StatementState.SUCCEEDED, result.getStatementStatus().getState()); + assertEquals(0, ((DatabricksResultSetMetaData) result.getMetaData()).getTotalRows()); + // No schema calls for any catalog since none match + verify(mockMetadataClient, never()).listSchemas(any(), eq("main"), any()); + verify(mockMetadataClient, never()).listSchemas(any(), eq("other"), any()); + } }