diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index edb74d15fa..4dd2985e62 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -17,6 +17,7 @@ ### Updated - `EnableGeoSpatialSupport` no longer requires `EnableComplexDatatypeSupport=1`. Geospatial types (GEOMETRY, GEOGRAPHY) can now be enabled independently of complex type support (ARRAY, MAP, STRUCT). - Arrow schema deserialization failures (Thrift metadata path) now surface a dedicated driver error code `ARROW_SCHEMA_PARSING_ERROR` (vendor code `22000`) and a proper SQLSTATE `22000` (Data Exception) on the thrown `SQLException`, instead of the generic `RESULT_SET_ERROR` (1004) and the enum name as SQLSTATE. The exception message is unchanged. +- Replaced the generic `INVALID_STATE` driver error code at ~30 call sites with more specific codes so telemetry buckets are actionable. New codes: `CURSOR_INVALID_POSITION` (1045), `COLUMN_INDEX_OUT_OF_BOUNDS` (1046), `ROW_INDEX_OUT_OF_BOUNDS` (1047), `THRIFT_RPC_ERROR` (1048), `THRIFT_RESPONSE_MISMATCH` (1049), `INVALID_RESPONSE_FORMAT` (1050), `THREAD_POOL_EXECUTION_ERROR` (1051), `STREAM_READ_ERROR` (1052). Null/argument validation sites now reuse `INPUT_VALIDATION_ERROR` (1015); volume-operation state errors now reuse `VOLUME_OPERATION_INVALID_STATE` (1028); the unimplemented `getResultChunksData` Thrift path now reuses `NOT_IMPLEMENTED_OPERATION`. Applications that key on `"INVALID_STATE".equals(e.getSQLState())` for result-set navigation, Thrift RPC, or stream-read failures will now see the new state names. ### Fixed - Fixed `?` characters inside SQL comments, string literals, and quoted identifiers being incorrectly counted as parameter placeholders when `supportManyParameters=1`. `SQLInterpolator` now uses `SqlCommentParser` to locate only real placeholders. Fixes #1331. 
diff --git a/src/main/java/com/databricks/jdbc/api/impl/ColumnarRowView.java b/src/main/java/com/databricks/jdbc/api/impl/ColumnarRowView.java index 97580cbb3e..bad42708c2 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/ColumnarRowView.java +++ b/src/main/java/com/databricks/jdbc/api/impl/ColumnarRowView.java @@ -46,11 +46,13 @@ public int getColumnCount() { public Object getValue(int rowIndex, int columnIndex) throws DatabricksSQLException { if (rowIndex < 0 || rowIndex >= rowCount) { throw new DatabricksSQLException( - "Row index out of bounds: " + rowIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Row index out of bounds: " + rowIndex, + DatabricksDriverErrorCode.ROW_INDEX_OUT_OF_BOUNDS); } if (columnIndex < 0 || columnIndex >= columnAccessors.length) { throw new DatabricksSQLException( - "Column index out of bounds: " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Column index out of bounds: " + columnIndex, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS); } return columnAccessors[columnIndex].getValue(rowIndex); @@ -63,7 +65,8 @@ public Object getValue(int rowIndex, int columnIndex) throws DatabricksSQLExcept public Object[] materializeRow(int rowIndex) throws DatabricksSQLException { if (rowIndex < 0 || rowIndex >= rowCount) { throw new DatabricksSQLException( - "Row index out of bounds: " + rowIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Row index out of bounds: " + rowIndex, + DatabricksDriverErrorCode.ROW_INDEX_OUT_OF_BOUNDS); } Object[] row = new Object[columnAccessors.length]; diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java index a53b228c78..04a2664423 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java @@ -1137,14 +1137,14 @@ public ResultSet getCrossReference( 
LOGGER.debug("getCrossReference: parentTable is null or empty, throwing"); throw new DatabricksSQLException( "Invalid argument: parentTable may not be null or empty", - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } // Empty foreign table is also invalid — Thrift server rejects it if (foreignTable != null && foreignTable.isEmpty()) { LOGGER.debug("getCrossReference: foreignTable is empty string, throwing"); throw new DatabricksSQLException( "Invalid argument: foreignTable may not be empty", - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } return session @@ -1578,7 +1578,7 @@ public T unwrap(Class iface) throws SQLException { } throw new DatabricksSQLException( - "Cannot unwrap to " + iface.getName(), DatabricksDriverErrorCode.INVALID_STATE); + "Cannot unwrap to " + iface.getName(), DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } /** {@inheritDoc} */ diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksPreparedStatement.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksPreparedStatement.java index fcdbab3b92..0ed7dbb34f 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksPreparedStatement.java +++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksPreparedStatement.java @@ -594,7 +594,8 @@ public void setObject(int parameterIndex, Object x, int targetSqlType, int scale bd = BigDecimal.valueOf(((Number) x).doubleValue()); } else { throw new DatabricksSQLException( - "Invalid object type for DECIMAL/NUMERIC", DatabricksDriverErrorCode.INVALID_STATE); + "Invalid object type for DECIMAL/NUMERIC", + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } bd = bd.setScale(scaleOrLength, RoundingMode.HALF_UP); // Round up to nearest value. 
setObject(parameterIndex, bd, databricksType); @@ -750,7 +751,7 @@ private void checkLength(long targetLength, long sourceLength) throws SQLExcepti "Unexpected number of bytes read from the stream. Expected: %d, got: %d", targetLength, sourceLength); LOGGER.error(errorMessage); - throw new DatabricksSQLException(errorMessage, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksSQLException(errorMessage, DatabricksDriverErrorCode.STREAM_READ_ERROR); } } @@ -767,7 +768,8 @@ private byte[] readBytesFromInputStream(InputStream x, int length) throws SQLExc if (x == null) { String errorMessage = "InputStream cannot be null"; LOGGER.error(errorMessage); - throw new DatabricksSQLException(errorMessage, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksSQLException( + errorMessage, DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } byte[] bytes = new byte[length]; try { @@ -776,7 +778,8 @@ private byte[] readBytesFromInputStream(InputStream x, int length) throws SQLExc } catch (IOException e) { String errorMessage = "Error reading from the InputStream"; LOGGER.error(errorMessage); - throw new DatabricksSQLException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksSQLException( + errorMessage, e, DatabricksDriverErrorCode.STREAM_READ_ERROR); } return bytes; } @@ -815,7 +818,7 @@ private String readStringFromInputStream(InputStream inputStream, long length, C } catch (IOException e) { String message = "Error reading from the InputStream"; LOGGER.error(message); - throw new DatabricksSQLException(message, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksSQLException(message, e, DatabricksDriverErrorCode.STREAM_READ_ERROR); } } diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksResultSet.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksResultSet.java index a83956528e..a3b9294335 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksResultSet.java +++ 
b/src/main/java/com/databricks/jdbc/api/impl/DatabricksResultSet.java @@ -1997,7 +1997,7 @@ private Object getObjectInternal(int columnIndex) throws SQLException { if (columnIndex <= 0) { throw new DatabricksSQLException( "Invalid column index", - DatabricksDriverErrorCode.INVALID_STATE, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS, silenceNonTerminalExceptions); } Object object = executionResult.getObject(columnIndex - 1); diff --git a/src/main/java/com/databricks/jdbc/api/impl/ExecutionResultFactory.java b/src/main/java/com/databricks/jdbc/api/impl/ExecutionResultFactory.java index b9aec3fc1b..d7dd90a8ad 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/ExecutionResultFactory.java +++ b/src/main/java/com/databricks/jdbc/api/impl/ExecutionResultFactory.java @@ -51,7 +51,7 @@ private static IExecutionResult getResultHandler( throws DatabricksSQLException { if (manifest.getFormat() == null) { throw new DatabricksParsingException( - "Empty response format", DatabricksDriverErrorCode.INVALID_STATE); + "Empty response format", DatabricksDriverErrorCode.INVALID_RESPONSE_FORMAT); } TelemetryHelper.setResultFormat( session.getConnectionContext(), statementId, manifest.getFormat()); @@ -66,7 +66,8 @@ private static IExecutionResult getResultHandler( default: String errorMessage = String.format("Invalid response format %s", manifest.getFormat()); LOGGER.error(errorMessage); - throw new DatabricksParsingException(errorMessage, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksParsingException( + errorMessage, DatabricksDriverErrorCode.INVALID_RESPONSE_FORMAT); } } diff --git a/src/main/java/com/databricks/jdbc/api/impl/InlineJsonResult.java b/src/main/java/com/databricks/jdbc/api/impl/InlineJsonResult.java index e3541510ed..f08369c74c 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/InlineJsonResult.java +++ b/src/main/java/com/databricks/jdbc/api/impl/InlineJsonResult.java @@ -52,13 +52,14 @@ public Object getObject(int columnIndex) 
throws DatabricksSQLException { } if (currentRow == -1) { throw new DatabricksSQLException( - "Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE); + "Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (columnIndex < data.get((int) currentRow).size()) { return data.get((int) currentRow).get(columnIndex); } throw new DatabricksSQLException( - "Column index out of bounds " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Column index out of bounds " + columnIndex, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS); } @Override diff --git a/src/main/java/com/databricks/jdbc/api/impl/LazyThriftResult.java b/src/main/java/com/databricks/jdbc/api/impl/LazyThriftResult.java index 854da86566..2c3b9ba9b6 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/LazyThriftResult.java +++ b/src/main/java/com/databricks/jdbc/api/impl/LazyThriftResult.java @@ -73,15 +73,16 @@ public Object getObject(int columnIndex) throws DatabricksSQLException { } if (globalRowIndex == -1) { throw new DatabricksSQLException( - "Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE); + "Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (currentBatchIndex < 0 || currentBatchIndex >= currentBatch.getRowCount()) { throw new DatabricksSQLException( - "Invalid cursor position", DatabricksDriverErrorCode.INVALID_STATE); + "Invalid cursor position", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (columnIndex < 0 || columnIndex >= currentBatch.getColumnCount()) { throw new DatabricksSQLException( - "Column index out of bounds " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Column index out of bounds " + columnIndex, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS); } return currentBatch.getValue(currentBatchIndex, columnIndex); } diff --git a/src/main/java/com/databricks/jdbc/api/impl/PreparedStatementBatchExecutor.java 
b/src/main/java/com/databricks/jdbc/api/impl/PreparedStatementBatchExecutor.java index 7387cf67f4..07c5628671 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/PreparedStatementBatchExecutor.java +++ b/src/main/java/com/databricks/jdbc/api/impl/PreparedStatementBatchExecutor.java @@ -100,7 +100,7 @@ private long[] executeBatchedInsert(List batchParam if (configuredBatchSize < 1) { throw new DatabricksSQLException( "BatchInsertSize must be at least 1, got: " + configuredBatchSize, - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } maxRowsPerChunk = Math.min(configuredBatchSize, batchParameterMetaData.size()); } else { diff --git a/src/main/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResult.java b/src/main/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResult.java index 98d97a8992..4c33ea9087 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResult.java +++ b/src/main/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResult.java @@ -123,17 +123,18 @@ private void validateGetObjectState(int columnIndex) throws DatabricksSQLExcepti if (globalRowIndex == -1) { LOGGER.warn("Attempted to get object before calling next()"); throw new DatabricksSQLException( - "Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE); + "Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (currentChunkIterator == null) { LOGGER.warn("No current chunk available when getting object"); throw new DatabricksSQLException( - "No current chunk available", DatabricksDriverErrorCode.INVALID_STATE); + "No current chunk available", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (columnIndex < 0 || columnIndex >= columnInfos.size()) { LOGGER.warn("Column index {} out of bounds (size: {})", columnIndex, columnInfos.size()); throw new DatabricksSQLException( - "Column index out of bounds " + columnIndex, 
DatabricksDriverErrorCode.INVALID_STATE); + "Column index out of bounds " + columnIndex, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS); } } diff --git a/src/main/java/com/databricks/jdbc/api/impl/arrow/StreamingInlineArrowResult.java b/src/main/java/com/databricks/jdbc/api/impl/arrow/StreamingInlineArrowResult.java index 7c9025c3f0..3abc5d089a 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/arrow/StreamingInlineArrowResult.java +++ b/src/main/java/com/databricks/jdbc/api/impl/arrow/StreamingInlineArrowResult.java @@ -144,17 +144,18 @@ private void validateGetObjectState(int columnIndex) throws DatabricksSQLExcepti if (globalRowIndex == -1) { LOGGER.error("Attempted to access data before first row"); throw new DatabricksSQLException( - "Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE); + "Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (currentChunkIterator == null) { LOGGER.error("No current chunk available at row {}", globalRowIndex); throw new DatabricksSQLException( - "No current chunk available", DatabricksDriverErrorCode.INVALID_STATE); + "No current chunk available", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (columnIndex < 0 || columnIndex >= columnInfos.size()) { LOGGER.error("Column index {} out of bounds (0-{})", columnIndex, columnInfos.size() - 1); throw new DatabricksSQLException( - "Column index out of bounds: " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Column index out of bounds: " + columnIndex, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS); } } diff --git a/src/main/java/com/databricks/jdbc/api/impl/thrift/StreamingColumnarResult.java b/src/main/java/com/databricks/jdbc/api/impl/thrift/StreamingColumnarResult.java index bf5a95d346..1921d3b6dd 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/thrift/StreamingColumnarResult.java +++ b/src/main/java/com/databricks/jdbc/api/impl/thrift/StreamingColumnarResult.java @@ -116,13 
+116,13 @@ public Object getObject(int columnIndex) throws DatabricksSQLException { if (globalRowIndex == -1) { LOGGER.error("Attempted to access data before first row"); throw new DatabricksSQLException( - "Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE); + "Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (currentBatch == null || currentBatchRowIndex < 0) { LOGGER.error( "Invalid cursor position: batch={}, rowIndex={}", currentBatch, currentBatchRowIndex); throw new DatabricksSQLException( - "Invalid cursor position", DatabricksDriverErrorCode.INVALID_STATE); + "Invalid cursor position", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } // Type-safe: getData() returns ColumnarRowView directly, no casting! @@ -130,12 +130,13 @@ public Object getObject(int columnIndex) throws DatabricksSQLException { if (view == null) { LOGGER.error("Batch data not available at row {}", globalRowIndex); throw new DatabricksSQLException( - "Batch data not available", DatabricksDriverErrorCode.INVALID_STATE); + "Batch data not available", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION); } if (columnIndex < 0 || columnIndex >= view.getColumnCount()) { LOGGER.error("Column index {} out of bounds (0-{})", columnIndex, view.getColumnCount() - 1); throw new DatabricksSQLException( - "Column index out of bounds: " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE); + "Column index out of bounds: " + columnIndex, + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS); } return view.getValue(currentBatchRowIndex, columnIndex); diff --git a/src/main/java/com/databricks/jdbc/api/impl/volume/DBFSVolumeClient.java b/src/main/java/com/databricks/jdbc/api/impl/volume/DBFSVolumeClient.java index a3e11cfbe0..d5550e3b84 100644 --- a/src/main/java/com/databricks/jdbc/api/impl/volume/DBFSVolumeClient.java +++ b/src/main/java/com/databricks/jdbc/api/impl/volume/DBFSVolumeClient.java @@ -571,12 +571,12 @@ private void 
checkVolumeOperationError(VolumeOperationProcessor volumeOperationP if (volumeOperationProcessor.getStatus() == VolumeOperationStatus.FAILED) { throw new DatabricksSQLException( "Volume operation failed: " + volumeOperationProcessor.getErrorMessage(), - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.VOLUME_OPERATION_INVALID_STATE); } if (volumeOperationProcessor.getStatus() == VolumeOperationStatus.ABORTED) { throw new DatabricksSQLException( "Volume operation aborted: " + volumeOperationProcessor.getErrorMessage(), - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.VOLUME_OPERATION_INVALID_STATE); } } diff --git a/src/main/java/com/databricks/jdbc/common/util/JdbcThreadUtils.java b/src/main/java/com/databricks/jdbc/common/util/JdbcThreadUtils.java index 13aa06b96f..d52a215b92 100644 --- a/src/main/java/com/databricks/jdbc/common/util/JdbcThreadUtils.java +++ b/src/main/java/com/databricks/jdbc/common/util/JdbcThreadUtils.java @@ -89,7 +89,9 @@ public static List parallelMap( throw sqlEx; } else { throw new DatabricksSQLException( - "Error in parallel execution", e, DatabricksDriverErrorCode.INVALID_STATE); + "Error in parallel execution", + e, + DatabricksDriverErrorCode.THREAD_POOL_EXECUTION_ERROR); } } catch (TimeoutException e) { throw new DatabricksSQLException( diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java index a4ee7f8894..81b3952db8 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java @@ -393,7 +393,8 @@ public DatabricksResultSet listExportedKeys( if (table == null) { LOGGER.debug("listExportedKeys: table is null, throwing"); throw new DatabricksSQLException( - "Invalid argument: tableName may not be null", 
DatabricksDriverErrorCode.INVALID_STATE); + "Invalid argument: tableName may not be null", + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } // Only fetch currentCatalog if multiple catalog support is disabled @@ -527,7 +528,7 @@ private String[] resolveKeyBasedParams( LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing"); throw new DatabricksSQLException( "Invalid argument: tableName may not be null or empty", - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } if (catalog == null) { @@ -541,7 +542,7 @@ private String[] resolveKeyBasedParams( "resolveKeyBasedParams: schema is null with explicit catalog '{}', throwing", catalog); throw new DatabricksSQLException( "Invalid argument: schema may not be null when catalog is specified", - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } // Safety net: getCurrentCatalogAndSchema() returned null values @@ -552,7 +553,7 @@ private String[] resolveKeyBasedParams( schema); throw new DatabricksSQLException( "Invalid argument: could not resolve catalog or schema", - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR); } return new String[] {catalog, schema, table}; diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftAccessor.java b/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftAccessor.java index a8fc8623e3..ce1c6e5452 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftAccessor.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftAccessor.java @@ -117,7 +117,7 @@ TBase getThriftResponse(TBase request) throws DatabricksSQLException { while (cause != null) { if (cause instanceof HttpException) { throw new DatabricksHttpException( - cause.getMessage(), cause, DatabricksDriverErrorCode.INVALID_STATE); + cause.getMessage(), cause, 
DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } cause = cause.getCause(); } @@ -129,7 +129,8 @@ TBase getThriftResponse(TBase request) throws DatabricksSQLException { if (e instanceof SQLException) { throw new DatabricksSQLException(errorMessage, e, ((SQLException) e).getSQLState()); } else { - throw new DatabricksSQLException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksSQLException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } } } @@ -170,7 +171,8 @@ TCancelOperationResp cancelOperation(TCancelOperationReq req) throws DatabricksH "Error while canceling operation from Thrift server. Request {%s}, Error {%s}", req.toString(), e.getMessage()); LOGGER.error(e, errorMessage); - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } } @@ -183,7 +185,8 @@ TCloseOperationResp closeOperation(TCloseOperationReq req) throws DatabricksHttp "Error while closing operation from Thrift server. Request {%s}, Error {%s}", req.toString(), e.getMessage()); LOGGER.error(e, errorMessage); - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } } @@ -294,7 +297,8 @@ DatabricksResultSet execute( "Error while receiving response from Thrift server. 
Request {%s}, Error {%s}", request, e.getMessage()); LOGGER.error(e, errorMessage); - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } } @@ -409,7 +413,8 @@ DatabricksResultSet executeAsync( if (e instanceof DatabricksSQLException) { throw new DatabricksHttpException(errorMessage, ((DatabricksSQLException) e).getSQLState()); } else { - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } } StatementId statementId = new StatementId(response.getOperationHandle().operationId); @@ -519,7 +524,8 @@ DatabricksResultSet getStatementResult( "Error while receiving response from Thrift server. Request {%s}, Error {%s}", request.toString(), e.getMessage()); LOGGER.error(e, errorMessage); - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } StatementStatus executionStatus = getStatementStatus(response); @@ -559,7 +565,8 @@ private TFetchResultsResp executeFetchRequest(TFetchResultsReq request) throws S "Error while fetching results from Thrift server. 
Request maxRows=%d, " + "maxBytes=%d, Error {%s}", request.getMaxRows(), request.getMaxBytes(), e.getMessage()); - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } String statementId = StatementId.loggableStatementId(request.getOperationHandle()); @@ -625,7 +632,8 @@ TFetchResultsResp fetchResultsWithAbsoluteOffset( + "startRowOffset=%d, maxRows=%d, Error {%s}", startRowOffset, request.getMaxRows(), e.getMessage()); LOGGER.error(e, errorMessage); - throw new DatabricksHttpException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksHttpException( + errorMessage, e, DatabricksDriverErrorCode.THRIFT_RPC_ERROR); } verifySuccessStatus( diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java index 0057c7faa6..dde28a1142 100644 --- a/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java +++ b/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java @@ -370,7 +370,7 @@ public ChunkLinkFetchResult getResultChunks( + chunkStartRowOffset + " actual=" + chunkLinks.get(0).getRowOffset(); - throw new DatabricksSQLException(error, DatabricksDriverErrorCode.INVALID_STATE); + throw new DatabricksSQLException(error, DatabricksDriverErrorCode.THRIFT_RESPONSE_MISMATCH); } LOGGER.debug( @@ -390,7 +390,7 @@ public ResultData getResultChunksData(StatementId statementId, long chunkIndex) throws DatabricksSQLException { throw new DatabricksSQLException( "getResultChunksData method is not yet implemented for thrift client", - DatabricksDriverErrorCode.INVALID_STATE); + DatabricksDriverErrorCode.NOT_IMPLEMENTED_OPERATION); } @Override diff --git 
a/src/main/java/com/databricks/jdbc/model/telemetry/enums/DatabricksDriverErrorCode.java b/src/main/java/com/databricks/jdbc/model/telemetry/enums/DatabricksDriverErrorCode.java index 6ae558b5ac..d0d2d56ed8 100644 --- a/src/main/java/com/databricks/jdbc/model/telemetry/enums/DatabricksDriverErrorCode.java +++ b/src/main/java/com/databricks/jdbc/model/telemetry/enums/DatabricksDriverErrorCode.java @@ -45,6 +45,14 @@ public enum DatabricksDriverErrorCode { TRANSACTION_COMMIT_ERROR(1042), TRANSACTION_ROLLBACK_ERROR(1043), RATE_LIMIT_EXCEEDED(1044), + CURSOR_INVALID_POSITION(1045), + COLUMN_INDEX_OUT_OF_BOUNDS(1046), + ROW_INDEX_OUT_OF_BOUNDS(1047), + THRIFT_RPC_ERROR(1048), + THRIFT_RESPONSE_MISMATCH(1049), + INVALID_RESPONSE_FORMAT(1050), + THREAD_POOL_EXECUTION_ERROR(1051), + STREAM_READ_ERROR(1052), ARROW_SCHEMA_PARSING_ERROR(22000); private final int code; diff --git a/src/test/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResultTest.java b/src/test/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResultTest.java index 3cac2c52c0..ac7b512572 100644 --- a/src/test/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResultTest.java +++ b/src/test/java/com/databricks/jdbc/api/impl/arrow/LazyThriftInlineArrowResultTest.java @@ -198,7 +198,7 @@ void testGetObjectThrowsWhenBeforeFirstRow() throws SQLException { DatabricksSQLException exception = assertThrows(DatabricksSQLException.class, () -> result.getObject(0)); assertEquals("Cursor is before first row", exception.getMessage()); - assertEquals(DatabricksDriverErrorCode.INVALID_STATE.name(), exception.getSQLState()); + assertEquals(DatabricksDriverErrorCode.CURSOR_INVALID_POSITION.name(), exception.getSQLState()); } @Test @@ -323,13 +323,16 @@ void testGetObjectThrowsForColumnIndexOutOfBounds() throws SQLException { DatabricksSQLException negativeException = assertThrows(DatabricksSQLException.class, () -> result.getObject(-1)); 
assertTrue(negativeException.getMessage().contains("Column index out of bounds")); - assertEquals(DatabricksDriverErrorCode.INVALID_STATE.name(), negativeException.getSQLState()); + assertEquals( + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS.name(), + negativeException.getSQLState()); // Test index beyond column count (we have 2 columns: 0 and 1) DatabricksSQLException beyondException = assertThrows(DatabricksSQLException.class, () -> result.getObject(2)); assertTrue(beyondException.getMessage().contains("Column index out of bounds")); - assertEquals(DatabricksDriverErrorCode.INVALID_STATE.name(), beyondException.getSQLState()); + assertEquals( + DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS.name(), beyondException.getSQLState()); } @Test