Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
### Updated
- `EnableGeoSpatialSupport` no longer requires `EnableComplexDatatypeSupport=1`. Geospatial types (GEOMETRY, GEOGRAPHY) can now be enabled independently of complex type support (ARRAY, MAP, STRUCT).
- Arrow schema deserialization failures (Thrift metadata path) now surface a dedicated driver error code `ARROW_SCHEMA_PARSING_ERROR` (vendor code `22000`) and a proper SQLSTATE `22000` (Data Exception) on the thrown `SQLException`, instead of the generic `RESULT_SET_ERROR` (1004) and the enum name as SQLSTATE. The exception message is unchanged.
- Replaced the generic `INVALID_STATE` driver error code at ~30 call sites with more specific codes so telemetry buckets are actionable. New codes: `CURSOR_INVALID_POSITION` (1045), `COLUMN_INDEX_OUT_OF_BOUNDS` (1046), `ROW_INDEX_OUT_OF_BOUNDS` (1047), `THRIFT_RPC_ERROR` (1048), `THRIFT_RESPONSE_MISMATCH` (1049), `INVALID_RESPONSE_FORMAT` (1050), `THREAD_POOL_EXECUTION_ERROR` (1051), `STREAM_READ_ERROR` (1052). Null/argument validation sites now reuse `INPUT_VALIDATION_ERROR` (1015); volume-operation state errors now reuse `VOLUME_OPERATION_INVALID_STATE` (1028). Applications that match on `"INVALID_STATE".equals(ex.getSQLState())` for result-set navigation, Thrift RPC, or stream-read failures will now see the new state names instead.

### Fixed
- Fixed `?` characters inside SQL comments, string literals, and quoted identifiers being incorrectly counted as parameter placeholders when `supportManyParameters=1`. `SQLInterpolator` now uses `SqlCommentParser` to locate only real placeholders. Fixes #1331.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,13 @@ public int getColumnCount() {
public Object getValue(int rowIndex, int columnIndex) throws DatabricksSQLException {
if (rowIndex < 0 || rowIndex >= rowCount) {
throw new DatabricksSQLException(
"Row index out of bounds: " + rowIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Row index out of bounds: " + rowIndex,
DatabricksDriverErrorCode.ROW_INDEX_OUT_OF_BOUNDS);
}
if (columnIndex < 0 || columnIndex >= columnAccessors.length) {
throw new DatabricksSQLException(
"Column index out of bounds: " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Column index out of bounds: " + columnIndex,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS);
}

return columnAccessors[columnIndex].getValue(rowIndex);
Expand All @@ -63,7 +65,8 @@ public Object getValue(int rowIndex, int columnIndex) throws DatabricksSQLExcept
public Object[] materializeRow(int rowIndex) throws DatabricksSQLException {
if (rowIndex < 0 || rowIndex >= rowCount) {
throw new DatabricksSQLException(
"Row index out of bounds: " + rowIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Row index out of bounds: " + rowIndex,
DatabricksDriverErrorCode.ROW_INDEX_OUT_OF_BOUNDS);
}

Object[] row = new Object[columnAccessors.length];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1137,14 +1137,14 @@ public ResultSet getCrossReference(
LOGGER.debug("getCrossReference: parentTable is null or empty, throwing");
throw new DatabricksSQLException(
"Invalid argument: parentTable may not be null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}
// Empty foreign table is also invalid — Thrift server rejects it
if (foreignTable != null && foreignTable.isEmpty()) {
LOGGER.debug("getCrossReference: foreignTable is empty string, throwing");
throw new DatabricksSQLException(
"Invalid argument: foreignTable may not be empty",
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}

return session
Expand Down Expand Up @@ -1578,7 +1578,7 @@ public <T> T unwrap(Class<T> iface) throws SQLException {
}

throw new DatabricksSQLException(
"Cannot unwrap to " + iface.getName(), DatabricksDriverErrorCode.INVALID_STATE);
"Cannot unwrap to " + iface.getName(), DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}

/** {@inheritDoc} */
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -594,7 +594,8 @@ public void setObject(int parameterIndex, Object x, int targetSqlType, int scale
bd = BigDecimal.valueOf(((Number) x).doubleValue());
} else {
throw new DatabricksSQLException(
"Invalid object type for DECIMAL/NUMERIC", DatabricksDriverErrorCode.INVALID_STATE);
"Invalid object type for DECIMAL/NUMERIC",
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}
bd = bd.setScale(scaleOrLength, RoundingMode.HALF_UP); // Round up to nearest value.
setObject(parameterIndex, bd, databricksType);
Expand Down Expand Up @@ -750,7 +751,7 @@ private void checkLength(long targetLength, long sourceLength) throws SQLExcepti
"Unexpected number of bytes read from the stream. Expected: %d, got: %d",
targetLength, sourceLength);
LOGGER.error(errorMessage);
throw new DatabricksSQLException(errorMessage, DatabricksDriverErrorCode.INVALID_STATE);
throw new DatabricksSQLException(errorMessage, DatabricksDriverErrorCode.STREAM_READ_ERROR);
}
}

Expand All @@ -767,7 +768,8 @@ private byte[] readBytesFromInputStream(InputStream x, int length) throws SQLExc
if (x == null) {
String errorMessage = "InputStream cannot be null";
LOGGER.error(errorMessage);
throw new DatabricksSQLException(errorMessage, DatabricksDriverErrorCode.INVALID_STATE);
throw new DatabricksSQLException(
errorMessage, DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}
byte[] bytes = new byte[length];
try {
Expand All @@ -776,7 +778,8 @@ private byte[] readBytesFromInputStream(InputStream x, int length) throws SQLExc
} catch (IOException e) {
String errorMessage = "Error reading from the InputStream";
LOGGER.error(errorMessage);
throw new DatabricksSQLException(errorMessage, e, DatabricksDriverErrorCode.INVALID_STATE);
throw new DatabricksSQLException(
errorMessage, e, DatabricksDriverErrorCode.STREAM_READ_ERROR);
}
return bytes;
}
Expand Down Expand Up @@ -815,7 +818,7 @@ private String readStringFromInputStream(InputStream inputStream, long length, C
} catch (IOException e) {
String message = "Error reading from the InputStream";
LOGGER.error(message);
throw new DatabricksSQLException(message, e, DatabricksDriverErrorCode.INVALID_STATE);
throw new DatabricksSQLException(message, e, DatabricksDriverErrorCode.STREAM_READ_ERROR);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1997,7 +1997,7 @@ private Object getObjectInternal(int columnIndex) throws SQLException {
if (columnIndex <= 0) {
throw new DatabricksSQLException(
"Invalid column index",
DatabricksDriverErrorCode.INVALID_STATE,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS,
silenceNonTerminalExceptions);
}
Object object = executionResult.getObject(columnIndex - 1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ private static IExecutionResult getResultHandler(
throws DatabricksSQLException {
if (manifest.getFormat() == null) {
throw new DatabricksParsingException(
"Empty response format", DatabricksDriverErrorCode.INVALID_STATE);
"Empty response format", DatabricksDriverErrorCode.INVALID_RESPONSE_FORMAT);
}
TelemetryHelper.setResultFormat(
session.getConnectionContext(), statementId, manifest.getFormat());
Expand All @@ -66,7 +66,8 @@ private static IExecutionResult getResultHandler(
default:
String errorMessage = String.format("Invalid response format %s", manifest.getFormat());
LOGGER.error(errorMessage);
throw new DatabricksParsingException(errorMessage, DatabricksDriverErrorCode.INVALID_STATE);
throw new DatabricksParsingException(
errorMessage, DatabricksDriverErrorCode.INVALID_RESPONSE_FORMAT);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,13 +52,14 @@ public Object getObject(int columnIndex) throws DatabricksSQLException {
}
if (currentRow == -1) {
throw new DatabricksSQLException(
"Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE);
"Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (columnIndex < data.get((int) currentRow).size()) {
return data.get((int) currentRow).get(columnIndex);
}
throw new DatabricksSQLException(
"Column index out of bounds " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Column index out of bounds " + columnIndex,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,15 +73,16 @@ public Object getObject(int columnIndex) throws DatabricksSQLException {
}
if (globalRowIndex == -1) {
throw new DatabricksSQLException(
"Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE);
"Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (currentBatchIndex < 0 || currentBatchIndex >= currentBatch.getRowCount()) {
throw new DatabricksSQLException(
"Invalid cursor position", DatabricksDriverErrorCode.INVALID_STATE);
"Invalid cursor position", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (columnIndex < 0 || columnIndex >= currentBatch.getColumnCount()) {
throw new DatabricksSQLException(
"Column index out of bounds " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Column index out of bounds " + columnIndex,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS);
}
return currentBatch.getValue(currentBatchIndex, columnIndex);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ private long[] executeBatchedInsert(List<DatabricksParameterMetaData> batchParam
if (configuredBatchSize < 1) {
throw new DatabricksSQLException(
"BatchInsertSize must be at least 1, got: " + configuredBatchSize,
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}
maxRowsPerChunk = Math.min(configuredBatchSize, batchParameterMetaData.size());
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,17 +123,18 @@ private void validateGetObjectState(int columnIndex) throws DatabricksSQLExcepti
if (globalRowIndex == -1) {
LOGGER.warn("Attempted to get object before calling next()");
throw new DatabricksSQLException(
"Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE);
"Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (currentChunkIterator == null) {
LOGGER.warn("No current chunk available when getting object");
throw new DatabricksSQLException(
"No current chunk available", DatabricksDriverErrorCode.INVALID_STATE);
"No current chunk available", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (columnIndex < 0 || columnIndex >= columnInfos.size()) {
LOGGER.warn("Column index {} out of bounds (size: {})", columnIndex, columnInfos.size());
throw new DatabricksSQLException(
"Column index out of bounds " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Column index out of bounds " + columnIndex,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -144,17 +144,18 @@ private void validateGetObjectState(int columnIndex) throws DatabricksSQLExcepti
if (globalRowIndex == -1) {
LOGGER.error("Attempted to access data before first row");
throw new DatabricksSQLException(
"Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE);
"Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (currentChunkIterator == null) {
LOGGER.error("No current chunk available at row {}", globalRowIndex);
throw new DatabricksSQLException(
"No current chunk available", DatabricksDriverErrorCode.INVALID_STATE);
"No current chunk available", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (columnIndex < 0 || columnIndex >= columnInfos.size()) {
LOGGER.error("Column index {} out of bounds (0-{})", columnIndex, columnInfos.size() - 1);
throw new DatabricksSQLException(
"Column index out of bounds: " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Column index out of bounds: " + columnIndex,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,26 +116,27 @@ public Object getObject(int columnIndex) throws DatabricksSQLException {
if (globalRowIndex == -1) {
LOGGER.error("Attempted to access data before first row");
throw new DatabricksSQLException(
"Cursor is before first row", DatabricksDriverErrorCode.INVALID_STATE);
"Cursor is before first row", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (currentBatch == null || currentBatchRowIndex < 0) {
LOGGER.error(
"Invalid cursor position: batch={}, rowIndex={}", currentBatch, currentBatchRowIndex);
throw new DatabricksSQLException(
"Invalid cursor position", DatabricksDriverErrorCode.INVALID_STATE);
"Invalid cursor position", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}

// Type-safe: getData() returns ColumnarRowView directly, no casting!
ColumnarRowView view = currentBatch.getData();
if (view == null) {
LOGGER.error("Batch data not available at row {}", globalRowIndex);
throw new DatabricksSQLException(
"Batch data not available", DatabricksDriverErrorCode.INVALID_STATE);
"Batch data not available", DatabricksDriverErrorCode.CURSOR_INVALID_POSITION);
}
if (columnIndex < 0 || columnIndex >= view.getColumnCount()) {
LOGGER.error("Column index {} out of bounds (0-{})", columnIndex, view.getColumnCount() - 1);
throw new DatabricksSQLException(
"Column index out of bounds: " + columnIndex, DatabricksDriverErrorCode.INVALID_STATE);
"Column index out of bounds: " + columnIndex,
DatabricksDriverErrorCode.COLUMN_INDEX_OUT_OF_BOUNDS);
}

return view.getValue(currentBatchRowIndex, columnIndex);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -571,12 +571,12 @@ private void checkVolumeOperationError(VolumeOperationProcessor volumeOperationP
if (volumeOperationProcessor.getStatus() == VolumeOperationStatus.FAILED) {
throw new DatabricksSQLException(
"Volume operation failed: " + volumeOperationProcessor.getErrorMessage(),
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.VOLUME_OPERATION_INVALID_STATE);
}
if (volumeOperationProcessor.getStatus() == VolumeOperationStatus.ABORTED) {
throw new DatabricksSQLException(
"Volume operation aborted: " + volumeOperationProcessor.getErrorMessage(),
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.VOLUME_OPERATION_INVALID_STATE);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,9 @@ public static <T, R> List<R> parallelMap(
throw sqlEx;
} else {
throw new DatabricksSQLException(
"Error in parallel execution", e, DatabricksDriverErrorCode.INVALID_STATE);
"Error in parallel execution",
e,
DatabricksDriverErrorCode.THREAD_POOL_EXECUTION_ERROR);
}
} catch (TimeoutException e) {
throw new DatabricksSQLException(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -393,7 +393,8 @@ public DatabricksResultSet listExportedKeys(
if (table == null) {
LOGGER.debug("listExportedKeys: table is null, throwing");
throw new DatabricksSQLException(
"Invalid argument: tableName may not be null", DatabricksDriverErrorCode.INVALID_STATE);
"Invalid argument: tableName may not be null",
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}

// Only fetch currentCatalog if multiple catalog support is disabled
Expand Down Expand Up @@ -527,7 +528,7 @@ private String[] resolveKeyBasedParams(
LOGGER.debug("resolveKeyBasedParams: table is null or empty, throwing");
throw new DatabricksSQLException(
"Invalid argument: tableName may not be null or empty",
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}

if (catalog == null) {
Expand All @@ -541,7 +542,7 @@ private String[] resolveKeyBasedParams(
"resolveKeyBasedParams: schema is null with explicit catalog '{}', throwing", catalog);
throw new DatabricksSQLException(
"Invalid argument: schema may not be null when catalog is specified",
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}

// Safety net: getCurrentCatalogAndSchema() returned null values
Expand All @@ -552,7 +553,7 @@ private String[] resolveKeyBasedParams(
schema);
throw new DatabricksSQLException(
"Invalid argument: could not resolve catalog or schema",
DatabricksDriverErrorCode.INVALID_STATE);
DatabricksDriverErrorCode.INPUT_VALIDATION_ERROR);
}

return new String[] {catalog, schema, table};
Expand Down
Loading
Loading