@@ -1099,21 +1099,31 @@ class DatabricksTelemetryExporter {
10991099
11001100 private async exportInternal(metrics : TelemetryMetric []): Promise <void > {
11011101 const config = this .context .getConfig ();
1102- const connectionProvider = await this .context .getConnectionProvider ();
1102+ const authenticatedExport = config .telemetryAuthenticatedExport ?? true ;
1103+
1104+ const endpoint = authenticatedExport
1105+ ? ` https://${this .host }/telemetry-ext `
1106+ : ` https://${this .host }/telemetry-unauth ` ;
11031107
1104- const endpoint = config . telemetryAuthenticatedExport
1105- ? ` https://${ this .host }/api/2.0/sql/telemetry-ext `
1106- : ` https://${ this . host }/api/2.0/sql/telemetry-unauth ` ;
1108+ // CRITICAL: Format payload to match JDBC TelemetryRequest with protoLogs
1109+ const telemetryLogs = metrics . map ( m => this .toTelemetryLog ( m ));
1110+ const protoLogs = telemetryLogs . map ( log => JSON . stringify ( log )) ;
11071111
11081112 const payload = {
1109- frontend_logs: metrics .map (m => this .toTelemetryLog (m )),
1113+ uploadTime: Date .now (),
1114+ items: [], // Required but unused
1115+ protoLogs , // Array of JSON-stringified log objects
11101116 };
11111117
1118+ // Get authentication headers if using authenticated endpoint
1119+ const authHeaders = authenticatedExport ? await this .context .getAuthHeaders () : {};
1120+
11121121 const response = await fetch (endpoint , {
11131122 method: ' POST' ,
11141123 headers: {
1124+ ... authHeaders ,
11151125 ' Content-Type' : ' application/json' ,
1116- // Use connection provider's auth headers
1126+ ' User-Agent ' : this . userAgent ,
11171127 },
11181128 body: JSON .stringify (payload ),
11191129 });
@@ -1124,34 +1134,60 @@ class DatabricksTelemetryExporter {
11241134 }
11251135
11261136 private toTelemetryLog(metric : TelemetryMetric ): any {
1127- return {
1128- workspace_id: metric .workspaceId ,
1137+ const log = {
11291138 frontend_log_event_id: this .generateUUID (),
11301139 context: {
11311140 client_context: {
11321141 timestamp_millis: metric .timestamp ,
1133- user_agent: this .httpClient . userAgent ,
1142+ user_agent: this .userAgent ,
11341143 },
11351144 },
11361145 entry: {
11371146 sql_driver_log: {
11381147 session_id: metric .sessionId ,
11391148 sql_statement_id: metric .statementId ,
1140- operation_latency_ms: metric .latencyMs ,
1141- sql_operation: {
1142- execution_result_format: metric .resultFormat ,
1143- chunk_details: metric .chunkCount ? {
1144- chunk_count: metric .chunkCount ,
1145- total_bytes: metric .bytesDownloaded ,
1146- } : undefined ,
1147- },
1148- error_info: metric .errorName ? {
1149- error_name: metric .errorName ,
1150- stack_trace: metric .errorMessage ,
1151- } : undefined ,
11521149 },
11531150 },
11541151 };
1152+
1153+ // Add metric-specific fields based on type
1154+ if (metric .metricType === ' connection' && metric .driverConfig ) {
1155+ log .entry .sql_driver_log .system_configuration = {
1156+ driver_version: metric .driverConfig .driverVersion ,
1157+ driver_name: metric .driverConfig .driverName ,
1158+ runtime_name: ' Node.js' ,
1159+ runtime_version: metric .driverConfig .nodeVersion ,
1160+ runtime_vendor: metric .driverConfig .runtimeVendor ,
1161+ os_name: metric .driverConfig .platform ,
1162+ os_version: metric .driverConfig .osVersion ,
1163+ os_arch: metric .driverConfig .osArch ,
1164+ locale_name: metric .driverConfig .localeName ,
1165+ char_set_encoding: metric .driverConfig .charSetEncoding ,
1166+ process_name: metric .driverConfig .processName ,
1167+ };
1168+ } else if (metric .metricType === ' statement' ) {
1169+ log .entry .sql_driver_log .operation_latency_ms = metric .latencyMs ;
1170+
1171+ if (metric .resultFormat || metric .chunkCount ) {
1172+ log .entry .sql_driver_log .sql_operation = {
1173+ execution_result: metric .resultFormat ,
1174+ };
1175+
1176+ if (metric .chunkCount && metric .chunkCount > 0 ) {
1177+ log .entry .sql_driver_log .sql_operation .chunk_details = {
1178+ total_chunks_present: metric .chunkCount ,
1179+ total_chunks_iterated: metric .chunkCount ,
1180+ };
1181+ }
1182+ }
1183+ } else if (metric .metricType === ' error' ) {
1184+ log .entry .sql_driver_log .error_info = {
1185+ error_name: metric .errorName || ' UnknownError' ,
1186+ stack_trace: metric .errorMessage || ' ' ,
1187+ };
1188+ }
1189+
1190+ return log ;
11551191 }
11561192
11571193 private generateUUID(): string {
@@ -1189,10 +1225,15 @@ Collected once per connection:
11891225``` typescript
11901226interface DriverConfiguration {
11911227 driverVersion: string ;
1192- driverName: string ;
1228+ driverName: string ; // 'nodejs-sql-driver' (matches JDBC naming)
11931229 nodeVersion: string ;
11941230 platform: string ;
11951231 osVersion: string ;
1232+ osArch: string ; // Architecture (x64, arm64, etc.)
1233+ runtimeVendor: string ; // 'Node.js Foundation'
1234+ localeName: string ; // Locale (e.g., 'en_US')
1235+ charSetEncoding: string ; // Character encoding (e.g., 'UTF-8')
1236+ processName: string ; // Process name from process.title or script name
11961237
11971238 // Feature flags
11981239 cloudFetchEnabled: boolean ;
@@ -1207,6 +1248,14 @@ interface DriverConfiguration {
12071248}
12081249```
12091250
1251+ **System Configuration Fields** (matches JDBC implementation):
1252+ - **driverName**: Always set to `'nodejs-sql-driver'` to match the JDBC driver naming convention
1253+ - **osArch**: Obtained from `os.arch()` - reports CPU architecture (x64, arm64, ia32, etc.)
1254+ - **runtimeVendor**: Always set to `'Node.js Foundation'` (equivalent to JDBC's java.vendor)
1255+ - **localeName**: Extracted from the `LANG` environment variable in `language_country` format (e.g., `en_US`); defaults to `en_US`
1256+ - **charSetEncoding**: Always `'UTF-8'` (the Node.js default encoding), equivalent to JDBC's Charset.defaultCharset()
1257+ - **processName**: Obtained from `process.title` or extracted from `process.argv[1]` (the script name), equivalent to JDBC's ProcessNameUtil.getProcessName()
1258+
12101259### 4.3 Statement Metrics
12111260
12121261Aggregated per statement:
@@ -1277,14 +1326,104 @@ flowchart TD
12771326 L --> M[Lumberjack]
12781327```
12791328
1280- ### 5.2 Batching Strategy
1329+ ### 5.2 Payload Format
1330+
1331+ **CRITICAL**: The Node.js driver uses the same payload format as JDBC, with `protoLogs` (NOT `frontend_logs`).
1332+
1333+ #### Payload Structure
1334+
1335+ ``` typescript
1336+ interface DatabricksTelemetryPayload {
1337+ uploadTime: number ; // Timestamp in milliseconds
1338+ items: string []; // Required but unused (empty array)
1339+ protoLogs: string []; // Array of JSON-stringified log objects
1340+ }
1341+ ```
1342+
1343+ #### Example Payload
1344+
1345+ ``` json
1346+ {
1347+ "uploadTime" : 1706634000000 ,
1348+ "items" : [],
1349+ "protoLogs" : [
1350+ " {\" frontend_log_event_id\" :\" 550e8400-e29b-41d4-a716-446655440000\" ,\" context\" :{\" client_context\" :{\" timestamp_millis\" :1706634000000,\" user_agent\" :\" databricks-sql-nodejs/1.12.0\" }},\" entry\" :{\" sql_driver_log\" :{\" session_id\" :\" 01f0fd4d-2ed0-1469-bfee-b6c9c31cb586\" ,\" sql_statement_id\" :null,\" system_configuration\" :{\" driver_version\" :\" 1.12.0\" ,\" driver_name\" :\" nodejs-sql-driver\" ,\" runtime_name\" :\" Node.js\" ,\" runtime_version\" :\" v22.16.0\" ,\" runtime_vendor\" :\" Node.js Foundation\" ,\" os_name\" :\" linux\" ,\" os_version\" :\" 5.4.0-1153-aws-fips\" ,\" os_arch\" :\" x64\" ,\" locale_name\" :\" en_US\" ,\" char_set_encoding\" :\" UTF-8\" ,\" process_name\" :\" node\" }}}}" ,
1351+ " {\" frontend_log_event_id\" :\" 550e8400-e29b-41d4-a716-446655440001\" ,\" context\" :{\" client_context\" :{\" timestamp_millis\" :1706634001000,\" user_agent\" :\" databricks-sql-nodejs/1.12.0\" }},\" entry\" :{\" sql_driver_log\" :{\" session_id\" :\" 01f0fd4d-2ed0-1469-bfee-b6c9c31cb586\" ,\" sql_statement_id\" :\" 01f0fd4d-2ed0-1469-bfee-b6c9c31cb587\" ,\" operation_latency_ms\" :123,\" sql_operation\" :{\" execution_result\" :\" arrow\" ,\" chunk_details\" :{\" total_chunks_present\" :5,\" total_chunks_iterated\" :5}}}}}"
1352+ ]
1353+ }
1354+ ```
1355+
1356+ #### Log Object Structure
1357+
1358+ Each item in ` protoLogs ` is a JSON-stringified object with this structure:
1359+
1360+ ``` typescript
1361+ interface DatabricksTelemetryLog {
1362+ frontend_log_event_id: string ; // UUID v4
1363+ context: {
1364+ client_context: {
1365+ timestamp_millis: number ;
1366+ user_agent: string ; // "databricks-sql-nodejs/<version>"
1367+ };
1368+ };
1369+ entry: {
1370+ sql_driver_log: {
1371+ session_id? : string ; // Session UUID
1372+ sql_statement_id? : string ; // Statement UUID (null for connection events)
1373+
1374+ // Connection events only
1375+ system_configuration? : {
1376+ driver_version? : string ; // e.g., "1.12.0"
1377+ driver_name? : string ; // "nodejs-sql-driver"
1378+ runtime_name? : string ; // "Node.js"
1379+ runtime_version? : string ; // e.g., "v22.16.0"
1380+ runtime_vendor? : string ; // "Node.js Foundation"
1381+ os_name? : string ; // e.g., "linux"
1382+ os_version? : string ; // e.g., "5.4.0-1153-aws-fips"
1383+ os_arch? : string ; // e.g., "x64"
1384+ locale_name? : string ; // e.g., "en_US"
1385+ char_set_encoding? : string ; // e.g., "UTF-8"
1386+ process_name? : string ; // e.g., "node"
1387+ };
1388+
1389+ // Statement events only
1390+ operation_latency_ms? : number ;
1391+ sql_operation? : {
1392+ execution_result? : string ; // "inline" | "cloudfetch" | "arrow"
1393+ chunk_details? : {
1394+ total_chunks_present? : number ;
1395+ total_chunks_iterated? : number ;
1396+ };
1397+ };
1398+
1399+ // Error events only
1400+ error_info? : {
1401+ error_name: string ;
1402+ stack_trace: string ;
1403+ };
1404+ };
1405+ };
1406+ }
1407+ ```
1408+
1409+ **Key Points**:
1410+ - Each telemetry log is **JSON-stringified** before being added to the `protoLogs` array
1411+ - The `items` field is required but always empty
1412+ - The `uploadTime` is the timestamp at which the batch is exported
1413+ - Each log has a unique `frontend_log_event_id` (UUID v4)
1414+ - Connection events have `system_configuration` populated with all driver metadata
1415+ - Statement events have `operation_latency_ms` and optional `sql_operation` details
1416+ - Error events have `error_info` with the error name and message
1417+ - The `sql_statement_id` is `null` for connection events
1419+ ### 5.3 Batching Strategy
12811420
12821421- **Batch size**: Default 100 metrics
12831422- **Flush interval**: Default 5 seconds
12841423- **Force flush**: On connection close
12851424- **Background flushing**: Non-blocking with setInterval
12861425
1287- ### 5.3 Retry Strategy
1426+ ### 5.4 Retry Strategy
12881427
12891428- **Retryable errors**: 429, 500, 502, 503, 504, network timeouts
12901429- **Terminal errors**: 400, 401, 403, 404
0 commit comments