@@ -1446,6 +1446,33 @@ def tool_messages(tool_requests_ids, tool_call_result, expect_content):
1446
1446
def choice_content(response):
    """Return the ``content`` field of a non-streaming model *response*."""
    content = response["content"]
    return content
1448
1448
1449
@staticmethod
def get_stream_body_content(body):
    """Reassemble content blocks from an Anthropic Claude streaming body.

    Iterates the event stream in *body*, decoding each chunk as JSON and
    stitching ``text_delta`` fragments and partial tool-input JSON back
    into complete content blocks.

    Args:
        body: iterable of stream events shaped like
            ``{"chunk": {"bytes": b"<json>"}}``.

    Returns:
        List of content-block dicts in arrival order; ``tool_use`` blocks
        gain an ``input`` key parsed from the accumulated partial JSON.
    """
    blocks = []
    current = {}
    partial_input = ""
    for event in body:
        raw = event["chunk"].get("bytes", b"")
        message = json.loads(raw.decode("utf-8"))
        kind = message.get("type")
        if kind is None:
            # Not an Anthropic-style event; ignore it.
            continue
        if kind == "content_block_start":
            current = message["content_block"]
        elif kind == "content_block_delta":
            delta = message["delta"]
            if delta["type"] == "text_delta":
                current["text"] += delta["text"]
            elif delta["type"] == "input_json_delta":
                partial_input += delta["partial_json"]
        elif kind == "content_block_stop":
            # Tool-input JSON arrives in fragments; parse it only once the
            # block is closed.
            if partial_input:
                current["input"] = json.loads(partial_input)
            blocks.append(current)
            current = None
            partial_input = ""
    return blocks
1449
1476
1450
1477
class AmazonNovaModel :
1451
1478
@staticmethod
@@ -1524,6 +1551,43 @@ def tool_messages(tool_requests_ids, tool_call_result, expect_content):
1524
1551
def choice_content(response):
    """Return the ``content`` field of the output message in *response*."""
    message = response["output"]["message"]
    return message["content"]
1526
1553
1554
@staticmethod
def get_stream_body_content(body):
    """Reassemble content blocks from an Amazon Nova streaming body.

    Decodes each event chunk as JSON and accumulates text deltas into a
    running text block, while tool-use blocks are captured from
    ``contentBlockStart``/``contentBlockStop`` pairs.

    Args:
        body: iterable of stream events shaped like
            ``{"chunk": {"bytes": b"<json>"}}``.

    Returns:
        List of content-block dicts (text blocks and ``toolUse`` blocks)
        in the order they were completed.
    """
    blocks = []
    text_block = {}
    tool_block = {}
    for event in body:
        raw = event["chunk"].get("bytes", b"")
        message = json.loads(raw.decode("utf-8"))
        if "contentBlockDelta" in message:
            delta = message["contentBlockDelta"]["delta"]
            if "text" in delta:
                text_block.setdefault("text", "")
                text_block["text"] += delta["text"]
            elif "toolUse" in delta:
                # assumes the entire tool-input JSON arrives in a single
                # delta — TODO confirm against Nova stream behavior
                tool_block["toolUse"]["input"] = json.loads(
                    delta["toolUse"]["input"]
                )
        elif "contentBlockStart" in message:
            # A new block starting closes out any accumulated text.
            if text_block:
                blocks.append(text_block)
                text_block = {}
            start = message["contentBlockStart"]["start"]
            if "toolUse" in start:
                tool_block = start
        elif "contentBlockStop" in message:
            if tool_block:
                blocks.append(tool_block)
                tool_block = {}
        elif "messageStop" in message:
            if text_block:
                blocks.append(text_block)
                text_block = {}
    return blocks
1527
1591
1528
1592
def invoke_model_tool_call (
1529
1593
span_exporter ,
@@ -2110,54 +2174,7 @@ def invoke_model_with_response_stream_tool_call(
2110
2174
modelId = llm_model_value ,
2111
2175
)
2112
2176
2113
- content = []
2114
- content_block = {}
2115
- # used only by anthropic claude
2116
- input_json_buf = ""
2117
- # used only by amazon nova
2118
- tool_use = {}
2119
- for event in response_0 ["body" ]:
2120
- json_bytes = event ["chunk" ].get ("bytes" , b"" )
2121
- decoded = json_bytes .decode ("utf-8" )
2122
- chunk = json .loads (decoded )
2123
-
2124
- # anthropic claude
2125
- if (message_type := chunk .get ("type" )) is not None :
2126
- if message_type == "content_block_start" :
2127
- content_block = chunk ["content_block" ]
2128
- elif message_type == "content_block_delta" :
2129
- if chunk ["delta" ]["type" ] == "text_delta" :
2130
- content_block ["text" ] += chunk ["delta" ]["text" ]
2131
- elif chunk ["delta" ]["type" ] == "input_json_delta" :
2132
- input_json_buf += chunk ["delta" ]["partial_json" ]
2133
- elif message_type == "content_block_stop" :
2134
- if input_json_buf :
2135
- content_block ["input" ] = json .loads (input_json_buf )
2136
- content .append (content_block )
2137
- content_block = None
2138
- input_json_buf = ""
2139
- else :
2140
- if "contentBlockDelta" in chunk :
2141
- delta = chunk ["contentBlockDelta" ]["delta" ]
2142
- if "text" in delta :
2143
- content_block .setdefault ("text" , "" )
2144
- content_block ["text" ] += delta ["text" ]
2145
- elif "toolUse" in delta :
2146
- tool_use ["toolUse" ]["input" ] = json .loads (
2147
- delta ["toolUse" ]["input" ]
2148
- )
2149
- elif "contentBlockStart" in chunk :
2150
- if content_block :
2151
- content .append (content_block )
2152
- content_block = {}
2153
- start = chunk ["contentBlockStart" ]["start" ]
2154
- if "toolUse" in start :
2155
- tool_use = start
2156
- elif "contentBlockStop" in chunk :
2157
- if tool_use :
2158
- content .append (tool_use )
2159
- tool_use = {}
2160
-
2177
+ content = llm_model_config .get_stream_body_content (response_0 ["body" ])
2161
2178
assert content
2162
2179
2163
2180
tool_requests_ids = llm_model_config .tool_requests_ids_from_stream (content )
@@ -2179,34 +2196,9 @@ def invoke_model_with_response_stream_tool_call(
2179
2196
modelId = llm_model_value ,
2180
2197
)
2181
2198
2182
- content_block = {}
2183
- response_1_content = []
2184
- for event in response_1 ["body" ]:
2185
- json_bytes = event ["chunk" ].get ("bytes" , b"" )
2186
- decoded = json_bytes .decode ("utf-8" )
2187
- chunk = json .loads (decoded )
2188
-
2189
- # anthropic claude
2190
- if (message_type := chunk .get ("type" )) is not None :
2191
- if message_type == "content_block_start" :
2192
- content_block = chunk ["content_block" ]
2193
- elif message_type == "content_block_delta" :
2194
- if chunk ["delta" ]["type" ] == "text_delta" :
2195
- content_block ["text" ] += chunk ["delta" ]["text" ]
2196
- elif message_type == "content_block_stop" :
2197
- response_1_content .append (content_block )
2198
- content_block = None
2199
- else :
2200
- if "contentBlockDelta" in chunk :
2201
- delta = chunk ["contentBlockDelta" ]["delta" ]
2202
- if "text" in delta :
2203
- content_block .setdefault ("text" , "" )
2204
- content_block ["text" ] += delta ["text" ]
2205
- elif "messageStop" in chunk :
2206
- if content_block :
2207
- response_1_content .append (content_block )
2208
- content_block = {}
2209
-
2199
+ response_1_content = llm_model_config .get_stream_body_content (
2200
+ response_1 ["body" ]
2201
+ )
2210
2202
assert response_1_content
2211
2203
2212
2204
(span_0 , span_1 ) = span_exporter .get_finished_spans ()
0 commit comments