Commit eae8841

Move stream content extraction for tool calls tests to a couple of helpers

1 parent aba0291, commit eae8841

1 file changed: +66 -76 lines changed

instrumentation/opentelemetry-instrumentation-botocore/tests/test_botocore_bedrock.py (+66 -76)

@@ -1446,6 +1446,32 @@ def tool_messages(tool_requests_ids, tool_call_result, expect_content):
     def choice_content(response):
         return response["content"]
 
+    def get_stream_body_content(body):
+        content = []
+        content_block = {}
+        input_json_buf = ""
+        for event in body:
+            json_bytes = event["chunk"].get("bytes", b"")
+            decoded = json_bytes.decode("utf-8")
+            chunk = json.loads(decoded)
+
+            if (message_type := chunk.get("type")) is not None:
+                if message_type == "content_block_start":
+                    content_block = chunk["content_block"]
+                elif message_type == "content_block_delta":
+                    if chunk["delta"]["type"] == "text_delta":
+                        content_block["text"] += chunk["delta"]["text"]
+                    elif chunk["delta"]["type"] == "input_json_delta":
+                        input_json_buf += chunk["delta"]["partial_json"]
+                elif message_type == "content_block_stop":
+                    if input_json_buf:
+                        content_block["input"] = json.loads(input_json_buf)
+                    content.append(content_block)
+                    content_block = None
+                    input_json_buf = ""
+
+        return content
+
 
 class AmazonNovaModel:
     @staticmethod
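
The Anthropic Claude helper added above folds the model's streaming events (content_block_start / content_block_delta / content_block_stop) back into a list of content blocks, buffering input_json_delta fragments until the block closes. A minimal sketch of what it consumes and returns, using hand-built events rather than a real Bedrock response; the payload shapes are illustrative assumptions, and the helper is called here as a plain function even though in the test it lives on the model-config class:

    import json

    def _event(payload):
        # Wrap a dict the way invoke_model_with_response_stream delivers chunks.
        return {"chunk": {"bytes": json.dumps(payload).encode("utf-8")}}

    fake_body = [
        _event({"type": "content_block_start",
                "content_block": {"type": "text", "text": ""}}),
        _event({"type": "content_block_delta",
                "delta": {"type": "text_delta", "text": "Checking the weather."}}),
        _event({"type": "content_block_stop"}),
        _event({"type": "content_block_start",
                "content_block": {"type": "tool_use", "id": "toolu_01", "name": "get_weather"}}),
        _event({"type": "content_block_delta",
                "delta": {"type": "input_json_delta", "partial_json": '{"city": '}}),
        _event({"type": "content_block_delta",
                "delta": {"type": "input_json_delta", "partial_json": '"Seattle"}'}}),
        _event({"type": "content_block_stop"}),
    ]

    # One dict per content block; tool_use blocks get the accumulated JSON under "input".
    content = get_stream_body_content(fake_body)
    # -> [{"type": "text", "text": "Checking the weather."},
    #     {"type": "tool_use", "id": "toolu_01", "name": "get_weather",
    #      "input": {"city": "Seattle"}}]
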
@@ -1524,6 +1550,42 @@ def tool_messages(tool_requests_ids, tool_call_result, expect_content):
     def choice_content(response):
         return response["output"]["message"]["content"]
 
+    def get_stream_body_content(body):
+        content = []
+        content_block = {}
+        tool_use = {}
+        for event in body:
+            json_bytes = event["chunk"].get("bytes", b"")
+            decoded = json_bytes.decode("utf-8")
+            chunk = json.loads(decoded)
+
+            if "contentBlockDelta" in chunk:
+                delta = chunk["contentBlockDelta"]["delta"]
+                if "text" in delta:
+                    content_block.setdefault("text", "")
+                    content_block["text"] += delta["text"]
+                elif "toolUse" in delta:
+                    tool_use["toolUse"]["input"] = json.loads(
+                        delta["toolUse"]["input"]
+                    )
+            elif "contentBlockStart" in chunk:
+                if content_block:
+                    content.append(content_block)
+                    content_block = {}
+                start = chunk["contentBlockStart"]["start"]
+                if "toolUse" in start:
+                    tool_use = start
+            elif "contentBlockStop" in chunk:
+                if tool_use:
+                    content.append(tool_use)
+                    tool_use = {}
+            elif "messageStop" in chunk:
+                if content_block:
+                    content.append(content_block)
+                    content_block = {}
+
+        return content
+
 
 def invoke_model_tool_call(
     span_exporter,
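
The Amazon Nova helper added above handles the other streaming shape, where each chunk is keyed by event name (contentBlockDelta, contentBlockStart, contentBlockStop, messageStop) and tool input arrives as a single JSON string. A comparable hand-built sketch; the payloads are illustrative assumptions, not a captured response:

    import json

    def _event(payload):
        # Wrap a dict the way invoke_model_with_response_stream delivers chunks.
        return {"chunk": {"bytes": json.dumps(payload).encode("utf-8")}}

    fake_body = [
        _event({"contentBlockDelta": {"delta": {"text": "Checking the weather."}}}),
        _event({"contentBlockStart": {"start": {"toolUse": {
            "toolUseId": "tool-1", "name": "get_weather"}}}}),
        _event({"contentBlockDelta": {"delta": {"toolUse": {"input": '{"city": "Seattle"}'}}}}),
        _event({"contentBlockStop": {}}),
        _event({"messageStop": {"stopReason": "tool_use"}}),
    ]

    # Text deltas accumulate into one block; the toolUse start/delta pair becomes
    # a second block with its input string parsed into a dict.
    content = get_stream_body_content(fake_body)
    # -> [{"text": "Checking the weather."},
    #     {"toolUse": {"toolUseId": "tool-1", "name": "get_weather",
    #                  "input": {"city": "Seattle"}}}]
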
@@ -2110,54 +2172,7 @@ def invoke_model_with_response_stream_tool_call(
         modelId=llm_model_value,
     )
 
-    content = []
-    content_block = {}
-    # used only by anthropic claude
-    input_json_buf = ""
-    # used only by amazon nova
-    tool_use = {}
-    for event in response_0["body"]:
-        json_bytes = event["chunk"].get("bytes", b"")
-        decoded = json_bytes.decode("utf-8")
-        chunk = json.loads(decoded)
-
-        # anthropic claude
-        if (message_type := chunk.get("type")) is not None:
-            if message_type == "content_block_start":
-                content_block = chunk["content_block"]
-            elif message_type == "content_block_delta":
-                if chunk["delta"]["type"] == "text_delta":
-                    content_block["text"] += chunk["delta"]["text"]
-                elif chunk["delta"]["type"] == "input_json_delta":
-                    input_json_buf += chunk["delta"]["partial_json"]
-            elif message_type == "content_block_stop":
-                if input_json_buf:
-                    content_block["input"] = json.loads(input_json_buf)
-                content.append(content_block)
-                content_block = None
-                input_json_buf = ""
-        else:
-            if "contentBlockDelta" in chunk:
-                delta = chunk["contentBlockDelta"]["delta"]
-                if "text" in delta:
-                    content_block.setdefault("text", "")
-                    content_block["text"] += delta["text"]
-                elif "toolUse" in delta:
-                    tool_use["toolUse"]["input"] = json.loads(
-                        delta["toolUse"]["input"]
-                    )
-            elif "contentBlockStart" in chunk:
-                if content_block:
-                    content.append(content_block)
-                    content_block = {}
-                start = chunk["contentBlockStart"]["start"]
-                if "toolUse" in start:
-                    tool_use = start
-            elif "contentBlockStop" in chunk:
-                if tool_use:
-                    content.append(tool_use)
-                    tool_use = {}
-
+    content = llm_model_config.get_stream_body_content(response_0["body"])
     assert content
 
     tool_requests_ids = llm_model_config.tool_requests_ids_from_stream(content)
@@ -2179,34 +2194,9 @@ def invoke_model_with_response_stream_tool_call(
         modelId=llm_model_value,
     )
 
-    content_block = {}
-    response_1_content = []
-    for event in response_1["body"]:
-        json_bytes = event["chunk"].get("bytes", b"")
-        decoded = json_bytes.decode("utf-8")
-        chunk = json.loads(decoded)
-
-        # anthropic claude
-        if (message_type := chunk.get("type")) is not None:
-            if message_type == "content_block_start":
-                content_block = chunk["content_block"]
-            elif message_type == "content_block_delta":
-                if chunk["delta"]["type"] == "text_delta":
-                    content_block["text"] += chunk["delta"]["text"]
-            elif message_type == "content_block_stop":
-                response_1_content.append(content_block)
-                content_block = None
-        else:
-            if "contentBlockDelta" in chunk:
-                delta = chunk["contentBlockDelta"]["delta"]
-                if "text" in delta:
-                    content_block.setdefault("text", "")
-                    content_block["text"] += delta["text"]
-            elif "messageStop" in chunk:
-                if content_block:
-                    response_1_content.append(content_block)
-                    content_block = {}
-
+    response_1_content = llm_model_config.get_stream_body_content(
+        response_1["body"]
+    )
    assert response_1_content
 
     (span_0, span_1) = span_exporter.get_finished_spans()
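
With both helpers in place, each call site picks the right parser through the per-model config object, so the test body no longer branches on the streaming format inline. Roughly, under the assumption that llm_model_config is one of the model-config classes (names other than AmazonNovaModel are inferred from context, not taken from the diff):

    llm_model_config = AmazonNovaModel  # or the Anthropic Claude config class
    response = client.invoke_model_with_response_stream(
        body=body, modelId=llm_model_value
    )
    content = llm_model_config.get_stream_body_content(response["body"])
    assert content
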
