-
Notifications
You must be signed in to change notification settings - Fork 6.5k
feat: google genai integration with tool block #20096
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
428400d
21a2ba4
c5223be
c76a6a6
53bf1ca
f08199c
d294549
251b547
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,16 +1,9 @@ | ||
| import asyncio | ||
| import json | ||
| import logging | ||
| from collections.abc import Sequence | ||
| from io import BytesIO | ||
| from typing import ( | ||
| TYPE_CHECKING, | ||
| Any, | ||
| Dict, | ||
| Union, | ||
| Optional, | ||
| Type, | ||
| Tuple, | ||
| ) | ||
| from typing import TYPE_CHECKING, Any, Dict, Union, Optional, Type, Tuple, cast | ||
| import typing | ||
|
|
||
| import google.genai.types as types | ||
|
|
@@ -29,6 +22,7 @@ | |
| DocumentBlock, | ||
| VideoBlock, | ||
| ThinkingBlock, | ||
| ToolCallBlock, | ||
| ) | ||
| from llama_index.core.program.utils import _repair_incomplete_json | ||
| from tenacity import ( | ||
|
|
@@ -188,16 +182,33 @@ def chat_from_gemini_response( | |
| ) | ||
| additional_kwargs["thought_signatures"].append(part.thought_signature) | ||
| if part.function_call: | ||
| if "tool_calls" not in additional_kwargs: | ||
| additional_kwargs["tool_calls"] = [] | ||
| additional_kwargs["tool_calls"].append( | ||
| { | ||
| "id": part.function_call.id if part.function_call.id else "", | ||
| "name": part.function_call.name, | ||
| "args": part.function_call.args, | ||
| "thought_signature": part.thought_signature, | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I think we might be losing thought signatures here? Although not totally sure either
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Yeah, we might be, but ToolCallBlock does not have something that could properly handle the signature and I did not want to open it to additional_kwargs chaos... Although I can do that if we think it is necessary |
||
| } | ||
| if ( | ||
| part.thought_signature | ||
| not in additional_kwargs["thought_signatures"] | ||
| ): | ||
| additional_kwargs["thought_signatures"].append( | ||
| part.thought_signature | ||
| ) | ||
| content_blocks.append( | ||
| ToolCallBlock( | ||
| tool_call_id=part.function_call.id or "", | ||
| tool_name=part.function_call.name or "", | ||
| tool_kwargs=part.function_call.args or {}, | ||
| ) | ||
| ) | ||
| if part.function_response: | ||
| # follow the same pattern as for transforming a chatmessage into a gemini message: if it's a function response, package it alone and return it | ||
| additional_kwargs["tool_call_id"] = part.function_response.id | ||
| role = ROLES_FROM_GEMINI[top_candidate.content.role] | ||
| print("RESPONSE", json.dumps(part.function_response.response)) | ||
| return ChatResponse( | ||
| message=ChatMessage( | ||
| role=role, content=json.dumps(part.function_response.response) | ||
| ), | ||
| raw=raw, | ||
| additional_kwargs=additional_kwargs, | ||
| ) | ||
|
|
||
| if thought_tokens: | ||
| thinking_blocks = [ | ||
| i | ||
|
|
@@ -271,6 +282,7 @@ async def chat_message_to_gemini( | |
| message: ChatMessage, use_file_api: bool = False, client: Optional[Client] = None | ||
| ) -> Union[types.Content, types.File]: | ||
| """Convert ChatMessages to Gemini-specific history, including ImageDocuments.""" | ||
| unique_tool_calls = [] | ||
| parts = [] | ||
| part = None | ||
| for index, block in enumerate(message.blocks): | ||
|
|
@@ -326,6 +338,11 @@ async def chat_message_to_gemini( | |
| part.thought_signature = block.additional_information.get( | ||
| "thought_signature", None | ||
| ) | ||
| elif isinstance(block, ToolCallBlock): | ||
| part = types.Part.from_function_call( | ||
| name=block.tool_name, args=cast(Dict[str, Any], block.tool_kwargs) | ||
| ) | ||
| unique_tool_calls.append((block.tool_name, str(block.tool_kwargs))) | ||
| else: | ||
| msg = f"Unsupported content block type: {type(block).__name__}" | ||
| raise ValueError(msg) | ||
|
|
@@ -343,15 +360,20 @@ async def chat_message_to_gemini( | |
|
|
||
| for tool_call in message.additional_kwargs.get("tool_calls", []): | ||
| if isinstance(tool_call, dict): | ||
| part = types.Part.from_function_call( | ||
| name=tool_call.get("name"), args=tool_call.get("args") | ||
| ) | ||
| part.thought_signature = tool_call.get("thought_signature") | ||
| if ( | ||
| tool_call.get("name", ""), | ||
| str(tool_call.get("args", {})), | ||
| ) not in unique_tool_calls: | ||
| part = types.Part.from_function_call( | ||
| name=tool_call.get("name", ""), args=tool_call.get("args", {}) | ||
| ) | ||
| part.thought_signature = tool_call.get("thought_signature") | ||
| else: | ||
| part = types.Part.from_function_call( | ||
| name=tool_call.name, args=tool_call.args | ||
| ) | ||
| part.thought_signature = tool_call.thought_signature | ||
| if (tool_call.name, str(tool_call.args)) not in unique_tool_calls: | ||
| part = types.Part.from_function_call( | ||
| name=tool_call.name, args=tool_call.args | ||
| ) | ||
| part.thought_signature = tool_call.thought_signature | ||
| parts.append(part) | ||
|
|
||
| # the tool call id is the name of the tool | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -27,7 +27,7 @@ dev = [ | |
|
|
||
| [project] | ||
| name = "llama-index-llms-google-genai" | ||
| version = "0.6.2" | ||
| version = "0.7.0" | ||
| description = "llama-index llms google genai integration" | ||
| authors = [{name = "Your Name", email = "[email protected]"}] | ||
| requires-python = ">=3.9,<4.0" | ||
|
|
@@ -36,7 +36,7 @@ license = "MIT" | |
| dependencies = [ | ||
| "pillow>=10.2.0", | ||
| "google-genai>=1.24.0,<2", | ||
| "llama-index-core>=0.14.3,<0.15", | ||
| "llama-index-core>=0.14.5,<0.15", | ||
| ] | ||
|
|
||
| [tool.codespell] | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.