Python: using agents as KernelFunctions #10828

Closed
@@ -46,7 +46,7 @@ def get_item_price(
    return "$9.99"


def _create_kernel_with_chat_completionand_filter() -> Kernel:
def _create_kernel_with_chat_completion_and_filter() -> Kernel:
    """A helper function to create a kernel with a chat completion service and a filter."""
    kernel = Kernel()
    kernel.add_service(AzureChatCompletion())
@@ -72,7 +72,7 @@ async def main():
    # 1. Create the agent with a kernel instance that contains
    # the auto function invocation filter and the AI service
    agent = ChatCompletionAgent(
        kernel=_create_kernel_with_chat_completionand_filter(),
        kernel=_create_kernel_with_chat_completion_and_filter(),
        name="Host",
        instructions="Answer questions about the menu.",
    )
@@ -0,0 +1,180 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio

from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings
from semantic_kernel import Kernel
from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent
from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
from semantic_kernel.contents import ChatHistory
from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext

"""
This sample demonstrates how to build a conversational chatbot
using Semantic Kernel, featuring auto function calling,
using agents as functions, this includes letting a chat interaction
call an agent, and giving one agent another agent to do things.
"""

# System message defining the behavior and persona of the chat bot.
system_message = """
You are a chat bot. Your name is Mosscap and
you have one goal: figure out what people need.
Your full name, should you need to know it, is
Splendid Speckled Mosscap. You communicate
effectively, but you tend to answer with long
flowery prose. You are also a math wizard,
especially for adding and subtracting.
You also excel at joke telling, where your tone is often sarcastic.
Once you have the answer I am looking for,
you will return a full answer to me as soon as possible.
"""


# Define the function invocation filter that will be used by the kernel
async def function_invocation_filter(context: FunctionInvocationContext, next):
    """A filter that will be called for each function call in the response."""
    if "task" not in context.arguments:
        await next(context)
        return
    print(f" Agent {context.function.name} called with task: {context.arguments['task']}")
    await next(context)
    print(f" Response from agent {context.function.name}: {context.result.value}")


# Create and configure the kernel.
kernel = Kernel()
kernel.add_filter("function_invocation", function_invocation_filter)

# You can select from the following chat completion services that support function calling:
# - Services.OPENAI
# - Services.AZURE_OPENAI
# - Services.AZURE_AI_INFERENCE
# - Services.ANTHROPIC
# - Services.BEDROCK
# - Services.GOOGLE_AI
# - Services.MISTRAL_AI
# - Services.OLLAMA
# - Services.ONNX
# - Services.VERTEX_AI
# - Services.DEEPSEEK
# Please make sure you have configured your environment correctly for the selected chat completion service.
chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.OPENAI)

# Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default.
# With `auto_invoke=True`, the model will automatically choose and call functions as needed.
request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["ChatBot"]})

# Create a chat history to store the system message, initial messages, and the conversation.
history = ChatHistory()
history.add_system_message(system_message)

REVIEWER_NAME = "ArtDirector"
REVIEWER_INSTRUCTIONS = """
You are an art director who has opinions about copywriting born of a love for David Ogilvy.
You ask one of the copy-writing agents for a piece of copy, which you then review.
The goal is to determine if the given copy is acceptable to print.
If so, respond with the created copy.
If not, do not return it; instead, ask a copywriter again for copy, providing the previous copy and your feedback.
"""

COPYWRITER_NAME = "CopyWriter"
COPYWRITER_INSTRUCTIONS = """
You are a copywriter with ten years of experience and are known for brevity and a dry humor.
The goal is to refine and decide on the single best copy as an expert in the field.
Only provide a single proposal per response.
You're laser focused on the goal at hand.
Don't waste time with chit chat.
Consider suggestions when refining an idea.
"""

writer_agent = ChatCompletionAgent(
    service=chat_completion_service,
    name=COPYWRITER_NAME,
    description="This agent can write copy about any topic.",
    instructions=COPYWRITER_INSTRUCTIONS,
)
reviewer_agent = ChatCompletionAgent(
    service=chat_completion_service,
    name=REVIEWER_NAME,
    description="This agent can review copy and provide feedback; it has copywriters available to request copy from.",
    instructions=REVIEWER_INSTRUCTIONS,
    plugins=[writer_agent],
)

reviewer_agent.kernel.add_filter("function_invocation", function_invocation_filter)

kernel.add_plugins([reviewer_agent])


async def chat() -> bool:
    """
    Continuously prompt the user for input and show the assistant's response.
    Type 'exit' to exit.
    """
    try:
        user_input = input("User:> ")
    except (KeyboardInterrupt, EOFError):
        print("\n\nExiting chat...")
        return False

    if user_input.lower().strip() == "exit":
        print("\n\nExiting chat...")
        return False
    history.add_user_message(user_input)
    # Handle non-streaming responses
    result = await chat_completion_service.get_chat_message_content(
        chat_history=history, settings=request_settings, kernel=kernel
    )

    # Show the assistant's response and add it to the chat history
    if result:
        print(f"Mosscap:> {result}")
        history.add_message(result)

    return True


"""
Sample output:
Welcome to the chat bot!
Type 'exit' to exit.
Try to get some copy written by the copywriter; make sure to ask for it to be reviewed.
User:> write a slogan for electric vehicles
Mosscap:> Ah, the realm of electric vehicles, where the whispers of sustainability dance with the vibrant hum of
innovation! How about this for a slogan:

"Drive the Future: Silent, Smart, and Sustainable!"

This phrase encapsulates the essence of electric vehicles, inviting all to embrace a journey that is not only
forward-thinking but also harmoniously aligned with the gentle rhythms of our planet. Would you like to explore
more options or perhaps delve into another aspect of this electrifying topic?
User:> ask the art director for it
Agent ArtDirector called with task: Create a slogan for electric vehicles that captures their innovative and
sustainable essence.
Agent CopyWriter called with task: Create a slogan for electric vehicles that captures their innovative and
sustainable essence.
Response from agent CopyWriter: "Drive the Future: Silent, Smart, Sustainable."
Response from agent ArtDirector: "Drive the Future: Silent, Smart, Sustainable."
Mosscap:> The Art Director has conjured forth a splendid slogan for electric vehicles:

"Drive the Future: Silent, Smart, Sustainable."

This phrase beautifully encapsulates the innovative spirit and eco-friendly nature of electric vehicles.
If you seek further refinement or wish to explore additional ideas, simply let me know, and I shall be at your service!
"""


async def main() -> None:
    print(
        "Welcome to the chat bot!\n"
        " Type 'exit' to exit.\n"
        " Try to get some copy written by the copywriter; make sure to ask for it to be reviewed."
    )
    chatting = True
    while chatting:
        chatting = await chat()


if __name__ == "__main__":
    asyncio.run(main())
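
To make the nesting easier to see outside the chat loop, here is a hedged, condensed sketch of the same pattern: the reviewer agent receives the writer agent through plugins=[...], so it can call the writer as a function. It assumes OpenAI environment variables are configured; the direct get_response call and the wording of the instructions are illustrative, not part of this file.

import asyncio

from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_kernel.contents import ChatHistory


async def main() -> None:
    writer = ChatCompletionAgent(
        service=OpenAIChatCompletion(),
        name="CopyWriter",
        description="Writes a single piece of copy for a task.",
        instructions="Provide one short, dry-humored slogan for the given topic.",
    )
    reviewer = ChatCompletionAgent(
        service=OpenAIChatCompletion(),
        name="ArtDirector",
        description="Reviews copy; can call the CopyWriter for drafts.",
        instructions="Ask the CopyWriter for copy, review it, and return the approved version.",
        plugins=[writer],  # the writer agent is exposed to the reviewer as a callable function
    )

    history = ChatHistory()
    history.add_user_message("A slogan for electric vehicles, please.")
    response = await reviewer.get_response(history=history)
    print(response.content)


asyncio.run(main())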
@@ -1,7 +1,6 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio
from typing import TYPE_CHECKING

from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings
from semantic_kernel import Kernel
@@ -13,9 +12,6 @@
from semantic_kernel.core_plugins.time_plugin import TimePlugin
from semantic_kernel.functions import KernelArguments

if TYPE_CHECKING:
    pass

#####################################################################
# This sample demonstrates how to build a conversational chatbot #
# using Semantic Kernel, featuring manual function calling, #
31 changes: 20 additions & 11 deletions python/semantic_kernel/agents/agent.py
@@ -4,15 +4,16 @@
import uuid
from abc import ABC, abstractmethod
from collections.abc import AsyncIterable, Iterable
from typing import Any, ClassVar
from typing import TYPE_CHECKING, Annotated, Any, ClassVar

from pydantic import Field, model_validator

from semantic_kernel.agents.channels.agent_channel import AgentChannel
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.functions.kernel_plugin import KernelPlugin
from semantic_kernel.functions.kernel_function_decorator import kernel_function
from semantic_kernel.kernel import Kernel
from semantic_kernel.kernel_pydantic import KernelBaseModel
from semantic_kernel.prompt_template.kernel_prompt_template import KernelPromptTemplate
@@ -21,6 +22,9 @@
from semantic_kernel.utils.naming import generate_random_ascii_name
from semantic_kernel.utils.validation import AGENT_NAME_REGEX

if TYPE_CHECKING:
    pass

logger: logging.Logger = logging.getLogger(__name__)


@@ -53,13 +57,6 @@ class Agent(KernelBaseModel, ABC):
    name: str = Field(default_factory=lambda: f"agent_{generate_random_ascii_name()}", pattern=AGENT_NAME_REGEX)
    prompt_template: PromptTemplateBase | None = None

    @staticmethod
    def _get_plugin_name(plugin: KernelPlugin | object) -> str:
        """Helper method to get the plugin name."""
        if isinstance(plugin, KernelPlugin):
            return plugin.name
        return plugin.__class__.__name__

    @model_validator(mode="before")
    @classmethod
    def _configure_plugins(cls, data: Any) -> Any:
@@ -69,11 +66,23 @@ def _configure_plugins(cls, data: Any) -> Any:
        if not kernel:
            kernel = Kernel()
        for plugin in plugins:
            name = Agent._get_plugin_name(plugin)
            kernel.add_plugin(plugin, plugin_name=name)
            kernel.add_plugin(plugin)
        data["kernel"] = kernel
        return data

    def model_post_init(self, __context: Any) -> None:
        """Post initialization."""

        @kernel_function(name=self.name, description=self.description)
        async def _as_function(
            task: Annotated[str, "The task to perform."],
        ) -> Annotated[str, "The response from the agent."]:
            history = ChatHistory()
            history.add_user_message(task)
            return (await self.get_response(history=history)).content

Contributor:
This is not going to work with other types of agents except the chat completion agent.

Member Author:
For my info, why not? I saw get_response is an abstract method, so it should be available on every agent, or are the args different?

Contributor:
The args are different.

        setattr(self, "_as_function", _as_function)

    @abstractmethod
    async def get_response(self, *args, **kwargs) -> ChatMessageContent:
        """Get a response from the agent.
@@ -85,7 +85,7 @@ def add_plugin(
            self.plugins[plugin.name] = plugin
            return self.plugins[plugin.name]
        if not plugin_name:
            raise ValueError("plugin_name must be provided if a plugin is not supplied.")
            plugin_name = getattr(plugin, "name", plugin.__class__.__name__)
        if not isinstance(plugin_name, str):
            raise TypeError("plugin_name must be a string.")
        if plugin:
@@ -103,7 +103,7 @@
            return self.plugins[plugin_name]
        raise ValueError("plugin or parent_directory must be provided.")

    def add_plugins(self, plugins: list[KernelPlugin] | dict[str, KernelPlugin | object]) -> None:
    def add_plugins(self, plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object]) -> None:
        """Adds a list of plugins to the kernel's collection of plugins.

        Args:
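
A small sketch of the new fallback behavior (the MenuPlugin and NamedPlugin classes are illustrative, not from the repo): when plugin_name is omitted, add_plugin now uses the object's name attribute if present, otherwise its class name, instead of raising.

from semantic_kernel import Kernel
from semantic_kernel.functions import kernel_function


class MenuPlugin:
    @kernel_function(description="Lists today's specials.")
    def specials(self) -> str:
        return "Clam chowder, Cobb salad"


class NamedPlugin:
    name = "menu_extras"

    @kernel_function(description="Lists desserts.")
    def desserts(self) -> str:
        return "Chocolate cake"


kernel = Kernel()
kernel.add_plugin(MenuPlugin())   # no plugin_name given: falls back to the class name, "MenuPlugin"
kernel.add_plugin(NamedPlugin())  # a "name" attribute takes priority over the class name: "menu_extras"
print(sorted(kernel.plugins))     # expected: ['MenuPlugin', 'menu_extras']

This fallback is also what lets an Agent instance, which carries a name field, be registered under its own agent name.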
1 change: 1 addition & 0 deletions python/semantic_kernel/functions/kernel_plugin.py
@@ -238,6 +238,7 @@ def from_object(
            candidates = plugin_instance.items()
        else:
            candidates = inspect.getmembers(plugin_instance, inspect.ismethod)
            candidates.extend(inspect.getmembers(plugin_instance, inspect.isfunction))  # type: ignore
        # Read every method from the plugin instance
        functions = [
            KernelFunctionFromMethod(method=candidate, plugin_name=plugin_name)
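
The extra isfunction pass matters because the agent's _as_function is attached to the instance with setattr, so it is a plain function on the object rather than a bound method and would be missed by inspect.ismethod alone. A standard-library-only illustration (not Semantic Kernel code):

import inspect


class Holder:
    def bound(self) -> None:
        """A normal method; found by inspect.ismethod."""


def attached() -> None:
    """A plain function assigned onto the instance; found only by inspect.isfunction."""


obj = Holder()
setattr(obj, "attached", attached)

print([name for name, _ in inspect.getmembers(obj, inspect.ismethod)])    # ['bound']
print([name for name, _ in inspect.getmembers(obj, inspect.isfunction)])  # ['attached']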
26 changes: 24 additions & 2 deletions python/tests/unit/agents/test_agent.py
@@ -7,6 +7,9 @@

import pytest

from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.kernel import Kernel

if sys.version_info >= (3, 12):
    from typing import override  # pragma: no cover
else:
@@ -33,7 +36,7 @@ class MockAgent(Agent):

    channel_type: ClassVar[type[AgentChannel]] = MockChannel

    def __init__(self, name: str = "Test-Agent", description: str = "A test agent", id: str = None):
    def __init__(self, name: str = "TestAgent", description: str = "A test agent", id: str = None):
        args = {
            "name": name,
            "description": description,
@@ -47,7 +50,7 @@ async def create_channel(self) -> AgentChannel:

    @override
    async def get_response(self, *args, **kwargs):
        raise NotImplementedError
        return ChatMessageContent(role="assistant", content="test")

    @override
    async def invoke(self, *args, **kwargs):
@@ -171,3 +174,22 @@ def test_merge_arguments_both_not_none():

    assert merged["param1"] == "baseVal", "Should retain base param from agent"
    assert merged["param2"] == "override_param", "Should include param from override"


async def test_function_from_agent():
    agent = MockAgent()
    assert hasattr(agent, "_as_function")
    func = agent._as_function
    assert hasattr(func, "__kernel_function__")
    assert func.__kernel_function_description__ == agent.description
    assert func.__kernel_function_name__ == agent.name
    assert len(func.__kernel_function_parameters__) == 1
    assert (await func(task="")) == "test"


def test_add_agent_as_plugin(kernel: Kernel):
    agent = MockAgent()
    kernel.add_plugin(agent)
    assert len(kernel.plugins) == 1
    assert len(kernel.plugins[agent.name].functions) == 1
    assert kernel.plugins[agent.name].functions[agent.name].parameters[0].name == "task"
16 changes: 13 additions & 3 deletions python/tests/unit/kernel/test_kernel.py
@@ -1,6 +1,7 @@
# Copyright (c) Microsoft. All rights reserved.

import os
from dataclasses import dataclass
from pathlib import Path
from typing import Union
from unittest.mock import AsyncMock, MagicMock, patch
@@ -479,9 +480,18 @@ def test_plugin_no_plugin(kernel: Kernel):
    kernel.add_plugin(plugin_name="test")


def test_plugin_name_error(kernel: Kernel):
    with pytest.raises(ValueError):
        kernel.add_plugin(" ", None)
def test_plugin_name_from_class_name(kernel: Kernel):
    kernel.add_plugin(" ", None)
    assert "str" in kernel.plugins


def test_plugin_name_from_name_attribute(kernel: Kernel):
    @dataclass
    class TestPlugin:
        name: str = "test_plugin"

    kernel.add_plugin(TestPlugin(), None)
    assert "test_plugin" in kernel.plugins


def test_plugin_name_not_string_error(kernel: Kernel):