Skip to content

Commit

Permalink
Python: improve agent samples and chat history handling (#10301)
Browse files Browse the repository at this point in the history
### Motivation and Context

<!-- Thank you for your contribution to the semantic-kernel repo!
Please help reviewers and future users, providing the following
information:
  1. Why is this change required?
  2. What problem does it solve?
  3. What scenario does it contribute to?
  4. If it fixes an open issue, please link to the issue here.
-->
When the same object that is present in the `history_reducer` attribute of the
agent is also passed to `agent.reduce_history`, the function doesn't behave
accurately. This fixes that.

Also updates the sample to be a bit more concise.

Also fixes the way the single_dispatch is set up in ChatHistory.

Also ensures system/developer messages are not reduced away as that
might impact performance.

The `reduce_history` method was removed from the agent base class, in
favor of having the caller manage the change history and reduction as
needed. The `reduce_history` was added to the agent group chat, as the
chat history is managed internally as agents are invoked.

### Description

<!-- Describe your changes, the overall approach, the underlying design.
These notes will help understanding how your code works. Thanks! -->

### Contribution Checklist

<!-- Before submitting this PR, please make sure: -->

- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone 😄

---------

Co-authored-by: Evan Mattson <[email protected]>
Co-authored-by: Evan Mattson <[email protected]>
  • Loading branch information
3 people authored Jan 29, 2025
1 parent 7e20631 commit ec9b980
Show file tree
Hide file tree
Showing 28 changed files with 763 additions and 858 deletions.
303 changes: 131 additions & 172 deletions python/samples/concepts/agents/chat_completion_history_reducer.py

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@
# The purpose of this sample is to demonstrate how to use a kernel function and use a chat history reducer.
# To build a basic chatbot, it is sufficient to use a ChatCompletionService with a chat history directly.

# Toggle this flag to view the chat history summary after a reduction was performed.
view_chat_history_summary_after_reduction = True

# You can select from the following chat completion services:
# - Services.OPENAI
# - Services.AZURE_OPENAI
Expand Down Expand Up @@ -122,7 +125,8 @@ async def chat() -> bool:
print("\n\nExiting chat...")
return False

await summarization_reducer.reduce()
if is_reduced := await summarization_reducer.reduce():
print(f"@ History reduced to {len(summarization_reducer.messages)} messages.")

kernel_arguments = KernelArguments(
settings=request_settings,
Expand All @@ -136,6 +140,15 @@ async def chat() -> bool:
summarization_reducer.add_user_message(user_input)
summarization_reducer.add_message(answer.value[0])

if view_chat_history_summary_after_reduction and is_reduced:
for msg in summarization_reducer.messages:
if msg.metadata and msg.metadata.get("__summary__"):
print("*" * 60)
print(f"Chat History Reduction Summary: {msg.content}")
print("*" * 60)
break
print("\n")

return True


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@
# The purpose of this sample is to demonstrate how to use a kernel function and use a chat history reducer.
# To build a basic chatbot, it is sufficient to use a ChatCompletionService with a chat history directly.

# Toggle this flag to view the chat history summary after a reduction was performed.
view_chat_history_summary_after_reduction = True

# You can select from the following chat completion services:
# - Services.OPENAI
# - Services.AZURE_OPENAI
Expand Down Expand Up @@ -136,7 +139,8 @@ async def chat() -> bool:
print("\n\nExiting chat...")
return False

await summarization_reducer.reduce()
if is_reduced := await summarization_reducer.reduce():
print(f"@ History reduced to {len(summarization_reducer.messages)} messages.")

kernel_arguments = KernelArguments(
settings=request_settings,
Expand Down Expand Up @@ -169,17 +173,26 @@ async def chat() -> bool:
frc.append(item)

for i, item in enumerate(fcc):
summarization_reducer.add_assistant_message_list([item])
summarization_reducer.add_assistant_message([item])
processed_fccs.add(item.id)
# Safely check if there's a matching FunctionResultContent
if i < len(frc):
assert fcc[i].id == frc[i].id # nosec
summarization_reducer.add_tool_message_list([frc[i]])
summarization_reducer.add_tool_message([frc[i]])
processed_frcs.add(item.id)

# Since this example is showing how to include FunctionCallContent and FunctionResultContent
# in the summary, we need to add them to the chat history and also to the processed sets.

if view_chat_history_summary_after_reduction and is_reduced:
for msg in summarization_reducer.messages:
if msg.metadata and msg.metadata.get("__summary__"):
print("*" * 60)
print(f"Chat History Reduction Summary: {msg.content}")
print("*" * 60)
break
print("\n")

return True


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ class Reasoning(KernelBaseModel):


async def main():
stream = True
stream = False
if stream:
answer = kernel.invoke_stream(
chat_function,
Expand All @@ -127,7 +127,8 @@ async def main():
chat_function,
chat_history=history,
)
print(f"Mosscap:> {result}")
reasoned_result = Reasoning.model_validate_json(result.value[0].content)
print(f"Mosscap:> {reasoned_result}")
history.add_assistant_message(str(result))


Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio
import os

from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent
from semantic_kernel.agents.open_ai.open_ai_assistant_agent import OpenAIAssistantAgent
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.utils.author_role import AuthorRole
from semantic_kernel.kernel import Kernel
from semantic_kernel import Kernel
from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent
from semantic_kernel.contents import AuthorRole, ChatMessageContent

#####################################################################
# The following sample demonstrates how to create an OpenAI #
Expand All @@ -15,35 +14,21 @@
#####################################################################


AGENT_NAME = "FileSearch"
AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file."
# Create the instance of the Kernel
kernel = Kernel()

# Note: you may toggle this to switch between AzureOpenAI and OpenAI
use_azure_openai = True


# A helper method to invoke the agent with the user input
async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None:
"""Invoke the agent with the user input."""
await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input))

print(f"# {AuthorRole.USER}: '{input}'")

async for content in agent.invoke(thread_id=thread_id):
if content.role != AuthorRole.TOOL:
print(f"# {content.role}: {content.content}")
use_azure_openai = False


async def main():
# Create the instance of the Kernel
kernel = Kernel()
# Get the path to the employees.pdf file
pdf_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources", "employees.pdf")

# Define a service_id for the sample
service_id = "agent"

    # Get the path to the employees.pdf file
    pdf_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources", "employees.pdf")

AGENT_NAME = "FileSearch"
AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file."
# Create the agent configuration
if use_azure_openai:
agent = await AzureAssistantAgent.create(
Expand All @@ -67,10 +52,23 @@ async def main():
# Define a thread and invoke the agent with the user input
thread_id = await agent.create_thread()

user_inputs = {
"Who is the youngest employee?",
"Who works in sales?",
"I have a customer request, who can help me?",
}

try:
await invoke_agent(agent, thread_id=thread_id, input="Who is the youngest employee?")
await invoke_agent(agent, thread_id=thread_id, input="Who works in sales?")
await invoke_agent(agent, thread_id=thread_id, input="I have a customer request, who can help me?")
for user_input in user_inputs:
await agent.add_chat_message(
thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input)
)

print(f"# User: '{user_input}'")

async for content in agent.invoke(thread_id=thread_id):
if content.role != AuthorRole.TOOL:
print(f"# Agent: {content.content}")
finally:
[await agent.delete_file(file_id) for file_id in agent.file_search_file_ids]
await agent.delete_thread(thread_id)
Expand Down
73 changes: 28 additions & 45 deletions python/samples/getting_started_with_agents/step1_agent.py
Original file line number Diff line number Diff line change
@@ -1,66 +1,49 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio
from functools import reduce

from semantic_kernel import Kernel
from semantic_kernel.agents import ChatCompletionAgent
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.contents.utils.author_role import AuthorRole
from semantic_kernel.kernel import Kernel
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_kernel.contents import ChatHistory

###################################################################
# The following sample demonstrates how to create a simple, #
# non-group agent that repeats the user message in the voice #
# of a pirate and then ends with a parrot sound. #
###################################################################

# To toggle streaming or non-streaming mode, change the following boolean
streaming = True
# Create the instance of the Kernel
kernel = Kernel()

# Define the agent name and instructions
PARROT_NAME = "Parrot"
PARROT_INSTRUCTIONS = "Repeat the user message in the voice of a pirate and then end with a parrot sound."
# Add the OpenAIChatCompletion AI Service to the Kernel
kernel.add_service(OpenAIChatCompletion(service_id="agent"))


async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: ChatHistory):
"""Invoke the agent with the user input."""
chat.add_user_message(input)

print(f"# {AuthorRole.USER}: '{input}'")

if streaming:
contents = []
content_name = ""
async for content in agent.invoke_stream(chat):
content_name = content.name
contents.append(content)
streaming_chat_message = reduce(lambda first, second: first + second, contents)
print(f"# {content.role} - {content_name or '*'}: '{streaming_chat_message}'")
chat.add_message(streaming_chat_message)
else:
async for content in agent.invoke(chat):
print(f"# {content.role} - {content.name or '*'}: '{content.content}'")
chat.add_message(content)
# Define the agent with name and instructions
AGENT_NAME = "Parrot"
AGENT_INSTRUCTIONS = "You are a helpful parrot that repeats the user message in a pirate voice."
agent = ChatCompletionAgent(service_id="agent", kernel=kernel, name=AGENT_NAME)


async def main():
# Create the instance of the Kernel
kernel = Kernel()

# Add the OpenAIChatCompletion AI Service to the Kernel
kernel.add_service(AzureChatCompletion(service_id="agent"))

# Create the agent
agent = ChatCompletionAgent(service_id="agent", kernel=kernel, name=PARROT_NAME, instructions=PARROT_INSTRUCTIONS)

# Define the chat history
chat = ChatHistory()

# Respond to user input
await invoke_agent(agent, "Fortune favors the bold.", chat)
await invoke_agent(agent, "I came, I saw, I conquered.", chat)
await invoke_agent(agent, "Practice makes perfect.", chat)
chat_history = ChatHistory()
chat_history.add_developer_message(AGENT_INSTRUCTIONS)

user_inputs = [
"Fortune favors the bold.",
"I came, I saw, I conquered.",
"Practice makes perfect.",
]
for user_input in user_inputs:
# Add the user input to the chat history
chat_history.add_user_message(user_input)
print(f"# User: '{user_input}'")
# Invoke the agent to get a response
async for content in agent.invoke(chat_history):
# Add the response to the chat history
chat_history.add_message(content)
print(f"# Agent - {content.name or '*'}: '{content.content}'")


if __name__ == "__main__":
Expand Down
Loading

0 comments on commit ec9b980

Please sign in to comment.