Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions genai-function-calling/openai-agents/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ COPY requirements.txt /tmp
RUN --mount=type=cache,target=/root/.cache/pip pip install -r /tmp/requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip edot-bootstrap --action=install

COPY main.py /
COPY mcp_server.py /
COPY *.py /

ENTRYPOINT [ "opentelemetry-instrument", "python", "main.py" ]
6 changes: 3 additions & 3 deletions genai-function-calling/openai-agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@ dotenv run --no-override -- opentelemetry-instrument python main.py

## Run with Model Context Protocol (MCP)

[mcp_server](mcp_server.py) includes code needed to decouple tool discovery and invocation
via the [Model Context Protocol (MCP) flow][flow-mcp]. To run using MCP, append
`-- --mcp` flag to `dotenv run` or `docker compose run` command.
[main_mcp.py](main_mcp.py) includes code needed to decouple tool discovery and
invocation via the [Model Context Protocol (MCP) flow][flow-mcp]. To run using
MCP, append `-- --mcp` flag to `dotenv run` or `docker compose run` command.

For example, to run with Docker:
```bash
Expand Down
35 changes: 11 additions & 24 deletions genai-function-calling/openai-agents/main.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import argparse
import asyncio
import os
import sys

from httpx import AsyncClient
from agents import (
Expand All @@ -10,11 +10,12 @@
RunConfig,
Runner,
function_tool,
Tool,
)
from agents.tracing import GLOBAL_TRACE_PROVIDER
from openai import AsyncAzureOpenAI

from mcp_server import mcp_client_main, SERVER_ARG
from main_mcp import run_main as mcp_main

# Shut down the global tracer as it sends to the OpenAI "/traces/ingest"
# endpoint, which we aren't using and doesn't exist on alternative backends
Expand Down Expand Up @@ -51,15 +52,15 @@ async def get_latest_elasticsearch_version(major_version: int = 0) -> str:
return max(versions, key=lambda v: tuple(map(int, v.split("."))))


async def run_agent(**agent_kwargs: dict):
async def run_agent(tools: list[Tool]):
model_name = os.getenv("CHAT_MODEL", "gpt-4o-mini")
openai_client = AsyncAzureOpenAI() if os.getenv("AZURE_OPENAI_API_KEY") else None
model = OpenAIProvider(openai_client=openai_client, use_responses=False).get_model(model_name)
agent = Agent(
name="version_assistant",
model=model,
model_settings=ModelSettings(temperature=0),
**agent_kwargs,
tools=tools,
)

result = await Runner.run(
Expand All @@ -71,27 +72,13 @@ async def run_agent(**agent_kwargs: dict):


async def main():
parser = argparse.ArgumentParser(
prog="genai-function-calling",
description="Fetches the latest version of Elasticsearch 8",
)
parser.add_argument(
"--mcp",
action="store_true",
help="Run tools via a MCP server instead of directly",
)
parser.add_argument(
SERVER_ARG,
action="store_true",
help="Run the MCP server",
)

args, _ = parser.parse_known_args()
"""Run tools with the agent directly unless in MCP mode"""

if args.mcp:
await mcp_client_main(run_agent, [get_latest_elasticsearch_version], args.mcp_server)
else:
await run_agent(tools=[function_tool(strict_mode=False)(get_latest_elasticsearch_version)])
fns = [get_latest_elasticsearch_version]
if any(arg.startswith("--mcp") for arg in sys.argv):
await mcp_main(fns, run_agent) # start an MCP server and use its tools
else: # run the tools directly
await run_agent([function_tool(strict_mode=False)(fn) for fn in fns])


if __name__ == "__main__":
Expand Down
55 changes: 55 additions & 0 deletions genai-function-calling/openai-agents/main_mcp.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import signal
from typing import Callable, Awaitable

from agents.mcp import MCPServerStdio, MCPUtil
from mcp.server.fastmcp import FastMCP
import os
import sys

from mcp.types import AnyFunction, Tool

SERVER_ARG = "--mcp-server"


def handler(signum, frame):
    """Effectively no-op SIGTERM handler: exit with status 0 so normal
    interpreter shutdown (and registered cleanup hooks) still runs."""
    raise SystemExit(0)


async def server_main(fns: list[AnyFunction]):
    """Run an MCP server over stdio, publishing each function in *fns* as a tool.

    The server blocks until the stdio transport closes or the process is
    terminated.
    """

    mcp_server = FastMCP(log_level="WARNING")
    for fn in fns:
        mcp_server.add_tool(fn)
    # Mysteriously, cleanup such as from opentelemetry-instrument does not run on exit
    # without registering an effectively no-op termination handler.
    signal.signal(signal.SIGTERM, handler)
    await mcp_server.run_stdio_async()


async def client_main(tools_callback: Callable[[list[Tool]], Awaitable[None]]):
    """Starts an MCP server subprocess and invokes tools_callback with its tools."""

    child_env = dict(os.environ)
    # Make sure PYTHONPATH is set to the same as what started this
    # process. Notably, opentelemetry-instrument removes itself from the value
    # in os.environ, and we'd like to restore it if it was used.
    child_env["PYTHONPATH"] = os.pathsep.join(sys.path)

    # Re-run this same program with SERVER_ARG appended, so the child takes
    # the server_main branch while we act as the client.
    server_params = {
        "command": sys.executable,
        "args": sys.argv + [SERVER_ARG],
        "env": child_env,
    }
    async with MCPServerStdio(server_params) as server:
        listed = await server.list_tools()
        util = MCPUtil()
        function_tools = [util.to_function_tool(t, server, False) for t in listed]
        await tools_callback(function_tools)


async def run_main(fns: list[AnyFunction], tools_callback: Callable[[list[Tool]], Awaitable[None]]):
    """Dispatch on the command line: if SERVER_ARG is present we are the MCP
    server subprocess; otherwise act as the client and hand tools to the
    callback."""
    running_as_server = SERVER_ARG in sys.argv
    if running_as_server:
        await server_main(fns)
        return
    await client_main(tools_callback)
45 changes: 0 additions & 45 deletions genai-function-calling/openai-agents/mcp_server.py

This file was deleted.