3 changes: 2 additions & 1 deletion genai-function-calling/openai-agents/Dockerfile
@@ -8,5 +8,6 @@ RUN --mount=type=cache,target=/root/.cache/pip pip install -r /tmp/requirements.
 RUN --mount=type=cache,target=/root/.cache/pip edot-bootstrap --action=install
 
 COPY main.py /
+COPY mcp_server.py /
 
-CMD [ "opentelemetry-instrument", "python", "main.py" ]
+ENTRYPOINT [ "opentelemetry-instrument", "python", "main.py" ]
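Note: switching from `CMD` to `ENTRYPOINT` is what allows the README's `docker compose run ... genai-function-calling --mcp` invocation below to work: arguments given after the service name are appended to the `ENTRYPOINT` command, whereas they would have replaced a `CMD`.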
17 changes: 17 additions & 0 deletions genai-function-calling/openai-agents/README.md
@@ -52,6 +52,22 @@ Finally, run `main.py` (notice the prefix of `opentelemetry-instrument`):
 dotenv run --no-override -- opentelemetry-instrument python main.py
 ```
+
+## Run with Model Context Protocol (MCP)
+
+[mcp_server](mcp_server.py) includes the code needed to decouple tool discovery and
+invocation via the [Model Context Protocol (MCP) flow][flow-mcp]. To run using MCP,
+append the `--mcp` flag to the `dotenv run` or `docker compose run` command.
+
+For example, to run with Docker:
+```bash
+docker compose run --build --rm genai-function-calling --mcp
+```
+
+Or to run with Python:
+```bash
+dotenv run --no-override -- opentelemetry-instrument python main.py --mcp
+```
 
 ## Tests
 
 Tests use [pytest-vcr][pytest-vcr] to capture HTTP traffic for offline unit
@@ -88,3 +104,4 @@ OpenAI Agents SDK's OpenTelemetry instrumentation is via
 [pytest-vcr]: https://pytest-vcr.readthedocs.io/
 [test_main.yaml]: cassettes/test_main.yaml
 [openinference]: https://github.com/Arize-ai/openinference/tree/main/python/instrumentation/openinference-instrumentation-openai-agents
+[flow-mcp]: ../README.md#model-context-protocol-flow
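In practice, passing `--mcp` makes `main.py` re-launch itself as a stdio MCP server child process (see `mcp_server.py` below), so the agent discovers and invokes the same tool over the MCP protocol rather than in-process.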
32 changes: 29 additions & 3 deletions genai-function-calling/openai-agents/main.py
@@ -1,3 +1,4 @@
+import argparse
 import asyncio
 import os
 
@@ -13,13 +14,14 @@
 from agents.tracing import GLOBAL_TRACE_PROVIDER
 from openai import AsyncAzureOpenAI
 
+from mcp_server import mcp_client_main, SERVER_ARG
+
 # Shut down the global tracer as it sends to the OpenAI "/traces/ingest"
 # endpoint, which we aren't using and doesn't exist on alternative backends
 # like Ollama.
 GLOBAL_TRACE_PROVIDER.shutdown()
 
 
-@function_tool(strict_mode=False)
 async def get_latest_elasticsearch_version(major_version: int = 0) -> str:
     """Returns the latest GA version of Elasticsearch in "X.Y.Z" format.
 
@@ -49,15 +51,15 @@ async def get_latest_elasticsearch_version(major_version: int = 0) -> str:
     return max(versions, key=lambda v: tuple(map(int, v.split("."))))
 
 
-async def main():
+async def run_agent(**agent_kwargs: dict):
     model_name = os.getenv("CHAT_MODEL", "gpt-4o-mini")
     openai_client = AsyncAzureOpenAI() if os.getenv("AZURE_OPENAI_API_KEY") else None
     model = OpenAIProvider(openai_client=openai_client, use_responses=False).get_model(model_name)
     agent = Agent(
         name="version_assistant",
-        tools=[get_latest_elasticsearch_version],
         model=model,
         model_settings=ModelSettings(temperature=0),
+        **agent_kwargs,
     )
 
     result = await Runner.run(
@@ -68,5 +70,29 @@ async def main():
     print(result.final_output)
 
 
+async def main():
+    parser = argparse.ArgumentParser(
+        prog="genai-function-calling",
+        description="Fetches the latest version of Elasticsearch 8",
+    )
+    parser.add_argument(
+        "--mcp",
+        action="store_true",
+        help="Run tools via an MCP server instead of directly",
+    )
+    parser.add_argument(
+        SERVER_ARG,
+        action="store_true",
+        help="Run the MCP server",
+    )
+
+    args, _ = parser.parse_known_args()
+
+    if args.mcp:
+        await mcp_client_main(run_agent, [get_latest_elasticsearch_version], args.mcp_server)
+    else:
+        await run_agent(tools=[function_tool(strict_mode=False)(get_latest_elasticsearch_version)])
+
+
 if __name__ == "__main__":
     asyncio.run(main())
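For clarity, the dispatch that the new `main()` implements can be reduced to the following standalone sketch (the agent and MCP wiring are stubbed out; only the flag handling mirrors the diff above):

```python
import argparse
import asyncio


async def run_agent(**agent_kwargs):
    # Stand-in for the real run_agent() above; just show what it would receive.
    print("run_agent kwargs:", sorted(agent_kwargs))


async def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--mcp", action="store_true")  # run tools via MCP
    parser.add_argument("--mcp-server", action="store_true")  # child: act as the MCP server
    # parse_known_args() tolerates extra argv, which matters because the MCP
    # child process is launched with the parent's full sys.argv plus --mcp-server.
    args, _ = parser.parse_known_args()

    if args.mcp and args.mcp_server:
        print("child: would serve tools over stdio via FastMCP")
    elif args.mcp:
        print("parent: would spawn itself with --mcp-server and connect as an MCP client")
    else:
        await run_agent(tools=["<function tool>"])  # direct, in-process tool calls


if __name__ == "__main__":
    asyncio.run(main())
```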
45 changes: 45 additions & 0 deletions genai-function-calling/openai-agents/mcp_server.py
@@ -0,0 +1,45 @@
+from agents.mcp import MCPServerStdio
+from mcp.server.fastmcp import FastMCP
+import os
+import signal
+import sys
+
+
+SERVER_ARG = "--mcp-server"
+
+
+def handler(signum, frame):
+    sys.exit(0)
+
+
+async def mcp_server_main(tools):
+    mcp_server = FastMCP(log_level="WARNING")
+    for tool in tools:
+        mcp_server.add_tool(tool)
+    # Mysteriously, cleanup such as from opentelemetry-instrument does not run on exit
> Review comment from the author on the line above: "Sorry, absolutely no clue what's going on here."
+    # without registering an effectively no-op termination handler.
+    signal.signal(signal.SIGTERM, handler)
+    await mcp_server.run_stdio_async()
+
+
+async def run_agent_with_mcp_client(run_agent):
+    env = os.environ.copy()
+    # Make sure PYTHONPATH is set to the same as what started this
+    # process. Notably, opentelemetry-instrument removes itself from the value
+    # in os.environ and we'd like to restore it if it was used.
+    env["PYTHONPATH"] = os.pathsep.join(sys.path)
+    async with MCPServerStdio(
+        {
+            "command": sys.executable,
+            "args": sys.argv + [SERVER_ARG],
+            "env": env,
+        }
+    ) as mcp_client:
+        await run_agent(mcp_servers=[mcp_client])
+
+
+async def mcp_client_main(run_agent, tools, is_server):
+    if is_server:
+        await mcp_server_main(tools)
+    else:
+        await run_agent_with_mcp_client(run_agent)
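A quick way to see why the `PYTHONPATH` restoration above is needed (a hypothetical check, not part of the PR): run a script like this both with and without the `opentelemetry-instrument` prefix and compare the two values.

```python
# check_pythonpath.py -- hypothetical helper, not part of the PR.
# Under `opentelemetry-instrument python check_pythonpath.py`, the
# auto-instrumentation directory shows up in sys.path even though it has
# been stripped back out of os.environ["PYTHONPATH"]; rebuilding the
# child's PYTHONPATH from sys.path (as mcp_server.py does) keeps the
# spawned MCP server instrumented too.
import os
import sys

print("PYTHONPATH env:", os.environ.get("PYTHONPATH", "<unset>"))
print("sys.path:", os.pathsep.join(sys.path))
```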
3 changes: 2 additions & 1 deletion genai-function-calling/openai-agents/requirements.txt
@@ -1,5 +1,6 @@
-openai-agents~=0.0.8
+openai-agents~=0.0.9
 httpx~=0.28.1
+mcp~=1.6.0
 
 elastic-opentelemetry~=1.0.0
 # Use openai-agents instrumentation from OpenInference