diff --git a/genai-function-calling/vercel-ai/README.md b/genai-function-calling/vercel-ai/README.md index e57d610..c3e0205 100644 --- a/genai-function-calling/vercel-ai/README.md +++ b/genai-function-calling/vercel-ai/README.md @@ -53,13 +53,17 @@ npm run start -- --mcp ## Notes The LLM should generate something like "The latest stable version of -Elasticsearch is 8.17.4", unless it hallucinates. Just run it again, if you -see something else. +Elasticsearch is 8.18.0," unless it hallucinates. Run it again, if you see +something else. Vercel AI's OpenTelemetry instrumentation only produces traces (not logs or metrics). +This uses [OpenInference][openinference] to propagate trace identifiers when +using MCP. + --- [flow]: ../README.md#example-application-flow [vercel-ai]: https://github.com/vercel/ai [flow-mcp]: ../README.md#model-context-protocol-flow +[openinference]: https://github.com/Arize-ai/openinference/tree/main/js/packages/openinference-instrumentation-mcp diff --git a/genai-function-calling/vercel-ai/package.json b/genai-function-calling/vercel-ai/package.json index f803afb..5a71c83 100644 --- a/genai-function-calling/vercel-ai/package.json +++ b/genai-function-calling/vercel-ai/package.json @@ -7,13 +7,14 @@ "node": ">=22" }, "scripts": { - "start": "node --env-file .env -r @elastic/opentelemetry-node index.js" + "start": "node --env-file .env --import @elastic/opentelemetry-node --import ./telemetry.js index.js" }, "dependencies": { - "@ai-sdk/azure": "^1.3.8", - "@ai-sdk/openai": "^1.3.7", + "ai": "^4.3.9", + "@ai-sdk/azure": "^1.3.19", + "@ai-sdk/openai": "^1.3.18", + "@modelcontextprotocol/sdk": "^1.10.2", "@elastic/opentelemetry-node": "^1", - "@modelcontextprotocol/sdk": "^1.9.0", - "ai": "^4.3.2" + "@arizeai/openinference-instrumentation-mcp": "^0.2.0" } } diff --git a/genai-function-calling/vercel-ai/telemetry.js b/genai-function-calling/vercel-ai/telemetry.js new file mode 100644 index 0000000..62a4b96 --- /dev/null +++ 
b/genai-function-calling/vercel-ai/telemetry.js @@ -0,0 +1,8 @@ +const { MCPInstrumentation } = require("@arizeai/openinference-instrumentation-mcp"); + +const mcpInstrumentation = new MCPInstrumentation(); +// The MCP SDK must be instrumented manually because its module structure prevents automatic instrumentation from patching it +mcpInstrumentation.manuallyInstrument({ + clientStdioModule: require("@modelcontextprotocol/sdk/client/stdio.js"), + serverStdioModule: require("@modelcontextprotocol/sdk/server/stdio.js"), +});