Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion genai-function-calling/vercel-ai/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ npm run start -- --mcp
## Notes

The LLM should generate something like "The latest stable version of
Elasticsearch is 8.18.0," unless it hallucinates. Run it again, if you see
Elasticsearch is 8.19.3," unless it hallucinates. Run it again if you see
something else.

Vercel AI's OpenTelemetry instrumentation only produces traces (not logs or
Expand Down
4 changes: 2 additions & 2 deletions genai-function-calling/vercel-ai/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ const model = process.env.CHAT_MODEL || 'gpt-4o-mini';

const getLatestElasticsearchVersion = tool({
description: 'Get the latest version of Elasticsearch',
parameters: z.object({
inputSchema: z.object({
majorVersion: z.number().optional().describe('Major version to filter by (e.g. 7, 8). Defaults to latest'),
}),
execute: async ({majorVersion}) => {
Expand Down Expand Up @@ -55,7 +55,7 @@ async function runAgent(tools) {
// If using reasoning models, remove the format rewards from output. Non-reasoning models will not have
// them, making this effectively a no-op.
model: wrapLanguageModel({
model: openai(model),
model: openai.chat(model),
middleware: [extractReasoningMiddleware({ tagName: 'think' })],
}),
messages: [{role: 'user', content: "What is the latest version of Elasticsearch 8?"}],
Expand Down
2 changes: 1 addition & 1 deletion genai-function-calling/vercel-ai/mcp.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ async function mcpServerMain(tools) {
server.tool(
toolName,
tool.description,
tool.parameters.shape,
tool.inputSchema.shape,
async (params) => {
try {
const result = await tool.execute(params);
Expand Down
12 changes: 6 additions & 6 deletions genai-function-calling/vercel-ai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@
"start": "node --env-file .env --import @elastic/opentelemetry-node --import ./telemetry.js index.js"
},
"dependencies": {
"ai": "^4.3.11",
"@ai-sdk/azure": "^1.3.21",
"@ai-sdk/openai": "^1.3.20",
"@modelcontextprotocol/sdk": "^1.10.2",
"@elastic/opentelemetry-node": "^1",
"@arizeai/openinference-instrumentation-mcp": "^0.2.0"
"ai": "^5.0.35",
"@ai-sdk/azure": "^2.0.25",
"@ai-sdk/openai": "^2.0.25",
"@modelcontextprotocol/sdk": "^1.17.5",
"@elastic/opentelemetry-node": "^1.2.0",
"@arizeai/openinference-instrumentation-mcp": "^0.2.4"
}
}