diff --git a/genai-function-calling/openai-agents/README.md b/genai-function-calling/openai-agents/README.md
index 3e8e3d7..9e263c3 100644
--- a/genai-function-calling/openai-agents/README.md
+++ b/genai-function-calling/openai-agents/README.md
@@ -76,7 +76,7 @@ dotenv -f ../.env run -- pytest
## Notes
The LLM should generate something like "The latest stable version of
-Elasticsearch is 8.17.3", unless it hallucinates. Just run it again, if you
+Elasticsearch is 8.17.4", unless it hallucinates. Just run it again, if you
see something else.
OpenAI Agents SDK's OpenTelemetry instrumentation is via
diff --git a/genai-function-calling/openai-agents/requirements.txt b/genai-function-calling/openai-agents/requirements.txt
index 61132a4..766b7fd 100644
--- a/genai-function-calling/openai-agents/requirements.txt
+++ b/genai-function-calling/openai-agents/requirements.txt
@@ -1,6 +1,6 @@
-openai-agents~=0.0.5
+openai-agents~=0.0.8
httpx~=0.28.1
-elastic-opentelemetry~=0.8.0
+elastic-opentelemetry~=1.0.0
# Use openai-agents instrumentation from OpenInference
-openinference-instrumentation-openai-agents~=0.1.1
+openinference-instrumentation-openai-agents~=0.1.7
diff --git a/genai-function-calling/semantic-kernel-dotnet/Dockerfile b/genai-function-calling/semantic-kernel-dotnet/Dockerfile
index 6a79487..87859cf 100644
--- a/genai-function-calling/semantic-kernel-dotnet/Dockerfile
+++ b/genai-function-calling/semantic-kernel-dotnet/Dockerfile
@@ -1,7 +1,7 @@
ARG DOTNET_VERSION=9.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-alpine AS edot
-ARG EDOT_VERSION=1.0.0-beta.2
+ARG EDOT_VERSION=1.0.1
ARG EDOT_INSTALL=https://github.com/elastic/elastic-otel-dotnet/releases/download/${EDOT_VERSION}/elastic-dotnet-auto-install.sh
ENV OTEL_DOTNET_AUTO_HOME=/edot
WORKDIR /edot
diff --git a/genai-function-calling/semantic-kernel-dotnet/README.md b/genai-function-calling/semantic-kernel-dotnet/README.md
index 8d71547..c9c9ad8 100644
--- a/genai-function-calling/semantic-kernel-dotnet/README.md
+++ b/genai-function-calling/semantic-kernel-dotnet/README.md
@@ -27,7 +27,7 @@ docker compose run --build --rm genai-function-calling
## Notes
The LLM should generate something like "The latest stable version of
-Elasticsearch is 8.17.3", unless it hallucinates. Just run it again, if you
+Elasticsearch is 8.17.4", unless it hallucinates. Just run it again, if you
see something else.
Semantic Kernel .NET's OpenTelemetry instrumentation uses the following custom
diff --git a/genai-function-calling/semantic-kernel-dotnet/app.csproj b/genai-function-calling/semantic-kernel-dotnet/app.csproj
index 8604489..bde2b69 100644
--- a/genai-function-calling/semantic-kernel-dotnet/app.csproj
+++ b/genai-function-calling/semantic-kernel-dotnet/app.csproj
@@ -11,9 +11,9 @@
-
-
-
+
+
+
diff --git a/genai-function-calling/spring-ai/README.md b/genai-function-calling/spring-ai/README.md
index 06f7047..10f2120 100644
--- a/genai-function-calling/spring-ai/README.md
+++ b/genai-function-calling/spring-ai/README.md
@@ -44,7 +44,7 @@ Run maven after setting ENV variables like this:
## Notes
The LLM should generate something like "The latest stable version of
-Elasticsearch is 8.17.3", unless it hallucinates. Just run it again, if you
+Elasticsearch is 8.17.4", unless it hallucinates. Just run it again, if you
see something else.
Spring AI uses Micrometer which bridges to OpenTelemetry, but needs a few
diff --git a/genai-function-calling/spring-ai/pom.xml b/genai-function-calling/spring-ai/pom.xml
index f310052..bea1b2e 100644
--- a/genai-function-calling/spring-ai/pom.xml
+++ b/genai-function-calling/spring-ai/pom.xml
@@ -5,7 +5,7 @@
  <parent>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-parent</artifactId>
-    <version>3.4.3</version>
+    <version>3.4.4</version>
  </parent>
  <groupId>co.elastic.observability-labs</groupId>
diff --git a/genai-function-calling/vercel-ai/README.md b/genai-function-calling/vercel-ai/README.md
index a828ca7..07857cb 100644
--- a/genai-function-calling/vercel-ai/README.md
+++ b/genai-function-calling/vercel-ai/README.md
@@ -36,7 +36,7 @@ npm start
## Notes
The LLM should generate something like "The latest stable version of
-Elasticsearch is 8.17.3", unless it hallucinates. Just run it again, if you
+Elasticsearch is 8.17.4", unless it hallucinates. Just run it again, if you
see something else.
Vercel AI's OpenTelemetry instrumentation only produces traces (not logs or
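Worth noting alongside this README: the Vercel AI SDK only emits those spans when telemetry is switched on per call. A minimal sketch of that wiring, assuming the `experimental_telemetry` option in AI SDK 4.x; the model id and prompt below are placeholders, not taken from `index.js`:

```js
// Sketch only: AI SDK 4.x emits OpenTelemetry spans per call, and only when
// experimental_telemetry is enabled. Model and prompt below are placeholders.
const {createOpenAI} = require('@ai-sdk/openai');
const {generateText} = require('ai');

async function main() {
  const openai = createOpenAI(); // reads OPENAI_API_KEY from the environment
  const {text} = await generateText({
    model: openai('gpt-4o-mini'), // placeholder model id
    prompt: 'What is the latest stable version of Elasticsearch?',
    experimental_telemetry: {isEnabled: true}, // no spans without this
  });
  console.log(text);
}

main().catch(console.error);
```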
diff --git a/genai-function-calling/vercel-ai/env.example b/genai-function-calling/vercel-ai/env.example
index ed3ee6a..dbcf342 100644
--- a/genai-function-calling/vercel-ai/env.example
+++ b/genai-function-calling/vercel-ai/env.example
@@ -29,3 +29,5 @@ OPENAI_API_KEY=
# OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
OTEL_SERVICE_NAME=genai-function-calling
+# Don't print status message on startup
+OTEL_LOG_LEVEL=warn
diff --git a/genai-function-calling/vercel-ai/index.js b/genai-function-calling/vercel-ai/index.js
index c780c5b..eed562f 100644
--- a/genai-function-calling/vercel-ai/index.js
+++ b/genai-function-calling/vercel-ai/index.js
@@ -1,6 +1,5 @@
const {createAzure} = require('@ai-sdk/azure');
const {createOpenAI} = require('@ai-sdk/openai')
-const fetch = require('node-fetch');
const {generateText, tool} = require('ai');
const {z} = require('zod');
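Dropping `node-fetch` works because Node.js 18+ ships a global `fetch`, which the tool callback can use directly. A rough sketch of the pattern, with a hypothetical tool name and placeholder URL rather than whatever `index.js` actually calls:

```js
// Sketch only: a tool definition that relies on Node's built-in global fetch
// (Node.js >= 18), so require('node-fetch') is no longer needed.
const {tool} = require('ai');
const {z} = require('zod');

// Hypothetical tool; the real one in index.js may use a different name and URL.
const getLatestElasticsearchVersion = tool({
  description: 'Returns the latest stable version of Elasticsearch',
  parameters: z.object({}), // no inputs in this sketch
  execute: async () => {
    // global fetch, no extra dependency required
    const response = await fetch('https://example.invalid/elasticsearch/releases'); // placeholder URL
    return await response.text();
  },
});

module.exports = {getLatestElasticsearchVersion};
```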
diff --git a/genai-function-calling/vercel-ai/package.json b/genai-function-calling/vercel-ai/package.json
index e344c57..6553c0a 100644
--- a/genai-function-calling/vercel-ai/package.json
+++ b/genai-function-calling/vercel-ai/package.json
@@ -10,15 +10,9 @@
"start": "node --env-file .env -r @elastic/opentelemetry-node index.js"
},
"dependencies": {
- "ai": "^4.1.63",
- "@ai-sdk/azure": "^1.2.6",
- "@ai-sdk/openai": "^1.2.6",
- "@elastic/opentelemetry-node": "*"
- },
- "_comment": "Override to avoid punycode warnings in recent versions of Node.JS",
- "overrides": {
- "node-fetch@2.x": {
- "whatwg-url": "14.x"
- }
+ "ai": "^4.3.2",
+ "@ai-sdk/azure": "^1.3.8",
+ "@ai-sdk/openai": "^1.3.7",
+ "@elastic/opentelemetry-node": "^1"
}
}