
Commit b3b9c93

fix(otel): replace middleware with fastapi middleware
1 parent 7897899 commit b3b9c93

4 files changed: +134, -9 lines


llama_stack/core/server/server.py

Lines changed: 1 addition & 0 deletions
@@ -410,6 +410,7 @@ def create_app() -> StackApp:
 
     if Api.telemetry in impls:
         setup_logger(impls[Api.telemetry])
+        TelemetryAdapter.fastapi_middleware(app)  # hold us over until we can move to programmatic instrumentation
     else:
         setup_logger(TelemetryAdapter(TelemetryConfig(), {}))
 
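
For context (not part of the diff): a minimal, self-contained sketch of the pattern the new call relies on. The app factory and route below are hypothetical, not taken from llama-stack; the point is that FastAPIInstrumentor.instrument_app(app) is invoked once, right after the app is built, so every incoming request is wrapped in an OpenTelemetry server span.

# Hypothetical sketch of the instrumentation pattern; names are illustrative.
from fastapi import FastAPI
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor


def build_app() -> FastAPI:
    app = FastAPI()

    @app.get("/healthz")  # illustrative route, not from llama-stack
    def healthz() -> dict[str, str]:
        return {"status": "ok"}

    # Same call the commit routes through TelemetryAdapter.fastapi_middleware:
    # installs OpenTelemetry's ASGI middleware so each request gets a server span.
    FastAPIInstrumentor.instrument_app(app)
    return app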

llama_stack/providers/inline/telemetry/meta_reference/telemetry.py

Lines changed: 7 additions & 0 deletions
@@ -8,9 +8,11 @@
 import threading
 from typing import Any
 
+from fastapi import FastAPI
 from opentelemetry import metrics, trace
 from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
 from opentelemetry.sdk.metrics import MeterProvider
 from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
 from opentelemetry.sdk.resources import Resource

@@ -362,3 +364,8 @@ async def get_span_tree(
                 max_depth=max_depth,
             )
         )
+
+    @staticmethod
+    def fastapi_middleware(app: FastAPI):
+        """Inject telemetry middleware into the FastAPI app"""
+        FastAPIInstrumentor.instrument_app(app)
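
Not part of the commit: a hedged sketch of how the underlying call in the new static hook can be exercised in isolation. The console exporter and global tracer provider below are assumptions made purely for illustration; the adapter itself wires OTLP exporters, as the imports in this file show.

# Illustrative only (not from the repo): instrument a throwaway app and
# print the resulting HTTP server span to stdout instead of exporting it.
from fastapi import FastAPI
from fastapi.testclient import TestClient
from opentelemetry import trace
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

app = FastAPI()
# Equivalent to the call behind TelemetryAdapter.fastapi_middleware(app):
FastAPIInstrumentor.instrument_app(app)

TestClient(app).get("/docs")  # request through the instrumented app emits a span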

pyproject.toml

Lines changed: 10 additions & 9 deletions
(The paired -/+ lines below differ only in whitespace alignment of the trailing comments.)

@@ -25,14 +25,14 @@ classifiers = [
 ]
 dependencies = [
     "aiohttp",
-    "fastapi>=0.115.0,<1.0", # server
-    "fire", # for MCP in LLS client
+    "fastapi>=0.115.0,<1.0", # server
+    "fire", # for MCP in LLS client
     "httpx",
     "huggingface-hub>=0.34.0,<1.0",
     "jinja2>=3.1.6",
     "jsonschema",
     "llama-stack-client>=0.2.23",
-    "openai>=1.107", # for expires_after support
+    "openai>=1.107", # for expires_after support
     "prompt-toolkit",
     "python-dotenv",
     "python-jose[cryptography]",

@@ -43,13 +43,14 @@ dependencies = [
     "tiktoken",
     "pillow",
     "h11>=0.16.0",
-    "python-multipart>=0.0.20", # For fastapi Form
-    "uvicorn>=0.34.0", # server
-    "opentelemetry-sdk>=1.30.0", # server
+    "python-multipart>=0.0.20", # For fastapi Form
+    "uvicorn>=0.34.0", # server
+    "opentelemetry-sdk>=1.30.0", # server
     "opentelemetry-exporter-otlp-proto-http>=1.30.0", # server
-    "aiosqlite>=0.21.0", # server - for metadata store
-    "asyncpg", # for metadata store
-    "sqlalchemy[asyncio]>=2.0.41", # server - for conversations
+    "aiosqlite>=0.21.0", # server - for metadata store
+    "asyncpg", # for metadata store
+    "sqlalchemy[asyncio]>=2.0.41", # server - for conversations
+    "opentelemetry-instrumentation-fastapi>=0.57b0",
 ]
 
 [project.optional-dependencies]
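
Not part of the commit: one hypothetical, standard-library-only way to confirm that the newly added opentelemetry-instrumentation-fastapi dependency resolves in an environment after syncing.

# Illustrative check, not from the repo: confirm the instrumentation
# package added in pyproject.toml is installed and importable.
from importlib.metadata import PackageNotFoundError, version

try:
    print("opentelemetry-instrumentation-fastapi", version("opentelemetry-instrumentation-fastapi"))
    from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor  # noqa: F401
except (PackageNotFoundError, ImportError) as exc:
    print(f"instrumentation dependency missing: {exc}")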
