Skip to content

Commit 4cda646

Browse files
authored
feat(llm-observability): $ai_tools capture in Langchain (#199)
1 parent ea4e7fa commit 4cda646

File tree

4 files changed

+80
-4
lines changed

4 files changed

+80
-4
lines changed

CHANGELOG.md

+4
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
## 3.17.0 - 2025-02-27
2+
3+
1. The LangChain handler now captures tools in `$ai_generation` events, in property `$ai_tools`. This allows for displaying tools provided to the LLM call in PostHog UI. Note that support for `$ai_tools` in OpenAI and Anthropic SDKs is coming soon.
4+
15
## 3.16.0 - 2025-02-26
26

37
1. feat: add some platform info to events (#198)

posthog/ai/langchain/callbacks.py

+20-3
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,8 @@ class GenerationMetadata(SpanMetadata):
6060
"""Model parameters of the run: temperature, max_tokens, etc."""
6161
base_url: Optional[str] = None
6262
"""Base URL of the provider's API used in the run."""
63+
tools: Optional[List[Dict[str, Any]]] = None
64+
"""Tools provided to the model."""
6365

6466

6567
RunMetadata = Union[SpanMetadata, GenerationMetadata]
@@ -377,6 +379,8 @@ def _set_llm_metadata(
377379
generation = GenerationMetadata(name=run_name, input=messages, start_time=time.time(), end_time=None)
378380
if isinstance(invocation_params, dict):
379381
generation.model_params = get_model_params(invocation_params)
382+
if tools := invocation_params.get("tools"):
383+
generation.tools = tools
380384
if isinstance(metadata, dict):
381385
if model := metadata.get("ls_model_name"):
382386
generation.model = model
@@ -424,7 +428,11 @@ def _pop_run_and_capture_trace_or_span(self, run_id: UUID, parent_run_id: Option
424428
log.warning(f"Run {run_id} is a generation, but attempted to be captured as a trace or span.")
425429
return
426430
self._capture_trace_or_span(
427-
trace_id, run_id, run, outputs, self._get_parent_run_id(trace_id, run_id, parent_run_id)
431+
trace_id,
432+
run_id,
433+
run,
434+
outputs,
435+
self._get_parent_run_id(trace_id, run_id, parent_run_id),
428436
)
429437

430438
def _capture_trace_or_span(
@@ -465,7 +473,10 @@ def _capture_trace_or_span(
465473
)
466474

467475
def _pop_run_and_capture_generation(
468-
self, run_id: UUID, parent_run_id: Optional[UUID], response: Union[LLMResult, BaseException]
476+
self,
477+
run_id: UUID,
478+
parent_run_id: Optional[UUID],
479+
response: Union[LLMResult, BaseException],
469480
):
470481
trace_id = self._get_trace_id(run_id)
471482
self._pop_parent_of_run(run_id)
@@ -476,7 +487,11 @@ def _pop_run_and_capture_generation(
476487
log.warning(f"Run {run_id} is not a generation, but attempted to be captured as a generation.")
477488
return
478489
self._capture_generation(
479-
trace_id, run_id, run, response, self._get_parent_run_id(trace_id, run_id, parent_run_id)
490+
trace_id,
491+
run_id,
492+
run,
493+
response,
494+
self._get_parent_run_id(trace_id, run_id, parent_run_id),
480495
)
481496

482497
def _capture_generation(
@@ -500,6 +515,8 @@ def _capture_generation(
500515
"$ai_latency": run.latency,
501516
"$ai_base_url": run.base_url,
502517
}
518+
if run.tools:
519+
event_properties["$ai_tools"] = run.tools
503520

504521
if isinstance(output, BaseException):
505522
event_properties["$ai_http_status"] = _get_http_status(output)

posthog/test/ai/langchain/test_callbacks.py

+55
Original file line numberDiff line numberDiff line change
@@ -1168,6 +1168,61 @@ async def test_async_anthropic_streaming(mock_client):
11681168
assert isinstance(trace_props["$ai_output_state"], AIMessage)
11691169

11701170

1171+
def test_metadata_tools(mock_client):
1172+
callbacks = CallbackHandler(mock_client)
1173+
run_id = uuid.uuid4()
1174+
tools = [
1175+
[
1176+
{
1177+
"type": "function",
1178+
"function": {
1179+
"name": "foo",
1180+
"description": "The foo.",
1181+
"parameters": {
1182+
"properties": {
1183+
"bar": {
1184+
"description": "The bar of foo.",
1185+
"type": "string",
1186+
},
1187+
},
1188+
"required": ["query_description", "query_kind"],
1189+
"type": "object",
1190+
"additionalProperties": False,
1191+
},
1192+
"strict": True,
1193+
},
1194+
}
1195+
]
1196+
]
1197+
1198+
with patch("time.time", return_value=1234567890):
1199+
callbacks._set_llm_metadata(
1200+
{"kwargs": {"openai_api_base": "https://us.posthog.com"}},
1201+
run_id,
1202+
messages=[{"role": "user", "content": "What's the weather like in SF?"}],
1203+
invocation_params={"temperature": 0.5, "tools": tools},
1204+
metadata={"ls_model_name": "hog-mini", "ls_provider": "posthog"},
1205+
name="test",
1206+
)
1207+
expected = GenerationMetadata(
1208+
model="hog-mini",
1209+
input=[{"role": "user", "content": "What's the weather like in SF?"}],
1210+
start_time=1234567890,
1211+
model_params={"temperature": 0.5},
1212+
provider="posthog",
1213+
base_url="https://us.posthog.com",
1214+
name="test",
1215+
tools=tools,
1216+
end_time=None,
1217+
)
1218+
assert callbacks._runs[run_id] == expected
1219+
with patch("time.time", return_value=1234567891):
1220+
run = callbacks._pop_run_metadata(run_id)
1221+
expected.end_time = 1234567891
1222+
assert run == expected
1223+
assert callbacks._runs == {}
1224+
1225+
11711226
def test_tool_calls(mock_client):
11721227
prompt = ChatPromptTemplate.from_messages([("user", "Foo")])
11731228
model = FakeMessagesListChatModel(

posthog/version.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
VERSION = "3.16.0"
1+
VERSION = "3.17.0"
22

33
if __name__ == "__main__":
44
print(VERSION, end="") # noqa: T201

0 commit comments

Comments (0)