
Commit 40112d9

feat: Added tool_use support

1 parent 986ef7b
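
The examples added in this commit exercise Anthropic's tool_use flow through lunary's monitor() wrapper. For orientation, here is a condensed sketch of that round-trip, drawn from the tool_calls() example in the diff below; the get_weather tool, the claude-3-opus-20240229 model, and the hard-coded "The weather is 73f" result are illustrative values taken from that example, not anything the library requires.

# Condensed sketch of the tool_use round-trip shown in tool_calls() below.
from anthropic import Anthropic
from lunary.anthrophic import monitor

# monitor() wraps the client and returns it, as in the examples.
client = monitor(Anthropic())

tools = [
    {
        "name": "get_weather",
        "description": "Get the weather for a specific location",
        "input_schema": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
        },
    }
]
user_message = {"role": "user", "content": "What is the weather in San Francisco, California?"}

# First call: the model decides to call the tool and stops with stop_reason == "tool_use".
message = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1024,
    messages=[user_message],
    tools=tools,
)
tool = next(c for c in message.content if c.type == "tool_use")

# Second call: feed the tool result back so the model can produce its final answer.
response = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1024,
    messages=[
        user_message,
        {"role": message.role, "content": message.content},
        {
            "role": "user",
            "content": [
                {
                    "type": "tool_result",
                    "tool_use_id": tool.id,
                    "content": [{"type": "text", "text": "The weather is 73f"}],
                }
            ],
        },
    ],
    tools=tools,
)
print(response.model_dump_json(indent=2))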

File tree

3 files changed  +725 -266 lines changed

examples/anthrophic.py  +124 -20
@@ -1,14 +1,10 @@
 import os
 import asyncio
-from dotenv import load_dotenv
 from anthropic import Anthropic, AsyncAnthropic
 
 from lunary.anthrophic import monitor
 
-load_dotenv()
-
-
-def test_sync_non_streaming():
+def sync_non_streaming():
     client = Anthropic()
     monitor(client)
 
@@ -23,7 +19,7 @@ def test_sync_non_streaming():
     print(message.content)
 
 
-async def test_async_non_streaming():
+async def async_non_streaming():
     client = monitor(AsyncAnthropic())
 
     message = await client.messages.create(
@@ -37,7 +33,7 @@ async def test_async_non_streaming():
     print(message.content)
 
 
-def test_sync_streaming():
+def sync_streaming():
     client = monitor(Anthropic())
 
     stream = client.messages.create(
@@ -53,7 +49,7 @@ def test_sync_streaming():
         print(event)
 
 
-async def test_async_streaming():
+async def async_streaming():
     client = monitor(AsyncAnthropic())
 
     stream = await client.messages.create(
@@ -69,7 +65,7 @@ async def test_async_streaming():
         print(event)
 
 
-def test_sync_stream_helper():
+def sync_stream_helper():
     client = Anthropic()
     monitor(client)
 
@@ -84,7 +80,7 @@ def test_sync_stream_helper():
     for event in stream:
         print(event)
 
-async def test_async_stream_helper():
+async def async_stream_helper():
     client = monitor(AsyncAnthropic())
 
     async with client.messages.stream(
@@ -97,14 +93,15 @@ async def test_async_stream_helper():
         ],
         model="claude-3-opus-20240229",
     ) as stream:
-        async for event in stream:
-            print(event)
+        async for text in stream.text_stream:
+            print(text, end="", flush=True)
+        print()
 
     message = await stream.get_final_message()
     print(message.to_json())
 
 
-def test_extra_arguments():
+def extra_arguments():
     client = Anthropic()
     monitor(client)
 
@@ -129,13 +126,120 @@ def test_extra_arguments():
     print(message.content)
 
 
-# test_sync_non_streaming()
-# test_asyncio.run(async_non_streaming())
+def anthrophic_bedrock():
+    from anthropic import AnthropicBedrock
+
+    client = AnthropicBedrock()
+
+    message = client.messages.create(
+        max_tokens=1024,
+        messages=[
+            {
+                "role": "user",
+                "content": "Hello!",
+            }
+        ],
+        model="anthropic.claude-3-sonnet-20240229-v1:0",
+    )
+    print(message)
+
+def tool_calls():
+    from anthropic import Anthropic
+    from anthropic.types import ToolParam, MessageParam
+
+    client = monitor(Anthropic())
+
+    user_message: MessageParam = {
+        "role": "user",
+        "content": "What is the weather in San Francisco, California?",
+    }
+    tools: list[ToolParam] = [
+        {
+            "name": "get_weather",
+            "description": "Get the weather for a specific location",
+            "input_schema": {
+                "type": "object",
+                "properties": {"location": {"type": "string"}},
+            },
+        }
+    ]
+
+    message = client.messages.create(
+        model="claude-3-opus-20240229",
+        max_tokens=1024,
+        messages=[user_message],
+        tools=tools,
+    )
+    print(f"Initial response: {message.model_dump_json(indent=2)}")
+
+    assert message.stop_reason == "tool_use"
+
+    tool = next(c for c in message.content if c.type == "tool_use")
+    response = client.messages.create(
+        model="claude-3-opus-20240229",
+        max_tokens=1024,
+        messages=[
+            user_message,
+            {"role": message.role, "content": message.content},
+            {
+                "role": "user",
+                "content": [
+                    {
+                        "type": "tool_result",
+                        "tool_use_id": tool.id,
+                        "content": [{"type": "text", "text": "The weather is 73f"}],
+                    }
+                ],
+            },
+        ],
+        tools=tools,
+    )
+    print(f"\nFinal response: {response.model_dump_json(indent=2)}")
+
+
+async def async_tool_calls():
+    client = monitor(AsyncAnthropic())
+    async with client.messages.stream(
+        max_tokens=1024,
+        model="claude-3-haiku-20240307",
+        tools=[
+            {
+                "name": "get_weather",
+                "description": "Get the weather at a specific location",
+                "input_schema": {
+                    "type": "object",
+                    "properties": {
+                        "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
+                        "unit": {
+                            "type": "string",
+                            "enum": ["celsius", "fahrenheit"],
+                            "description": "Unit for the output",
+                        },
+                    },
+                    "required": ["location"],
+                },
+            }
+        ],
+        messages=[{"role": "user", "content": "What is the weather in SF?"}],
+    ) as stream:
+        async for event in stream:
+            if event.type == "input_json":
+                print(f"delta: {repr(event.partial_json)}")
+                print(f"snapshot: {event.snapshot}")
+
+
+# sync_non_streaming()
+# asyncio.run(async_non_streaming())
+
+# sync_streaming()
+# asyncio.run(async_streaming())
+
+# extra_arguments()
 
-# test_sync_streaming()
-# test_asyncio.run(async_streaming())
+# sync_stream_helper()
+# asyncio.run(async_stream_helper())
 
-# test_extra_arguments()
+# # anthrophic_bedrock()
 
-# test_sync_stream_helper()
-asyncio.run(test_async_stream_helper())
+# tool_calls()
+# asyncio.run(async_tool_calls())
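
To try the new tool examples, the calls commented out at the bottom of examples/anthrophic.py can be uncommented and the file run directly; a minimal sketch, assuming an ANTHROPIC_API_KEY is available in the environment (the Anthropic client reads it automatically):

# In examples/anthrophic.py, uncomment the calls you want to exercise:
tool_calls()                      # synchronous tool_use round-trip
asyncio.run(async_tool_calls())   # streaming tool_use with partial_json events

Then run the file, e.g. python examples/anthrophic.py.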

lunary/__init__.py  -1
@@ -143,7 +143,6 @@ def track_event(
         "appId": app_id
     }
 
-
     if callback_queue is not None:
         callback_queue.append(event)
     else:
