Skip to content

Commit 9134b42

Browse files
committed
use a single FunctionCallParams parameter for function calls
1 parent 0c012ca commit 9134b42

File tree

13 files changed

+47
-44
lines changed

13 files changed

+47
-44
lines changed

guides/fundamentals/end-pipeline.mdx

+3-3
Original file line numberDiff line numberDiff line change
@@ -35,11 +35,11 @@ await task.queue_frame(EndFrame())
3535
- Push an `EndTaskFrame` upstream from inside your pipeline. For example, inside a function call:
3636

3737
```python
38-
async def end_conversation(function_name, tool_call_id, args, llm, context, result_callback):
39-
await llm.push_frame(TTSSpeakFrame("Have a nice day!"))
38+
async def end_conversation(params: FunctionCallParams):
39+
await params.llm.push_frame(TTSSpeakFrame("Have a nice day!"))
4040

4141
# Signal that the task should end after processing this frame
42-
await llm.push_frame(EndTaskFrame(), FrameDirection.UPSTREAM)
42+
await params.llm.push_frame(EndTaskFrame(), FrameDirection.UPSTREAM)
4343
```
4444

4545
### How Graceful Shutdown Works

guides/fundamentals/function-calling.mdx

+14-13
Original file line numberDiff line numberDiff line change
@@ -201,10 +201,10 @@ Register handlers for your functions using the LLM service's [`register_function
201201
llm = OpenAILLMService(api_key="your-api-key", model="gpt-4")
202202

203203
# Main function handler - called to execute the function
204-
async def fetch_weather_from_api(function_name, tool_call_id, args, llm, context, result_callback):
204+
async def fetch_weather_from_api(params: FunctionCallParams):
205205
# Fetch weather data from your API
206206
weather_data = {"conditions": "sunny", "temperature": "75"}
207-
await result_callback(weather_data)
207+
await params.result_callback(weather_data)
208208

209209
# Register the function
210210
llm.register_function(
@@ -243,12 +243,13 @@ pipeline = Pipeline([
243243

244244
### Handler Parameters
245245

246-
- `function_name`: Name of the called function
247-
- `tool_call_id`: Unique identifier for the function call
248-
- `args`: Arguments passed by the LLM
249-
- `llm`: Reference to the LLM service
250-
- `context`: Current conversation context
251-
- `result_callback`: Async function to return results
246+
- `params`: A `FunctionCallParams` object containing the following fields:
247+
- `function_name`: Name of the called function
248+
- `arguments`: Arguments passed by the LLM
249+
- `tool_call_id`: Unique identifier for the function call
250+
- `llm`: Reference to the LLM service
251+
- `context`: Current conversation context
252+
- `result_callback`: Async function to return results
252253

253254
### Return Values
254255

@@ -287,18 +288,18 @@ Controls whether the LLM should generate a response after the function call:
287288
### Example Usage
288289

289290
```python
290-
async def fetch_weather_from_api(function_name, tool_call_id, args, llm, context, result_callback):
291+
async def fetch_weather_from_api(params: FunctionCallParams):
291292
# Fetch weather data
292293
weather_data = {"conditions": "sunny", "temperature": "75"}
293294

294295
# Don't run LLM after this function call
295296
properties = FunctionCallResultProperties(run_llm=False)
296297

297-
await result_callback(weather_data, properties=properties)
298+
await params.result_callback(weather_data, properties=properties)
298299

299-
async def query_database(function_name, tool_call_id, args, llm, context, result_callback):
300+
async def query_database(params: FunctionCallParams):
300301
# Query database
301-
results = await db.query(args["query"])
302+
results = await db.query(params.arguments["query"])
302303

303304
async def on_update():
304305
await notify_system("Database query complete")
@@ -309,7 +310,7 @@ async def query_database(function_name, tool_call_id, args, llm, context, result
309310
on_context_updated=on_update
310311
)
311312

312-
await result_callback(results, properties=properties)
313+
await params.result_callback(results, properties=properties)
313314
```
314315

315316
## Next steps

server/frameworks/rtvi/rtvi-processor.mdx

+8-6
Original file line numberDiff line numberDiff line change
@@ -150,12 +150,14 @@ Handle LLM function calls with client interaction:
150150

151151
```python
152152
await processor.handle_function_call(
153-
function_name=function_name,
154-
tool_call_id=tool_call_id,
155-
arguments=arguments,
156-
llm=llm,
157-
context=context,
158-
result_callback=result_callback
153+
FunctionCallParams(
154+
function_name=function_name,
155+
arguments=arguments,
156+
tool_call_id=tool_call_id,
157+
llm=llm,
158+
context=context,
159+
result_callback=result_callback
160+
)
159161
)
160162
```
161163

server/services/llm/azure.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -184,8 +184,8 @@ context = OpenAILLMContext(
184184
)
185185

186186
# Register function handlers
187-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
188-
await result_callback({"conditions": "nice", "temperature": "75"})
187+
async def fetch_weather(params: FunctionCallParams):
188+
await params.result_callback({"conditions": "nice", "temperature": "75"})
189189

190190
llm.register_function("get_current_weather", fetch_weather)
191191

server/services/llm/cerebras.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -108,8 +108,8 @@ Infer whether to use Fahrenheit or Celsius automatically based on the location,
108108
)
109109

110110
# Register function handlers
111-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
112-
await result_callback({"conditions": "nice", "temperature": "75"})
111+
async def fetch_weather(params: FunctionCallParams):
112+
await params.result_callback({"conditions": "nice", "temperature": "75"})
113113

114114
llm.register_function("get_current_weather", fetch_weather)
115115

server/services/llm/deepseek.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -144,8 +144,8 @@ context = OpenAILLMContext(
144144
)
145145

146146
# Register function handlers
147-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
148-
await result_callback({"conditions": "nice", "temperature": "75"})
147+
async def fetch_weather(params: FunctionCallParams):
148+
await params.result_callback({"conditions": "nice", "temperature": "75"})
149149

150150
llm.register_function("get_current_weather", fetch_weather)
151151

server/services/llm/grok.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -108,8 +108,8 @@ context = OpenAILLMContext(
108108
)
109109

110110
# Register function handlers
111-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
112-
await result_callback({"conditions": "nice", "temperature": "75"})
111+
async def fetch_weather(params: FunctionCallParams):
112+
await params.result_callback({"conditions": "nice", "temperature": "75"})
113113

114114
llm.register_function("get_current_weather", fetch_weather)
115115

server/services/llm/groq.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -107,8 +107,8 @@ context = OpenAILLMContext(
107107
)
108108

109109
# Register function handlers
110-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
111-
await result_callback({"conditions": "nice", "temperature": "75"})
110+
async def fetch_weather(params: FunctionCallParams):
111+
await params.result_callback({"conditions": "nice", "temperature": "75"})
112112

113113
llm.register_function("get_current_weather", fetch_weather)
114114

server/services/llm/nim.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -116,8 +116,8 @@ context = OpenAILLMContext(
116116
)
117117

118118
# Register function handlers
119-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
120-
await result_callback({"conditions": "nice", "temperature": "75"})
119+
async def fetch_weather(params: FunctionCallParams):
120+
await params.result_callback({"conditions": "nice", "temperature": "75"})
121121

122122
llm.register_function("get_current_weather", fetch_weather)
123123

server/services/llm/openai.mdx

+4-4
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ Range: [-2.0, 2.0] where higher values reduce repetition more.
7575
<ParamField path="temperature" type="float" optional>
7676
Controls randomness/creativity in the output. Lower values are more
7777
deterministic, higher values more creative.
78-
78+
7979
Range: [0.0, 2.0]
8080

8181
</ParamField>
@@ -178,7 +178,7 @@ OpenAI's API requires a specific format for conversation history and tools. The
178178
<ParamField path="messages" type="List[ChatCompletionMessageParam]" optional>
179179
Initial list of conversation messages. Each message should be an object with
180180
at least a "role" (user, assistant, or system) and "content" fields.
181-
181+
182182
Defaults to an empty list.
183183

184184
</ParamField>
@@ -384,8 +384,8 @@ context = OpenAILLMContext(
384384
)
385385

386386
# Register function handlers
387-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
388-
await result_callback({"conditions": "nice", "temperature": "75"})
387+
async def fetch_weather(params: FunctionCallParams):
388+
await params.result_callback({"conditions": "nice", "temperature": "75"})
389389

390390
llm.register_function("get_current_weather", fetch_weather)
391391

server/services/llm/openrouter.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -148,8 +148,8 @@ context = OpenAILLMContext(
148148
)
149149

150150
# Register function handlers
151-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
152-
await result_callback({"conditions": "nice", "temperature": "75"})
151+
async def fetch_weather(params: FunctionCallParams):
152+
await params.result_callback({"conditions": "nice", "temperature": "75"})
153153

154154
llm.register_function("get_current_weather", fetch_weather)
155155

server/services/llm/qwen.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -116,8 +116,8 @@ context = OpenAILLMContext(
116116
)
117117

118118
# Register function handlers
119-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
120-
await result_callback({"conditions": "sunny", "temperature": "22°C"})
119+
async def fetch_weather(params: FunctionCallParams):
120+
await params.result_callback({"conditions": "sunny", "temperature": "22°C"})
121121

122122
llm.register_function("get_current_weather", fetch_weather)
123123

server/services/llm/together.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -156,8 +156,8 @@ context = OpenAILLMContext(
156156
)
157157

158158
# Register function handlers
159-
async def fetch_weather(function_name, tool_call_id, args, llm, context, result_callback):
160-
await result_callback({"conditions": "nice", "temperature": "75"})
159+
async def fetch_weather(params: FunctionCallParams):
160+
await params.result_callback({"conditions": "nice", "temperature": "75"})
161161

162162
llm.register_function("get_current_weather", fetch_weather)
163163

0 commit comments

Comments (0)