5 files changed: +28 -11 lines changed

File 1 of 5
@@ -96,7 +96,11 @@ async def get_response(
             logger.debug("Received model response")
         else:
             logger.debug(
-                f"LLM resp:\n{json.dumps(response.choices[0].message.model_dump(), indent=2)}\n"
+                f"LLM resp:\n{json.dumps(
+                    response.choices[0].message.model_dump(),
+                    indent=2,
+                    ensure_ascii=False
+                )}\n"
             )

         if hasattr(response, "usage"):
@@ -251,8 +255,8 @@ async def _fetch_response(
         else:
             logger.debug(
                 f"Calling Litellm model: {self.model}\n"
-                f"{json.dumps(converted_messages, indent=2)}\n"
-                f"Tools:\n{json.dumps(converted_tools, indent=2)}\n"
+                f"{json.dumps(converted_messages, indent=2, ensure_ascii=False)}\n"
+                f"Tools:\n{json.dumps(converted_tools, indent=2, ensure_ascii=False)}\n"
                 f"Stream: {stream}\n"
                 f"Tool choice: {tool_choice}\n"
                 f"Response format: {response_format}\n"
File 2 of 5
@@ -116,7 +116,10 @@ async def invoke_mcp_tool(
         if len(result.content) == 1:
             tool_output = result.content[0].model_dump_json()
         elif len(result.content) > 1:
-            tool_output = json.dumps([item.model_dump() for item in result.content])
+            tool_output = json.dumps(
+                [item.model_dump() for item in result.content],
+                ensure_ascii=False
+            )
         else:
             logger.error(f"Errored MCP tool result: {result}")
             tool_output = "Error running tool."
File 3 of 5
@@ -393,7 +393,8 @@ def ensure_assistant_message() -> ChatCompletionAssistantMessageParam:
                             {
                                 "queries": file_search.get("queries", []),
                                 "status": file_search.get("status"),
-                            }
+                            },
+                            ensure_ascii=False
                         ),
                     },
                 )
File 4 of 5
@@ -74,7 +74,12 @@ async def get_response(
             logger.debug("Received model response")
         else:
             logger.debug(
-                f"LLM resp:\n{json.dumps(response.choices[0].message.model_dump(), indent=2)}\n"
+                "LLM resp:\n"
+                f"{json.dumps(
+                    response.choices[0].message.model_dump(),
+                    indent=2,
+                    ensure_ascii=False
+                )}\n"
             )

         usage = (
@@ -222,8 +227,8 @@ async def _fetch_response(
             logger.debug("Calling LLM")
         else:
             logger.debug(
-                f"{json.dumps(converted_messages, indent=2)}\n"
-                f"Tools:\n{json.dumps(converted_tools, indent=2)}\n"
+                f"{json.dumps(converted_messages, indent=2, ensure_ascii=False)}\n"
+                f"Tools:\n{json.dumps(converted_tools, indent=2, ensure_ascii=False)}\n"
                 f"Stream: {stream}\n"
                 f"Tool choice: {tool_choice}\n"
                 f"Response format: {response_format}\n"
File 5 of 5
@@ -89,7 +89,11 @@ async def get_response(
         else:
             logger.debug(
                 "LLM resp:\n"
-                f"{json.dumps([x.model_dump() for x in response.output], indent=2)}\n"
+                f"{json.dumps(
+                    [x.model_dump() for x in response.output],
+                    indent=2,
+                    ensure_ascii=False
+                )}\n"
             )

         usage = (
@@ -231,8 +235,8 @@ async def _fetch_response(
         else:
             logger.debug(
                 f"Calling LLM {self.model} with input:\n"
-                f"{json.dumps(list_input, indent=2)}\n"
-                f"Tools:\n{json.dumps(converted_tools.tools, indent=2)}\n"
+                f"{json.dumps(list_input, indent=2, ensure_ascii=False)}\n"
+                f"Tools:\n{json.dumps(converted_tools.tools, indent=2, ensure_ascii=False)}\n"
                 f"Stream: {stream}\n"
                 f"Tool choice: {tool_choice}\n"
                 f"Response format: {response_format}\n"