From c69fde7d59db3526ffb0ecd48a947b7ecf158811 Mon Sep 17 00:00:00 2001
From: Ryan Carroll
Date: Sat, 8 Feb 2025 16:07:43 -0500
Subject: [PATCH] model name as const

---
 example_agent/utils/ex_nodes.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/example_agent/utils/ex_nodes.py b/example_agent/utils/ex_nodes.py
index d85e15f..12ce90f 100644
--- a/example_agent/utils/ex_nodes.py
+++ b/example_agent/utils/ex_nodes.py
@@ -13,7 +13,7 @@
 
 load_dotenv()
 
-environ_model_name = os.environ.get("MODEL_NAME")
+ENVIRON_MODEL_NAME = os.environ.get("MODEL_NAME")
 
 @lru_cache(maxsize=4)
 def _get_tool_model(model_name: str):
@@ -46,7 +46,7 @@ def multi_choice_structured(state: AgentState, config):
     # We call the model with structured output in order to return the same format to the user every time
     # state['messages'][-2] is the last ToolMessage in the convo, which we convert to a HumanMessage for the model to use
     # We could also pass the entire chat history, but this saves tokens since all we care to structure is the output of the tool
-    model_name = config.get("configurable", {}).get("model_name", environ_model_name)
+    model_name = config.get("configurable", {}).get("model_name", ENVIRON_MODEL_NAME)
 
     response = _get_response_model(model_name).invoke(
         [
@@ -72,18 +72,20 @@ def structure_response(state: AgentState, config):
         # if not multi-choice don't need to do anything
         return {"messages": []}
 
+
 system_prompt = """
     You are an oregon trail playing tool calling AI agent. Use the tools available to you to answer the question you are presented. When in doubt use the tools to help you find the answer.
     If anyone asks your first name is Art return just that string.
    """
+
 # Define the function that calls the model
 def call_tool_model(state: AgentState, config):
     # Combine system prompt with incoming messages
     messages = [{"role": "system", "content": system_prompt}] + state["messages"]
 
     # Get from LangGraph config
-    model_name = config.get("configurable", {}).get("model_name", environ_model_name)
+    model_name = config.get("configurable", {}).get("model_name", ENVIRON_MODEL_NAME)
 
     # Get our model that binds our tools
     model = _get_tool_model(model_name)
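
Note: this patch standardizes how the model name is resolved: a per-invocation override read from LangGraph's "configurable" dict, falling back to the MODEL_NAME environment variable. A minimal sketch of that lookup is shown below; the resolve_model_name helper and the example model string are illustrative only and are not part of ex_nodes.py.

import os

# Default model name, read once from the environment at import time
# (mirrors ENVIRON_MODEL_NAME in the patch).
ENVIRON_MODEL_NAME = os.environ.get("MODEL_NAME")

def resolve_model_name(config: dict) -> str | None:
    # Prefer a per-invocation override from config["configurable"]["model_name"],
    # falling back to the environment-derived constant.
    return config.get("configurable", {}).get("model_name", ENVIRON_MODEL_NAME)

# Caller overrides the model for a single invocation (example value only):
print(resolve_model_name({"configurable": {"model_name": "gpt-4o-mini"}}))
# No override: falls back to MODEL_NAME from the environment (None if unset).
print(resolve_model_name({}))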