@@ -49,7 +49,7 @@ def test_anthropic_invoke_happy_path(mock_anthropic: Mock) -> None:
     input_text = "may thy knife chip and shatter"
     response = llm.invoke(input_text)
     assert response.content == "generated text"
-    llm.client.messages.create.assert_called_once_with(
+    llm.client.messages.create.assert_called_once_with(  # type: ignore[attr-defined]
         messages=[{"role": "user", "content": input_text}],
         model="claude-3-opus-20240229",
         system=anthropic.NOT_GIVEN,
@@ -81,7 +81,7 @@ def test_anthropic_invoke_with_message_history_happy_path(mock_anthropic: Mock)
     response = llm.invoke(question, message_history)
     assert response.content == "generated text"
     message_history.add_message(LLMMessage(role="user", content=question))
-    llm.client.messages.create.assert_called_once_with(
+    llm.client.messages.create.assert_called_once_with(  # type: ignore[attr-defined]
         messages=message_history,
         model="claude-3-opus-20240229",
         system=anthropic.NOT_GIVEN,
@@ -107,14 +107,14 @@ def test_anthropic_invoke_with_system_instruction(
     assert isinstance(response, LLMResponse)
     assert response.content == "generated text"
     messages = [{"role": "user", "content": question}]
-    llm.client.messages.create.assert_called_with(
+    llm.client.messages.create.assert_called_with(  # type: ignore[attr-defined]
         model="claude-3-opus-20240229",
         system=system_instruction,
         messages=messages,
         **model_params,
     )
 
-    assert llm.client.messages.create.call_count == 1
+    assert llm.client.messages.create.call_count == 1  # type: ignore[attr-defined]
 
 
 def test_anthropic_invoke_with_message_history_and_system_instruction(
@@ -145,14 +145,14 @@ def test_anthropic_invoke_with_message_history_and_system_instruction(
     assert isinstance(response, LLMResponse)
     assert response.content == "generated text"
     message_history.add_message(LLMMessage(role="user", content=question))
-    llm.client.messages.create.assert_called_with(
+    llm.client.messages.create.assert_called_with(  # type: ignore[attr-defined]
         model="claude-3-opus-20240229",
         system=system_instruction,
         messages=message_history,
         **model_params,
     )
 
-    assert llm.client.messages.create.call_count == 1
+    assert llm.client.messages.create.call_count == 1  # type: ignore[attr-defined]
 
 
 def test_anthropic_invoke_with_message_history_validation_error(
@@ -190,7 +190,7 @@ async def test_anthropic_ainvoke_happy_path(mock_anthropic: Mock) -> None:
     input_text = "may thy knife chip and shatter"
     response = await llm.ainvoke(input_text)
     assert response.content == "Return text"
-    llm.async_client.messages.create.assert_awaited_once_with(
+    llm.async_client.messages.create.assert_awaited_once_with(  # type: ignore[attr-defined]
         model="claude-3-opus-20240229",
         system=anthropic.NOT_GIVEN,
         messages=[{"role": "user", "content": input_text}],