10 changes: 3 additions & 7 deletions libs/vertexai/tests/integration_tests/test_anthropic_cache.py
@@ -27,9 +27,7 @@ def test_anthropic_system_cache() -> None:
     )
     message = HumanMessage(content="Hello! What can you do for me?")
 
-    response = model.invoke(
-        [context, message], model_name="claude-3-5-sonnet-v2@20241022"
-    )
+    response = model.invoke([context, message], model_name="claude-sonnet-4-5@20250929")
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
     assert "usage_metadata" in response.additional_kwargs
@@ -66,9 +64,7 @@ def test_anthropic_mixed_cache() -> None:
         ]
     )
 
-    response = model.invoke(
-        [context, message], model_name="claude-3-5-sonnet-v2@20241022"
-    )
+    response = model.invoke([context, message], model_name="claude-sonnet-4-5@20250929")
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
     assert "usage_metadata" in response.additional_kwargs
@@ -112,7 +108,7 @@ def test_anthropic_conversation_cache() -> None:
         ),
     ]
 
-    response = model.invoke(messages, model_name="claude-3-5-sonnet-v2@20241022")
+    response = model.invoke(messages, model_name="claude-sonnet-4-5@20250929")
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
     assert "peter" in response.content.lower()  # Should remember the name
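Note: the cached system context used by these tests is collapsed in the view above. As a rough, illustrative sketch only (not the exact test content), the pattern being exercised builds the system message out of Anthropic `cache_control` content blocks, assuming the ephemeral cache type:

```python
from langchain_core.messages import HumanMessage, SystemMessage

# Illustrative only: the real test strings are collapsed above; the block format
# assumes Anthropic's ephemeral cache_control convention as passed through by LangChain.
context = SystemMessage(
    content=[
        {
            "type": "text",
            "text": "Long, reusable instructions that are worth caching...",
            "cache_control": {"type": "ephemeral"},
        }
    ]
)
message = HumanMessage(content="Hello! What can you do for me?")
```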
6 changes: 3 additions & 3 deletions libs/vertexai/tests/integration_tests/test_anthropic_files.py
@@ -13,7 +13,7 @@
 def test_pdf_gcs_uri() -> None:
     gcs_uri = "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf"
     llm = ChatAnthropicVertex(
-        model="claude-3-5-sonnet-v2@20241022",
+        model="claude-sonnet-4-5@20250929",
         location="us-east5",
         temperature=0.8,
         project=os.environ["PROJECT_ID"],
@@ -37,7 +37,7 @@ def test_pdf_gcs_uri() -> None:
 def test_pdf_byts() -> None:
     gcs_uri = "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf"
     llm = ChatAnthropicVertex(
-        model="claude-3-5-sonnet-v2@20241022",
+        model="claude-sonnet-4-5@20250929",
         location="us-east5",
         temperature=0.8,
         project=os.environ["PROJECT_ID"],
@@ -64,7 +64,7 @@ def test_https_image() -> None:
     uri = "https://picsum.photos/seed/picsum/200/300.jpg"
 
     llm = ChatAnthropicVertex(
-        model="claude-3-5-sonnet-v2@20241022",
+        model="claude-sonnet-4-5@20250929",
         location="us-east5",
         temperature=0.8,
         project=os.environ["PROJECT_ID"],
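For reference, a minimal, self-contained sketch of the client setup these file tests share, using the updated model ID. It mirrors the constructor arguments visible in the hunks above and assumes `PROJECT_ID` is exported, as the integration tests require; the prompt text is placeholder:

```python
import os

from langchain_core.messages import AIMessage, HumanMessage
from langchain_google_vertexai.model_garden import ChatAnthropicVertex

# Mirrors the constructor arguments shown in the hunks above.
llm = ChatAnthropicVertex(
    model="claude-sonnet-4-5@20250929",
    location="us-east5",
    temperature=0.8,
    project=os.environ["PROJECT_ID"],
)

response = llm.invoke([HumanMessage(content="Describe what you can do in one sentence.")])
assert isinstance(response, AIMessage)
```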
24 changes: 11 additions & 13 deletions libs/vertexai/tests/integration_tests/test_model_garden.py
@@ -21,7 +21,7 @@
 )
 
 _ANTHROPIC_LOCATION = "us-east5"
-_ANTHROPIC_CLAUDE35_MODEL_NAME = "claude-3-5-sonnet-v2@20241022"
+_ANTHROPIC_CLAUDE_MODEL_NAME = "claude-sonnet-4-5@20250929"
 
 
 @pytest.mark.extended
@@ -123,9 +123,7 @@ def test_anthropic() -> None:
     )
     context = SystemMessage(content=raw_context)
     message = HumanMessage(content=question)
-    response = model.invoke(
-        [context, message], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
-    )
+    response = model.invoke([context, message], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME)
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
 
@@ -142,7 +140,7 @@ def test_anthropic_stream() -> None:
         "Hello, could you recommend a good movie for me to watch this evening, please?"
     )
     message = HumanMessage(content=question)
-    sync_response = model.stream([message], model=_ANTHROPIC_CLAUDE35_MODEL_NAME)
+    sync_response = model.stream([message], model=_ANTHROPIC_CLAUDE_MODEL_NAME)
     for chunk in sync_response:
         assert isinstance(chunk, AIMessageChunk)
 
@@ -166,7 +164,7 @@ def test_anthropic_thinking_stream() -> None:
         "Hello, could you recommend a good movie for me to watch this evening, please?"
     )
     message = HumanMessage(content=question)
-    sync_response = model.stream([message], model="claude-3-7-sonnet@20250219")
+    sync_response = model.stream([message], model="claude-sonnet-4-5@20250929")
     for chunk in sync_response:
         assert isinstance(chunk, AIMessageChunk)
 
@@ -189,7 +187,7 @@ async def test_anthropic_async() -> None:
     context = SystemMessage(content=raw_context)
     message = HumanMessage(content=question)
     response = await model.ainvoke(
-        [context, message], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME, temperature=0.2
+        [context, message], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME, temperature=0.2
     )
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
@@ -223,7 +221,7 @@ class MyModel(BaseModel):
     # Test .bind_tools with BaseModel
     message = HumanMessage(content="My name is Erick and I am 27 years old")
     model_with_tools = model.bind_tools(
-        [MyModel], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
+        [MyModel], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME
     )
     response = model_with_tools.invoke([message])
     _check_tool_calls(response, "MyModel")
@@ -233,7 +231,7 @@ def my_model(name: str, age: int) -> None:
         """Invoke this with names and ages."""
 
     model_with_tools = model.bind_tools(
-        [my_model], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
+        [my_model], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME
     )
     response = model_with_tools.invoke([message])
     _check_tool_calls(response, "my_model")
@@ -244,7 +242,7 @@ def my_tool(name: str, age: int) -> None:
         """Invoke this with names and ages."""
 
     model_with_tools = model.bind_tools(
-        [my_tool], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
+        [my_tool], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME
     )
     response = model_with_tools.invoke([message])
     _check_tool_calls(response, "my_tool")
@@ -274,7 +272,7 @@ def test_anthropic_with_structured_output() -> None:
     model = ChatAnthropicVertex(
         project=project,
         location=location,
-        model=_ANTHROPIC_CLAUDE35_MODEL_NAME,
+        model=_ANTHROPIC_CLAUDE_MODEL_NAME,
     )
 
     class MyModel(BaseModel):
@@ -303,7 +301,7 @@ def test_anthropic_multiturn_tool_calling() -> None:
     model = ChatAnthropicVertex(
         project=project,
         location=location,
-        model=_ANTHROPIC_CLAUDE35_MODEL_NAME,
+        model=_ANTHROPIC_CLAUDE_MODEL_NAME,
     )
 
     @tool
@@ -349,7 +347,7 @@ def test_anthropic_tool_error_handling() -> None:
     model = ChatAnthropicVertex(
         project=project,
         location=location,
-        model=_ANTHROPIC_CLAUDE35_MODEL_NAME,
+        model=_ANTHROPIC_CLAUDE_MODEL_NAME,
    )
 
     @tool
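The tool-calling hunks above bind tools against the renamed constant. A minimal sketch of that flow with the new model ID follows; the `MyModel` fields are assumed from the test prompt (a name and an age) rather than copied from the collapsed test code, and `PROJECT_ID` is again taken from the environment:

```python
import os

from langchain_core.messages import HumanMessage
from langchain_google_vertexai.model_garden import ChatAnthropicVertex
from pydantic import BaseModel

_ANTHROPIC_CLAUDE_MODEL_NAME = "claude-sonnet-4-5@20250929"


class MyModel(BaseModel):
    """Illustrative schema; fields assumed from the test prompt."""

    name: str
    age: int


# Mirrors the test pattern: the model name is supplied when binding tools.
model = ChatAnthropicVertex(project=os.environ["PROJECT_ID"], location="us-east5")
model_with_tools = model.bind_tools([MyModel], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME)

response = model_with_tools.invoke(
    [HumanMessage(content="My name is Erick and I am 27 years old")]
)
print(response.tool_calls)  # Expect a single tool call named "MyModel"
```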