From b92c61f1ac70cf3be91d7a01ffff24cf23ff0858 Mon Sep 17 00:00:00 2001
From: Mason Daugherty
Date: Thu, 30 Oct 2025 18:19:55 -0400
Subject: [PATCH 1/3] tests(vertex): use new claude model

---
 .../integration_tests/test_anthropic_cache.py | 10 +++-----
 .../integration_tests/test_anthropic_files.py |  6 ++---
 .../integration_tests/test_model_garden.py    | 24 +++++++++----------
 3 files changed, 17 insertions(+), 23 deletions(-)

diff --git a/libs/vertexai/tests/integration_tests/test_anthropic_cache.py b/libs/vertexai/tests/integration_tests/test_anthropic_cache.py
index 8385f129a..679d6d449 100644
--- a/libs/vertexai/tests/integration_tests/test_anthropic_cache.py
+++ b/libs/vertexai/tests/integration_tests/test_anthropic_cache.py
@@ -27,9 +27,7 @@ def test_anthropic_system_cache() -> None:
     )
     message = HumanMessage(content="Hello! What can you do for me?")
 
-    response = model.invoke(
-        [context, message], model_name="claude-3-5-sonnet-v2@20241022"
-    )
+    response = model.invoke([context, message], model_name="claude-sonnet-4-5@20250929")
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
     assert "usage_metadata" in response.additional_kwargs
@@ -66,9 +64,7 @@ def test_anthropic_mixed_cache() -> None:
         ]
     )
 
-    response = model.invoke(
-        [context, message], model_name="claude-3-5-sonnet-v2@20241022"
-    )
+    response = model.invoke([context, message], model_name="claude-sonnet-4-5@20250929")
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
     assert "usage_metadata" in response.additional_kwargs
@@ -112,7 +108,7 @@ def test_anthropic_conversation_cache() -> None:
         ),
     ]
 
-    response = model.invoke(messages, model_name="claude-3-5-sonnet-v2@20241022")
+    response = model.invoke(messages, model_name="claude-sonnet-4-5@20250929")
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
     assert "peter" in response.content.lower()  # Should remember the name
diff --git a/libs/vertexai/tests/integration_tests/test_anthropic_files.py b/libs/vertexai/tests/integration_tests/test_anthropic_files.py
index 261ee720a..86e8cde04 100644
--- a/libs/vertexai/tests/integration_tests/test_anthropic_files.py
+++ b/libs/vertexai/tests/integration_tests/test_anthropic_files.py
@@ -13,7 +13,7 @@ def test_pdf_gcs_uri() -> None:
     gcs_uri = "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf"
 
     llm = ChatAnthropicVertex(
-        model="claude-3-5-sonnet-v2@20241022",
+        model="claude-sonnet-4-5@20250929",
         location="us-east5",
         temperature=0.8,
         project=os.environ["PROJECT_ID"],
@@ -37,7 +37,7 @@ def test_pdf_byts() -> None:
     gcs_uri = "gs://cloud-samples-data/generative-ai/pdf/2403.05530.pdf"
 
     llm = ChatAnthropicVertex(
-        model="claude-3-5-sonnet-v2@20241022",
+        model="claude-sonnet-4-5@20250929",
         location="us-east5",
         temperature=0.8,
         project=os.environ["PROJECT_ID"],
@@ -64,7 +64,7 @@ def test_https_image() -> None:
     uri = "https://picsum.photos/seed/picsum/200/300.jpg"
 
     llm = ChatAnthropicVertex(
-        model="claude-3-5-sonnet-v2@20241022",
+        model="claude-sonnet-4-5@20250929",
         location="us-east5",
         temperature=0.8,
         project=os.environ["PROJECT_ID"],
diff --git a/libs/vertexai/tests/integration_tests/test_model_garden.py b/libs/vertexai/tests/integration_tests/test_model_garden.py
index cd09ade8b..f1d7f6841 100644
--- a/libs/vertexai/tests/integration_tests/test_model_garden.py
+++ b/libs/vertexai/tests/integration_tests/test_model_garden.py
@@ -21,7 +21,7 @@
 )
 
 _ANTHROPIC_LOCATION = "us-east5"
-_ANTHROPIC_CLAUDE35_MODEL_NAME = "claude-3-5-sonnet-v2@20241022"
+_ANTHROPIC_CLAUDE_MODEL_NAME = "claude-sonnet-4-5@20250929"
 
 
 @pytest.mark.extended
@@ -123,9 +123,7 @@ def test_anthropic() -> None:
     )
     context = SystemMessage(content=raw_context)
     message = HumanMessage(content=question)
-    response = model.invoke(
-        [context, message], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
-    )
+    response = model.invoke([context, message], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME)
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
 
@@ -142,7 +140,7 @@ def test_anthropic_stream() -> None:
         "Hello, could you recommend a good movie for me to watch this evening, please?"
     )
     message = HumanMessage(content=question)
-    sync_response = model.stream([message], model=_ANTHROPIC_CLAUDE35_MODEL_NAME)
+    sync_response = model.stream([message], model=_ANTHROPIC_CLAUDE_MODEL_NAME)
     for chunk in sync_response:
         assert isinstance(chunk, AIMessageChunk)
 
@@ -166,7 +164,7 @@ def test_anthropic_thinking_stream() -> None:
         "Hello, could you recommend a good movie for me to watch this evening, please?"
     )
     message = HumanMessage(content=question)
-    sync_response = model.stream([message], model="claude-3-7-sonnet@20250219")
+    sync_response = model.stream([message], model="claude-sonnet-4-5@20250929")
     for chunk in sync_response:
         assert isinstance(chunk, AIMessageChunk)
 
@@ -189,7 +187,7 @@ async def test_anthropic_async() -> None:
     context = SystemMessage(content=raw_context)
     message = HumanMessage(content=question)
     response = await model.ainvoke(
-        [context, message], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME, temperature=0.2
+        [context, message], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME, temperature=0.2
     )
     assert isinstance(response, AIMessage)
     assert isinstance(response.content, str)
@@ -223,7 +221,7 @@ class MyModel(BaseModel):
     # Test .bind_tools with BaseModel
     message = HumanMessage(content="My name is Erick and I am 27 years old")
     model_with_tools = model.bind_tools(
-        [MyModel], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
+        [MyModel], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME
     )
     response = model_with_tools.invoke([message])
     _check_tool_calls(response, "MyModel")
@@ -233,7 +231,7 @@ def my_model(name: str, age: int) -> None:
         """Invoke this with names and ages."""
 
     model_with_tools = model.bind_tools(
-        [my_model], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
+        [my_model], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME
     )
     response = model_with_tools.invoke([message])
     _check_tool_calls(response, "my_model")
@@ -244,7 +242,7 @@ def my_tool(name: str, age: int) -> None:
         """Invoke this with names and ages."""
 
     model_with_tools = model.bind_tools(
-        [my_tool], model_name=_ANTHROPIC_CLAUDE35_MODEL_NAME
+        [my_tool], model_name=_ANTHROPIC_CLAUDE_MODEL_NAME
     )
     response = model_with_tools.invoke([message])
     _check_tool_calls(response, "my_tool")
@@ -274,7 +272,7 @@ def test_anthropic_with_structured_output() -> None:
     model = ChatAnthropicVertex(
         project=project,
         location=location,
-        model=_ANTHROPIC_CLAUDE35_MODEL_NAME,
+        model=_ANTHROPIC_CLAUDE_MODEL_NAME,
     )
 
     class MyModel(BaseModel):
@@ -303,7 +301,7 @@ def test_anthropic_multiturn_tool_calling() -> None:
     model = ChatAnthropicVertex(
         project=project,
         location=location,
-        model=_ANTHROPIC_CLAUDE35_MODEL_NAME,
+        model=_ANTHROPIC_CLAUDE_MODEL_NAME,
     )
 
     @tool
@@ -349,7 +347,7 @@ def test_anthropic_tool_error_handling() -> None:
     model = ChatAnthropicVertex(
         project=project,
         location=location,
-        model=_ANTHROPIC_CLAUDE35_MODEL_NAME,
+        model=_ANTHROPIC_CLAUDE_MODEL_NAME,
     )
 
     @tool

From 5360272b1f9128b6d1e7386f70fef536798de1da Mon Sep 17 00:00:00 2001
From: Mason Daugherty
Date: Fri, 31 Oct 2025 15:06:47 -0400
Subject: [PATCH 2/3] bump

From 576cc4011297177831d9710950eb2c8b8987eca1 Mon Sep 17 00:00:00 2001
From: Mason Daugherty
Date: Mon, 3 Nov 2025 09:41:51 -0500
Subject: [PATCH 3/3] bump
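
For reference, every call site touched in PATCH 1/3 follows the same ChatAnthropicVertex pattern with only the model name bumped. Below is a minimal usage sketch of the updated setup, not part of the patch itself; it mirrors the constructor arguments used in test_anthropic_files.py and assumes a PROJECT_ID environment variable and access to Anthropic models in us-east5, as the tests do.

import os

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_google_vertexai import ChatAnthropicVertex

# Sketch of the updated test setup: same arguments as the patched tests,
# with the new claude-sonnet-4-5 model name introduced by this series.
model = ChatAnthropicVertex(
    model="claude-sonnet-4-5@20250929",
    location="us-east5",                # region used by the Anthropic tests
    temperature=0.8,
    project=os.environ["PROJECT_ID"],   # assumes PROJECT_ID is exported
)

context = SystemMessage(content="You are a helpful assistant.")
message = HumanMessage(content="Hello! What can you do for me?")

# The cache and model-garden tests also override model_name per invoke call;
# here the constructor default is used.
response = model.invoke([context, message])
print(response.content)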