# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
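"""Unit tests for BuiltInCodeExecutor.process_llm_request.

For Gemini 2.x models the executor is expected to add
types.Tool(code_execution=types.ToolCodeExecution()) to
llm_request.config.tools, appending to any existing tools; for unsupported or
missing model names it is expected to raise a ValueError.
"""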
import pytest
from google.genai import types

from google.adk.code_executors.built_in_code_executor import BuiltInCodeExecutor
from google.adk.models.llm_request import LlmRequest


@pytest.fixture
def built_in_executor() -> BuiltInCodeExecutor:
  return BuiltInCodeExecutor()


def test_process_llm_request_gemini_2_model_config_none(
    built_in_executor: BuiltInCodeExecutor,
):
  """Tests processing when llm_request.config is None for Gemini 2."""
  llm_request = LlmRequest(model="gemini-2.0-flash")
  built_in_executor.process_llm_request(llm_request)
  assert llm_request.config is not None
  assert llm_request.config.tools == [
      types.Tool(code_execution=types.ToolCodeExecution())
  ]


def test_process_llm_request_gemini_2_model_tools_none(
    built_in_executor: BuiltInCodeExecutor,
):
  """Tests processing when llm_request.config.tools is None for Gemini 2."""
  llm_request = LlmRequest(
      model="gemini-2.0-pro", config=types.GenerateContentConfig()
  )
  built_in_executor.process_llm_request(llm_request)
  assert llm_request.config.tools == [
      types.Tool(code_execution=types.ToolCodeExecution())
  ]


def test_process_llm_request_gemini_2_model_tools_empty(
    built_in_executor: BuiltInCodeExecutor,
):
  """Tests processing when llm_request.config.tools is empty for Gemini 2."""
  llm_request = LlmRequest(
      model="gemini-2.0-ultra",
      config=types.GenerateContentConfig(tools=[]),
  )
  built_in_executor.process_llm_request(llm_request)
  assert llm_request.config.tools == [
      types.Tool(code_execution=types.ToolCodeExecution())
  ]


def test_process_llm_request_gemini_2_model_with_existing_tools(
    built_in_executor: BuiltInCodeExecutor,
):
  """Tests processing when llm_request.config.tools already has tools for Gemini 2."""
  existing_tool = types.Tool(
      function_declarations=[
          types.FunctionDeclaration(name="test_func", description="A test func")
      ]
  )
  llm_request = LlmRequest(
      model="gemini-2.0-flash-001",
      config=types.GenerateContentConfig(tools=[existing_tool]),
  )
  built_in_executor.process_llm_request(llm_request)
  assert len(llm_request.config.tools) == 2
  assert existing_tool in llm_request.config.tools
  assert types.Tool(
      code_execution=types.ToolCodeExecution()
  ) in llm_request.config.tools


def test_process_llm_request_non_gemini_2_model(
    built_in_executor: BuiltInCodeExecutor,
):
  """Tests that a ValueError is raised for non-Gemini 2 models."""
  llm_request = LlmRequest(model="gemini-1.5-flash")
  with pytest.raises(ValueError) as excinfo:
    built_in_executor.process_llm_request(llm_request)
  assert (
      "Gemini code execution tool is not supported for model gemini-1.5-flash"
      in str(excinfo.value)
  )


def test_process_llm_request_no_model_name(
    built_in_executor: BuiltInCodeExecutor,
):
  """Tests that a ValueError is raised if model name is not set."""
  llm_request = LlmRequest()  # Model name defaults to None
  with pytest.raises(ValueError) as excinfo:
    built_in_executor.process_llm_request(llm_request)
  assert (
      "Gemini code execution tool is not supported for model None"
      in str(excinfo.value)
  )