Skip to content

Commit 33f61b9

Browse files
authored
google gemini (oai) support (#95)
1 parent 66173ac commit 33f61b9

File tree

8 files changed

+55
-7
lines changed

8 files changed

+55
-7
lines changed

src/mcp_agent/config.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -139,6 +139,19 @@ class DeepSeekSettings(BaseModel):
139139
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
140140

141141

142+
class GoogleSettings(BaseModel):
    """
    Settings for using Google (Gemini) models in the fast-agent application.

    Configures access to the Google Generative Language API via its
    OpenAI-compatible endpoint.
    """

    # API key for the Google Generative Language API; when not set here,
    # resolution falls back to the GOOGLE_API_KEY environment variable
    # (see provider_key_manager).
    api_key: str | None = None
    # reasoning_effort: Literal["low", "medium", "high"] = "medium"

    # Optional override for the OpenAI-compatible endpoint base URL;
    # defaults to Google's public endpoint when unset.
    base_url: str | None = None

    # Allow extra/unknown fields so forward-compatible config keys don't fail validation.
    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
142155
class GenericSettings(BaseModel):
143156
"""
144157
Settings for using generic OpenAI-compatible model providers in the fast-agent application.
@@ -264,6 +277,9 @@ class Settings(BaseSettings):
264277
deepseek: DeepSeekSettings | None = None
265278
"""Settings for using DeepSeek models in the fast-agent application"""
266279

280+
google: GoogleSettings | None = None
281+
"""Settings for using DeepSeek models in the fast-agent application"""
282+
267283
openrouter: OpenRouterSettings | None = None
268284
"""Settings for using OpenRouter models in the fast-agent application"""
269285

src/mcp_agent/llm/model_factory.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
from mcp_agent.llm.providers.augmented_llm_anthropic import AnthropicAugmentedLLM
1313
from mcp_agent.llm.providers.augmented_llm_deepseek import DeepSeekAugmentedLLM
1414
from mcp_agent.llm.providers.augmented_llm_generic import GenericAugmentedLLM
15+
from mcp_agent.llm.providers.augmented_llm_google import GoogleAugmentedLLM
1516
from mcp_agent.llm.providers.augmented_llm_openai import OpenAIAugmentedLLM
1617
from mcp_agent.llm.providers.augmented_llm_openrouter import OpenRouterAugmentedLLM
1718
from mcp_agent.mcp.interfaces import AugmentedLLMProtocol
@@ -107,6 +108,7 @@ class ModelFactory:
107108
Provider.FAST_AGENT: PassthroughLLM,
108109
Provider.DEEPSEEK: DeepSeekAugmentedLLM,
109110
Provider.GENERIC: GenericAugmentedLLM,
111+
Provider.GOOGLE: GoogleAugmentedLLM, # type: ignore
110112
Provider.OPENROUTER: OpenRouterAugmentedLLM,
111113
}
112114

src/mcp_agent/llm/provider_key_manager.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
"anthropic": "ANTHROPIC_API_KEY",
1515
"openai": "OPENAI_API_KEY",
1616
"deepseek": "DEEPSEEK_API_KEY",
17+
"google": "GOOGLE_API_KEY",
1718
"openrouter": "OPENROUTER_API_KEY",
1819
"generic": "GENERIC_API_KEY",
1920
}

src/mcp_agent/llm/provider_types.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ class Provider(Enum):
1111
ANTHROPIC = "anthropic"
1212
OPENAI = "openai"
1313
FAST_AGENT = "fast-agent"
14+
GOOGLE = "google"
1415
DEEPSEEK = "deepseek"
1516
GENERIC = "generic"
16-
OPENROUTER = "openrouter"
17+
OPENROUTER = "openrouter"

src/mcp_agent/llm/providers/augmented_llm_deepseek.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
21
from mcp_agent.core.request_params import RequestParams
32
from mcp_agent.llm.provider_types import Provider
43
from mcp_agent.llm.providers.augmented_llm_openai import OpenAIAugmentedLLM
@@ -9,7 +8,6 @@
98

109
class DeepSeekAugmentedLLM(OpenAIAugmentedLLM):
1110
def __init__(self, *args, **kwargs) -> None:
12-
kwargs["provider_name"] = "Deepseek" # Set provider name in kwargs
1311
super().__init__(
1412
*args, provider=Provider.DEEPSEEK, **kwargs
1513
) # Properly pass args and kwargs to parent
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
from mcp_agent.core.request_params import RequestParams
2+
from mcp_agent.llm.provider_types import Provider
3+
from mcp_agent.llm.providers.augmented_llm_openai import OpenAIAugmentedLLM
4+
5+
GOOGLE_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai"
6+
DEFAULT_GOOGLE_MODEL = "gemini-2.0-flash"
7+
8+
9+
class GoogleAugmentedLLM(OpenAIAugmentedLLM):
10+
def __init__(self, *args, **kwargs) -> None:
11+
super().__init__(*args, provider=Provider.GOOGLE, **kwargs)
12+
13+
def _initialize_default_params(self, kwargs: dict) -> RequestParams:
14+
"""Initialize Google OpenAI Compatibility default parameters"""
15+
chosen_model = kwargs.get("model", DEFAULT_GOOGLE_MODEL)
16+
17+
return RequestParams(
18+
model=chosen_model,
19+
systemPrompt=self.instruction,
20+
parallel_tool_calls=False,
21+
max_iterations=10,
22+
use_history=True,
23+
)
24+
25+
def _base_url(self) -> str:
26+
base_url = None
27+
if self.context.config and self.context.config.google:
28+
base_url = self.context.config.google.base_url
29+
30+
return base_url if base_url else GOOGLE_BASE_URL

tests/e2e/smoke/test_e2e_smoke.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
# "generic.qwen2.5:latest",
2828
"generic.llama3.2:latest",
2929
"openrouter.google/gemini-2.0-flash-001",
30+
"google.gemini-2.0-flash",
3031
],
3132
)
3233
async def test_basic_textual_prompting(fast_agent, model_name):
@@ -260,6 +261,7 @@ async def agent_function():
260261
"gpt-4.1",
261262
"gpt-4.1-nano",
262263
"gpt-4.1-mini",
264+
"google.gemini-2.0-flash",
263265
],
264266
)
265267
async def test_basic_tool_calling(fast_agent, model_name):

tests/e2e/structured/test_structured_outputs.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -125,6 +125,7 @@ async def create_structured():
125125
# "haiku", -- anthropic do not support structured outputs this way
126126
"gpt-4.1-mini",
127127
"openrouter.google/gemini-2.0-flash-001",
128+
"google.gemini-2.0-flash",
128129
],
129130
)
130131
async def test_structured_output_with_response_format_overriden(fast_agent, model_name):
@@ -156,10 +157,7 @@ async def create_structured():
156157
@pytest.mark.e2e
157158
@pytest.mark.parametrize(
158159
"model_name",
159-
[
160-
"gpt-4.1-mini",
161-
"haiku",
162-
],
160+
["gpt-4.1-mini", "haiku", "google.gemini-2.0-flash"],
163161
)
164162
async def test_history_management_with_structured(fast_agent, model_name):
165163
"""Test that the agent can generate structured response with response_format_specified."""

0 commit comments

Comments
 (0)