From 5611cfc2cf50021a93117171df48d3e7d26385b6 Mon Sep 17 00:00:00 2001
From: PR Bot
Date: Sun, 22 Mar 2026 18:15:11 +0800
Subject: [PATCH] feat: add MiniMax as first-class LLM provider with preset system
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add a provider dropdown to the LLM settings page so users can pick MiniMax,
OpenAI, DeepSeek, or Ollama and have the endpoint URL, model list, and
API-key hint auto-filled. Custom endpoints still work via the "Custom
endpoint" option.

- New `abogen/llm_providers.py`: `LLMProviderPreset` dataclass and built-in
  presets for MiniMax, OpenAI, DeepSeek, and Ollama (a sketch of the module
  appears below, ahead of the test diffs)
- Settings UI: provider `<select>` in the LLM settings form that drives the
  auto-fill, plus a short help note
---

+        <select id="llm_provider" name="llm_provider">
+          <option value="">Custom endpoint</option>
+          {% for preset in llm_provider_presets %}
+          <option value="{{ preset.id }}">{{ preset.name }}</option>
+          {% endfor %}
+        </select>
+        <small>
+          Pick a cloud provider to auto-fill the endpoint, or choose Custom
+          endpoint to enter any OpenAI-compatible URL.
+        </small>
@@ -284,7 +294,7 @@
         <legend>Application Settings</legend>
 
-        <input type="password" id="llm_api_key" name="llm_api_key">
+        <input type="password" id="llm_api_key" name="llm_api_key" placeholder="{{ llm_api_key_hint }}">
         <small>Leave blank or use ollama for local servers that do not require keys.</small>
 
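The diff for `abogen/llm_providers.py` itself is not included in this excerpt, so the following is a minimal sketch of the module implied by the commit message and exercised by the test diffs below. The field names, the two helper functions, and the MiniMax and OpenAI values are taken directly from the tests; the DeepSeek and Ollama endpoint URLs, the hint strings, and the non-MiniMax model lists are illustrative assumptions rather than the patch's actual contents.

    """Sketch of abogen/llm_providers.py, inferred from the tests in this patch."""

    from __future__ import annotations

    from dataclasses import asdict, dataclass


    @dataclass(frozen=True)
    class LLMProviderPreset:
        """One OpenAI-compatible provider preset with ready-made defaults."""

        id: str
        name: str
        base_url: str
        api_key_env: str
        api_key_hint: str
        models: tuple[str, ...] = ()

        def to_dict(self) -> dict:
            """JSON-friendly form; the tests expect models to come back as a list."""
            data = asdict(self)
            data["models"] = list(self.models)
            return data


    _PRESETS: tuple[LLMProviderPreset, ...] = (
        LLMProviderPreset(
            id="minimax",
            name="MiniMax",
            base_url="https://api.minimax.io/v1",
            api_key_env="MINIMAX_API_KEY",
            api_key_hint="MiniMax API key",          # hint wording is an assumption
            models=("MiniMax-M2.7",),                # only this model is asserted by the tests
        ),
        LLMProviderPreset(
            id="openai",
            name="OpenAI",
            base_url="https://api.openai.com/v1",
            api_key_env="OPENAI_API_KEY",
            api_key_hint="sk-...",                   # assumption
            models=("gpt-4o-mini",),                 # assumption; tests only pin the base_url
        ),
        LLMProviderPreset(
            id="deepseek",
            name="DeepSeek",
            base_url="https://api.deepseek.com/v1",  # assumption; not asserted by the tests
            api_key_env="DEEPSEEK_API_KEY",
            api_key_hint="sk-...",                   # assumption
            models=("deepseek-chat",),               # assumption
        ),
        LLMProviderPreset(
            id="ollama",
            name="Ollama",
            base_url="http://localhost:11434/v1",    # assumption; tests only require non-empty
            api_key_env="OLLAMA_API_KEY",            # assumption
            api_key_hint="ollama",
            models=(),                               # tests require an empty model tuple
        ),
    )


    def get_provider_presets() -> tuple[LLMProviderPreset, ...]:
        """Return all built-in presets in display order."""
        return _PRESETS


    def get_provider_by_id(provider_id: str) -> LLMProviderPreset | None:
        """Look up a preset by id; unknown or empty ids return None."""
        for preset in _PRESETS:
            if preset.id == provider_id:
                return preset
        return None
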
diff --git a/tests/test_llm_providers.py b/tests/test_llm_providers.py
new file mode 100644
index 0000000..6bad80c
--- /dev/null
+++ b/tests/test_llm_providers.py
@@ -0,0 +1,76 @@
+"""Tests for the LLM provider presets module."""
+
+from __future__ import annotations
+
+import pytest
+
+from abogen.llm_providers import (
+    LLMProviderPreset,
+    get_provider_presets,
+    get_provider_by_id,
+)
+
+
+def test_get_provider_presets_returns_non_empty():
+    presets = get_provider_presets()
+    assert len(presets) >= 4
+
+
+def test_minimax_preset_exists():
+    preset = get_provider_by_id("minimax")
+    assert preset is not None
+    assert preset.name == "MiniMax"
+    assert preset.base_url == "https://api.minimax.io/v1"
+    assert preset.api_key_env == "MINIMAX_API_KEY"
+    assert len(preset.models) >= 1
+    assert "MiniMax-M2.7" in preset.models
+
+
+def test_openai_preset_exists():
+    preset = get_provider_by_id("openai")
+    assert preset is not None
+    assert preset.base_url == "https://api.openai.com/v1"
+
+
+def test_ollama_preset_has_no_models():
+    preset = get_provider_by_id("ollama")
+    assert preset is not None
+    assert preset.models == ()
+
+
+def test_get_provider_by_id_returns_none_for_unknown():
+    assert get_provider_by_id("nonexistent") is None
+    assert get_provider_by_id("") is None
+
+
+def test_preset_ids_are_unique():
+    presets = get_provider_presets()
+    ids = [p.id for p in presets]
+    assert len(ids) == len(set(ids))
+
+
+def test_to_dict_has_required_keys():
+    preset = get_provider_by_id("minimax")
+    d = preset.to_dict()
+    assert set(d.keys()) == {"id", "name", "base_url", "api_key_env", "api_key_hint", "models"}
+    assert isinstance(d["models"], list)
+    assert d["id"] == "minimax"
+
+
+def test_preset_is_frozen():
+    preset = get_provider_by_id("minimax")
+    with pytest.raises(AttributeError):
+        preset.name = "changed"
+
+
+def test_all_presets_have_base_url():
+    for preset in get_provider_presets():
+        assert preset.base_url, f"Preset {preset.id!r} missing base_url"
+
+
+def test_normalization_settings_includes_llm_provider():
+    """The llm_provider key must exist in the settings defaults."""
+    from abogen.normalization_settings import _SETTINGS_DEFAULTS
+
+    assert "llm_provider" in _SETTINGS_DEFAULTS
+    assert _SETTINGS_DEFAULTS["llm_provider"] == ""
diff --git a/tests/test_llm_providers_integration.py b/tests/test_llm_providers_integration.py
new file mode 100644
index 0000000..21cb2ea
--- /dev/null
+++ b/tests/test_llm_providers_integration.py
@@ -0,0 +1,35 @@
+"""Integration tests for the LLM provider presets in the settings pipeline."""
+
+from __future__ import annotations
+
+from abogen.llm_providers import get_provider_presets, get_provider_by_id
+from abogen.normalization_settings import (
+    _extract_settings,
+    build_llm_configuration,
+)
+
+
+def test_extract_settings_preserves_llm_provider():
+    """When llm_provider is supplied it must survive _extract_settings()."""
+    extracted = _extract_settings({"llm_provider": "minimax"})
+    assert extracted["llm_provider"] == "minimax"
+
+
+def test_extract_settings_defaults_llm_provider_to_empty():
+    extracted = _extract_settings({})
+    assert extracted["llm_provider"] == ""
+
+
+def test_build_llm_configuration_with_minimax_preset():
+    """Simulate choosing the MiniMax preset and building the LLM config."""
+    preset = get_provider_by_id("minimax")
+    settings = _extract_settings({
+        "llm_provider": preset.id,
+        "llm_base_url": preset.base_url,
+        "llm_api_key": "test-key",
+        "llm_model": preset.models[0],
+    })
+    config = build_llm_configuration(settings)
+    assert config.base_url == "https://api.minimax.io/v1"
+    assert config.api_key == "test-key"
+    assert config.model == preset.models[0]
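
The commit message says that picking a provider auto-fills the endpoint URL, model list, and API-key hint, but the code performing the auto-fill is not visible in this excerpt (it is presumably driven by the new `llm_provider` `<select>` in the settings template). As a usage illustration only, the sketch below shows one way a caller could apply a chosen preset on the server side before handing the settings to `build_llm_configuration`; `apply_provider_preset` is a hypothetical helper and is not part of the patch.

    import os

    from abogen.llm_providers import get_provider_by_id


    def apply_provider_preset(settings: dict) -> dict:
        """Hypothetical helper (not in the patch): fill blank LLM settings from a preset.

        The patch may instead do this in the browser when the provider dropdown
        changes; this is only a sketch of an equivalent server-side fallback.
        """
        preset = get_provider_by_id(settings.get("llm_provider", ""))
        if preset is None:
            # "Custom endpoint" (empty id) or an unknown id: leave everything as typed.
            return dict(settings)

        filled = dict(settings)
        if not filled.get("llm_base_url"):
            filled["llm_base_url"] = preset.base_url
        if not filled.get("llm_model") and preset.models:
            filled["llm_model"] = preset.models[0]
        if not filled.get("llm_api_key"):
            # Fall back to the provider's conventional environment variable, e.g. MINIMAX_API_KEY.
            filled["llm_api_key"] = os.environ.get(preset.api_key_env, "")
        return filled


    # Example: MiniMax selected with a blank key; the key is read from MINIMAX_API_KEY if set.
    resolved = apply_provider_preset({"llm_provider": "minimax", "llm_api_key": ""})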