Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions cli/planoai/config_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
"mistral",
"openai",
"xiaomi",
"qianfan",
"gemini",
"anthropic",
"together_ai",
Expand Down
6 changes: 6 additions & 0 deletions cli/planoai/defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@ class ProviderDefault:
base_url="https://api.deepseek.com/v1",
model_pattern="deepseek/*",
),
ProviderDefault(
name="qianfan",
env_var="QIANFAN_API_KEY",
base_url="https://qianfan.baidubce.com/v2",
model_pattern="qianfan/*",
),
ProviderDefault(
name="mistral",
env_var="MISTRAL_API_KEY",
Expand Down
18 changes: 18 additions & 0 deletions cli/test/test_config_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -293,6 +293,24 @@ def test_validate_and_render_happy_path_agent_config(monkeypatch):
base_url: https://openrouter.ai/api/v1
passthrough_auth: true

""",
},
{
"id": "qianfan_is_supported_provider",
"expected_error": None,
"plano_config": """
version: v0.4.0

listeners:
- name: llm
type: model
port: 12000

model_providers:
- model: qianfan/*
base_url: https://qianfan.baidubce.com/v2
passthrough_auth: true

""",
},
{
Expand Down
21 changes: 21 additions & 0 deletions cli/test/test_defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ def test_zero_env_vars_produces_pure_passthrough():
assert provider.get("default") is not True
# All known providers should be listed.
names = {p["name"] for p in cfg["model_providers"]}
assert "qianfan" in names
assert "digitalocean" in names
assert "vercel" in names
assert "openrouter" in names
Expand Down Expand Up @@ -80,6 +81,11 @@ def test_synthesized_config_validates_against_schema():
jsonschema.validate(cfg, _schema())


def test_synthesized_config_with_qianfan_validates_against_schema():
    # A config synthesized from only a Qianfan key must still satisfy the schema.
    env = {"QIANFAN_API_KEY": "qf-1"}
    jsonschema.validate(synthesize_default_config(env=env), _schema())


def test_provider_defaults_digitalocean_is_configured():
by_name = {p.name: p for p in PROVIDER_DEFAULTS}
assert "digitalocean" in by_name
Expand All @@ -104,6 +110,21 @@ def test_provider_defaults_openrouter_is_configured():
assert by_name["openrouter"].model_pattern == "openrouter/*"


def test_provider_defaults_qianfan_is_configured():
    # Qianfan must ship in PROVIDER_DEFAULTS with the expected wiring:
    # env var, OpenAI-compatible base URL, and wildcard model pattern.
    defaults = {provider.name: provider for provider in PROVIDER_DEFAULTS}
    assert "qianfan" in defaults
    qianfan = defaults["qianfan"]
    assert qianfan.env_var == "QIANFAN_API_KEY"
    assert qianfan.base_url == "https://qianfan.baidubce.com/v2"
    assert qianfan.model_pattern == "qianfan/*"


def test_qianfan_env_key_promotes_to_env_keyed():
    # When QIANFAN_API_KEY is present in the environment, the synthesized
    # provider entry should use env-keyed auth rather than passthrough.
    cfg = synthesize_default_config(env={"QIANFAN_API_KEY": "qf-1"})
    providers = {entry["name"]: entry for entry in cfg["model_providers"]}
    qianfan = providers["qianfan"]
    assert qianfan.get("access_key") == "$QIANFAN_API_KEY"
    assert qianfan.get("passthrough_auth") is None


def test_openrouter_env_key_promotes_to_env_keyed():
cfg = synthesize_default_config(env={"OPENROUTER_API_KEY": "or-1"})
by_name = {p["name"]: p for p in cfg["model_providers"]}
Expand Down
2 changes: 2 additions & 0 deletions config/plano_config_schema.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ properties:
- mistral
- openai
- xiaomi
- qianfan
- gemini
- chatgpt
- digitalocean
Expand Down Expand Up @@ -247,6 +248,7 @@ properties:
- mistral
- openai
- xiaomi
- qianfan
- gemini
- chatgpt
- digitalocean
Expand Down
12 changes: 12 additions & 0 deletions crates/common/src/configuration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -372,6 +372,8 @@ pub enum LlmProviderType {
OpenAI,
#[serde(rename = "xiaomi")]
Xiaomi,
#[serde(rename = "qianfan")]
Qianfan,
#[serde(rename = "gemini")]
Gemini,
#[serde(rename = "xai")]
Expand Down Expand Up @@ -412,6 +414,7 @@ impl Display for LlmProviderType {
LlmProviderType::Mistral => write!(f, "mistral"),
LlmProviderType::OpenAI => write!(f, "openai"),
LlmProviderType::Xiaomi => write!(f, "xiaomi"),
LlmProviderType::Qianfan => write!(f, "qianfan"),
LlmProviderType::XAI => write!(f, "xai"),
LlmProviderType::TogetherAI => write!(f, "together_ai"),
LlmProviderType::AzureOpenAI => write!(f, "azure_openai"),
Expand Down Expand Up @@ -783,6 +786,15 @@ mod test {
}
}

#[test]
fn test_llm_provider_type_qianfan_roundtrip() {
    // Round-trip: YAML string -> enum variant -> display string / provider id.
    let variant: LlmProviderType =
        serde_yaml::from_str("qianfan").expect("variant should deserialize");
    assert_eq!(variant, LlmProviderType::Qianfan);
    assert_eq!(format!("{variant}"), "qianfan");
    assert_eq!(variant.to_provider_id(), hermesllm::ProviderId::Qianfan);
}

#[test]
fn test_overrides_disable_signals_default_none() {
let overrides = super::Overrides::default();
Expand Down
20 changes: 20 additions & 0 deletions crates/hermesllm/src/clients/endpoints.rs
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,13 @@ impl SupportedAPIsFromClient {
build_endpoint("/v1", endpoint_suffix)
}
}
ProviderId::Qianfan => {
if request_path.starts_with("/v1/") {
build_endpoint("/v2", endpoint_suffix)
} else {
build_endpoint("/v1", endpoint_suffix)
}
}
ProviderId::AzureOpenAI => {
if request_path.starts_with("/v1/") {
let suffix = endpoint_suffix.trim_start_matches('/');
Expand Down Expand Up @@ -400,6 +407,19 @@ mod tests {
"/compatible-mode/v1/chat/completions"
);

// Test Qianfan provider
assert_eq!(
api.target_endpoint_for_provider(
&ProviderId::Qianfan,
"/v1/chat/completions",
"ernie-4.0-turbo-8k",
false,
None,
false
),
"/v2/chat/completions"
);

// Test Azure OpenAI provider
assert_eq!(
api.target_endpoint_for_provider(
Expand Down
42 changes: 42 additions & 0 deletions crates/hermesllm/src/providers/id.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ fn load_provider_models() -> &'static HashMap<String, Vec<String>> {
pub enum ProviderId {
OpenAI,
Xiaomi,
Qianfan,
Mistral,
Deepseek,
Groq,
Expand Down Expand Up @@ -57,6 +58,8 @@ impl TryFrom<&str> for ProviderId {
match value.to_lowercase().as_str() {
"openai" => Ok(ProviderId::OpenAI),
"xiaomi" => Ok(ProviderId::Xiaomi),
"qianfan" => Ok(ProviderId::Qianfan),
"baidu" => Ok(ProviderId::Qianfan), // alias
"mistral" => Ok(ProviderId::Mistral),
"deepseek" => Ok(ProviderId::Deepseek),
"groq" => Ok(ProviderId::Groq),
Expand Down Expand Up @@ -97,6 +100,7 @@ impl ProviderId {
ProviderId::Gemini => "google",
ProviderId::OpenAI => "openai",
ProviderId::Xiaomi => "xiaomi",
ProviderId::Qianfan => "qianfan",
ProviderId::Anthropic => "anthropic",
ProviderId::Mistral => "mistralai",
ProviderId::Deepseek => "deepseek",
Expand Down Expand Up @@ -159,6 +163,7 @@ impl ProviderId {
(
ProviderId::OpenAI
| ProviderId::Xiaomi
| ProviderId::Qianfan
| ProviderId::Groq
| ProviderId::Mistral
| ProviderId::Deepseek
Expand All @@ -181,6 +186,7 @@ impl ProviderId {
(
ProviderId::OpenAI
| ProviderId::Xiaomi
| ProviderId::Qianfan
| ProviderId::Groq
| ProviderId::Mistral
| ProviderId::Deepseek
Expand Down Expand Up @@ -248,6 +254,7 @@ impl Display for ProviderId {
match self {
ProviderId::OpenAI => write!(f, "OpenAI"),
ProviderId::Xiaomi => write!(f, "xiaomi"),
ProviderId::Qianfan => write!(f, "qianfan"),
ProviderId::Mistral => write!(f, "Mistral"),
ProviderId::Deepseek => write!(f, "Deepseek"),
ProviderId::Groq => write!(f, "Groq"),
Expand Down Expand Up @@ -380,6 +387,13 @@ mod tests {
assert!(ProviderId::try_from("open_router").is_err());
}

#[test]
fn test_qianfan_parsing_and_display() {
    // Both the canonical spelling and the "baidu" alias must parse to Qianfan.
    for spelling in ["qianfan", "baidu"] {
        assert_eq!(ProviderId::try_from(spelling), Ok(ProviderId::Qianfan));
    }
    // Display uses the lowercase canonical name.
    assert_eq!(format!("{}", ProviderId::Qianfan), "qianfan");
}

#[test]
fn test_vercel_compatible_api() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
Expand Down Expand Up @@ -436,6 +450,34 @@ mod tests {
);
}

#[test]
fn test_qianfan_compatible_api() {
    use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};

    // Every supported client API should resolve to the OpenAI chat-completions
    // upstream for Qianfan, since its API is OpenAI-compatible.
    let cases = [
        (
            SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
            "Qianfan should map OpenAI client to OpenAIChatCompletions upstream",
        ),
        (
            SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages),
            "Qianfan should translate Anthropic client to OpenAIChatCompletions upstream",
        ),
        (
            SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses),
            "Qianfan should translate Responses API client to OpenAIChatCompletions upstream",
        ),
    ];

    for (client, message) in cases {
        let upstream = ProviderId::Qianfan.compatible_api_for_client(&client, false);
        assert!(
            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
            "{message}"
        );
    }
}

#[test]
fn test_vercel_and_openrouter_empty_models() {
assert!(ProviderId::Vercel.models().is_empty());
Expand Down
25 changes: 25 additions & 0 deletions docs/source/concepts/llm_providers/supported_providers.rst
Original file line number Diff line number Diff line change
Expand Up @@ -547,6 +547,31 @@ Xiaomi MiMo
- model: xiaomi/mimo-v2-omni
access_key: $MIMO_API_KEY

Baidu Qianfan
~~~~~~~~~~~~~

**Provider Prefix:** ``qianfan/``

**API Endpoint:** ``/v2/chat/completions`` through Qianfan's OpenAI-compatible API.

**Authentication:** API Key - Get your API key from `Baidu AI Cloud Qianfan <https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application>`_ and set ``QIANFAN_API_KEY``.

**Supported Chat Models:** All Qianfan chat models available through the OpenAI-compatible API, including ERNIE models and future chat model releases.

**Configuration Examples:**

.. code-block:: yaml

llm_providers:
# Configure Qianfan models with wildcard routing
- model: qianfan/*
access_key: $QIANFAN_API_KEY

# Or configure a specific ERNIE model
- model: qianfan/ernie-4.0-turbo-8k
access_key: $QIANFAN_API_KEY
default: true

Providers Requiring Base URL
----------------------------

Expand Down