From 39b09c425c723f1255cf5e9d8c0a591452b97936 Mon Sep 17 00:00:00 2001
From: lolo MD <65371299+loloMD@users.noreply.github.com>
Date: Wed, 10 Dec 2025 16:26:40 -0500
Subject: [PATCH] Add model configurations for 307 OpenRouter models

Add one TOML configuration file per model under
providers/openrouter/models/.

- Cover providers including AI21, Aion Labs, Alibaba, AllenAI, Amazon,
  Anthropic, Arcee AI, Baidu, Cohere, DeepSeek, Google, Meta Llama,
  Microsoft, MiniMax, Mistral AI, Moonshot AI, NVIDIA, OpenAI, Perplexity,
  Qwen, Sao10K, StepFun, Switchpoint, Tencent, TheDrummer, TNG, xAI, and
  Z.AI, plus several smaller providers.
- Record each model's release and last-updated dates, capability flags
  (tool calling, structured output, temperature, reasoning, attachments),
  input/output modalities, context and output token limits, and input and
  output costs.
- Include ":free" and ":exacto" variants where OpenRouter lists them.
---
 .../models/ai21/jamba-large-1.7.toml | 22 ++++++++++++++++++
 .../models/ai21/jamba-mini-1.7.toml | 22 ++++++++++++++++++
 .../models/aion-labs/aion-1.0-mini.toml | 22 ++++++++++++++++++
 .../openrouter/models/aion-labs/aion-1.0.toml | 22 ++++++++++++++++++
 .../aion-labs/aion-rp-llama-3.1-8b.toml | 22 ++++++++++++++++++
 .../codellama-7b-instruct-solidity.toml | 22 ++++++++++++++++++
 .../alibaba/tongyi-deepresearch-30b-a3b.toml | 22 ++++++++++++++++++
 .../tongyi-deepresearch-30b-a3b:free.toml | 22 ++++++++++++++++++
 .../allenai/olmo-2-0325-32b-instruct.toml | 22 ++++++++++++++++++
 .../models/allenai/olmo-3-32b-think:free.toml | 22 ++++++++++++++++++
 .../models/allenai/olmo-3-7b-instruct.toml | 22 ++++++++++++++++++
 .../models/allenai/olmo-3-7b-think.toml | 22 ++++++++++++++++++
 .../models/alpindale/goliath-120b.toml | 22 ++++++++++++++++++
 .../models/amazon/nova-2-lite-v1.toml | 22 ++++++++++++++++++
 .../models/amazon/nova-2-lite-v1:free.toml | 22 ++++++++++++++++++
 .../models/amazon/nova-lite-v1.toml | 22 ++++++++++++++++++
 .../models/amazon/nova-micro-v1.toml | 22 ++++++++++++++++++
 .../models/amazon/nova-premier-v1.toml | 22 ++++++++++++++++++
 .../openrouter/models/amazon/nova-pro-v1.toml | 22 ++++++++++++++++++
 .../models/anthracite-org/magnum-v4-72b.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-3-haiku.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-3-opus.toml | 22 ++++++++++++++++++
 .../anthropic/claude-3.5-haiku-20241022.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-3.5-sonnet.toml | 22 ++++++++++++++++++
 .../anthropic/claude-3.7-sonnet:thinking.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-haiku-4.5.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-opus-4.1.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-opus-4.5.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-sonnet-4.5.toml | 22 ++++++++++++++++++
 .../models/anthropic/claude-sonnet-4.toml | 22 ++++++++++++++++++
 .../models/arcee-ai/coder-large.toml | 22 ++++++++++++++++++
 .../models/arcee-ai/maestro-reasoning.toml | 22 ++++++++++++++++++
 .../openrouter/models/arcee-ai/spotlight.toml | 22 ++++++++++++++++++
 .../models/arcee-ai/trinity-mini.toml | 22 ++++++++++++++++++
 .../models/arcee-ai/trinity-mini:free.toml | 22 ++++++++++++++++++
 .../models/arcee-ai/virtuoso-large.toml | 22 ++++++++++++++++++
 .../models/arliai/qwq-32b-arliai-rpr-v1.toml | 22 ++++++++++++++++++
 .../baidu/ernie-4.5-21b-a3b-thinking.toml | 22 ++++++++++++++++++
 .../models/baidu/ernie-4.5-21b-a3b.toml | 22 ++++++++++++++++++
 .../models/baidu/ernie-4.5-300b-a47b.toml | 22 ++++++++++++++++++
.../models/baidu/ernie-4.5-vl-28b-a3b.toml | 22 ++++++++++++++++++ .../models/baidu/ernie-4.5-vl-424b-a47b.toml | 22 ++++++++++++++++++ .../models/bytedance/ui-tars-1.5-7b.toml | 22 ++++++++++++++++++ ...lphin-mistral-24b-venice-edition:free.toml | 22 ++++++++++++++++++ .../openrouter/models/cohere/command-a.toml | 22 ++++++++++++++++++ .../models/cohere/command-r-08-2024.toml | 22 ++++++++++++++++++ .../models/cohere/command-r-plus-08-2024.toml | 22 ++++++++++++++++++ .../models/cohere/command-r7b-12-2024.toml | 22 ++++++++++++++++++ .../cogito-v2-preview-llama-109b-moe.toml | 22 ++++++++++++++++++ .../cogito-v2-preview-llama-405b.toml | 22 ++++++++++++++++++ .../cogito-v2-preview-llama-70b.toml | 22 ++++++++++++++++++ .../models/deepcogito/cogito-v2.1-671b.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-chat-v3.1.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-chat.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-prover-v2.toml | 22 ++++++++++++++++++ .../deepseek/deepseek-r1-0528-qwen3-8b.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-r1-0528.toml | 22 ++++++++++++++++++ .../deepseek-r1-distill-qwen-32b.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-r1.toml | 22 ++++++++++++++++++ .../deepseek/deepseek-v3.1-terminus.toml | 22 ++++++++++++++++++ .../deepseek-v3.1-terminus:exacto.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-v3.2-exp.toml | 22 ++++++++++++++++++ .../deepseek/deepseek-v3.2-speciale.toml | 22 ++++++++++++++++++ .../models/deepseek/deepseek-v3.2.toml | 22 ++++++++++++++++++ .../models/eleutherai/llemma_7b.toml | 22 ++++++++++++++++++ .../models/essentialai/rnj-1-instruct.toml | 22 ++++++++++++++++++ .../google/gemini-2.0-flash-lite-001.toml | 22 ++++++++++++++++++ .../gemini-2.5-flash-image-preview.toml | 22 ++++++++++++++++++ .../models/google/gemini-2.5-flash-image.toml | 22 ++++++++++++++++++ ...gemini-2.5-flash-lite-preview-09-2025.toml | 22 ++++++++++++++++++ .../models/google/gemini-2.5-flash-lite.toml | 22 ++++++++++++++++++ .../gemini-2.5-flash-preview-09-2025.toml | 22 ++++++++++++++++++ .../models/google/gemini-2.5-pro-preview.toml | 22 ++++++++++++++++++ .../google/gemini-3-pro-image-preview.toml | 22 ++++++++++++++++++ .../models/google/gemini-3-pro-preview.toml | 22 ++++++++++++++++++ .../models/google/gemma-2-27b-it.toml | 22 ++++++++++++++++++ .../models/google/gemma-2-9b-it.toml | 22 ++++++++++++++++++ .../models/google/gemma-3-12b-it:free.toml | 22 ++++++++++++++++++ .../models/google/gemma-3-27b-it:free.toml | 22 ++++++++++++++++++ .../models/google/gemma-3-4b-it.toml | 22 ++++++++++++++++++ .../models/google/gemma-3-4b-it:free.toml | 22 ++++++++++++++++++ .../models/google/gemma-3n-e2b-it:free.toml | 22 ++++++++++++++++++ .../models/gryphe/mythomax-l2-13b.toml | 22 ++++++++++++++++++ .../ibm-granite/granite-4.0-h-micro.toml | 22 ++++++++++++++++++ .../models/inception/mercury-coder.toml | 22 ++++++++++++++++++ .../openrouter/models/inception/mercury.toml | 22 ++++++++++++++++++ .../models/inflection/inflection-3-pi.toml | 22 ++++++++++++++++++ .../inflection/inflection-3-productivity.toml | 22 ++++++++++++++++++ .../models/kwaipilot/kat-coder-pro:free.toml | 22 ++++++++++++++++++ .../openrouter/models/liquid/lfm-2.2-6b.toml | 22 ++++++++++++++++++ .../openrouter/models/liquid/lfm2-8b-a1b.toml | 22 ++++++++++++++++++ .../openrouter/models/mancer/weaver.toml | 22 ++++++++++++++++++ .../models/meituan/longcat-flash-chat.toml | 22 ++++++++++++++++++ .../meituan/longcat-flash-chat:free.toml | 22 
++++++++++++++++++ .../meta-llama/llama-3-70b-instruct.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3-8b-instruct.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3.1-405b-instruct.toml | 22 ++++++++++++++++++ .../models/meta-llama/llama-3.1-405b.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3.1-70b-instruct.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3.1-8b-instruct.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3.2-1b-instruct.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3.2-3b-instruct.toml | 22 ++++++++++++++++++ .../llama-3.2-3b-instruct:free.toml | 22 ++++++++++++++++++ .../llama-3.2-90b-vision-instruct.toml | 22 ++++++++++++++++++ .../meta-llama/llama-3.3-70b-instruct.toml | 22 ++++++++++++++++++ .../models/meta-llama/llama-4-maverick.toml | 22 ++++++++++++++++++ .../models/meta-llama/llama-4-scout.toml | 22 ++++++++++++++++++ .../models/meta-llama/llama-guard-2-8b.toml | 22 ++++++++++++++++++ .../models/meta-llama/llama-guard-3-8b.toml | 22 ++++++++++++++++++ .../models/meta-llama/llama-guard-4-12b.toml | 22 ++++++++++++++++++ .../models/microsoft/mai-ds-r1.toml | 22 ++++++++++++++++++ .../microsoft/phi-3-medium-128k-instruct.toml | 22 ++++++++++++++++++ .../microsoft/phi-3-mini-128k-instruct.toml | 22 ++++++++++++++++++ .../microsoft/phi-3.5-mini-128k-instruct.toml | 22 ++++++++++++++++++ .../microsoft/phi-4-multimodal-instruct.toml | 22 ++++++++++++++++++ .../microsoft/phi-4-reasoning-plus.toml | 22 ++++++++++++++++++ .../openrouter/models/microsoft/phi-4.toml | 22 ++++++++++++++++++ .../models/microsoft/wizardlm-2-8x22b.toml | 22 ++++++++++++++++++ .../openrouter/models/minimax/minimax-01.toml | 22 ++++++++++++++++++ .../openrouter/models/minimax/minimax-m1.toml | 22 ++++++++++++++++++ .../openrouter/models/minimax/minimax-m2.toml | 22 ++++++++++++++++++ .../models/mistralai/codestral-2508.toml | 22 ++++++++++++++++++ .../models/mistralai/devstral-2512.toml | 22 ++++++++++++++++++ .../models/mistralai/devstral-2512:free.toml | 22 ++++++++++++++++++ .../models/mistralai/devstral-medium.toml | 22 ++++++++++++++++++ .../models/mistralai/devstral-small.toml | 22 ++++++++++++++++++ .../models/mistralai/ministral-14b-2512.toml | 22 ++++++++++++++++++ .../models/mistralai/ministral-3b-2512.toml | 22 ++++++++++++++++++ .../models/mistralai/ministral-3b.toml | 22 ++++++++++++++++++ .../models/mistralai/ministral-8b-2512.toml | 22 ++++++++++++++++++ .../models/mistralai/ministral-8b.toml | 22 ++++++++++++++++++ .../mistralai/mistral-7b-instruct-v0.1.toml | 22 ++++++++++++++++++ .../mistralai/mistral-7b-instruct-v0.2.toml | 22 ++++++++++++++++++ .../mistralai/mistral-7b-instruct-v0.3.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-7b-instruct.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-large-2407.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-large-2411.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-large-2512.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-large.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-medium-3.1.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-medium-3.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-nemo.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-saba.toml | 22 ++++++++++++++++++ .../mistral-small-24b-instruct-2501.toml | 22 ++++++++++++++++++ .../mistral-small-3.1-24b-instruct:free.toml | 22 ++++++++++++++++++ .../models/mistralai/mistral-tiny.toml | 22 ++++++++++++++++++ .../mistralai/mixtral-8x22b-instruct.toml | 22 ++++++++++++++++++ 
.../mistralai/mixtral-8x7b-instruct.toml | 22 ++++++++++++++++++ .../models/mistralai/pixtral-12b.toml | 22 ++++++++++++++++++ .../models/mistralai/pixtral-large-2411.toml | 22 ++++++++++++++++++ .../mistralai/voxtral-small-24b-2507.toml | 22 ++++++++++++++++++ .../models/moonshotai/kimi-dev-72b.toml | 22 ++++++++++++++++++ .../models/moonshotai/kimi-k2-0905.toml | 22 ++++++++++++++++++ .../moonshotai/kimi-k2-0905:exacto.toml | 22 ++++++++++++++++++ .../models/moonshotai/kimi-k2-thinking.toml | 22 ++++++++++++++++++ .../models/moonshotai/kimi-k2:free.toml | 22 ++++++++++++++++++ .../kimi-linear-48b-a3b-instruct.toml | 22 ++++++++++++++++++ .../models/morph/morph-v3-fast.toml | 22 ++++++++++++++++++ .../models/morph/morph-v3-large.toml | 22 ++++++++++++++++++ .../neversleep/llama-3.1-lumimaid-8b.toml | 22 ++++++++++++++++++ .../models/neversleep/noromaid-20b.toml | 22 ++++++++++++++++++ .../nex-agi/deepseek-v3.1-nex-n1:free.toml | 22 ++++++++++++++++++ .../deephermes-3-mistral-24b-preview.toml | 22 ++++++++++++++++++ .../nousresearch/hermes-2-pro-llama-3-8b.toml | 22 ++++++++++++++++++ .../nousresearch/hermes-3-llama-3.1-405b.toml | 22 ++++++++++++++++++ .../hermes-3-llama-3.1-405b:free.toml | 22 ++++++++++++++++++ .../nousresearch/hermes-3-llama-3.1-70b.toml | 22 ++++++++++++++++++ .../models/nousresearch/hermes-4-405b.toml | 22 ++++++++++++++++++ .../models/nousresearch/hermes-4-70b.toml | 22 ++++++++++++++++++ .../llama-3.1-nemotron-70b-instruct.toml | 22 ++++++++++++++++++ .../llama-3.1-nemotron-ultra-253b-v1.toml | 22 ++++++++++++++++++ .../llama-3.3-nemotron-super-49b-v1.5.toml | 22 ++++++++++++++++++ .../nvidia/nemotron-nano-12b-v2-vl.toml | 22 ++++++++++++++++++ .../nvidia/nemotron-nano-12b-v2-vl:free.toml | 22 ++++++++++++++++++ .../models/nvidia/nemotron-nano-9b-v2.toml | 22 ++++++++++++++++++ .../nvidia/nemotron-nano-9b-v2:free.toml | 22 ++++++++++++++++++ .../models/openai/chatgpt-4o-latest.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/codex-mini.toml | 22 ++++++++++++++++++ .../models/openai/gpt-3.5-turbo-0613.toml | 22 ++++++++++++++++++ .../models/openai/gpt-3.5-turbo-16k.toml | 22 ++++++++++++++++++ .../models/openai/gpt-3.5-turbo-instruct.toml | 22 ++++++++++++++++++ .../models/openai/gpt-3.5-turbo.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-4-0314.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4-1106-preview.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4-turbo-preview.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-4-turbo.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4.1-nano.toml | 22 ++++++++++++++++++ providers/openrouter/models/openai/gpt-4.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o-2024-05-13.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o-2024-08-06.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o-2024-11-20.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o-audio-preview.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o-mini-2024-07-18.toml | 22 ++++++++++++++++++ .../openai/gpt-4o-mini-search-preview.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o-search-preview.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-4o.toml | 22 ++++++++++++++++++ .../models/openai/gpt-4o:extended.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-5-chat.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-5-codex.toml | 22 ++++++++++++++++++ .../models/openai/gpt-5-image-mini.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-5-image.toml | 22 
++++++++++++++++++ .../openrouter/models/openai/gpt-5-mini.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-5-nano.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-5-pro.toml | 22 ++++++++++++++++++ .../models/openai/gpt-5.1-chat.toml | 22 ++++++++++++++++++ .../models/openai/gpt-5.1-codex-max.toml | 22 ++++++++++++++++++ .../models/openai/gpt-5.1-codex-mini.toml | 22 ++++++++++++++++++ .../models/openai/gpt-5.1-codex.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-5.1.toml | 22 ++++++++++++++++++ providers/openrouter/models/openai/gpt-5.toml | 22 ++++++++++++++++++ .../models/openai/gpt-oss-120b.toml | 22 ++++++++++++++++++ .../models/openai/gpt-oss-120b:exacto.toml | 22 ++++++++++++++++++ .../models/openai/gpt-oss-120b:free.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/gpt-oss-20b.toml | 22 ++++++++++++++++++ .../models/openai/gpt-oss-20b:free.toml | 22 ++++++++++++++++++ .../models/openai/gpt-oss-safeguard-20b.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/o1-pro.toml | 22 ++++++++++++++++++ providers/openrouter/models/openai/o1.toml | 22 ++++++++++++++++++ .../models/openai/o3-deep-research.toml | 22 ++++++++++++++++++ .../models/openai/o3-mini-high.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/o3-mini.toml | 22 ++++++++++++++++++ .../openrouter/models/openai/o3-pro.toml | 22 ++++++++++++++++++ providers/openrouter/models/openai/o3.toml | 22 ++++++++++++++++++ .../models/openai/o4-mini-deep-research.toml | 22 ++++++++++++++++++ .../models/openai/o4-mini-high.toml | 22 ++++++++++++++++++ .../models/opengvlab/internvl3-78b.toml | 22 ++++++++++++++++++ .../openrouter/models/openrouter/auto.toml | 22 ++++++++++++++++++ .../models/openrouter/bodybuilder.toml | 22 ++++++++++++++++++ .../perplexity/sonar-deep-research.toml | 23 +++++++++++++++++++ .../models/perplexity/sonar-pro-search.toml | 22 ++++++++++++++++++ .../models/perplexity/sonar-pro.toml | 22 ++++++++++++++++++ .../perplexity/sonar-reasoning-pro.toml | 22 ++++++++++++++++++ .../models/perplexity/sonar-reasoning.toml | 22 ++++++++++++++++++ .../openrouter/models/perplexity/sonar.toml | 22 ++++++++++++++++++ .../models/prime-intellect/intellect-3.toml | 22 ++++++++++++++++++ .../models/qwen/qwen-2.5-72b-instruct.toml | 22 ++++++++++++++++++ .../models/qwen/qwen-2.5-7b-instruct.toml | 22 ++++++++++++++++++ .../models/qwen/qwen-2.5-vl-7b-instruct.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen-max.toml | 22 ++++++++++++++++++ .../models/qwen/qwen-plus-2025-07-28.toml | 22 ++++++++++++++++++ .../qwen/qwen-plus-2025-07-28:thinking.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen-plus.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen-turbo.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen-vl-max.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen-vl-plus.toml | 22 ++++++++++++++++++ .../qwen/qwen2.5-coder-7b-instruct.toml | 22 ++++++++++++++++++ .../models/qwen/qwen2.5-vl-32b-instruct.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-14b.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-235b-a22b-2507.toml | 22 ++++++++++++++++++ .../qwen/qwen3-235b-a22b-thinking-2507.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-235b-a22b.toml | 22 ++++++++++++++++++ .../qwen/qwen3-30b-a3b-instruct-2507.toml | 22 ++++++++++++++++++ .../qwen/qwen3-30b-a3b-thinking-2507.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-30b-a3b.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-32b.toml 
| 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-4b:free.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-8b.toml | 22 ++++++++++++++++++ .../qwen/qwen3-coder-30b-a3b-instruct.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-coder-flash.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-coder-plus.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-coder.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-coder:exacto.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-coder:free.toml | 22 ++++++++++++++++++ .../openrouter/models/qwen/qwen3-max.toml | 22 ++++++++++++++++++ .../qwen/qwen3-next-80b-a3b-instruct.toml | 22 ++++++++++++++++++ .../qwen/qwen3-next-80b-a3b-thinking.toml | 22 ++++++++++++++++++ .../qwen/qwen3-vl-235b-a22b-instruct.toml | 22 ++++++++++++++++++ .../qwen/qwen3-vl-235b-a22b-thinking.toml | 22 ++++++++++++++++++ .../qwen/qwen3-vl-30b-a3b-instruct.toml | 22 ++++++++++++++++++ .../qwen/qwen3-vl-30b-a3b-thinking.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-vl-8b-instruct.toml | 22 ++++++++++++++++++ .../models/qwen/qwen3-vl-8b-thinking.toml | 22 ++++++++++++++++++ providers/openrouter/models/qwen/qwq-32b.toml | 22 ++++++++++++++++++ .../models/raifle/sorcererlm-8x22b.toml | 22 ++++++++++++++++++ .../models/relace/relace-apply-3.toml | 22 ++++++++++++++++++ .../models/relace/relace-search.toml | 22 ++++++++++++++++++ .../models/sao10k/l3-euryale-70b.toml | 22 ++++++++++++++++++ .../models/sao10k/l3-lunaris-8b.toml | 22 ++++++++++++++++++ .../models/sao10k/l3.1-70b-hanami-x1.toml | 22 ++++++++++++++++++ .../models/sao10k/l3.1-euryale-70b.toml | 22 ++++++++++++++++++ .../models/sao10k/l3.3-euryale-70b.toml | 22 ++++++++++++++++++ .../openrouter/models/stepfun-ai/step3.toml | 22 ++++++++++++++++++ .../openrouter/models/switchpoint/router.toml | 22 ++++++++++++++++++ .../models/tencent/hunyuan-a13b-instruct.toml | 22 ++++++++++++++++++ .../models/thedrummer/anubis-70b-v1.1.toml | 22 ++++++++++++++++++ .../models/thedrummer/cydonia-24b-v4.1.toml | 22 ++++++++++++++++++ .../models/thedrummer/rocinante-12b.toml | 22 ++++++++++++++++++ .../models/thedrummer/skyfall-36b-v2.toml | 22 ++++++++++++++++++ .../models/thedrummer/unslopnemo-12b.toml | 22 ++++++++++++++++++ .../models/thudm/glm-4.1v-9b-thinking.toml | 22 ++++++++++++++++++ .../models/tngtech/deepseek-r1t-chimera.toml | 22 ++++++++++++++++++ .../tngtech/deepseek-r1t-chimera:free.toml | 22 ++++++++++++++++++ .../models/tngtech/deepseek-r1t2-chimera.toml | 22 ++++++++++++++++++ .../models/tngtech/tng-r1t-chimera.toml | 22 ++++++++++++++++++ .../models/tngtech/tng-r1t-chimera:free.toml | 22 ++++++++++++++++++ .../models/undi95/remm-slerp-l2-13b.toml | 22 ++++++++++++++++++ .../openrouter/models/x-ai/grok-4-fast.toml | 22 ++++++++++++++++++ .../openrouter/models/x-ai/grok-4.1-fast.toml | 22 ++++++++++++++++++ .../models/x-ai/grok-code-fast-1.toml | 22 ++++++++++++++++++ .../openrouter/models/z-ai/glm-4-32b.toml | 22 ++++++++++++++++++ .../openrouter/models/z-ai/glm-4.5-air.toml | 22 ++++++++++++++++++ .../models/z-ai/glm-4.5-air:free.toml | 22 ++++++++++++++++++ providers/openrouter/models/z-ai/glm-4.5.toml | 22 ++++++++++++++++++ .../openrouter/models/z-ai/glm-4.5v.toml | 22 ++++++++++++++++++ providers/openrouter/models/z-ai/glm-4.6.toml | 22 ++++++++++++++++++ .../models/z-ai/glm-4.6:exacto.toml | 22 ++++++++++++++++++ .../openrouter/models/z-ai/glm-4.6v.toml | 22 ++++++++++++++++++ 307 files changed, 6755 insertions(+) create mode 100644 
providers/openrouter/models/ai21/jamba-large-1.7.toml create mode 100644 providers/openrouter/models/ai21/jamba-mini-1.7.toml create mode 100644 providers/openrouter/models/aion-labs/aion-1.0-mini.toml create mode 100644 providers/openrouter/models/aion-labs/aion-1.0.toml create mode 100644 providers/openrouter/models/aion-labs/aion-rp-llama-3.1-8b.toml create mode 100644 providers/openrouter/models/alfredpros/codellama-7b-instruct-solidity.toml create mode 100644 providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b.toml create mode 100644 providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b:free.toml create mode 100644 providers/openrouter/models/allenai/olmo-2-0325-32b-instruct.toml create mode 100644 providers/openrouter/models/allenai/olmo-3-32b-think:free.toml create mode 100644 providers/openrouter/models/allenai/olmo-3-7b-instruct.toml create mode 100644 providers/openrouter/models/allenai/olmo-3-7b-think.toml create mode 100644 providers/openrouter/models/alpindale/goliath-120b.toml create mode 100644 providers/openrouter/models/amazon/nova-2-lite-v1.toml create mode 100644 providers/openrouter/models/amazon/nova-2-lite-v1:free.toml create mode 100644 providers/openrouter/models/amazon/nova-lite-v1.toml create mode 100644 providers/openrouter/models/amazon/nova-micro-v1.toml create mode 100644 providers/openrouter/models/amazon/nova-premier-v1.toml create mode 100644 providers/openrouter/models/amazon/nova-pro-v1.toml create mode 100644 providers/openrouter/models/anthracite-org/magnum-v4-72b.toml create mode 100644 providers/openrouter/models/anthropic/claude-3-haiku.toml create mode 100644 providers/openrouter/models/anthropic/claude-3-opus.toml create mode 100644 providers/openrouter/models/anthropic/claude-3.5-haiku-20241022.toml create mode 100644 providers/openrouter/models/anthropic/claude-3.5-sonnet.toml create mode 100644 providers/openrouter/models/anthropic/claude-3.7-sonnet:thinking.toml create mode 100644 providers/openrouter/models/anthropic/claude-haiku-4.5.toml create mode 100644 providers/openrouter/models/anthropic/claude-opus-4.1.toml create mode 100644 providers/openrouter/models/anthropic/claude-opus-4.5.toml create mode 100644 providers/openrouter/models/anthropic/claude-sonnet-4.5.toml create mode 100644 providers/openrouter/models/anthropic/claude-sonnet-4.toml create mode 100644 providers/openrouter/models/arcee-ai/coder-large.toml create mode 100644 providers/openrouter/models/arcee-ai/maestro-reasoning.toml create mode 100644 providers/openrouter/models/arcee-ai/spotlight.toml create mode 100644 providers/openrouter/models/arcee-ai/trinity-mini.toml create mode 100644 providers/openrouter/models/arcee-ai/trinity-mini:free.toml create mode 100644 providers/openrouter/models/arcee-ai/virtuoso-large.toml create mode 100644 providers/openrouter/models/arliai/qwq-32b-arliai-rpr-v1.toml create mode 100644 providers/openrouter/models/baidu/ernie-4.5-21b-a3b-thinking.toml create mode 100644 providers/openrouter/models/baidu/ernie-4.5-21b-a3b.toml create mode 100644 providers/openrouter/models/baidu/ernie-4.5-300b-a47b.toml create mode 100644 providers/openrouter/models/baidu/ernie-4.5-vl-28b-a3b.toml create mode 100644 providers/openrouter/models/baidu/ernie-4.5-vl-424b-a47b.toml create mode 100644 providers/openrouter/models/bytedance/ui-tars-1.5-7b.toml create mode 100644 providers/openrouter/models/cognitivecomputations/dolphin-mistral-24b-venice-edition:free.toml create mode 100644 providers/openrouter/models/cohere/command-a.toml create 
mode 100644 providers/openrouter/models/cohere/command-r-08-2024.toml create mode 100644 providers/openrouter/models/cohere/command-r-plus-08-2024.toml create mode 100644 providers/openrouter/models/cohere/command-r7b-12-2024.toml create mode 100644 providers/openrouter/models/deepcogito/cogito-v2-preview-llama-109b-moe.toml create mode 100644 providers/openrouter/models/deepcogito/cogito-v2-preview-llama-405b.toml create mode 100644 providers/openrouter/models/deepcogito/cogito-v2-preview-llama-70b.toml create mode 100644 providers/openrouter/models/deepcogito/cogito-v2.1-671b.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-chat-v3.1.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-chat.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-prover-v2.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-r1-0528-qwen3-8b.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-r1-0528.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-r1-distill-qwen-32b.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-r1.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-v3.1-terminus.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-v3.1-terminus:exacto.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-v3.2-exp.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-v3.2-speciale.toml create mode 100644 providers/openrouter/models/deepseek/deepseek-v3.2.toml create mode 100644 providers/openrouter/models/eleutherai/llemma_7b.toml create mode 100644 providers/openrouter/models/essentialai/rnj-1-instruct.toml create mode 100644 providers/openrouter/models/google/gemini-2.0-flash-lite-001.toml create mode 100644 providers/openrouter/models/google/gemini-2.5-flash-image-preview.toml create mode 100644 providers/openrouter/models/google/gemini-2.5-flash-image.toml create mode 100644 providers/openrouter/models/google/gemini-2.5-flash-lite-preview-09-2025.toml create mode 100644 providers/openrouter/models/google/gemini-2.5-flash-lite.toml create mode 100644 providers/openrouter/models/google/gemini-2.5-flash-preview-09-2025.toml create mode 100644 providers/openrouter/models/google/gemini-2.5-pro-preview.toml create mode 100644 providers/openrouter/models/google/gemini-3-pro-image-preview.toml create mode 100644 providers/openrouter/models/google/gemini-3-pro-preview.toml create mode 100644 providers/openrouter/models/google/gemma-2-27b-it.toml create mode 100644 providers/openrouter/models/google/gemma-2-9b-it.toml create mode 100644 providers/openrouter/models/google/gemma-3-12b-it:free.toml create mode 100644 providers/openrouter/models/google/gemma-3-27b-it:free.toml create mode 100644 providers/openrouter/models/google/gemma-3-4b-it.toml create mode 100644 providers/openrouter/models/google/gemma-3-4b-it:free.toml create mode 100644 providers/openrouter/models/google/gemma-3n-e2b-it:free.toml create mode 100644 providers/openrouter/models/gryphe/mythomax-l2-13b.toml create mode 100644 providers/openrouter/models/ibm-granite/granite-4.0-h-micro.toml create mode 100644 providers/openrouter/models/inception/mercury-coder.toml create mode 100644 providers/openrouter/models/inception/mercury.toml create mode 100644 providers/openrouter/models/inflection/inflection-3-pi.toml create mode 100644 providers/openrouter/models/inflection/inflection-3-productivity.toml create mode 100644 
providers/openrouter/models/kwaipilot/kat-coder-pro:free.toml create mode 100644 providers/openrouter/models/liquid/lfm-2.2-6b.toml create mode 100644 providers/openrouter/models/liquid/lfm2-8b-a1b.toml create mode 100644 providers/openrouter/models/mancer/weaver.toml create mode 100644 providers/openrouter/models/meituan/longcat-flash-chat.toml create mode 100644 providers/openrouter/models/meituan/longcat-flash-chat:free.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3-70b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3-8b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.1-405b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.1-405b.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.1-70b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.1-8b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.2-1b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.2-3b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.2-3b-instruct:free.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.2-90b-vision-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-3.3-70b-instruct.toml create mode 100644 providers/openrouter/models/meta-llama/llama-4-maverick.toml create mode 100644 providers/openrouter/models/meta-llama/llama-4-scout.toml create mode 100644 providers/openrouter/models/meta-llama/llama-guard-2-8b.toml create mode 100644 providers/openrouter/models/meta-llama/llama-guard-3-8b.toml create mode 100644 providers/openrouter/models/meta-llama/llama-guard-4-12b.toml create mode 100644 providers/openrouter/models/microsoft/mai-ds-r1.toml create mode 100644 providers/openrouter/models/microsoft/phi-3-medium-128k-instruct.toml create mode 100644 providers/openrouter/models/microsoft/phi-3-mini-128k-instruct.toml create mode 100644 providers/openrouter/models/microsoft/phi-3.5-mini-128k-instruct.toml create mode 100644 providers/openrouter/models/microsoft/phi-4-multimodal-instruct.toml create mode 100644 providers/openrouter/models/microsoft/phi-4-reasoning-plus.toml create mode 100644 providers/openrouter/models/microsoft/phi-4.toml create mode 100644 providers/openrouter/models/microsoft/wizardlm-2-8x22b.toml create mode 100644 providers/openrouter/models/minimax/minimax-01.toml create mode 100644 providers/openrouter/models/minimax/minimax-m1.toml create mode 100644 providers/openrouter/models/minimax/minimax-m2.toml create mode 100644 providers/openrouter/models/mistralai/codestral-2508.toml create mode 100644 providers/openrouter/models/mistralai/devstral-2512.toml create mode 100644 providers/openrouter/models/mistralai/devstral-2512:free.toml create mode 100644 providers/openrouter/models/mistralai/devstral-medium.toml create mode 100644 providers/openrouter/models/mistralai/devstral-small.toml create mode 100644 providers/openrouter/models/mistralai/ministral-14b-2512.toml create mode 100644 providers/openrouter/models/mistralai/ministral-3b-2512.toml create mode 100644 providers/openrouter/models/mistralai/ministral-3b.toml create mode 100644 providers/openrouter/models/mistralai/ministral-8b-2512.toml create mode 100644 providers/openrouter/models/mistralai/ministral-8b.toml create mode 100644 providers/openrouter/models/mistralai/mistral-7b-instruct-v0.1.toml create mode 100644 
providers/openrouter/models/mistralai/mistral-7b-instruct-v0.2.toml create mode 100644 providers/openrouter/models/mistralai/mistral-7b-instruct-v0.3.toml create mode 100644 providers/openrouter/models/mistralai/mistral-7b-instruct.toml create mode 100644 providers/openrouter/models/mistralai/mistral-large-2407.toml create mode 100644 providers/openrouter/models/mistralai/mistral-large-2411.toml create mode 100644 providers/openrouter/models/mistralai/mistral-large-2512.toml create mode 100644 providers/openrouter/models/mistralai/mistral-large.toml create mode 100644 providers/openrouter/models/mistralai/mistral-medium-3.1.toml create mode 100644 providers/openrouter/models/mistralai/mistral-medium-3.toml create mode 100644 providers/openrouter/models/mistralai/mistral-nemo.toml create mode 100644 providers/openrouter/models/mistralai/mistral-saba.toml create mode 100644 providers/openrouter/models/mistralai/mistral-small-24b-instruct-2501.toml create mode 100644 providers/openrouter/models/mistralai/mistral-small-3.1-24b-instruct:free.toml create mode 100644 providers/openrouter/models/mistralai/mistral-tiny.toml create mode 100644 providers/openrouter/models/mistralai/mixtral-8x22b-instruct.toml create mode 100644 providers/openrouter/models/mistralai/mixtral-8x7b-instruct.toml create mode 100644 providers/openrouter/models/mistralai/pixtral-12b.toml create mode 100644 providers/openrouter/models/mistralai/pixtral-large-2411.toml create mode 100644 providers/openrouter/models/mistralai/voxtral-small-24b-2507.toml create mode 100644 providers/openrouter/models/moonshotai/kimi-dev-72b.toml create mode 100644 providers/openrouter/models/moonshotai/kimi-k2-0905.toml create mode 100644 providers/openrouter/models/moonshotai/kimi-k2-0905:exacto.toml create mode 100644 providers/openrouter/models/moonshotai/kimi-k2-thinking.toml create mode 100644 providers/openrouter/models/moonshotai/kimi-k2:free.toml create mode 100644 providers/openrouter/models/moonshotai/kimi-linear-48b-a3b-instruct.toml create mode 100644 providers/openrouter/models/morph/morph-v3-fast.toml create mode 100644 providers/openrouter/models/morph/morph-v3-large.toml create mode 100644 providers/openrouter/models/neversleep/llama-3.1-lumimaid-8b.toml create mode 100644 providers/openrouter/models/neversleep/noromaid-20b.toml create mode 100644 providers/openrouter/models/nex-agi/deepseek-v3.1-nex-n1:free.toml create mode 100644 providers/openrouter/models/nousresearch/deephermes-3-mistral-24b-preview.toml create mode 100644 providers/openrouter/models/nousresearch/hermes-2-pro-llama-3-8b.toml create mode 100644 providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b.toml create mode 100644 providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b:free.toml create mode 100644 providers/openrouter/models/nousresearch/hermes-3-llama-3.1-70b.toml create mode 100644 providers/openrouter/models/nousresearch/hermes-4-405b.toml create mode 100644 providers/openrouter/models/nousresearch/hermes-4-70b.toml create mode 100644 providers/openrouter/models/nvidia/llama-3.1-nemotron-70b-instruct.toml create mode 100644 providers/openrouter/models/nvidia/llama-3.1-nemotron-ultra-253b-v1.toml create mode 100644 providers/openrouter/models/nvidia/llama-3.3-nemotron-super-49b-v1.5.toml create mode 100644 providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl.toml create mode 100644 providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl:free.toml create mode 100644 
providers/openrouter/models/nvidia/nemotron-nano-9b-v2.toml create mode 100644 providers/openrouter/models/nvidia/nemotron-nano-9b-v2:free.toml create mode 100644 providers/openrouter/models/openai/chatgpt-4o-latest.toml create mode 100644 providers/openrouter/models/openai/codex-mini.toml create mode 100644 providers/openrouter/models/openai/gpt-3.5-turbo-0613.toml create mode 100644 providers/openrouter/models/openai/gpt-3.5-turbo-16k.toml create mode 100644 providers/openrouter/models/openai/gpt-3.5-turbo-instruct.toml create mode 100644 providers/openrouter/models/openai/gpt-3.5-turbo.toml create mode 100644 providers/openrouter/models/openai/gpt-4-0314.toml create mode 100644 providers/openrouter/models/openai/gpt-4-1106-preview.toml create mode 100644 providers/openrouter/models/openai/gpt-4-turbo-preview.toml create mode 100644 providers/openrouter/models/openai/gpt-4-turbo.toml create mode 100644 providers/openrouter/models/openai/gpt-4.1-nano.toml create mode 100644 providers/openrouter/models/openai/gpt-4.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-2024-05-13.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-2024-08-06.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-2024-11-20.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-audio-preview.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-mini-2024-07-18.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-mini-search-preview.toml create mode 100644 providers/openrouter/models/openai/gpt-4o-search-preview.toml create mode 100644 providers/openrouter/models/openai/gpt-4o.toml create mode 100644 providers/openrouter/models/openai/gpt-4o:extended.toml create mode 100644 providers/openrouter/models/openai/gpt-5-chat.toml create mode 100644 providers/openrouter/models/openai/gpt-5-codex.toml create mode 100644 providers/openrouter/models/openai/gpt-5-image-mini.toml create mode 100644 providers/openrouter/models/openai/gpt-5-image.toml create mode 100644 providers/openrouter/models/openai/gpt-5-mini.toml create mode 100644 providers/openrouter/models/openai/gpt-5-nano.toml create mode 100644 providers/openrouter/models/openai/gpt-5-pro.toml create mode 100644 providers/openrouter/models/openai/gpt-5.1-chat.toml create mode 100644 providers/openrouter/models/openai/gpt-5.1-codex-max.toml create mode 100644 providers/openrouter/models/openai/gpt-5.1-codex-mini.toml create mode 100644 providers/openrouter/models/openai/gpt-5.1-codex.toml create mode 100644 providers/openrouter/models/openai/gpt-5.1.toml create mode 100644 providers/openrouter/models/openai/gpt-5.toml create mode 100644 providers/openrouter/models/openai/gpt-oss-120b.toml create mode 100644 providers/openrouter/models/openai/gpt-oss-120b:exacto.toml create mode 100644 providers/openrouter/models/openai/gpt-oss-120b:free.toml create mode 100644 providers/openrouter/models/openai/gpt-oss-20b.toml create mode 100644 providers/openrouter/models/openai/gpt-oss-20b:free.toml create mode 100644 providers/openrouter/models/openai/gpt-oss-safeguard-20b.toml create mode 100644 providers/openrouter/models/openai/o1-pro.toml create mode 100644 providers/openrouter/models/openai/o1.toml create mode 100644 providers/openrouter/models/openai/o3-deep-research.toml create mode 100644 providers/openrouter/models/openai/o3-mini-high.toml create mode 100644 providers/openrouter/models/openai/o3-mini.toml create mode 100644 providers/openrouter/models/openai/o3-pro.toml create mode 100644 
providers/openrouter/models/openai/o3.toml create mode 100644 providers/openrouter/models/openai/o4-mini-deep-research.toml create mode 100644 providers/openrouter/models/openai/o4-mini-high.toml create mode 100644 providers/openrouter/models/opengvlab/internvl3-78b.toml create mode 100644 providers/openrouter/models/openrouter/auto.toml create mode 100644 providers/openrouter/models/openrouter/bodybuilder.toml create mode 100644 providers/openrouter/models/perplexity/sonar-deep-research.toml create mode 100644 providers/openrouter/models/perplexity/sonar-pro-search.toml create mode 100644 providers/openrouter/models/perplexity/sonar-pro.toml create mode 100644 providers/openrouter/models/perplexity/sonar-reasoning-pro.toml create mode 100644 providers/openrouter/models/perplexity/sonar-reasoning.toml create mode 100644 providers/openrouter/models/perplexity/sonar.toml create mode 100644 providers/openrouter/models/prime-intellect/intellect-3.toml create mode 100644 providers/openrouter/models/qwen/qwen-2.5-72b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen-2.5-7b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen-2.5-vl-7b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen-max.toml create mode 100644 providers/openrouter/models/qwen/qwen-plus-2025-07-28.toml create mode 100644 providers/openrouter/models/qwen/qwen-plus-2025-07-28:thinking.toml create mode 100644 providers/openrouter/models/qwen/qwen-plus.toml create mode 100644 providers/openrouter/models/qwen/qwen-turbo.toml create mode 100644 providers/openrouter/models/qwen/qwen-vl-max.toml create mode 100644 providers/openrouter/models/qwen/qwen-vl-plus.toml create mode 100644 providers/openrouter/models/qwen/qwen2.5-coder-7b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen2.5-vl-32b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen3-14b.toml create mode 100644 providers/openrouter/models/qwen/qwen3-235b-a22b-2507.toml create mode 100644 providers/openrouter/models/qwen/qwen3-235b-a22b-thinking-2507.toml create mode 100644 providers/openrouter/models/qwen/qwen3-235b-a22b.toml create mode 100644 providers/openrouter/models/qwen/qwen3-30b-a3b-instruct-2507.toml create mode 100644 providers/openrouter/models/qwen/qwen3-30b-a3b-thinking-2507.toml create mode 100644 providers/openrouter/models/qwen/qwen3-30b-a3b.toml create mode 100644 providers/openrouter/models/qwen/qwen3-32b.toml create mode 100644 providers/openrouter/models/qwen/qwen3-4b:free.toml create mode 100644 providers/openrouter/models/qwen/qwen3-8b.toml create mode 100644 providers/openrouter/models/qwen/qwen3-coder-30b-a3b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen3-coder-flash.toml create mode 100644 providers/openrouter/models/qwen/qwen3-coder-plus.toml create mode 100644 providers/openrouter/models/qwen/qwen3-coder.toml create mode 100644 providers/openrouter/models/qwen/qwen3-coder:exacto.toml create mode 100644 providers/openrouter/models/qwen/qwen3-coder:free.toml create mode 100644 providers/openrouter/models/qwen/qwen3-max.toml create mode 100644 providers/openrouter/models/qwen/qwen3-next-80b-a3b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen3-next-80b-a3b-thinking.toml create mode 100644 providers/openrouter/models/qwen/qwen3-vl-235b-a22b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen3-vl-235b-a22b-thinking.toml create mode 100644 
providers/openrouter/models/qwen/qwen3-vl-30b-a3b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen3-vl-30b-a3b-thinking.toml create mode 100644 providers/openrouter/models/qwen/qwen3-vl-8b-instruct.toml create mode 100644 providers/openrouter/models/qwen/qwen3-vl-8b-thinking.toml create mode 100644 providers/openrouter/models/qwen/qwq-32b.toml create mode 100644 providers/openrouter/models/raifle/sorcererlm-8x22b.toml create mode 100644 providers/openrouter/models/relace/relace-apply-3.toml create mode 100644 providers/openrouter/models/relace/relace-search.toml create mode 100644 providers/openrouter/models/sao10k/l3-euryale-70b.toml create mode 100644 providers/openrouter/models/sao10k/l3-lunaris-8b.toml create mode 100644 providers/openrouter/models/sao10k/l3.1-70b-hanami-x1.toml create mode 100644 providers/openrouter/models/sao10k/l3.1-euryale-70b.toml create mode 100644 providers/openrouter/models/sao10k/l3.3-euryale-70b.toml create mode 100644 providers/openrouter/models/stepfun-ai/step3.toml create mode 100644 providers/openrouter/models/switchpoint/router.toml create mode 100644 providers/openrouter/models/tencent/hunyuan-a13b-instruct.toml create mode 100644 providers/openrouter/models/thedrummer/anubis-70b-v1.1.toml create mode 100644 providers/openrouter/models/thedrummer/cydonia-24b-v4.1.toml create mode 100644 providers/openrouter/models/thedrummer/rocinante-12b.toml create mode 100644 providers/openrouter/models/thedrummer/skyfall-36b-v2.toml create mode 100644 providers/openrouter/models/thedrummer/unslopnemo-12b.toml create mode 100644 providers/openrouter/models/thudm/glm-4.1v-9b-thinking.toml create mode 100644 providers/openrouter/models/tngtech/deepseek-r1t-chimera.toml create mode 100644 providers/openrouter/models/tngtech/deepseek-r1t-chimera:free.toml create mode 100644 providers/openrouter/models/tngtech/deepseek-r1t2-chimera.toml create mode 100644 providers/openrouter/models/tngtech/tng-r1t-chimera.toml create mode 100644 providers/openrouter/models/tngtech/tng-r1t-chimera:free.toml create mode 100644 providers/openrouter/models/undi95/remm-slerp-l2-13b.toml create mode 100644 providers/openrouter/models/x-ai/grok-4-fast.toml create mode 100644 providers/openrouter/models/x-ai/grok-4.1-fast.toml create mode 100644 providers/openrouter/models/x-ai/grok-code-fast-1.toml create mode 100644 providers/openrouter/models/z-ai/glm-4-32b.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.5-air.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.5-air:free.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.5.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.5v.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.6.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.6:exacto.toml create mode 100644 providers/openrouter/models/z-ai/glm-4.6v.toml diff --git a/providers/openrouter/models/ai21/jamba-large-1.7.toml b/providers/openrouter/models/ai21/jamba-large-1.7.toml new file mode 100644 index 000000000..2fe3bab7c --- /dev/null +++ b/providers/openrouter/models/ai21/jamba-large-1.7.toml @@ -0,0 +1,22 @@ +name = "AI21: Jamba Large 1.7" +release_date = "2025-08-08" +last_updated = "2025-08-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 4096 + +[cost] +input = 2.0 +output = 8.0 diff --git 
a/providers/openrouter/models/ai21/jamba-mini-1.7.toml b/providers/openrouter/models/ai21/jamba-mini-1.7.toml new file mode 100644 index 000000000..0ea5c7c7d --- /dev/null +++ b/providers/openrouter/models/ai21/jamba-mini-1.7.toml @@ -0,0 +1,22 @@ +name = "AI21: Jamba Mini 1.7" +release_date = "2025-08-08" +last_updated = "2025-08-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/aion-labs/aion-1.0-mini.toml b/providers/openrouter/models/aion-labs/aion-1.0-mini.toml new file mode 100644 index 000000000..db18199e1 --- /dev/null +++ b/providers/openrouter/models/aion-labs/aion-1.0-mini.toml @@ -0,0 +1,22 @@ +name = "AionLabs: Aion-1.0-Mini" +release_date = "2025-02-04" +last_updated = "2025-02-04" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 32768 + +[cost] +input = 0.7 +output = 1.4 diff --git a/providers/openrouter/models/aion-labs/aion-1.0.toml b/providers/openrouter/models/aion-labs/aion-1.0.toml new file mode 100644 index 000000000..f2741f52d --- /dev/null +++ b/providers/openrouter/models/aion-labs/aion-1.0.toml @@ -0,0 +1,22 @@ +name = "AionLabs: Aion-1.0" +release_date = "2025-02-04" +last_updated = "2025-02-04" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 32768 + +[cost] +input = 4.0 +output = 8.0 diff --git a/providers/openrouter/models/aion-labs/aion-rp-llama-3.1-8b.toml b/providers/openrouter/models/aion-labs/aion-rp-llama-3.1-8b.toml new file mode 100644 index 000000000..4800f1781 --- /dev/null +++ b/providers/openrouter/models/aion-labs/aion-rp-llama-3.1-8b.toml @@ -0,0 +1,22 @@ +name = "AionLabs: Aion-RP 1.0 (8B)" +release_date = "2025-02-04" +last_updated = "2025-02-04" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.19999999999999998 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/alfredpros/codellama-7b-instruct-solidity.toml b/providers/openrouter/models/alfredpros/codellama-7b-instruct-solidity.toml new file mode 100644 index 000000000..618c4b145 --- /dev/null +++ b/providers/openrouter/models/alfredpros/codellama-7b-instruct-solidity.toml @@ -0,0 +1,22 @@ +name = "AlfredPros: CodeLLaMa 7B Instruct Solidity" +release_date = "2025-04-14" +last_updated = "2025-04-14" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 4096 +input = 4096 +output = 4096 + +[cost] +input = 0.7999999999999999 +output = 1.2 diff --git a/providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b.toml b/providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b.toml new file mode 100644 index 000000000..310f0b0a6 --- 
/dev/null +++ b/providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b.toml @@ -0,0 +1,22 @@ +name = "Tongyi DeepResearch 30B A3B" +release_date = "2025-09-18" +last_updated = "2025-09-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.09 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b:free.toml b/providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b:free.toml new file mode 100644 index 000000000..c0fd55bb0 --- /dev/null +++ b/providers/openrouter/models/alibaba/tongyi-deepresearch-30b-a3b:free.toml @@ -0,0 +1,22 @@ +name = "Tongyi DeepResearch 30B A3B (free)" +release_date = "2025-09-18" +last_updated = "2025-09-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/allenai/olmo-2-0325-32b-instruct.toml b/providers/openrouter/models/allenai/olmo-2-0325-32b-instruct.toml new file mode 100644 index 000000000..70cc9a05a --- /dev/null +++ b/providers/openrouter/models/allenai/olmo-2-0325-32b-instruct.toml @@ -0,0 +1,22 @@ +name = "AllenAI: Olmo 2 32B Instruct" +release_date = "2025-03-14" +last_updated = "2025-03-14" +open_weights = true +tool_call = false +structured_output = false +temperature = false +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 0.049999999999999996 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/allenai/olmo-3-32b-think:free.toml b/providers/openrouter/models/allenai/olmo-3-32b-think:free.toml new file mode 100644 index 000000000..79eda022d --- /dev/null +++ b/providers/openrouter/models/allenai/olmo-3-32b-think:free.toml @@ -0,0 +1,22 @@ +name = "AllenAI: Olmo 3 32B Think (free)" +release_date = "2025-11-21" +last_updated = "2025-11-21" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 65536 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/allenai/olmo-3-7b-instruct.toml b/providers/openrouter/models/allenai/olmo-3-7b-instruct.toml new file mode 100644 index 000000000..c04dd5b61 --- /dev/null +++ b/providers/openrouter/models/allenai/olmo-3-7b-instruct.toml @@ -0,0 +1,22 @@ +name = "AllenAI: Olmo 3 7B Instruct" +release_date = "2025-11-21" +last_updated = "2025-11-21" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 65536 + +[cost] +input = 0.09999999999999999 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/allenai/olmo-3-7b-think.toml b/providers/openrouter/models/allenai/olmo-3-7b-think.toml new file mode 100644 index 000000000..b2c9421a8 --- /dev/null +++ b/providers/openrouter/models/allenai/olmo-3-7b-think.toml @@ -0,0 +1,22 @@ +name = "AllenAI: 
Olmo 3 7B Think" +release_date = "2025-11-21" +last_updated = "2025-11-21" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 65536 + +[cost] +input = 0.12 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/alpindale/goliath-120b.toml b/providers/openrouter/models/alpindale/goliath-120b.toml new file mode 100644 index 000000000..20d8a88d1 --- /dev/null +++ b/providers/openrouter/models/alpindale/goliath-120b.toml @@ -0,0 +1,22 @@ +name = "Goliath 120B" +release_date = "2023-11-09" +last_updated = "2023-11-09" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 6144 +input = 6144 +output = 1024 + +[cost] +input = 6.0 +output = 8.0 diff --git a/providers/openrouter/models/amazon/nova-2-lite-v1.toml b/providers/openrouter/models/amazon/nova-2-lite-v1.toml new file mode 100644 index 000000000..2c583c923 --- /dev/null +++ b/providers/openrouter/models/amazon/nova-2-lite-v1.toml @@ -0,0 +1,22 @@ +name = "Amazon: Nova 2 Lite" +release_date = "2025-12-02" +last_updated = "2025-12-02" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "video", "file",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 65535 + +[cost] +input = 0.3 +output = 2.5 diff --git a/providers/openrouter/models/amazon/nova-2-lite-v1:free.toml b/providers/openrouter/models/amazon/nova-2-lite-v1:free.toml new file mode 100644 index 000000000..3c349c928 --- /dev/null +++ b/providers/openrouter/models/amazon/nova-2-lite-v1:free.toml @@ -0,0 +1,22 @@ +name = "Amazon: Nova 2 Lite (free)" +release_date = "2025-12-02" +last_updated = "2025-12-02" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "video", "file",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 65535 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/amazon/nova-lite-v1.toml b/providers/openrouter/models/amazon/nova-lite-v1.toml new file mode 100644 index 000000000..0d7d5ef24 --- /dev/null +++ b/providers/openrouter/models/amazon/nova-lite-v1.toml @@ -0,0 +1,22 @@ +name = "Amazon: Nova Lite 1.0" +release_date = "2024-12-05" +last_updated = "2024-12-05" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 300000 +input = 300000 +output = 5120 + +[cost] +input = 0.06 +output = 0.24 diff --git a/providers/openrouter/models/amazon/nova-micro-v1.toml b/providers/openrouter/models/amazon/nova-micro-v1.toml new file mode 100644 index 000000000..93da0ed6d --- /dev/null +++ b/providers/openrouter/models/amazon/nova-micro-v1.toml @@ -0,0 +1,22 @@ +name = "Amazon: Nova Micro 1.0" +release_date = "2024-12-05" +last_updated = "2024-12-05" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 
128000 +output = 5120 + +[cost] +input = 0.035 +output = 0.14 diff --git a/providers/openrouter/models/amazon/nova-premier-v1.toml b/providers/openrouter/models/amazon/nova-premier-v1.toml new file mode 100644 index 000000000..9cf0be77c --- /dev/null +++ b/providers/openrouter/models/amazon/nova-premier-v1.toml @@ -0,0 +1,22 @@ +name = "Amazon: Nova Premier 1.0" +release_date = "2025-10-31" +last_updated = "2025-10-31" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 32000 + +[cost] +input = 2.5 +output = 12.5 diff --git a/providers/openrouter/models/amazon/nova-pro-v1.toml b/providers/openrouter/models/amazon/nova-pro-v1.toml new file mode 100644 index 000000000..f975f924c --- /dev/null +++ b/providers/openrouter/models/amazon/nova-pro-v1.toml @@ -0,0 +1,22 @@ +name = "Amazon: Nova Pro 1.0" +release_date = "2024-12-05" +last_updated = "2024-12-05" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 300000 +input = 300000 +output = 5120 + +[cost] +input = 0.7999999999999999 +output = 3.1999999999999997 diff --git a/providers/openrouter/models/anthracite-org/magnum-v4-72b.toml b/providers/openrouter/models/anthracite-org/magnum-v4-72b.toml new file mode 100644 index 000000000..a4c951d97 --- /dev/null +++ b/providers/openrouter/models/anthracite-org/magnum-v4-72b.toml @@ -0,0 +1,22 @@ +name = "Magnum v4 72B" +release_date = "2024-10-21" +last_updated = "2024-10-21" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 16384 +input = 16384 +output = 2048 + +[cost] +input = 3.0 +output = 5.0 diff --git a/providers/openrouter/models/anthropic/claude-3-haiku.toml b/providers/openrouter/models/anthropic/claude-3-haiku.toml new file mode 100644 index 000000000..dfc0e9e2b --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-3-haiku.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude 3 Haiku" +release_date = "2024-03-12" +last_updated = "2024-03-12" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 4096 + +[cost] +input = 0.25 +output = 1.25 diff --git a/providers/openrouter/models/anthropic/claude-3-opus.toml b/providers/openrouter/models/anthropic/claude-3-opus.toml new file mode 100644 index 000000000..463eeed49 --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-3-opus.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude 3 Opus" +release_date = "2024-03-04" +last_updated = "2024-03-04" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 4096 + +[cost] +input = 15.0 +output = 75.0 diff --git a/providers/openrouter/models/anthropic/claude-3.5-haiku-20241022.toml b/providers/openrouter/models/anthropic/claude-3.5-haiku-20241022.toml new file mode 100644 index 000000000..483a2b94c --- /dev/null +++ 
b/providers/openrouter/models/anthropic/claude-3.5-haiku-20241022.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude 3.5 Haiku (2024-10-22)" +release_date = "2024-11-03" +last_updated = "2024-11-03" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 8192 + +[cost] +input = 0.7999999999999999 +output = 4.0 diff --git a/providers/openrouter/models/anthropic/claude-3.5-sonnet.toml b/providers/openrouter/models/anthropic/claude-3.5-sonnet.toml new file mode 100644 index 000000000..2beaa8bc4 --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-3.5-sonnet.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude 3.5 Sonnet" +release_date = "2024-10-21" +last_updated = "2024-10-21" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 8192 + +[cost] +input = 6.0 +output = 30.0 diff --git a/providers/openrouter/models/anthropic/claude-3.7-sonnet:thinking.toml b/providers/openrouter/models/anthropic/claude-3.7-sonnet:thinking.toml new file mode 100644 index 000000000..2602420db --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-3.7-sonnet:thinking.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude 3.7 Sonnet (thinking)" +release_date = "2025-02-24" +last_updated = "2025-02-24" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 64000 + +[cost] +input = 3.0 +output = 15.0 diff --git a/providers/openrouter/models/anthropic/claude-haiku-4.5.toml b/providers/openrouter/models/anthropic/claude-haiku-4.5.toml new file mode 100644 index 000000000..24fa70e73 --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-haiku-4.5.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude Haiku 4.5" +release_date = "2025-10-15" +last_updated = "2025-10-15" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 64000 + +[cost] +input = 1.0 +output = 5.0 diff --git a/providers/openrouter/models/anthropic/claude-opus-4.1.toml b/providers/openrouter/models/anthropic/claude-opus-4.1.toml new file mode 100644 index 000000000..353bbbcc8 --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-opus-4.1.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude Opus 4.1" +release_date = "2025-08-05" +last_updated = "2025-08-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 32000 + +[cost] +input = 15.0 +output = 75.0 diff --git a/providers/openrouter/models/anthropic/claude-opus-4.5.toml b/providers/openrouter/models/anthropic/claude-opus-4.5.toml new file mode 100644 index 000000000..e390299fa --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-opus-4.5.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude Opus 4.5" 
+release_date = "2025-11-24" +last_updated = "2025-11-24" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "file", "image", "text",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 32000 + +[cost] +input = 5.0 +output = 25.0 diff --git a/providers/openrouter/models/anthropic/claude-sonnet-4.5.toml b/providers/openrouter/models/anthropic/claude-sonnet-4.5.toml new file mode 100644 index 000000000..cd1279438 --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-sonnet-4.5.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude Sonnet 4.5" +release_date = "2025-09-29" +last_updated = "2025-09-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 64000 + +[cost] +input = 3.0 +output = 15.0 diff --git a/providers/openrouter/models/anthropic/claude-sonnet-4.toml b/providers/openrouter/models/anthropic/claude-sonnet-4.toml new file mode 100644 index 000000000..b9c66bbe7 --- /dev/null +++ b/providers/openrouter/models/anthropic/claude-sonnet-4.toml @@ -0,0 +1,22 @@ +name = "Anthropic: Claude Sonnet 4" +release_date = "2025-05-22" +last_updated = "2025-05-22" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 64000 + +[cost] +input = 3.0 +output = 15.0 diff --git a/providers/openrouter/models/arcee-ai/coder-large.toml b/providers/openrouter/models/arcee-ai/coder-large.toml new file mode 100644 index 000000000..bba56ccf4 --- /dev/null +++ b/providers/openrouter/models/arcee-ai/coder-large.toml @@ -0,0 +1,22 @@ +name = "Arcee AI: Coder Large" +release_date = "2025-05-05" +last_updated = "2025-05-05" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.5 +output = 0.7999999999999999 diff --git a/providers/openrouter/models/arcee-ai/maestro-reasoning.toml b/providers/openrouter/models/arcee-ai/maestro-reasoning.toml new file mode 100644 index 000000000..b4771b1d8 --- /dev/null +++ b/providers/openrouter/models/arcee-ai/maestro-reasoning.toml @@ -0,0 +1,22 @@ +name = "Arcee AI: Maestro Reasoning" +release_date = "2025-05-05" +last_updated = "2025-05-05" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 32000 + +[cost] +input = 0.8999999999999999 +output = 3.3000000000000003 diff --git a/providers/openrouter/models/arcee-ai/spotlight.toml b/providers/openrouter/models/arcee-ai/spotlight.toml new file mode 100644 index 000000000..e4dd16c87 --- /dev/null +++ b/providers/openrouter/models/arcee-ai/spotlight.toml @@ -0,0 +1,22 @@ +name = "Arcee AI: Spotlight" +release_date = "2025-05-05" +last_updated = "2025-05-05" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text",] 
+output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 65537 + +[cost] +input = 0.18 +output = 0.18 diff --git a/providers/openrouter/models/arcee-ai/trinity-mini.toml b/providers/openrouter/models/arcee-ai/trinity-mini.toml new file mode 100644 index 000000000..1288d0f35 --- /dev/null +++ b/providers/openrouter/models/arcee-ai/trinity-mini.toml @@ -0,0 +1,22 @@ +name = "Arcee AI: Trinity Mini" +release_date = "2025-12-01" +last_updated = "2025-12-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.045 +output = 0.15 diff --git a/providers/openrouter/models/arcee-ai/trinity-mini:free.toml b/providers/openrouter/models/arcee-ai/trinity-mini:free.toml new file mode 100644 index 000000000..65ea0ce3c --- /dev/null +++ b/providers/openrouter/models/arcee-ai/trinity-mini:free.toml @@ -0,0 +1,22 @@ +name = "Arcee AI: Trinity Mini (free)" +release_date = "2025-12-01" +last_updated = "2025-12-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/arcee-ai/virtuoso-large.toml b/providers/openrouter/models/arcee-ai/virtuoso-large.toml new file mode 100644 index 000000000..e944b26d8 --- /dev/null +++ b/providers/openrouter/models/arcee-ai/virtuoso-large.toml @@ -0,0 +1,22 @@ +name = "Arcee AI: Virtuoso Large" +release_date = "2025-05-05" +last_updated = "2025-05-05" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 64000 + +[cost] +input = 0.75 +output = 1.2 diff --git a/providers/openrouter/models/arliai/qwq-32b-arliai-rpr-v1.toml b/providers/openrouter/models/arliai/qwq-32b-arliai-rpr-v1.toml new file mode 100644 index 000000000..3a546901f --- /dev/null +++ b/providers/openrouter/models/arliai/qwq-32b-arliai-rpr-v1.toml @@ -0,0 +1,22 @@ +name = "ArliAI: QwQ 32B RpR v1" +release_date = "2025-04-13" +last_updated = "2025-04-13" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.03 +output = 0.11 diff --git a/providers/openrouter/models/baidu/ernie-4.5-21b-a3b-thinking.toml b/providers/openrouter/models/baidu/ernie-4.5-21b-a3b-thinking.toml new file mode 100644 index 000000000..ddfd72296 --- /dev/null +++ b/providers/openrouter/models/baidu/ernie-4.5-21b-a3b-thinking.toml @@ -0,0 +1,22 @@ +name = "Baidu: ERNIE 4.5 21B A3B Thinking" +release_date = "2025-10-09" +last_updated = "2025-10-09" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 65536 + +[cost] +input = 0.056 +output = 0.224 diff --git a/providers/openrouter/models/baidu/ernie-4.5-21b-a3b.toml b/providers/openrouter/models/baidu/ernie-4.5-21b-a3b.toml new file mode 100644 
index 000000000..845d13450 --- /dev/null +++ b/providers/openrouter/models/baidu/ernie-4.5-21b-a3b.toml @@ -0,0 +1,22 @@ +name = "Baidu: ERNIE 4.5 21B A3B" +release_date = "2025-08-12" +last_updated = "2025-08-12" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 120000 +input = 120000 +output = 8000 + +[cost] +input = 0.056 +output = 0.224 diff --git a/providers/openrouter/models/baidu/ernie-4.5-300b-a47b.toml b/providers/openrouter/models/baidu/ernie-4.5-300b-a47b.toml new file mode 100644 index 000000000..1200ecf86 --- /dev/null +++ b/providers/openrouter/models/baidu/ernie-4.5-300b-a47b.toml @@ -0,0 +1,22 @@ +name = "Baidu: ERNIE 4.5 300B A47B " +release_date = "2025-06-30" +last_updated = "2025-06-30" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 123000 +input = 123000 +output = 12000 + +[cost] +input = 0.224 +output = 0.88 diff --git a/providers/openrouter/models/baidu/ernie-4.5-vl-28b-a3b.toml b/providers/openrouter/models/baidu/ernie-4.5-vl-28b-a3b.toml new file mode 100644 index 000000000..8496339a3 --- /dev/null +++ b/providers/openrouter/models/baidu/ernie-4.5-vl-28b-a3b.toml @@ -0,0 +1,22 @@ +name = "Baidu: ERNIE 4.5 VL 28B A3B" +release_date = "2025-08-12" +last_updated = "2025-08-12" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 30000 +input = 30000 +output = 8000 + +[cost] +input = 0.112 +output = 0.448 diff --git a/providers/openrouter/models/baidu/ernie-4.5-vl-424b-a47b.toml b/providers/openrouter/models/baidu/ernie-4.5-vl-424b-a47b.toml new file mode 100644 index 000000000..9e33b4bd7 --- /dev/null +++ b/providers/openrouter/models/baidu/ernie-4.5-vl-424b-a47b.toml @@ -0,0 +1,22 @@ +name = "Baidu: ERNIE 4.5 VL 424B A47B " +release_date = "2025-06-30" +last_updated = "2025-06-30" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 123000 +input = 123000 +output = 16000 + +[cost] +input = 0.33599999999999997 +output = 1.0 diff --git a/providers/openrouter/models/bytedance/ui-tars-1.5-7b.toml b/providers/openrouter/models/bytedance/ui-tars-1.5-7b.toml new file mode 100644 index 000000000..d01b31341 --- /dev/null +++ b/providers/openrouter/models/bytedance/ui-tars-1.5-7b.toml @@ -0,0 +1,22 @@ +name = "ByteDance: UI-TARS 7B " +release_date = "2025-07-22" +last_updated = "2025-07-22" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 2048 + +[cost] +input = 0.09999999999999999 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/cognitivecomputations/dolphin-mistral-24b-venice-edition:free.toml b/providers/openrouter/models/cognitivecomputations/dolphin-mistral-24b-venice-edition:free.toml new file mode 100644 index 000000000..1513ea7c3 --- /dev/null +++ 
b/providers/openrouter/models/cognitivecomputations/dolphin-mistral-24b-venice-edition:free.toml @@ -0,0 +1,22 @@ +name = "Venice: Uncensored (free)" +release_date = "2025-07-09" +last_updated = "2025-07-09" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/cohere/command-a.toml b/providers/openrouter/models/cohere/command-a.toml new file mode 100644 index 000000000..0af61e2f2 --- /dev/null +++ b/providers/openrouter/models/cohere/command-a.toml @@ -0,0 +1,22 @@ +name = "Cohere: Command A" +release_date = "2025-03-13" +last_updated = "2025-03-13" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 8192 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/cohere/command-r-08-2024.toml b/providers/openrouter/models/cohere/command-r-08-2024.toml new file mode 100644 index 000000000..33b331c84 --- /dev/null +++ b/providers/openrouter/models/cohere/command-r-08-2024.toml @@ -0,0 +1,22 @@ +name = "Cohere: Command R (08-2024)" +release_date = "2024-08-29" +last_updated = "2024-08-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4000 + +[cost] +input = 0.15 +output = 0.6 diff --git a/providers/openrouter/models/cohere/command-r-plus-08-2024.toml b/providers/openrouter/models/cohere/command-r-plus-08-2024.toml new file mode 100644 index 000000000..2c7dc49c8 --- /dev/null +++ b/providers/openrouter/models/cohere/command-r-plus-08-2024.toml @@ -0,0 +1,22 @@ +name = "Cohere: Command R+ (08-2024)" +release_date = "2024-08-29" +last_updated = "2024-08-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4000 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/cohere/command-r7b-12-2024.toml b/providers/openrouter/models/cohere/command-r7b-12-2024.toml new file mode 100644 index 000000000..58a7a1255 --- /dev/null +++ b/providers/openrouter/models/cohere/command-r7b-12-2024.toml @@ -0,0 +1,22 @@ +name = "Cohere: Command R7B (12-2024)" +release_date = "2024-12-14" +last_updated = "2024-12-14" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4000 + +[cost] +input = 0.0375 +output = 0.15 diff --git a/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-109b-moe.toml b/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-109b-moe.toml new file mode 100644 index 000000000..f51c27a65 --- /dev/null +++ b/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-109b-moe.toml @@ -0,0 +1,22 @@ +name = "Cogito V2 Preview Llama 109B" +release_date = "2025-09-02" +last_updated = "2025-09-02" +open_weights = false +tool_call = true 
+structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 32767 +input = 32767 +output = 4096 + +[cost] +input = 0.18 +output = 0.59 diff --git a/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-405b.toml b/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-405b.toml new file mode 100644 index 000000000..2bbf528a5 --- /dev/null +++ b/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-405b.toml @@ -0,0 +1,22 @@ +name = "Deep Cogito: Cogito V2 Preview Llama 405B" +release_date = "2025-10-17" +last_updated = "2025-10-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 3.5 +output = 3.5 diff --git a/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-70b.toml b/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-70b.toml new file mode 100644 index 000000000..48f21d9e5 --- /dev/null +++ b/providers/openrouter/models/deepcogito/cogito-v2-preview-llama-70b.toml @@ -0,0 +1,22 @@ +name = "Deep Cogito: Cogito V2 Preview Llama 70B" +release_date = "2025-09-02" +last_updated = "2025-09-02" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.88 +output = 0.88 diff --git a/providers/openrouter/models/deepcogito/cogito-v2.1-671b.toml b/providers/openrouter/models/deepcogito/cogito-v2.1-671b.toml new file mode 100644 index 000000000..af44331eb --- /dev/null +++ b/providers/openrouter/models/deepcogito/cogito-v2.1-671b.toml @@ -0,0 +1,22 @@ +name = "Deep Cogito: Cogito v2.1 671B" +release_date = "2025-11-13" +last_updated = "2025-11-13" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 1.25 +output = 1.25 diff --git a/providers/openrouter/models/deepseek/deepseek-chat-v3.1.toml b/providers/openrouter/models/deepseek/deepseek-chat-v3.1.toml new file mode 100644 index 000000000..7e3d3c04f --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-chat-v3.1.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3.1" +release_date = "2025-08-21" +last_updated = "2025-08-21" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 7168 + +[cost] +input = 0.15 +output = 0.75 diff --git a/providers/openrouter/models/deepseek/deepseek-chat.toml b/providers/openrouter/models/deepseek/deepseek-chat.toml new file mode 100644 index 000000000..ba2b4d4b7 --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-chat.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3" +release_date = "2024-12-26" +last_updated = "2024-12-26" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 
+output = 163840 + +[cost] +input = 0.3 +output = 1.2 diff --git a/providers/openrouter/models/deepseek/deepseek-prover-v2.toml b/providers/openrouter/models/deepseek/deepseek-prover-v2.toml new file mode 100644 index 000000000..0f240a429 --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-prover-v2.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek Prover V2" +release_date = "2025-04-30" +last_updated = "2025-04-30" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 4096 + +[cost] +input = 0.5 +output = 2.1799999999999997 diff --git a/providers/openrouter/models/deepseek/deepseek-r1-0528-qwen3-8b.toml b/providers/openrouter/models/deepseek/deepseek-r1-0528-qwen3-8b.toml new file mode 100644 index 000000000..3e32ecffe --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-r1-0528-qwen3-8b.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek R1 0528 Qwen3 8B" +release_date = "2025-05-29" +last_updated = "2025-05-29" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.02 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/deepseek/deepseek-r1-0528.toml b/providers/openrouter/models/deepseek/deepseek-r1-0528.toml new file mode 100644 index 000000000..f7b403f2a --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-r1-0528.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: R1 0528" +release_date = "2025-05-28" +last_updated = "2025-05-28" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 163840 + +[cost] +input = 0.39999999999999997 +output = 1.75 diff --git a/providers/openrouter/models/deepseek/deepseek-r1-distill-qwen-32b.toml b/providers/openrouter/models/deepseek/deepseek-r1-distill-qwen-32b.toml new file mode 100644 index 000000000..ef0f83a7b --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-r1-distill-qwen-32b.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: R1 Distill Qwen 32B" +release_date = "2025-01-29" +last_updated = "2025-01-29" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 64000 +input = 64000 +output = 32000 + +[cost] +input = 0.24 +output = 0.24 diff --git a/providers/openrouter/models/deepseek/deepseek-r1.toml b/providers/openrouter/models/deepseek/deepseek-r1.toml new file mode 100644 index 000000000..087956ced --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-r1.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: R1" +release_date = "2025-01-20" +last_updated = "2025-01-20" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 4096 + +[cost] +input = 0.3 +output = 1.2 diff --git a/providers/openrouter/models/deepseek/deepseek-v3.1-terminus.toml b/providers/openrouter/models/deepseek/deepseek-v3.1-terminus.toml new 
file mode 100644 index 000000000..896f763be --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-v3.1-terminus.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3.1 Terminus" +release_date = "2025-09-22" +last_updated = "2025-09-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 4096 + +[cost] +input = 0.21 +output = 0.7899999999999999 diff --git a/providers/openrouter/models/deepseek/deepseek-v3.1-terminus:exacto.toml b/providers/openrouter/models/deepseek/deepseek-v3.1-terminus:exacto.toml new file mode 100644 index 000000000..6c1dc368f --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-v3.1-terminus:exacto.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3.1 Terminus (exacto)" +release_date = "2025-09-22" +last_updated = "2025-09-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 4096 + +[cost] +input = 0.21 +output = 0.7899999999999999 diff --git a/providers/openrouter/models/deepseek/deepseek-v3.2-exp.toml b/providers/openrouter/models/deepseek/deepseek-v3.2-exp.toml new file mode 100644 index 000000000..583038daf --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-v3.2-exp.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3.2 Exp" +release_date = "2025-09-29" +last_updated = "2025-09-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 4096 + +[cost] +input = 0.21 +output = 0.32 diff --git a/providers/openrouter/models/deepseek/deepseek-v3.2-speciale.toml b/providers/openrouter/models/deepseek/deepseek-v3.2-speciale.toml new file mode 100644 index 000000000..477cc09cf --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-v3.2-speciale.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3.2 Speciale" +release_date = "2025-12-01" +last_updated = "2025-12-01" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 65536 + +[cost] +input = 0.27 +output = 0.41 diff --git a/providers/openrouter/models/deepseek/deepseek-v3.2.toml b/providers/openrouter/models/deepseek/deepseek-v3.2.toml new file mode 100644 index 000000000..e0f0cc666 --- /dev/null +++ b/providers/openrouter/models/deepseek/deepseek-v3.2.toml @@ -0,0 +1,22 @@ +name = "DeepSeek: DeepSeek V3.2" +release_date = "2025-12-01" +last_updated = "2025-12-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 65536 + +[cost] +input = 0.26 +output = 0.39 diff --git a/providers/openrouter/models/eleutherai/llemma_7b.toml b/providers/openrouter/models/eleutherai/llemma_7b.toml new file mode 100644 index 000000000..b21806538 --- /dev/null +++ b/providers/openrouter/models/eleutherai/llemma_7b.toml @@ -0,0 +1,22 @@ +name = "EleutherAI: Llemma 7b" +release_date = 
"2025-04-14" +last_updated = "2025-04-14" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 4096 +input = 4096 +output = 4096 + +[cost] +input = 0.7999999999999999 +output = 1.2 diff --git a/providers/openrouter/models/essentialai/rnj-1-instruct.toml b/providers/openrouter/models/essentialai/rnj-1-instruct.toml new file mode 100644 index 000000000..11f6cc7a2 --- /dev/null +++ b/providers/openrouter/models/essentialai/rnj-1-instruct.toml @@ -0,0 +1,22 @@ +name = "EssentialAI: Rnj 1 Instruct" +release_date = "2025-12-07" +last_updated = "2025-12-07" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.15 +output = 0.15 diff --git a/providers/openrouter/models/google/gemini-2.0-flash-lite-001.toml b/providers/openrouter/models/google/gemini-2.0-flash-lite-001.toml new file mode 100644 index 000000000..0fa669899 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.0-flash-lite-001.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.0 Flash Lite" +release_date = "2025-02-25" +last_updated = "2025-02-25" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file", "audio", "video",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 8192 + +[cost] +input = 0.075 +output = 0.3 diff --git a/providers/openrouter/models/google/gemini-2.5-flash-image-preview.toml b/providers/openrouter/models/google/gemini-2.5-flash-image-preview.toml new file mode 100644 index 000000000..2be591242 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.5-flash-image-preview.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.5 Flash Image Preview (Nano Banana)" +release_date = "2025-08-26" +last_updated = "2025-08-26" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "image", "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.3 +output = 2.5 diff --git a/providers/openrouter/models/google/gemini-2.5-flash-image.toml b/providers/openrouter/models/google/gemini-2.5-flash-image.toml new file mode 100644 index 000000000..17a35cb22 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.5-flash-image.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.5 Flash Image (Nano Banana)" +release_date = "2025-10-07" +last_updated = "2025-10-07" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "image", "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.3 +output = 2.5 diff --git a/providers/openrouter/models/google/gemini-2.5-flash-lite-preview-09-2025.toml b/providers/openrouter/models/google/gemini-2.5-flash-lite-preview-09-2025.toml new file mode 100644 index 000000000..9f6e48fe1 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.5-flash-lite-preview-09-2025.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.5 Flash Lite Preview 09-2025" +release_date = "2025-09-25" 
+last_updated = "2025-09-25" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file", "audio", "video",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 65536 + +[cost] +input = 0.09999999999999999 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/google/gemini-2.5-flash-lite.toml b/providers/openrouter/models/google/gemini-2.5-flash-lite.toml new file mode 100644 index 000000000..ba0a49c60 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.5-flash-lite.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.5 Flash Lite" +release_date = "2025-07-22" +last_updated = "2025-07-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file", "audio", "video",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 65535 + +[cost] +input = 0.09999999999999999 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/google/gemini-2.5-flash-preview-09-2025.toml b/providers/openrouter/models/google/gemini-2.5-flash-preview-09-2025.toml new file mode 100644 index 000000000..af2313ad8 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.5-flash-preview-09-2025.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.5 Flash Preview 09-2025" +release_date = "2025-09-25" +last_updated = "2025-09-25" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "file", "text", "audio", "video",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 65536 + +[cost] +input = 0.3 +output = 2.5 diff --git a/providers/openrouter/models/google/gemini-2.5-pro-preview.toml b/providers/openrouter/models/google/gemini-2.5-pro-preview.toml new file mode 100644 index 000000000..08eb15f52 --- /dev/null +++ b/providers/openrouter/models/google/gemini-2.5-pro-preview.toml @@ -0,0 +1,22 @@ +name = "Google: Gemini 2.5 Pro Preview 06-05" +release_date = "2025-06-05" +last_updated = "2025-06-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "file", "image", "text", "audio",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 65536 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/google/gemini-3-pro-image-preview.toml b/providers/openrouter/models/google/gemini-3-pro-image-preview.toml new file mode 100644 index 000000000..2a7f15472 --- /dev/null +++ b/providers/openrouter/models/google/gemini-3-pro-image-preview.toml @@ -0,0 +1,22 @@ +name = "Google: Nano Banana Pro (Gemini 3 Pro Image Preview)" +release_date = "2025-11-20" +last_updated = "2025-11-20" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "image", "text",] + +[limit] +context = 65536 +input = 65536 +output = 32768 + +[cost] +input = 2.0 +output = 12.0 diff --git a/providers/openrouter/models/google/gemini-3-pro-preview.toml b/providers/openrouter/models/google/gemini-3-pro-preview.toml new file mode 100644 index 000000000..fdf303459 --- /dev/null +++ b/providers/openrouter/models/google/gemini-3-pro-preview.toml @@ -0,0 +1,22 @@ 
+name = "Google: Gemini 3 Pro Preview" +release_date = "2025-11-18" +last_updated = "2025-11-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file", "audio", "video",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 65536 + +[cost] +input = 2.0 +output = 12.0 diff --git a/providers/openrouter/models/google/gemma-2-27b-it.toml b/providers/openrouter/models/google/gemma-2-27b-it.toml new file mode 100644 index 000000000..8cf30cfb4 --- /dev/null +++ b/providers/openrouter/models/google/gemma-2-27b-it.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 2 27B" +release_date = "2024-07-12" +last_updated = "2024-07-12" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 4096 + +[cost] +input = 0.65 +output = 0.65 diff --git a/providers/openrouter/models/google/gemma-2-9b-it.toml b/providers/openrouter/models/google/gemma-2-9b-it.toml new file mode 100644 index 000000000..10d5b5cdf --- /dev/null +++ b/providers/openrouter/models/google/gemma-2-9b-it.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 2 9B" +release_date = "2024-06-27" +last_updated = "2024-06-27" +open_weights = true +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 4096 + +[cost] +input = 0.03 +output = 0.09 diff --git a/providers/openrouter/models/google/gemma-3-12b-it:free.toml b/providers/openrouter/models/google/gemma-3-12b-it:free.toml new file mode 100644 index 000000000..dc33f103a --- /dev/null +++ b/providers/openrouter/models/google/gemma-3-12b-it:free.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 3 12B (free)" +release_date = "2025-03-13" +last_updated = "2025-03-13" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 8192 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/google/gemma-3-27b-it:free.toml b/providers/openrouter/models/google/gemma-3-27b-it:free.toml new file mode 100644 index 000000000..b39e5c574 --- /dev/null +++ b/providers/openrouter/models/google/gemma-3-27b-it:free.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 3 27B (free)" +release_date = "2025-03-12" +last_updated = "2025-03-12" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 8192 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/google/gemma-3-4b-it.toml b/providers/openrouter/models/google/gemma-3-4b-it.toml new file mode 100644 index 000000000..77fd5f802 --- /dev/null +++ b/providers/openrouter/models/google/gemma-3-4b-it.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 3 4B" +release_date = "2025-03-13" +last_updated = "2025-03-13" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 
96000 +input = 96000 +output = 4096 + +[cost] +input = 0.01703012 +output = 0.0681536 diff --git a/providers/openrouter/models/google/gemma-3-4b-it:free.toml b/providers/openrouter/models/google/gemma-3-4b-it:free.toml new file mode 100644 index 000000000..b91c000e5 --- /dev/null +++ b/providers/openrouter/models/google/gemma-3-4b-it:free.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 3 4B (free)" +release_date = "2025-03-13" +last_updated = "2025-03-13" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 8192 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/google/gemma-3n-e2b-it:free.toml b/providers/openrouter/models/google/gemma-3n-e2b-it:free.toml new file mode 100644 index 000000000..c3ded60b8 --- /dev/null +++ b/providers/openrouter/models/google/gemma-3n-e2b-it:free.toml @@ -0,0 +1,22 @@ +name = "Google: Gemma 3n 2B (free)" +release_date = "2025-07-09" +last_updated = "2025-07-09" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 2048 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/gryphe/mythomax-l2-13b.toml b/providers/openrouter/models/gryphe/mythomax-l2-13b.toml new file mode 100644 index 000000000..df2d6e6c0 --- /dev/null +++ b/providers/openrouter/models/gryphe/mythomax-l2-13b.toml @@ -0,0 +1,22 @@ +name = "MythoMax 13B" +release_date = "2023-07-01" +last_updated = "2023-07-01" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 4096 +input = 4096 +output = 4096 + +[cost] +input = 0.06 +output = 0.06 diff --git a/providers/openrouter/models/ibm-granite/granite-4.0-h-micro.toml b/providers/openrouter/models/ibm-granite/granite-4.0-h-micro.toml new file mode 100644 index 000000000..0f635c68e --- /dev/null +++ b/providers/openrouter/models/ibm-granite/granite-4.0-h-micro.toml @@ -0,0 +1,22 @@ +name = "IBM: Granite 4.0 Micro" +release_date = "2025-10-19" +last_updated = "2025-10-19" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131000 +input = 131000 +output = 4096 + +[cost] +input = 0.017 +output = 0.11 diff --git a/providers/openrouter/models/inception/mercury-coder.toml b/providers/openrouter/models/inception/mercury-coder.toml new file mode 100644 index 000000000..e60ab9e30 --- /dev/null +++ b/providers/openrouter/models/inception/mercury-coder.toml @@ -0,0 +1,22 @@ +name = "Inception: Mercury Coder" +release_date = "2025-04-30" +last_updated = "2025-04-30" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 0.25 +output = 1.0 diff --git a/providers/openrouter/models/inception/mercury.toml b/providers/openrouter/models/inception/mercury.toml new file mode 100644 index 000000000..fd64e475e --- /dev/null +++ 
b/providers/openrouter/models/inception/mercury.toml @@ -0,0 +1,22 @@ +name = "Inception: Mercury" +release_date = "2025-06-26" +last_updated = "2025-06-26" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 0.25 +output = 1.0 diff --git a/providers/openrouter/models/inflection/inflection-3-pi.toml b/providers/openrouter/models/inflection/inflection-3-pi.toml new file mode 100644 index 000000000..3f9c1581d --- /dev/null +++ b/providers/openrouter/models/inflection/inflection-3-pi.toml @@ -0,0 +1,22 @@ +name = "Inflection: Inflection 3 Pi" +release_date = "2024-10-10" +last_updated = "2024-10-10" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8000 +input = 8000 +output = 1024 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/inflection/inflection-3-productivity.toml b/providers/openrouter/models/inflection/inflection-3-productivity.toml new file mode 100644 index 000000000..a3072fa9d --- /dev/null +++ b/providers/openrouter/models/inflection/inflection-3-productivity.toml @@ -0,0 +1,22 @@ +name = "Inflection: Inflection 3 Productivity" +release_date = "2024-10-10" +last_updated = "2024-10-10" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8000 +input = 8000 +output = 1024 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/kwaipilot/kat-coder-pro:free.toml b/providers/openrouter/models/kwaipilot/kat-coder-pro:free.toml new file mode 100644 index 000000000..5ed64d635 --- /dev/null +++ b/providers/openrouter/models/kwaipilot/kat-coder-pro:free.toml @@ -0,0 +1,22 @@ +name = "Kwaipilot: KAT-Coder-Pro V1 (free)" +release_date = "2025-11-09" +last_updated = "2025-11-09" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 32768 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/liquid/lfm-2.2-6b.toml b/providers/openrouter/models/liquid/lfm-2.2-6b.toml new file mode 100644 index 000000000..6c38dd394 --- /dev/null +++ b/providers/openrouter/models/liquid/lfm-2.2-6b.toml @@ -0,0 +1,22 @@ +name = "LiquidAI/LFM2-2.6B" +release_date = "2025-10-20" +last_updated = "2025-10-20" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.049999999999999996 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/liquid/lfm2-8b-a1b.toml b/providers/openrouter/models/liquid/lfm2-8b-a1b.toml new file mode 100644 index 000000000..7ef6d4075 --- /dev/null +++ b/providers/openrouter/models/liquid/lfm2-8b-a1b.toml @@ -0,0 +1,22 @@ +name = "LiquidAI/LFM2-8B-A1B" +release_date = "2025-10-20" +last_updated = "2025-10-20" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = 
false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.049999999999999996 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/mancer/weaver.toml b/providers/openrouter/models/mancer/weaver.toml new file mode 100644 index 000000000..e52b592a3 --- /dev/null +++ b/providers/openrouter/models/mancer/weaver.toml @@ -0,0 +1,22 @@ +name = "Mancer: Weaver (alpha)" +release_date = "2023-08-01" +last_updated = "2023-08-01" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8000 +input = 8000 +output = 2000 + +[cost] +input = 1.125 +output = 1.125 diff --git a/providers/openrouter/models/meituan/longcat-flash-chat.toml b/providers/openrouter/models/meituan/longcat-flash-chat.toml new file mode 100644 index 000000000..d48a73cfb --- /dev/null +++ b/providers/openrouter/models/meituan/longcat-flash-chat.toml @@ -0,0 +1,22 @@ +name = "Meituan: LongCat Flash Chat" +release_date = "2025-09-09" +last_updated = "2025-09-09" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.15 +output = 0.75 diff --git a/providers/openrouter/models/meituan/longcat-flash-chat:free.toml b/providers/openrouter/models/meituan/longcat-flash-chat:free.toml new file mode 100644 index 000000000..1201d8e78 --- /dev/null +++ b/providers/openrouter/models/meituan/longcat-flash-chat:free.toml @@ -0,0 +1,22 @@ +name = "Meituan: LongCat Flash Chat (free)" +release_date = "2025-09-09" +last_updated = "2025-09-09" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/meta-llama/llama-3-70b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3-70b-instruct.toml new file mode 100644 index 000000000..7dffc78c1 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3-70b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3 70B Instruct" +release_date = "2024-04-17" +last_updated = "2024-04-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 16384 + +[cost] +input = 0.3 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/meta-llama/llama-3-8b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3-8b-instruct.toml new file mode 100644 index 000000000..ae362ff7a --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3-8b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3 8B Instruct" +release_date = "2024-04-17" +last_updated = "2024-04-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 16384 + +[cost] +input = 0.03 +output = 0.06 diff --git 
a/providers/openrouter/models/meta-llama/llama-3.1-405b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.1-405b-instruct.toml new file mode 100644 index 000000000..d2eee9977 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.1-405b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.1 405B Instruct" +release_date = "2024-07-22" +last_updated = "2024-07-22" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 130815 +input = 130815 +output = 4096 + +[cost] +input = 3.5 +output = 3.5 diff --git a/providers/openrouter/models/meta-llama/llama-3.1-405b.toml b/providers/openrouter/models/meta-llama/llama-3.1-405b.toml new file mode 100644 index 000000000..e14774960 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.1-405b.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.1 405B (base)" +release_date = "2024-08-01" +last_updated = "2024-08-01" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 4.0 +output = 4.0 diff --git a/providers/openrouter/models/meta-llama/llama-3.1-70b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.1-70b-instruct.toml new file mode 100644 index 000000000..8a2d5c347 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.1-70b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.1 70B Instruct" +release_date = "2024-07-22" +last_updated = "2024-07-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.39999999999999997 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/meta-llama/llama-3.1-8b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.1-8b-instruct.toml new file mode 100644 index 000000000..669a8833b --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.1-8b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.1 8B Instruct" +release_date = "2024-07-22" +last_updated = "2024-07-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 16384 + +[cost] +input = 0.02 +output = 0.03 diff --git a/providers/openrouter/models/meta-llama/llama-3.2-1b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.2-1b-instruct.toml new file mode 100644 index 000000000..e4f572f6b --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.2-1b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.2 1B Instruct" +release_date = "2024-09-24" +last_updated = "2024-09-24" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 60000 +input = 60000 +output = 4096 + +[cost] +input = 0.027 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/meta-llama/llama-3.2-3b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.2-3b-instruct.toml new file 
mode 100644 index 000000000..f3256e634 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.2-3b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.2 3B Instruct" +release_date = "2024-09-24" +last_updated = "2024-09-24" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 16384 + +[cost] +input = 0.02 +output = 0.02 diff --git a/providers/openrouter/models/meta-llama/llama-3.2-3b-instruct:free.toml b/providers/openrouter/models/meta-llama/llama-3.2-3b-instruct:free.toml new file mode 100644 index 000000000..b9cf134d5 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.2-3b-instruct:free.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.2 3B Instruct (free)" +release_date = "2024-09-24" +last_updated = "2024-09-24" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/meta-llama/llama-3.2-90b-vision-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.2-90b-vision-instruct.toml new file mode 100644 index 000000000..0e886997b --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.2-90b-vision-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.2 90B Vision Instruct" +release_date = "2024-09-24" +last_updated = "2024-09-24" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 16384 + +[cost] +input = 0.35 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/meta-llama/llama-3.3-70b-instruct.toml b/providers/openrouter/models/meta-llama/llama-3.3-70b-instruct.toml new file mode 100644 index 000000000..99d5bd8a9 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-3.3-70b-instruct.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 3.3 70B Instruct" +release_date = "2024-12-06" +last_updated = "2024-12-06" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 120000 + +[cost] +input = 0.108 +output = 0.32 diff --git a/providers/openrouter/models/meta-llama/llama-4-maverick.toml b/providers/openrouter/models/meta-llama/llama-4-maverick.toml new file mode 100644 index 000000000..9ef6082cb --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-4-maverick.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama 4 Maverick" +release_date = "2025-04-05" +last_updated = "2025-04-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 16384 + +[cost] +input = 0.15 +output = 0.6 diff --git a/providers/openrouter/models/meta-llama/llama-4-scout.toml b/providers/openrouter/models/meta-llama/llama-4-scout.toml new file mode 100644 index 000000000..ee9e11ae4 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-4-scout.toml @@ -0,0 
+1,22 @@ +name = "Meta: Llama 4 Scout" +release_date = "2025-04-05" +last_updated = "2025-04-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 327680 +input = 327680 +output = 16384 + +[cost] +input = 0.08 +output = 0.3 diff --git a/providers/openrouter/models/meta-llama/llama-guard-2-8b.toml b/providers/openrouter/models/meta-llama/llama-guard-2-8b.toml new file mode 100644 index 000000000..3629e1d08 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-guard-2-8b.toml @@ -0,0 +1,22 @@ +name = "Meta: LlamaGuard 2 8B" +release_date = "2024-05-12" +last_updated = "2024-05-12" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/meta-llama/llama-guard-3-8b.toml b/providers/openrouter/models/meta-llama/llama-guard-3-8b.toml new file mode 100644 index 000000000..74ba114dd --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-guard-3-8b.toml @@ -0,0 +1,22 @@ +name = "Llama Guard 3 8B" +release_date = "2025-02-12" +last_updated = "2025-02-12" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.02 +output = 0.06 diff --git a/providers/openrouter/models/meta-llama/llama-guard-4-12b.toml b/providers/openrouter/models/meta-llama/llama-guard-4-12b.toml new file mode 100644 index 000000000..b9cdf7f11 --- /dev/null +++ b/providers/openrouter/models/meta-llama/llama-guard-4-12b.toml @@ -0,0 +1,22 @@ +name = "Meta: Llama Guard 4 12B" +release_date = "2025-04-29" +last_updated = "2025-04-29" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 4096 + +[cost] +input = 0.18 +output = 0.18 diff --git a/providers/openrouter/models/microsoft/mai-ds-r1.toml b/providers/openrouter/models/microsoft/mai-ds-r1.toml new file mode 100644 index 000000000..391566221 --- /dev/null +++ b/providers/openrouter/models/microsoft/mai-ds-r1.toml @@ -0,0 +1,22 @@ +name = "Microsoft: MAI DS R1" +release_date = "2025-04-20" +last_updated = "2025-04-20" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 163840 +input = 163840 +output = 163840 + +[cost] +input = 0.3 +output = 1.2 diff --git a/providers/openrouter/models/microsoft/phi-3-medium-128k-instruct.toml b/providers/openrouter/models/microsoft/phi-3-medium-128k-instruct.toml new file mode 100644 index 000000000..2864d5261 --- /dev/null +++ b/providers/openrouter/models/microsoft/phi-3-medium-128k-instruct.toml @@ -0,0 +1,22 @@ +name = "Microsoft: Phi-3 Medium 128K Instruct" +release_date = "2024-05-23" +last_updated = "2024-05-23" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false 
+ +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 1.0 +output = 1.0 diff --git a/providers/openrouter/models/microsoft/phi-3-mini-128k-instruct.toml b/providers/openrouter/models/microsoft/phi-3-mini-128k-instruct.toml new file mode 100644 index 000000000..1f3b07c65 --- /dev/null +++ b/providers/openrouter/models/microsoft/phi-3-mini-128k-instruct.toml @@ -0,0 +1,22 @@ +name = "Microsoft: Phi-3 Mini 128K Instruct" +release_date = "2024-05-25" +last_updated = "2024-05-25" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/microsoft/phi-3.5-mini-128k-instruct.toml b/providers/openrouter/models/microsoft/phi-3.5-mini-128k-instruct.toml new file mode 100644 index 000000000..5949546bb --- /dev/null +++ b/providers/openrouter/models/microsoft/phi-3.5-mini-128k-instruct.toml @@ -0,0 +1,22 @@ +name = "Microsoft: Phi-3.5 Mini 128K Instruct" +release_date = "2024-08-20" +last_updated = "2024-08-20" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/microsoft/phi-4-multimodal-instruct.toml b/providers/openrouter/models/microsoft/phi-4-multimodal-instruct.toml new file mode 100644 index 000000000..dc22c6841 --- /dev/null +++ b/providers/openrouter/models/microsoft/phi-4-multimodal-instruct.toml @@ -0,0 +1,22 @@ +name = "Microsoft: Phi 4 Multimodal Instruct" +release_date = "2025-03-07" +last_updated = "2025-03-07" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.049999999999999996 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/microsoft/phi-4-reasoning-plus.toml b/providers/openrouter/models/microsoft/phi-4-reasoning-plus.toml new file mode 100644 index 000000000..8f92f50bd --- /dev/null +++ b/providers/openrouter/models/microsoft/phi-4-reasoning-plus.toml @@ -0,0 +1,22 @@ +name = "Microsoft: Phi 4 Reasoning Plus" +release_date = "2025-05-01" +last_updated = "2025-05-01" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.07 +output = 0.35 diff --git a/providers/openrouter/models/microsoft/phi-4.toml b/providers/openrouter/models/microsoft/phi-4.toml new file mode 100644 index 000000000..c2b1aa73b --- /dev/null +++ b/providers/openrouter/models/microsoft/phi-4.toml @@ -0,0 +1,22 @@ +name = "Microsoft: Phi 4" +release_date = "2025-01-10" +last_updated = "2025-01-10" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 16384 +input = 
16384 +output = 4096 + +[cost] +input = 0.06 +output = 0.14 diff --git a/providers/openrouter/models/microsoft/wizardlm-2-8x22b.toml b/providers/openrouter/models/microsoft/wizardlm-2-8x22b.toml new file mode 100644 index 000000000..9ec41891d --- /dev/null +++ b/providers/openrouter/models/microsoft/wizardlm-2-8x22b.toml @@ -0,0 +1,22 @@ +name = "WizardLM-2 8x22B" +release_date = "2024-04-15" +last_updated = "2024-04-15" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 16384 + +[cost] +input = 0.48 +output = 0.48 diff --git a/providers/openrouter/models/minimax/minimax-01.toml b/providers/openrouter/models/minimax/minimax-01.toml new file mode 100644 index 000000000..ff45eec11 --- /dev/null +++ b/providers/openrouter/models/minimax/minimax-01.toml @@ -0,0 +1,22 @@ +name = "MiniMax: MiniMax-01" +release_date = "2025-01-14" +last_updated = "2025-01-14" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 1000192 +input = 1000192 +output = 1000192 + +[cost] +input = 0.19999999999999998 +output = 1.1 diff --git a/providers/openrouter/models/minimax/minimax-m1.toml b/providers/openrouter/models/minimax/minimax-m1.toml new file mode 100644 index 000000000..41bac6cee --- /dev/null +++ b/providers/openrouter/models/minimax/minimax-m1.toml @@ -0,0 +1,22 @@ +name = "MiniMax: MiniMax M1" +release_date = "2025-06-17" +last_updated = "2025-06-17" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 40000 + +[cost] +input = 0.39999999999999997 +output = 2.2 diff --git a/providers/openrouter/models/minimax/minimax-m2.toml b/providers/openrouter/models/minimax/minimax-m2.toml new file mode 100644 index 000000000..4e664d020 --- /dev/null +++ b/providers/openrouter/models/minimax/minimax-m2.toml @@ -0,0 +1,22 @@ +name = "MiniMax: MiniMax M2" +release_date = "2025-10-23" +last_updated = "2025-10-23" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.254 +output = 1.02 diff --git a/providers/openrouter/models/mistralai/codestral-2508.toml b/providers/openrouter/models/mistralai/codestral-2508.toml new file mode 100644 index 000000000..4a62bd5ff --- /dev/null +++ b/providers/openrouter/models/mistralai/codestral-2508.toml @@ -0,0 +1,22 @@ +name = "Mistral: Codestral 2508" +release_date = "2025-08-01" +last_updated = "2025-08-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 4096 + +[cost] +input = 0.3 +output = 0.8999999999999999 diff --git a/providers/openrouter/models/mistralai/devstral-2512.toml b/providers/openrouter/models/mistralai/devstral-2512.toml new file mode 100644 index 000000000..78a5ef1f3 --- /dev/null +++ b/providers/openrouter/models/mistralai/devstral-2512.toml @@ -0,0 +1,22 
@@ +name = "Mistral: Devstral 2 2512" +release_date = "2025-12-09" +last_updated = "2025-12-09" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 65536 + +[cost] +input = 0.15 +output = 0.6 diff --git a/providers/openrouter/models/mistralai/devstral-2512:free.toml b/providers/openrouter/models/mistralai/devstral-2512:free.toml new file mode 100644 index 000000000..e8fe3f0bc --- /dev/null +++ b/providers/openrouter/models/mistralai/devstral-2512:free.toml @@ -0,0 +1,22 @@ +name = "Mistral: Devstral 2 2512 (free)" +release_date = "2025-12-09" +last_updated = "2025-12-09" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/mistralai/devstral-medium.toml b/providers/openrouter/models/mistralai/devstral-medium.toml new file mode 100644 index 000000000..92fa904e8 --- /dev/null +++ b/providers/openrouter/models/mistralai/devstral-medium.toml @@ -0,0 +1,22 @@ +name = "Mistral: Devstral Medium" +release_date = "2025-07-10" +last_updated = "2025-07-10" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.39999999999999997 +output = 2.0 diff --git a/providers/openrouter/models/mistralai/devstral-small.toml b/providers/openrouter/models/mistralai/devstral-small.toml new file mode 100644 index 000000000..53acf4d42 --- /dev/null +++ b/providers/openrouter/models/mistralai/devstral-small.toml @@ -0,0 +1,22 @@ +name = "Mistral: Devstral Small 1.1" +release_date = "2025-07-10" +last_updated = "2025-07-10" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 0.07 +output = 0.28 diff --git a/providers/openrouter/models/mistralai/ministral-14b-2512.toml b/providers/openrouter/models/mistralai/ministral-14b-2512.toml new file mode 100644 index 000000000..3f1ceea6b --- /dev/null +++ b/providers/openrouter/models/mistralai/ministral-14b-2512.toml @@ -0,0 +1,22 @@ +name = "Mistral: Ministral 3 14B 2512" +release_date = "2025-12-02" +last_updated = "2025-12-02" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/mistralai/ministral-3b-2512.toml b/providers/openrouter/models/mistralai/ministral-3b-2512.toml new file mode 100644 index 000000000..ec218482f --- /dev/null +++ b/providers/openrouter/models/mistralai/ministral-3b-2512.toml @@ -0,0 +1,22 @@ +name = "Mistral: Ministral 3 3B 2512" +release_date = "2025-12-02" +last_updated = "2025-12-02" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment 
= true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/mistralai/ministral-3b.toml b/providers/openrouter/models/mistralai/ministral-3b.toml new file mode 100644 index 000000000..8d3c32527 --- /dev/null +++ b/providers/openrouter/models/mistralai/ministral-3b.toml @@ -0,0 +1,22 @@ +name = "Mistral: Ministral 3B" +release_date = "2024-10-16" +last_updated = "2024-10-16" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.04 +output = 0.04 diff --git a/providers/openrouter/models/mistralai/ministral-8b-2512.toml b/providers/openrouter/models/mistralai/ministral-8b-2512.toml new file mode 100644 index 000000000..1461cc9f8 --- /dev/null +++ b/providers/openrouter/models/mistralai/ministral-8b-2512.toml @@ -0,0 +1,22 @@ +name = "Mistral: Ministral 3 8B 2512" +release_date = "2025-12-02" +last_updated = "2025-12-02" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.15 +output = 0.15 diff --git a/providers/openrouter/models/mistralai/ministral-8b.toml b/providers/openrouter/models/mistralai/ministral-8b.toml new file mode 100644 index 000000000..d4ab3d451 --- /dev/null +++ b/providers/openrouter/models/mistralai/ministral-8b.toml @@ -0,0 +1,22 @@ +name = "Mistral: Ministral 8B" +release_date = "2024-10-16" +last_updated = "2024-10-16" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.1.toml b/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.1.toml new file mode 100644 index 000000000..e44bef06d --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.1.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral 7B Instruct v0.1" +release_date = "2023-09-27" +last_updated = "2023-09-27" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 2824 +input = 2824 +output = 4096 + +[cost] +input = 0.11 +output = 0.19 diff --git a/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.2.toml b/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.2.toml new file mode 100644 index 000000000..df8d489d8 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.2.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral 7B Instruct v0.2" +release_date = "2023-12-27" +last_updated = "2023-12-27" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.19999999999999998 
+output = 0.19999999999999998 diff --git a/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.3.toml b/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.3.toml new file mode 100644 index 000000000..7add970b7 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-7b-instruct-v0.3.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral 7B Instruct v0.3" +release_date = "2024-05-26" +last_updated = "2024-05-26" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/mistralai/mistral-7b-instruct.toml b/providers/openrouter/models/mistralai/mistral-7b-instruct.toml new file mode 100644 index 000000000..cc9e75a94 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-7b-instruct.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral 7B Instruct" +release_date = "2024-05-26" +last_updated = "2024-05-26" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 16384 + +[cost] +input = 0.028 +output = 0.054 diff --git a/providers/openrouter/models/mistralai/mistral-large-2407.toml b/providers/openrouter/models/mistralai/mistral-large-2407.toml new file mode 100644 index 000000000..37333eb32 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-large-2407.toml @@ -0,0 +1,22 @@ +name = "Mistral Large 2407" +release_date = "2024-11-18" +last_updated = "2024-11-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 2.0 +output = 6.0 diff --git a/providers/openrouter/models/mistralai/mistral-large-2411.toml b/providers/openrouter/models/mistralai/mistral-large-2411.toml new file mode 100644 index 000000000..8f578d66a --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-large-2411.toml @@ -0,0 +1,22 @@ +name = "Mistral Large 2411" +release_date = "2024-11-18" +last_updated = "2024-11-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 2.0 +output = 6.0 diff --git a/providers/openrouter/models/mistralai/mistral-large-2512.toml b/providers/openrouter/models/mistralai/mistral-large-2512.toml new file mode 100644 index 000000000..1db99b190 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-large-2512.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral Large 3 2512" +release_date = "2025-12-01" +last_updated = "2025-12-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.5 +output = 1.5 diff --git a/providers/openrouter/models/mistralai/mistral-large.toml b/providers/openrouter/models/mistralai/mistral-large.toml new file mode 100644 index 
000000000..b8f7858bf --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-large.toml @@ -0,0 +1,22 @@ +name = "Mistral Large" +release_date = "2024-02-25" +last_updated = "2024-02-25" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 2.0 +output = 6.0 diff --git a/providers/openrouter/models/mistralai/mistral-medium-3.1.toml b/providers/openrouter/models/mistralai/mistral-medium-3.1.toml new file mode 100644 index 000000000..e17cb5e91 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-medium-3.1.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral Medium 3.1" +release_date = "2025-08-13" +last_updated = "2025-08-13" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.39999999999999997 +output = 2.0 diff --git a/providers/openrouter/models/mistralai/mistral-medium-3.toml b/providers/openrouter/models/mistralai/mistral-medium-3.toml new file mode 100644 index 000000000..7f9ad7dde --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-medium-3.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral Medium 3" +release_date = "2025-05-07" +last_updated = "2025-05-07" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.39999999999999997 +output = 2.0 diff --git a/providers/openrouter/models/mistralai/mistral-nemo.toml b/providers/openrouter/models/mistralai/mistral-nemo.toml new file mode 100644 index 000000000..c6c6ccfdb --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-nemo.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral Nemo" +release_date = "2024-07-18" +last_updated = "2024-07-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 16384 + +[cost] +input = 0.02 +output = 0.04 diff --git a/providers/openrouter/models/mistralai/mistral-saba.toml b/providers/openrouter/models/mistralai/mistral-saba.toml new file mode 100644 index 000000000..206075e3b --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-saba.toml @@ -0,0 +1,22 @@ +name = "Mistral: Saba" +release_date = "2025-02-17" +last_updated = "2025-02-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 0.6 diff --git a/providers/openrouter/models/mistralai/mistral-small-24b-instruct-2501.toml b/providers/openrouter/models/mistralai/mistral-small-24b-instruct-2501.toml new file mode 100644 index 000000000..47cffbd3e --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-small-24b-instruct-2501.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral Small 3" +release_date = "2025-01-30" +last_updated = "2025-01-30" +open_weights = 
false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.03 +output = 0.11 diff --git a/providers/openrouter/models/mistralai/mistral-small-3.1-24b-instruct:free.toml b/providers/openrouter/models/mistralai/mistral-small-3.1-24b-instruct:free.toml new file mode 100644 index 000000000..d9e3aded6 --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-small-3.1-24b-instruct:free.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mistral Small 3.1 24B (free)" +release_date = "2025-03-17" +last_updated = "2025-03-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/mistralai/mistral-tiny.toml b/providers/openrouter/models/mistralai/mistral-tiny.toml new file mode 100644 index 000000000..bdb47fe3d --- /dev/null +++ b/providers/openrouter/models/mistralai/mistral-tiny.toml @@ -0,0 +1,22 @@ +name = "Mistral Tiny" +release_date = "2024-01-09" +last_updated = "2024-01-09" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.25 +output = 0.25 diff --git a/providers/openrouter/models/mistralai/mixtral-8x22b-instruct.toml b/providers/openrouter/models/mistralai/mixtral-8x22b-instruct.toml new file mode 100644 index 000000000..29b0f49f0 --- /dev/null +++ b/providers/openrouter/models/mistralai/mixtral-8x22b-instruct.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mixtral 8x22B Instruct" +release_date = "2024-04-16" +last_updated = "2024-04-16" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 4096 + +[cost] +input = 2.0 +output = 6.0 diff --git a/providers/openrouter/models/mistralai/mixtral-8x7b-instruct.toml b/providers/openrouter/models/mistralai/mixtral-8x7b-instruct.toml new file mode 100644 index 000000000..120b8584f --- /dev/null +++ b/providers/openrouter/models/mistralai/mixtral-8x7b-instruct.toml @@ -0,0 +1,22 @@ +name = "Mistral: Mixtral 8x7B Instruct" +release_date = "2023-12-09" +last_updated = "2023-12-09" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 16384 + +[cost] +input = 0.54 +output = 0.54 diff --git a/providers/openrouter/models/mistralai/pixtral-12b.toml b/providers/openrouter/models/mistralai/pixtral-12b.toml new file mode 100644 index 000000000..6daac177e --- /dev/null +++ b/providers/openrouter/models/mistralai/pixtral-12b.toml @@ -0,0 +1,22 @@ +name = "Mistral: Pixtral 12B" +release_date = "2024-09-09" +last_updated = "2024-09-09" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 
+output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.09999999999999999 diff --git a/providers/openrouter/models/mistralai/pixtral-large-2411.toml b/providers/openrouter/models/mistralai/pixtral-large-2411.toml new file mode 100644 index 000000000..9c34b92ab --- /dev/null +++ b/providers/openrouter/models/mistralai/pixtral-large-2411.toml @@ -0,0 +1,22 @@ +name = "Mistral: Pixtral Large 2411" +release_date = "2024-11-18" +last_updated = "2024-11-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 2.0 +output = 6.0 diff --git a/providers/openrouter/models/mistralai/voxtral-small-24b-2507.toml b/providers/openrouter/models/mistralai/voxtral-small-24b-2507.toml new file mode 100644 index 000000000..a9747eeb7 --- /dev/null +++ b/providers/openrouter/models/mistralai/voxtral-small-24b-2507.toml @@ -0,0 +1,22 @@ +name = "Mistral: Voxtral Small 24B 2507" +release_date = "2025-10-30" +last_updated = "2025-10-30" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text", "audio",] +output = [ "text",] + +[limit] +context = 32000 +input = 32000 +output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.3 diff --git a/providers/openrouter/models/moonshotai/kimi-dev-72b.toml b/providers/openrouter/models/moonshotai/kimi-dev-72b.toml new file mode 100644 index 000000000..9098f24f3 --- /dev/null +++ b/providers/openrouter/models/moonshotai/kimi-dev-72b.toml @@ -0,0 +1,22 @@ +name = "MoonshotAI: Kimi Dev 72B" +release_date = "2025-06-16" +last_updated = "2025-06-16" +open_weights = true +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.29 +output = 1.15 diff --git a/providers/openrouter/models/moonshotai/kimi-k2-0905.toml b/providers/openrouter/models/moonshotai/kimi-k2-0905.toml new file mode 100644 index 000000000..dbabbfd33 --- /dev/null +++ b/providers/openrouter/models/moonshotai/kimi-k2-0905.toml @@ -0,0 +1,22 @@ +name = "MoonshotAI: Kimi K2 0905" +release_date = "2025-09-04" +last_updated = "2025-09-04" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.39 +output = 1.9 diff --git a/providers/openrouter/models/moonshotai/kimi-k2-0905:exacto.toml b/providers/openrouter/models/moonshotai/kimi-k2-0905:exacto.toml new file mode 100644 index 000000000..89902dc8f --- /dev/null +++ b/providers/openrouter/models/moonshotai/kimi-k2-0905:exacto.toml @@ -0,0 +1,22 @@ +name = "MoonshotAI: Kimi K2 0905 (exacto)" +release_date = "2025-09-04" +last_updated = "2025-09-04" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.6 +output = 2.5 diff --git a/providers/openrouter/models/moonshotai/kimi-k2-thinking.toml b/providers/openrouter/models/moonshotai/kimi-k2-thinking.toml 
new file mode 100644 index 000000000..7b3b324e7 --- /dev/null +++ b/providers/openrouter/models/moonshotai/kimi-k2-thinking.toml @@ -0,0 +1,22 @@ +name = "MoonshotAI: Kimi K2 Thinking" +release_date = "2025-11-06" +last_updated = "2025-11-06" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 16384 + +[cost] +input = 0.44999999999999996 +output = 2.35 diff --git a/providers/openrouter/models/moonshotai/kimi-k2:free.toml b/providers/openrouter/models/moonshotai/kimi-k2:free.toml new file mode 100644 index 000000000..ff560d920 --- /dev/null +++ b/providers/openrouter/models/moonshotai/kimi-k2:free.toml @@ -0,0 +1,22 @@ +name = "MoonshotAI: Kimi K2 0711 (free)" +release_date = "2025-07-11" +last_updated = "2025-07-11" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/moonshotai/kimi-linear-48b-a3b-instruct.toml b/providers/openrouter/models/moonshotai/kimi-linear-48b-a3b-instruct.toml new file mode 100644 index 000000000..c1dece4c6 --- /dev/null +++ b/providers/openrouter/models/moonshotai/kimi-linear-48b-a3b-instruct.toml @@ -0,0 +1,22 @@ +name = "MoonshotAI: Kimi Linear 48B A3B Instruct" +release_date = "2025-11-07" +last_updated = "2025-11-07" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 1048576 +input = 1048576 +output = 1048576 + +[cost] +input = 0.7 +output = 0.8999999999999999 diff --git a/providers/openrouter/models/morph/morph-v3-fast.toml b/providers/openrouter/models/morph/morph-v3-fast.toml new file mode 100644 index 000000000..6536b7d3a --- /dev/null +++ b/providers/openrouter/models/morph/morph-v3-fast.toml @@ -0,0 +1,22 @@ +name = "Morph: Morph V3 Fast" +release_date = "2025-07-07" +last_updated = "2025-07-07" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 81920 +input = 81920 +output = 38000 + +[cost] +input = 0.7999999999999999 +output = 1.2 diff --git a/providers/openrouter/models/morph/morph-v3-large.toml b/providers/openrouter/models/morph/morph-v3-large.toml new file mode 100644 index 000000000..e7e117b34 --- /dev/null +++ b/providers/openrouter/models/morph/morph-v3-large.toml @@ -0,0 +1,22 @@ +name = "Morph: Morph V3 Large" +release_date = "2025-07-07" +last_updated = "2025-07-07" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 131072 + +[cost] +input = 0.8999999999999999 +output = 1.9 diff --git a/providers/openrouter/models/neversleep/llama-3.1-lumimaid-8b.toml b/providers/openrouter/models/neversleep/llama-3.1-lumimaid-8b.toml new file mode 100644 index 000000000..81de4c3d5 --- /dev/null +++ b/providers/openrouter/models/neversleep/llama-3.1-lumimaid-8b.toml @@ -0,0 +1,22 @@ +name = "NeverSleep: Lumimaid v0.2 8B" 
+release_date = "2024-09-14" +last_updated = "2024-09-14" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.09 +output = 0.6 diff --git a/providers/openrouter/models/neversleep/noromaid-20b.toml b/providers/openrouter/models/neversleep/noromaid-20b.toml new file mode 100644 index 000000000..06e240723 --- /dev/null +++ b/providers/openrouter/models/neversleep/noromaid-20b.toml @@ -0,0 +1,22 @@ +name = "Noromaid 20B" +release_date = "2023-11-25" +last_updated = "2023-11-25" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 4096 +input = 4096 +output = 4096 + +[cost] +input = 1.0 +output = 1.75 diff --git a/providers/openrouter/models/nex-agi/deepseek-v3.1-nex-n1:free.toml b/providers/openrouter/models/nex-agi/deepseek-v3.1-nex-n1:free.toml new file mode 100644 index 000000000..3f9a939ea --- /dev/null +++ b/providers/openrouter/models/nex-agi/deepseek-v3.1-nex-n1:free.toml @@ -0,0 +1,22 @@ +name = "Nex AGI: DeepSeek V3.1 Nex N1 (free)" +release_date = "2025-12-08" +last_updated = "2025-12-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 163840 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/nousresearch/deephermes-3-mistral-24b-preview.toml b/providers/openrouter/models/nousresearch/deephermes-3-mistral-24b-preview.toml new file mode 100644 index 000000000..269042a89 --- /dev/null +++ b/providers/openrouter/models/nousresearch/deephermes-3-mistral-24b-preview.toml @@ -0,0 +1,22 @@ +name = "Nous: DeepHermes 3 Mistral 24B Preview" +release_date = "2025-05-09" +last_updated = "2025-05-09" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.049999999999999996 +output = 0.19999999999999998 diff --git a/providers/openrouter/models/nousresearch/hermes-2-pro-llama-3-8b.toml b/providers/openrouter/models/nousresearch/hermes-2-pro-llama-3-8b.toml new file mode 100644 index 000000000..e6b1d2a29 --- /dev/null +++ b/providers/openrouter/models/nousresearch/hermes-2-pro-llama-3-8b.toml @@ -0,0 +1,22 @@ +name = "NousResearch: Hermes 2 Pro - Llama-3 8B" +release_date = "2024-05-26" +last_updated = "2024-05-26" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 2048 + +[cost] +input = 0.024999999999999998 +output = 0.08 diff --git a/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b.toml b/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b.toml new file mode 100644 index 000000000..b82cc9d75 --- /dev/null +++ b/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b.toml @@ -0,0 +1,22 @@ +name = "Nous: Hermes 3 405B Instruct" +release_date = "2024-08-15" +last_updated = "2024-08-15" +open_weights = false 
+tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 16384 + +[cost] +input = 1.0 +output = 1.0 diff --git a/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b:free.toml b/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b:free.toml new file mode 100644 index 000000000..f2f48f105 --- /dev/null +++ b/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-405b:free.toml @@ -0,0 +1,22 @@ +name = "Nous: Hermes 3 405B Instruct (free)" +release_date = "2024-08-15" +last_updated = "2024-08-15" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-70b.toml b/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-70b.toml new file mode 100644 index 000000000..abee5337e --- /dev/null +++ b/providers/openrouter/models/nousresearch/hermes-3-llama-3.1-70b.toml @@ -0,0 +1,22 @@ +name = "Nous: Hermes 3 70B Instruct" +release_date = "2024-08-17" +last_updated = "2024-08-17" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 4096 + +[cost] +input = 0.3 +output = 0.3 diff --git a/providers/openrouter/models/nousresearch/hermes-4-405b.toml b/providers/openrouter/models/nousresearch/hermes-4-405b.toml new file mode 100644 index 000000000..f10fa3764 --- /dev/null +++ b/providers/openrouter/models/nousresearch/hermes-4-405b.toml @@ -0,0 +1,22 @@ +name = "Nous: Hermes 4 405B" +release_date = "2025-08-26" +last_updated = "2025-08-26" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.3 +output = 1.2 diff --git a/providers/openrouter/models/nousresearch/hermes-4-70b.toml b/providers/openrouter/models/nousresearch/hermes-4-70b.toml new file mode 100644 index 000000000..b096dfc6a --- /dev/null +++ b/providers/openrouter/models/nousresearch/hermes-4-70b.toml @@ -0,0 +1,22 @@ +name = "Nous: Hermes 4 70B" +release_date = "2025-08-26" +last_updated = "2025-08-26" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.11 +output = 0.38 diff --git a/providers/openrouter/models/nvidia/llama-3.1-nemotron-70b-instruct.toml b/providers/openrouter/models/nvidia/llama-3.1-nemotron-70b-instruct.toml new file mode 100644 index 000000000..2d9e340a9 --- /dev/null +++ b/providers/openrouter/models/nvidia/llama-3.1-nemotron-70b-instruct.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Llama 3.1 Nemotron 70B Instruct" +release_date = "2024-10-14" +last_updated = "2024-10-14" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + 
+[limit] +context = 131072 +input = 131072 +output = 16384 + +[cost] +input = 1.2 +output = 1.2 diff --git a/providers/openrouter/models/nvidia/llama-3.1-nemotron-ultra-253b-v1.toml b/providers/openrouter/models/nvidia/llama-3.1-nemotron-ultra-253b-v1.toml new file mode 100644 index 000000000..8e8f9ff57 --- /dev/null +++ b/providers/openrouter/models/nvidia/llama-3.1-nemotron-ultra-253b-v1.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Llama 3.1 Nemotron Ultra 253B v1" +release_date = "2025-04-08" +last_updated = "2025-04-08" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.6 +output = 1.7999999999999998 diff --git a/providers/openrouter/models/nvidia/llama-3.3-nemotron-super-49b-v1.5.toml b/providers/openrouter/models/nvidia/llama-3.3-nemotron-super-49b-v1.5.toml new file mode 100644 index 000000000..8b66161e6 --- /dev/null +++ b/providers/openrouter/models/nvidia/llama-3.3-nemotron-super-49b-v1.5.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5" +release_date = "2025-10-10" +last_updated = "2025-10-10" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.09999999999999999 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl.toml b/providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl.toml new file mode 100644 index 000000000..ae31dcf81 --- /dev/null +++ b/providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Nemotron Nano 12B 2 VL" +release_date = "2025-10-28" +last_updated = "2025-10-28" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "video",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 0.6 diff --git a/providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl:free.toml b/providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl:free.toml new file mode 100644 index 000000000..8da981a6b --- /dev/null +++ b/providers/openrouter/models/nvidia/nemotron-nano-12b-v2-vl:free.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Nemotron Nano 12B 2 VL (free)" +release_date = "2025-10-28" +last_updated = "2025-10-28" +open_weights = false +tool_call = true +structured_output = false +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "video",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 128000 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/nvidia/nemotron-nano-9b-v2.toml b/providers/openrouter/models/nvidia/nemotron-nano-9b-v2.toml new file mode 100644 index 000000000..98388ed62 --- /dev/null +++ b/providers/openrouter/models/nvidia/nemotron-nano-9b-v2.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Nemotron Nano 9B V2" +release_date = "2025-09-05" +last_updated = "2025-09-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 
131072 +input = 131072 +output = 4096 + +[cost] +input = 0.04 +output = 0.16 diff --git a/providers/openrouter/models/nvidia/nemotron-nano-9b-v2:free.toml b/providers/openrouter/models/nvidia/nemotron-nano-9b-v2:free.toml new file mode 100644 index 000000000..a87ce755c --- /dev/null +++ b/providers/openrouter/models/nvidia/nemotron-nano-9b-v2:free.toml @@ -0,0 +1,22 @@ +name = "NVIDIA: Nemotron Nano 9B V2 (free)" +release_date = "2025-09-05" +last_updated = "2025-09-05" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/openai/chatgpt-4o-latest.toml b/providers/openrouter/models/openai/chatgpt-4o-latest.toml new file mode 100644 index 000000000..5084c3c39 --- /dev/null +++ b/providers/openrouter/models/openai/chatgpt-4o-latest.toml @@ -0,0 +1,22 @@ +name = "OpenAI: ChatGPT-4o" +release_date = "2024-08-13" +last_updated = "2024-08-13" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 5.0 +output = 15.0 diff --git a/providers/openrouter/models/openai/codex-mini.toml b/providers/openrouter/models/openai/codex-mini.toml new file mode 100644 index 000000000..208397a7b --- /dev/null +++ b/providers/openrouter/models/openai/codex-mini.toml @@ -0,0 +1,22 @@ +name = "OpenAI: Codex Mini" +release_date = "2025-05-16" +last_updated = "2025-05-16" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 1.5 +output = 6.0 diff --git a/providers/openrouter/models/openai/gpt-3.5-turbo-0613.toml b/providers/openrouter/models/openai/gpt-3.5-turbo-0613.toml new file mode 100644 index 000000000..d028c6ed7 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-3.5-turbo-0613.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-3.5 Turbo (older v0613)" +release_date = "2024-01-24" +last_updated = "2024-01-24" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 4095 +input = 4095 +output = 4096 + +[cost] +input = 1.0 +output = 2.0 diff --git a/providers/openrouter/models/openai/gpt-3.5-turbo-16k.toml b/providers/openrouter/models/openai/gpt-3.5-turbo-16k.toml new file mode 100644 index 000000000..662934478 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-3.5-turbo-16k.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-3.5 Turbo 16k" +release_date = "2023-08-27" +last_updated = "2023-08-27" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 16385 +input = 16385 +output = 4096 + +[cost] +input = 3.0 +output = 4.0 diff --git a/providers/openrouter/models/openai/gpt-3.5-turbo-instruct.toml b/providers/openrouter/models/openai/gpt-3.5-turbo-instruct.toml new file mode 100644 index 000000000..bd664e192 --- /dev/null +++ 
b/providers/openrouter/models/openai/gpt-3.5-turbo-instruct.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-3.5 Turbo Instruct" +release_date = "2023-09-27" +last_updated = "2023-09-27" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 4095 +input = 4095 +output = 4096 + +[cost] +input = 1.5 +output = 2.0 diff --git a/providers/openrouter/models/openai/gpt-3.5-turbo.toml b/providers/openrouter/models/openai/gpt-3.5-turbo.toml new file mode 100644 index 000000000..f196d034a --- /dev/null +++ b/providers/openrouter/models/openai/gpt-3.5-turbo.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-3.5 Turbo" +release_date = "2023-05-27" +last_updated = "2023-05-27" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 16385 +input = 16385 +output = 4096 + +[cost] +input = 0.5 +output = 1.5 diff --git a/providers/openrouter/models/openai/gpt-4-0314.toml b/providers/openrouter/models/openai/gpt-4-0314.toml new file mode 100644 index 000000000..145f3cc6c --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4-0314.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4 (older v0314)" +release_date = "2023-05-27" +last_updated = "2023-05-27" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8191 +input = 8191 +output = 4096 + +[cost] +input = 30.0 +output = 60.0 diff --git a/providers/openrouter/models/openai/gpt-4-1106-preview.toml b/providers/openrouter/models/openai/gpt-4-1106-preview.toml new file mode 100644 index 000000000..394d650d4 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4-1106-preview.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4 Turbo (older v1106)" +release_date = "2023-11-05" +last_updated = "2023-11-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 10.0 +output = 30.0 diff --git a/providers/openrouter/models/openai/gpt-4-turbo-preview.toml b/providers/openrouter/models/openai/gpt-4-turbo-preview.toml new file mode 100644 index 000000000..cda22ca6b --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4-turbo-preview.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4 Turbo Preview" +release_date = "2024-01-24" +last_updated = "2024-01-24" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 10.0 +output = 30.0 diff --git a/providers/openrouter/models/openai/gpt-4-turbo.toml b/providers/openrouter/models/openai/gpt-4-turbo.toml new file mode 100644 index 000000000..1a24a14aa --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4-turbo.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4 Turbo" +release_date = "2024-04-08" +last_updated = "2024-04-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ 
"text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 10.0 +output = 30.0 diff --git a/providers/openrouter/models/openai/gpt-4.1-nano.toml b/providers/openrouter/models/openai/gpt-4.1-nano.toml new file mode 100644 index 000000000..e62acf9f5 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4.1-nano.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4.1 Nano" +release_date = "2025-04-14" +last_updated = "2025-04-14" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 1047576 +input = 1047576 +output = 32768 + +[cost] +input = 0.09999999999999999 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/openai/gpt-4.toml b/providers/openrouter/models/openai/gpt-4.toml new file mode 100644 index 000000000..aa48c7e38 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4" +release_date = "2023-05-27" +last_updated = "2023-05-27" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8191 +input = 8191 +output = 4096 + +[cost] +input = 30.0 +output = 60.0 diff --git a/providers/openrouter/models/openai/gpt-4o-2024-05-13.toml b/providers/openrouter/models/openai/gpt-4o-2024-05-13.toml new file mode 100644 index 000000000..e621329f9 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-2024-05-13.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o (2024-05-13)" +release_date = "2024-05-12" +last_updated = "2024-05-12" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 5.0 +output = 15.0 diff --git a/providers/openrouter/models/openai/gpt-4o-2024-08-06.toml b/providers/openrouter/models/openai/gpt-4o-2024-08-06.toml new file mode 100644 index 000000000..cdbb34eb0 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-2024-08-06.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o (2024-08-06)" +release_date = "2024-08-05" +last_updated = "2024-08-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-4o-2024-11-20.toml b/providers/openrouter/models/openai/gpt-4o-2024-11-20.toml new file mode 100644 index 000000000..9f53703a9 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-2024-11-20.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o (2024-11-20)" +release_date = "2024-11-20" +last_updated = "2024-11-20" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-4o-audio-preview.toml b/providers/openrouter/models/openai/gpt-4o-audio-preview.toml new file mode 100644 index 
000000000..d363dc95b --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-audio-preview.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o Audio" +release_date = "2025-08-15" +last_updated = "2025-08-15" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "audio", "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-4o-mini-2024-07-18.toml b/providers/openrouter/models/openai/gpt-4o-mini-2024-07-18.toml new file mode 100644 index 000000000..63cb87329 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-mini-2024-07-18.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o-mini (2024-07-18)" +release_date = "2024-07-17" +last_updated = "2024-07-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 0.15 +output = 0.6 diff --git a/providers/openrouter/models/openai/gpt-4o-mini-search-preview.toml b/providers/openrouter/models/openai/gpt-4o-mini-search-preview.toml new file mode 100644 index 000000000..6a38b06c0 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-mini-search-preview.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o-mini Search Preview" +release_date = "2025-03-12" +last_updated = "2025-03-12" +open_weights = false +tool_call = false +structured_output = true +temperature = false +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 0.15 +output = 0.6 diff --git a/providers/openrouter/models/openai/gpt-4o-search-preview.toml b/providers/openrouter/models/openai/gpt-4o-search-preview.toml new file mode 100644 index 000000000..d71e650e9 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o-search-preview.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o Search Preview" +release_date = "2025-03-12" +last_updated = "2025-03-12" +open_weights = false +tool_call = false +structured_output = true +temperature = false +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-4o.toml b/providers/openrouter/models/openai/gpt-4o.toml new file mode 100644 index 000000000..9a82fb620 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o" +release_date = "2024-05-12" +last_updated = "2024-05-12" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 2.5 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-4o:extended.toml b/providers/openrouter/models/openai/gpt-4o:extended.toml new file mode 100644 index 000000000..92de86be7 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-4o:extended.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-4o (extended)" +release_date = "2024-05-12" +last_updated = "2024-05-12" +open_weights = false +tool_call 
= true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 64000 + +[cost] +input = 6.0 +output = 18.0 diff --git a/providers/openrouter/models/openai/gpt-5-chat.toml b/providers/openrouter/models/openai/gpt-5-chat.toml new file mode 100644 index 000000000..d98b72f5e --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-chat.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Chat" +release_date = "2025-08-07" +last_updated = "2025-08-07" +open_weights = false +tool_call = false +structured_output = true +temperature = false +reasoning = false +attachment = true + +[modalities] +input = [ "file", "image", "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5-codex.toml b/providers/openrouter/models/openai/gpt-5-codex.toml new file mode 100644 index 000000000..1e43af33c --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-codex.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Codex" +release_date = "2025-09-23" +last_updated = "2025-09-23" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5-image-mini.toml b/providers/openrouter/models/openai/gpt-5-image-mini.toml new file mode 100644 index 000000000..d65d563d7 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-image-mini.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Image Mini" +release_date = "2025-10-16" +last_updated = "2025-10-16" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "file", "image", "text",] +output = [ "image", "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 2.5 +output = 2.0 diff --git a/providers/openrouter/models/openai/gpt-5-image.toml b/providers/openrouter/models/openai/gpt-5-image.toml new file mode 100644 index 000000000..93fd66034 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-image.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Image" +release_date = "2025-10-14" +last_updated = "2025-10-14" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "image", "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 10.0 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5-mini.toml b/providers/openrouter/models/openai/gpt-5-mini.toml new file mode 100644 index 000000000..3b7e78431 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-mini.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Mini" +release_date = "2025-08-07" +last_updated = "2025-08-07" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 0.25 +output = 2.0 diff --git 
a/providers/openrouter/models/openai/gpt-5-nano.toml b/providers/openrouter/models/openai/gpt-5-nano.toml new file mode 100644 index 000000000..a6104b316 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-nano.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Nano" +release_date = "2025-08-07" +last_updated = "2025-08-07" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 0.049999999999999996 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/openai/gpt-5-pro.toml b/providers/openrouter/models/openai/gpt-5-pro.toml new file mode 100644 index 000000000..2cbe04cb3 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5-pro.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5 Pro" +release_date = "2025-10-06" +last_updated = "2025-10-06" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 15.0 +output = 120.0 diff --git a/providers/openrouter/models/openai/gpt-5.1-chat.toml b/providers/openrouter/models/openai/gpt-5.1-chat.toml new file mode 100644 index 000000000..39d03c993 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5.1-chat.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5.1 Chat" +release_date = "2025-11-13" +last_updated = "2025-11-13" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "file", "image", "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 16384 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5.1-codex-max.toml b/providers/openrouter/models/openai/gpt-5.1-codex-max.toml new file mode 100644 index 000000000..2d03e5bba --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5.1-codex-max.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5.1-Codex-Max" +release_date = "2025-12-04" +last_updated = "2025-12-04" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5.1-codex-mini.toml b/providers/openrouter/models/openai/gpt-5.1-codex-mini.toml new file mode 100644 index 000000000..5c00a65a6 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5.1-codex-mini.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5.1-Codex-Mini" +release_date = "2025-11-13" +last_updated = "2025-11-13" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 100000 + +[cost] +input = 0.25 +output = 2.0 diff --git a/providers/openrouter/models/openai/gpt-5.1-codex.toml b/providers/openrouter/models/openai/gpt-5.1-codex.toml new file mode 100644 index 000000000..7c691d957 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5.1-codex.toml @@ -0,0 +1,22 @@ +name = "OpenAI: 
GPT-5.1-Codex" +release_date = "2025-11-13" +last_updated = "2025-11-13" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5.1.toml b/providers/openrouter/models/openai/gpt-5.1.toml new file mode 100644 index 000000000..7f1b29e1c --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5.1.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5.1" +release_date = "2025-11-13" +last_updated = "2025-11-13" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-5.toml b/providers/openrouter/models/openai/gpt-5.toml new file mode 100644 index 000000000..946c67340 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-5.toml @@ -0,0 +1,22 @@ +name = "OpenAI: GPT-5" +release_date = "2025-08-07" +last_updated = "2025-08-07" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 400000 +input = 400000 +output = 128000 + +[cost] +input = 1.25 +output = 10.0 diff --git a/providers/openrouter/models/openai/gpt-oss-120b.toml b/providers/openrouter/models/openai/gpt-oss-120b.toml new file mode 100644 index 000000000..d898c684f --- /dev/null +++ b/providers/openrouter/models/openai/gpt-oss-120b.toml @@ -0,0 +1,22 @@ +name = "OpenAI: gpt-oss-120b" +release_date = "2025-08-05" +last_updated = "2025-08-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.039 +output = 0.19 diff --git a/providers/openrouter/models/openai/gpt-oss-120b:exacto.toml b/providers/openrouter/models/openai/gpt-oss-120b:exacto.toml new file mode 100644 index 000000000..0a4e66f0a --- /dev/null +++ b/providers/openrouter/models/openai/gpt-oss-120b:exacto.toml @@ -0,0 +1,22 @@ +name = "OpenAI: gpt-oss-120b (exacto)" +release_date = "2025-08-05" +last_updated = "2025-08-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.039 +output = 0.19 diff --git a/providers/openrouter/models/openai/gpt-oss-120b:free.toml b/providers/openrouter/models/openai/gpt-oss-120b:free.toml new file mode 100644 index 000000000..71dcc366a --- /dev/null +++ b/providers/openrouter/models/openai/gpt-oss-120b:free.toml @@ -0,0 +1,22 @@ +name = "OpenAI: gpt-oss-120b (free)" +release_date = "2025-08-05" +last_updated = "2025-08-05" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff 
--git a/providers/openrouter/models/openai/gpt-oss-20b.toml b/providers/openrouter/models/openai/gpt-oss-20b.toml new file mode 100644 index 000000000..0d60ab0b7 --- /dev/null +++ b/providers/openrouter/models/openai/gpt-oss-20b.toml @@ -0,0 +1,22 @@ +name = "OpenAI: gpt-oss-20b" +release_date = "2025-08-05" +last_updated = "2025-08-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.03 +output = 0.14 diff --git a/providers/openrouter/models/openai/gpt-oss-20b:free.toml b/providers/openrouter/models/openai/gpt-oss-20b:free.toml new file mode 100644 index 000000000..2adf1b8df --- /dev/null +++ b/providers/openrouter/models/openai/gpt-oss-20b:free.toml @@ -0,0 +1,22 @@ +name = "OpenAI: gpt-oss-20b (free)" +release_date = "2025-08-05" +last_updated = "2025-08-05" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 128000 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/openai/gpt-oss-safeguard-20b.toml b/providers/openrouter/models/openai/gpt-oss-safeguard-20b.toml new file mode 100644 index 000000000..cfd7160db --- /dev/null +++ b/providers/openrouter/models/openai/gpt-oss-safeguard-20b.toml @@ -0,0 +1,22 @@ +name = "OpenAI: gpt-oss-safeguard-20b" +release_date = "2025-10-29" +last_updated = "2025-10-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 65536 + +[cost] +input = 0.075 +output = 0.3 diff --git a/providers/openrouter/models/openai/o1-pro.toml b/providers/openrouter/models/openai/o1-pro.toml new file mode 100644 index 000000000..f0cd73606 --- /dev/null +++ b/providers/openrouter/models/openai/o1-pro.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o1-pro" +release_date = "2025-03-19" +last_updated = "2025-03-19" +open_weights = false +tool_call = false +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 150.0 +output = 600.0 diff --git a/providers/openrouter/models/openai/o1.toml b/providers/openrouter/models/openai/o1.toml new file mode 100644 index 000000000..14f65bca0 --- /dev/null +++ b/providers/openrouter/models/openai/o1.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o1" +release_date = "2024-12-17" +last_updated = "2024-12-17" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 15.0 +output = 60.0 diff --git a/providers/openrouter/models/openai/o3-deep-research.toml b/providers/openrouter/models/openai/o3-deep-research.toml new file mode 100644 index 000000000..6340c461a --- /dev/null +++ b/providers/openrouter/models/openai/o3-deep-research.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o3 Deep Research" +release_date = "2025-10-10" +last_updated = "2025-10-10" +open_weights = false 
+tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 10.0 +output = 40.0 diff --git a/providers/openrouter/models/openai/o3-mini-high.toml b/providers/openrouter/models/openai/o3-mini-high.toml new file mode 100644 index 000000000..646366f29 --- /dev/null +++ b/providers/openrouter/models/openai/o3-mini-high.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o3 Mini High" +release_date = "2025-02-12" +last_updated = "2025-02-12" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = false + +[modalities] +input = [ "text", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 1.1 +output = 4.4 diff --git a/providers/openrouter/models/openai/o3-mini.toml b/providers/openrouter/models/openai/o3-mini.toml new file mode 100644 index 000000000..b54a4d7ba --- /dev/null +++ b/providers/openrouter/models/openai/o3-mini.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o3 Mini" +release_date = "2025-01-31" +last_updated = "2025-01-31" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = false + +[modalities] +input = [ "text", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 1.1 +output = 4.4 diff --git a/providers/openrouter/models/openai/o3-pro.toml b/providers/openrouter/models/openai/o3-pro.toml new file mode 100644 index 000000000..edd7e80ab --- /dev/null +++ b/providers/openrouter/models/openai/o3-pro.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o3 Pro" +release_date = "2025-06-10" +last_updated = "2025-06-10" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "text", "file", "image",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 20.0 +output = 80.0 diff --git a/providers/openrouter/models/openai/o3.toml b/providers/openrouter/models/openai/o3.toml new file mode 100644 index 000000000..429ef60d3 --- /dev/null +++ b/providers/openrouter/models/openai/o3.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o3" +release_date = "2025-04-16" +last_updated = "2025-04-16" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 2.0 +output = 8.0 diff --git a/providers/openrouter/models/openai/o4-mini-deep-research.toml b/providers/openrouter/models/openai/o4-mini-deep-research.toml new file mode 100644 index 000000000..eba780794 --- /dev/null +++ b/providers/openrouter/models/openai/o4-mini-deep-research.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o4 Mini Deep Research" +release_date = "2025-10-10" +last_updated = "2025-10-10" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "file", "image", "text",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 2.0 +output = 8.0 diff --git a/providers/openrouter/models/openai/o4-mini-high.toml 
b/providers/openrouter/models/openai/o4-mini-high.toml new file mode 100644 index 000000000..7f5374b64 --- /dev/null +++ b/providers/openrouter/models/openai/o4-mini-high.toml @@ -0,0 +1,22 @@ +name = "OpenAI: o4 Mini High" +release_date = "2025-04-16" +last_updated = "2025-04-16" +open_weights = false +tool_call = true +structured_output = true +temperature = false +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text", "file",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 100000 + +[cost] +input = 1.1 +output = 4.4 diff --git a/providers/openrouter/models/opengvlab/internvl3-78b.toml b/providers/openrouter/models/opengvlab/internvl3-78b.toml new file mode 100644 index 000000000..5bd672796 --- /dev/null +++ b/providers/openrouter/models/opengvlab/internvl3-78b.toml @@ -0,0 +1,22 @@ +name = "OpenGVLab: InternVL3 78B" +release_date = "2025-09-15" +last_updated = "2025-09-15" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.09999999999999999 +output = 0.39 diff --git a/providers/openrouter/models/openrouter/auto.toml b/providers/openrouter/models/openrouter/auto.toml new file mode 100644 index 000000000..e17110174 --- /dev/null +++ b/providers/openrouter/models/openrouter/auto.toml @@ -0,0 +1,22 @@ +name = "Auto Router" +release_date = "2023-11-07" +last_updated = "2023-11-07" +open_weights = false +tool_call = false +structured_output = false +temperature = false +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 2000000 +input = 2000000 +output = 4096 + +[cost] +input = -1000000.0 +output = -1000000.0 diff --git a/providers/openrouter/models/openrouter/bodybuilder.toml b/providers/openrouter/models/openrouter/bodybuilder.toml new file mode 100644 index 000000000..317c73ff6 --- /dev/null +++ b/providers/openrouter/models/openrouter/bodybuilder.toml @@ -0,0 +1,22 @@ +name = "Body Builder" +release_date = "2025-12-04" +last_updated = "2025-12-04" +open_weights = false +tool_call = false +structured_output = false +temperature = false +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = -1000000.0 +output = -1000000.0 diff --git a/providers/openrouter/models/perplexity/sonar-deep-research.toml b/providers/openrouter/models/perplexity/sonar-deep-research.toml new file mode 100644 index 000000000..3a44319bc --- /dev/null +++ b/providers/openrouter/models/perplexity/sonar-deep-research.toml @@ -0,0 +1,23 @@ +name = "Perplexity: Sonar Deep Research" +release_date = "2025-03-06" +last_updated = "2025-03-06" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 2.0 +output = 8.0 +reasoning = 3.0 diff --git a/providers/openrouter/models/perplexity/sonar-pro-search.toml b/providers/openrouter/models/perplexity/sonar-pro-search.toml new file mode 100644 index 000000000..0c7249bb6 --- /dev/null +++ b/providers/openrouter/models/perplexity/sonar-pro-search.toml @@ -0,0 +1,22 @@ +name = "Perplexity: Sonar Pro Search" +release_date = 
"2025-10-30" +last_updated = "2025-10-30" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 8000 + +[cost] +input = 3.0 +output = 15.0 diff --git a/providers/openrouter/models/perplexity/sonar-pro.toml b/providers/openrouter/models/perplexity/sonar-pro.toml new file mode 100644 index 000000000..98a72818b --- /dev/null +++ b/providers/openrouter/models/perplexity/sonar-pro.toml @@ -0,0 +1,22 @@ +name = "Perplexity: Sonar Pro" +release_date = "2025-03-06" +last_updated = "2025-03-06" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 200000 +input = 200000 +output = 8000 + +[cost] +input = 3.0 +output = 15.0 diff --git a/providers/openrouter/models/perplexity/sonar-reasoning-pro.toml b/providers/openrouter/models/perplexity/sonar-reasoning-pro.toml new file mode 100644 index 000000000..a4731fb1e --- /dev/null +++ b/providers/openrouter/models/perplexity/sonar-reasoning-pro.toml @@ -0,0 +1,22 @@ +name = "Perplexity: Sonar Reasoning Pro" +release_date = "2025-03-06" +last_updated = "2025-03-06" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 4096 + +[cost] +input = 2.0 +output = 8.0 diff --git a/providers/openrouter/models/perplexity/sonar-reasoning.toml b/providers/openrouter/models/perplexity/sonar-reasoning.toml new file mode 100644 index 000000000..179dbb34a --- /dev/null +++ b/providers/openrouter/models/perplexity/sonar-reasoning.toml @@ -0,0 +1,22 @@ +name = "Perplexity: Sonar Reasoning" +release_date = "2025-01-29" +last_updated = "2025-01-29" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 127000 +input = 127000 +output = 4096 + +[cost] +input = 1.0 +output = 5.0 diff --git a/providers/openrouter/models/perplexity/sonar.toml b/providers/openrouter/models/perplexity/sonar.toml new file mode 100644 index 000000000..dbf795434 --- /dev/null +++ b/providers/openrouter/models/perplexity/sonar.toml @@ -0,0 +1,22 @@ +name = "Perplexity: Sonar" +release_date = "2025-01-27" +last_updated = "2025-01-27" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 127072 +input = 127072 +output = 4096 + +[cost] +input = 1.0 +output = 1.0 diff --git a/providers/openrouter/models/prime-intellect/intellect-3.toml b/providers/openrouter/models/prime-intellect/intellect-3.toml new file mode 100644 index 000000000..bed2cb2ac --- /dev/null +++ b/providers/openrouter/models/prime-intellect/intellect-3.toml @@ -0,0 +1,22 @@ +name = "Prime Intellect: INTELLECT-3" +release_date = "2025-11-26" +last_updated = "2025-11-26" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 
+ +[cost] +input = 0.2 +output = 1.1 diff --git a/providers/openrouter/models/qwen/qwen-2.5-72b-instruct.toml b/providers/openrouter/models/qwen/qwen-2.5-72b-instruct.toml new file mode 100644 index 000000000..d4b7cc3f1 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-2.5-72b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen2.5 72B Instruct" +release_date = "2024-09-18" +last_updated = "2024-09-18" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.07 +output = 0.26 diff --git a/providers/openrouter/models/qwen/qwen-2.5-7b-instruct.toml b/providers/openrouter/models/qwen/qwen-2.5-7b-instruct.toml new file mode 100644 index 000000000..e27ced2d3 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-2.5-7b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen2.5 7B Instruct" +release_date = "2024-10-15" +last_updated = "2024-10-15" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.04 +output = 0.1 diff --git a/providers/openrouter/models/qwen/qwen-2.5-vl-7b-instruct.toml b/providers/openrouter/models/qwen/qwen-2.5-vl-7b-instruct.toml new file mode 100644 index 000000000..e2317cb58 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-2.5-vl-7b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen2.5-VL 7B Instruct" +release_date = "2024-08-27" +last_updated = "2024-08-27" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.2 +output = 0.2 diff --git a/providers/openrouter/models/qwen/qwen-max.toml b/providers/openrouter/models/qwen/qwen-max.toml new file mode 100644 index 000000000..240fd8ef0 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-max.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen-Max" +release_date = "2025-02-01" +last_updated = "2025-02-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 8192 + +[cost] +input = 1.6 +output = 6.4 diff --git a/providers/openrouter/models/qwen/qwen-plus-2025-07-28.toml b/providers/openrouter/models/qwen/qwen-plus-2025-07-28.toml new file mode 100644 index 000000000..02d770fae --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-plus-2025-07-28.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen Plus 0728" +release_date = "2025-09-08" +last_updated = "2025-09-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 32768 + +[cost] +input = 0.4 +output = 1.2 diff --git a/providers/openrouter/models/qwen/qwen-plus-2025-07-28:thinking.toml b/providers/openrouter/models/qwen/qwen-plus-2025-07-28:thinking.toml new file mode 100644 index
000000000..9c934a175 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-plus-2025-07-28:thinking.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen Plus 0728 (thinking)" +release_date = "2025-09-08" +last_updated = "2025-09-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 32768 + +[cost] +input = 0.4 +output = 4.0 diff --git a/providers/openrouter/models/qwen/qwen-plus.toml b/providers/openrouter/models/qwen/qwen-plus.toml new file mode 100644 index 000000000..de30dabfd --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-plus.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen-Plus" +release_date = "2025-02-01" +last_updated = "2025-02-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 8192 + +[cost] +input = 0.4 +output = 1.2 diff --git a/providers/openrouter/models/qwen/qwen-turbo.toml b/providers/openrouter/models/qwen/qwen-turbo.toml new file mode 100644 index 000000000..43e3cacb2 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-turbo.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen-Turbo" +release_date = "2025-02-01" +last_updated = "2025-02-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 1000000 +input = 1000000 +output = 8192 + +[cost] +input = 0.05 +output = 0.2 diff --git a/providers/openrouter/models/qwen/qwen-vl-max.toml b/providers/openrouter/models/qwen/qwen-vl-max.toml new file mode 100644 index 000000000..eb541c1aa --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-vl-max.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen VL Max" +release_date = "2025-02-01" +last_updated = "2025-02-01" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 8192 + +[cost] +input = 0.8 +output = 3.2 diff --git a/providers/openrouter/models/qwen/qwen-vl-plus.toml b/providers/openrouter/models/qwen/qwen-vl-plus.toml new file mode 100644 index 000000000..9dbc2ac93 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen-vl-plus.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen VL Plus" +release_date = "2025-02-04" +last_updated = "2025-02-04" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 7500 +input = 7500 +output = 1500 + +[cost] +input = 0.21 +output = 0.63 diff --git a/providers/openrouter/models/qwen/qwen2.5-coder-7b-instruct.toml b/providers/openrouter/models/qwen/qwen2.5-coder-7b-instruct.toml new file mode 100644 index 000000000..78ed3e84f --- /dev/null +++ b/providers/openrouter/models/qwen/qwen2.5-coder-7b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen2.5 Coder 7B Instruct" +release_date = "2025-04-15" +last_updated = "2025-04-15" +open_weights = false +tool_call = false +structured_output = true +temperature = true
+reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.03 +output = 0.09 diff --git a/providers/openrouter/models/qwen/qwen2.5-vl-32b-instruct.toml b/providers/openrouter/models/qwen/qwen2.5-vl-32b-instruct.toml new file mode 100644 index 000000000..68079850b --- /dev/null +++ b/providers/openrouter/models/qwen/qwen2.5-vl-32b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen2.5 VL 32B Instruct" +release_date = "2025-03-24" +last_updated = "2025-03-24" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 16384 +input = 16384 +output = 16384 + +[cost] +input = 0.049999999999999996 +output = 0.22 diff --git a/providers/openrouter/models/qwen/qwen3-14b.toml b/providers/openrouter/models/qwen/qwen3-14b.toml new file mode 100644 index 000000000..2885bd8b4 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-14b.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 14B" +release_date = "2025-04-28" +last_updated = "2025-04-28" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 40960 +input = 40960 +output = 40960 + +[cost] +input = 0.049999999999999996 +output = 0.22 diff --git a/providers/openrouter/models/qwen/qwen3-235b-a22b-2507.toml b/providers/openrouter/models/qwen/qwen3-235b-a22b-2507.toml new file mode 100644 index 000000000..b32ed0a53 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-235b-a22b-2507.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 235B A22B Instruct 2507" +release_date = "2025-07-21" +last_updated = "2025-07-21" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.071 +output = 0.463 diff --git a/providers/openrouter/models/qwen/qwen3-235b-a22b-thinking-2507.toml b/providers/openrouter/models/qwen/qwen3-235b-a22b-thinking-2507.toml new file mode 100644 index 000000000..513dcab86 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-235b-a22b-thinking-2507.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 235B A22B Thinking 2507" +release_date = "2025-07-25" +last_updated = "2025-07-25" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.11 +output = 0.6 diff --git a/providers/openrouter/models/qwen/qwen3-235b-a22b.toml b/providers/openrouter/models/qwen/qwen3-235b-a22b.toml new file mode 100644 index 000000000..b5b37e931 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-235b-a22b.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 235B A22B" +release_date = "2025-04-28" +last_updated = "2025-04-28" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 40960 +input = 40960 +output = 40960 + +[cost] +input = 0.18 +output = 0.54 diff --git 
a/providers/openrouter/models/qwen/qwen3-30b-a3b-instruct-2507.toml b/providers/openrouter/models/qwen/qwen3-30b-a3b-instruct-2507.toml new file mode 100644 index 000000000..b6093ee24 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-30b-a3b-instruct-2507.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 30B A3B Instruct 2507" +release_date = "2025-07-29" +last_updated = "2025-07-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.08 +output = 0.33 diff --git a/providers/openrouter/models/qwen/qwen3-30b-a3b-thinking-2507.toml b/providers/openrouter/models/qwen/qwen3-30b-a3b-thinking-2507.toml new file mode 100644 index 000000000..478528f77 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-30b-a3b-thinking-2507.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 30B A3B Thinking 2507" +release_date = "2025-08-28" +last_updated = "2025-08-28" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.051 +output = 0.33999999999999997 diff --git a/providers/openrouter/models/qwen/qwen3-30b-a3b.toml b/providers/openrouter/models/qwen/qwen3-30b-a3b.toml new file mode 100644 index 000000000..e54332b31 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-30b-a3b.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 30B A3B" +release_date = "2025-04-28" +last_updated = "2025-04-28" +open_weights = true +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 40960 +input = 40960 +output = 40960 + +[cost] +input = 0.06 +output = 0.22 diff --git a/providers/openrouter/models/qwen/qwen3-32b.toml b/providers/openrouter/models/qwen/qwen3-32b.toml new file mode 100644 index 000000000..3ebb8f3fb --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-32b.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 32B" +release_date = "2025-04-28" +last_updated = "2025-04-28" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 40960 +input = 40960 +output = 40960 + +[cost] +input = 0.08 +output = 0.24 diff --git a/providers/openrouter/models/qwen/qwen3-4b:free.toml b/providers/openrouter/models/qwen/qwen3-4b:free.toml new file mode 100644 index 000000000..a378e41dc --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-4b:free.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 4B (free)" +release_date = "2025-04-30" +last_updated = "2025-04-30" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 40960 +input = 40960 +output = 4096 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/qwen/qwen3-8b.toml b/providers/openrouter/models/qwen/qwen3-8b.toml new file mode 100644 index 000000000..a37977bae --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-8b.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 8B" +release_date = "2025-04-28" +last_updated = "2025-04-28" +open_weights 
= false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 20000 + +[cost] +input = 0.028 +output = 0.1104 diff --git a/providers/openrouter/models/qwen/qwen3-coder-30b-a3b-instruct.toml b/providers/openrouter/models/qwen/qwen3-coder-30b-a3b-instruct.toml new file mode 100644 index 000000000..5232755a6 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-coder-30b-a3b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Coder 30B A3B Instruct" +release_date = "2025-07-31" +last_updated = "2025-07-31" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.06 +output = 0.25 diff --git a/providers/openrouter/models/qwen/qwen3-coder-flash.toml b/providers/openrouter/models/qwen/qwen3-coder-flash.toml new file mode 100644 index 000000000..246a083d0 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-coder-flash.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Coder Flash" +release_date = "2025-09-17" +last_updated = "2025-09-17" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 65536 + +[cost] +input = 0.3 +output = 1.5 diff --git a/providers/openrouter/models/qwen/qwen3-coder-plus.toml b/providers/openrouter/models/qwen/qwen3-coder-plus.toml new file mode 100644 index 000000000..2842fcf5c --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-coder-plus.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Coder Plus" +release_date = "2025-09-23" +last_updated = "2025-09-23" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 128000 +input = 128000 +output = 65536 + +[cost] +input = 1.0 +output = 5.0 diff --git a/providers/openrouter/models/qwen/qwen3-coder.toml b/providers/openrouter/models/qwen/qwen3-coder.toml new file mode 100644 index 000000000..25c768405 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-coder.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Coder 480B A35B" +release_date = "2025-07-22" +last_updated = "2025-07-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.22 +output = 0.95 diff --git a/providers/openrouter/models/qwen/qwen3-coder:exacto.toml b/providers/openrouter/models/qwen/qwen3-coder:exacto.toml new file mode 100644 index 000000000..0170c6900 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-coder:exacto.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Coder 480B A35B (exacto)" +release_date = "2025-07-22" +last_updated = "2025-07-22" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.38 +output = 1.53 diff --git 
a/providers/openrouter/models/qwen/qwen3-coder:free.toml b/providers/openrouter/models/qwen/qwen3-coder:free.toml new file mode 100644 index 000000000..bcaeaddd6 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-coder:free.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Coder 480B A35B (free)" +release_date = "2025-07-22" +last_updated = "2025-07-22" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262000 +input = 262000 +output = 262000 + +[cost] +input = 0.0 +output = 0.0 diff --git a/providers/openrouter/models/qwen/qwen3-max.toml b/providers/openrouter/models/qwen/qwen3-max.toml new file mode 100644 index 000000000..e0b68110a --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-max.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Max" +release_date = "2025-09-23" +last_updated = "2025-09-23" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 32768 + +[cost] +input = 1.2 +output = 6.0 diff --git a/providers/openrouter/models/qwen/qwen3-next-80b-a3b-instruct.toml b/providers/openrouter/models/qwen/qwen3-next-80b-a3b-instruct.toml new file mode 100644 index 000000000..ba623c7c6 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-next-80b-a3b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Next 80B A3B Instruct" +release_date = "2025-09-11" +last_updated = "2025-09-11" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.09999999999999999 +output = 0.7999999999999999 diff --git a/providers/openrouter/models/qwen/qwen3-next-80b-a3b-thinking.toml b/providers/openrouter/models/qwen/qwen3-next-80b-a3b-thinking.toml new file mode 100644 index 000000000..e13833d21 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-next-80b-a3b-thinking.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 Next 80B A3B Thinking" +release_date = "2025-09-11" +last_updated = "2025-09-11" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 32768 + +[cost] +input = 0.12 +output = 1.2 diff --git a/providers/openrouter/models/qwen/qwen3-vl-235b-a22b-instruct.toml b/providers/openrouter/models/qwen/qwen3-vl-235b-a22b-instruct.toml new file mode 100644 index 000000000..fe151cf18 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-vl-235b-a22b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 VL 235B A22B Instruct" +release_date = "2025-09-23" +last_updated = "2025-09-23" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 4096 + +[cost] +input = 0.19999999999999998 +output = 1.2 diff --git a/providers/openrouter/models/qwen/qwen3-vl-235b-a22b-thinking.toml b/providers/openrouter/models/qwen/qwen3-vl-235b-a22b-thinking.toml new file mode 100644 index 000000000..3cea91751 --- /dev/null +++ 
b/providers/openrouter/models/qwen/qwen3-vl-235b-a22b-thinking.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 VL 235B A22B Thinking" +release_date = "2025-09-23" +last_updated = "2025-09-23" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 262144 +input = 262144 +output = 262144 + +[cost] +input = 0.3 +output = 1.2 diff --git a/providers/openrouter/models/qwen/qwen3-vl-30b-a3b-instruct.toml b/providers/openrouter/models/qwen/qwen3-vl-30b-a3b-instruct.toml new file mode 100644 index 000000000..0d1358423 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-vl-30b-a3b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 VL 30B A3B Instruct" +release_date = "2025-10-06" +last_updated = "2025-10-06" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.14 +output = 1.0 diff --git a/providers/openrouter/models/qwen/qwen3-vl-30b-a3b-thinking.toml b/providers/openrouter/models/qwen/qwen3-vl-30b-a3b-thinking.toml new file mode 100644 index 000000000..6d0c4b870 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-vl-30b-a3b-thinking.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 VL 30B A3B Thinking" +release_date = "2025-10-06" +last_updated = "2025-10-06" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "text", "image",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 32768 + +[cost] +input = 0.16 +output = 0.7999999999999999 diff --git a/providers/openrouter/models/qwen/qwen3-vl-8b-instruct.toml b/providers/openrouter/models/qwen/qwen3-vl-8b-instruct.toml new file mode 100644 index 000000000..01be8acd0 --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-vl-8b-instruct.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 VL 8B Instruct" +release_date = "2025-10-14" +last_updated = "2025-10-14" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 32768 + +[cost] +input = 0.064 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/qwen/qwen3-vl-8b-thinking.toml b/providers/openrouter/models/qwen/qwen3-vl-8b-thinking.toml new file mode 100644 index 000000000..a4732135c --- /dev/null +++ b/providers/openrouter/models/qwen/qwen3-vl-8b-thinking.toml @@ -0,0 +1,22 @@ +name = "Qwen: Qwen3 VL 8B Thinking" +release_date = "2025-10-14" +last_updated = "2025-10-14" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 32768 + +[cost] +input = 0.18 +output = 2.0999999999999996 diff --git a/providers/openrouter/models/qwen/qwq-32b.toml b/providers/openrouter/models/qwen/qwq-32b.toml new file mode 100644 index 000000000..e061664fa --- /dev/null +++ b/providers/openrouter/models/qwen/qwq-32b.toml @@ -0,0 +1,22 @@ +name = "Qwen: QwQ 32B" +release_date = "2025-03-05" +last_updated = "2025-03-05" +open_weights = false 
+tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.15 +output = 0.39999999999999997 diff --git a/providers/openrouter/models/raifle/sorcererlm-8x22b.toml b/providers/openrouter/models/raifle/sorcererlm-8x22b.toml new file mode 100644 index 000000000..3c6dc96d5 --- /dev/null +++ b/providers/openrouter/models/raifle/sorcererlm-8x22b.toml @@ -0,0 +1,22 @@ +name = "SorcererLM 8x22B" +release_date = "2024-11-08" +last_updated = "2024-11-08" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 16000 +input = 16000 +output = 4096 + +[cost] +input = 4.5 +output = 4.5 diff --git a/providers/openrouter/models/relace/relace-apply-3.toml b/providers/openrouter/models/relace/relace-apply-3.toml new file mode 100644 index 000000000..7717c976f --- /dev/null +++ b/providers/openrouter/models/relace/relace-apply-3.toml @@ -0,0 +1,22 @@ +name = "Relace: Relace Apply 3" +release_date = "2025-09-26" +last_updated = "2025-09-26" +open_weights = false +tool_call = false +structured_output = false +temperature = false +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 128000 + +[cost] +input = 0.85 +output = 1.25 diff --git a/providers/openrouter/models/relace/relace-search.toml b/providers/openrouter/models/relace/relace-search.toml new file mode 100644 index 000000000..7e717a363 --- /dev/null +++ b/providers/openrouter/models/relace/relace-search.toml @@ -0,0 +1,22 @@ +name = "Relace: Relace Search" +release_date = "2025-12-08" +last_updated = "2025-12-08" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 256000 +input = 256000 +output = 128000 + +[cost] +input = 1.0 +output = 3.0 diff --git a/providers/openrouter/models/sao10k/l3-euryale-70b.toml b/providers/openrouter/models/sao10k/l3-euryale-70b.toml new file mode 100644 index 000000000..d50c01681 --- /dev/null +++ b/providers/openrouter/models/sao10k/l3-euryale-70b.toml @@ -0,0 +1,22 @@ +name = "Sao10k: Llama 3 Euryale 70B v2.1" +release_date = "2024-06-17" +last_updated = "2024-06-17" +open_weights = false +tool_call = true +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 8192 + +[cost] +input = 1.48 +output = 1.48 diff --git a/providers/openrouter/models/sao10k/l3-lunaris-8b.toml b/providers/openrouter/models/sao10k/l3-lunaris-8b.toml new file mode 100644 index 000000000..128d5ace6 --- /dev/null +++ b/providers/openrouter/models/sao10k/l3-lunaris-8b.toml @@ -0,0 +1,22 @@ +name = "Sao10K: Llama 3 8B Lunaris" +release_date = "2024-08-12" +last_updated = "2024-08-12" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 8192 +input = 8192 +output = 4096 + +[cost] +input = 0.04 +output = 0.049999999999999996 diff --git a/providers/openrouter/models/sao10k/l3.1-70b-hanami-x1.toml 
b/providers/openrouter/models/sao10k/l3.1-70b-hanami-x1.toml new file mode 100644 index 000000000..dbaa08aaa --- /dev/null +++ b/providers/openrouter/models/sao10k/l3.1-70b-hanami-x1.toml @@ -0,0 +1,22 @@ +name = "Sao10K: Llama 3.1 70B Hanami x1" +release_date = "2025-01-07" +last_updated = "2025-01-07" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 16000 +input = 16000 +output = 4096 + +[cost] +input = 3.0 +output = 3.0 diff --git a/providers/openrouter/models/sao10k/l3.1-euryale-70b.toml b/providers/openrouter/models/sao10k/l3.1-euryale-70b.toml new file mode 100644 index 000000000..f632795e2 --- /dev/null +++ b/providers/openrouter/models/sao10k/l3.1-euryale-70b.toml @@ -0,0 +1,22 @@ +name = "Sao10K: Llama 3.1 Euryale 70B v2.2" +release_date = "2024-08-27" +last_updated = "2024-08-27" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.65 +output = 0.75 diff --git a/providers/openrouter/models/sao10k/l3.3-euryale-70b.toml b/providers/openrouter/models/sao10k/l3.3-euryale-70b.toml new file mode 100644 index 000000000..e576a4162 --- /dev/null +++ b/providers/openrouter/models/sao10k/l3.3-euryale-70b.toml @@ -0,0 +1,22 @@ +name = "Sao10K: Llama 3.3 Euryale 70B" +release_date = "2024-12-18" +last_updated = "2024-12-18" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 16384 + +[cost] +input = 0.65 +output = 0.75 diff --git a/providers/openrouter/models/stepfun-ai/step3.toml b/providers/openrouter/models/stepfun-ai/step3.toml new file mode 100644 index 000000000..d27def08c --- /dev/null +++ b/providers/openrouter/models/stepfun-ai/step3.toml @@ -0,0 +1,22 @@ +name = "StepFun: Step3" +release_date = "2025-08-28" +last_updated = "2025-08-28" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = true +attachment = true + +[modalities] +input = [ "image", "text",] +output = [ "text",] + +[limit] +context = 65536 +input = 65536 +output = 65536 + +[cost] +input = 0.5700000000000001 +output = 1.42 diff --git a/providers/openrouter/models/switchpoint/router.toml b/providers/openrouter/models/switchpoint/router.toml new file mode 100644 index 000000000..fce68a07f --- /dev/null +++ b/providers/openrouter/models/switchpoint/router.toml @@ -0,0 +1,22 @@ +name = "Switchpoint Router" +release_date = "2025-07-11" +last_updated = "2025-07-11" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 4096 + +[cost] +input = 0.85 +output = 3.4 diff --git a/providers/openrouter/models/tencent/hunyuan-a13b-instruct.toml b/providers/openrouter/models/tencent/hunyuan-a13b-instruct.toml new file mode 100644 index 000000000..71547fb52 --- /dev/null +++ b/providers/openrouter/models/tencent/hunyuan-a13b-instruct.toml @@ -0,0 +1,22 @@ +name = "Tencent: Hunyuan A13B Instruct" +release_date = "2025-07-08" +last_updated = "2025-07-08" +open_weights = 
false +tool_call = false +structured_output = true +temperature = true +reasoning = true +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.14 +output = 0.5700000000000001 diff --git a/providers/openrouter/models/thedrummer/anubis-70b-v1.1.toml b/providers/openrouter/models/thedrummer/anubis-70b-v1.1.toml new file mode 100644 index 000000000..827775631 --- /dev/null +++ b/providers/openrouter/models/thedrummer/anubis-70b-v1.1.toml @@ -0,0 +1,22 @@ +name = "TheDrummer: Anubis 70B V1.1" +release_date = "2025-06-29" +last_updated = "2025-06-29" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.75 +output = 1.0 diff --git a/providers/openrouter/models/thedrummer/cydonia-24b-v4.1.toml b/providers/openrouter/models/thedrummer/cydonia-24b-v4.1.toml new file mode 100644 index 000000000..d2f019aee --- /dev/null +++ b/providers/openrouter/models/thedrummer/cydonia-24b-v4.1.toml @@ -0,0 +1,22 @@ +name = "TheDrummer: Cydonia 24B V4.1" +release_date = "2025-09-26" +last_updated = "2025-09-26" +open_weights = false +tool_call = false +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 131072 +input = 131072 +output = 131072 + +[cost] +input = 0.3 +output = 0.5 diff --git a/providers/openrouter/models/thedrummer/rocinante-12b.toml b/providers/openrouter/models/thedrummer/rocinante-12b.toml new file mode 100644 index 000000000..89f6e88d0 --- /dev/null +++ b/providers/openrouter/models/thedrummer/rocinante-12b.toml @@ -0,0 +1,22 @@ +name = "TheDrummer: Rocinante 12B" +release_date = "2024-09-29" +last_updated = "2024-09-29" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] +input = 0.16999999999999998 +output = 0.43 diff --git a/providers/openrouter/models/thedrummer/skyfall-36b-v2.toml b/providers/openrouter/models/thedrummer/skyfall-36b-v2.toml new file mode 100644 index 000000000..6385593e5 --- /dev/null +++ b/providers/openrouter/models/thedrummer/skyfall-36b-v2.toml @@ -0,0 +1,22 @@ +name = "TheDrummer: Skyfall 36B V2" +release_date = "2025-03-10" +last_updated = "2025-03-10" +open_weights = false +tool_call = false +structured_output = false +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 32768 + +[cost] +input = 0.55 +output = 0.7999999999999999 diff --git a/providers/openrouter/models/thedrummer/unslopnemo-12b.toml b/providers/openrouter/models/thedrummer/unslopnemo-12b.toml new file mode 100644 index 000000000..a5ee497b1 --- /dev/null +++ b/providers/openrouter/models/thedrummer/unslopnemo-12b.toml @@ -0,0 +1,22 @@ +name = "TheDrummer: UnslopNemo 12B" +release_date = "2024-11-08" +last_updated = "2024-11-08" +open_weights = false +tool_call = true +structured_output = true +temperature = true +reasoning = false +attachment = false + +[modalities] +input = [ "text",] +output = [ "text",] + +[limit] +context = 32768 +input = 32768 +output = 4096 + +[cost] 
+input = 0.4
+output = 0.4
diff --git a/providers/openrouter/models/thudm/glm-4.1v-9b-thinking.toml b/providers/openrouter/models/thudm/glm-4.1v-9b-thinking.toml
new file mode 100644
index 000000000..121d0e3d4
--- /dev/null
+++ b/providers/openrouter/models/thudm/glm-4.1v-9b-thinking.toml
@@ -0,0 +1,22 @@
+name = "THUDM: GLM 4.1V 9B Thinking"
+release_date = "2025-07-11"
+last_updated = "2025-07-11"
+open_weights = false
+tool_call = false
+structured_output = false
+temperature = true
+reasoning = true
+attachment = true
+
+[modalities]
+input = [ "image", "text",]
+output = [ "text",]
+
+[limit]
+context = 65536
+input = 65536
+output = 8000
+
+[cost]
+input = 0.028
+output = 0.1104
diff --git a/providers/openrouter/models/tngtech/deepseek-r1t-chimera.toml b/providers/openrouter/models/tngtech/deepseek-r1t-chimera.toml
new file mode 100644
index 000000000..9e7be0f21
--- /dev/null
+++ b/providers/openrouter/models/tngtech/deepseek-r1t-chimera.toml
@@ -0,0 +1,22 @@
+name = "TNG: DeepSeek R1T Chimera"
+release_date = "2025-04-27"
+last_updated = "2025-04-27"
+open_weights = false
+tool_call = false
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 163840
+input = 163840
+output = 163840
+
+[cost]
+input = 0.3
+output = 1.2
diff --git a/providers/openrouter/models/tngtech/deepseek-r1t-chimera:free.toml b/providers/openrouter/models/tngtech/deepseek-r1t-chimera:free.toml
new file mode 100644
index 000000000..94eb92793
--- /dev/null
+++ b/providers/openrouter/models/tngtech/deepseek-r1t-chimera:free.toml
@@ -0,0 +1,22 @@
+name = "TNG: DeepSeek R1T Chimera (free)"
+release_date = "2025-04-27"
+last_updated = "2025-04-27"
+open_weights = false
+tool_call = false
+structured_output = false
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 163840
+input = 163840
+output = 4096
+
+[cost]
+input = 0.0
+output = 0.0
diff --git a/providers/openrouter/models/tngtech/deepseek-r1t2-chimera.toml b/providers/openrouter/models/tngtech/deepseek-r1t2-chimera.toml
new file mode 100644
index 000000000..ae9afc974
--- /dev/null
+++ b/providers/openrouter/models/tngtech/deepseek-r1t2-chimera.toml
@@ -0,0 +1,22 @@
+name = "TNG: DeepSeek R1T2 Chimera"
+release_date = "2025-07-08"
+last_updated = "2025-07-08"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 163840
+input = 163840
+output = 163840
+
+[cost]
+input = 0.3
+output = 1.2
diff --git a/providers/openrouter/models/tngtech/tng-r1t-chimera.toml b/providers/openrouter/models/tngtech/tng-r1t-chimera.toml
new file mode 100644
index 000000000..45c43b421
--- /dev/null
+++ b/providers/openrouter/models/tngtech/tng-r1t-chimera.toml
@@ -0,0 +1,22 @@
+name = "TNG: R1T Chimera"
+release_date = "2025-11-26"
+last_updated = "2025-11-26"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 163840
+input = 163840
+output = 163840
+
+[cost]
+input = 0.3
+output = 1.2
diff --git a/providers/openrouter/models/tngtech/tng-r1t-chimera:free.toml b/providers/openrouter/models/tngtech/tng-r1t-chimera:free.toml
new file mode 100644
index 000000000..bd1eecb3e
--- /dev/null
+++ b/providers/openrouter/models/tngtech/tng-r1t-chimera:free.toml
@@ -0,0 +1,22 @@
+name = "TNG: R1T Chimera (free)"
+release_date = "2025-11-26"
+last_updated = "2025-11-26"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 163840
+input = 163840
+output = 163840
+
+[cost]
+input = 0.0
+output = 0.0
diff --git a/providers/openrouter/models/undi95/remm-slerp-l2-13b.toml b/providers/openrouter/models/undi95/remm-slerp-l2-13b.toml
new file mode 100644
index 000000000..6ed759ba4
--- /dev/null
+++ b/providers/openrouter/models/undi95/remm-slerp-l2-13b.toml
@@ -0,0 +1,22 @@
+name = "ReMM SLERP 13B"
+release_date = "2023-07-21"
+last_updated = "2023-07-21"
+open_weights = false
+tool_call = false
+structured_output = true
+temperature = true
+reasoning = false
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 6144
+input = 6144
+output = 4096
+
+[cost]
+input = 0.45
+output = 0.65
diff --git a/providers/openrouter/models/x-ai/grok-4-fast.toml b/providers/openrouter/models/x-ai/grok-4-fast.toml
new file mode 100644
index 000000000..3785dabd3
--- /dev/null
+++ b/providers/openrouter/models/x-ai/grok-4-fast.toml
@@ -0,0 +1,22 @@
+name = "xAI: Grok 4 Fast"
+release_date = "2025-09-18"
+last_updated = "2025-09-18"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = true
+
+[modalities]
+input = [ "text", "image",]
+output = [ "text",]
+
+[limit]
+context = 2000000
+input = 2000000
+output = 30000
+
+[cost]
+input = 0.2
+output = 0.5
diff --git a/providers/openrouter/models/x-ai/grok-4.1-fast.toml b/providers/openrouter/models/x-ai/grok-4.1-fast.toml
new file mode 100644
index 000000000..a1deecb6d
--- /dev/null
+++ b/providers/openrouter/models/x-ai/grok-4.1-fast.toml
@@ -0,0 +1,22 @@
+name = "xAI: Grok 4.1 Fast"
+release_date = "2025-11-19"
+last_updated = "2025-11-19"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = true
+
+[modalities]
+input = [ "text", "image",]
+output = [ "text",]
+
+[limit]
+context = 2000000
+input = 2000000
+output = 30000
+
+[cost]
+input = 0.2
+output = 0.5
diff --git a/providers/openrouter/models/x-ai/grok-code-fast-1.toml b/providers/openrouter/models/x-ai/grok-code-fast-1.toml
new file mode 100644
index 000000000..3351d0913
--- /dev/null
+++ b/providers/openrouter/models/x-ai/grok-code-fast-1.toml
@@ -0,0 +1,22 @@
+name = "xAI: Grok Code Fast 1"
+release_date = "2025-08-26"
+last_updated = "2025-08-26"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 256000
+input = 256000
+output = 10000
+
+[cost]
+input = 0.2
+output = 1.5
diff --git a/providers/openrouter/models/z-ai/glm-4-32b.toml b/providers/openrouter/models/z-ai/glm-4-32b.toml
new file mode 100644
index 000000000..fcd3b6d15
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4-32b.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4 32B "
+release_date = "2025-07-24"
+last_updated = "2025-07-24"
+open_weights = false
+tool_call = true
+structured_output = false
+temperature = true
+reasoning = false
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 128000
+input = 128000
+output = 4096
+
+[cost]
+input = 0.1
+output = 0.1
diff --git a/providers/openrouter/models/z-ai/glm-4.5-air.toml b/providers/openrouter/models/z-ai/glm-4.5-air.toml
new file mode 100644
index 000000000..34296d394
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.5-air.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.5 Air"
+release_date = "2025-07-25"
+last_updated = "2025-07-25"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 131072
+input = 131072
+output = 98304
+
+[cost]
+input = 0.104
+output = 0.68
diff --git a/providers/openrouter/models/z-ai/glm-4.5-air:free.toml b/providers/openrouter/models/z-ai/glm-4.5-air:free.toml
new file mode 100644
index 000000000..ac8d8c36e
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.5-air:free.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.5 Air (free)"
+release_date = "2025-07-25"
+last_updated = "2025-07-25"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 131072
+input = 131072
+output = 131072
+
+[cost]
+input = 0.0
+output = 0.0
diff --git a/providers/openrouter/models/z-ai/glm-4.5.toml b/providers/openrouter/models/z-ai/glm-4.5.toml
new file mode 100644
index 000000000..a3a575f05
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.5.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.5"
+release_date = "2025-07-25"
+last_updated = "2025-07-25"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 131072
+input = 131072
+output = 131072
+
+[cost]
+input = 0.35
+output = 1.55
diff --git a/providers/openrouter/models/z-ai/glm-4.5v.toml b/providers/openrouter/models/z-ai/glm-4.5v.toml
new file mode 100644
index 000000000..2879cfbe3
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.5v.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.5V"
+release_date = "2025-08-11"
+last_updated = "2025-08-11"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = true
+
+[modalities]
+input = [ "text", "image",]
+output = [ "text",]
+
+[limit]
+context = 65536
+input = 65536
+output = 16384
+
+[cost]
+input = 0.48
+output = 1.44
diff --git a/providers/openrouter/models/z-ai/glm-4.6.toml b/providers/openrouter/models/z-ai/glm-4.6.toml
new file mode 100644
index 000000000..9b2f4927e
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.6.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.6"
+release_date = "2025-09-30"
+last_updated = "2025-09-30"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 202752
+input = 202752
+output = 202752
+
+[cost]
+input = 0.4
+output = 1.75
diff --git a/providers/openrouter/models/z-ai/glm-4.6:exacto.toml b/providers/openrouter/models/z-ai/glm-4.6:exacto.toml
new file mode 100644
index 000000000..f79931454
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.6:exacto.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.6 (exacto)"
+release_date = "2025-09-30"
+last_updated = "2025-09-30"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = false
+
+[modalities]
+input = [ "text",]
+output = [ "text",]
+
+[limit]
+context = 202752
+input = 202752
+output = 4096
+
+[cost]
+input = 0.43
+output = 1.75
diff --git a/providers/openrouter/models/z-ai/glm-4.6v.toml b/providers/openrouter/models/z-ai/glm-4.6v.toml
new file mode 100644
index 000000000..c525f9b52
--- /dev/null
+++ b/providers/openrouter/models/z-ai/glm-4.6v.toml
@@ -0,0 +1,22 @@
+name = "Z.AI: GLM 4.6V"
+release_date = "2025-12-08"
+last_updated = "2025-12-08"
+open_weights = false
+tool_call = true
+structured_output = true
+temperature = true
+reasoning = true
+attachment = true
+
+[modalities]
+input = [ "image", "text", "video",]
+output = [ "text",]
+
+[limit]
+context = 131072
+input = 131072
+output = 24000
+
+[cost]
+input = 0.3
+output = 0.9