Skip to content

Commit f0da703

Browse files
committed
feat: Add Mistral AI as a free tier provider
- Add mistralai==1.2.5 to requirements
- Implement _generate_with_mistral function
- Add MISTRAL to ModelProvider enum and tier configuration
- Mistral available to free tier users alongside Groq
1 parent ec839a2 commit f0da703

File tree

2 files changed

+58
-3
lines changed

2 files changed

+58
-3
lines changed

backend/requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ stripe==14.1.0
3434
# AI Providers (Multi-Model Support)
3535
openai==2.11.0
3636
anthropic==0.75.0
37+
mistralai==1.2.5
3738

3839
# Production server
3940
gunicorn==21.2.0

services/ai_service.py

Lines changed: 57 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,13 @@
4545
Anthropic = None # type: ignore
4646
ANTHROPIC_AVAILABLE = False
4747

48+
# Optional dependency: the Mistral SDK may not be installed in every
# deployment, so gate its use behind an availability flag instead of
# failing at import time (mirrors the Anthropic guard above).
try:
    from mistralai import Mistral
    MISTRAL_AVAILABLE = True
except ImportError:
    Mistral = None  # type: ignore
    MISTRAL_AVAILABLE = False
54+
4855
logger = structlog.get_logger(__name__)
4956

5057
# =============================================================================
@@ -62,12 +69,14 @@
6269
# Provider API keys are sourced from the environment; an empty string
# means "not configured" and is treated as missing by the generators.
GROQ_API_KEY = os.getenv('GROQ_API_KEY', '')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY', '')
ANTHROPIC_API_KEY = os.getenv('ANTHROPIC_API_KEY', '')
MISTRAL_API_KEY = os.getenv('MISTRAL_API_KEY', '')
GITHUB_USERNAME = os.getenv('GITHUB_USERNAME', 'cliff-de-tech')

# Model configurations: the default model requested from each provider.
GROQ_MODEL = "llama-3.3-70b-versatile"
OPENAI_MODEL = "gpt-4o"
ANTHROPIC_MODEL = "claude-3-5-sonnet-20241022"
MISTRAL_MODEL = "mistral-large-latest"
7180

7281

7382
# =============================================================================
@@ -77,6 +86,7 @@
7786
class ModelProvider(str, Enum):
    """Available AI model providers.

    Inherits from ``str`` so members compare equal to their string
    values and serialize cleanly in JSON/API payloads.
    """
    GROQ = "groq"
    MISTRAL = "mistral"
    OPENAI = "openai"
    ANTHROPIC = "anthropic"
8292

@@ -90,9 +100,9 @@ class SubscriptionTier(str, Enum):
90100

91101
# Providers available to each tier. Free users get Groq and Mistral;
# paid tiers additionally unlock OpenAI and Anthropic.
TIER_ALLOWED_PROVIDERS = {
    SubscriptionTier.FREE: [ModelProvider.GROQ, ModelProvider.MISTRAL],
    SubscriptionTier.PRO: [
        ModelProvider.GROQ,
        ModelProvider.MISTRAL,
        ModelProvider.OPENAI,
        ModelProvider.ANTHROPIC,
    ],
    SubscriptionTier.ENTERPRISE: [
        ModelProvider.GROQ,
        ModelProvider.MISTRAL,
        ModelProvider.OPENAI,
        ModelProvider.ANTHROPIC,
    ],
}
97107

98108

@@ -724,6 +734,45 @@ def _generate_with_anthropic(
724734
return None
725735

726736

737+
def _generate_with_mistral(
    system_prompt: str,
    user_prompt: str,
    api_key: Optional[str] = None,
) -> Optional[str]:
    """
    Generate post using Mistral AI.

    This is a FREE tier provider - good quality and free.

    Args:
        system_prompt: System-role instructions for the model.
        user_prompt: The user-role request content.
        api_key: Optional per-call API key; falls back to the
            module-level MISTRAL_API_KEY from the environment.

    Returns:
        The generated text, or None when the SDK is not installed, no
        API key is available, the API call fails, or the response
        contains no choices.
    """
    if not MISTRAL_AVAILABLE:
        logger.error("Mistral package not installed")
        return None

    key = api_key or MISTRAL_API_KEY
    if not key:
        logger.warning("No Mistral API key available")
        return None

    try:
        client = Mistral(api_key=key)

        response = client.chat.complete(
            model=MISTRAL_MODEL,
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ],
            temperature=0.95,
            max_tokens=600,
        )

        # Guard against an empty choices list so a malformed response is
        # logged as a structured failure rather than surfacing as an
        # IndexError through the generic handler below.
        if not response.choices:
            logger.error("mistral_generation_failed", error="response contained no choices")
            return None

        return response.choices[0].message.content

    except Exception as e:
        logger.error("mistral_generation_failed", error=str(e))
        return None
775+
727776
# =============================================================================
728777
# TIER ENFORCEMENT & ROUTING
729778
# =============================================================================
@@ -796,6 +845,7 @@ async def generate_linkedin_post(
796845
groq_api_key: Optional[str] = None,
797846
openai_api_key: Optional[str] = None,
798847
anthropic_api_key: Optional[str] = None,
848+
mistral_api_key: Optional[str] = None,
799849
persona_context: Optional[str] = None,
800850
) -> Optional[GenerationResult]:
801851
"""
@@ -857,6 +907,10 @@ async def generate_linkedin_post(
857907
content = _generate_with_groq(system_prompt, user_prompt, groq_api_key)
858908
model_used = GROQ_MODEL
859909

910+
elif actual_provider == ModelProvider.MISTRAL:
911+
content = _generate_with_mistral(system_prompt, user_prompt, mistral_api_key)
912+
model_used = MISTRAL_MODEL
913+
860914
elif actual_provider == ModelProvider.OPENAI:
861915
content = _generate_with_openai(system_prompt, user_prompt, openai_api_key)
862916
model_used = OPENAI_MODEL

0 commit comments

Comments
 (0)