diff --git a/electron/ConfigHelper.ts b/electron/ConfigHelper.ts index 6d1d2db..b43785e 100644 --- a/electron/ConfigHelper.ts +++ b/electron/ConfigHelper.ts @@ -7,7 +7,8 @@ import { OpenAI } from "openai" interface Config { apiKey: string; - apiProvider: "openai" | "gemini" | "anthropic"; // Added provider selection + apiProvider: "openai" | "gemini" | "anthropic" | "openai-compatible"; // Added provider selection + baseUrl?: string; // Optional custom API URL extractionModel: string; solutionModel: string; debuggingModel: string; @@ -20,6 +21,7 @@ export class ConfigHelper extends EventEmitter { private defaultConfig: Config = { apiKey: "", apiProvider: "gemini", // Default to Gemini + baseUrl: "", // Optional custom API URL extractionModel: "gemini-2.0-flash", // Default to Flash for faster responses solutionModel: "gemini-2.0-flash", debuggingModel: "gemini-2.0-flash", @@ -58,7 +60,7 @@ export class ConfigHelper extends EventEmitter { /** * Validate and sanitize model selection to ensure only allowed models are used */ - private sanitizeModelSelection(model: string, provider: "openai" | "gemini" | "anthropic"): string { + private sanitizeModelSelection(model: string, provider: "openai" | "gemini" | "anthropic" | "openai-compatible"): string { if (provider === "openai") { // Only allow gpt-4o and gpt-4o-mini for OpenAI const allowedModels = ['gpt-4o', 'gpt-4o-mini']; @@ -95,7 +97,7 @@ export class ConfigHelper extends EventEmitter { const config = JSON.parse(configData); // Ensure apiProvider is a valid value - if (config.apiProvider !== "openai" && config.apiProvider !== "gemini" && config.apiProvider !== "anthropic") { + if (config.apiProvider !== "openai" && config.apiProvider !== "gemini" && config.apiProvider !== "anthropic" && config.apiProvider !== "openai-compatible") { config.apiProvider = "gemini"; // Default to Gemini if invalid } @@ -175,6 +177,10 @@ export class ConfigHelper extends EventEmitter { updates.solutionModel = "gpt-4o"; 
updates.debuggingModel = "gpt-4o"; } else if (updates.apiProvider === "anthropic") { + updates.extractionModel = "claude-3-7-sonnet-20250219"; + updates.solutionModel = "claude-3-7-sonnet-20250219"; + updates.debuggingModel = "claude-3-7-sonnet-20250219"; + } else if (updates.apiProvider === "openai-compatible") { updates.extractionModel = "claude-3-7-sonnet-20250219"; updates.solutionModel = "claude-3-7-sonnet-20250219"; updates.debuggingModel = "claude-3-7-sonnet-20250219"; @@ -311,6 +317,8 @@ export class ConfigHelper extends EventEmitter { return this.testGeminiKey(apiKey); } else if (provider === "anthropic") { return this.testAnthropicKey(apiKey); + } else if (provider === "openai-compatible") { + return this.testOpenAIKey(apiKey); } return { valid: false, error: "Unknown API provider" }; @@ -396,5 +404,6 @@ export class ConfigHelper extends EventEmitter { } } + // Export a singleton instance export const configHelper = new ConfigHelper(); diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index 0dcd26f..e906759 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -74,12 +74,13 @@ export class ProcessingHelper { try { const config = configHelper.loadConfig(); - if (config.apiProvider === "openai") { + if (config.apiProvider === "openai" || config.apiProvider === "openai-compatible") { if (config.apiKey) { this.openaiClient = new OpenAI({ apiKey: config.apiKey, timeout: 60000, // 60 second timeout - maxRetries: 2 // Retry up to 2 times + maxRetries: 2, // Retry up to 2 times + baseURL: config.apiProvider === "openai-compatible" ?
config.baseUrl : undefined, }); this.geminiApiKey = null; this.anthropicClient = null; @@ -460,7 +461,7 @@ export class ProcessingHelper { let problemInfo; - if (config.apiProvider === "openai") { + if (config.apiProvider === "openai" || config.apiProvider === "openai-compatible") { // Verify OpenAI client if (!this.openaiClient) { this.initializeAIClient(); // Try to reinitialize @@ -506,8 +507,13 @@ export class ProcessingHelper { try { const responseText = extractionResponse.choices[0].message.content; // Handle when OpenAI might wrap the JSON in markdown code blocks - const jsonText = responseText.replace(/```json|```/g, '').trim(); - problemInfo = JSON.parse(jsonText); + const jsonRegex = /```json\n([\s\S]*?)\n```/; + const match = responseText.match(jsonRegex); + if (match && match[1]) { + problemInfo = JSON.parse(match[1]); + } else { + problemInfo = JSON.parse(responseText.trim()); + } } catch (error) { console.error("Error parsing OpenAI response:", error); return { @@ -764,7 +768,7 @@ Your solution should be efficient, well-commented, and handle edge cases.
let responseContent; - if (config.apiProvider === "openai") { + if (config.apiProvider === "openai" || config.apiProvider === "openai-compatible") { // OpenAI processing if (!this.openaiClient) { return { diff --git a/src/components/Settings/SettingsDialog.tsx b/src/components/Settings/SettingsDialog.tsx index 463ea12..b7692c9 100644 --- a/src/components/Settings/SettingsDialog.tsx +++ b/src/components/Settings/SettingsDialog.tsx @@ -13,7 +13,7 @@ import { Button } from "../ui/button"; import { Settings } from "lucide-react"; import { useToast } from "../../contexts/toast"; -type APIProvider = "openai" | "gemini" | "anthropic"; +type APIProvider = "openai" | "gemini" | "anthropic" | "openai-compatible"; type AIModel = { id: string; @@ -181,6 +181,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia const [open, setOpen] = useState(externalOpen || false); const [apiKey, setApiKey] = useState(""); const [apiProvider, setApiProvider] = useState("openai"); + const [baseUrl, setBaseUrl] = useState(""); const [extractionModel, setExtractionModel] = useState("gpt-4o"); const [solutionModel, setSolutionModel] = useState("gpt-4o"); const [debuggingModel, setDebuggingModel] = useState("gpt-4o"); @@ -210,6 +211,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia interface Config { apiKey?: string; apiProvider?: APIProvider; + baseUrl?: string; extractionModel?: string; solutionModel?: string; debuggingModel?: string; @@ -220,6 +222,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia .then((config: Config) => { setApiKey(config.apiKey || ""); setApiProvider(config.apiProvider || "openai"); + setBaseUrl(config.baseUrl || ""); setExtractionModel(config.extractionModel || "gpt-4o"); setSolutionModel(config.solutionModel || "gpt-4o"); setDebuggingModel(config.debuggingModel || "gpt-4o"); @@ -251,6 +254,11 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: 
SettingsDia setExtractionModel("claude-3-7-sonnet-20250219"); setSolutionModel("claude-3-7-sonnet-20250219"); setDebuggingModel("claude-3-7-sonnet-20250219"); + } else if (provider === "openai-compatible") { + // For OpenAI-compatible APIs, we'll keep the current models or set defaults + if (!extractionModel) setExtractionModel("gpt-3.5-turbo"); + if (!solutionModel) setSolutionModel("gpt-3.5-turbo"); + if (!debuggingModel) setDebuggingModel("gpt-3.5-turbo"); } }; @@ -260,6 +268,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia const result = await window.electronAPI.updateConfig({ apiKey, apiProvider, + baseUrl: apiProvider === "openai-compatible" ? baseUrl : "", extractionModel, solutionModel, debuggingModel, @@ -325,9 +334,9 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia {/* API Provider Selection */}
-
+
+
handleProviderChange("openai-compatible")} + > +
+
+
+

OpenAI Compatible

+

Open Router, etc.

+
+
+
setApiKey(e.target.value)} placeholder={ - apiProvider === "openai" ? "sk-..." : + apiProvider === "openai" ? "sk-..." : apiProvider === "gemini" ? "Enter your Gemini API key" : - "sk-ant-..." + apiProvider === "anthropic" ? "sk-ant-..." : + "Enter your API key" } className="bg-black/50 border-white/10 text-white" /> @@ -413,11 +444,36 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia

)}

- Your API key is stored locally and never sent to any server except {apiProvider === "openai" ? "OpenAI" : "Google"} + Your API key is stored locally and never sent to any server except { + apiProvider === "openai" ? "OpenAI" : + apiProvider === "gemini" ? "Google" : + apiProvider === "anthropic" ? "Anthropic" : + "the API provider you specified" + }

+ + {apiProvider === "openai-compatible" && ( +
+ + setBaseUrl(e.target.value)} + placeholder="https://openrouter.ai/api/v1" + className="bg-black/50 border-white/10 text-white" + /> +

+ Enter the base URL for the OpenAI-compatible API (e.g., Open Router) +

+
+ )} +

Don't have an API key?

- {apiProvider === "openai" ? ( + {apiProvider === "openai" && ( <>

1. Create an account at +

+

2. Go to the section +

+

3. Create a new API key and paste it here

+ + )}
@@ -512,6 +583,18 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia apiProvider === "openai" ? category.openaiModels : apiProvider === "gemini" ? category.geminiModels : category.anthropicModels; + + // Determine which state to use based on category key + const currentValue = + category.key === 'extractionModel' ? extractionModel : + category.key === 'solutionModel' ? solutionModel : + debuggingModel; + + // Determine which setter function to use + const setValue = + category.key === 'extractionModel' ? setExtractionModel : + category.key === 'solutionModel' ? setSolutionModel : + setDebuggingModel; return (
@@ -521,19 +604,18 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia

{category.description}

- {models.map((m) => { - // Determine which state to use based on category key - const currentValue = - category.key === 'extractionModel' ? extractionModel : - category.key === 'solutionModel' ? solutionModel : - debuggingModel; - - // Determine which setter function to use - const setValue = - category.key === 'extractionModel' ? setExtractionModel : - category.key === 'solutionModel' ? setSolutionModel : - setDebuggingModel; - + {apiProvider === "openai-compatible" && ( + setValue(e.target.value)} + placeholder="gpt-3.5-turbo" + className="bg-black/50 border-white/10 text-white" + /> + )} + + {apiProvider !== "openai-compatible" && models.map((m) => { return (