From 54886cf081245787c68e8f73fff576552ba88ebe Mon Sep 17 00:00:00 2001 From: Devashish Nayak Date: Mon, 7 Apr 2025 18:14:05 -0500 Subject: [PATCH 1/4] Added Claude AI --- electron/ConfigHelper.ts | 74 ++++++- electron/ProcessingHelper.ts | 216 ++++++++++++++++++++- package.json | 11 +- src/components/Settings/SettingsDialog.tsx | 109 ++++++++++- 4 files changed, 385 insertions(+), 25 deletions(-) diff --git a/electron/ConfigHelper.ts b/electron/ConfigHelper.ts index 5e02af2..8753742 100644 --- a/electron/ConfigHelper.ts +++ b/electron/ConfigHelper.ts @@ -7,7 +7,7 @@ import { OpenAI } from "openai" interface Config { apiKey: string; - apiProvider: "openai" | "gemini"; // Added provider selection + apiProvider: "openai" | "gemini" | "anthropic"; // Added provider selection extractionModel: string; solutionModel: string; debuggingModel: string; @@ -58,7 +58,7 @@ export class ConfigHelper extends EventEmitter { /** * Validate and sanitize model selection to ensure only allowed models are used */ - private sanitizeModelSelection(model: string, provider: "openai" | "gemini"): string { + private sanitizeModelSelection(model: string, provider: "openai" | "gemini" | "anthropic"): string { if (provider === "openai") { // Only allow gpt-4o and gpt-4o-mini for OpenAI const allowedModels = ['gpt-4o', 'gpt-4o-mini']; @@ -67,7 +67,7 @@ export class ConfigHelper extends EventEmitter { return 'gpt-4o'; } return model; - } else { + } else if (provider === "gemini") { // Only allow gemini-1.5-pro and gemini-2.0-flash for Gemini const allowedModels = ['gemini-1.5-pro', 'gemini-2.0-flash']; if (!allowedModels.includes(model)) { @@ -75,7 +75,17 @@ export class ConfigHelper extends EventEmitter { return 'gemini-2.0-flash'; // Changed default to flash } return model; + } else if (provider === "anthropic") { + // Only allow Claude models + const allowedModels = ['claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307']; + if (!allowedModels.includes(model)) { + console.warn(`Invalid Anthropic model specified: ${model}. 
Using default model: claude-3-opus-20240229`); + return 'claude-3-opus-20240229'; + } + return model; } + // Default fallback + return model; } public loadConfig(): Config { @@ -85,7 +95,7 @@ export class ConfigHelper extends EventEmitter { const config = JSON.parse(configData); // Ensure apiProvider is a valid value - if (config.apiProvider !== "openai" && config.apiProvider !== "gemini") { + if (config.apiProvider !== "openai" && config.apiProvider !== "gemini" && config.apiProvider !== "anthropic") { config.apiProvider = "gemini"; // Default to Gemini if invalid } @@ -146,6 +156,9 @@ export class ConfigHelper extends EventEmitter { if (updates.apiKey.trim().startsWith('sk-')) { provider = "openai"; console.log("Auto-detected OpenAI API key format"); + } else if (updates.apiKey.trim().startsWith('sk-ant-')) { + provider = "anthropic"; + console.log("Auto-detected Anthropic API key format"); } else { provider = "gemini"; console.log("Using Gemini API key format (default)"); @@ -161,6 +174,10 @@ export class ConfigHelper extends EventEmitter { updates.extractionModel = "gpt-4o"; updates.solutionModel = "gpt-4o"; updates.debuggingModel = "gpt-4o"; + } else if (updates.apiProvider === "anthropic") { + updates.extractionModel = "claude-3-opus-20240229"; + updates.solutionModel = "claude-3-opus-20240229"; + updates.debuggingModel = "claude-3-opus-20240229"; } else { updates.extractionModel = "gemini-2.0-flash"; updates.solutionModel = "gemini-2.0-flash"; @@ -208,11 +225,15 @@ export class ConfigHelper extends EventEmitter { /** * Validate the API key format */ - public isValidApiKeyFormat(apiKey: string, provider?: "openai" | "gemini"): boolean { + public isValidApiKeyFormat(apiKey: string, provider?: "openai" | "gemini" | "anthropic" ): boolean { // If provider is not specified, attempt to auto-detect if (!provider) { if (apiKey.trim().startsWith('sk-')) { - provider = "openai"; + if (apiKey.trim().startsWith('sk-ant-')) { + provider = "anthropic"; + } else { + provider = "openai"; + } } else { provider = "gemini"; } @@ -224,6 +245,9 @@ export class ConfigHelper extends EventEmitter { } else if (provider === "gemini") { // Basic format validation for Gemini API keys (usually alphanumeric with no specific prefix) return apiKey.trim().length >= 10; // Assuming Gemini keys are at least 10 chars + } else if (provider === "anthropic") { + // Basic format validation for Anthropic API keys + return /^sk-ant-[a-zA-Z0-9]{32,}$/.test(apiKey.trim()); } return false; @@ -264,12 +288,17 @@ export class ConfigHelper extends EventEmitter { /** * Test API key with the selected provider */ - public async testApiKey(apiKey: string, provider?: "openai" | "gemini"): Promise<{valid: boolean, error?: string}> { + public async testApiKey(apiKey: string, provider?: "openai" | "gemini" | "anthropic"): Promise<{valid: boolean, error?: string}> { // Auto-detect provider based on key format if not specified if (!provider) { if (apiKey.trim().startsWith('sk-')) { - provider = "openai"; - console.log("Auto-detected OpenAI API key format for testing"); + if (apiKey.trim().startsWith('sk-ant-')) { + provider = "anthropic"; + console.log("Auto-detected Anthropic API key format for testing"); + } else { + provider = "openai"; + console.log("Auto-detected OpenAI API key format for testing"); + } } else { provider = "gemini"; console.log("Using Gemini API key format for testing (default)"); @@ -280,6 +309,8 @@ export class ConfigHelper extends EventEmitter { return this.testOpenAIKey(apiKey); } else if (provider === "gemini") { 
return this.testGeminiKey(apiKey); + } else if (provider === "anthropic") { + return this.testAnthropicKey(apiKey); } return { valid: false, error: "Unknown API provider" }; @@ -338,6 +369,31 @@ export class ConfigHelper extends EventEmitter { return { valid: false, error: errorMessage }; } } + + /** + * Test Anthropic API key + * Note: This is a simplified implementation since we don't have the actual Anthropic client + */ + private async testAnthropicKey(apiKey: string): Promise<{valid: boolean, error?: string}> { + try { + // For now, we'll just do a basic check to ensure the key exists and has valid format + // In production, you would connect to the Anthropic API and validate the key + if (apiKey && /^sk-ant-[a-zA-Z0-9]{32,}$/.test(apiKey.trim())) { + // Here you would actually validate the key with an Anthropic API call + return { valid: true }; + } + return { valid: false, error: 'Invalid Anthropic API key format.' }; + } catch (error: any) { + console.error('Anthropic API key test failed:', error); + let errorMessage = 'Unknown error validating Anthropic API key'; + + if (error.message) { + errorMessage = `Error: ${error.message}`; + } + + return { valid: false, error: errorMessage }; + } + } } // Export a singleton instance diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index bfc5568..025f940 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -7,6 +7,7 @@ import * as axios from "axios" import { app, BrowserWindow, dialog } from "electron" import { OpenAI } from "openai" import { configHelper } from "./ConfigHelper" +import Anthropic from '@anthropic-ai/sdk'; // Interface for Gemini API requests interface GeminiMessage { @@ -30,12 +31,24 @@ interface GeminiResponse { finishReason: string; }>; } - +interface AnthropicMessage { + role: 'user' | 'assistant'; + content: Array<{ + type: 'text' | 'image'; + text?: string; + source?: { + type: 'base64'; + media_type: string; + data: string; + }; + }>; +} export class ProcessingHelper { private deps: IProcessingHelperDeps private screenshotHelper: ScreenshotHelper private openaiClient: OpenAI | null = null private geminiApiKey: string | null = null + private anthropicClient: Anthropic | null = null // AbortControllers for API requests private currentProcessingAbortController: AbortController | null = null @@ -75,7 +88,7 @@ export class ProcessingHelper { this.geminiApiKey = null; console.warn("No API key available, OpenAI client not initialized"); } - } else { + } else if (config.apiProvider === "gemini"){ // Gemini client initialization this.openaiClient = null; if (config.apiKey) { @@ -85,11 +98,27 @@ export class ProcessingHelper { this.geminiApiKey = null; console.warn("No API key available, Gemini client not initialized"); } + } else if (config.apiProvider === "anthropic") { + // Reset other clients + this.openaiClient = null; + this.geminiApiKey = null; + if (config.apiKey) { + this.anthropicClient = new Anthropic({ + apiKey: config.apiKey, + timeout: 60000, + maxRetries: 2 + }); + console.log("Anthropic client initialized successfully"); + } else { + this.anthropicClient = null; + console.warn("No API key available, Anthropic client not initialized"); + } } } catch (error) { console.error("Failed to initialize AI client:", error); this.openaiClient = null; this.geminiApiKey = null; + this.anthropicClient = null; } } @@ -187,6 +216,17 @@ export class ProcessingHelper { ); return; } + } else if (config.apiProvider === "anthropic" && !this.anthropicClient) { + // Add check for 
Anthropic client + this.initializeAIClient(); + + if (!this.anthropicClient) { + console.error("Anthropic client not initialized"); + mainWindow.webContents.send( + this.deps.PROCESSING_EVENTS.API_KEY_INVALID + ); + return; + } } const view = this.deps.getView() @@ -502,7 +542,7 @@ export class ProcessingHelper { error: "Failed to parse problem information. Please try again or use clearer screenshots." }; } - } else { + } else if (config.apiProvider === "gemini") { // Use Gemini API if (!this.geminiApiKey) { return { @@ -561,6 +601,52 @@ export class ProcessingHelper { error: "Failed to process with Gemini API. Please check your API key or try again later." }; } + } else if (config.apiProvider === "anthropic") { + if (!this.anthropicClient) { + return { + success: false, + error: "Anthropic API key not configured. Please check your settings." + }; + } + + try { + const messages = [ + { + role: "user" as const, + content: [ + { + type: "text" as const, + text: `Extract the coding problem details from these screenshots. Return in JSON format with these fields: problem_statement, constraints, example_input, example_output. Preferred coding language is ${language}.` + }, + ...imageDataList.map(data => ({ + type: "image" as const, + source: { + type: "base64" as const, + media_type: "image/png" as const, + data: data + } + })) + ] + } + ]; + + const response = await this.anthropicClient.messages.create({ + model: config.extractionModel || "claude-3-opus-20240229", + max_tokens: 4000, + messages: messages, + temperature: 0.2 + }); + + const responseText = (response.content[0] as { type: 'text', text: string }).text; + const jsonText = responseText.replace(/```json|```/g, '').trim(); + problemInfo = JSON.parse(jsonText); + } catch (error) { + console.error("Error using Anthropic API:", error); + return { + success: false, + error: "Failed to process with Anthropic API. Please check your API key or try again later." + }; + } } // Update the user on progress @@ -712,7 +798,7 @@ Your solution should be efficient, well-commented, and handle edge cases. }); responseContent = solutionResponse.choices[0].message.content; - } else { + } else if (config.apiProvider === "gemini") { // Gemini processing if (!this.geminiApiKey) { return { @@ -761,6 +847,44 @@ Your solution should be efficient, well-commented, and handle edge cases. error: "Failed to generate solution with Gemini API. Please check your API key or try again later." }; } + } else if (config.apiProvider === "anthropic") { + // Anthropic processing + if (!this.anthropicClient) { + return { + success: false, + error: "Anthropic API key not configured. Please check your settings." + }; + } + + try { + const messages = [ + { + role: "user" as const, + content: [ + { + type: "text" as const, + text: `You are an expert coding interview assistant. Provide a clear, optimal solution with detailed explanations for this problem:\n\n${promptText}` + } + ] + } + ]; + + // Send to Anthropic API + const response = await this.anthropicClient.messages.create({ + model: config.solutionModel || "claude-3-opus-20240229", + max_tokens: 4000, + messages: messages, + temperature: 0.2 + }); + + responseContent = (response.content[0] as { type: 'text', text: string }).text; + } catch (error) { + console.error("Error using Anthropic API for solution:", error); + return { + success: false, + error: "Failed to generate solution with Anthropic API. Please check your API key or try again later." 
+ }; + } } // Extract parts from the response @@ -776,13 +900,13 @@ Your solution should be efficient, well-commented, and handle edge cases. // Extract bullet points or numbered items const bulletPoints = thoughtsMatch[1].match(/(?:^|\n)\s*(?:[-*•]|\d+\.)\s*(.*)/g); if (bulletPoints) { - thoughts = bulletPoints.map(point => + thoughts = bulletPoints.map((point: string) => point.replace(/^\s*(?:[-*•]|\d+\.)\s*/, '').trim() ).filter(Boolean); } else { // If no bullet points found, split by newlines and filter empty lines thoughts = thoughtsMatch[1].split('\n') - .map(line => line.trim()) + .map((line: string) => line.trim()) .filter(Boolean); } } @@ -949,7 +1073,7 @@ If you include code examples, use proper markdown code blocks with language spec }); debugContent = debugResponse.choices[0].message.content; - } else { + } else if (config.apiProvider === "gemini") { if (!this.geminiApiKey) { return { success: false, @@ -1030,8 +1154,84 @@ If you include code examples, use proper markdown code blocks with language spec error: "Failed to process debug request with Gemini API. Please check your API key or try again later." }; } + } else if (config.apiProvider === "anthropic") { + if (!this.anthropicClient) { + return { + success: false, + error: "Anthropic API key not configured. Please check your settings." + }; + } + + try { + const debugPrompt = ` +You are a coding interview assistant helping debug and improve solutions. Analyze these screenshots which include either error messages, incorrect outputs, or test cases, and provide detailed debugging help. + +I'm solving this coding problem: "${problemInfo.problem_statement}" in ${language}. I need help with debugging or improving my solution. + +YOUR RESPONSE MUST FOLLOW THIS EXACT STRUCTURE WITH THESE SECTION HEADERS: +### Issues Identified +- List each issue as a bullet point with clear explanation + +### Specific Improvements and Corrections +- List specific code changes needed as bullet points + +### Optimizations +- List any performance optimizations if applicable + +### Explanation of Changes Needed +Here provide a clear explanation of why the changes are needed + +### Key Points +- Summary bullet points of the most important takeaways + +If you include code examples, use proper markdown code blocks with language specification. +`; + + const messages = [ + { + role: "user" as const, + content: [ + { + type: "text" as const, + text: debugPrompt + }, + ...imageDataList.map(data => ({ + type: "image" as const, + source: { + type: "base64" as const, + media_type: "image/png" as const, + data: data + } + })) + ] + } + ]; + + if (mainWindow) { + mainWindow.webContents.send("processing-status", { + message: "Analyzing code and generating debug feedback with Claude...", + progress: 60 + }); + } + + const response = await this.anthropicClient.messages.create({ + model: config.debuggingModel || "claude-3-opus-20240229", + max_tokens: 4000, + messages: messages, + temperature: 0.2 + }); + + debugContent = (response.content[0] as { type: 'text', text: string }).text; + } catch (error) { + console.error("Error using Anthropic API for debugging:", error); + return { + success: false, + error: "Failed to process debug request with Anthropic API. Please check your API key or try again later." 
+ }; + } } + if (mainWindow) { mainWindow.webContents.send("processing-status", { message: "Debug analysis complete", @@ -1057,7 +1257,7 @@ If you include code examples, use proper markdown code blocks with language spec const bulletPoints = formattedDebugContent.match(/(?:^|\n)[ ]*(?:[-*•]|\d+\.)[ ]+([^\n]+)/g); const thoughts = bulletPoints - ? bulletPoints.map(point => point.replace(/^[ ]*(?:[-*•]|\d+\.)[ ]+/, '').trim()).slice(0, 5) + ? bulletPoints.map((point: string) => point.replace(/^[ ]*(?:[-*•]|\d+\.)[ ]+/, '').trim()).slice(0, 5) : ["Debug analysis based on your screenshots"]; const response = { diff --git a/package.json b/package.json index 1f593c8..ae4c60d 100644 --- a/package.json +++ b/package.json @@ -109,11 +109,18 @@ "main": "dist-electron/main.js" } }, - "keywords": ["interview", "coding", "interview prep", "technical interview", "tool"], + "keywords": [ + "interview", + "coding", + "interview prep", + "technical interview", + "tool" + ], "author": "Interview Coder Contributors", "license": "AGPL-3.0-or-later", "description": "An invisible desktop application to help you pass your technical interviews.", "dependencies": { + "@anthropic-ai/sdk": "^0.39.0", "@electron/notarize": "^2.3.0", "@emotion/react": "^11.11.0", "@emotion/styled": "^11.11.0", @@ -187,4 +194,4 @@ "last 1 safari version" ] } -} \ No newline at end of file +} diff --git a/src/components/Settings/SettingsDialog.tsx b/src/components/Settings/SettingsDialog.tsx index 9f0229b..8a0cb5e 100644 --- a/src/components/Settings/SettingsDialog.tsx +++ b/src/components/Settings/SettingsDialog.tsx @@ -13,7 +13,7 @@ import { Button } from "../ui/button"; import { Settings } from "lucide-react"; import { useToast } from "../../contexts/toast"; -type APIProvider = "openai" | "gemini"; +type APIProvider = "openai" | "gemini" | "anthropic"; type AIModel = { id: string; @@ -27,6 +27,7 @@ type ModelCategory = { description: string; openaiModels: AIModel[]; geminiModels: AIModel[]; + anthropicModels: AIModel[]; }; // Define available models for each category @@ -58,6 +59,23 @@ const modelCategories: ModelCategory[] = [ name: "Gemini 2.0 Flash", description: "Faster, more cost-effective option" } + ], + anthropicModels: [ + { + id: "claude-3-opus-20240229", + name: "Claude 3 Opus", + description: "Best overall performance for problem extraction" + }, + { + id: "claude-3-sonnet-20240229", + name: "Claude 3 Sonnet", + description: "Balanced performance and speed" + }, + { + id: "claude-3-haiku-20240307", + name: "Claude 3 Haiku", + description: "Fastest, most cost-effective option" + } ] }, { @@ -87,6 +105,23 @@ const modelCategories: ModelCategory[] = [ name: "Gemini 2.0 Flash", description: "Faster, more cost-effective option" } + ], + anthropicModels: [ + { + id: "claude-3-opus-20240229", + name: "Claude 3 Opus", + description: "Strong overall performance for coding tasks" + }, + { + id: "claude-3-sonnet-20240229", + name: "Claude 3 Sonnet", + description: "Balanced performance and speed" + }, + { + id: "claude-3-haiku-20240307", + name: "Claude 3 Haiku", + description: "Faster, more cost-effective option" + } ] }, { @@ -116,6 +151,23 @@ const modelCategories: ModelCategory[] = [ name: "Gemini 2.0 Flash", description: "Faster, more cost-effective option" } + ], + anthropicModels: [ + { + id: "claude-3-opus-20240229", + name: "Claude 3 Opus", + description: "Best for analyzing code and error messages" + }, + { + id: "claude-3-sonnet-20240229", + name: "Claude 3 Sonnet", + description: "Balanced performance and speed" + }, + { 
+ id: "claude-3-haiku-20240307", + name: "Claude 3 Haiku", + description: "Faster, more cost-effective option" + } ] } ]; @@ -191,10 +243,14 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia setExtractionModel("gpt-4o"); setSolutionModel("gpt-4o"); setDebuggingModel("gpt-4o"); - } else { + } else if (provider === "gemini") { setExtractionModel("gemini-1.5-pro"); setSolutionModel("gemini-1.5-pro"); setDebuggingModel("gemini-1.5-pro"); + } else if (provider === "anthropic") { + setExtractionModel("claude-3-opus-20240229"); + setSolutionModel("claude-3-opus-20240229"); + setDebuggingModel("claude-3-opus-20240229"); } }; @@ -310,19 +366,45 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia +
+                  {/* Anthropic provider card (surrounding JSX markup was lost in extraction):
+                      onClick={() => handleProviderChange("anthropic")}, rendered alongside the
+                      OpenAI and Gemini cards with the labels "Claude" and "Claude 3 models" */}
                  onChange={(e) => setApiKey(e.target.value)}
-                  placeholder={apiProvider === "openai" ? "sk-..." : "Enter your Gemini API key"}
+                  placeholder={
+                    apiProvider === "openai" ? "sk-..." :
+                    apiProvider === "gemini" ? "Enter your Gemini API key" :
+                    "sk-ant-..."
+                  }
                  className="bg-black/50 border-white/10 text-white"
                />
                {apiKey && (
@@ -347,7 +429,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia
                      3. Create a new secret key and paste it here
-                  ) : (
+                  ) : apiProvider === "gemini" ? (
                    <>
                      1. Create an account at
                      2. Go to the section
                      3. Create a new API key and paste it here
+                  {/* Added Anthropic branch (markup lost in extraction): sign up for an account, open the
+                      API keys section, then create a new API key and paste it here */}
+                  )}
@@ -414,7 +508,10 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia {modelCategories.map((category) => { // Get the appropriate model list based on selected provider - const models = apiProvider === "openai" ? category.openaiModels : category.geminiModels; + const models = + apiProvider === "openai" ? category.openaiModels : + apiProvider === "gemini" ? category.geminiModels : + category.anthropicModels; return (
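Two notes on patch 1. First, the key auto-detection in updateConfig checks startsWith('sk-') before startsWith('sk-ant-'), so an Anthropic key (which also begins with "sk-") is routed to the OpenAI branch there; isValidApiKeyFormat and testApiKey avoid this by nesting the 'sk-ant-' check inside the 'sk-' branch. Second, testAnthropicKey is a format-only check, as its own comment concedes. Below is a minimal sketch (not part of the patch) of a live round-trip validation; it assumes the @anthropic-ai/sdk package added in this patch and uses a one-token messages.create call against Claude 3 Haiku as the cheapest probe.

    // Sketch only: validate an Anthropic key with a real API round trip.
    import Anthropic from '@anthropic-ai/sdk';

    async function testAnthropicKeyLive(apiKey: string): Promise<{ valid: boolean; error?: string }> {
      try {
        const client = new Anthropic({ apiKey, timeout: 10000, maxRetries: 0 });
        // A one-token request is enough to confirm the key is accepted by the API.
        await client.messages.create({
          model: 'claude-3-haiku-20240307',
          max_tokens: 1,
          messages: [{ role: 'user', content: 'ping' }]
        });
        return { valid: true };
      } catch (error: any) {
        if (error?.status === 401) {
          return { valid: false, error: 'Invalid Anthropic API key. Please check it and try again.' };
        }
        return { valid: false, error: error?.message ?? 'Unknown error validating Anthropic API key' };
      }
    }

Dropping something like this into ConfigHelper.testAnthropicKey would make Anthropic keys behave like the other providers, at the cost of one billable single-token request per validation.
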
From 27961e057de47f01421c7d6823a142d0ab92298f Mon Sep 17 00:00:00 2001 From: Devashish Nayak Date: Tue, 8 Apr 2025 10:07:31 -0500 Subject: [PATCH 2/4] Reverted explicit type annotation --- electron/ProcessingHelper.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index 025f940..9fe27de 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -900,13 +900,13 @@ Your solution should be efficient, well-commented, and handle edge cases. // Extract bullet points or numbered items const bulletPoints = thoughtsMatch[1].match(/(?:^|\n)\s*(?:[-*•]|\d+\.)\s*(.*)/g); if (bulletPoints) { - thoughts = bulletPoints.map((point: string) => + thoughts = bulletPoints.map(point => point.replace(/^\s*(?:[-*•]|\d+\.)\s*/, '').trim() ).filter(Boolean); } else { // If no bullet points found, split by newlines and filter empty lines thoughts = thoughtsMatch[1].split('\n') - .map((line: string) => line.trim()) + .map((line) => line.trim()) .filter(Boolean); } } @@ -1257,7 +1257,7 @@ If you include code examples, use proper markdown code blocks with language spec const bulletPoints = formattedDebugContent.match(/(?:^|\n)[ ]*(?:[-*•]|\d+\.)[ ]+([^\n]+)/g); const thoughts = bulletPoints - ? bulletPoints.map((point: string) => point.replace(/^[ ]*(?:[-*•]|\d+\.)[ ]+/, '').trim()).slice(0, 5) + ? bulletPoints.map(point => point.replace(/^[ ]*(?:[-*•]|\d+\.)[ ]+/, '').trim()).slice(0, 5) : ["Debug analysis based on your screenshots"]; const response = { From a484e779019c932256dc77ed0d66f84361b80c5d Mon Sep 17 00:00:00 2001 From: Devashish Nayak Date: Tue, 8 Apr 2025 11:17:35 -0500 Subject: [PATCH 3/4] Explicit handling for claude ai --- electron/ProcessingHelper.ts | 48 +++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index 9fe27de..083cfc8 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -640,8 +640,22 @@ export class ProcessingHelper { const responseText = (response.content[0] as { type: 'text', text: string }).text; const jsonText = responseText.replace(/```json|```/g, '').trim(); problemInfo = JSON.parse(jsonText); - } catch (error) { + } catch (error: any) { console.error("Error using Anthropic API:", error); + + // Add specific handling for Claude's limitations + if (error.status === 429) { + return { + success: false, + error: "Claude API rate limit exceeded. Please wait a few minutes before trying again." + }; + } else if (error.status === 413 || (error.message && error.message.includes("token"))) { + return { + success: false, + error: "Your screenshots contain too much information for Claude to process. Switch to OpenAI or Gemini in settings which can handle larger inputs." + }; + } + return { success: false, error: "Failed to process with Anthropic API. Please check your API key or try again later." @@ -878,8 +892,22 @@ Your solution should be efficient, well-commented, and handle edge cases. }); responseContent = (response.content[0] as { type: 'text', text: string }).text; - } catch (error) { + } catch (error: any) { console.error("Error using Anthropic API for solution:", error); + + // Add specific handling for Claude's limitations + if (error.status === 429) { + return { + success: false, + error: "Claude API rate limit exceeded. Please wait a few minutes before trying again." 
+ }; + } else if (error.status === 413 || (error.message && error.message.includes("token"))) { + return { + success: false, + error: "Your screenshots contain too much information for Claude to process. Switch to OpenAI or Gemini in settings which can handle larger inputs." + }; + } + return { success: false, error: "Failed to generate solution with Anthropic API. Please check your API key or try again later." @@ -1222,8 +1250,22 @@ If you include code examples, use proper markdown code blocks with language spec }); debugContent = (response.content[0] as { type: 'text', text: string }).text; - } catch (error) { + } catch (error: any) { console.error("Error using Anthropic API for debugging:", error); + + // Add specific handling for Claude's limitations + if (error.status === 429) { + return { + success: false, + error: "Claude API rate limit exceeded. Please wait a few minutes before trying again." + }; + } else if (error.status === 413 || (error.message && error.message.includes("token"))) { + return { + success: false, + error: "Your screenshots contain too much information for Claude to process. Switch to OpenAI or Gemini in settings which can handle larger inputs." + }; + } + return { success: false, error: "Failed to process debug request with Anthropic API. Please check your API key or try again later." From 750e05e2e8747f5625c943f4d7583824e679201d Mon Sep 17 00:00:00 2001 From: Devashish Nayak Date: Tue, 8 Apr 2025 11:24:46 -0500 Subject: [PATCH 4/4] Ensures consistent behavior --- electron/ProcessingHelper.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index 083cfc8..6b3e317 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -82,20 +82,25 @@ export class ProcessingHelper { maxRetries: 2 // Retry up to 2 times }); this.geminiApiKey = null; + this.anthropicClient = null; console.log("OpenAI client initialized successfully"); } else { this.openaiClient = null; this.geminiApiKey = null; + this.anthropicClient = null; console.warn("No API key available, OpenAI client not initialized"); } } else if (config.apiProvider === "gemini"){ // Gemini client initialization this.openaiClient = null; + this.anthropicClient = null; if (config.apiKey) { this.geminiApiKey = config.apiKey; console.log("Gemini API key set successfully"); } else { + this.openaiClient = null; this.geminiApiKey = null; + this.anthropicClient = null; console.warn("No API key available, Gemini client not initialized"); } } else if (config.apiProvider === "anthropic") { @@ -110,6 +115,8 @@ export class ProcessingHelper { }); console.log("Anthropic client initialized successfully"); } else { + this.openaiClient = null; + this.geminiApiKey = null; this.anthropicClient = null; console.warn("No API key available, Anthropic client not initialized"); }
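
Patch 3 repeats the same Claude-specific error mapping (429 rate limit, 413 / token-limit overflow) in the extraction, solution, and debugging paths. A small helper along these lines (hypothetical, not part of the series) would keep the three catch blocks in sync; it reuses the exact user-facing strings introduced by patch 3.

    // Hypothetical helper: map Anthropic SDK errors to the user-facing messages used in patch 3.
    function mapAnthropicError(error: any, fallback: string): { success: false; error: string } {
      if (error?.status === 429) {
        return {
          success: false,
          error: "Claude API rate limit exceeded. Please wait a few minutes before trying again."
        };
      }
      if (error?.status === 413 || (error?.message && error.message.includes("token"))) {
        return {
          success: false,
          error: "Your screenshots contain too much information for Claude to process. Switch to OpenAI or Gemini in settings which can handle larger inputs."
        };
      }
      return { success: false, error: fallback };
    }

    // Each catch block would then reduce to:
    // } catch (error: any) {
    //   console.error("Error using Anthropic API:", error);
    //   return mapAnthropicError(error, "Failed to process with Anthropic API. Please check your API key or try again later.");
    // }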