diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index 0dcd26f..022088b 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -544,6 +544,10 @@ export class ProcessingHelper { ]; // Make API request to Gemini + console.log("Gemini extraction request:", { + url: `https://generativelanguage.googleapis.com/v1beta/models/${config.extractionModel || "gemini-2.0-flash"}:generateContent`, + contents: geminiMessages, + }); const response = await axios.default.post( `https://generativelanguage.googleapis.com/v1beta/models/${config.extractionModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`, { @@ -557,6 +561,7 @@ export class ProcessingHelper { ); const responseData = response.data as GeminiResponse; + console.log("Gemini extraction response:", responseData); if (!responseData.candidates || responseData.candidates.length === 0) { throw new Error("Empty response from Gemini API"); @@ -567,11 +572,19 @@ export class ProcessingHelper { // Handle when Gemini might wrap the JSON in markdown code blocks const jsonText = responseText.replace(/```json|```/g, '').trim(); problemInfo = JSON.parse(jsonText); - } catch (error) { + } catch (error: any) { console.error("Error using Gemini API:", error); + let errorMessage = "Failed to process with Gemini API. Please check your API key or try again later."; + if (error.response) { + errorMessage = `Gemini API Error: ${error.response.status} ${error.response.statusText}. ${error.response.data?.error?.message || ''}`; + } else if (error.request) { + errorMessage = "Gemini API Error: No response received from the server. Please check your network connection."; + } else { + errorMessage = `Gemini API Error: ${error.message}`; + } return { success: false, - error: "Failed to process with Gemini API. Please check your API key or try again later." 
+ error: errorMessage }; } } else if (config.apiProvider === "anthropic") { @@ -808,6 +821,10 @@ Your solution should be efficient, well-commented, and handle edge cases. ]; // Make API request to Gemini + console.log("Gemini solution request:", { + url: `https://generativelanguage.googleapis.com/v1beta/models/${config.solutionModel || "gemini-2.0-flash"}:generateContent`, + contents: geminiMessages, + }); const response = await axios.default.post( `https://generativelanguage.googleapis.com/v1beta/models/${config.solutionModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`, { @@ -821,17 +838,26 @@ Your solution should be efficient, well-commented, and handle edge cases. ); const responseData = response.data as GeminiResponse; + console.log("Gemini solution response:", responseData); if (!responseData.candidates || responseData.candidates.length === 0) { throw new Error("Empty response from Gemini API"); } responseContent = responseData.candidates[0].content.parts[0].text; - } catch (error) { + } catch (error: any) { console.error("Error using Gemini API for solution:", error); + let errorMessage = "Failed to generate solution with Gemini API. Please check your API key or try again later."; + if (error.response) { + errorMessage = `Gemini API Error: ${error.response.status} ${error.response.statusText}. ${error.response.data?.error?.message || ''}`; + } else if (error.request) { + errorMessage = "Gemini API Error: No response received from the server. Please check your network connection."; + } else { + errorMessage = `Gemini API Error: ${error.message}`; + } return { success: false, - error: "Failed to generate solution with Gemini API. Please check your API key or try again later." 
+ error: errorMessage }; } } else if (config.apiProvider === "anthropic") { @@ -1129,6 +1155,10 @@ If you include code examples, use proper markdown code blocks with language spec }); } + console.log("Gemini debug request:", { + url: `https://generativelanguage.googleapis.com/v1beta/models/${config.debuggingModel || "gemini-2.0-flash"}:generateContent`, + contents: geminiMessages, + }); const response = await axios.default.post( `https://generativelanguage.googleapis.com/v1beta/models/${config.debuggingModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`, { @@ -1142,17 +1172,26 @@ If you include code examples, use proper markdown code blocks with language spec ); const responseData = response.data as GeminiResponse; + console.log("Gemini debug response:", responseData); if (!responseData.candidates || responseData.candidates.length === 0) { throw new Error("Empty response from Gemini API"); } debugContent = responseData.candidates[0].content.parts[0].text; - } catch (error) { + } catch (error: any) { console.error("Error using Gemini API for debugging:", error); + let errorMessage = "Failed to process debug request with Gemini API. Please check your API key or try again later."; + if (error.response) { + errorMessage = `Gemini API Error: ${error.response.status} ${error.response.statusText}. ${error.response.data?.error?.message || ''}`; + } else if (error.request) { + errorMessage = "Gemini API Error: No response received from the server. Please check your network connection."; + } else { + errorMessage = `Gemini API Error: ${error.message}`; + } return { success: false, - error: "Failed to process debug request with Gemini API. Please check your API key or try again later." 
+ error: errorMessage }; } } else if (config.apiProvider === "anthropic") { diff --git a/src/components/Settings/SettingsDialog.tsx b/src/components/Settings/SettingsDialog.tsx index 463ea12..83678e8 100644 --- a/src/components/Settings/SettingsDialog.tsx +++ b/src/components/Settings/SettingsDialog.tsx @@ -50,9 +50,9 @@ const modelCategories: ModelCategory[] = [ ], geminiModels: [ { - id: "gemini-1.5-pro", - name: "Gemini 1.5 Pro", - description: "Best overall performance for problem extraction" + id: "gemini-1.5-flash", + name: "Gemini 1.5 Flash", + description: "Fast and versatile multimodal model" }, { id: "gemini-2.0-flash", @@ -96,9 +96,9 @@ const modelCategories: ModelCategory[] = [ ], geminiModels: [ { - id: "gemini-1.5-pro", - name: "Gemini 1.5 Pro", - description: "Strong overall performance for coding tasks" + id: "gemini-1.5-flash", + name: "Gemini 1.5 Flash", + description: "Fast and versatile multimodal model" }, { id: "gemini-2.0-flash", @@ -142,9 +142,9 @@ const modelCategories: ModelCategory[] = [ ], geminiModels: [ { - id: "gemini-1.5-pro", - name: "Gemini 1.5 Pro", - description: "Best for analyzing code and error messages" + id: "gemini-1.5-flash", + name: "Gemini 1.5 Flash", + description: "Fast and versatile multimodal model" }, { id: "gemini-2.0-flash", @@ -244,9 +244,9 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia setSolutionModel("gpt-4o"); setDebuggingModel("gpt-4o"); } else if (provider === "gemini") { - setExtractionModel("gemini-1.5-pro"); - setSolutionModel("gemini-1.5-pro"); - setDebuggingModel("gemini-1.5-pro"); + setExtractionModel("gemini-1.5-flash"); + setSolutionModel("gemini-1.5-flash"); + setDebuggingModel("gemini-1.5-flash"); } else if (provider === "anthropic") { setExtractionModel("claude-3-7-sonnet-20250219"); setSolutionModel("claude-3-7-sonnet-20250219"); @@ -413,7 +413,8 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia

)}

- Your API key is stored locally and never sent to any server except {apiProvider === "openai" ? "OpenAI" : "Google"} + Your API key is stored locally and never sent to any server except {apiProvider === "openai" ? "OpenAI" : "Google"}. + {apiProvider === 'gemini' && <span> Note: Gemini is a Google product. You will need a Google AI Studio API key.</span>}

Don't have an API key?