diff --git a/electron/ConfigHelper.ts b/electron/ConfigHelper.ts index 6d1d2db..52c90a8 100644 --- a/electron/ConfigHelper.ts +++ b/electron/ConfigHelper.ts @@ -1,8 +1,8 @@ // ConfigHelper.ts -import fs from "node:fs" -import path from "node:path" import { app } from "electron" import { EventEmitter } from "events" +import fs from "node:fs" +import path from "node:path" import { OpenAI } from "openai" interface Config { @@ -20,9 +20,9 @@ export class ConfigHelper extends EventEmitter { private defaultConfig: Config = { apiKey: "", apiProvider: "gemini", // Default to Gemini - extractionModel: "gemini-2.0-flash", // Default to Flash for faster responses - solutionModel: "gemini-2.0-flash", - debuggingModel: "gemini-2.0-flash", + extractionModel: "gemini-2.5-flash", // Default to Flash for faster responses + solutionModel: "gemini-2.5-flash", + debuggingModel: "gemini-2.5-flash", language: "python", opacity: 1.0 }; @@ -68,19 +68,19 @@ export class ConfigHelper extends EventEmitter { } return model; } else if (provider === "gemini") { - // Only allow gemini-1.5-pro and gemini-2.0-flash for Gemini - const allowedModels = ['gemini-1.5-pro', 'gemini-2.0-flash']; + // Only allow gemini-2.5-pro and gemini-2.5-flash for Gemini + const allowedModels = ['gemini-2.5-pro', 'gemini-2.5-flash']; if (!allowedModels.includes(model)) { - console.warn(`Invalid Gemini model specified: ${model}. Using default model: gemini-2.0-flash`); - return 'gemini-2.0-flash'; // Changed default to flash + console.warn(`Invalid Gemini model specified: ${model}. Using default model: gemini-2.5-flash`); + return 'gemini-2.5-flash'; // Changed default to flash } return model; } else if (provider === "anthropic") { // Only allow Claude models - const allowedModels = ['claude-3-7-sonnet-20250219', 'claude-3-5-sonnet-20241022', 'claude-3-opus-20240229']; + const allowedModels = ['claude-sonnet-4-20250514', 'claude-3-7-sonnet-20250219', 'claude-opus-4-20250514']; if (!allowedModels.includes(model)) { - console.warn(`Invalid Anthropic model specified: ${model}. Using default model: claude-3-7-sonnet-20250219`); - return 'claude-3-7-sonnet-20250219'; + console.warn(`Invalid Anthropic model specified: ${model}. Using default model: claude-sonnet-4-20250514`); + return 'claude-sonnet-4-20250514'; } return model; } @@ -175,13 +175,13 @@ export class ConfigHelper extends EventEmitter { updates.solutionModel = "gpt-4o"; updates.debuggingModel = "gpt-4o"; } else if (updates.apiProvider === "anthropic") { - updates.extractionModel = "claude-3-7-sonnet-20250219"; - updates.solutionModel = "claude-3-7-sonnet-20250219"; - updates.debuggingModel = "claude-3-7-sonnet-20250219"; + updates.extractionModel = "claude-sonnet-4-20250514"; + updates.solutionModel = "claude-sonnet-4-20250514"; + updates.debuggingModel = "claude-sonnet-4-20250514"; } else { - updates.extractionModel = "gemini-2.0-flash"; - updates.solutionModel = "gemini-2.0-flash"; - updates.debuggingModel = "gemini-2.0-flash"; + updates.extractionModel = "gemini-2.5-flash"; + updates.solutionModel = "gemini-2.5-flash"; + updates.debuggingModel = "gemini-2.5-flash"; } }
diff --git a/electron/ProcessingHelper.ts b/electron/ProcessingHelper.ts index 0dcd26f..d982423 100644 --- a/electron/ProcessingHelper.ts +++ b/electron/ProcessingHelper.ts @@ -1,13 +1,12 @@ // ProcessingHelper.ts -import fs from "node:fs" -import path from "node:path" -import { ScreenshotHelper } from "./ScreenshotHelper" -import { IProcessingHelperDeps } from "./main" +import Anthropic from '@anthropic-ai/sdk'
import * as axios from "axios" -import { app, BrowserWindow, dialog } from "electron" +import { BrowserWindow } from "electron" +import fs from "node:fs" import { OpenAI } from "openai" import { configHelper } from "./ConfigHelper" -import Anthropic from '@anthropic-ai/sdk'; +import { ScreenshotHelper } from "./ScreenshotHelper" +import { IProcessingHelperDeps } from "./main" // Interface for Gemini API requests interface GeminiMessage { @@ -545,7 +544,7 @@ export class ProcessingHelper { // Make API request to Gemini const response = await axios.default.post( - `https://generativelanguage.googleapis.com/v1beta/models/${config.extractionModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`, + `https://generativelanguage.googleapis.com/v1beta/models/${config.extractionModel || "gemini-2.5-flash"}:generateContent?key=${this.geminiApiKey}`, { contents: geminiMessages, generationConfig: { @@ -604,7 +603,7 @@ export class ProcessingHelper { ]; const response = await this.anthropicClient.messages.create({ - model: config.extractionModel || "claude-3-7-sonnet-20250219", + model: config.extractionModel || "claude-sonnet-4-20250514", max_tokens: 4000, messages: messages, temperature: 0.2 @@ -677,7 +676,7 @@ export class ProcessingHelper { ); } } - + return { success: false, error: "Failed to process screenshots" }; } catch (error: any) { // If the request was cancelled, don't retry @@ -809,7 +808,7 @@ Your solution should be efficient, well-commented, and handle edge cases. 
// Make API request to Gemini const response = await axios.default.post( - `https://generativelanguage.googleapis.com/v1beta/models/${config.solutionModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`, + `https://generativelanguage.googleapis.com/v1beta/models/${config.solutionModel || "gemini-2.5-flash"}:generateContent?key=${this.geminiApiKey}`, { contents: geminiMessages, generationConfig: { @@ -858,7 +857,7 @@ Your solution should be efficient, well-commented, and handle edge cases. // Send to Anthropic API const response = await this.anthropicClient.messages.create({ - model: config.solutionModel || "claude-3-7-sonnet-20250219", + model: config.solutionModel || "claude-sonnet-4-20250514", max_tokens: 4000, messages: messages, temperature: 0.2 @@ -1130,7 +1129,7 @@ If you include code examples, use proper markdown code blocks with language spec } const response = await axios.default.post( - `https://generativelanguage.googleapis.com/v1beta/models/${config.debuggingModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`, + `https://generativelanguage.googleapis.com/v1beta/models/${config.debuggingModel || "gemini-2.5-flash"}:generateContent?key=${this.geminiApiKey}`, { contents: geminiMessages, generationConfig: { @@ -1216,7 +1215,7 @@ If you include code examples, use proper markdown code blocks with language spec } const response = await this.anthropicClient.messages.create({ - model: config.debuggingModel || "claude-3-7-sonnet-20250219", + model: config.debuggingModel || "claude-sonnet-4-20250514", max_tokens: 4000, messages: messages, temperature: 0.2 diff --git a/src/components/Settings/SettingsDialog.tsx b/src/components/Settings/SettingsDialog.tsx index 463ea12..92ade74 100644 --- a/src/components/Settings/SettingsDialog.tsx +++ b/src/components/Settings/SettingsDialog.tsx @@ -1,17 +1,15 @@ -import { useState, useEffect } from "react"; +import { useEffect, useState } from "react"; +import { useToast } from 
"../../contexts/toast"; +import { Button } from "../ui/button"; import { Dialog, -  DialogTrigger, DialogContent, DialogDescription, -  DialogHeader, -  DialogTitle, DialogFooter, +  DialogHeader, +  DialogTitle } from "../ui/dialog"; import { Input } from "../ui/input"; -import { Button } from "../ui/button"; -import { Settings } from "lucide-react"; -import { useToast } from "../../contexts/toast"; type APIProvider = "openai" | "gemini" | "anthropic"; @@ -50,30 +48,30 @@ const modelCategories: ModelCategory[] = [ ], geminiModels: [ { - id: "gemini-1.5-pro", - name: "Gemini 1.5 Pro", + id: "gemini-2.5-pro", + name: "Gemini 2.5 Pro", description: "Best overall performance for problem extraction" }, { - id: "gemini-2.0-flash", - name: "Gemini 2.0 Flash", + id: "gemini-2.5-flash", + name: "Gemini 2.5 Flash", description: "Faster, more cost-effective option" } ], anthropicModels: [ { - id: "claude-3-7-sonnet-20250219", - name: "Claude 3.7 Sonnet", + id: "claude-sonnet-4-20250514", + name: "Claude Sonnet 4", description: "Best overall performance for problem extraction" }, { - id: "claude-3-5-sonnet-20241022", - name: "Claude 3.5 Sonnet", + id: "claude-3-7-sonnet-20250219", + name: "Claude 3.7 Sonnet", description: "Balanced performance and speed" }, { - id: "claude-3-opus-20240229", - name: "Claude 3 Opus", + id: "claude-opus-4-20250514", + name: "Claude Opus 4", description: "Top-level intelligence, fluency, and understanding" } ] @@ -96,30 +94,30 @@ const modelCategories: ModelCategory[] = [ ], geminiModels: [ { - id: "gemini-1.5-pro", - name: "Gemini 1.5 Pro", + id: "gemini-2.5-pro", + name: "Gemini 2.5 Pro", description: "Strong overall performance for coding tasks" }, { - id: "gemini-2.0-flash", - name: "Gemini 2.0 Flash", + id: "gemini-2.5-flash", + name: "Gemini 2.5 Flash", description: "Faster, more cost-effective option" } ], anthropicModels: [ { - id: "claude-3-7-sonnet-20250219", - name: "Claude 3.7 Sonnet", + id: "claude-sonnet-4-20250514", + name: "Claude Sonnet 4", description: "Strong overall performance for coding tasks" }, { - id: "claude-3-5-sonnet-20241022", - name: "Claude 3.5 Sonnet", + id: "claude-3-7-sonnet-20250219", + name: "Claude 3.7 Sonnet", description: "Balanced performance and speed" }, { - id: "claude-3-opus-20240229", - name: "Claude 3 Opus", + id: "claude-opus-4-20250514", + name: "Claude Opus 4", description: "Top-level intelligence, fluency, and understanding" } ] @@ -142,30 +140,30 @@ const modelCategories: ModelCategory[] = [ ], geminiModels: [ { - id: "gemini-1.5-pro", - name: "Gemini 1.5 Pro", + id: "gemini-2.5-pro", + name: "Gemini 2.5 Pro", description: "Best for analyzing code and error messages" }, { - id: "gemini-2.0-flash", - name: "Gemini 2.0 Flash", + id: "gemini-2.5-flash", + name: "Gemini 2.5 Flash", description: "Faster, more cost-effective option" } ], anthropicModels: [ { - id: "claude-3-7-sonnet-20250219", - name: "Claude 3.7 Sonnet", + id: "claude-sonnet-4-20250514", + name: "Claude Sonnet 4", description: "Best for analyzing code and error messages" }, { - id: "claude-3-5-sonnet-20241022", - name: "Claude 3.5 Sonnet", + id: "claude-3-7-sonnet-20250219", + name: "Claude 3.7 Sonnet", description: "Balanced performance and speed" }, { - id: "claude-3-opus-20240229", - name: "Claude 3 Opus", + id: "claude-opus-4-20250514", + name: "Claude Opus 4", description: "Top-level intelligence, fluency, and understanding" } ] @@ -244,9 +242,9 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia setSolutionModel("gpt-4o"); setDebuggingModel("gpt-4o"); } else if (provider === "gemini") { - setExtractionModel("gemini-1.5-pro"); - setSolutionModel("gemini-1.5-pro"); - setDebuggingModel("gemini-1.5-pro"); + setExtractionModel("gemini-2.5-pro"); + setSolutionModel("gemini-2.5-pro"); + setDebuggingModel("gemini-2.5-pro"); } else if (provider === "anthropic") { setExtractionModel("claude-3-7-sonnet-20250219"); setSolutionModel("claude-3-7-sonnet-20250219"); @@ -362,7 +360,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia />

Gemini - Gemini 1.5 models + Gemini 2.5 models @@ -382,7 +380,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia /> Claude - Claude 3 models + Claude 4 models