Skip to content

Updated Gemini and Claude models. #127

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 18 additions & 18 deletions electron/ConfigHelper.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
// ConfigHelper.ts
import fs from "node:fs"
import path from "node:path"
import { app } from "electron"
import { EventEmitter } from "events"
import fs from "node:fs"
import path from "node:path"
import { OpenAI } from "openai"

interface Config {
Expand All @@ -20,9 +20,9 @@ export class ConfigHelper extends EventEmitter {
private defaultConfig: Config = {
apiKey: "",
apiProvider: "gemini", // Default to Gemini
extractionModel: "gemini-2.0-flash", // Default to Flash for faster responses
solutionModel: "gemini-2.0-flash",
debuggingModel: "gemini-2.0-flash",
extractionModel: "gemini-2.5-flash", // Default to Flash for faster responses
solutionModel: "gemini-2.5-flash",
debuggingModel: "gemini-2.5-flash",
language: "python",
opacity: 1.0
};
Expand Down Expand Up @@ -68,19 +68,19 @@ export class ConfigHelper extends EventEmitter {
}
return model;
} else if (provider === "gemini") {
// Only allow gemini-1.5-pro and gemini-2.0-flash for Gemini
const allowedModels = ['gemini-1.5-pro', 'gemini-2.0-flash'];
// Only allow gemini-2.5-pro and gemini-2.5-flash for Gemini
const allowedModels = ['gemini-2.5-pro', 'gemini-2.5-flash'];
if (!allowedModels.includes(model)) {
console.warn(`Invalid Gemini model specified: ${model}. Using default model: gemini-2.0-flash`);
return 'gemini-2.0-flash'; // Changed default to flash
console.warn(`Invalid Gemini model specified: ${model}. Using default model: gemini-2.5-flash`);
return 'gemini-2.5-flash'; // Changed default to flash
}
return model;
} else if (provider === "anthropic") {
// Only allow Claude models
const allowedModels = ['claude-3-7-sonnet-20250219', 'claude-3-5-sonnet-20241022', 'claude-3-opus-20240229'];
const allowedModels = ['claude-sonnet-4-20250514', 'claude-3-7-sonnet-20250219', 'claude-opus-4-20250514'];
if (!allowedModels.includes(model)) {
console.warn(`Invalid Anthropic model specified: ${model}. Using default model: claude-3-7-sonnet-20250219`);
return 'claude-3-7-sonnet-20250219';
console.warn(`Invalid Anthropic model specified: ${model}. Using default model: claude-sonnet-4-20250514`);
return 'claude-sonnet-4-20250514';
}
return model;
}
Expand Down Expand Up @@ -175,13 +175,13 @@ export class ConfigHelper extends EventEmitter {
updates.solutionModel = "gpt-4o";
updates.debuggingModel = "gpt-4o";
} else if (updates.apiProvider === "anthropic") {
updates.extractionModel = "claude-3-7-sonnet-20250219";
updates.solutionModel = "claude-3-7-sonnet-20250219";
updates.debuggingModel = "claude-3-7-sonnet-20250219";
updates.extractionModel = "claude-sonnet-4-20250514";
updates.solutionModel = "claude-sonnet-4-20250514";
updates.debuggingModel = "claude-sonnet-4-20250514";
} else {
updates.extractionModel = "gemini-2.0-flash";
updates.solutionModel = "gemini-2.0-flash";
updates.debuggingModel = "gemini-2.0-flash";
updates.extractionModel = "gemini-2.5-flash";
updates.solutionModel = "gemini-2.5-flash";
updates.debuggingModel = "gemini-2.5-flash";
}
}

Expand Down
25 changes: 12 additions & 13 deletions electron/ProcessingHelper.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
// ProcessingHelper.ts
import fs from "node:fs"
import path from "node:path"
import { ScreenshotHelper } from "./ScreenshotHelper"
import { IProcessingHelperDeps } from "./main"
import Anthropic from '@anthropic-ai/sdk'
import * as axios from "axios"
import { app, BrowserWindow, dialog } from "electron"
import { BrowserWindow } from "electron"
import fs from "node:fs"
import { OpenAI } from "openai"
import { configHelper } from "./ConfigHelper"
import Anthropic from '@anthropic-ai/sdk';
import { ScreenshotHelper } from "./ScreenshotHelper"
import { IProcessingHelperDeps } from "./main"

// Interface for Gemini API requests
interface GeminiMessage {
Expand Down Expand Up @@ -545,7 +544,7 @@ export class ProcessingHelper {

// Make API request to Gemini
const response = await axios.default.post(
`https://generativelanguage.googleapis.com/v1beta/models/${config.extractionModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`,
`https://generativelanguage.googleapis.com/v1beta/models/${config.extractionModel || "gemini-2.5-flash"}:generateContent?key=${this.geminiApiKey}`,
{
contents: geminiMessages,
generationConfig: {
Expand Down Expand Up @@ -604,7 +603,7 @@ export class ProcessingHelper {
];

const response = await this.anthropicClient.messages.create({
model: config.extractionModel || "claude-3-7-sonnet-20250219",
model: config.extractionModel || "claude-sonnet-4-20250514",
max_tokens: 4000,
messages: messages,
temperature: 0.2
Expand Down Expand Up @@ -677,7 +676,7 @@ export class ProcessingHelper {
);
}
}

return { success: false, error: "Failed to process screenshots" };
} catch (error: any) {
// If the request was cancelled, don't retry
Expand Down Expand Up @@ -809,7 +808,7 @@ Your solution should be efficient, well-commented, and handle edge cases.

// Make API request to Gemini
const response = await axios.default.post(
`https://generativelanguage.googleapis.com/v1beta/models/${config.solutionModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`,
`https://generativelanguage.googleapis.com/v1beta/models/${config.solutionModel || "gemini-2.5-flash"}:generateContent?key=${this.geminiApiKey}`,
{
contents: geminiMessages,
generationConfig: {
Expand Down Expand Up @@ -858,7 +857,7 @@ Your solution should be efficient, well-commented, and handle edge cases.

// Send to Anthropic API
const response = await this.anthropicClient.messages.create({
model: config.solutionModel || "claude-3-7-sonnet-20250219",
model: config.solutionModel || "claude-sonnet-4-20250514",
max_tokens: 4000,
messages: messages,
temperature: 0.2
Expand Down Expand Up @@ -1130,7 +1129,7 @@ If you include code examples, use proper markdown code blocks with language spec
}

const response = await axios.default.post(
`https://generativelanguage.googleapis.com/v1beta/models/${config.debuggingModel || "gemini-2.0-flash"}:generateContent?key=${this.geminiApiKey}`,
`https://generativelanguage.googleapis.com/v1beta/models/${config.debuggingModel || "gemini-2.5-flash"}:generateContent?key=${this.geminiApiKey}`,
{
contents: geminiMessages,
generationConfig: {
Expand Down Expand Up @@ -1216,7 +1215,7 @@ If you include code examples, use proper markdown code blocks with language spec
}

const response = await this.anthropicClient.messages.create({
model: config.debuggingModel || "claude-3-7-sonnet-20250219",
model: config.debuggingModel || "claude-sonnet-4-20250514",
max_tokens: 4000,
messages: messages,
temperature: 0.2
Expand Down
82 changes: 40 additions & 42 deletions src/components/Settings/SettingsDialog.tsx
Original file line number Diff line number Diff line change
@@ -1,17 +1,15 @@
import { useState, useEffect } from "react";
import { useEffect, useState } from "react";
import { useToast } from "../../contexts/toast";
import { Button } from "../ui/button";
import {
Dialog,
DialogTrigger,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
DialogFooter,
DialogHeader,
DialogTitle
} from "../ui/dialog";
import { Input } from "../ui/input";
import { Button } from "../ui/button";
import { Settings } from "lucide-react";
import { useToast } from "../../contexts/toast";

type APIProvider = "openai" | "gemini" | "anthropic";

Expand Down Expand Up @@ -50,30 +48,30 @@ const modelCategories: ModelCategory[] = [
],
geminiModels: [
{
id: "gemini-1.5-pro",
name: "Gemini 1.5 Pro",
id: "gemini-2.5-pro",
name: "Gemini 2.5 Pro",
description: "Best overall performance for problem extraction"
},
{
id: "gemini-2.0-flash",
name: "Gemini 2.0 Flash",
id: "gemini-2.5-flash",
name: "Gemini 2.5 Flash",
description: "Faster, more cost-effective option"
}
],
anthropicModels: [
{
id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet",
id: "claude-sonnet-4-20250514",
name: "Claude Sonnet 4",
description: "Best overall performance for problem extraction"
},
{
id: "claude-3-5-sonnet-20241022",
name: "Claude 3.5 Sonnet",
id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet",
description: "Balanced performance and speed"
},
{
id: "claude-3-opus-20240229",
name: "Claude 3 Opus",
id: "claude-opus-4-20250514",
name: "Claude Opus 4",
description: "Top-level intelligence, fluency, and understanding"
}
]
Expand All @@ -96,30 +94,30 @@ const modelCategories: ModelCategory[] = [
],
geminiModels: [
{
id: "gemini-1.5-pro",
name: "Gemini 1.5 Pro",
id: "gemini-2.5-pro",
name: "Gemini 2.5 Pro",
description: "Strong overall performance for coding tasks"
},
{
id: "gemini-2.0-flash",
name: "Gemini 2.0 Flash",
id: "gemini-2.5-flash",
name: "Gemini 2.5 Flash",
description: "Faster, more cost-effective option"
}
],
anthropicModels: [
{
id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet",
id: "claude-sonnet-4-20250514",
name: "Claude Sonnet 4",
description: "Strong overall performance for coding tasks"
},
{
id: "claude-3-5-sonnet-20241022",
name: "Claude 3.5 Sonnet",
id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet",
description: "Balanced performance and speed"
},
{
id: "claude-3-opus-20240229",
name: "Claude 3 Opus",
id: "claude-opus-4-20250514",
name: "Claude Opus 4",
description: "Top-level intelligence, fluency, and understanding"
}
]
Expand All @@ -142,30 +140,30 @@ const modelCategories: ModelCategory[] = [
],
geminiModels: [
{
id: "gemini-1.5-pro",
name: "Gemini 1.5 Pro",
id: "gemini-2.5-pro",
name: "Gemini 2.5 Pro",
description: "Best for analyzing code and error messages"
},
{
id: "gemini-2.0-flash",
name: "Gemini 2.0 Flash",
id: "gemini-2.5-flash",
name: "Gemini 2.5 Flash",
description: "Faster, more cost-effective option"
}
],
anthropicModels: [
{
id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet",
id: "claude-sonnet-4-20250514",
name: "Claude Sonnet 4",
description: "Best for analyzing code and error messages"
},
{
id: "claude-3-5-sonnet-20241022",
name: "Claude 3.5 Sonnet",
id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet",
description: "Balanced performance and speed"
},
{
id: "claude-3-opus-20240229",
name: "Claude 3 Opus",
id: "claude-opus-4-20250514",
name: "Claude Opus 4",
description: "Top-level intelligence, fluency, and understanding"
}
]
Expand Down Expand Up @@ -244,9 +242,9 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia
setSolutionModel("gpt-4o");
setDebuggingModel("gpt-4o");
} else if (provider === "gemini") {
setExtractionModel("gemini-1.5-pro");
setSolutionModel("gemini-1.5-pro");
setDebuggingModel("gemini-1.5-pro");
setExtractionModel("gemini-2.5-pro");
setSolutionModel("gemini-2.5-pro");
setDebuggingModel("gemini-2.5-pro");
} else if (provider === "anthropic") {
setExtractionModel("claude-sonnet-4-20250514");
setSolutionModel("claude-sonnet-4-20250514");
Expand Down Expand Up @@ -362,7 +360,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia
/>
<div className="flex flex-col">
<p className="font-medium text-white text-sm">Gemini</p>
<p className="text-xs text-white/60">Gemini 1.5 models</p>
<p className="text-xs text-white/60">Gemini 2.5 models</p>
</div>
</div>
</div>
Expand All @@ -382,7 +380,7 @@ export function SettingsDialog({ open: externalOpen, onOpenChange }: SettingsDia
/>
<div className="flex flex-col">
<p className="font-medium text-white text-sm">Claude</p>
<p className="text-xs text-white/60">Claude 3 models</p>
<p className="text-xs text-white/60">Claude 4 models</p>
</div>
</div>
</div>
Expand Down