Skip to content

Feature/xai base url overrides #273

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Mar 14, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,4 @@ packages/docs/.env.local
packages/docs/.env.development.local
packages/docs/.env.test.local
packages/docs/.env.production.local
mcp.server.setup.json
2 changes: 2 additions & 0 deletions mycoder.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ export default {
//provider: 'ollama',
//model: 'medragondot/Sky-T1-32B-Preview:latest',
//model: 'llama3.2:3b',
//provider: 'xai',
//model: 'grok-2-latest',
maxTokens: 4096,
temperature: 0.7,

Expand Down
5 changes: 2 additions & 3 deletions packages/agent/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
# [mycoder-agent-v1.3.1](https://github.com/drivecore/mycoder/compare/mycoder-agent-v1.3.0...mycoder-agent-v1.3.1) (2025-03-13)


### Bug Fixes

* redo ollama llm provider using ollama sdk ([586fe82](https://github.com/drivecore/mycoder/commit/586fe827d048aa6c13675ba838bd50309b3980e2))
* update Ollama provider to use official npm package API correctly ([738a84a](https://github.com/drivecore/mycoder/commit/738a84aff560076e4ad24129f5dc9bf09d304ffa))
- redo ollama llm provider using ollama sdk ([586fe82](https://github.com/drivecore/mycoder/commit/586fe827d048aa6c13675ba838bd50309b3980e2))
- update Ollama provider to use official npm package API correctly ([738a84a](https://github.com/drivecore/mycoder/commit/738a84aff560076e4ad24129f5dc9bf09d304ffa))

# [mycoder-agent-v1.3.0](https://github.com/drivecore/mycoder/compare/mycoder-agent-v1.2.0...mycoder-agent-v1.3.0) (2025-03-12)

Expand Down
13 changes: 0 additions & 13 deletions packages/agent/src/core/llm/__tests__/openai.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -69,19 +69,6 @@ describe('OpenAIProvider', () => {
expect(provider.model).toBe('gpt-4');
});

it('should throw error if API key is missing', () => {
// Clear environment variable
const originalKey = process.env.OPENAI_API_KEY;
delete process.env.OPENAI_API_KEY;

expect(() => new OpenAIProvider('gpt-4')).toThrow(
'OpenAI API key is required',
);

// Restore environment variable
process.env.OPENAI_API_KEY = originalKey;
});

it('should generate text and handle tool calls', async () => {
const response = await provider.generateText({
messages: [
Expand Down
72 changes: 49 additions & 23 deletions packages/agent/src/core/llm/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,41 +35,67 @@ export interface LLMProvider {
generateText(options: GenerateOptions): Promise<LLMResponse>;
}

/**
 * Static configuration describing a single LLM provider: which environment
 * variable holds its API key, where its setup docs live, its default base URL
 * and default model, and a factory that builds the concrete provider instance.
 */
export type ProviderConfig = {
  /** Name of the environment variable holding the API key (omitted for local providers). */
  keyName?: string;
  /** Link to the provider's setup documentation, used when reporting missing-key errors. */
  docsUrl?: string;
  /** Default API endpoint; callers may override it via ProviderOptions.baseUrl. */
  baseUrl?: string;
  /** Default model used when the caller does not specify one. */
  model: string;
  /** Constructs the concrete LLMProvider for the given model and options. */
  factory: (model: string, options: ProviderOptions) => LLMProvider;
};

// Provider factory registry
const providerFactories: Record<
string,
(model: string, options: ProviderOptions) => LLMProvider
> = {
anthropic: (model, options) => new AnthropicProvider(model, options),
openai: (model, options) => new OpenAIProvider(model, options),
ollama: (model, options) => new OllamaProvider(model, options),
// Registry of built-in providers, keyed by provider name.
// Note: gpustack and xai reuse the OpenAI-compatible provider with a custom baseUrl.
export const providerConfig: Record<string, ProviderConfig> = {
  anthropic: {
    keyName: 'ANTHROPIC_API_KEY',
    docsUrl: 'https://mycoder.ai/docs/provider/anthropic',
    model: 'claude-3-7-sonnet-20250219',
    factory: (model, options) => new AnthropicProvider(model, options),
  },
  openai: {
    keyName: 'OPENAI_API_KEY',
    docsUrl: 'https://mycoder.ai/docs/provider/openai',
    model: 'gpt-4o-2024-05-13',
    factory: (model, options) => new OpenAIProvider(model, options),
  },
  // Local OpenAI-compatible server; no API key required (no keyName).
  gpustack: {
    docsUrl: 'https://mycoder.ai/docs/provider/local-openai',
    model: 'llama3.2',
    baseUrl: 'http://localhost:80',
    factory: (model, options) => new OpenAIProvider(model, options),
  },
  // Local Ollama server; no API key required (no keyName).
  ollama: {
    docsUrl: 'https://mycoder.ai/docs/provider/ollama',
    model: 'llama3.2',
    baseUrl: 'http://localhost:11434',
    factory: (model, options) => new OllamaProvider(model, options),
  },
  // xAI (Grok) exposes an OpenAI-compatible endpoint, so it reuses OpenAIProvider.
  xai: {
    keyName: 'XAI_API_KEY',
    docsUrl: 'https://mycoder.ai/docs/provider/xai',
    baseUrl: 'https://api.x.ai/v1',
    model: 'grok-2-latest',
    factory: (model, options) => new OpenAIProvider(model, options),
  },
};

/**
* Create a provider instance
*/
export function createProvider(
providerType: string,
model: string,
provider: string,
model?: string,
options: ProviderOptions = {},
): LLMProvider {
const factory = providerFactories[providerType.toLowerCase()];
const config = providerConfig[provider];

if (!factory) {
if (!config) {
throw new Error(
`Provider '${providerType}' not found. Available providers: ${Object.keys(providerFactories).join(', ')}`,
`Provider '${provider}' not found. Available providers: ${Object.keys(providerConfig).join(', ')}`,
);
}

return factory(model, options);
}

/**
* Register a new provider implementation
*/
export function registerProvider(
providerType: string,
factory: (model: string, options: ProviderOptions) => LLMProvider,
): void {
providerFactories[providerType.toLowerCase()] = factory;
return config.factory(model ?? config.model, {
...options,
baseUrl: options.baseUrl ?? config.baseUrl,
});
}
2 changes: 1 addition & 1 deletion packages/agent/src/core/llm/providers/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ export class AnthropicProvider implements LLMProvider {

constructor(model: string, options: AnthropicOptions = {}) {
this.model = model;
this.apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || '';
this.apiKey = options.apiKey ?? '';
this.baseUrl = options.baseUrl;

if (!this.apiKey) {
Expand Down
1 change: 0 additions & 1 deletion packages/agent/src/core/llm/providers/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,6 @@ export class OllamaProvider implements LLMProvider {
}

return response.message.tool_calls.map((toolCall: OllamaTooCall) => {
//console.log('ollama tool call', toolCall);
return {
id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 11)}`,
name: toolCall.function?.name,
Expand Down
8 changes: 1 addition & 7 deletions packages/agent/src/core/llm/providers/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,19 +42,13 @@ export class OpenAIProvider implements LLMProvider {

constructor(model: string, options: OpenAIOptions = {}) {
this.model = model;
this.apiKey = options.apiKey || process.env.OPENAI_API_KEY || '';
this.apiKey = options.apiKey ?? '';
this.baseUrl = options.baseUrl;
this.organization = options.organization || process.env.OPENAI_ORGANIZATION;

if (!this.apiKey) {
throw new Error('OpenAI API key is required');
}

// Initialize OpenAI client
this.client = new OpenAI({
apiKey: this.apiKey,
...(this.baseUrl && { baseURL: this.baseUrl }),
...(this.organization && { organization: this.organization }),
});
}

Expand Down
4 changes: 1 addition & 3 deletions packages/agent/src/core/toolAgent/config.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@ import { describe, expect, it } from 'vitest';

import { createProvider } from '../llm/provider.js';

import { getModel } from './config.js';

describe('createProvider', () => {
it('should return the correct model for anthropic', () => {
const model = createProvider('anthropic', 'claude-3-7-sonnet-20250219', {
Expand All @@ -27,7 +25,7 @@ describe('createProvider', () => {
});

it('should return the correct model for ollama with custom base URL', () => {
const model = getModel('ollama', 'llama3', {
const model = createProvider('ollama', 'llama3', {
ollamaBaseUrl: 'http://custom-ollama:11434',
});
expect(model).toBeDefined();
Expand Down
29 changes: 0 additions & 29 deletions packages/agent/src/core/toolAgent/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

import { createProvider, LLMProvider } from '../llm/provider.js';
import { ToolContext } from '../types';

/**
Expand All @@ -18,34 +17,6 @@ export type AgentConfig = {
getSystemPrompt: (toolContext: ToolContext) => string;
};

/**
* Get the model instance based on provider and model name
*/
export function getModel(
provider: ModelProvider,
model: string,
options?: { ollamaBaseUrl?: string },
): LLMProvider {
switch (provider) {
case 'anthropic':
return createProvider('anthropic', model);
case 'openai':
return createProvider('openai', model);
case 'ollama':
if (options?.ollamaBaseUrl) {
return createProvider('ollama', model, {
baseUrl: options.ollamaBaseUrl,
});
}
return createProvider('ollama', model);
/*case 'xai':
return createProvider('xai', model);
case 'mistral':
return createProvider('mistral', model);*/
default:
throw new Error(`Unknown model provider: ${provider}`);
}
}
/**
* Default configuration for the tool agent
*/
Expand Down
5 changes: 4 additions & 1 deletion packages/agent/src/core/toolAgent/toolAgentCore.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,10 @@ export const toolAgent = async (
const systemPrompt = config.getSystemPrompt(localContext);

// Create the LLM provider
const provider = createProvider(localContext.provider, localContext.model);
const provider = createProvider(localContext.provider, localContext.model, {
baseUrl: context.baseUrl,
apiKey: context.apiKey,
});

for (let i = 0; i < config.maxIterations; i++) {
logger.verbose(
Expand Down
4 changes: 3 additions & 1 deletion packages/agent/src/core/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@ export type ToolContext = {
userPrompt?: boolean;
agentId?: string; // Unique identifier for the agent, used for background tool tracking
provider: ModelProvider;
model: string;
model?: string;
baseUrl?: string;
apiKey?: string;
maxTokens: number;
temperature: number;
backgroundTools: BackgroundTools;
Expand Down
25 changes: 2 additions & 23 deletions packages/agent/src/utils/errors.ts
Original file line number Diff line number Diff line change
@@ -1,27 +1,6 @@
// Provider configuration map
export const providerConfig: Record<
string,
{ keyName: string; docsUrl: string } | undefined
> = {
anthropic: {
keyName: 'ANTHROPIC_API_KEY',
docsUrl: 'https://mycoder.ai/docs/getting-started/anthropic',
},
openai: {
keyName: 'OPENAI_API_KEY',
docsUrl: 'https://mycoder.ai/docs/getting-started/openai',
},
/*xai: {
keyName: 'XAI_API_KEY',
docsUrl: 'https://mycoder.ai/docs/getting-started/xai',
},
mistral: {
keyName: 'MISTRAL_API_KEY',
docsUrl: 'https://mycoder.ai/docs/getting-started/mistral',
},*/
// No API key needed for ollama as it uses a local server
ollama: undefined,
};

import { providerConfig } from '../core/llm/provider';

/**
* Generates a provider-specific API key error message
Expand Down
3 changes: 1 addition & 2 deletions packages/cli/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
# [mycoder-v1.3.1](https://github.com/drivecore/mycoder/compare/mycoder-v1.3.0...mycoder-v1.3.1) (2025-03-13)


### Bug Fixes

* redo ollama llm provider using ollama sdk ([586fe82](https://github.com/drivecore/mycoder/commit/586fe827d048aa6c13675ba838bd50309b3980e2))
- redo ollama llm provider using ollama sdk ([586fe82](https://github.com/drivecore/mycoder/commit/586fe827d048aa6c13675ba838bd50309b3980e2))

# [mycoder-v1.3.0](https://github.com/drivecore/mycoder/compare/mycoder-v1.2.0...mycoder-v1.3.0) (2025-03-12)

Expand Down
6 changes: 6 additions & 0 deletions packages/cli/mycoder.config.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
// mycoder.config.js
import config from '../../mycoder.config.js';

export default {
...config,
};
2 changes: 1 addition & 1 deletion packages/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,8 @@
"license": "MIT",
"dependencies": {
"@sentry/node": "^9.3.0",
"c12": "^3.0.2",
"chalk": "^5",
"cosmiconfig": "^9.0.0",
"deepmerge": "^4.3.1",
"dotenv": "^16",
"mycoder-agent": "workspace:*",
Expand Down
40 changes: 18 additions & 22 deletions packages/cli/src/commands/$default.ts
Original file line number Diff line number Diff line change
Expand Up @@ -108,35 +108,29 @@ export async function executePrompt(
const providerSettings =
providerConfig[config.provider as keyof typeof providerConfig];

if (providerSettings) {
const { keyName } = providerSettings;
if (!providerSettings) {
// Unknown provider
logger.info(`Unknown provider: ${config.provider}`);
throw new Error(`Unknown provider: ${config.provider}`);
}

// First check if the API key is in the config
const configApiKey = config[keyName as keyof typeof config] as string;
const { keyName } = providerSettings;
let apiKey: string | undefined = undefined;
if (keyName) {
// Then fall back to environment variable
const envApiKey = process.env[keyName];
// Use config key if available, otherwise use env key
const apiKey = configApiKey || envApiKey;

apiKey = process.env[keyName];
if (!apiKey) {
logger.error(getProviderApiKeyError(config.provider));
throw new Error(`${config.provider} API key not found`);
}
}

// If we're using a key from config, set it as an environment variable
// This ensures it's available to the provider libraries
if (configApiKey && !envApiKey) {
process.env[keyName] = configApiKey;
logger.info(`Using ${keyName} from configuration`);
}
} else if (config.provider === 'ollama') {
logger.info(`LLM: ${config.provider}/${config.model}`);
if (config.baseUrl) {
// For Ollama, we check if the base URL is set
logger.info(`Using Ollama with base URL: ${config.ollamaBaseUrl}`);
} else {
// Unknown provider
logger.info(`Unknown provider: ${config.provider}`);
throw new Error(`Unknown provider: ${config.provider}`);
logger.info(`Using base url: ${config.baseUrl}`);
}
console.log();

// Add the standard suffix to all prompts
prompt += [
Expand Down Expand Up @@ -176,10 +170,12 @@ export async function executePrompt(
tokenCache: config.tokenCache,
userPrompt: config.userPrompt,
provider: config.provider as ModelProvider,
baseUrl: config.baseUrl,
model: config.model,
maxTokens: config.maxTokens,
temperature: config.temperature,
backgroundTools,
apiKey,
});

const output =
Expand Down Expand Up @@ -216,8 +212,8 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
},
handler: async (argv) => {
// Get configuration for model provider and name
const config = await loadConfig(getConfigFromArgv(argv));

const argvConfig = getConfigFromArgv(argv);
const config = await loadConfig(argvConfig);
let prompt: string | undefined;

// If promptFile is specified, read from file
Expand Down
Loading
Loading