Skip to content

Commit e2070d4

Browse files
authored
fix: support chat.models overriding the base configuration (#60)
1 parent 726e842 commit e2070d4

File tree

5 files changed

+69
-43
lines changed

5 files changed

+69
-43
lines changed

src/base/common/language-models/providers/TongyiProvider.ts

+17-6
Original file line numberDiff line numberDiff line change
@@ -79,14 +79,25 @@ export class TongyiLanguageModelProvider implements ILanguageModelProvider {
7979
private _newLLM(options: { [name: string]: any }) {
8080
const config = this.configService;
8181

82+
const {
83+
model,
84+
temperature,
85+
maxTokens,
86+
topP,
87+
apiKey = config.get<string>('tongyi.apiKey'),
88+
enableSearch = config.get('tongyi.enableSearch', true),
89+
clientOptions = {},
90+
} = options;
91+
8292
return new ChatAlibabaTongyi({
83-
alibabaApiKey: config.get<string>('tongyi.apiKey'),
93+
alibabaApiKey: apiKey,
8494
streaming: true,
85-
model: this._resolveChatModel(options.model),
86-
temperature: options.temperature,
87-
maxTokens: options.maxTokens,
88-
topP: options.topP,
89-
enableSearch: config.get('tongyi.enableSearch'),
95+
model: this._resolveChatModel(model),
96+
temperature: temperature,
97+
maxTokens: maxTokens,
98+
topP: topP,
99+
enableSearch: enableSearch,
100+
...clientOptions,
90101
});
91102
}
92103

src/base/common/language-models/providers/WenxinProvider.ts

+17-6
Original file line numberDiff line numberDiff line change
@@ -79,14 +79,25 @@ export class WenxinLanguageModelProvider implements ILanguageModelProvider {
7979
private _newLLM(options: { [name: string]: any }) {
8080
const config = this.configService;
8181

82+
const {
83+
model,
84+
temperature,
85+
penaltyScore,
86+
topP,
87+
apiKey = config.get<string>('qianfan.apiKey'),
88+
secretKey = config.get('qianfan.secretKey'),
89+
clientOptions = {},
90+
} = options;
91+
8292
return new ChatBaiduWenxin({
83-
baiduApiKey: config.get<string>('qianfan.apiKey'),
84-
baiduSecretKey: config.get<string>('qianfan.secretKey'),
93+
baiduApiKey: apiKey,
94+
baiduSecretKey: secretKey,
8595
streaming: true,
86-
model: this._resolveChatModel(options.model),
87-
temperature: options.temperature,
88-
topP: options.topP,
89-
penaltyScore: options.penaltyScore,
96+
model: this._resolveChatModel(model),
97+
temperature: temperature,
98+
topP: topP,
99+
penaltyScore: penaltyScore,
100+
...clientOptions,
90101
});
91102
}
92103

src/base/common/language-models/providers/anthropicProvider.ts

+16-6
Original file line numberDiff line numberDiff line change
@@ -78,15 +78,25 @@ export class AnthropicLanguageModelProvider implements ILanguageModelProvider {
7878

7979
private _newLLM(options: { [name: string]: any }) {
8080
const config = this.configService;
81+
const {
82+
model,
83+
baseURL = config.get('anthropic.baseURL'),
84+
apiKey = config.get('anthropic.apiKey'),
85+
temperature,
86+
maxTokens,
87+
topP,
88+
clientOptions = {}
89+
} = options;
8190

8291
return new ChatAnthropic({
83-
anthropicApiKey: config.get<string>('anthropic.apiKey'),
84-
anthropicApiUrl: config.get<string>('anthropic.baseURL'),
92+
anthropicApiKey: apiKey,
93+
anthropicApiUrl: baseURL,
8594
streaming: true,
86-
model: this._resolveChatModel(options.model),
87-
temperature: options.temperature,
88-
maxTokens: options.maxTokens,
89-
topP: options.topP,
95+
temperature,
96+
maxTokens,
97+
topP,
98+
model: this._resolveChatModel(model),
99+
...clientOptions,
90100
});
91101
}
92102

src/base/common/language-models/providers/openaiProvider.ts

+16-11
Original file line numberDiff line numberDiff line change
@@ -178,24 +178,29 @@ export class OpenAILanguageModelProvider implements ILanguageModelProvider {
178178
}
179179

180180
private _newLLM(options: { [name: string]: any }) {
181-
const { baseURL, apiKey, project, organization } = options;
182-
const configService = this.configService;
183-
184-
const apiType = configService.get<'openai' | 'azure'>('openai.apiType');
181+
const config = this.configService;
182+
const {
183+
baseURL = config.get('openai.baseURL'),
184+
apiKey = config.get('openai.apiKey'),
185+
project = config.get('openai.project'),
186+
organization = config.get('openai.organization'),
187+
deployment = organization,
188+
apiType = config.get<'openai' | 'azure'>('openai.apiType'),
189+
} = options;
185190

186191
if (apiType === 'azure') {
187192
return new AzureOpenAI({
188-
baseURL: baseURL || configService.get('openai.baseURL'),
189-
apiKey: apiKey || configService.get('openai.apiKey'),
190-
deployment: organization || configService.get('openai.organization'),
193+
baseURL: baseURL,
194+
apiKey: apiKey,
195+
deployment: deployment,
191196
});
192197
}
193198

194199
return new OpenAI({
195-
baseURL: configService.get('openai.baseURL'),
196-
project: project || configService.get('openai.project'),
197-
apiKey: apiKey || configService.get('openai.apiKey'),
198-
organization: organization || configService.get('openai.organization'),
200+
baseURL: baseURL,
201+
project: project,
202+
apiKey: apiKey,
203+
organization: organization,
199204
});
200205
}
201206

src/editor/views/chat/continue/continueViewProvider.ts

+3-14
Original file line numberDiff line numberDiff line change
@@ -235,17 +235,7 @@ export class ContinueViewProvider extends AbstractWebviewViewProvider implements
235235

236236
private async listModels(): Promise<IChatModelResource[]> {
237237
const resources = this.configService.getConfig<IChatModelResource[]>('chat.models');
238-
if (!resources) {
239-
return [];
240-
}
241-
242-
return resources.map(res => {
243-
return {
244-
title: res.title,
245-
provider: res.provider,
246-
model: res.model,
247-
};
248-
});
238+
return resources ?? [];
249239
}
250240

251241
readonly slashCommandsMap: Record<string, string> = {
@@ -324,16 +314,15 @@ export class ContinueViewProvider extends AbstractWebviewViewProvider implements
324314
const models = await this.listModels();
325315

326316
const title = event.data.title;
327-
const metadata = models.find(m => m.title === title);
317+
const resource = models.find(m => m.title === title);
328318

329319
const completionOptions = event.data.completionOptions;
330320

331321
await this.lm.chat(
332322
mapToChatMessages(event.data.messages),
333323
{
324+
...resource,
334325
...completionOptions,
335-
provider: metadata?.provider,
336-
model: metadata?.model,
337326
},
338327
{
339328
report(fragment) {

0 commit comments

Comments (0)