# Update Models

The chatbot template ships with [xAI](https://sdk.vercel.ai/providers/ai-sdk-providers/xai) as the default model provider. Since the template is powered by the [AI SDK](https://sdk.vercel.ai), which supports [multiple providers](https://sdk.vercel.ai/providers/ai-sdk-providers) out of the box, you can easily switch to another provider of your choice.

To update the models, you will need to update the custom provider called `myProvider` at `/lib/ai/models.ts` shown below.

```ts
import { customProvider, extractReasoningMiddleware, wrapLanguageModel } from "ai";
import { xai } from "@ai-sdk/xai";
import { fireworks } from "@ai-sdk/fireworks";
import { openai } from "@ai-sdk/openai";

export const myProvider = customProvider({
  languageModels: {
    "chat-model": xai("grok-2-1212"),
    // Wrap the reasoning model so the <think>...</think> tags it emits
    // are surfaced as reasoning instead of plain text.
    "chat-model-reasoning": wrapLanguageModel({
      model: fireworks("accounts/fireworks/models/deepseek-r1"),
      middleware: extractReasoningMiddleware({ tagName: "think" }),
    }),
    "title-model": xai("grok-2-1212"),
    "artifact-model": xai("grok-2-1212"),
  },
  imageModels: {
    "small-model": openai.image("dall-e-2"),
    "large-model": openai.image("dall-e-3"),
  },
});
```
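The keys under `languageModels` and `imageModels` act as ids that application code resolves through `myProvider`. As a minimal sketch of how one of these ids is used (the import path and prompt are illustrative, not lifted from the template's route handlers):

```ts
import { streamText } from "ai";
// Import path assumed from the file location mentioned above.
import { myProvider } from "@/lib/ai/models";

// Resolve the model registered under the "chat-model" id and stream a reply.
const result = streamText({
  model: myProvider.languageModel("chat-model"),
  prompt: "Write a haiku about model providers.",
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}
```

Because calling code references an id rather than a concrete provider, swapping the underlying model in `myProvider` is the only change needed at the call site.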
You can replace the models with any other provider of your choice. You will need to install the provider library and switch the models accordingly.

For example, if you want to use Anthropic's `claude-3-5-sonnet` model for `chat-model`, you can replace the `xai` model with the `anthropic` model as shown below.

```ts
import { customProvider, extractReasoningMiddleware, wrapLanguageModel } from "ai";
import { anthropic } from "@ai-sdk/anthropic";
import { xai } from "@ai-sdk/xai";
import { fireworks } from "@ai-sdk/fireworks";
import { openai } from "@ai-sdk/openai";

export const myProvider = customProvider({
  languageModels: {
    "chat-model": anthropic("claude-3-5-sonnet"), // Replace xai with anthropic
    "chat-model-reasoning": wrapLanguageModel({
      model: fireworks("accounts/fireworks/models/deepseek-r1"),
      middleware: extractReasoningMiddleware({ tagName: "think" }),
    }),
    "title-model": xai("grok-2-1212"),
    "artifact-model": xai("grok-2-1212"),
  },
  imageModels: {
    "small-model": openai.image("dall-e-2"),
    "large-model": openai.image("dall-e-3"),
  },
});
```