forked from mckaywrigley/chatbot-ui
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
commit f4ec4df (0 parents)
Showing 271 changed files with 34,589 additions and 0 deletions.
@@ -0,0 +1,9 @@
# Supabase Public
NEXT_PUBLIC_SUPABASE_URL=
NEXT_PUBLIC_SUPABASE_ANON_KEY=

# Supabase Private
SUPABASE_SERVICE_ROLE_KEY=

# Ollama
NEXT_PUBLIC_OLLAMA_URL=http://localhost:11434 # Default
@@ -0,0 +1,25 @@
{
  "$schema": "https://json.schemastore.org/eslintrc",
  "root": true,
  "extends": [
    "next/core-web-vitals",
    "prettier",
    "plugin:tailwindcss/recommended"
  ],
  "plugins": ["tailwindcss"],
  "rules": {
    "tailwindcss/no-custom-classname": "off"
  },
  "settings": {
    "tailwindcss": {
      "callees": ["cn", "cva"],
      "config": "tailwind.config.js"
    }
  },
  "overrides": [
    {
      "files": ["*.ts", "*.tsx"],
      "parser": "@typescript-eslint/parser"
    }
  ]
}
@@ -0,0 +1,3 @@
# If you find my open-source work helpful, please consider sponsoring me!

github: mckaywrigley
@@ -0,0 +1,40 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js
.yarn/install-state.gz

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts

seed.sql
.VSCodeCounter
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

npm run lint:fix && npm run format:write && git add .
@@ -0,0 +1,82 @@
# Chatbot UI

The open-source AI chat app for everyone.



## Demo

View the latest demo [here](https://twitter.com/mckaywrigley).

## Support

If you find Chatbot UI useful, please consider [sponsoring](https://github.com/mckaywrigley?tab=sponsoring) me :)

## Quickstart

### 1. Clone the repo

```bash
git clone https://github.com/mckaywrigley/chatbot-ui.git
```

### 2. Install dependencies

```bash
npm install
```

### 3. Install Supabase & run locally

1. Install Supabase CLI

```bash
brew install supabase/tap/supabase
```

2. Start Supabase

```bash
supabase start
```

### 4. Install Ollama (for local models)

Follow the instructions [here](https://github.com/jmorganca/ollama#macos)
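Once Ollama is installed, the app also needs at least one local model available. A minimal sketch, assuming `mistral` as an example model tag (any tag works) and the default `NEXT_PUBLIC_OLLAMA_URL` of `http://localhost:11434` from `.env.local.example`:

```bash
# Hypothetical example — pick any model tag you prefer.
ollama pull mistral
ollama serve   # only needed if the Ollama server is not already running (the desktop app usually starts it)
```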
### 5. Fill in secrets

1. .env

```bash
cp .env.local.example .env.local
```

Get the required values by running the command below (a filled-in `.env.local` sketch follows at the end of this step):

```bash
supabase status
```
2. sql

In the first migration file `20240108234540_setup.sql` you will need to replace 2 values:

- `project_url` (line 53): This can remain unchanged if you don't change your `config.toml` file.
- `service_role_key` (line 54): You got this value from running `supabase status` in step 5.1.

Make sure both values are filled in before moving on.
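As a reference, here is a rough sketch of what `.env.local` might look like once the Supabase values are copied in. The keys come from `.env.local.example` in this commit; the URL and placeholder values are illustrative assumptions, not real credentials (`supabase status` typically labels the fields `API URL`, `anon key`, and `service_role key`).

```bash
# Hypothetical .env.local — replace the placeholders with the matching fields from `supabase status`.
NEXT_PUBLIC_SUPABASE_URL=http://localhost:54321    # "API URL" (default local port; yours may differ)
NEXT_PUBLIC_SUPABASE_ANON_KEY=<anon key>           # "anon key"

SUPABASE_SERVICE_ROLE_KEY=<service_role key>       # "service_role key"

# Ollama (default from .env.local.example)
NEXT_PUBLIC_OLLAMA_URL=http://localhost:11434
```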
### 6. Run app locally

```bash
npm run chat
```

## Contributing

We are working on a guide for contributing.

## Contact

Message Mckay on [Twitter/X](https://twitter.com/mckaywrigley)
@@ -0,0 +1,47 @@
import { CHAT_SETTING_LIMITS } from "@/lib/chat-setting-limits"
import { checkApiKey, getServerProfile } from "@/lib/server-chat-helpers"
import { ChatSettings } from "@/types"
import Anthropic from "@anthropic-ai/sdk"
import { AnthropicStream, StreamingTextResponse } from "ai"

export const runtime = "edge"

export async function POST(request: Request) {
  const json = await request.json()
  const { chatSettings, messages } = json as {
    chatSettings: ChatSettings
    messages: any[]
  }

  try {
    const profile = await getServerProfile()

    checkApiKey(profile.anthropic_api_key, "Anthropic")

    let ANTHROPIC_FORMATTED_MESSAGES: any = messages.slice(1)

    const anthropic = new Anthropic({
      apiKey: profile.anthropic_api_key || ""
    })

    const response = await anthropic.beta.messages.create({
      model: chatSettings.model,
      messages: ANTHROPIC_FORMATTED_MESSAGES,
      temperature: chatSettings.temperature,
      system: messages[0].content,
      max_tokens:
        CHAT_SETTING_LIMITS[chatSettings.model].MAX_TOKEN_OUTPUT_LENGTH,
      stream: true
    })

    const stream = AnthropicStream(response)

    return new StreamingTextResponse(stream)
  } catch (error: any) {
    const errorMessage = error.message || "An unexpected error occurred"
    const errorCode = error.status || 500
    return new Response(JSON.stringify({ message: errorMessage }), {
      status: errorCode
    })
  }
}
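For illustration, a sketch of the request shape this handler expects. The endpoint path, port, and model name are assumptions (the file path is not shown in this diff; a conventional Next.js app-router location such as `app/api/chat/anthropic/route.ts` would expose it at `/api/chat/anthropic`), and `getServerProfile()` requires an authenticated Supabase session, so a bare request like this would be rejected in practice:

```bash
# Hypothetical request shape only — path, port, and model are assumptions,
# and getServerProfile() rejects calls without a valid session.
curl -N http://localhost:3000/api/chat/anthropic \
  -H "Content-Type: application/json" \
  -d '{
    "chatSettings": { "model": "claude-2.1", "temperature": 0.5 },
    "messages": [
      { "role": "system", "content": "You are a helpful assistant." },
      { "role": "user", "content": "Hello!" }
    ]
  }'
```

The first message is consumed as the Anthropic `system` prompt and the rest are forwarded as the conversation, matching the `messages[0].content` / `messages.slice(1)` split in the handler above.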
@@ -0,0 +1,74 @@
import { CHAT_SETTING_LIMITS } from "@/lib/chat-setting-limits"
import { checkApiKey, getServerProfile } from "@/lib/server-chat-helpers"
import { ChatAPIPayload } from "@/types"
import { OpenAIStream, StreamingTextResponse } from "ai"
import OpenAI from "openai"
import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions.mjs"

export const runtime = "edge"

export async function POST(request: Request) {
  const json = await request.json()
  const { chatSettings, messages } = json as ChatAPIPayload

  try {
    const profile = await getServerProfile()

    checkApiKey(profile.azure_openai_api_key, "Azure")

    const ENDPOINT = profile.azure_openai_endpoint
    const KEY = profile.azure_openai_api_key

    let DEPLOYMENT_ID = ""
    switch (chatSettings.model) {
      case "gpt-3.5-turbo-1106":
        DEPLOYMENT_ID = profile.azure_openai_35_turbo_id || ""
        break
      case "gpt-4-1106-preview":
        DEPLOYMENT_ID = profile.azure_openai_45_turbo_id || ""
        break
      case "gpt-4-vision-preview":
        DEPLOYMENT_ID = profile.azure_openai_45_vision_id || ""
        break
      default:
        return new Response(JSON.stringify({ message: "Model not found" }), {
          status: 400
        })
    }

    if (!ENDPOINT || !KEY || !DEPLOYMENT_ID) {
      return new Response(
        JSON.stringify({ message: "Azure resources not found" }),
        {
          status: 400
        }
      )
    }

    const azureOpenai = new OpenAI({
      apiKey: KEY,
      baseURL: `${ENDPOINT}/openai/deployments/${DEPLOYMENT_ID}`,
      defaultQuery: { "api-version": "2023-07-01-preview" },
      defaultHeaders: { "api-key": KEY }
    })

    const response = await azureOpenai.chat.completions.create({
      model: DEPLOYMENT_ID as ChatCompletionCreateParamsBase["model"],
      messages: messages as ChatCompletionCreateParamsBase["messages"],
      temperature: chatSettings.temperature,
      max_tokens:
        CHAT_SETTING_LIMITS[chatSettings.model].MAX_TOKEN_OUTPUT_LENGTH,
      stream: true
    })

    const stream = OpenAIStream(response)

    return new StreamingTextResponse(stream)
  } catch (error: any) {
    const errorMessage = error.error?.message || "An unexpected error occurred"
    const errorCode = error.status || 500
    return new Response(JSON.stringify({ message: errorMessage }), {
      status: errorCode
    })
  }
}
@@ -0,0 +1,72 @@
import { checkApiKey, getServerProfile } from "@/lib/server-chat-helpers"
import { ChatSettings } from "@/types"
import { GoogleGenerativeAI } from "@google/generative-ai"

export const runtime = "edge"

export async function POST(request: Request) {
  const json = await request.json()
  const { chatSettings, messages } = json as {
    chatSettings: ChatSettings
    messages: any[]
  }

  try {
    const profile = await getServerProfile()

    checkApiKey(profile.google_gemini_api_key, "Google")

    const genAI = new GoogleGenerativeAI(profile.google_gemini_api_key || "")
    const googleModel = genAI.getGenerativeModel({ model: chatSettings.model })

    if (chatSettings.model === "gemini-pro") {
      const lastMessage = messages.pop()

      const chat = googleModel.startChat({
        history: messages,
        generationConfig: {
          temperature: chatSettings.temperature
        }
      })

      const response = await chat.sendMessageStream(lastMessage.parts)

      const encoder = new TextEncoder()
      const readableStream = new ReadableStream({
        async start(controller) {
          for await (const chunk of response.stream) {
            const chunkText = chunk.text()
            controller.enqueue(encoder.encode(chunkText))
          }
          controller.close()
        }
      })

      return new Response(readableStream, {
        headers: { "Content-Type": "text/plain" }
      })
    } else if (chatSettings.model === "gemini-pro-vision") {
      // FIX: Hacky until chat messages are supported
      const HACKY_MESSAGE = messages[messages.length - 1]

      const result = await googleModel.generateContent([
        HACKY_MESSAGE.prompt,
        HACKY_MESSAGE.imageParts
      ])

      const response = result.response

      const text = response.text()

      return new Response(text, {
        headers: { "Content-Type": "text/plain" }
      })
    }
  } catch (error: any) {
    const errorMessage = error.error?.message || "An unexpected error occurred"
    const errorCode = error.status || 500
    return new Response(JSON.stringify({ message: errorMessage }), {
      status: errorCode
    })
  }
}