diff --git a/.changeset/many-knives-warn.md b/.changeset/many-knives-warn.md
new file mode 100644
index 000000000..417206598
--- /dev/null
+++ b/.changeset/many-knives-warn.md
@@ -0,0 +1,6 @@
+---
+"create-llama": patch
+"@llamaindex/server": patch
+---
+
+support eject to fully customize next folder
diff --git a/eslint.config.mjs b/eslint.config.mjs
index a5c2c559d..e5e489fc5 100644
--- a/eslint.config.mjs
+++ b/eslint.config.mjs
@@ -58,6 +58,8 @@ export default tseslint.config(
       "**/node_modules/**",
       "**/build/**",
       "packages/server/server/**",
+      "packages/server/project/**",
+      "packages/server/bin/**",
     ],
   },
 );
diff --git a/packages/create-llama/e2e/shared/llamaindexserver_template.spec.ts b/packages/create-llama/e2e/shared/llamaindexserver_template.spec.ts
index f85db166c..c99d5a3ba 100644
--- a/packages/create-llama/e2e/shared/llamaindexserver_template.spec.ts
+++ b/packages/create-llama/e2e/shared/llamaindexserver_template.spec.ts
@@ -1,5 +1,5 @@
 import { expect, test } from "@playwright/test";
-import { ChildProcess } from "child_process";
+import { ChildProcess, execSync } from "child_process";
 import fs from "fs";
 import path from "path";
 import type {
@@ -28,6 +28,7 @@ const templateUseCases = [
   "deep_research",
   "code_generator",
 ];
+const ejectDir = "next";
 
 for (const useCase of templateUseCases) {
   test.describe(`Test use case ${useCase} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
@@ -110,6 +111,28 @@ for (const useCase of templateUseCases) {
       expect(response.ok()).toBeTruthy();
     });
 
+    test("Should successfully eject, install dependencies and build without errors", async () => {
+      test.skip(
+        templateFramework !== "nextjs" ||
+          useCase !== "code_generator" ||
+          dataSource === "--llamacloud",
+        "Eject test only applies to Next.js framework, code generator use case, and non-llamacloud",
+      );
+
+      // Run eject command
+      execSync("npm run eject", { cwd: path.join(cwd, name) });
+
+      // Verify next directory exists
+      const nextDirExists = fs.existsSync(path.join(cwd, name, ejectDir));
+      expect(nextDirExists).toBeTruthy();
+
+      // Install dependencies in next directory
+      execSync("npm install", { cwd: path.join(cwd, name, ejectDir) });
+
+      // Run build
+      execSync("npm run build", { cwd: path.join(cwd, name, ejectDir) });
+    });
+
     // clean processes
     test.afterAll(async () => {
       appProcess?.kill();
diff --git a/packages/create-llama/templates/components/use-cases/typescript/agentic_rag/README-template.md b/packages/create-llama/templates/components/use-cases/typescript/agentic_rag/README-template.md
index 73182ab9f..995eba057 100644
--- a/packages/create-llama/templates/components/use-cases/typescript/agentic_rag/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/typescript/agentic_rag/README-template.md
@@ -41,6 +41,14 @@ curl --location 'localhost:3000/api/chat' \
 --data '{ "messages": [{ "role": "user", "content": "What standards for a letter exist?" }] }'
 ```
 
+## Eject Mode
+
+If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
+
+```bash
+npm run eject
+```
+
 ## Learn More
 
 To learn more about LlamaIndex, take a look at the following resources:
diff --git a/packages/create-llama/templates/components/use-cases/typescript/code_generator/README-template.md b/packages/create-llama/templates/components/use-cases/typescript/code_generator/README-template.md
index f79151961..463237c71 100644
--- a/packages/create-llama/templates/components/use-cases/typescript/code_generator/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/typescript/code_generator/README-template.md
@@ -42,6 +42,14 @@ curl --location 'localhost:3000/api/chat' \
 --data '{ "messages": [{ "role": "user", "content": "Compare the financial performance of Apple and Tesla" }] }'
 ```
 
+## Eject Mode
+
+If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
+
+```bash
+npm run eject
+```
+
 ## Learn More
 
 To learn more about LlamaIndex, take a look at the following resources:
diff --git a/packages/create-llama/templates/components/use-cases/typescript/deep_research/README-template.md b/packages/create-llama/templates/components/use-cases/typescript/deep_research/README-template.md
index 5789bd486..45ce35d79 100644
--- a/packages/create-llama/templates/components/use-cases/typescript/deep_research/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/typescript/deep_research/README-template.md
@@ -53,6 +53,14 @@ curl --location 'localhost:3000/api/chat' \
 --data '{ "messages": [{ "role": "user", "content": "Compare the financial performance of Apple and Tesla" }] }'
 ```
 
+## Eject Mode
+
+If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
+
+```bash
+npm run eject
+```
+
 ## Learn More
 
 To learn more about LlamaIndex, take a look at the following resources:
diff --git a/packages/create-llama/templates/components/use-cases/typescript/document_generator/README-template.md b/packages/create-llama/templates/components/use-cases/typescript/document_generator/README-template.md
index 528ec9ff6..c54958681 100644
--- a/packages/create-llama/templates/components/use-cases/typescript/document_generator/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/typescript/document_generator/README-template.md
@@ -42,6 +42,14 @@ curl --location 'localhost:3000/api/chat' \
 --data '{ "messages": [{ "role": "user", "content": "Compare the financial performance of Apple and Tesla" }] }'
 ```
 
+## Eject Mode
+
+If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
+
+```bash
+npm run eject
+```
+
 ## Learn More
 
 To learn more about LlamaIndex, take a look at the following resources:
diff --git a/packages/create-llama/templates/components/use-cases/typescript/financial_report/README-template.md b/packages/create-llama/templates/components/use-cases/typescript/financial_report/README-template.md
index 3c31a8283..be912db97 100644
--- a/packages/create-llama/templates/components/use-cases/typescript/financial_report/README-template.md
+++ b/packages/create-llama/templates/components/use-cases/typescript/financial_report/README-template.md
@@ -41,6 +41,14 @@ curl --location 'localhost:3000/api/chat' \
 --data '{ "messages": [{ "role": "user", "content": "Generate a financial report that compares the financial performance of Apple and Tesla" }] }'
 ```
 
+## Eject Mode
+
+If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
+
+```bash
+npm run eject
+```
+
 ## Learn More
 
 To learn more about LlamaIndex, take a look at the following resources:
diff --git a/packages/create-llama/templates/components/vectordbs/llamaindexserver/llamacloud/typescript/generate.ts b/packages/create-llama/templates/components/vectordbs/llamaindexserver/llamacloud/typescript/generate.ts
index 63623e69d..5c07d42b1 100644
--- a/packages/create-llama/templates/components/vectordbs/llamaindexserver/llamacloud/typescript/generate.ts
+++ b/packages/create-llama/templates/components/vectordbs/llamaindexserver/llamacloud/typescript/generate.ts
@@ -1,8 +1,9 @@
+import { OpenAI } from "@llamaindex/openai";
 import { generateEventComponent } from "@llamaindex/server";
 import * as dotenv from "dotenv";
 import "dotenv/config";
 import * as fs from "fs/promises";
-import { LLamaCloudFileService, OpenAI } from "llamaindex";
+import { LLamaCloudFileService } from "llamaindex";
 import * as path from "path";
 import { getIndex } from "./app/data";
 import { initSettings } from "./app/settings";
diff --git a/packages/create-llama/templates/types/llamaindexserver/nextjs/package.json b/packages/create-llama/templates/types/llamaindexserver/nextjs/package.json
index f68276d1c..c0864dd54 100644
--- a/packages/create-llama/templates/types/llamaindexserver/nextjs/package.json
+++ b/packages/create-llama/templates/types/llamaindexserver/nextjs/package.json
@@ -6,7 +6,8 @@
     "generate:datasource": "tsx src/generate.ts datasource",
     "generate:ui": "tsx src/generate.ts ui",
     "dev": "nodemon",
-    "start": "tsx src/index.ts"
+    "start": "tsx src/index.ts",
+    "eject": "llamaindex-server eject"
   },
   "dependencies": {
     "@llamaindex/openai": "~0.4.0",
diff --git a/packages/server/.gitignore b/packages/server/.gitignore
index 589466f3e..b5936fb95 100644
--- a/packages/server/.gitignore
+++ b/packages/server/.gitignore
@@ -1,5 +1,8 @@
 # server contains Nextjs frontend code (not compiled)
 server/
 
+# the ejected nextjs project
+project/
+
 # temp is the copy of next folder but without API folder, used to build frontend static files
 temp/
diff --git a/packages/server/README.md b/packages/server/README.md
index 51bedca90..8b91a2c57 100644
--- a/packages/server/README.md
+++ b/packages/server/README.md
@@ -300,6 +300,23 @@ The server always provides a chat interface at the root path (`/`) with:
 - The server automatically mounts the `data` and `output` folders at `{server_url}{api_prefix}/files/data` (default: `/api/files/data`) and `{server_url}{api_prefix}/files/output` (default: `/api/files/output`) respectively.
 - Your workflows can use both folders to store and access files. By convention, the `data` folder is used for documents that are ingested, and the `output` folder is used for documents generated by the workflow.
 
+### Eject Mode
+
+If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
+By default, the ejected project will be created in the `next` directory inside the current working directory. You can change the output directory by passing a custom path to the `eject` command (for example, `npm run eject -- ./my-next-dir`):
+
+```bash
+npm run eject
+```
+
+How eject works:
+
+1. Initialize a Next.js project with ESLint, Prettier, PostCSS, Tailwind CSS, shadcn components, etc.
+2. Copy your workflow definition and settings files from `src/app/*` to `app/api/chat` in the ejected project
+3. Copy your `components`, `data`, `output`, and `storage` folders to the ejected project
+4. Copy your current `.env` file to the ejected project
+5. Clean up files that are no longer needed and update imports
+
 ## API Reference
 
 - [LlamaIndexServer](https://ts.llamaindex.ai/docs/api/classes/LlamaIndexServer)
diff --git a/packages/server/bin/eject.cjs b/packages/server/bin/eject.cjs
new file mode 100644
index 000000000..126860913
--- /dev/null
+++ b/packages/server/bin/eject.cjs
@@ -0,0 +1,172 @@
+#!/usr/bin/env node
+
+const fs = require("fs").promises;
+const path = require("path");
+
+// Resolve the project directory in node_modules/@llamaindex/server/project
+// This is the template that is used to construct the nextjs project
+const projectDir = path.resolve(__dirname, "../project");
+
+// Resolve the src directory that contains the workflow & settings files
+const srcDir = path.join(process.cwd(), "src");
+const srcAppDir = path.join(srcDir, "app");
+const generateFile = path.join(srcDir, "generate.ts");
+const envFile = path.join(process.cwd(), ".env");
+
+// The environment variables that are used as LlamaIndexServer configs
+const SERVER_CONFIG_VARS = [
+  {
+    key: "OPENAI_API_KEY",
+    defaultValue: "",
+    description: "OpenAI API key",
+  },
+  {
+    key: "SUGGEST_NEXT_QUESTIONS",
+    defaultValue: "true",
+    description: "Whether to suggest next questions (`suggestNextQuestions`)",
+  },
+  {
+    key: "COMPONENTS_DIR",
+    defaultValue: "components",
+    description: "Directory for custom components (`componentsDir`)",
+  },
+  {
+    key: "WORKFLOW_FILE_PATH",
+    defaultValue: "app/api/chat/app/workflow.ts",
+    description: "The path to the workflow file (will be updated in dev mode)",
+  },
+  {
+    key: "NEXT_PUBLIC_USE_COMPONENTS_DIR",
+    defaultValue: "true",
+    description: "Whether to enable components directory feature on frontend",
+  },
+  {
+    key: "NEXT_PUBLIC_DEV_MODE",
+    defaultValue: "true",
+    description: "Whether to enable dev mode (`devMode`)",
+  },
+  {
+    key: "NEXT_PUBLIC_STARTER_QUESTIONS",
+    defaultValue: '["Summarize the document", "What are the key points?"]',
+    description:
+      "Initial questions to display in the chat (`starterQuestions`)",
+  },
+  {
+    key: "NEXT_PUBLIC_SHOW_LLAMACLOUD_SELECTOR",
+    defaultValue: "false",
+    description:
+      "Whether to show LlamaCloud selector for frontend (`llamaCloudIndexSelector`)",
+  },
+];
+
+async function eject() {
+  try {
+    // validate required directories (nextjs project template, src directory, src/app directory)
+    const requiredDirs = [projectDir, srcDir, srcAppDir];
+    for (const dir of requiredDirs) {
+      const exists = await fs
+        .access(dir)
+        .then(() => true)
+        .catch(() => false);
+      if (!exists) {
+        console.error("Error: directory does not exist at", dir);
+        process.exit(1);
+      }
+    }
+
+    // Get destination directory from command line arguments (pnpm eject [destination])
+    const args = process.argv;
+    const outputIndex = args.indexOf("eject");
+    const destDir =
+      outputIndex !== -1 && args[outputIndex + 1]
+        ? path.resolve(args[outputIndex + 1]) // Use provided path after eject
+        : path.join(process.cwd(), "next"); // Default to "next" folder in the current working directory
+
+    // remove destination directory if it exists
+    await fs.rm(destDir, { recursive: true, force: true });
+
+    // create destination directory
+    await fs.mkdir(destDir, { recursive: true });
+
+    // Copy the nextjs project template to the destination directory
+    await fs.cp(projectDir, destDir, { recursive: true });
+
+    // copy src/app/* to destDir/app/api/chat/app
+    const chatRouteDir = path.join(destDir, "app", "api", "chat");
+    await fs.cp(srcAppDir, path.join(chatRouteDir, "app"), { recursive: true });
+
+    // the nextjs project no longer depends on @llamaindex/server, so update the imports in the workflow file
+    const workflowFile = path.join(chatRouteDir, "app", "workflow.ts");
+    let workflowContent = await fs.readFile(workflowFile, "utf-8");
+    workflowContent = workflowContent.replace("@llamaindex/server", "../utils");
+    await fs.writeFile(workflowFile, workflowContent);
+
+    // copy generate.ts if it exists
+    const genFilePath = path.join(chatRouteDir, "generate.ts");
+    const genFileExists = await copy(generateFile, genFilePath);
+    if (genFileExists) {
+      // update the @llamaindex/server import in generate.ts
+      let genContent = await fs.readFile(genFilePath, "utf-8");
+      genContent = genContent.replace("@llamaindex/server", "./utils");
+      await fs.writeFile(genFilePath, genContent);
+    }
+
+    // copy folders in the root directory if they exist
+    const rootFolders = ["components", "data", "output", "storage"];
+    for (const folder of rootFolders) {
+      await copy(path.join(process.cwd(), folder), path.join(destDir, folder));
+    }
+
+    // copy .env if it exists or create a new one
+    const envFileExists = await copy(envFile, path.join(destDir, ".env"));
+    if (!envFileExists) {
+      await fs.writeFile(path.join(destDir, ".env"), "");
+    }
+
+    // update .env file with more server configs
+    let envFileContent = await fs.readFile(path.join(destDir, ".env"), "utf-8");
+    for (const envVar of SERVER_CONFIG_VARS) {
+      const { key, defaultValue, description } = envVar;
+      if (!envFileContent.includes(key)) {
+        // if the key does not exist in the env file, add it
+        envFileContent += `\n# ${description}\n${key}=${defaultValue}\n`;
+      }
+    }
+    await fs.writeFile(path.join(destDir, ".env"), envFileContent);
+
+    // rename gitignore -> .gitignore
+    await fs.rename(
+      path.join(destDir, "gitignore"),
+      path.join(destDir, ".gitignore"),
+    );
+
+    // users can customize the layout directory in the nextjs project, so remove the layout API
+    await fs.rm(path.join(destDir, "app", "api", "layout"), {
+      recursive: true,
+      force: true,
+    });
+
+    // remove files that are not needed in the ejected project
+    await fs.unlink(path.join(destDir, "public", "config.js"));
+    await fs.unlink(path.join(destDir, "next-build.config.ts"));
+
+    console.log("Successfully ejected @llamaindex/server to", destDir);
+  } catch (error) {
+    console.error("Error during eject:", error.message);
+    process.exit(1);
+  }
+}
+
+// copy src to dest if src exists, return true if src exists
+async function copy(src, dest) {
+  const srcExists = await fs
+    .access(src)
+    .then(() => true)
+    .catch(() => false);
+  if (srcExists) {
+    await fs.cp(src, dest, { recursive: true });
+  }
+  return srcExists;
+}
+
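+// Usage note (a sketch): the generated project's package.json maps the `eject` script to
+// `llamaindex-server eject`, so this file is typically invoked as:
+//   npm run eject                     # eject into ./next (the default)
+//   npm run eject -- ./my-next-app    # eject into a custom directory (illustrative path)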
+eject();
diff --git a/packages/server/next/README.md b/packages/server/next/README.md
new file mode 100644
index 000000000..e57488eb7
--- /dev/null
+++ b/packages/server/next/README.md
@@ -0,0 +1,45 @@
+This is a [LlamaIndex](https://www.llamaindex.ai/) project using [Next.js](https://nextjs.org/) that is ejected from [`llamaindex-server`](https://github.com/run-llama/create-llama/tree/main/packages/server) via the `npm run eject` command.
+
+## Quick Start
+
+As this is a Next.js project, you can use the following commands to start the development server:
+
+```bash
+npm install
+npm run dev
+```
+
+Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
+
+## Useful Commands
+
+- Generate Datasource (if you have a `./data` folder): `npm run generate`
+- Typecheck: `npm run typecheck`
+- Lint: `npm run lint`
+- Format: `npm run format`
+- Build & Start: `npm run build && npm run start`
+
+## Deployment
+
+The project can be deployed to any platform that supports Next.js, such as Vercel.
+
+## Configuration
+
+Your original [`llamaindex-server`](https://github.com/run-llama/create-llama/tree/main/packages/server#configuration-options) configurations have been migrated to a [`.env`](.env) file.
+
+Changing the `.env` file changes the behavior of the application. For example, to change the initial questions displayed in the chat, set:
+
+```
+NEXT_PUBLIC_STARTER_QUESTIONS=["What is the capital of France?"]
+```
+
+Alternatively, you can edit the files that reference `process.env.NEXT_PUBLIC_STARTER_QUESTIONS` directly in the source code.
+
+## Learn More
+
+To learn more about LlamaIndex, take a look at the following resources:
+
+- [LlamaIndex Documentation](https://docs.llamaindex.ai) - learn about LlamaIndex (Python features).
+- [LlamaIndexTS Documentation](https://ts.llamaindex.ai) - learn about LlamaIndex (Typescript features).
+
+You can check out [the LlamaIndexTS GitHub repository](https://github.com/run-llama/LlamaIndexTS) - your feedback and contributions are welcome!
diff --git a/packages/server/next/app/api/chat/route.ts b/packages/server/next/app/api/chat/route.ts index b11801458..2eee69e86 100644 --- a/packages/server/next/app/api/chat/route.ts +++ b/packages/server/next/app/api/chat/route.ts @@ -4,16 +4,23 @@ import { type MessageType } from "llamaindex"; import { NextRequest, NextResponse } from "next/server"; // import chat utils -import { toDataStream } from "./utils/stream"; -import { sendSuggestedQuestionsEvent } from "./utils/suggestion"; -import { runWorkflow } from "./utils/workflow"; +import { + runWorkflow, + sendSuggestedQuestionsEvent, + toDataStream, +} from "./utils"; -// import workflow factory from local file -import { workflowFactory } from "../../../../app/workflow"; +// import workflow factory and settings from local file +import { initSettings } from "./app/settings"; +import { workflowFactory } from "./app/workflow"; + +initSettings(); export async function POST(req: NextRequest) { try { const reqBody = await req.json(); + const suggestNextQuestions = process.env.SUGGEST_NEXT_QUESTIONS === "true"; + const { messages } = reqBody as { messages: Message[] }; const chatHistory = messages.map((message) => ({ role: message.role as MessageType, @@ -47,14 +54,15 @@ export async function POST(req: NextRequest) { ); const dataStream = toDataStream(workflowEventStream, { - // TODO: Support enable/disable suggestion callbacks: { onFinal: async (completion, dataStreamWriter) => { chatHistory.push({ role: "assistant" as MessageType, content: completion, }); - await sendSuggestedQuestionsEvent(dataStreamWriter, chatHistory); + if (suggestNextQuestions) { + await sendSuggestedQuestionsEvent(dataStreamWriter, chatHistory); + } }, }, }); diff --git a/packages/server/next/app/api/components/route.ts b/packages/server/next/app/api/components/route.ts index 168e8c21a..ce72faf86 100644 --- a/packages/server/next/app/api/components/route.ts +++ b/packages/server/next/app/api/components/route.ts @@ -3,6 +3,7 @@ import { handleComponentRoute } from "../shared/component-handler"; export async function GET(request: NextRequest) { const params = request.nextUrl.searchParams; - const directory = params.get("componentsDir") || "components"; + const directory = + params.get("componentsDir") || process.env.COMPONENTS_DIR || "components"; return handleComponentRoute(directory); } diff --git a/packages/server/next/app/api/dev/files/workflow/route.ts b/packages/server/next/app/api/dev/files/workflow/route.ts index e7b471443..6f6eb9173 100644 --- a/packages/server/next/app/api/dev/files/workflow/route.ts +++ b/packages/server/next/app/api/dev/files/workflow/route.ts @@ -4,7 +4,8 @@ import { NextRequest, NextResponse } from "next/server"; import path from "path"; import { promisify } from "util"; -const DEFAULT_WORKFLOW_FILE_PATH = "src/app/workflow.ts"; // TODO: we can make it as a parameter in server later +const DEFAULT_WORKFLOW_FILE_PATH = + process.env.WORKFLOW_FILE_PATH || "src/app/workflow.ts"; export async function GET(request: NextRequest) { const filePath = DEFAULT_WORKFLOW_FILE_PATH; diff --git a/packages/server/next/app/components/ui/calendar.tsx b/packages/server/next/app/components/ui/calendar.tsx index 41c0c0875..f977c0c19 100644 --- a/packages/server/next/app/components/ui/calendar.tsx +++ b/packages/server/next/app/components/ui/calendar.tsx @@ -60,12 +60,12 @@ function Calendar({ ...classNames, }} components={{ - IconLeft: ({ className, ...props }) => ( - - ), - IconRight: ({ className, ...props }) => ( - - ), + Chevron: ({ ...props }) => + 
props.orientation === "left" ? ( + + ) : ( + + ), }} {...props} /> diff --git a/packages/server/next/app/components/ui/chat/chat-input.tsx b/packages/server/next/app/components/ui/chat/chat-input.tsx index 9a7fb7a95..7d8d8665f 100644 --- a/packages/server/next/app/components/ui/chat/chat-input.tsx +++ b/packages/server/next/app/components/ui/chat/chat-input.tsx @@ -8,7 +8,11 @@ import { LlamaCloudSelector } from "./custom/llama-cloud-selector"; export default function CustomChatInput() { const { requestData, isLoading, input } = useChatUI(); const uploadAPI = getConfig("UPLOAD_API") ?? ""; - const llamaCloudAPI = getConfig("LLAMA_CLOUD_API") ?? ""; + const llamaCloudAPI = + getConfig("LLAMA_CLOUD_API") ?? + (process.env.NEXT_PUBLIC_SHOW_LLAMACLOUD_SELECTOR === "true" + ? "/api/chat/config/llamacloud" + : ""); const { imageUrl, setImageUrl, diff --git a/packages/server/next/app/components/ui/chat/chat-section.tsx b/packages/server/next/app/components/ui/chat/chat-section.tsx index 6a5f27923..73c8c8c4f 100644 --- a/packages/server/next/app/components/ui/chat/chat-section.tsx +++ b/packages/server/next/app/components/ui/chat/chat-section.tsx @@ -17,7 +17,7 @@ import { ChatLayout } from "./layout"; export default function ChatSection() { const handler = useChat({ - api: getConfig("CHAT_API"), + api: getConfig("CHAT_API") || "/api/chat", onError: (error: unknown) => { if (!(error instanceof Error)) throw error; let errorMessage: string; diff --git a/packages/server/next/app/components/ui/chat/chat-starter.tsx b/packages/server/next/app/components/ui/chat/chat-starter.tsx index e3b072b7c..149d94253 100644 --- a/packages/server/next/app/components/ui/chat/chat-starter.tsx +++ b/packages/server/next/app/components/ui/chat/chat-starter.tsx @@ -6,7 +6,9 @@ import { getConfig } from "../lib/utils"; export function ChatStarter({ className }: { className?: string }) { const { append, messages, requestData } = useChatUI(); - const starterQuestions = getConfig("STARTER_QUESTIONS") ?? []; + const starterQuestions = + getConfig("STARTER_QUESTIONS") ?? + JSON.parse(process.env.NEXT_PUBLIC_STARTER_QUESTIONS || "[]"); if (starterQuestions.length === 0 || messages.length > 0) return null; return ( diff --git a/packages/server/next/app/components/ui/chat/custom/events/loader.ts b/packages/server/next/app/components/ui/chat/custom/events/loader.ts index 54b6450b6..4c944b91e 100644 --- a/packages/server/next/app/components/ui/chat/custom/events/loader.ts +++ b/packages/server/next/app/components/ui/chat/custom/events/loader.ts @@ -17,7 +17,11 @@ export async function fetchComponentDefinitions(): Promise<{ components: ComponentDef[]; errors: string[]; }> { - const endpoint = getConfig("COMPONENTS_API"); + const endpoint = + getConfig("COMPONENTS_API") ?? + (process.env.NEXT_PUBLIC_USE_COMPONENTS_DIR === "true" + ? 
"/api/components" + : undefined); if (!endpoint) { console.warn("/api/components endpoint is not defined in config"); return { components: [], errors: [] }; diff --git a/packages/server/next/app/components/ui/chat/custom/llama-cloud-selector.tsx b/packages/server/next/app/components/ui/chat/custom/llama-cloud-selector.tsx index f7db88464..6928ea797 100644 --- a/packages/server/next/app/components/ui/chat/custom/llama-cloud-selector.tsx +++ b/packages/server/next/app/components/ui/chat/custom/llama-cloud-selector.tsx @@ -65,8 +65,14 @@ export function LlamaCloudSelector({ ); useEffect(() => { - if (!config && getConfig("LLAMA_CLOUD_API")) { - fetch(getConfig("LLAMA_CLOUD_API")) + const llamaCloudAPI = + getConfig("LLAMA_CLOUD_API") ?? + (process.env.NEXT_PUBLIC_SHOW_LLAMACLOUD_SELECTOR === "true" + ? "/api/chat/config/llamacloud" + : ""); + + if (!config && llamaCloudAPI) { + fetch(llamaCloudAPI) .then((response) => { if (!response.ok) { return response.json().then((errorData) => { diff --git a/packages/server/next/app/components/ui/chat/dev-mode-panel.tsx b/packages/server/next/app/components/ui/chat/dev-mode-panel.tsx index 38658729c..1ab110ce4 100644 --- a/packages/server/next/app/components/ui/chat/dev-mode-panel.tsx +++ b/packages/server/next/app/components/ui/chat/dev-mode-panel.tsx @@ -19,7 +19,8 @@ type WorkflowFile = { }; export function DevModePanel() { - const devModeEnabled = getConfig("DEV_MODE"); + const devModeEnabled = + getConfig("DEV_MODE") ?? process.env.NEXT_PUBLIC_DEV_MODE === "true"; if (!devModeEnabled) return null; return ; } diff --git a/packages/server/next/app/components/ui/chat/layout/index.tsx b/packages/server/next/app/components/ui/chat/layout/index.tsx index 440a36d2c..66100481f 100644 --- a/packages/server/next/app/components/ui/chat/layout/index.tsx +++ b/packages/server/next/app/components/ui/chat/layout/index.tsx @@ -121,7 +121,9 @@ async function parseLayoutComponents(layoutFiles: LayoutFile[]) { async function fetchLayoutFiles(): Promise { try { - const response = await fetch(getConfig("LAYOUT_API")); + const layoutApi = getConfig("LAYOUT_API"); + if (!layoutApi) return []; + const response = await fetch(layoutApi); const layoutFiles: LayoutFile[] = await response.json(); return layoutFiles; } catch (error) { diff --git a/packages/server/package.json b/packages/server/package.json index 88b9ea2e9..d84459aa2 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -19,8 +19,13 @@ }, "files": [ "dist", - "server" + "server", + "project", + "bin" ], + "bin": { + "llamaindex-server": "./bin/eject.cjs" + }, "repository": { "type": "git", "url": "git+https://github.com/run-llama/LlamaIndexTS.git", @@ -28,10 +33,11 @@ }, "scripts": { "dev": "bunchee --watch", - "clean": "rm -rf ./dist ./server next/.next next/out ./temp", + "clean": "rm -rf ./dist ./server ./project next/.next next/out ./temp", "prebuild": "pnpm clean", "build": "bunchee", - "postbuild": "pnpm prepare:ts-server && pnpm prepare:py-static", + "postbuild": "pnpm prepare:nextjs && pnpm prepare:ts-server && pnpm prepare:py-static", + "prepare:nextjs": "cp -r ./next ./project && cp -r ./src/utils ./project/app/api/chat && cp -r ./project-config/* ./project/", "prepare:ts-server": "pnpm copy:next-src && pnpm build:css && pnpm build:api", "prepare:py-static": "pnpm prepare:static && pnpm build:static && pnpm copy:static", "copy:next-src": "cp -r ./next ./server", @@ -97,7 +103,7 @@ "next": "^15.3.0", "next-themes": "^0.4.3", "react": "^19.1.0", - "react-day-picker": "8.10.1", + 
"react-day-picker": "9.7.0", "react-dom": "^19.1.0", "react-hook-form": "^7.55.0", "react-resizable-panels": "^2.1.7", diff --git a/packages/server/project-config/eslint.config.mjs b/packages/server/project-config/eslint.config.mjs new file mode 100644 index 000000000..4da0f732b --- /dev/null +++ b/packages/server/project-config/eslint.config.mjs @@ -0,0 +1,34 @@ +import { FlatCompat } from "@eslint/eslintrc"; +import { dirname } from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const compat = new FlatCompat({ + baseDirectory: __dirname, +}); + +const eslintConfig = [ + ...compat.extends("next/core-web-vitals", "next/typescript", "prettier"), + { + rules: { + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-unused-vars": "off", + "react-hooks/exhaustive-deps": "off", + "@next/next/no-img-element": "off", + "@next/next/no-assign-module-variable": "off", + }, + }, + { + ignores: [ + "**/.next/**", + "**/node_modules/**", + "prettier.config.mjs", + "eslint.config.mjs", + "postcss.config.js", + ], + }, +]; + +export default eslintConfig; diff --git a/packages/server/next/.gitignore b/packages/server/project-config/gitignore similarity index 96% rename from packages/server/next/.gitignore rename to packages/server/project-config/gitignore index 9b2d3e929..571c33d7e 100644 --- a/packages/server/next/.gitignore +++ b/packages/server/project-config/gitignore @@ -26,6 +26,7 @@ yarn-error.log* # local env files .env*.local +.env # vercel .vercel @@ -35,5 +36,6 @@ yarn-error.log* next-env.d.ts output/ +storage/ !lib/ \ No newline at end of file diff --git a/packages/server/project-config/package.json b/packages/server/project-config/package.json new file mode 100644 index 000000000..b43d62d02 --- /dev/null +++ b/packages/server/project-config/package.json @@ -0,0 +1,100 @@ +{ + "name": "nextjs-project", + "description": "Next.js project with full feature set of @llamaindex/server", + "private": true, + "version": "0.0.1", + "type": "module", + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint", + "format": "prettier --ignore-unknown --cache --check .", + "format:write": "prettier --ignore-unknown --write .", + "typecheck": "tsc --noEmit", + "generate": "tsx app\\api\\chat\\generate.ts" + }, + "devDependencies": { + "@eslint/eslintrc": "^3", + "@next/eslint-plugin-next": "^15.3.2", + "@tailwindcss/postcss": "^4", + "@types/babel__standalone": "^7.1.9", + "@types/babel__traverse": "^7.20.7", + "@types/node": "^20", + "@types/react": "^19", + "@types/react-dom": "^19", + "eslint": "^9", + "eslint-config-next": "^15.1.3", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-react-hooks": "^5.2.0", + "prettier": "^3.2.5", + "prettier-plugin-organize-imports": "^4.1.0", + "prettier-plugin-tailwindcss": "^0.6.11", + "tailwindcss": "^4", + "tsx": "^4.19.3", + "tw-animate-css": "1.2.5", + "typescript": "^5" + }, + "dependencies": { + "@babel/parser": "^7.27.0", + "@babel/standalone": "^7.27.0", + "@babel/traverse": "^7.27.0", + "@babel/types": "^7.27.0", + "@hookform/resolvers": "^5.0.1", + "@llamaindex/chat-ui": "0.4.5", + "@llamaindex/env": "~0.1.30", + "@llamaindex/openai": "~0.4.0", + "@llamaindex/readers": "~3.1.4", + "@llamaindex/tools": "~0.0.11", + "@llamaindex/workflow": "~1.1.3", + "@radix-ui/react-accordion": "^1.2.3", + "@radix-ui/react-alert-dialog": "^1.1.7", + "@radix-ui/react-aspect-ratio": "^1.1.3", + 
"@radix-ui/react-avatar": "^1.1.4", + "@radix-ui/react-checkbox": "^1.1.5", + "@radix-ui/react-collapsible": "^1.1.3", + "@radix-ui/react-context-menu": "^2.2.7", + "@radix-ui/react-dialog": "^1.1.2", + "@radix-ui/react-dropdown-menu": "^2.1.7", + "@radix-ui/react-hover-card": "^1.1.7", + "@radix-ui/react-label": "^2.1.0", + "@radix-ui/react-menubar": "^1.1.7", + "@radix-ui/react-navigation-menu": "^1.2.6", + "@radix-ui/react-popover": "^1.1.7", + "@radix-ui/react-progress": "^1.1.3", + "@radix-ui/react-radio-group": "^1.2.4", + "@radix-ui/react-scroll-area": "^1.2.4", + "@radix-ui/react-select": "^2.1.6", + "@radix-ui/react-separator": "^1.1.3", + "@radix-ui/react-slider": "^1.2.1", + "@radix-ui/react-slot": "^1.1.2", + "@radix-ui/react-switch": "^1.1.4", + "@radix-ui/react-tabs": "^1.1.3", + "@radix-ui/react-toggle": "^1.1.3", + "@radix-ui/react-toggle-group": "^1.1.3", + "@radix-ui/react-tooltip": "^1.1.4", + "ai": "^4.2.0", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "cmdk": "^1.1.1", + "date-fns": "^4.1.0", + "dotenv": "^16.5.0", + "embla-carousel-react": "^8.6.0", + "input-otp": "^1.4.2", + "llamaindex": "~0.11.0", + "lucide-react": "^0.460.0", + "next": "^15.3.0", + "next-themes": "^0.4.3", + "react": "^19.1.0", + "react-day-picker": "9.7.0", + "react-dom": "^19.1.0", + "react-hook-form": "^7.55.0", + "react-resizable-panels": "^2.1.7", + "recharts": "^2.15.2", + "sonner": "^2.0.3", + "tailwind-merge": "^2.6.0", + "vaul": "^1.1.2", + "zod": "^3.23.8", + "zod-to-json-schema": "^3.23.3" + } +} diff --git a/packages/server/project-config/prettier.config.mjs b/packages/server/project-config/prettier.config.mjs new file mode 100644 index 000000000..e2c5f1198 --- /dev/null +++ b/packages/server/project-config/prettier.config.mjs @@ -0,0 +1,3 @@ +export default { + plugins: ["prettier-plugin-organize-imports", "prettier-plugin-tailwindcss"], +}; diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 643f25846..e08c57892 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -1,5 +1,5 @@ -export * from "./events"; -export * from "./prompts"; export * from "./server"; export * from "./types"; +export * from "./utils/events"; export { generateEventComponent } from "./utils/gen-ui"; +export * from "./utils/prompts"; diff --git a/packages/server/src/events.ts b/packages/server/src/utils/events.ts similarity index 100% rename from packages/server/src/events.ts rename to packages/server/src/utils/events.ts diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts new file mode 100644 index 000000000..8b5184b00 --- /dev/null +++ b/packages/server/src/utils/index.ts @@ -0,0 +1,8 @@ +export * from "./events"; +export * from "./file"; +export * from "./gen-ui"; +export * from "./prompts"; +export * from "./request"; +export * from "./stream"; +export * from "./suggestion"; +export * from "./workflow"; diff --git a/packages/server/src/prompts.ts b/packages/server/src/utils/prompts.ts similarity index 100% rename from packages/server/src/prompts.ts rename to packages/server/src/utils/prompts.ts diff --git a/packages/server/src/utils/suggestion.ts b/packages/server/src/utils/suggestion.ts index 047544acf..3a5e00270 100644 --- a/packages/server/src/utils/suggestion.ts +++ b/packages/server/src/utils/suggestion.ts @@ -1,7 +1,7 @@ import { getEnv } from "@llamaindex/env"; import type { DataStreamWriter } from "ai"; import { type ChatMessage, Settings } from "llamaindex"; -import { NEXT_QUESTION_PROMPT } from 
"../prompts"; +import { NEXT_QUESTION_PROMPT } from "./prompts"; export const sendSuggestedQuestionsEvent = async ( streamWriter: DataStreamWriter, diff --git a/packages/server/src/utils/workflow.ts b/packages/server/src/utils/workflow.ts index cd83dea1a..566c8f330 100644 --- a/packages/server/src/utils/workflow.ts +++ b/packages/server/src/utils/workflow.ts @@ -19,7 +19,7 @@ import { toAgentRunEvent, toSourceEvent, type SourceEventNode, -} from "../events"; +} from "./events"; import { downloadFile } from "./file"; export async function runWorkflow( diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fac443fe8..fc7146117 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -301,8 +301,8 @@ importers: specifier: ^19.1.0 version: 19.1.0 react-day-picker: - specifier: 8.10.1 - version: 8.10.1(date-fns@4.1.0)(react@19.1.0) + specifier: 9.7.0 + version: 9.7.0(react@19.1.0) react-dom: specifier: ^19.1.0 version: 19.1.0(react@19.1.0) @@ -613,6 +613,9 @@ packages: peerDependencies: '@bufbuild/protobuf': ^2.2.0 + '@date-fns/tz@1.2.0': + resolution: {integrity: sha512-LBrd7MiJZ9McsOgxqWX7AaxrDjcFVjWH/tIKJd7pnR7McaslGYOP1QmmiBXdJH/H/yLCT+rcQ7FaPBUxRGUtrg==} + '@discoveryjs/json-ext@0.6.3': resolution: {integrity: sha512-4B4OijXeVNOPZlYA2oEwWOTkzyltLao+xbotHQeqN++Rv27Y6s818+n2Qkp8q+Fxhn0t/5lA5X1Mxktud8eayQ==} engines: {node: '>=14.17.0'} @@ -3256,6 +3259,9 @@ packages: resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==} engines: {node: '>= 0.4'} + date-fns-jalali@4.1.0-0: + resolution: {integrity: sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg==} + date-fns@4.1.0: resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==} @@ -5358,11 +5364,11 @@ packages: resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} hasBin: true - react-day-picker@8.10.1: - resolution: {integrity: sha512-TMx7fNbhLk15eqcMt+7Z7S2KF7mfTId/XJDjKE8f+IUcFn0l08/kI4FiYTL/0yuOLmEcbR4Fwe3GJf/NiiMnPA==} + react-day-picker@9.7.0: + resolution: {integrity: sha512-urlK4C9XJZVpQ81tmVgd2O7lZ0VQldZeHzNejbwLWZSkzHH498KnArT0EHNfKBOWwKc935iMLGZdxXPRISzUxQ==} + engines: {node: '>=18'} peerDependencies: - date-fns: ^2.28.0 || ^3.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react: '>=16.8.0' react-dom@19.1.0: resolution: {integrity: sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==} @@ -6812,6 +6818,8 @@ snapshots: dependencies: '@bufbuild/protobuf': 2.3.0 + '@date-fns/tz@1.2.0': {} + '@discoveryjs/json-ext@0.6.3': {} '@e2b/code-interpreter@1.5.0': @@ -9443,6 +9451,8 @@ snapshots: es-errors: 1.3.0 is-data-view: 1.0.2 + date-fns-jalali@4.1.0-0: {} + date-fns@4.1.0: {} debug@3.2.7: @@ -11895,9 +11905,11 @@ snapshots: minimist: 1.2.8 strip-json-comments: 2.0.1 - react-day-picker@8.10.1(date-fns@4.1.0)(react@19.1.0): + react-day-picker@9.7.0(react@19.1.0): dependencies: + '@date-fns/tz': 1.2.0 date-fns: 4.1.0 + date-fns-jalali: 4.1.0-0 react: 19.1.0 react-dom@19.1.0(react@19.1.0):