Merged

67 commits
- 9f86770 add script to copy server folder (thucpn, May 26, 2025)
- 9a35242 change command (thucpn, May 26, 2025)
- 07578f8 clean up before copy (thucpn, May 26, 2025)
- 3e8ec67 copy route handler folder (thucpn, May 26, 2025)
- 009858e fix lint (thucpn, May 26, 2025)
- 254ab1c add eject options (thucpn, May 26, 2025)
- ffe836a use EJECT from env (thucpn, May 26, 2025)
- 65864e5 update import workflow path (thucpn, May 26, 2025)
- 60afca8 init settings in chat route (thucpn, May 26, 2025)
- 20c662e refactor: move prompts & events to chat utils folder (thucpn, May 26, 2025)
- f32e971 copy utils for chat route (thucpn, May 26, 2025)
- b66cebb update config.js file in eject mode (thucpn, May 26, 2025)
- 7c338b3 pass suggestNextQuestions as query params to chat route handler (thucpn, May 26, 2025)
- 5be6a60 enable/disable suggestNextQuestions in route handler (thucpn, May 26, 2025)
- 9dce7f5 Merge branch 'main' into tp/eject-to-fully-support-custom-next-server (thucpn, May 26, 2025)
- 9e021b0 remove todo (thucpn, May 26, 2025)
- d8b6390 add document about eject mode (thucpn, May 26, 2025)
- d12b7a4 Create many-knives-warn.md (thucpn, May 26, 2025)
- 5841c44 ignore ejected next/ folder (thucpn, May 26, 2025)
- 1310352 wrong doc (thucpn, May 26, 2025)
- e35f9bd fix: doc (thucpn, May 26, 2025)
- 172bacf revert server (thucpn, May 26, 2025)
- 9eb0ea6 keep server options (thucpn, May 26, 2025)
- 80e7267 use config from .env for route handler (thucpn, May 26, 2025)
- 18b0722 prepare nextjs project (thucpn, May 26, 2025)
- 1135931 fix: scripts (thucpn, May 27, 2025)
- b56e589 Merge branch 'main' into tp/eject-to-fully-support-custom-next-server (thucpn, May 27, 2025)
- 77146a1 update script (thucpn, May 27, 2025)
- f7a0871 fix eject output argument (thucpn, May 27, 2025)
- d418603 Merge branch 'main' into tp/eject-to-fully-support-custom-next-server (thucpn, May 27, 2025)
- a5ecc74 update doc (thucpn, May 27, 2025)
- a846adc update route imports (thucpn, May 27, 2025)
- 7efd227 update package.json (thucpn, May 27, 2025)
- 7279545 update gitignore (thucpn, May 27, 2025)
- e8f899b copy other files (thucpn, May 27, 2025)
- 2fed965 nextjs project config files (thucpn, May 27, 2025)
- d4fc315 update eject script (thucpn, May 27, 2025)
- 8f01f37 update package.json (thucpn, May 27, 2025)
- 83c8203 missing eslint packages (thucpn, May 27, 2025)
- fdcd776 bump react-day-picker to fix peer deps issue with date-fns (thucpn, May 27, 2025)
- cf8678c missing dotenv (thucpn, May 27, 2025)
- 13ee73a update ignore (thucpn, May 27, 2025)
- cdfbf95 copy env (thucpn, May 27, 2025)
- 2c0fbdc update frontend config.js file (thucpn, May 27, 2025)
- 4c92335 fix: config path (thucpn, May 27, 2025)
- d7082b7 enable llamacloud (thucpn, May 27, 2025)
- f8a4275 no need layout for normal nextjs (thucpn, May 27, 2025)
- fffae76 Merge branch 'main' into tp/eject-to-fully-support-custom-next-server (thucpn, May 27, 2025)
- 27994c9 remove LAYOUT_DIR from env (thucpn, May 27, 2025)
- 81b0fc9 don't use LAYOUT_DIR (thucpn, May 27, 2025)
- 9875737 update document (thucpn, May 27, 2025)
- a8d78c2 update imports in workflow file (thucpn, May 27, 2025)
- 541cdd3 update document (thucpn, May 27, 2025)
- 9bcd6f3 use NEXT_PUBLIC for frontend config (thucpn, May 27, 2025)
- 1811957 fix: update workflow file path for ejected project (thucpn, May 27, 2025)
- b011630 fix: path (thucpn, May 27, 2025)
- 91d1cb9 fix: eject dest path (thucpn, May 27, 2025)
- 3f553ef use single entry point for utils (thucpn, May 28, 2025)
- bd167c1 update imports for generate.ts (thucpn, May 28, 2025)
- adbd671 use argument after eject (thucpn, May 28, 2025)
- 97bdf67 fix path (thucpn, May 28, 2025)
- 76cad7e fix: starter questions (thucpn, May 28, 2025)
- 9b81778 remove config.js instead of making it empty (thucpn, May 28, 2025)
- ebea495 update document (thucpn, May 28, 2025)
- 65937c5 add eject to e2e (thucpn, May 28, 2025)
- aba3879 import { OpenAI } from "@llamaindex/openai"; (thucpn, May 28, 2025)
- 763bc23 disable eject test for llamacloud (thucpn, May 28, 2025)
6 changes: 6 additions & 0 deletions .changeset/many-knives-warn.md
@@ -0,0 +1,6 @@
---
"create-llama": patch
"@llamaindex/server": patch
---

support eject to fully customize next folder
2 changes: 2 additions & 0 deletions eslint.config.mjs
@@ -58,6 +58,8 @@ export default tseslint.config(
"**/node_modules/**",
"**/build/**",
"packages/server/server/**",
"packages/server/project/**",
"packages/server/bin/**",
],
},
);
@@ -1,5 +1,5 @@
import { expect, test } from "@playwright/test";
import { ChildProcess } from "child_process";
import { ChildProcess, execSync } from "child_process";
import fs from "fs";
import path from "path";
import type {
@@ -28,6 +28,7 @@ const templateUseCases = [
"deep_research",
"code_generator",
];
const ejectDir = "next";

for (const useCase of templateUseCases) {
test.describe(`Test use case ${useCase} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
@@ -110,6 +111,28 @@ for (const useCase of templateUseCases) {
expect(response.ok()).toBeTruthy();
});

test("Should successfully eject, install dependencies and build without errors", async () => {
test.skip(
templateFramework !== "nextjs" ||
useCase !== "code_generator" ||
dataSource === "--llamacloud",
"Eject test only applies to Next.js framework, code generator use case, and non-llamacloud",
);

// Run eject command
execSync("npm run eject", { cwd: path.join(cwd, name) });

// Verify next directory exists
const nextDirExists = fs.existsSync(path.join(cwd, name, ejectDir));
expect(nextDirExists).toBeTruthy();

// Install dependencies in next directory
execSync("npm install", { cwd: path.join(cwd, name, ejectDir) });

// Run build
execSync("npm run build", { cwd: path.join(cwd, name, ejectDir) });
});

// clean processes
test.afterAll(async () => {
appProcess?.kill();
@@ -41,6 +41,14 @@ curl --location 'localhost:3000/api/chat' \
--data '{ "messages": [{ "role": "user", "content": "What standards for a letter exist?" }] }'
```

## Eject Mode

If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.

```bash
npm run eject
```

## Learn More

To learn more about LlamaIndex, take a look at the following resources:
@@ -42,6 +42,14 @@ curl --location 'localhost:3000/api/chat' \
--data '{ "messages": [{ "role": "user", "content": "Compare the financial performance of Apple and Tesla" }] }'
```

## Eject Mode

If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.

```bash
npm run eject
```

## Learn More

To learn more about LlamaIndex, take a look at the following resources:
@@ -53,6 +53,14 @@ curl --location 'localhost:3000/api/chat' \
--data '{ "messages": [{ "role": "user", "content": "Compare the financial performance of Apple and Tesla" }] }'
```

## Eject Mode

If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.

```bash
npm run eject
```

## Learn More

To learn more about LlamaIndex, take a look at the following resources:
@@ -42,6 +42,14 @@ curl --location 'localhost:3000/api/chat' \
--data '{ "messages": [{ "role": "user", "content": "Compare the financial performance of Apple and Tesla" }] }'
```

## Eject Mode

If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.

```bash
npm run eject
```

## Learn More

To learn more about LlamaIndex, take a look at the following resources:
@@ -41,6 +41,14 @@ curl --location 'localhost:3000/api/chat' \
--data '{ "messages": [{ "role": "user", "content": "Generate a financial report that compares the financial performance of Apple and Tesla" }] }'
```

## Eject Mode

If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.

```bash
npm run eject
```

## Learn More

To learn more about LlamaIndex, take a look at the following resources:
@@ -1,8 +1,9 @@
import { OpenAI } from "@llamaindex/openai";
import { generateEventComponent } from "@llamaindex/server";
import * as dotenv from "dotenv";
import "dotenv/config";
import * as fs from "fs/promises";
import { LLamaCloudFileService, OpenAI } from "llamaindex";
import { LLamaCloudFileService } from "llamaindex";
import * as path from "path";
import { getIndex } from "./app/data";
import { initSettings } from "./app/settings";
@@ -6,7 +6,8 @@
"generate:datasource": "tsx src/generate.ts datasource",
"generate:ui": "tsx src/generate.ts ui",
"dev": "nodemon",
"start": "tsx src/index.ts"
"start": "tsx src/index.ts",
"eject": "llamaindex-server eject"
},
"dependencies": {
"@llamaindex/openai": "~0.4.0",
3 changes: 3 additions & 0 deletions packages/server/.gitignore
@@ -1,5 +1,8 @@
# server contains Nextjs frontend code (not compiled)
server/

# the ejected nextjs project
project/

# temp is the copy of next folder but without API folder, used to build frontend static files
temp/
17 changes: 17 additions & 0 deletions packages/server/README.md
@@ -300,6 +300,23 @@ The server always provides a chat interface at the root path (`/`) with:
- The server automatically mounts the `data` and `output` folders at `{server_url}{api_prefix}/files/data` (default: `/api/files/data`) and `{server_url}{api_prefix}/files/output` (default: `/api/files/output`) respectively.
- Your workflows can use both folders to store and access files. By convention, the `data` folder is used for documents that are ingested, and the `output` folder is used for documents generated by the workflow.
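For example, a document placed in the `data` folder can be fetched over HTTP from the default mount point. This is only a sketch; the filename used below is hypothetical:

```bash
# Fetch an ingested document from the data mount (default prefix: /api/files/data).
# "101.pdf" is only a placeholder; use a file that actually exists in your data folder.
curl localhost:3000/api/files/data/101.pdf --output 101.pdf
```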

### Eject Mode

If you want to fully customize the server UI and routes, you can use `npm run eject`. It will create a normal Next.js project with the same functionality as @llamaindex/server.
By default, the ejected project is placed in the `next` directory inside the current working directory. You can change the output directory by providing a custom path after the `eject` command:

```bash
npm run eject <path-to-output-directory>
```

How eject works:

1. Initialize a Next.js project with ESLint, Prettier, PostCSS, Tailwind CSS, shadcn components, etc.
2. Copy your workflow definition and settings files from `src/app/*` to `app/api/chat` in the ejected project
3. Copy your `components`, `data`, `output`, and `storage` folders to the ejected project
4. Copy your current `.env` file to the ejected project
5. Clean up files that are no longer needed and update imports
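As a rough sketch of the typical flow (mirroring the e2e test added in this PR and assuming the default `next` output directory), the ejected project is then installed and built like any other Next.js app:

```bash
# Eject into the default ./next directory, then install dependencies and build.
npm run eject
cd next
npm install
npm run build

# Or run the dev server instead of building (assuming the standard Next.js dev script):
npm run dev
```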

## API Reference

- [LlamaIndexServer](https://ts.llamaindex.ai/docs/api/classes/LlamaIndexServer)
172 changes: 172 additions & 0 deletions packages/server/bin/eject.cjs
@@ -0,0 +1,172 @@
#!/usr/bin/env node

const fs = require("fs").promises;
const path = require("path");

// Resolve the project directory in node_modules/@llamaindex/server/project
// This is the template that is used to construct the Next.js project
const projectDir = path.resolve(__dirname, "../project");

// Resolve the src directory that contains workflow & setting files
const srcDir = path.join(process.cwd(), "src");
const srcAppDir = path.join(srcDir, "app");
const generateFile = path.join(srcDir, "generate.ts");
const envFile = path.join(process.cwd(), ".env");

// The environment variables that are used as LlamaIndexServer configs
const SERVER_CONFIG_VARS = [
{
key: "OPENAI_API_KEY",
defaultValue: "<your-openai-api-key>",
description: "OpenAI API key",
},
{
key: "SUGGEST_NEXT_QUESTIONS",
defaultValue: "true",
description: "Whether to suggest next questions (`suggestNextQuestions`)",
},
{
key: "COMPONENTS_DIR",
defaultValue: "components",
description: "Directory for custom components (`componentsDir`)",
},
{
key: "WORKFLOW_FILE_PATH",
defaultValue: "app/api/chat/app/workflow.ts",
description: "The path to the workflow file (will be updated in dev mode)",
},
{
key: "NEXT_PUBLIC_USE_COMPONENTS_DIR",
defaultValue: "true",
description: "Whether to enable components directory feature on frontend",
},
{
key: "NEXT_PUBLIC_DEV_MODE",
defaultValue: "true",
description: "Whether to enable dev mode (`devMode`)",
},
{
key: "NEXT_PUBLIC_STARTER_QUESTIONS",
defaultValue: '["Summarize the document", "What are the key points?"]',
description:
"Initial questions to display in the chat (`starterQuestions`)",
},
{
key: "NEXT_PUBLIC_SHOW_LLAMACLOUD_SELECTOR",
defaultValue: "false",
description:
"Whether to show LlamaCloud selector for frontend (`llamaCloudIndexSelector`)",
},
];

async function eject() {
try {
// validate required directories (nextjs project template, src directory, src/app directory)
const requiredDirs = [projectDir, srcDir, srcAppDir];
for (const dir of requiredDirs) {
const exists = await fs
.access(dir)
.then(() => true)
.catch(() => false);
if (!exists) {
console.error("Error: directory does not exist at", dir);
process.exit(1);
}
}

// Get destination directory from command line arguments (pnpm eject <path>)
const args = process.argv;
const outputIndex = args.indexOf("eject");
const destDir =
outputIndex !== -1 && args[outputIndex + 1]
? path.resolve(args[outputIndex + 1]) // Use provided path after eject
: path.join(process.cwd(), "next"); // Default to "next" folder in the current working directory

// remove destination directory if it exists
await fs.rm(destDir, { recursive: true, force: true });

// create destination directory
await fs.mkdir(destDir, { recursive: true });

// Copy the nextjs project template to the destination directory
await fs.cp(projectDir, destDir, { recursive: true });

// copy src/app/* to destDir/app/api/chat
const chatRouteDir = path.join(destDir, "app", "api", "chat");
await fs.cp(srcAppDir, path.join(chatRouteDir, "app"), { recursive: true });

// the ejected nextjs project doesn't depend on @llamaindex/server anymore, so we need to update the imports in the workflow file
const workflowFile = path.join(chatRouteDir, "app", "workflow.ts");
let workflowContent = await fs.readFile(workflowFile, "utf-8");
workflowContent = workflowContent.replace("@llamaindex/server", "../utils");
await fs.writeFile(workflowFile, workflowContent);

// copy generate.ts if it exists
const genFilePath = path.join(chatRouteDir, "generate.ts");
const genFileExists = await copy(generateFile, genFilePath);
if (genFileExists) {
// update the import @llamaindex/server in generate.ts
let genContent = await fs.readFile(genFilePath, "utf-8");
genContent = genContent.replace("@llamaindex/server", "./utils");
await fs.writeFile(genFilePath, genContent);
}

// copy folders in root directory if exists
const rootFolders = ["components", "data", "output", "storage"];
for (const folder of rootFolders) {
await copy(path.join(process.cwd(), folder), path.join(destDir, folder));
}

// copy .env if it exists or create a new one
const envFileExists = await copy(envFile, path.join(destDir, ".env"));
if (!envFileExists) {
await fs.writeFile(path.join(destDir, ".env"), "");
}

// update .env file with more server configs
let envFileContent = await fs.readFile(path.join(destDir, ".env"), "utf-8");
for (const envVar of SERVER_CONFIG_VARS) {
const { key, defaultValue, description } = envVar;
if (!envFileContent.includes(key)) {
// if the key does not exist in the env file, add it
envFileContent += `\n# ${description}\n${key}=${defaultValue}\n`;
}
}
await fs.writeFile(path.join(destDir, ".env"), envFileContent);

// rename gitignore -> .gitignore
await fs.rename(
path.join(destDir, "gitignore"),
path.join(destDir, ".gitignore"),
);

// user can customize layout directory in nextjs project, remove layout api
await fs.rm(path.join(destDir, "app", "api", "layout"), {
recursive: true,
force: true,
});

// remove files that are no longer needed
await fs.unlink(path.join(destDir, "public", "config.js"));
await fs.unlink(path.join(destDir, "next-build.config.ts"));

console.log("Successfully ejected @llamaindex/server to", destDir);
} catch (error) {
console.error("Error during eject:", error.message);
process.exit(1);
}
}

// copy src to dest if src exists, return true if src exists
async function copy(src, dest) {
const srcExists = await fs
.access(src)
.then(() => true)
.catch(() => false);
if (srcExists) {
await fs.cp(src, dest, { recursive: true });
}
return srcExists;
}

eject();
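For reference, here is a sketch of the entries this script appends to the ejected project's `.env` when they are not already present, derived from `SERVER_CONFIG_VARS` above (the values shown are the defaults; an existing `.env` keeps its own values):

```
# OpenAI API key
OPENAI_API_KEY=<your-openai-api-key>

# Whether to suggest next questions (`suggestNextQuestions`)
SUGGEST_NEXT_QUESTIONS=true

# Directory for custom components (`componentsDir`)
COMPONENTS_DIR=components

# The path to the workflow file (will be updated in dev mode)
WORKFLOW_FILE_PATH=app/api/chat/app/workflow.ts

# Whether to enable components directory feature on frontend
NEXT_PUBLIC_USE_COMPONENTS_DIR=true

# Whether to enable dev mode (`devMode`)
NEXT_PUBLIC_DEV_MODE=true

# Initial questions to display in the chat (`starterQuestions`)
NEXT_PUBLIC_STARTER_QUESTIONS=["Summarize the document", "What are the key points?"]

# Whether to show LlamaCloud selector for frontend (`llamaCloudIndexSelector`)
NEXT_PUBLIC_SHOW_LLAMACLOUD_SELECTOR=false
```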