diff --git a/.changeset/smooth-bees-brush.md b/.changeset/smooth-bees-brush.md new file mode 100644 index 000000000..e12762c9e --- /dev/null +++ b/.changeset/smooth-bees-brush.md @@ -0,0 +1,6 @@ +--- +"create-llama": patch +"@llamaindex/server": patch +--- + +chore: add llamaindex server package diff --git a/packages/create-llama/README.md b/README.md similarity index 94% rename from packages/create-llama/README.md rename to README.md index f7ac4b64b..da45192d3 100644 --- a/packages/create-llama/README.md +++ b/README.md @@ -130,4 +130,11 @@ Pro mode is ideal for developers who want fine-grained control over their projec - [TS/JS docs](https://ts.llamaindex.ai/) - [Python docs](https://docs.llamaindex.ai/en/stable/) +## LlamaIndex Server + +We also provide a server implementation for LlamaIndex that you can use to serve your LlamaIndex workflows and agent workflows as an API server. See the following guides for more information: + +- [LlamaIndex Server For TypeScript](./packages/server/README.md) +- [LlamaIndex Server For Python](./python/llama-index-server/README.md) + Inspired by and adapted from [create-next-app](https://github.com/vercel/next.js/tree/canary/packages/create-next-app) diff --git a/package.json b/package.json index a6e8b8ad1..de8d8e065 100644 --- a/package.json +++ b/package.json @@ -21,12 +21,12 @@ "new-version": "pnpm -r build && changeset version", "release": "pnpm -r build && changeset publish", "release-snapshot": "pnpm -r build && changeset publish --tag snapshot", - "build": "pnpm -r --filter create-llama build", - "e2e": "pnpm -r --filter create-llama e2e", - "dev": "pnpm -r --filter create-llama dev", - "format": "pnpm -r --filter create-llama format", - "format:write": "pnpm -r --filter create-llama format:write", - "lint": "pnpm -r --filter create-llama lint" + "build": "pnpm -r build", + "e2e": "pnpm -r e2e", + "dev": "pnpm -r dev", + "format": "pnpm -r format", + "format:write": "pnpm -r format:write", + "lint": "pnpm -r lint" }, 
"devDependencies": { "@changesets/cli": "^2.27.1", diff --git a/packages/create-llama/.gitignore b/packages/create-llama/.gitignore index 39e4eeea7..a84755d38 100644 --- a/packages/create-llama/.gitignore +++ b/packages/create-llama/.gitignore @@ -59,3 +59,7 @@ __pycache__ # build artifacts create-llama-*.tgz + +# copied from root +README.md +LICENSE.md \ No newline at end of file diff --git a/CHANGELOG.md b/packages/create-llama/CHANGELOG.md similarity index 100% rename from CHANGELOG.md rename to packages/create-llama/CHANGELOG.md diff --git a/packages/create-llama/package.json b/packages/create-llama/package.json index 274651e86..fbc279d66 100644 --- a/packages/create-llama/package.json +++ b/packages/create-llama/package.json @@ -17,11 +17,15 @@ "create-llama": "./dist/index.js" }, "files": [ - "dist" + "dist", + "README.md", + "LICENSE.md" ], "scripts": { + "copy": "cp -r ../../README.md ../../LICENSE.md .", "build": "bash ./scripts/build.sh", "build:ncc": "pnpm run clean && ncc build ./index.ts -o ./dist/ --minify --no-cache --no-source-map-register", + "postbuild": "pnpm run copy", "clean": "rimraf --glob ./dist ./templates/**/__pycache__ ./templates/**/node_modules ./templates/**/poetry.lock", "dev": "ncc build ./index.ts -w -o dist/", "e2e": "playwright test", diff --git a/packages/server/.gitignore b/packages/server/.gitignore new file mode 100644 index 000000000..1550f1629 --- /dev/null +++ b/packages/server/.gitignore @@ -0,0 +1 @@ +server/ \ No newline at end of file diff --git a/packages/server/.prettierignore b/packages/server/.prettierignore new file mode 100644 index 000000000..ede4c86d6 --- /dev/null +++ b/packages/server/.prettierignore @@ -0,0 +1,7 @@ +lib/ +dist/ +server/ +next/.next/ +next/out/ +node_modules/ +build/ diff --git a/packages/server/CHANGELOG.md b/packages/server/CHANGELOG.md new file mode 100644 index 000000000..d24f2cafb --- /dev/null +++ b/packages/server/CHANGELOG.md @@ -0,0 +1,115 @@ +# @llamaindex/server + +## 0.1.6 + +### 
Patch Changes + +- 82d4b46: feat: re-add supports for artifacts + +## 0.1.5 + +### Patch Changes + +- 7ca9ddf: Add generate ui workflow to @llamaindex/server +- 3310eaa: chore: bump chat-ui + - llamaindex@0.10.2 + +## 0.1.4 + +### Patch Changes + +- llamaindex@0.10.1 + +## 0.1.3 + +### Patch Changes + +- edb8b87: fix: shadcn components cannot be used in next server +- Updated dependencies [6cf928f] + - llamaindex@0.10.0 + +## 0.1.2 + +### Patch Changes + +- bb34ade: feat: support cn utils for server UI + - llamaindex@0.9.19 + +## 0.1.1 + +### Patch Changes + +- 400b3b5: feat: use full-source code with import statements for custom comps + - llamaindex@0.9.18 + +## 0.1.0 + +### Minor Changes + +- 3ffee26: feat: enhance config params for LlamaIndexServer + +## 0.0.9 + +### Patch Changes + +- 0b75bd6: feat: component dir in llamaindex server + +## 0.0.8 + +### Patch Changes + +- Updated dependencies [3534c37] + - llamaindex@0.9.17 + +## 0.0.7 + +### Patch Changes + +- 4999df1: bump nextjs +- Updated dependencies [f5e4d09] + - llamaindex@0.9.16 + +## 0.0.6 + +### Patch Changes + +- 8c02684: fix: handle stream error +- c515a32: feat: return raw output for agent toolcall result + - llamaindex@0.9.15 + +## 0.0.5 + +### Patch Changes + +- 9d951b2: feat: support llamacloud in @llamaindex/server +- Updated dependencies [9d951b2] + - llamaindex@0.9.14 + +## 0.0.4 + +### Patch Changes + +- 164cf7a: fix: custom next server start fail + +## 0.0.3 + +### Patch Changes + +- 299008b: feat: copy create-llama to @llamaindex/servers +- 75d6e29: feat: response source nodes in query tool output +- Updated dependencies [75d6e29] + - llamaindex@0.9.13 + +## 0.0.2 + +### Patch Changes + +- f8a86e4: feat: @llamaindex/server +- Updated dependencies [21bebfc] +- Updated dependencies [93bc0ff] +- Updated dependencies [91a18e7] +- Updated dependencies [f8a86e4] +- Updated dependencies [5189b44] +- Updated dependencies [58a9446] + - @llamaindex/core@0.6.0 + - @llamaindex/workflow@1.0.0 diff --git 
a/packages/server/README.md b/packages/server/README.md new file mode 100644 index 000000000..3ae5d96df --- /dev/null +++ b/packages/server/README.md @@ -0,0 +1,161 @@ +# LlamaIndex Server + +LlamaIndexServer is a Next.js-based application that allows you to quickly launch your [LlamaIndex Workflows](https://ts.llamaindex.ai/docs/llamaindex/modules/agents/workflows) and [Agent Workflows](https://ts.llamaindex.ai/docs/llamaindex/modules/agents/agent_workflow) as an API server with an optional chat UI. It provides a complete environment for running LlamaIndex workflows with both API endpoints and a user interface for interaction. + +## Features + +- Serving a workflow as a chatbot +- Built on Next.js for high performance and easy API development +- Optional built-in chat UI with extendable UI components +- Prebuilt development code + +## Installation + +```bash +npm i @llamaindex/server +``` + +## Quick Start + +Create an `index.ts` file and add the following code: + +```ts +import { LlamaIndexServer } from "@llamaindex/server"; +import { wiki } from "@llamaindex/tools"; // or any other tool + +const createWorkflow = () => agent({ tools: [wiki()] }); + +new LlamaIndexServer({ + workflow: createWorkflow, + uiConfig: { + appTitle: "LlamaIndex App", + starterQuestions: ["Who is the first president of the United States?"], + }, +}).start(); +``` + +## Running the Server + +In the same directory as `index.ts`, run the following command to start the server: + +```bash +tsx index.ts +``` + +The server will start at `http://localhost:3000` + +You can also make a request to the server: + +```bash +curl -X POST "http://localhost:3000/api/chat" -H "Content-Type: application/json" -d '{"message": "Who is the first president of the United States?"}' +``` + +## Configuration Options + +The `LlamaIndexServer` accepts the following configuration options: + +- `workflow`: A callable function that creates a workflow instance for each request +- `uiConfig`: An object to configure the 
chat UI containing the following properties: + - `appTitle`: The title of the application (default: `"LlamaIndex App"`) + - `starterQuestions`: List of starter questions for the chat UI (default: `[]`) + - `componentsDir`: The directory for custom UI components rendering events emitted by the workflow. The default is undefined, which does not render custom UI components. + - `llamaCloudIndexSelector`: Whether to show the LlamaCloud index selector in the chat UI (requires `LLAMA_CLOUD_API_KEY` to be set in the environment variables) (default: `false`) + +LlamaIndexServer accepts all the configuration options from Next.js Custom Server such as `port`, `hostname`, `dev`, etc. +See all Next.js Custom Server options [here](https://nextjs.org/docs/app/building-your-application/configuring/custom-server). + +## AI-generated UI Components + +The LlamaIndex server provides support for rendering workflow events using custom UI components, allowing you to extend and customize the chat interface. +These components can be auto-generated using an LLM by providing a JSON schema of the workflow event. + +### UI Event Schema + +To display custom UI components, your workflow needs to emit UI events that have an event type for identification and a data object: + +```typescript +class UIEvent extends WorkflowEvent<{ + type: "ui_event"; + data: UIEventData; +}> {} +``` + +The `data` object can be any JSON object. 
To enable AI generation of the UI component, you need to provide a schema for that data (here we're using Zod): + +```typescript +const MyEventDataSchema = z + .object({ + stage: z + .enum(["retrieve", "analyze", "answer"]) + .describe("The current stage the workflow process is in."), + progress: z + .number() + .min(0) + .max(1) + .describe("The progress in percent of the current stage"), + }) + .describe("WorkflowStageProgress"); + +type UIEventData = z.infer<typeof MyEventDataSchema>; +``` + +### Generate UI Components + +The `generateEventComponent` function uses an LLM to generate a custom UI component based on the JSON schema of a workflow event. The schema should contain accurate descriptions of each field so that the LLM can generate matching components for your use case. We've done this for you in the example above using the `describe` function from Zod: + +```typescript +import { OpenAI } from "llamaindex"; +import { generateEventComponent } from "@llamaindex/server"; +import { MyEventDataSchema } from "./your-workflow"; + +// Also works well with Claude 3.5 Sonnet and Google Gemini 2.5 Pro +const llm = new OpenAI({ model: "gpt-4.1" }); +const code = generateEventComponent(MyEventDataSchema, llm); +``` + +After generating the code, we need to save it to a file. The file name must match the event type from your workflow (e.g., `ui_event.jsx` for handling events with `ui_event` type): + +```ts +fs.writeFileSync("components/ui_event.jsx", code); +``` + +Feel free to modify the generated code to match your needs. If you're not satisfied with the generated code, we suggest improving the provided JSON schema first or trying another LLM. + +> Note that `generateEventComponent` generates JSX code, but you can also provide a TSX file. 
+ +### Server Setup + +To use the generated UI components, you need to initialize the LlamaIndex server with the `componentsDir` that contains your custom UI components: + +```ts +new LlamaIndexServer({ + workflow: createWorkflow, + uiConfig: { + appTitle: "LlamaIndex App", + componentsDir: "components", + }, +}).start(); +``` + +## Default Endpoints and Features + +### Chat Endpoint + +The server includes a default chat endpoint at `/api/chat` for handling chat interactions. + +### Chat UI + +The server always provides a chat interface at the root path (`/`) with: + +- Configurable starter questions +- Real-time chat interface +- API endpoint integration + +### Static File Serving + +- The server automatically mounts the `data` and `output` folders at `{server_url}{api_prefix}/files/data` (default: `/api/files/data`) and `{server_url}{api_prefix}/files/output` (default: `/api/files/output`) respectively. +- Your workflows can use both folders to store and access files. By convention, the `data` folder is used for documents that are ingested, and the `output` folder is used for documents generated by the workflow. 
+ +## API Reference + +- [LlamaIndexServer](https://ts.llamaindex.ai/docs/api/classes/LlamaIndexServer) diff --git a/packages/server/eslint.config.mjs b/packages/server/eslint.config.mjs new file mode 100644 index 000000000..85cc1009a --- /dev/null +++ b/packages/server/eslint.config.mjs @@ -0,0 +1,42 @@ +import eslint from "@eslint/js"; +import eslintConfigPrettier from "eslint-config-prettier"; +import globals from "globals"; +import tseslint from "typescript-eslint"; + +export default tseslint.config( + eslint.configs.recommended, + ...tseslint.configs.recommended, + eslintConfigPrettier, + { + languageOptions: { + ecmaVersion: 2022, + sourceType: "module", + globals: { + ...globals.browser, + ...globals.node, + }, + }, + }, + { + rules: { + "no-irregular-whitespace": "off", + "@typescript-eslint/no-unused-vars": "off", + "@typescript-eslint/no-explicit-any": [ + "error", + { + ignoreRestArgs: true, + }, + ], + }, + }, + { + ignores: [ + "**/dist/**", + "**/lib/*", + "**/.next/**", + "**/out/**", + "**/node_modules/**", + "**/build/**", + ], + }, +); diff --git a/packages/server/next/.gitignore b/packages/server/next/.gitignore new file mode 100644 index 000000000..9b2d3e929 --- /dev/null +++ b/packages/server/next/.gitignore @@ -0,0 +1,39 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts + +output/ + +!lib/ \ No newline at end of file diff --git a/packages/server/next/app/components/ui/accordion.tsx b/packages/server/next/app/components/ui/accordion.tsx new file mode 100644 index 000000000..9a46fdecf --- /dev/null +++ b/packages/server/next/app/components/ui/accordion.tsx @@ -0,0 +1,56 @@ +"use client"; + +import * as AccordionPrimitive from "@radix-ui/react-accordion"; +import { ChevronDown } from "lucide-react"; +import * as React from "react"; +import { cn } from "./lib/utils"; + +const Accordion = AccordionPrimitive.Root; + +const AccordionItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +AccordionItem.displayName = "AccordionItem"; + +const AccordionTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + svg]:rotate-180", + className, + )} + {...props} + > + {children} + + + +)); +AccordionTrigger.displayName = AccordionPrimitive.Trigger.displayName; + +const AccordionContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + +
{children}
+
+)); +AccordionContent.displayName = AccordionPrimitive.Content.displayName; + +export { Accordion, AccordionContent, AccordionItem, AccordionTrigger }; diff --git a/packages/server/next/app/components/ui/alert-dialog.tsx b/packages/server/next/app/components/ui/alert-dialog.tsx new file mode 100644 index 000000000..da2038625 --- /dev/null +++ b/packages/server/next/app/components/ui/alert-dialog.tsx @@ -0,0 +1,157 @@ +"use client"; + +import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog"; +import * as React from "react"; + +import { buttonVariants } from "./button"; +import { cn } from "./lib/utils"; + +function AlertDialog({ + ...props +}: React.ComponentProps) { + return ; +} + +function AlertDialogTrigger({ + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AlertDialogPortal({ + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AlertDialogOverlay({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AlertDialogContent({ + className, + ...props +}: React.ComponentProps) { + return ( + + + + + ); +} + +function AlertDialogHeader({ + className, + ...props +}: React.ComponentProps<"div">) { + return ( +
+ ); +} + +function AlertDialogFooter({ + className, + ...props +}: React.ComponentProps<"div">) { + return ( +
+ ); +} + +function AlertDialogTitle({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AlertDialogDescription({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AlertDialogAction({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AlertDialogCancel({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +export { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogOverlay, + AlertDialogPortal, + AlertDialogTitle, + AlertDialogTrigger, +}; diff --git a/packages/server/next/app/components/ui/alert.tsx b/packages/server/next/app/components/ui/alert.tsx new file mode 100644 index 000000000..1f757f69b --- /dev/null +++ b/packages/server/next/app/components/ui/alert.tsx @@ -0,0 +1,68 @@ +"use client"; + +import { cva, type VariantProps } from "class-variance-authority"; +import * as React from "react"; + +import { cn } from "./lib/utils"; + +const alertVariants = cva( + "relative w-full rounded-lg border px-4 py-3 text-sm grid has-[>svg]:grid-cols-[calc(var(--spacing)*4)_1fr] grid-cols-[0_1fr] has-[>svg]:gap-x-3 gap-y-0.5 items-start [&>svg]:size-4 [&>svg]:translate-y-0.5 [&>svg]:text-current", + { + variants: { + variant: { + default: "bg-card text-card-foreground", + destructive: + "text-destructive bg-card [&>svg]:text-current *:data-[slot=alert-description]:text-destructive/90", + }, + }, + defaultVariants: { + variant: "default", + }, + }, +); + +function Alert({ + className, + variant, + ...props +}: React.ComponentProps<"div"> & VariantProps) { + return ( +
+ ); +} + +function AlertTitle({ className, ...props }: React.ComponentProps<"div">) { + return ( +
+ ); +} + +function AlertDescription({ + className, + ...props +}: React.ComponentProps<"div">) { + return ( +
+ ); +} + +export { Alert, AlertDescription, AlertTitle }; diff --git a/packages/server/next/app/components/ui/aspect-ratio.tsx b/packages/server/next/app/components/ui/aspect-ratio.tsx new file mode 100644 index 000000000..c16d6bcb9 --- /dev/null +++ b/packages/server/next/app/components/ui/aspect-ratio.tsx @@ -0,0 +1,11 @@ +"use client"; + +import * as AspectRatioPrimitive from "@radix-ui/react-aspect-ratio"; + +function AspectRatio({ + ...props +}: React.ComponentProps) { + return ; +} + +export { AspectRatio }; diff --git a/packages/server/next/app/components/ui/avatar.tsx b/packages/server/next/app/components/ui/avatar.tsx new file mode 100644 index 000000000..c95a1ec49 --- /dev/null +++ b/packages/server/next/app/components/ui/avatar.tsx @@ -0,0 +1,53 @@ +"use client"; + +import * as AvatarPrimitive from "@radix-ui/react-avatar"; +import * as React from "react"; + +import { cn } from "./lib/utils"; + +function Avatar({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AvatarImage({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +function AvatarFallback({ + className, + ...props +}: React.ComponentProps) { + return ( + + ); +} + +export { Avatar, AvatarFallback, AvatarImage }; diff --git a/packages/server/next/app/components/ui/badge.tsx b/packages/server/next/app/components/ui/badge.tsx new file mode 100644 index 000000000..18cd684a3 --- /dev/null +++ b/packages/server/next/app/components/ui/badge.tsx @@ -0,0 +1,38 @@ +"use client"; + +import { cva, type VariantProps } from "class-variance-authority"; +import * as React from "react"; + +import { cn } from "./lib/utils"; + +const badgeVariants = cva( + "inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", + { + variants: { + variant: { + default: + "border-transparent bg-primary text-primary-foreground hover:bg-primary/80", + 
secondary: + "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", + destructive: + "border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80", + outline: "text-foreground", + }, + }, + defaultVariants: { + variant: "default", + }, + }, +); + +export interface BadgeProps + extends React.HTMLAttributes, + VariantProps {} + +function Badge({ className, variant, ...props }: BadgeProps) { + return ( +
+ ); +} + +export { Badge, badgeVariants }; diff --git a/packages/server/next/app/components/ui/breadcrumb.tsx b/packages/server/next/app/components/ui/breadcrumb.tsx new file mode 100644 index 000000000..1e7e31c04 --- /dev/null +++ b/packages/server/next/app/components/ui/breadcrumb.tsx @@ -0,0 +1,111 @@ +"use client"; + +import { Slot } from "@radix-ui/react-slot"; +import { ChevronRight, MoreHorizontal } from "lucide-react"; +import * as React from "react"; + +import { cn } from "./lib/utils"; + +function Breadcrumb({ ...props }: React.ComponentProps<"nav">) { + return