Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: support llamacloud in @llamaindex/server #1796

Merged
merged 22 commits into from
Apr 1, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .changeset/sharp-donuts-study.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
"llamaindex": patch
"@llamaindex/tools": patch
"@llamaindex/server": patch
---

feat: support llamacloud in @llamaindex/server
18 changes: 18 additions & 0 deletions packages/llamaindex/src/cloud/LlamaCloudIndex.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ import {
} from "@llamaindex/cloud/api";
import type { BaseRetriever } from "@llamaindex/core/retriever";
import { getEnv } from "@llamaindex/env";
import type { QueryToolParams } from "../indices/BaseIndex.js";
import { Settings } from "../Settings.js";
import { QueryEngineTool } from "../tools/QueryEngineTool.js";

export class LlamaCloudIndex {
params: CloudConstructorParams;
Expand Down Expand Up @@ -272,6 +274,22 @@ export class LlamaCloudIndex {
);
}

/**
 * Build a QueryEngineTool backed by this LlamaCloud index.
 *
 * If `params.options` is provided, a retriever configured with those
 * options is used by the underlying query engine.
 *
 * Fix: the previous implementation assigned `params.retriever` directly,
 * mutating the caller's argument object. A shallow local copy is used
 * instead so the caller's `params` is left untouched.
 */
asQueryTool(params: QueryToolParams): QueryEngineTool {
  // Derive the effective params without mutating the caller's object.
  const toolParams = params.options
    ? { ...params, retriever: this.asRetriever(params.options) }
    : params;

  return new QueryEngineTool({
    queryEngine: this.asQueryEngine(toolParams),
    metadata: toolParams.metadata,
    includeSourceNodes: toolParams.includeSourceNodes ?? false,
  });
}

/**
 * Alias for {@link asQueryTool}; delegates directly with the same params.
 * NOTE(review): kept for naming parity — confirm whether callers rely on
 * this name or if it can be deprecated in favor of `asQueryTool`.
 */
queryTool(params: QueryToolParams): QueryEngineTool {
  return this.asQueryTool(params);
}

async insert(document: Document) {
const pipelineId = await this.getPipelineId();

Expand Down
13 changes: 9 additions & 4 deletions packages/server/next/app/components/chat-section.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import { ChatSection as ChatSectionUI } from "@llamaindex/chat-ui";
import "@llamaindex/chat-ui/styles/markdown.css";
import "@llamaindex/chat-ui/styles/pdf.css";
import { useChat } from "ai/react";
import Header from "./header";
import CustomChatInput from "./ui/chat/chat-input";
import CustomChatMessages from "./ui/chat/chat-messages";
import { getConfig } from "./ui/lib/utils";
Expand All @@ -21,11 +22,15 @@ export default function ChatSection() {
}
alert(errorMessage);
},
experimental_throttle: 100,
});
return (
<ChatSectionUI handler={handler} className="h-full w-full">
<CustomChatMessages />
<CustomChatInput />
</ChatSectionUI>
<div className="flex h-[85vh] w-full flex-col gap-2">
<Header />
<ChatSectionUI handler={handler} className="min-h-0 w-full flex-1">
<CustomChatMessages />
<CustomChatInput />
</ChatSectionUI>
</div>
);
}
29 changes: 16 additions & 13 deletions packages/server/next/app/components/header.tsx
Original file line number Diff line number Diff line change
@@ -1,24 +1,27 @@
"use client";

import { getConfig } from "./ui/lib/utils";

export default function Header() {
return (
<div className="z-10 w-full max-w-5xl items-center justify-between font-mono text-sm lg:flex">
<p className="bg-linear-to-b fixed left-0 top-0 flex w-full justify-center border-b border-gray-300 from-zinc-200 pb-6 pt-8 backdrop-blur-2xl lg:static lg:w-auto lg:rounded-xl lg:border lg:bg-gray-200 lg:p-4 dark:border-neutral-800 dark:bg-zinc-800/30 dark:from-inherit lg:dark:bg-zinc-800/30">
Get started by editing&nbsp;
<code className="font-mono font-bold">app/page.tsx</code>
</p>
<div className="bg-linear-to-t fixed bottom-0 left-0 mb-4 flex h-auto w-full items-end justify-center from-white via-white lg:static lg:mb-0 lg:w-auto lg:bg-none dark:from-black dark:via-black">
<a
href="https://www.llamaindex.ai/"
className="font-nunito flex items-center justify-center gap-2 text-lg font-bold"
>
<span>Built by LlamaIndex</span>
<div className="z-10 w-full max-w-5xl items-center justify-between font-mono text-sm">
<div className="flex w-full flex-col items-center pb-2 text-center">
<h1 className="mb-2 text-4xl font-bold">{getConfig("APP_TITLE")}</h1>
<div className="flex items-center justify-center gap-2">
<a
href="https://www.llamaindex.ai/"
target="_blank"
rel="noopener noreferrer"
className="text-sm text-gray-600 hover:text-gray-800 dark:text-gray-400 dark:hover:text-gray-200"
>
Built by LlamaIndex
</a>
<img
className="h-[40px] w-[40px] rounded-xl"
className="h-[24px] w-[24px] rounded-sm"
src="/llama.png"
alt="Llama Logo"
/>
</a>
</div>
</div>
</div>
);
Expand Down
9 changes: 7 additions & 2 deletions packages/server/next/app/components/ui/chat/chat-starter.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,14 @@ import { StarterQuestions } from "@llamaindex/chat-ui/widgets";
import { getConfig } from "../lib/utils";

export function ChatStarter() {
const { append, messages } = useChatUI();
const { append, messages, requestData } = useChatUI();
const starterQuestions = getConfig("STARTER_QUESTIONS") ?? [];

if (starterQuestions.length === 0 || messages.length > 0) return null;
return <StarterQuestions append={append} questions={starterQuestions} />;
return (
<StarterQuestions
append={(message) => append(message, { data: requestData })}
questions={starterQuestions}
/>
);
}
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ export function LlamaCloudSelector({
);

useEffect(() => {
if (process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" && !config) {
if (!config && getConfig("LLAMA_CLOUD_API")) {
fetch(getConfig("LLAMA_CLOUD_API"))
.then((response) => {
if (!response.ok) {
Expand Down Expand Up @@ -98,10 +98,6 @@ export function LlamaCloudSelector({
setPipeline(JSON.parse(value) as PipelineConfig);
};

if (process.env.NEXT_PUBLIC_USE_LLAMACLOUD !== "true") {
return null;
}

if (!config) {
return (
<div className="flex items-center justify-center p-3">
Expand Down
6 changes: 1 addition & 5 deletions packages/server/next/app/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import { Loader2 } from "lucide-react";
import dynamic from "next/dynamic";
import Header from "./components/header";

const ChatSection = dynamic(() => import("./components/chat-section"), {
ssr: false,
Expand All @@ -17,10 +16,7 @@ export default function Home() {
return (
<main className="background-gradient flex h-screen w-screen items-center justify-center">
<div className="w-[90%] space-y-2 lg:w-[60rem] lg:space-y-10">
<Header />
<div className="flex h-[65vh]">
<ChatSection />
</div>
<ChatSection />
</div>
</main>
);
Expand Down
12 changes: 6 additions & 6 deletions packages/server/next/public/config.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
// Override the window.LLAMAINDEX object to customize frontend

window.LLAMAINDEX = {
CHAT_API: "/api/chat",
// BACKEND: "http://localhost:3000",
// UPLOAD_API: "/api/chat/upload",
// LLAMA_CLOUD_API: "/api/chat/config/llamacloud",
// STARTER_QUESTIONS: [],
APP_TITLE: "Deep Research App",
LLAMA_CLOUD_API: undefined,
STARTER_QUESTIONS: [
"Research about Apple and Tesla revenue",
"How to improve the revenue of Apple and Tesla",
],
};
37 changes: 27 additions & 10 deletions packages/server/src/events.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ export type SourceEventNode = {
score: number | null;
url: string;
text: string;
fileName: string;
filePath: string;
};

export type SourceEventData = {
Expand All @@ -36,25 +38,40 @@ export class AgentRunEvent extends WorkflowEvent<{
data: AgentRunEventData;
}> {}

export function toSourceEventNode(
node: NodeWithScore<Metadata>,
fileUrlPrefix: string = "/api/files/data",
) {
/**
 * Progress payload for one step of a deep-research workflow run.
 */
export type DeepResearchEventData = {
  // Which phase of the research loop this update belongs to.
  event: "retrieve" | "analyze" | "answer";
  // Lifecycle state of that phase.
  state: "pending" | "inprogress" | "done" | "error";
  // NOTE(review): id/question/answer appear to identify and carry the
  // content of an individual sub-question — confirm against the workflow
  // that emits these events; they are optional on all phases here.
  id?: string;
  question?: string;
  answer?: string;
};

/**
 * Workflow event streamed to the client to report deep-research progress.
 * The literal `type` tag lets the frontend discriminate this event kind.
 */
export class DeepResearchEvent extends WorkflowEvent<{
  type: "deep_research_event";
  data: DeepResearchEventData;
}> {}

/**
 * Convert a retrieved node into the shape sent to the client as a source
 * citation: id, file name, a server-relative file path, the download URL,
 * metadata, score, and the node's text content.
 */
export function toSourceEventNode(node: NodeWithScore<Metadata>) {
  const { file_name, pipeline_id } = node.node.metadata;

  // LlamaCloud-managed files live under output/llamacloud; locally indexed
  // files live under data/.
  // NOTE(review): pipeline_id and file_name are concatenated with NO
  // separator — verify this matches the naming used when LlamaCloud files
  // are downloaded to disk (elsewhere the convention is
  // `<pipeline_id>$<file_name>`); otherwise the generated URL will not
  // resolve, and distinct (pipeline, file) pairs can collide.
  const filePath = pipeline_id
    ? `output/llamacloud/${pipeline_id}${file_name}`
    : `data/${file_name}`;

  return {
    id: node.node.id_,
    fileName: file_name,
    filePath,
    url: `/api/files/${filePath}`,
    metadata: node.node.metadata,
    // Normalize a missing score to an explicit null for the client.
    score: node.score ?? null,
    text: node.node.getContent(MetadataMode.NONE),
  };
}

export function toSourceEvent(
sourceNodes: NodeWithScore<Metadata>[] = [],
fileUrlPrefix: string = "/api/files/data",
) {
export function toSourceEvent(sourceNodes: NodeWithScore<Metadata>[] = []) {
const nodes: SourceEventNode[] = sourceNodes.map((node) =>
toSourceEventNode(node, fileUrlPrefix),
toSourceEventNode(node),
);
return new SourceEvent({
type: "sources",
Expand Down
30 changes: 30 additions & 0 deletions packages/server/src/handlers/cloud.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import { getEnv } from "@llamaindex/env";
import type { IncomingMessage, ServerResponse } from "http";
import { LLamaCloudFileService } from "llamaindex";
import { sendJSONResponse } from "../utils/request";

/**
 * GET handler returning the LlamaCloud configuration for the frontend:
 * all projects (with their pipelines) plus the pipeline/project selected
 * via environment variables.
 *
 * Responds 500 when LLAMA_CLOUD_API_KEY is missing or the project listing
 * fails.
 *
 * Fix: the catch block previously discarded the caught error entirely —
 * it is now logged server-side so failures are diagnosable, while the
 * client still receives only a generic message.
 */
export const getLlamaCloudConfig = async (
  req: IncomingMessage,
  res: ServerResponse,
) => {
  // LlamaCloud calls are impossible without an API key; fail fast.
  if (!getEnv("LLAMA_CLOUD_API_KEY")) {
    return sendJSONResponse(res, 500, {
      error: "env variable LLAMA_CLOUD_API_KEY is required to use LlamaCloud",
    });
  }

  try {
    const config = {
      projects: await LLamaCloudFileService.getAllProjectsWithPipelines(),
      pipeline: {
        pipeline: getEnv("LLAMA_CLOUD_INDEX_NAME"),
        project: getEnv("LLAMA_CLOUD_PROJECT_NAME"),
      },
    };
    return sendJSONResponse(res, 200, config);
  } catch (error) {
    // Log the underlying failure; do not leak details to the client.
    console.error("Failed to fetch LlamaCloud configuration:", error);
    return sendJSONResponse(res, 500, {
      error: "Failed to fetch LlamaCloud configuration",
    });
  }
};
1 change: 1 addition & 0 deletions packages/server/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
export * from "./events";
export * from "./server";
export * from "./types";
export { toStreamGenerator } from "./utils/workflow";
31 changes: 24 additions & 7 deletions packages/server/src/server.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import { getEnv } from "@llamaindex/env";
import fs from "fs";
import { createServer } from "http";
import next from "next";
import path from "path";
import { parse } from "url";
import { handleChat } from "./handlers/chat";
import { getLlamaCloudConfig } from "./handlers/cloud";
import { handleServeFiles } from "./handlers/files";
import type { LlamaIndexServerOptions, ServerWorkflow } from "./types";

Expand All @@ -16,22 +18,29 @@ export class LlamaIndexServer {
app: ReturnType<typeof next>;
workflowFactory: () => Promise<ServerWorkflow> | ServerWorkflow;

constructor({ workflow, ...nextAppOptions }: LlamaIndexServerOptions) {
constructor(options: LlamaIndexServerOptions) {
const { workflow, ...nextAppOptions } = options;
this.app = next({ dev, dir: nextDir, ...nextAppOptions });
this.port = nextAppOptions.port ?? 3000;
this.port = nextAppOptions.port ?? parseInt(process.env.PORT || "3000", 10);
this.workflowFactory = workflow;

this.modifyConfig(nextAppOptions);
this.modifyConfig(options);
}

private modifyConfig(
options: Pick<LlamaIndexServerOptions, "starterQuestions">,
) {
private modifyConfig(options: LlamaIndexServerOptions) {
const appTitle = options.appTitle ?? "LlamaIndex App";
const starterQuestions = options.starterQuestions ?? [];
const llamaCloudApi = getEnv("LLAMA_CLOUD_API_KEY")
? "/api/chat/config/llamacloud"
: undefined;

// content in javascript format
const content = `
window.LLAMAINDEX = {
CHAT_API: '/api/chat',
STARTER_QUESTIONS: ${JSON.stringify(options.starterQuestions ?? [])}
APP_TITLE: ${JSON.stringify(appTitle)},
LLAMA_CLOUD_API: ${JSON.stringify(llamaCloudApi)},
STARTER_QUESTIONS: ${JSON.stringify(starterQuestions)}
}
`;
fs.writeFileSync(configFile, content);
Expand All @@ -52,6 +61,14 @@ export class LlamaIndexServer {
return handleServeFiles(req, res, pathname);
}

if (
getEnv("LLAMA_CLOUD_API_KEY") &&
pathname === "/api/chat/config/llamacloud" &&
req.method === "GET"
) {
return getLlamaCloudConfig(req, res);
}

const handle = this.app.getRequestHandler();
handle(req, res, parsedUrl);
});
Expand Down
1 change: 1 addition & 0 deletions packages/server/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,4 +27,5 @@ export type NextAppOptions = Parameters<typeof next>[0];
/**
 * Options accepted by LlamaIndexServer: everything Next.js accepts, plus
 * the workflow factory and UI customization knobs.
 */
export type LlamaIndexServerOptions = NextAppOptions & {
  // Factory producing the workflow that serves chat requests.
  workflow: WorkflowFactory;
  // Suggested questions shown in the chat UI before the first message.
  starterQuestions?: string[];
  // Title rendered in the frontend header.
  appTitle?: string;
};
31 changes: 31 additions & 0 deletions packages/server/src/utils/file.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import fs from "node:fs";
import https from "node:https";

/**
 * Download `urlToDownload` over HTTPS to `downloadedPath`.
 * No-op if the file already exists.
 *
 * Fixes over the previous implementation:
 * - The returned promise now settles when the download actually completes
 *   (previously `https.get` was fire-and-forget, so `await downloadFile(...)`
 *   returned before any bytes were written).
 * - Failures reject the promise instead of throwing inside an `fs.unlink`
 *   callback, which produced an uncatchable exception.
 * - Non-200 responses are treated as errors instead of piping an error
 *   page into the destination file.
 * On any failure the partially written file is removed.
 *
 * @param urlToDownload  HTTPS URL to fetch.
 * @param downloadedPath Destination path on disk.
 * @throws Error when the request fails or the server does not return 200.
 */
export async function downloadFile(
  urlToDownload: string,
  downloadedPath: string,
): Promise<void> {
  // Skip the download when the file is already present.
  if (fs.existsSync(downloadedPath)) return;

  await new Promise<void>((resolve, reject) => {
    const file = fs.createWriteStream(downloadedPath);

    // Close the stream, remove the partial file, then reject.
    const fail = (err: Error) => {
      file.close(() => {
        fs.unlink(downloadedPath, () => reject(err));
      });
    };

    file.on("error", fail);

    https
      .get(urlToDownload, (response) => {
        if (response.statusCode !== 200) {
          response.resume(); // drain the response so the socket is freed
          fail(
            new Error(
              `Error downloading file: unexpected status ${response.statusCode}`,
            ),
          );
          return;
        }
        response.pipe(file);
        file.on("finish", () => {
          file.close(() => resolve());
        });
      })
      .on("error", (err) => fail(new Error(`Error downloading file: ${err}`)));
  });
}
Loading