LangGraph Adapter #821

Open · wants to merge 4 commits into main
444 changes: 340 additions & 104 deletions app/(chat)/api/chat/route.ts

Large diffs are not rendered by default.

56 changes: 30 additions & 26 deletions app/(chat)/api/files/upload/route.ts
@@ -1,68 +1,72 @@
-import { put } from '@vercel/blob';
-import { NextResponse } from 'next/server';
-import { z } from 'zod';
+import { put } from '@vercel/blob'
+import { NextResponse } from 'next/server'
+import { z } from 'zod'

-import { auth } from '@/app/(auth)/auth';
+import { auth } from '@/app/(auth)/auth'

 // Use Blob instead of File since File is not available in Node.js environment
 const FileSchema = z.object({
   file: z
     .instanceof(Blob)
     .refine((file) => file.size <= 5 * 1024 * 1024, {
-      message: 'File size should be less than 5MB',
+      message: 'File size should be less than 5MB'
     })
-    // Update the file type based on the kind of files you want to accept
-    .refine((file) => ['image/jpeg', 'image/png'].includes(file.type), {
-      message: 'File type should be JPEG or PNG',
-    }),
-});
+    .refine(
+      (file) =>
+        ['image/jpeg', 'image/png', 'application/pdf'].includes(file.type),
+      {
+        message: 'File type should be JPEG, PNG or PDF'
+      }
+    )
+})

 export async function POST(request: Request) {
-  const session = await auth();
+  const session = await auth()

   if (!session) {
-    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
+    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
   }

   if (request.body === null) {
-    return new Response('Request body is empty', { status: 400 });
+    return new Response('Request body is empty', { status: 400 })
   }

   try {
-    const formData = await request.formData();
-    const file = formData.get('file') as Blob;
+    const formData = await request.formData()
+    const file = formData.get('file') as Blob

     if (!file) {
-      return NextResponse.json({ error: 'No file uploaded' }, { status: 400 });
+      return NextResponse.json({ error: 'No file uploaded' }, { status: 400 })
     }

-    const validatedFile = FileSchema.safeParse({ file });
+    const validatedFile = FileSchema.safeParse({ file })

     if (!validatedFile.success) {
       const errorMessage = validatedFile.error.errors
         .map((error) => error.message)
-        .join(', ');
+        .join(', ')

-      return NextResponse.json({ error: errorMessage }, { status: 400 });
+      return NextResponse.json({ error: errorMessage }, { status: 400 })
     }

     // Get filename from formData since Blob doesn't have name property
-    const filename = (formData.get('file') as File).name;
-    const fileBuffer = await file.arrayBuffer();
+    const filename = (formData.get('file') as File).name
+    const fileBuffer = await file.arrayBuffer()

     try {
       const data = await put(`${filename}`, fileBuffer, {
-        access: 'public',
-      });
+        access: 'public'
+      })

-      return NextResponse.json(data);
+      return NextResponse.json(data)
     } catch (error) {
-      return NextResponse.json({ error: 'Upload failed' }, { status: 500 });
+      return NextResponse.json({ error: 'Upload failed' }, { status: 500 })
     }
   } catch (error) {
     return NextResponse.json(
       { error: 'Failed to process request' },
-      { status: 500 },
-    );
+      { status: 500 }
+    )
   }
 }
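
For reference, a minimal client-side sketch of how this route is meant to be called. The endpoint path and the 'file' form field come from the code above; the helper name and error handling are illustrative only.

// Hypothetical client helper; assumes the route above is mounted at
// /api/files/upload and the user is already signed in.
async function uploadAttachment(file: File) {
  // Mirror the server-side limit for faster feedback; the route still
  // enforces 5MB and JPEG/PNG/PDF on its own.
  if (file.size > 5 * 1024 * 1024) {
    throw new Error('File size should be less than 5MB')
  }

  const formData = new FormData()
  formData.append('file', file)

  const response = await fetch('/api/files/upload', {
    method: 'POST',
    body: formData
  })

  if (!response.ok) {
    const { error } = await response.json()
    throw new Error(error)
  }

  // On success the route returns the Vercel Blob metadata (url, pathname, ...)
  return response.json()
}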
115 changes: 115 additions & 0 deletions app/(chat)/api/langgraph/route.ts
@@ -0,0 +1,115 @@
+import { type Message, formatDataStreamPart } from 'ai'
+import { auth } from '@/app/(auth)/auth'
+import {
+  deleteChatById,
+  getChatById,
+  saveChat,
+  saveMessages
+} from '@/lib/db/queries'
+import { generateUUID, getMostRecentUserMessage } from '@/lib/utils'
+
+import { generateTitleFromUserMessage } from '../../actions'
+import { createDocument } from '@/lib/ai/tools/create-document'
+import { updateDocument } from '@/lib/ai/tools/update-document'
+import { requestSuggestions } from '@/lib/ai/tools/request-suggestions'
+import { getWeather } from '@/lib/ai/tools/get-weather'
+import { Client } from '@langchain/langgraph-sdk'
+import { LangGraphAdapter } from '@/lib/adapters/LangGraphAdapter'
+
+export const maxDuration = 60
+
+export async function POST(request: Request) {
+  const {
+    id,
+    messages,
+    selectedChatModel
+  }: { id: string; messages: Array<Message>; selectedChatModel: string } =
+    await request.json()
+
+  const session = await auth()
+
+  if (!session || !session.user || !session.user.id) {
+    return new Response('Unauthorized', { status: 401 })
+  }
+
+  const userMessage = getMostRecentUserMessage(messages)
+
+  if (!userMessage) {
+    return new Response('No user message found', { status: 400 })
+  }
+
+  const chat = await getChatById({ id })
+
+  if (!chat) {
+    const title = await generateTitleFromUserMessage({ message: userMessage })
+    await saveChat({ id, userId: session.user.id, title })
+  }
+
+  await saveMessages({
+    messages: [{ ...userMessage, createdAt: new Date(), chatId: id }]
+  })
+
+  const client = new Client({
+    apiUrl: process.env.LANGGRAPH_API_URL,
+    apiKey: process.env.LANGGRAPH_API_KEY
+  })
+
+  // Look up the default assistant for the 'researcher' graph,
+  // creating it on first use
+  const assistants = await client.assistants.search()
+  let assistant = assistants.find((a) => a.graph_id === 'researcher')
+  if (!assistant) {
+    assistant = await client.assistants.create({ graphId: 'researcher' })
+  }
+
+  // Run the graph on a fresh thread, streaming message events back
+  const thread = await client.threads.create()
+
+  const input = {
+    messages: [userMessage]
+  }
+
+  const streamResponse = client.runs.stream(
+    thread['thread_id'],
+    assistant['assistant_id'],
+    {
+      input,
+      streamMode: 'messages'
+    }
+  )
+
+  return LangGraphAdapter.toDataStreamResponse(streamResponse)
+}
+
+export async function DELETE(request: Request) {
+  const { searchParams } = new URL(request.url)
+  const id = searchParams.get('id')
+
+  if (!id) {
+    return new Response('Not Found', { status: 404 })
+  }
+
+  const session = await auth()
+
+  if (!session || !session.user) {
+    return new Response('Unauthorized', { status: 401 })
+  }
+
+  try {
+    const chat = await getChatById({ id })
+
+    if (chat.userId !== session.user.id) {
+      return new Response('Unauthorized', { status: 401 })
+    }
+
+    await deleteChatById({ id })
+
+    return new Response('Chat deleted', { status: 200 })
+  } catch (error) {
+    return new Response('An error occurred while processing your request', {
+      status: 500
+    })
+  }
+}
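
For reference, the handler expects the same request body shape as the existing /api/chat route. A direct-call sketch follows (all ids and the model name are placeholder values); note that selectedChatModel is read from the body but the run is always dispatched to the 'researcher' graph.

// Illustrative direct call; in the app, useChat({ api: '/api/langgraph' })
// builds this request automatically.
const res = await fetch('/api/langgraph', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    id: 'b7e2f3a0-0000-0000-0000-000000000000', // placeholder chat id
    selectedChatModel: 'chat-model-small', // placeholder; not used for routing
    messages: [{ id: '1', role: 'user', content: 'What is LangGraph?' }]
  })
})

// The response is a Vercel AI data stream (X-Vercel-AI-Data-Stream: v1)
// and can be consumed incrementally via res.body.getReader().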
44 changes: 44 additions & 0 deletions app/(chat)/langgraph/page.tsx
@@ -0,0 +1,44 @@
+import { cookies } from 'next/headers'
+
+import { Chat } from '@/components/chat'
+import { DEFAULT_CHAT_MODEL } from '@/lib/ai/models'
+import { generateUUID } from '@/lib/utils'
+import { DataStreamHandler } from '@/components/data-stream-handler'
+
+export default async function Page() {
+  const id = generateUUID()
+
+  const cookieStore = await cookies()
+  const modelIdFromCookie = cookieStore.get('chat-model')
+
+  if (!modelIdFromCookie) {
+    return (
+      <>
+        <Chat
+          key={id}
+          id={id}
+          initialMessages={[]}
+          selectedChatModel={DEFAULT_CHAT_MODEL}
+          selectedVisibilityType="private"
+          isReadonly={false}
+          api="/api/langgraph"
+        />
+        <DataStreamHandler id={id} />
+      </>
+    )
+  }
+
+  return (
+    <>
+      <Chat
+        key={id}
+        id={id}
+        initialMessages={[]}
+        selectedChatModel={modelIdFromCookie.value}
+        selectedVisibilityType="private"
+        isReadonly={false}
+        api="/api/langgraph"
+      />
+      <DataStreamHandler id={id} />
+    </>
+  )
+}
64 changes: 32 additions & 32 deletions components/chat.tsx
@@ -1,35 +1,37 @@
-'use client';
+'use client'

-import type { Attachment, Message } from 'ai';
-import { useChat } from 'ai/react';
-import { useState } from 'react';
-import useSWR, { useSWRConfig } from 'swr';
+import type { Attachment, Message } from 'ai'
+import { useChat } from 'ai/react'
+import { useState } from 'react'
+import useSWR, { useSWRConfig } from 'swr'

-import { ChatHeader } from '@/components/chat-header';
-import type { Vote } from '@/lib/db/schema';
-import { fetcher, generateUUID } from '@/lib/utils';
+import { ChatHeader } from '@/components/chat-header'
+import type { Vote } from '@/lib/db/schema'
+import { fetcher, generateUUID } from '@/lib/utils'

-import { Block } from './block';
-import { MultimodalInput } from './multimodal-input';
-import { Messages } from './messages';
-import { VisibilityType } from './visibility-selector';
-import { useBlockSelector } from '@/hooks/use-block';
-import { toast } from 'sonner';
+import { Block } from './block'
+import { MultimodalInput } from './multimodal-input'
+import { Messages } from './messages'
+import { VisibilityType } from './visibility-selector'
+import { useBlockSelector } from '@/hooks/use-block'
+import { toast } from 'sonner'

 export function Chat({
   id,
   initialMessages,
   selectedChatModel,
   selectedVisibilityType,
   isReadonly,
+  api = '/api/chat'
 }: {
-  id: string;
-  initialMessages: Array<Message>;
-  selectedChatModel: string;
-  selectedVisibilityType: VisibilityType;
-  isReadonly: boolean;
+  id: string
+  initialMessages: Array<Message>
+  selectedChatModel: string
+  selectedVisibilityType: VisibilityType
+  isReadonly: boolean
+  api?: string
 }) {
-  const { mutate } = useSWRConfig();
+  const { mutate } = useSWRConfig()

   const {
     messages,
@@ -40,29 +42,27 @@ export function Chat({
     append,
     isLoading,
     stop,
-    reload,
+    reload
   } = useChat({
+    api,
     id,
     body: { id, selectedChatModel: selectedChatModel },
     initialMessages,
     experimental_throttle: 100,
     sendExtraMessageFields: true,
     generateId: generateUUID,
     onFinish: () => {
-      mutate('/api/history');
+      mutate('/api/history')
     },
     onError: (error) => {
-      toast.error('An error occured, please try again!');
-    },
-  });
+      toast.error('An error occurred, please try again!')
+    }
+  })

-  const { data: votes } = useSWR<Array<Vote>>(
-    `/api/vote?chatId=${id}`,
-    fetcher,
-  );
+  const { data: votes } = useSWR<Array<Vote>>(`/api/vote?chatId=${id}`, fetcher)

-  const [attachments, setAttachments] = useState<Array<Attachment>>([]);
-  const isBlockVisible = useBlockSelector((state) => state.isVisible);
+  const [attachments, setAttachments] = useState<Array<Attachment>>([])
+  const isBlockVisible = useBlockSelector((state) => state.isVisible)

   return (
     <>
@@ -121,5 +121,5 @@ export function Chat({
         isReadonly={isReadonly}
       />
     </>
-  );
+  )
 }
20 changes: 11 additions & 9 deletions components/preview-attachment.tsx
@@ -1,15 +1,17 @@
-import type { Attachment } from 'ai';
+import type { Attachment } from 'ai'

-import { LoaderIcon } from './icons';
+import { LoaderIcon } from './icons'
+import { FileTextIcon } from 'lucide-react'

 export const PreviewAttachment = ({
   attachment,
-  isUploading = false,
+  isUploading = false
 }: {
-  attachment: Attachment;
-  isUploading?: boolean;
+  attachment: Attachment
+  isUploading?: boolean
 }) => {
-  const { name, url, contentType } = attachment;
+  const { name, url, contentType } = attachment

   return (
     <div className="flex flex-col gap-2">
@@ -25,7 +27,7 @@ export const PreviewAttachment = ({
             className="rounded-md size-full object-cover"
           />
         ) : (
-          <div className="" />
+          <FileTextIcon size={30} className="text-zinc-500" />
         )
       ) : (
         <div className="" />
@@ -39,5 +41,5 @@ export const PreviewAttachment = ({
       </div>
       <div className="text-xs text-zinc-500 max-w-16 truncate">{name}</div>
     </div>
-  );
-};
+  )
+}
107 changes: 107 additions & 0 deletions lib/adapters/LangGraphAdapter.ts
@@ -0,0 +1,107 @@
+import { formatDataStreamPart } from 'ai'
+import { generateUUID } from '@/lib/utils'
+
+type LangGraphStreamEvent = {
+  event: string
+  data: any
+}
+
+export class LangGraphAdapter {
+  // Re-emits LangGraph's cumulative message snapshots as incremental
+  // text deltas in the AI SDK data stream format.
+  private static async *deltaMessagesGenerator(
+    streamResponse: AsyncGenerator<{ event: string; data: any }, any, any>
+  ): AsyncGenerator<string, void, unknown> {
+    let lastOutput = ''
+    for await (const message of streamResponse) {
+      if (message.event !== 'messages/complete') {
+        const msg = message.data?.[0]
+        if (msg?.content) {
+          const current = msg.content
+          const delta = current.substring(lastOutput.length)
+          lastOutput = current
+          if (delta) {
+            yield formatDataStreamPart('text', delta)
+          }
+        }
+      }
+    }
+  }
+
+  private static async *fullDataStreamGenerator(
+    streamResponse: AsyncGenerator<LangGraphStreamEvent, any, any>,
+    messageId = generateUUID()
+  ): AsyncGenerator<string, void, unknown> {
+    yield formatDataStreamPart('start_step', { messageId })
+
+    for await (const delta of this.deltaMessagesGenerator(streamResponse)) {
+      yield delta
+    }
+
+    // NOTE: hard-coded placeholder usage values; token counts are not
+    // derived from the stream.
+    yield formatDataStreamPart('finish_step', {
+      finishReason: 'stop',
+      usage: { promptTokens: 55, completionTokens: 20 },
+      isContinued: false
+    })
+
+    yield formatDataStreamPart('finish_message', {
+      finishReason: 'stop',
+      usage: { promptTokens: 55, completionTokens: 20 }
+    })
+  }
+
+  private static asyncGeneratorToReadableStream(
+    generator: AsyncGenerator<string, any, any>
+  ): ReadableStream<string> {
+    return new ReadableStream<string>({
+      async pull(controller) {
+        const { done, value } = await generator.next()
+        if (done) {
+          controller.close()
+        } else {
+          controller.enqueue(value)
+        }
+      },
+      async cancel(reason) {
+        if (generator.return) {
+          await generator.return(reason)
+        }
+      }
+    })
+  }
+
+  private static prepareResponseHeaders(
+    headers: HeadersInit | undefined,
+    {
+      contentType,
+      dataStreamVersion
+    }: { contentType: string; dataStreamVersion?: 'v1' | undefined }
+  ) {
+    const responseHeaders = new Headers(headers ?? {})
+    if (!responseHeaders.has('Content-Type')) {
+      responseHeaders.set('Content-Type', contentType)
+    }
+    if (dataStreamVersion !== undefined) {
+      responseHeaders.set('X-Vercel-AI-Data-Stream', dataStreamVersion)
+    }
+    return responseHeaders
+  }
+
+  static toDataStreamResponse(
+    streamResponse: AsyncGenerator<LangGraphStreamEvent, any, any>
+  ): Response {
+    const fullGenerator = this.fullDataStreamGenerator(streamResponse)
+    const readableStream = this.asyncGeneratorToReadableStream(fullGenerator)
+    const responseStream = readableStream.pipeThrough(new TextEncoderStream())
+
+    return new Response(responseStream, {
+      status: 200,
+      statusText: 'OK',
+      headers: this.prepareResponseHeaders(
+        {},
+        {
+          contentType: 'text/plain; charset=utf-8',
+          dataStreamVersion: 'v1'
+        }
+      )
+    })
+  }
+}
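
A quick way to sanity-check the adapter without a running LangGraph deployment is to feed it a hand-rolled event stream. This test-only sketch (not part of the diff) mimics the cumulative messages/partial snapshots followed by a messages/complete event that the SDK emits under streamMode: 'messages'.

import { LangGraphAdapter } from '@/lib/adapters/LangGraphAdapter'

// Fake stream: content snapshots are cumulative, as in the real SDK.
async function* fakeStream() {
  yield { event: 'messages/partial', data: [{ content: 'Hel' }] }
  yield { event: 'messages/partial', data: [{ content: 'Hello, world' }] }
  yield { event: 'messages/complete', data: [{ content: 'Hello, world' }] }
}

const response = LangGraphAdapter.toDataStreamResponse(fakeStream())

// Expected wire format (AI SDK data stream protocol), roughly:
//   f:{"messageId":"..."}          <- start_step
//   0:"Hel"                        <- first text delta
//   0:"lo, world"                  <- second text delta
//   e:{"finishReason":"stop",...}  <- finish_step
//   d:{"finishReason":"stop",...}  <- finish_message
console.log(await response.text())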
3 changes: 3 additions & 0 deletions package.json
@@ -25,6 +25,8 @@
     "@codemirror/state": "^6.5.0",
     "@codemirror/theme-one-dark": "^6.1.2",
     "@codemirror/view": "^6.35.3",
+    "@langchain/langgraph-sdk": "^0.0.40",
+    "@langchain/openai": "^0.4.4",
     "@radix-ui/react-alert-dialog": "^1.1.2",
     "@radix-ui/react-dialog": "^1.1.2",
     "@radix-ui/react-dropdown-menu": "^2.1.2",
@@ -51,6 +53,7 @@
     "fast-deep-equal": "^3.1.3",
     "framer-motion": "^11.3.19",
     "geist": "^1.3.1",
+    "langchain": "^0.3.15",
     "lucide-react": "^0.446.0",
     "nanoid": "^5.0.8",
     "next": "15.0.3-canary.2",
472 changes: 472 additions & 0 deletions pnpm-lock.yaml

Large diffs are not rendered by default.