LangChain LangServe
Overview
Integration with a LangServe server via the Vercel AI SDK.
Getting Started
Create a Next.js project
npx create-next-app@latest my-app
cd my-app
Install @langchain/core, the AI SDK packages, and @assistant-ui/react
npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/react @langchain/core
Set up a backend route under /api/chat
import { RemoteRunnable } from "@langchain/core/runnables/remote";
import type { RunnableConfig } from "@langchain/core/runnables";
import { LangChainAdapter, type Message } from "ai";
export const maxDuration = 30;
export async function POST(req: Request) {
const { messages } = (await req.json()) as { messages: Message[] };
// TODO replace with your own langserve URL
const remoteChain = new RemoteRunnable<
{ messages: Message[] },
string,
RunnableConfig
>({
url: "<YOUR_LANGSERVE_URL>",
});
const stream = await remoteChain.stream({
messages,
});
  // Adapt the LangChain stream to an AI SDK data stream response
  return LangChainAdapter.toDataStreamResponse(stream);
}
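To verify the route before wiring up the UI, you can POST a message list to it directly. A minimal sketch, assuming the dev server is running on http://localhost:3000 and your LangServe chain is reachable (the file name and message contents are illustrative):

// test-chat.ts – quick manual check of the /api/chat route
async function testChat() {
  const res = await fetch("http://localhost:3000/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      messages: [{ id: "1", role: "user", content: "Hello!" }],
    }),
  });
  // The AI SDK data stream is line-framed text; printing it as-is is
  // enough to confirm the chain responds and streams tokens.
  console.log(await res.text());
}

testChat().catch(console.error);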
Define a MyRuntimeProvider component
"use client";
import { useChat } from "@ai-sdk/react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useVercelUseChatRuntime } from "@assistant-ui/react-ai-sdk";
export function MyRuntimeProvider({
children,
}: Readonly<{
children: React.ReactNode;
}>) {
const chat = useChat({
api: "/api/chat",
unstable_AISDKInterop: true,
});
const runtime = useVercelUseChatRuntime(chat);
return (
<AssistantRuntimeProvider runtime={runtime}>
{children}
</AssistantRuntimeProvider>
);
}
Wrap your app in MyRuntimeProvider
import type { ReactNode } from "react";
import { MyRuntimeProvider } from "@/app/MyRuntimeProvider";
export default function RootLayout({
children,
}: Readonly<{
children: ReactNode;
}>) {
return (
<MyRuntimeProvider>
<html lang="en">
<body>{children}</body>
</html>
</MyRuntimeProvider>
);
}
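Once the provider wraps the layout, any assistant-ui component rendered on a page picks up the runtime. A minimal page sketch, assuming a prebuilt Thread component is available (for example from the optional @assistant-ui/react-ui package, which is not installed above; styling setup omitted):

"use client";

import { Thread } from "@assistant-ui/react-ui";

export default function Home() {
  // Thread reads the runtime supplied by MyRuntimeProvider in the root layout.
  return (
    <main style={{ height: "100dvh" }}>
      <Thread />
    </main>
  );
}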