
LangChain LangServe

This integration has not been tested with AI SDK v5.

Overview

Integration with a LangServe server via the Vercel AI SDK.

Getting Started

Create a Next.js project

npx create-next-app@latest my-app
cd my-app

Install @assistant-ui/react, the Vercel AI SDK packages, and @langchain/core

npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai @ai-sdk/react @ai-sdk/langchain @langchain/core

Set up a backend route under /api/chat

@/app/api/chat/route.ts
import { RemoteRunnable } from "@langchain/core/runnables/remote";
import { toDataStreamResponse } from "@ai-sdk/langchain";

// Allow streaming responses for up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  const { messages } = await req.json();

  // TODO replace with your own LangServe URL
  const remoteChain = new RemoteRunnable({
    url: "<YOUR_LANGSERVE_URL>",
  });

  // Stream the chain's output and adapt it to the AI SDK data stream protocol
  const stream = await remoteChain.stream({
    messages,
  });

  return toDataStreamResponse(stream);
}
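
To sanity-check the route without any UI, you can POST a messages array to it directly. A minimal sketch, assuming the dev server is running on localhost:3000 and that the request body mirrors what useChat sends (a messages array of role/content objects); the script path is hypothetical:

// scripts/test-chat-route.ts (hypothetical helper, not part of the template)
async function testChatRoute() {
  const res = await fetch("http://localhost:3000/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      messages: [{ role: "user", content: "Hello!" }],
    }),
  });

  // The route streams the AI SDK data protocol; dump the raw text to inspect it
  console.log(await res.text());
}

testChatRoute();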

Define a MyRuntimeProvider component

@/app/MyRuntimeProvider.tsx
"use client";

import {  } from "@ai-sdk/react";
import {  } from "@assistant-ui/react";
import {  } from "@assistant-ui/react-ai-sdk";

export function ({
  ,
}: <{
  : .;
}>) {
  const  = ({
    : "/api/chat",
    : true,
  });

  const  = ();

  return (
    < ={}>
      {}
    </>
  );
}

Wrap your app in MyRuntimeProvider

@/app/layout.tsx
import type { ReactNode } from "react";
import { MyRuntimeProvider } from "@/app/MyRuntimeProvider";

export default function RootLayout({
  children,
}: Readonly<{
  children: ReactNode;
}>) {
  return (
    <MyRuntimeProvider>
      <html lang="en">
        <body>{children}</body>
      </html>
    </MyRuntimeProvider>
  );
}
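
With the runtime provider wrapping the app, any assistant-ui component rendered inside it can read the chat state. As a minimal sketch of a chat page, assuming the unstyled primitive components exported by @assistant-ui/react (ThreadPrimitive, MessagePrimitive, ComposerPrimitive); in a real app you would typically use a styled Thread component from the assistant-ui templates instead:

@/app/page.tsx
"use client";

import {
  ThreadPrimitive,
  MessagePrimitive,
  ComposerPrimitive,
} from "@assistant-ui/react";

// Bare-bones message renderer; styling and per-part rendering omitted
const Message = () => (
  <MessagePrimitive.Root>
    <MessagePrimitive.Content />
  </MessagePrimitive.Root>
);

export default function Home() {
  return (
    <ThreadPrimitive.Root>
      <ThreadPrimitive.Viewport>
        <ThreadPrimitive.Messages
          components={{ UserMessage: Message, AssistantMessage: Message }}
        />
      </ThreadPrimitive.Viewport>
      <ComposerPrimitive.Root>
        <ComposerPrimitive.Input placeholder="Write a message..." />
        <ComposerPrimitive.Send>Send</ComposerPrimitive.Send>
      </ComposerPrimitive.Root>
    </ThreadPrimitive.Root>
  );
}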