Yonom / assistant-ui

React Components for AI Chat 💬 🚀
https://www.assistant-ui.com
MIT License
391 stars 26 forks source link

feat: add streaming with `useLocalRuntime` #597

Closed Rajaniraiyn closed 1 month ago

Rajaniraiyn commented 1 month ago

Add streaming support to the local runtime via an async iterator.

Example

import { type ChatModelAdapter } from "@assistant-ui/react";

const CustomModelAdapter: ChatModelAdapter = {
  /**
   * Streams the assistant reply from `/api/chat`.
   *
   * Sends the current message list, then returns an async generator that
   * yields the accumulated text after every received chunk, each time with
   * status `running`, until the response body is exhausted.
   *
   * @param messages - conversation history forwarded to the backend
   * @param signal   - abort signal; cancels both the fetch and the stream
   * @throws Error when the HTTP response is not ok or has no body
   */
  async run({ messages, signal }) {
    const res = await fetch("/api/chat", {
      method: "POST",
      body: JSON.stringify({ messages }),
      signal,
    });

    // Fail fast on HTTP errors — otherwise the error page would be
    // streamed to the user as if it were assistant text.
    if (!res.ok) {
      throw new Error(`Chat request failed with status ${res.status}`);
    }
    // res.body is `ReadableStream | null` (e.g. null on a 204); guard
    // before calling getReader().
    if (!res.body) {
      throw new Error("Chat response has no body to stream");
    }

    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    let content = '';

    return (async function* () {
      let done = false;
      while (!done) {
        const { value, done: readerDone } = await reader.read();
        done = readerDone;
        if (value) {
          // stream: true buffers incomplete multi-byte sequences that
          // were split across chunk boundaries.
          content += decoder.decode(value, { stream: true });
          yield {
            content: [{ type: "text", text: content }],
            status: { type: "running" },
          };
        }
      }
      // Flush any bytes the decoder is still buffering at end of stream;
      // without this a trailing partial UTF-8 sequence is dropped.
      const tail = decoder.decode();
      if (tail) {
        content += tail;
        yield {
          content: [{ type: "text", text: content }],
          status: { type: "running" },
        };
      }
    })();
  },
};

export default CustomModelAdapter;
vercel[bot] commented 1 month ago

@Rajaniraiyn is attempting to deploy a commit to the Simon Farshid's projects Team on Vercel.

A member of the Team first needs to authorize it.

vercel[bot] commented 1 month ago

The latest updates on your projects. Learn more about Vercel for Git ↗︎

Name Status Preview Comments Updated (UTC)
assistant-ui ✅ Ready (Inspect) Visit Preview 💬 Add feedback Jul 28, 2024 6:31pm
Yonom commented 1 month ago

@Rajaniraiyn this is an amazing contribution! thank you so much!

Yonom commented 1 month ago

I made a small adjustment to your code:

import { type ChatModelAdapter } from "@assistant-ui/react";

const CustomModelAdapter: ChatModelAdapter = {
  /**
   * Streams the assistant reply from `/api/chat`.
   *
   * Because `run` is itself an async generator, each decoded chunk is
   * yielded directly: the growing text is emitted with status `running`
   * after every read until the response stream ends.
   */
  async *run({ messages, signal }) {
    const response = await fetch("/api/chat", {
      method: "POST",
      body: JSON.stringify({ messages }),
      signal,
    });

    const bodyReader = response.body.getReader();
    const textDecoder = new TextDecoder();
    let accumulated = '';

    for (;;) {
      const { value, done } = await bodyReader.read();
      if (value) {
        // stream: true keeps split multi-byte sequences buffered
        // until the next chunk arrives.
        accumulated += textDecoder.decode(value, { stream: true });
        yield {
          content: [{ type: "text", text: accumulated }],
          status: { type: "running" },
        };
      }
      if (done) break;
    }
  },
};

export default CustomModelAdapter;