transitive-bullshit / agentic

AI agent stdlib that works with any LLM and TypeScript AI SDK.
MIT License
16.07k stars 2.11k forks source link

What's the proper way to send partial answers? #605

Closed izcoser closed 11 months ago

izcoser commented 12 months ago

Describe the feature

Hi guys. This isn't really a feature request, but rather a question I'm not sure how to solve.

I'm using this on the server side of my Next application, and I'd like to send text as it is written by the AI, without waiting for the entire thing.

My initial idea was to use a websocket and send progress.text to the front end using the onProgress callback, but I'd like to avoid websockets. Then I learned you can send a response as a stream of data, which increments continually, but I couldn't figure out how to do that with this API.

The link above also shows that OpenAI has an SDK which makes it easy to send the stream of text. Perhaps it's possible to use that SDK together with transitive-bullshit's API?

AnsonCheng03 commented 11 months ago

Try this

/**
 * Creates a push-based async queue ("progress emitter"): values pushed via
 * `push` are delivered, in order, to a consumer iterating `generator`.
 *
 * @typeParam T - type of the values flowing through the queue (defaults to
 *                `unknown` so existing untyped callers keep compiling).
 * @returns an object exposing the async `generator` to consume and a `push`
 *          function to enqueue values.
 */
function createProgressEmitter<T = unknown>() {
  // Resolver for the promise the generator is currently parked on, or null
  // when the generator is not waiting (or has already been woken).
  let wake: (() => void) | null = null;
  const queue: T[] = [];

  async function* internalGenerator(): AsyncGenerator<T, never, void> {
    // Intentionally infinite: consumers stop by breaking out of their
    // `for await` loop, which finalizes the generator.
    while (true) {
      if (queue.length > 0) {
        // Non-null assertion is safe: length was checked just above.
        yield queue.shift()!;
      } else {
        // Park until the next push. The Promise executor runs synchronously,
        // so `wake` is assigned before control returns to the event loop —
        // there is no lost-wakeup window.
        await new Promise<void>((resolve) => {
          wake = resolve;
        });
      }
    }
  }

  return {
    generator: internalGenerator(),
    push: (value: T) => {
      queue.push(value);
      if (wake) {
        wake();
        wake = null;
      }
    },
  };
}

/**
 * Server-side streaming query: kicks off the chat completion and yields
 * incremental `[text, messageId]` tuples as the model produces tokens. The
 * final tuple carries `"END"` as a third element so the client knows the
 * stream is complete.
 *
 * @param query    - the user's prompt text.
 * @param parentID - id of the previous assistant message to continue the
 *                   conversation from, or null for a fresh thread.
 */
const queryGPT = server$(async function* (
  query: string,
  parentID: string | null
) {
  const { generator: progressGenerator, push: pushProgress } =
    createProgressEmitter();

  // Fire off the API call WITHOUT awaiting it: the onProgress callback feeds
  // the emitter while this generator streams the updates back to the caller.
  gptAPI
    .sendMessage(query, {
      ...(parentID ? { parentMessageId: parentID } : {}),
      onProgress: (progress) => {
        pushProgress([progress.text, progress.id]);
      },
    })
    .then((res) => {
      // Final push carries the "END" sentinel so consumers can stop.
      pushProgress([res.text, res.id, "END"]);
    })
    .catch((err: unknown) => {
      // Without this handler a failed API call would leave the consumer's
      // `for await` loop suspended forever AND raise an unhandled rejection.
      // Surface the error text and still terminate the stream.
      const message = err instanceof Error ? err.message : String(err);
      pushProgress([message, null, "END"]);
    });

  for await (const update of progressGenerator) {
    yield update;
  }
});

/**
 * Client-side submit handler: appends the user's message to the
 * conversation, streams the bot's partial answers from `queryGPT`, and
 * live-updates (or appends) the bot entry keyed by message id.
 */
const submitQuery = $(async () => {
    if (queryValue.value === "") return;
    conversation.value = [
      ...conversation.value,
      { type: "user", content: queryValue.value },
    ];
    waitingResponse.value = true;
    // try/finally guarantees the loading flag is cleared even if the call or
    // the stream throws partway through (previously it would stay stuck on
    // error, leaving the UI in a permanent "waiting" state).
    try {
      const res = await queryGPT(queryValue.value, parentID.value);
      for await (const i of res) {
        parentID.value = i[1];

        // Upsert the bot message: replace content when an entry with this
        // message id already exists, otherwise append a new entry.
        const existingIndex = conversation.value.findIndex(
          (item) => item.id === i[1]
        );

        if (existingIndex !== -1) {
          conversation.value[existingIndex].content = i[0];
          // Reassign the array so the framework notices the in-place mutation.
          conversation.value = [...conversation.value];
        } else {
          conversation.value = [
            ...conversation.value,
            { type: "bot", content: i[0], id: i[1] },
          ];
        }
        // The "END" sentinel marks the final message; stop streaming.
        if (i[2] === "END") break;
      }
    } finally {
      waitingResponse.value = false;
    }
  });
izcoser commented 11 months ago

Thank you. I ended up using the official API since they also have easy plug and play frontend hooks!