holoviz-topics / panel-chat-examples

Examples of chat bots using Panel's chat features: traditional bots, LLMs, AI agents, LangChain, OpenAI, etc.
https://holoviz-topics.github.io/panel-chat-examples/
MIT License
105 stars 31 forks source link

Show how to use local file #97

Open ahuang11 opened 10 months ago

ahuang11 commented 10 months ago

Something like this — though ideally without shelling out via `os.system` for the download.

"""
Demonstrates how to use the `ChatInterface` to create a chatbot using
[Mistral](https://docs.mistral.ai) through
[CTransformers](https://github.com/marella/ctransformers).
"""
import os
import urllib.request

import panel as pn
from ctransformers import AutoConfig, AutoModelForCausalLM, Config

pn.extension()

llms = pn.state.cache["llms"] = pn.state.cache.get("llms", {})

MODEL_FILE = "mistral-7b-openorca.Q4_K_M.gguf"
MODEL_URL = f"https://huggingface.co/TheBloke/Mistral-7B-OpenOrca-GGUF/resolve/main/{MODEL_FILE}"
CURL_CMD = f"curl -C - -o {MODEL_FILE} {MODEL_URL}"

async def callback(contents: str, user: str, instance: pn.chat.ChatInterface):
    if "mistral" not in llms:
        instance.placeholder_text = "Downloading model; please wait..."
        config = AutoConfig(
            config=Config(
                temperature=0.5, max_new_tokens=2048, context_length=2048, gpu_layers=1
            ),
        )
        if not os.path.exists(MODEL_FILE):
            return_code = os.system(CURL_CMD)
            if return_code != 0:
                raise RuntimeError(f"Could not download {MODEL_URL}")
        llms["mistral"] = AutoModelForCausalLM.from_pretrained(
            MODEL_FILE,
            config=config,
            local_files_only=True,
        )

    llm = llms["mistral"]
    response = llm(contents, stream=True)
    message = ""
    for token in response:
        message += token
        yield message

chat_interface = pn.chat.ChatInterface(
    callback=callback,
    callback_user="Mistral",
    reset_on_send=True,
)
chat_interface.send(
    "Send a message to get a reply from Mistral!", user="System", respond=False
)
chat_interface.servable()