X-D-Lab / LangChain-ChatGLM-Webui

基于LangChain和ChatGLM-6B等系列LLM的针对本地知识库的自动问答
Apache License 2.0
3.12k stars 469 forks source link

模型无法加载成功 #149

Closed zulan88 closed 6 months ago

zulan88 commented 6 months ago

运行app后的日志 wanji@wanji-Default-string:/data/langchain/LangChain-ChatGLM-Webui$ python3 app.py /home/wanji/.local/lib/python3.8/site-packages/torch/_utils.py:831: UserWarning: TypedStorage is deprecated. It will be removed in the future and UntypedStorage will be the only storage class. This should only matter to you if you are using storages directly. To access UntypedStorage directly, use tensor.untyped_storage() instead of tensor.storage() return self.fget.get(instance, owner)() Running on local URL: http://0.0.0.0:7860

页面提示 模型未成功加载,请重新选择模型后点击"重新加载模型"按钮

config.py如下

import os
import torch


def _detect_device() -> str:
    """Return the preferred torch device string for this machine.

    Preference order: NVIDIA CUDA > Apple MPS > CPU fallback.
    """
    if torch.cuda.is_available():
        return "cuda"
    if torch.backends.mps.is_available():
        return "mps"
    return "cpu"


# device config
# Both the embedding model and the LLM share the same device choice;
# detection is factored into _detect_device() so the (identical) logic
# is not duplicated inline.
EMBEDDING_DEVICE = _detect_device()
LLM_DEVICE = _detect_device()
num_gpus = torch.cuda.device_count()

# model cache config
# Downloaded weights are cached in a 'model_cache' folder next to this file.
MODEL_CACHE_PATH = os.path.join(os.path.dirname(__file__), 'model_cache')

# vector storage config
VECTOR_STORE_PATH = './vector_store'
COLLECTION_NAME = 'my_collection'

# init model config
# Keys here must match an entry in llm_model_dict / embedding_model_dict below.
init_llm = "ChatGLM-6B-int8"
init_embedding_model = "text2vec-base"

# model config
# Values are either Hugging Face Hub repo ids or absolute local paths.
embedding_model_dict = {
    "ernie-tiny": "nghuyong/ernie-3.0-nano-zh",
    "ernie-base": "nghuyong/ernie-3.0-base-zh",
    "ernie-medium": "nghuyong/ernie-3.0-medium-zh",
    "ernie-xbase": "nghuyong/ernie-3.0-xbase-zh",
    "text2vec-base": "/data/model/text2vec-base-chinese",
    'simbert-base-chinese': 'WangZeJun/simbert-base-chinese',
    'paraphrase-multilingual-MiniLM-L12-v2': "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
}

# LLMs grouped by family; inner keys are the names shown in the web UI,
# inner values are Hub repo ids or local checkpoint directories.
llm_model_dict = {
    "chatglm2": {
        "ChatGLM2-6B": "THUDM/chatglm2-6b",
        "ChatGLM2-6B-int4": "THUDM/chatglm2-6b-int4",
    },
    "chatglm": {
        "ChatGLM-6B": "THUDM/chatglm-6b",
        "ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
        "ChatGLM-6B-int8": "/data/model/chatglm-6b-int8",
        "ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe",
    },
    "belle": {
        "BELLE-LLaMA-Local": "/pretrainmodel/belle",
    },
    "vicuna": {
        "Vicuna-Local": "/pretrainmodel/vicuna",
    },
    "internlm": {
        "internlm-chat-7b-8k": "internlm/internlm-chat-7b-8k",
        "internlm-chat-7b": "internlm/internlm-chat-7b",
        "internlm-chat-7b-v1_1": "internlm/internlm-chat-7b-v1_1",
    }
}

模型位置如下 image

zulan88 commented 6 months ago

操作系统:ubuntu20.04

显卡:3080 cuda: 12.2

zulan88 commented 6 months ago

运行应用程序后的日志 wanji@wanji-Default-string:/data/langchain/LangChain-ChatGLM-Webui$ python3 app.py /home/wanji/.local/lib/python3.8/site-packages/torch/_utils.py:831: UserWarning: TypedStorage 已弃用。将来它将被删除，UntypedStorage 将是唯一的存储类。仅当您直接使用存储时，这才对您有意义。要直接访问 UntypedStorage，请使用 tensor.untyped_storage() 而不是 tensor.storage()。 return self.fget.get(instance, owner)() 在本地 URL 上运行: http://0.0.0.0:7860

页面提示 模型未成功加载,请重新选择模型后点击“重新加载模型”按钮

config.py如下

import os
import torch


def _detect_device() -> str:
    """Return the torch device string to use on this host.

    Order of preference: CUDA, then Apple MPS, then plain CPU.
    """
    if torch.cuda.is_available():
        return "cuda"
    if torch.backends.mps.is_available():
        return "mps"
    return "cpu"


# device config
# Embedding model and LLM run on the same device; the selection logic
# lives in _detect_device() instead of being written out twice.
EMBEDDING_DEVICE = _detect_device()
LLM_DEVICE = _detect_device()
num_gpus = torch.cuda.device_count()

# model cache config
# Weights are cached in a 'model_cache' directory beside this config file.
MODEL_CACHE_PATH = os.path.join(os.path.dirname(__file__), 'model_cache')

# vector storage config
VECTOR_STORE_PATH = './vector_store'
COLLECTION_NAME = 'my_collection'

# init model config
# These keys must exist in llm_model_dict / embedding_model_dict below.
init_llm = "ChatGLM-6B-int8"
init_embedding_model = "text2vec-base"

# model config
# Values are Hugging Face Hub repo ids or absolute local checkpoint paths.
embedding_model_dict = {
    "ernie-tiny": "nghuyong/ernie-3.0-nano-zh",
    "ernie-base": "nghuyong/ernie-3.0-base-zh",
    "ernie-medium": "nghuyong/ernie-3.0-medium-zh",
    "ernie-xbase": "nghuyong/ernie-3.0-xbase-zh",
    "text2vec-base": "/data/model/text2vec-base-chinese",
    'simbert-base-chinese': 'WangZeJun/simbert-base-chinese',
    'paraphrase-multilingual-MiniLM-L12-v2': "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
}

# LLMs grouped by family; inner keys are UI display names, inner values
# are Hub repo ids or local checkpoint directories.
llm_model_dict = {
    "chatglm2": {
        "ChatGLM2-6B": "THUDM/chatglm2-6b",
        "ChatGLM2-6B-int4": "THUDM/chatglm2-6b-int4",
    },
    "chatglm": {
        "ChatGLM-6B": "THUDM/chatglm-6b",
        "ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
        "ChatGLM-6B-int8": "/data/model/chatglm-6b-int8",
        "ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe",
    },
    "belle": {
        "BELLE-LLaMA-Local": "/pretrainmodel/belle",
    },
    "vicuna": {
        "Vicuna-Local": "/pretrainmodel/vicuna",
    },
    "internlm": {
        "internlm-chat-7b-8k": "internlm/internlm-chat-7b-8k",
        "internlm-chat-7b": "internlm/internlm-chat-7b",
        "internlm-chat-7b-v1_1": "internlm/internlm-chat-7b-v1_1",
    }
}

模型位置如下 图像