Closed ecent2000 closed 1 year ago
AttributeError Traceback (most recent call last) f:\chatglm_bailing\scripts\chat.py in line 8 6 # 加载保存好的模型和分词器,假设您的模型名为my_model 7 model_path = "F:\chatglm_bailing\model" ----> 8 model = AutoModel.from_pretrained(model_path) 9 tokenizer = ChatGLMTokenizer.from_pretrained(model_path) 11 # 使用模型和分词器进行预测,假设您的输入是"Hello, world!"
File c:\Users\yc\anaconda3\envs\chatglm_etuning\lib\site-packages\transformers\models\auto\auto_factory.py:461, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, kwargs) 458 if kwargs.get("torch_dtype", None) == "auto": 459 _ = kwargs.pop("torch_dtype") --> 461 config, kwargs = AutoConfig.from_pretrained( 462 pretrained_model_name_or_path, 463 return_unused_kwargs=True, 464 trust_remote_code=trust_remote_code, 465 hub_kwargs, 466 **kwargs, 467 ) 469 # if torch_dtype=auto was passed here, ensure to pass it on 470 if kwargs_orig.get("torch_dtype", None) == "auto":
File c:\Users\yc\anaconda3\envs\chatglm_etuning\lib\site-packages\transformers\models\auto\configuration_auto.py:986, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs) 984 has_remote_code = "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"] 985 has_local_code = "model_type" in config_dict and config_dict["model_type"] in CONFIG_MAPPING ... --> 535 signal.signal(signal.SIGALRM, _raise_timeout_error) 536 signal.alarm(TIME_OUT_REMOTE_CODE) 537 while trust_remote_code is None:
AttributeError: module 'signal' has no attribute 'SIGALRM'
import sys sys.path.append(r"F:\chatglm_bailing\model") from transformers import AutoModel, AutoTokenizer from tokenization_chatglm import ChatGLMTokenizer
# Use a raw string so backslashes in the Windows path are not interpreted
# as escape sequences.
model_path = r"F:\chatglm_bailing\model"

# ChatGLM ships custom modeling code, so trust_remote_code=True is required.
# Without it, transformers falls back to an interactive confirmation prompt
# whose timeout is implemented with signal.SIGALRM — a Unix-only signal —
# which raises "AttributeError: module 'signal' has no attribute 'SIGALRM'"
# on Windows (the exact traceback reported in this issue).
model = AutoModel.from_pretrained(model_path, trust_remote_code=True)
tokenizer = ChatGLMTokenizer.from_pretrained(model_path)
# Encode a sample prompt, run a forward pass, and show the raw model output.
prompt = "Hello, world!"
encoded = tokenizer(prompt, return_tensors="pt")
result = model(**encoded)
print(result)