from transformers import AutoModelForCausalLM, AutoTokenizer
# from huggingface_hub import snapshot_download
from modelscope import snapshot_download

import torch

# Download the model files (cached locally under ~/.cache/modelscope).
# model_path = snapshot_download('Qwen/Qwen2.5-7B-Instruct')
model_path = snapshot_download('zhw2044154891/glm-4-9b-chat-fs')

# Load tokenizer and model.
# The GLM-4 repository ships custom modeling/tokenization code, so
# trust_remote_code=True is required. On Windows, transformers cannot use
# signal.SIGALRM to prompt interactively for consent (hence the
# AttributeError -> ValueError in the reported traceback), so the flag
# must be passed explicitly here.
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True)

# Select device and move the model onto it.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
Expected behavior / 期待表现
不报错。能正常使用
现在报错
AttributeError Traceback (most recent call last)
File d:\my\env\python3.10.10\lib\site-packages\transformers\dynamic_module_utils.py:648, in resolve_trust_remote_code(trust_remote_code, model_name, has_local_code, has_remote_code)
[647](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:647) try:
--> [648](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:648) prev_sig_handler = signal.signal(signal.SIGALRM, _raise_timeout_error)
[649](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:649) signal.alarm(TIME_OUT_REMOTE_CODE)
AttributeError: module 'signal' has no attribute 'SIGALRM'
During handling of the above exception, another exception occurred:
ValueError Traceback (most recent call last)
Cell In[2], [line 10](vscode-notebook-cell:?execution_count=2&line=10)
[7](vscode-notebook-cell:?execution_count=2&line=7) model_path = snapshot_download('zhw2044154891/glm-4-9b-chat-fs')
[9](vscode-notebook-cell:?execution_count=2&line=9) # 加载模型和分词器
---> [10](vscode-notebook-cell:?execution_count=2&line=10) tokenizer = AutoTokenizer.from_pretrained(model_path)
[11](vscode-notebook-cell:?execution_count=2&line=11) model = AutoModelForCausalLM.from_pretrained(model_path)
[13](vscode-notebook-cell:?execution_count=2&line=13) # 设置设备
File d:\my\env\python3.10.10\lib\site-packages\transformers\models\auto\tokenization_auto.py:879, in AutoTokenizer.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
[871](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:871) has_remote_code = tokenizer_auto_map is not None
[872](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:872) has_local_code = type(config) in TOKENIZER_MAPPING or (
[873](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:873) config_tokenizer_class is not None
[874](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:874) and (
(...)
[877](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:877) )
[878](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:878) )
--> [879](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:879) trust_remote_code = resolve_trust_remote_code(
[880](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:880) trust_remote_code, pretrained_model_name_or_path, has_local_code, has_remote_code
[881](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:881) )
[883](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:883) if has_remote_code and trust_remote_code:
[884](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/models/auto/tokenization_auto.py:884) if use_fast and tokenizer_auto_map[1] is not None:
File d:\my\env\python3.10.10\lib\site-packages\transformers\dynamic_module_utils.py:664, in resolve_trust_remote_code(trust_remote_code, model_name, has_local_code, has_remote_code)
[661](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:661) signal.alarm(0)
[662](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:662) except Exception:
[663](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:663) # OS which does not support signal.SIGALRM
--> [664](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:664) raise ValueError(
[665](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:665) f"The repository for {model_name} contains custom code which must be executed to correctly "
[666](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:666) f"load the model. You can inspect the repository content at https://hf.co/{model_name}.\n"
[667](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:667) f"Please pass the argument `trust_remote_code=True` to allow custom code to be run."
[668](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:668) )
[669](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:669) finally:
[670](file:///D:/my/env/python3.10.10/lib/site-packages/transformers/dynamic_module_utils.py:670) if prev_sig_handler is not None:
ValueError: The repository for C:\Users\Admin\.cache\modelscope\hub\zhw2044154891\glm-4-9b-chat-fs contains custom code which must be executed to correctly load the model. You can inspect the repository content at [https://hf.co/C:\Users\Admin\.cache\modelscope\hub\zhw2044154891\glm-4-9b-chat-fs](https://hf.co/C:/Users/Admin/.cache/modelscope/hub/zhw2044154891/glm-4-9b-chat-fs).
Please pass the argument `trust_remote_code=True` to allow custom code to be run.
System Info / 系統信息
windows
Who can help? / 谁可以帮助到您?
No response
Information / 问题信息
Reproduction / 复现过程
Expected behavior / 期待表现
不报错。能正常使用 现在报错