!!! Exception during processing!!! Unable to load vocabulary from file. Please check that the provided vocabulary is accessible and not corrupted.
Traceback (most recent call last):
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2336, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\models\t5\tokenization_t5.py", line 151, in __init__
self.sp_model.Load(vocab_file)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\sentencepiece\__init__.py", line 905, in Load
return self.LoadFromFile(model_file)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\sentencepiece\__init__.py", line 310, in LoadFromFile
return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
OSError: Not found: "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\mt5_tokenizer\spiece.model": No such file or directory Error #2
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\execution.py", line 151, in recursive_execute
output_data, output_ui = get_output_data(obj, input_data_all)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\execution.py", line 81, in get_output_data
return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\execution.py", line 74, in map_node_over_list
results.append(getattr(obj, func)(**slice_dict(input_data_all, i)))
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\nodes.py", line 75, in load_model
t5 = load_t5(
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\tenc.py", line 176, in load_t5
model = EXM_HyDiT_Tenc_Temp(model_class="mT5", **kwargs)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\tenc.py", line 125, in __init__
self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\models\auto\tokenization_auto.py", line 880, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2110, in from_pretrained
return cls._from_pretrained(
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2148, in _from_pretrained
slow_tokenizer = (cls.slow_tokenizer_class)._from_pretrained(
File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2338, in _from_pretrained
raise OSError(
OSError: Unable to load vocabulary from file. Please check that the provided vocabulary is accessible and not corrupted.
Prompt executed in 1.33 seconds
描述错误:运行默认工作流时,弹出如下错误。相关目录中已经有相关的文件,但是仍然提示找不到。
!!! Exception during processing!!! Unable to load vocabulary from file. Please check that the provided vocabulary is accessible and not corrupted. Traceback (most recent call last): File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2336, in _from_pretrained tokenizer = cls(*init_inputs, **init_kwargs) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\models\t5\tokenization_t5.py", line 151, in __init__ self.sp_model.Load(vocab_file) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\sentencepiece\__init__.py", line 905, in Load return self.LoadFromFile(model_file) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\sentencepiece\__init__.py", line 310, in LoadFromFile return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg) OSError: Not found: "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\mt5_tokenizer\spiece.model": No such file or directory Error #2 During handling of the above exception, another exception occurred: Traceback (most recent call last): File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\execution.py", line 151, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\execution.py", line 81, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\execution.py", line 74, in map_node_over_list results.append(getattr(obj, func)(**slice_dict(input_data_all, i))) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\nodes.py", line 75, in load_model t5 = load_t5( File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\tenc.py", line 176, in load_t5 model = EXM_HyDiT_Tenc_Temp(model_class="mT5", **kwargs) File
"E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\custom_nodes\ComfyUI_ExtraModels\HunYuanDiT\tenc.py", line 125, in __init__ self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\models\auto\tokenization_auto.py", line 880, in from_pretrained return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2110, in from_pretrained return cls._from_pretrained( File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2148, in _from_pretrained slow_tokenizer = (cls.slow_tokenizer_class)._from_pretrained( File "E:\秋叶comfyui\ComfyUI-aki\ComfyUI-aki-v1.3\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2338, in _from_pretrained raise OSError( OSError: Unable to load vocabulary from file. Please check that the provided vocabulary is accessible and not corrupted. Prompt executed in 1.33 seconds