!!! Exception during processing !!! data did not match any variant of untagged enum PyPreTokenizerTypeWrapper at line 951 column 3
Traceback (most recent call last):
File "G:\cmofy\ComfyUI-aki-v1.4\execution.py", line 289, in execute
obj = class_def()
File "G:\cmofy\ComfyUI-aki-v1.4\custom_nodes\ComfyUI-Fluxpromptenhancer\nodes\flux-prompt-enhance-node.py", line 8, in __init__
self.tokenizer = AutoTokenizer.from_pretrained(self.model_checkpoint)
File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\models\auto\tokenization_auto.py", line 825, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2048, in from_pretrained
return cls._from_pretrained(
File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2287, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\models\t5\tokenization_t5_fast.py", line 146, in __init__
super().__init__(
File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\tokenization_utils_fast.py", line 111, in __init__
fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
Exception: data did not match any variant of untagged enum PyPreTokenizerTypeWrapper at line 951 column 3
!!! Exception during processing !!! data did not match any variant of untagged enum PyPreTokenizerTypeWrapper at line 951 column 3 Traceback (most recent call last): File "G:\cmofy\ComfyUI-aki-v1.4\execution.py", line 289, in execute obj = class_def() File "G:\cmofy\ComfyUI-aki-v1.4\custom_nodes\ComfyUI-Fluxpromptenhancer\nodes\flux-prompt-enhance-node.py", line 8, in __init__ self.tokenizer = AutoTokenizer.from_pretrained(self.model_checkpoint) File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\models\auto\tokenization_auto.py", line 825, in from_pretrained return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2048, in from_pretrained return cls._from_pretrained( File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\tokenization_utils_base.py", line 2287, in _from_pretrained tokenizer = cls(*init_inputs, **init_kwargs) File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\models\t5\tokenization_t5_fast.py", line 146, in __init__ super().__init__( File "G:\cmofy\ComfyUI-aki-v1.4\python\lib\site-packages\transformers\tokenization_utils_fast.py", line 111, in __init__ fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file) Exception: data did not match any variant of untagged enum PyPreTokenizerTypeWrapper at line 951 column 3