File "train_mydata.py", line 4, in <module>
tokenizer = AutoTokenizer.from_pretrained('SentiWSP-base')
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/models/auto/tokenization_auto.py", line 487, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/tokenization_utils_base.py", line 1750, in from_pretrained
**kwargs,
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/tokenization_utils_base.py", line 1872, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/models/bert/tokenization_bert_fast.py", line 188, in __init__
**kwargs,
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
Exception: No such file or directory (os error 2)
File "train_mydata.py", line 4, in <module>
tokenizer = AutoTokenizer.from_pretrained('SentiWSP-base')
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/models/auto/tokenization_auto.py", line 487, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/tokenization_utils_base.py", line 1750, in from_pretrained
**kwargs,
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/tokenization_utils_base.py", line 1872, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/models/bert/tokenization_bert_fast.py", line 188, in __init__
**kwargs,
File "/mnt/sdb/home/liailin/miniconda3/envs/SentiWSP/lib/python3.7/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
Exception: No such file or directory (os error 2)