Traceback (most recent call last):
  File "/home/wdz/Baichuan-13B-Finetuning/cli_demo.py", line 79, in <module>
    main()
  File "/home/wdz/Baichuan-13B-Finetuning/cli_demo.py", line 64, in main
    for response in model.chat(tokenizer, messages, stream=True):
  File "/home/wdz/anaconda3/envs/baichuan/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
    return func(*args, **kwargs)
  File "/home/wdz/.cache/huggingface/modules/transformers_modules/Baichuan-13B-Chat/modeling_baichuan.py", line 536, in chat
    input_ids = self._build_chat_input(tokenizer, messages, generation_config.max_new_tokens)
  File "/home/wdz/.cache/huggingface/modules/transformers_modules/Baichuan-13B-Chat/modeling_baichuan.py", line 504, in _build_chat_input
    max_input_tokens = self.config.model_max_length - max_new_tokens
TypeError: unsupported operand type(s) for -: 'int' and 'NoneType'
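The error suggests that generation_config.max_new_tokens is None when model.chat() is called, so _build_chat_input cannot compute model_max_length - max_new_tokens. A minimal workaround sketch, assuming the model is loaded with trust_remote_code=True and that the model id and the value 2048 are only illustrative:

    # Hedged sketch: set max_new_tokens explicitly so the subtraction
    # in _build_chat_input does not receive None.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_path = "baichuan-inc/Baichuan-13B-Chat"  # assumed model id/path
    tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True)

    # If the loaded generation_config lacks max_new_tokens, fill it in
    # before calling model.chat(); 2048 is an example value, not a requirement.
    if model.generation_config.max_new_tokens is None:
        model.generation_config.max_new_tokens = 2048

    messages = [{"role": "user", "content": "你好"}]
    for response in model.chat(tokenizer, messages, stream=True):
        print(response, end="", flush=True)

Alternatively, the generation_config.json shipped with the checkpoint (or the copy in cli_demo.py's model directory) may be missing or overriding max_new_tokens; checking that file is another possible fix.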