Fine-tune the Whisper speech recognition model to support training without timestamp data, training with timestamp data, and training without speech data. Accelerate inference and support Web deployment, Windows desktop deployment, and Android deployment
Traceback (most recent call last):
File "/home/yunyi/container/whisper_ru/Whisper-Finetune-master/finetune.py", line 171, in
main()
File "/home/yunyi/container/whisper_ru/Whisper-Finetune-master/finetune.py", line 156, in main
trainer.train(resume_from_checkpoint=args.resume_from_checkpoint)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer.py", line 1938, in train
return inner_training_loop(
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer.py", line 2356, in _inner_training_loop
self._maybe_log_save_evaluate(tr_loss, grad_norm, model, trial, epoch, ignore_keys_for_eval)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer.py", line 2808, in _maybe_log_save_evaluate
self.control = self.callback_handler.on_save(self.args, self.state, self.control)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer_callback.py", line 496, in on_save
return self.call_event("on_save", args, state, control)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer_callback.py", line 507, in call_event
result = getattr(callback, event)(
File "/home/yunyi/container/whisper_ru/Whisper-Finetune-master/utils/callback.py", line 20, in on_save
if os.path.exists(state.best_model_checkpoint):
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/genericpath.py", line 19, in exists
os.stat(path)
TypeError: stat: path should be string, bytes, os.PathLike or integer, not NoneType
if args.local_rank == 0 or args.local_rank == -1:
保存效果最好的模型
best_checkpoint_folder = os.path.join(args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-best")
# 因为只保存最新5个检查点,所以要确保不是之前的检查点
if os.path.exists(state.best_model_checkpoint):
if os.path.exists(best_checkpoint_folder):
shutil.rmtree(best_checkpoint_folder)
shutil.copytree(state.best_model_checkpoint, best_checkpoint_folder)
print(f"效果最好的检查点为:{state.best_model_checkpoint},评估结果为:{state.best_metric}")
return control
Traceback (most recent call last): File "/home/yunyi/container/whisper_ru/Whisper-Finetune-master/finetune.py", line 171, in
main()
File "/home/yunyi/container/whisper_ru/Whisper-Finetune-master/finetune.py", line 156, in main
trainer.train(resume_from_checkpoint=args.resume_from_checkpoint)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer.py", line 1938, in train
return inner_training_loop(
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer.py", line 2356, in _inner_training_loop
self._maybe_log_save_evaluate(tr_loss, grad_norm, model, trial, epoch, ignore_keys_for_eval)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer.py", line 2808, in _maybe_log_save_evaluate
self.control = self.callback_handler.on_save(self.args, self.state, self.control)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer_callback.py", line 496, in on_save
return self.call_event("on_save", args, state, control)
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/site-packages/transformers/trainer_callback.py", line 507, in call_event
result = getattr(callback, event)(
File "/home/yunyi/container/whisper_ru/Whisper-Finetune-master/utils/callback.py", line 20, in on_save
if os.path.exists(state.best_model_checkpoint):
File "/home/yunyi/miniconda3/envs/LLM/lib/python3.10/genericpath.py", line 19, in exists
os.stat(path)
TypeError: stat: path should be string, bytes, os.PathLike or integer, not NoneType
if args.local_rank == 0 or args.local_rank == -1:
保存效果最好的模型
这里的 state.best_model_checkpoint 在其他地方没有被设置,因此为 None(只有在 TrainingArguments 中配置了 metric_for_best_model 并完成至少一次评估后,该字段才会被赋值),所以调用 os.path.exists 前必须先做 None 判断。