Closed apachemycat closed 1 year ago
apply_lora(args.base_model_path, args.target_model_path, args.lora_path, args.save_target_model)
def apply_lora( base_model_path: str, lora_path: str, load_8bit: bool = False, target_model_path: str = None, save_target_model: bool = False ) -> Tuple[AutoModelForCausalLM, AutoTokenizer]: — the two arguments are in the wrong position: the call above passes `target_model_path` as the second positional argument, but the signature expects `lora_path` there (and `save_target_model` then lands in the `load_8bit` slot).
Thank you very much — the bug has been fixed.
apply_lora(args.base_model_path, args.target_model_path, args.lora_path, args.save_target_model)
def apply_lora( base_model_path: str, lora_path: str, load_8bit: bool = False, target_model_path: str = None, save_target_model: bool = False ) -> Tuple[AutoModelForCausalLM, AutoTokenizer]: — the two arguments are in the wrong position: the call above passes `target_model_path` as the second positional argument, but the signature expects `lora_path` there (and `save_target_model` then lands in the `load_8bit` slot).