Traceback (most recent call last):
File "F:\xiazai\MedicalGPT-main\reward_modeling.py", line 653, in <module>
main()
File "F:\xiazai\MedicalGPT-main\reward_modeling.py", line 447, in main
model = get_peft_model(model, peft_config)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\mapping.py", line 136, in get_peft_model
return MODEL_TYPE_TO_PEFT_MODEL_MAPPING[peft_config.task_type](model, peft_config, adapter_name=adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\peft_model.py", line 883, in __init__
_set_trainable(self, adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\utils\other.py", line 319, in _set_trainable
new_module = ModulesToSaveWrapper(target, adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\utils\other.py", line 181, in __init__
self.update(adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\utils\other.py", line 226, in update
remove_hook_from_module(self.modules_to_save[adapter_name])
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\accelerate\hooks.py", line 193, in remove_hook_from_module
module._hf_hook.detach_hook(module)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\accelerate\hooks.py", line 388, in detach_hook
set_module_tensor_to_device(module, name, device, value=self.weights_map.get(name, None))
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\accelerate\utils\modeling.py", line 341, in set_module_tensor_to_device
raise ValueError(f"{tensor_name} is on the meta device, we need a value to put in on {device}.")
ValueError: weight is on the meta device, we need a value to put in on cpu.
Traceback (most recent call last):
File "F:\xiazai\MedicalGPT-main\reward_modeling.py", line 653, in <module>
main()
File "F:\xiazai\MedicalGPT-main\reward_modeling.py", line 447, in main
model = get_peft_model(model, peft_config)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\mapping.py", line 136, in get_peft_model
return MODEL_TYPE_TO_PEFT_MODEL_MAPPING[peft_config.task_type](model, peft_config, adapter_name=adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\peft_model.py", line 883, in __init__
_set_trainable(self, adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\utils\other.py", line 319, in _set_trainable
new_module = ModulesToSaveWrapper(target, adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\utils\other.py", line 181, in __init__
self.update(adapter_name)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\peft\utils\other.py", line 226, in update
remove_hook_from_module(self.modules_to_save[adapter_name])
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\accelerate\hooks.py", line 193, in remove_hook_from_module
module._hf_hook.detach_hook(module)
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\accelerate\hooks.py", line 388, in detach_hook
set_module_tensor_to_device(module, name, device, value=self.weights_map.get(name, None))
File "C:\Users\admin.conda\envs\newrlhf\lib\site-packages\accelerate\utils\modeling.py", line 341, in set_module_tensor_to_device
raise ValueError(f"{tensor_name} is on the meta device, we need a value to put in on {device}.")
ValueError: weight is on the meta device, we need a value to put in on cpu.