File "/home/wenxuan/lihaijie_files/cpm-live/examples/tune_cpm_ant.py", line 56, in
delta_model.freeze_module(exclude=["deltas"], set_state_dict=True)
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 274, in freeze_module
self._freeze_module_recursive(module, exclude, "") # modify the active state dict that still need grad
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 316, in _freeze_module_recursive
self._freeze_module_recursive(c, exclude=exclude, prefix=next_prefix)
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 316, in _freeze_module_recursive
self._freeze_module_recursive(c, exclude=exclude, prefix=next_prefix)
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 304, in _freeze_module_recursive
p.requires_grad = False
RuntimeError: you can only change requires_grad flags of leaf variables. If you want to use a computed variable in a subgraph that doesn't require differentiation use var_no_grad = var.detach().
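
For context, this RuntimeError is the standard PyTorch message raised when code tries to toggle `requires_grad` on a non-leaf (computed) tensor; the traceback indicates that some parameter reached by `freeze_module` is not a leaf tensor. Below is a minimal sketch reproducing the same error, independent of the actual cpm-live/OpenDelta code (tensor names are hypothetical):

```python
import torch

# Minimal sketch of the failure mode (not the cpm-live/OpenDelta code):
# requires_grad can only be changed on leaf tensors; a tensor produced by
# an operation is a non-leaf and raises the same RuntimeError.
w = torch.randn(3, requires_grad=True)  # leaf tensor
y = w * 2                               # non-leaf tensor, computed from w

w.requires_grad = False                 # OK: w is a leaf

try:
    y.requires_grad = False             # raises the RuntimeError shown above
except RuntimeError as err:
    print(err)

# As the message suggests, a computed tensor that should not be
# differentiated can instead be detached from the autograd graph:
y_no_grad = y.detach()
```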
File "/home/wenxuan/lihaijie_files/cpm-live/examples/tune_cpm_ant.py", line 56, in
delta_model.freeze_module(exclude=["deltas"], set_state_dict=True)
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 274, in freeze_module
self._freeze_module_recursive(module, exclude, "") # modify the active state dict that still need grad
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 316, in _freeze_module_recursive
self._freeze_module_recursive(c, exclude=exclude, prefix=next_prefix)
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 316, in _freeze_module_recursive
self._freeze_module_recursive(c, exclude=exclude, prefix=next_prefix)
File "/home/wenxuan/miniconda3/envs/lhj/lib/python3.9/site-packages/opendelta/basemodel.py", line 304, in _freeze_module_recursive
p.requires_grad = False
RuntimeError: you can only change requires_grad flags of leaf variables. If you want to use a computed variable in a subgraph that doesn't require differentiation use var_no_grad = var.detach().