Hello,
I'm trying to compile from source following the instructions. However, when importing the Hugging Face LLaMA 2 model, I got the following error:
File "/opt/cray/pe/python/3.10.10/lib/python3.10/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "", line 1050, in _gcd_import
File "", line 1027, in _find_and_load
File "", line 1006, in _find_and_load_unlocked
File "", line 688, in _load_unlocked
File "", line 883, in exec_module
File "", line 241, in _call_with_frames_removed
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/models/llama/modeling_llama.py", line 55, in
from flash_attn import flash_attn_func, flash_attn_varlen_func
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn/__init__.py", line 3, in
from flash_attn.flash_attn_interface import flash_attn_func
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn/flash_attn_interface.py", line 4, in
import flash_attn_2_cuda as flash_attn_cuda
ImportError: /lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNSt15__exception_ptr13exception_ptr9_M_addrefEv
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "", line 1, in
File "", line 1075, in _handle_fromlist
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1373, in getattr
value = getattr(module, name)
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1372, in getattr
module = self._get_module(self._class_to_module[name])
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1384, in _get_module
raise RuntimeError(
RuntimeError: Failed to import transformers.models.llama.modeling_llama because of the following error (look up to see its traceback):
/lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNSt15__exception_ptr13exception_ptr9_M_addrefEv
Hello, I'm trying to compile from source following the instructions. However, when importing the Hugging Face LLaMA 2 model, I got the following error:
File "/opt/cray/pe/python/3.10.10/lib/python3.10/importlib/__init__.py", line 126, in import_module return _bootstrap._gcd_import(name[level:], package, level) File "", line 1050, in _gcd_import
File "", line 1027, in _find_and_load
File "", line 1006, in _find_and_load_unlocked
File "", line 688, in _load_unlocked
File "", line 883, in exec_module
File "", line 241, in _call_with_frames_removed
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/models/llama/modeling_llama.py", line 55, in
from flash_attn import flash_attn_func, flash_attn_varlen_func
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn/__init__.py", line 3, in
from flash_attn.flash_attn_interface import flash_attn_func
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn/flash_attn_interface.py", line 4, in
import flash_attn_2_cuda as flash_attn_cuda
ImportError: /lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNSt15__exception_ptr13exception_ptr9_M_addrefEv
The above exception was the direct cause of the following exception:
Traceback (most recent call last): File "", line 1, in
File "", line 1075, in _handle_fromlist
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1373, in getattr
value = getattr(module, name)
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1372, in getattr
module = self._get_module(self._class_to_module[name])
File "/lus/scratch/CT6/cad14770/pcolombo/test_flash/python_environment/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1384, in _get_module
raise RuntimeError(
RuntimeError: Failed to import transformers.models.llama.modeling_llama because of the following error (look up to see its traceback):
/lus/scratch/CT6/cad14770/pcolombo/test_flash/flash-attention/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNSt15__exception_ptr13exception_ptr9_M_addrefEv
Not sure why?
Do you have any advice? Best, Pierre