ta/slim_star --split validation --percentage 1.0
Traceback (most recent call last):
File "/root/TinyLlama/scripts/prepare_slimpajama.py", line 15, in <module>
import lit_gpt.packed_dataset as packed_dataset
File "/root/TinyLlama/lit_gpt/__init__.py", line 1, in <module>
from lit_gpt.model import GPT
File "/root/TinyLlama/lit_gpt/model.py", line 13, in <module>
from flash_attn import flash_attn_func
File "/root/miniconda3/envs/tiny/lib/python3.10/site-packages/flash_attn/__init__.py", line 3, in <module>
from flash_attn.flash_attn_interface import (
File "/root/miniconda3/envs/tiny/lib/python3.10/site-packages/flash_attn/flash_attn_interface.py", line 8, in <module>
import flash_attn_2_cuda as flash_attn_cuda
ImportError: /root/miniconda3/envs/tiny/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNK3c106SymIntltEl
python -c "from flash_attn import flash_attn_qkvpacked_func, flash_attn_func"
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/root/miniconda3/lib/python3.8/site-packages/flash_attn/__init__.py", line 3, in <module>
from flash_attn.flash_attn_interface import (
File "/root/miniconda3/lib/python3.8/site-packages/flash_attn/flash_attn_interface.py", line 8, in <module>
import flash_attn_2_cuda as flash_attn_cuda
ImportError: libcudart.so.11.0: cannot open shared object file: No such file or directory
解决办法:保持以下版本一致,确保依赖不冲突;必要时可以重新安装:
CUDA版本 nvidia-smi
CUDA Toolkit版本 nvcc -V
Torch版本,参考:https://pytorch.org/get-started/previous-versions/
FlashAttn版本,参考:https://github.com/Dao-AILab/flash-attention/releases