# 1、创建InternLM虚拟环境,并查看python的版本
# Create the "InternLM" conda env by cloning the shared pre-built base env
# (faster than resolving packages from scratch).
conda create -n InternLM --clone /share/conda_envs/internlm-base
# Switch into the new env before running any install or python commands below.
conda activate InternLM
# 验证 PyTorch 版本(在 python 交互式解释器中执行,期望输出 2.0.1):
# >>> import torch
# >>> print(torch.__version__)
# 2.0.1
# >>> exit()
# 2、升级pip并安装依赖包
# Upgrade pip first so the pinned wheels below resolve correctly.
python -m pip install --upgrade pip
# Install the model/runtime dependencies (versions pinned for reproducibility).
pip install modelscope==1.9.5 transformers==4.35.2 streamlit==1.24.0 sentencepiece==0.1.99 accelerate==0.24.1
# 3、LangChain 相关环境配置
# LangChain stack, pinned versions (one package per line for readability).
pip install \
  langchain==0.0.292 \
  gradio==4.4.0 \
  chromadb==0.4.15 \
  sentence-transformers==2.2.2 \
  unstructured==0.10.30 \
  markdown==3.3.7
# 4. Download the sentence-transformer embedding model via the HF mirror.
import os

# Route Hugging Face downloads through hf-mirror.com (for networks that
# cannot reach huggingface.co directly).
os.environ['HF_ENDPOINT'] = 'https://hf-mirror.com'

# Download the model; --resume-download lets an interrupted transfer continue
# instead of starting over.
os.system('huggingface-cli download --resume-download sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2 --local-dir /root/data/model/sentence-transformer')
# Move to the data disk; abort if it is missing so we don't clone into $HOME.
cd /root/data || exit 1
# Clone the course repositories (Gitee mirrors of the GitHub originals).
# Chained with && so the sequence stops on the first failed clone.
git clone https://gitee.com/open-compass/opencompass.git && \
  git clone https://gitee.com/InternLM/lmdeploy.git && \
  git clone https://gitee.com/InternLM/xtuner.git && \
  git clone https://gitee.com/InternLM/InternLM-XComposer.git && \
  git clone https://gitee.com/InternLM/lagent.git && \
  git clone https://gitee.com/InternLM/InternLM.git