使用镜像:
只需要在下载模型前设置好镜像源即可,在导入transformers等与模型有关的库前运行如下语句:
import os

# Route Hugging Face hub traffic through the mirror endpoint.
os.environ.update({'HF_ENDPOINT': 'https://hf-mirror.com'})
例如
import os

# The mirror endpoint must be set BEFORE importing transformers, which
# reads HF_ENDPOINT at import time (as the surrounding text explains).
os.environ['HF_ENDPOINT'] = 'https://hf-mirror.com'

from transformers import LlamaTokenizer, LlamaForCausalLM

# Local directory where the tokenizer and model weights are saved.
llama_model_path = "/root/autodl-tmp/model/luodian_llama-7b-hf"
# Create the target directory if it does not already exist.
os.makedirs(llama_model_path, exist_ok=True)

# Download the tokenizer and the model from the hub (via the mirror),
# then persist both to the local path.
tokenizer = LlamaTokenizer.from_pretrained("luodian/llama-7b-hf")
tokenizer.save_pretrained(llama_model_path)
model = LlamaForCausalLM.from_pretrained("luodian/llama-7b-hf")
model.save_pretrained(llama_model_path)
print("Model and tokenizer have been saved to:", llama_model_path)