import transformers
import torch
def main() -> None:
    """Load a local Llama-3-8B-Instruct model and print one sample generation.

    Builds a Hugging Face text-generation pipeline from a local model
    directory, runs it on a single hard-coded prompt, and prints the raw
    result (a list of ``{"generated_text": ...}`` dicts).
    """
    # NOTE: the original bound this to a variable named `pipeline`, shadowing
    # the `transformers.pipeline` factory just called — renamed to `generator`.
    generator = transformers.pipeline(
        task="text-generation",
        # Instruct-tuned checkpoint; base model lives at
        # "/root/models/Meta-Llama-3-8B" if non-chat completions are wanted.
        model="/root/models/Meta-Llama-3-8B-instruct",
        # bfloat16 halves memory vs. fp32 and is the dtype Llama 3 ships in.
        model_kwargs={"torch_dtype": torch.bfloat16},
        device="cuda",  # assumes a CUDA GPU is available — TODO confirm
    )
    print(generator("Hey how are you doing today?"))


# Guard the entry point so importing this module does not trigger a
# multi-gigabyte model load as a side effect.
if __name__ == "__main__":
    main()
# Calling LLaMA via the transformers library.
# First published 2024-07-01 17:35:46.