from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
# Build a LangChain LLM wrapper around a local Hugging Face "text-generation"
# pipeline; the gpt2 weights are fetched from the Hub on first use.
hf = HuggingFacePipeline.from_model_id(
    model_id="gpt2",                         # small demo model
    task="text-generation",                  # transformers pipeline task
    pipeline_kwargs={"max_new_tokens": 50},  # cap generated length
)
from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain.prompts import PromptTemplate
# Compose prompt -> model with LCEL's pipe operator and run one question.
qa_template = """Question: {question}
Answer: Let's think step by step."""
qa_prompt = PromptTemplate.from_template(qa_template)
qa_chain = qa_prompt | hf
user_question = "What is the result of 1+ 1?"
print(qa_chain.invoke({"question": user_question}))
# Use a pipeline as a high-level helper.
# NOTE(fix): "!pip install ..." is IPython/Jupyter magic and raises a
# SyntaxError in a plain Python script. Install the dependencies from a
# shell before running this file:
#   pip install langchain_community langchain
from transformers import pipeline
from langchain_community.llms import HuggingFacePipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain.prompts import PromptTemplate
# Alternative construction: build the transformers pipeline explicitly
# (zephyr-7b-beta) and hand it to the LangChain adapter, then drive it
# through a prompt | llm chain exactly as above.
zephyr_pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-beta")
hf = HuggingFacePipeline(pipeline=zephyr_pipe)
tpl = """Question: {question}
Answer: Let's think step by step."""
prompt_tmpl = PromptTemplate.from_template(tpl)
llm_chain = prompt_tmpl | hf
query = "What is the result of 1+ 1?"
print(llm_chain.invoke({"question": query}))
# The following runs Tsinghua's ChatGLM3 large model on a GPU:
from transformers import pipeline
from langchain_community.llms import HuggingFacePipeline
import time
# Load THUDM/chatglm3-6b on the first CUDA device and time ten direct
# invocations of the model.
hf = HuggingFacePipeline.from_model_id(
    model_id="THUDM/chatglm3-6b",
    task="text-generation",
    device=0,  # first GPU
    # NOTE(review): trust_remote_code executes Python shipped in the model
    # repository — only enable it for repositories you trust.
    model_kwargs={"trust_remote_code": True},
    pipeline_kwargs={"max_new_tokens": 500},
)
# FIX: the loop body was pasted without indentation (IndentationError);
# re-indented so the timing actually wraps each invocation.
for i in range(10):
    a = time.time()
    print(hf.invoke("西游记中描写了哪些人物"))
    print(time.time() - a)
# It can also be invoked as part of a chain.
from transformers import pipeline
from langchain_community.llms import HuggingFacePipeline
import time
from langchain.prompts import PromptTemplate
# Same ChatGLM3 model, but invoked through a prompt | llm chain; again
# time ten runs.
hf = HuggingFacePipeline.from_model_id(
    model_id="THUDM/chatglm3-6b",
    task="text-generation",
    device=0,  # first GPU
    # NOTE(review): trust_remote_code executes Python shipped in the model
    # repository — only enable it for repositories you trust.
    model_kwargs={"trust_remote_code": True},
    pipeline_kwargs={"max_new_tokens": 500},
)
template = """{question}"""
prompt = PromptTemplate.from_template(template)
chain = prompt | hf
question = "西游记中描写了哪些人物?"
# FIX: the loop body was pasted without indentation (IndentationError);
# re-indented so each chain invocation is timed individually.
for i in range(10):
    a = time.time()
    print(chain.invoke({"question": question}))
    print(time.time() - a)