申请免费模型api-key
1.选择喜欢的模型,查看详情
2.创建api-key
3.测试api连通
pip install --upgrade openai
"""Minimal connectivity check for the DashScope OpenAI-compatible endpoint.

Sends a single user message to qwen-omni-turbo and streams the reply to
stdout, printing token usage from the final chunk.
"""
import os
from openai import OpenAI

# If DASHSCOPE_API_KEY is not set in the environment, replace the getenv
# call with your Bailian API key, e.g. api_key="sk-xxx".
client = OpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
)

completion = client.chat.completions.create(
    model="qwen-omni-turbo",
    messages=[{"role": "user", "content": "你是谁"}],
    # Output modality; only ["text"] is currently supported.
    modalities=["text"],
    # Streaming is mandatory for this model; non-stream calls raise an error.
    stream=True,
    stream_options={"include_usage": True},
)

for chunk in completion:
    if chunk.choices:
        print(chunk.choices[0].delta)
    else:
        # The last chunk carries no choices, only usage statistics.
        print(chunk.usage)
使用langchain搭建LLM程序
pip install dashscope
pip install langserve[all]
pip install langchain-community
"""LangServe demo: expose a Tongyi (DashScope) chain as a FastAPI service.

Builds a prompt | llm | parser chain, runs one demo invocation, and serves
the chain at POST /chain. Run the file directly to start uvicorn on
localhost:8000.
"""
from fastapi import FastAPI
from langchain_core.messages import SystemMessage, HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_community.llms import Tongyi
import os
from langchain_core.prompts import ChatPromptTemplate
from langserve import add_routes

# LangSmith tracing configuration.
# FIX: the variable LangChain actually reads is LANGCHAIN_TRACING_V2
# (LANGSMITH_TRACING on newer releases); the original "LANGSMITH_TRACING_V2"
# is not a recognized name and silently disabled tracing.
os.environ["LANGCHAIN_TRACING_V2"] = "true"
# SECURITY FIX: never hard-code API keys in source control. Export
# LANGSMITH_API_KEY and DASHSCOPE_API_KEY in the environment before running.
if not os.getenv("DASHSCOPE_API_KEY"):
    raise RuntimeError("DASHSCOPE_API_KEY environment variable is not set")

llm = Tongyi()  # reads DASHSCOPE_API_KEY from the environment
parser = StrOutputParser()

# Example of calling the model directly with explicit message objects:
#   msg = [SystemMessage(content="回答问题,用英文回答"), HumanMessage(content="你是谁")]
#   print(llm.invoke(msg))

# Prompt template with two variables: answer language and the question.
prompt_template = ChatPromptTemplate.from_messages([
    ("system", "回答问题,用{language}回答"),
    ("user", "{question}")
])

# Chain: prompt -> model -> plain string output.
chain = prompt_template | llm | parser

# Demo invocation (runs once at import time; costs one API call).
print(chain.invoke({"language": "英文", "question": "你是谁"}))

# Expose the chain over HTTP at POST /chain.
app = FastAPI(title="我的langchain服务", version="0.0.1", description="我的langchain服务")
add_routes(
    app,
    chain,
    path="/chain"
)

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="localhost", port=8000)
客户端调用案例
"""Client-side example: call the LangServe /chain endpoint remotely."""
from langserve import RemoteRunnable

if __name__ == '__main__':
    # Proxy object that forwards invoke() calls to the running server.
    remote_chain = RemoteRunnable(url="http://localhost:8000/chain")
    answer = remote_chain.invoke({'language': "英文", 'question': "你是谁"})
    print(answer)
使用langchain搭建聊天机器人程序
聊天机器人能够进行对话并记住之前的互动
"""LangChain chatbot demo: a Tongyi chain with per-session message history.

Each session (keyed by session_id) keeps its own in-memory chat history so
the model can refer back to earlier turns of the conversation.
"""
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_community.llms import Tongyi
import os
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory

# LangSmith tracing configuration.
# FIX: the variable LangChain actually reads is LANGCHAIN_TRACING_V2
# (LANGSMITH_TRACING on newer releases); the original "LANGSMITH_TRACING_V2"
# is not a recognized name and silently disabled tracing.
os.environ["LANGCHAIN_TRACING_V2"] = "true"
# SECURITY FIX: never hard-code API keys in source control. Export
# LANGSMITH_API_KEY and DASHSCOPE_API_KEY in the environment before running.
if not os.getenv("DASHSCOPE_API_KEY"):
    raise RuntimeError("DASHSCOPE_API_KEY environment variable is not set")

llm = Tongyi()  # reads DASHSCOPE_API_KEY from the environment

# Chat prompt: a system instruction plus the running message history,
# which is injected under the key "my_msg".
prompt_template = ChatPromptTemplate.from_messages([
    ("system", "你是一个乐于助人的助手,用{language}尽你所能回答所有问题"),
    MessagesPlaceholder("my_msg")
])

chain = prompt_template | llm

# In-memory history store shared by all sessions for the lifetime of the
# process: session_id -> ChatMessageHistory.
store = {}
def get_session_history(session_id: str):
    """Return the message history for *session_id*, creating it on first use."""
    history = store.get(session_id)
    if history is None:
        history = ChatMessageHistory()
        store[session_id] = history
    return history
# Wrap the chain so each call automatically loads and saves the message
# history of the session named in config; new user messages are supplied
# under the key "my_msg".
do_message = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key="my_msg",
)

# Every turn below reuses the same session, so earlier turns stay visible.
config = {"configurable": {'session_id': "123"}}


def _turn(text):
    """Build the input payload for one Chinese-language chat turn."""
    return {"my_msg": [HumanMessage(content=text)], "language": "中文"}


# Turn 1: introduce ourselves.
print(do_message.invoke(_turn("你好,我是lzl"), config=config))

# Turn 2: the model should recall the name given in turn 1.
print(do_message.invoke(_turn("我是谁"), config=config))

# Turn 3: streamed response -- each chunk yielded is a single token.
for token in do_message.stream(_turn("请给我讲一个笑话"), config=config):
    print(token, end="--")