Calling the DeepSeek LLM through the OpenAI client
FastAPI streaming endpoint with StreamingResponse
Code:
test_openaiapi.py
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from openai import OpenAI

app = FastAPI()

# DeepSeek's API is OpenAI-compatible, so the standard OpenAI client works
# once base_url points at https://api.deepseek.com.
client = OpenAI(base_url="https://api.deepseek.com", api_key="sk-****")


async def get_openai_generator(prompt: str):
    print("##############", prompt)
    # Note: this client call is synchronous, so it blocks the event loop
    # while the stream is being consumed.
    openai_stream = client.chat.completions.create(
        model="deepseek-chat",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.0,
        stream=True,
    )
    print("##############", openai_stream)
    for event in openai_stream:
        choice = event.choices[0].delta.content
        # delta.content can be None (e.g. on the final chunk); skip those.
        if choice:
            print("##############", choice)
            yield f"{choice}\n\n"


@app.get('/stream')
async def stream():
    return StreamingResponse(get_openai_generator("你是谁"), media_type='text/event-stream')
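The generator above yields plain text chunks even though the response is declared as text/event-stream. Clients that expect real Server-Sent Events (for example the browser EventSource API) need each chunk framed as "data: <payload>" followed by a blank line. Below is a minimal sketch of an SSE-framed variant; the function name get_sse_generator is made up for illustration, everything else matches the code above.

async def get_sse_generator(prompt: str):
    # Same streaming call as above, only the output framing changes.
    openai_stream = client.chat.completions.create(
        model="deepseek-chat",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.0,
        stream=True,
    )
    for event in openai_stream:
        choice = event.choices[0].delta.content
        if choice:
            # Standard SSE framing: "data: <payload>" followed by a blank line.
            yield f"data: {choice}\n\n"
    # Optional end-of-stream marker so the client knows the answer is complete.
    yield "data: [DONE]\n\n"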
It also works without async. Note that inside the endpoint function (def aa_stream below) you must not mix yield and return: once the body contains a yield, Python treats the whole function as a generator, so a return StreamingResponse(...) is never delivered and you get an error or no data back. Use one or the other (see the sketch after the next listing).
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from openai import OpenAI
from pydantic import BaseModel
import json

app = FastAPI()

client = OpenAI(base_url="https://api.deepseek.com", api_key="sk-***")


def get_openai_generator(prompt: str):
    print("##############", prompt)
    openai_stream = client.chat.completions.create(
        model="deepseek-chat",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.0,
        stream=True,
    )
    print("##############", openai_stream)
    for event in openai_stream:
        choice = event.choices[0].delta.content
        # delta.content can be None (e.g. on the final chunk); skip those.
        if choice:
            print("##############", choice)
            # Wrap every chunk in a JSON object, one object per line,
            # so the client can parse the stream line by line.
            json_chunk = json.dumps({"content": choice}, ensure_ascii=False)
            yield f"{json_chunk}\n".encode('utf-8')


# Request body model (Pydantic model)
class QueryRequest(BaseModel):
    user_query: str  # the user_query field


@app.post('/stream')
def aa_stream(request: QueryRequest):
    user_query = request.user_query
    if user_query:
        print("query_all:", user_query)
        headers = {
            "Content-Type": "text/event-stream; charset=utf-8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
        return StreamingResponse(get_openai_generator(user_query),
                                 media_type='text/event-stream', headers=headers)
    else:
        # Empty query: nothing is returned, so FastAPI answers with a null body.
        pass
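To illustrate the yield/return note above: if the endpoint itself contains a yield, the whole function becomes a generator and the StreamingResponse is never handed to FastAPI. The route /stream_broken and function broken_stream below are hypothetical, shown only to demonstrate the broken pattern inside the file above; keep all yields inside a separate generator such as get_openai_generator instead.

@app.post('/stream_broken')  # hypothetical route, for illustration only
def broken_stream(request: QueryRequest):
    if not request.user_query:
        yield b"empty query\n"  # this single yield turns the whole function
                                # into a generator ...
    return StreamingResponse(   # ... so this return value never reaches FastAPI;
        get_openai_generator(request.user_query),  # the request errors out or
        media_type='text/event-stream',            # returns no data
    )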
Run:
uvicorn test_openaiapi:app --reload
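Alternatively, the server can be started from Python (a sketch; test_openaiapi.py is the file above, run_server.py is just a hypothetical helper script):

# run_server.py -- hypothetical helper script
import uvicorn

if __name__ == "__main__":
    # reload=True requires the app to be passed as an import string.
    uvicorn.run("test_openaiapi:app", host="127.0.0.1", port=8000, reload=True)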
Client call with requests (GET, matching the first version of /stream):
import requests

url = "http://127.0.0.1:8000/stream/"

with requests.get(url, stream=True) as r:
    if r.status_code == 200:
        for line in r.iter_lines(decode_unicode=True):
            if line:  # skip empty lines
                print(line)
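For the POST version, which streams one JSON object per line, a matching client would send {"user_query": ...} and parse each line. A sketch based on the QueryRequest model and JSON-line output defined above:

import json
import requests

url = "http://127.0.0.1:8000/stream"

with requests.post(url, json={"user_query": "你是谁"}, stream=True) as r:
    if r.status_code == 200:
        for line in r.iter_lines(decode_unicode=True):
            if line:  # skip empty keep-alive lines
                chunk = json.loads(line)  # each line is {"content": "..."}
                print(chunk["content"], end="", flush=True)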