from fastapi import FastAPI
from fastapi.responses import StreamingResponse
import asyncio
import time
from pydantic import BaseModel
app = FastAPI()

# Define a Pydantic model that describes the structure of the JSON request body
class Item(BaseModel):
    msg: str

@app.post("/stream")
async def stream(item: Item):
    async def event_stream():
        while True:
            # Simulate an event; the yield sends it immediately instead of waiting for buffering
            # (SSE event lines start with the "data:" field name)
            yield 'data: {} {}\n\n'.format(item.msg, time.ctime())
            # Send an event every 2 seconds
            await asyncio.sleep(2)
    return StreamingResponse(event_stream(), media_type="text/event-stream")

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
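For reference, the wire format the endpoint emits is plain text: each Server-Sent Event is one or more lines starting with the data: field name, terminated by a blank line. A tiny helper makes that framing explicit (a sketch only; sse_event is a hypothetical name, not part of the server above):

def sse_event(data: str) -> str:
    # Frame a payload as a single SSE event: a "data:" line followed by a blank line
    return "data: {}\n\n".format(data)

# The generator above could equivalently yield sse_event('{} {}'.format(item.msg, time.ctime()))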
Code client
import requests
import json
# SSE URL
url = 'http://localhost:8000/stream'

# JSON payload
payload = {'msg': '你好'}

# Headers for SSE request
headers = {'Content-Type': 'application/json', 'Accept': 'text/event-stream'}

# Send SSE request
response = requests.post(url,
                         data=json.dumps(payload),
                         headers=headers,
                         stream=True)

# Check if the request was successful
if response.status_code == 200:
    # Process the SSE stream
    for line in response.iter_lines():
        if line:
            decoded_line = line.decode('utf-8')
            print(decoded_line)
else:
    print(f"Request failed with status code: {response.status_code}")

# Close the connection
response.close()
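The loop above prints the raw lines, including the data: prefix. If only the payload is wanted, the prefix can be stripped first; a minimal sketch (iter_sse_data is a hypothetical helper, assuming the data: framing produced by the server above):

def iter_sse_data(response):
    # Yield only the payload portion of each "data:" line in a streamed response
    for raw in response.iter_lines():
        if not raw:
            continue  # blank lines separate SSE events
        line = raw.decode('utf-8')
        if line.startswith('data:'):
            yield line[len('data:'):].strip()

# Usage: for chunk in iter_sse_data(response): print(chunk)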
WebUI client
import random
import gradio as gr
import requests
import json
"""
用户输入后的回调函数 random_response
参数:
message: 用户此次输入的消息
history: 历史聊天记录,比如 [["use input 1", "assistant output 1"],["user input 2", "assistant output 2"]]
返回值:输出的内容
"""defrandom_response(message, history):return random.choice(["Yes","No"])defchat(message, history):# SSE URL
url ='http://localhost:8000/stream'# JSON payload
payload ={'msg':'你好'}# Headers for SSE request
headers ={'Content-Type':'application/json','Accept':'text/event-stream'}# Send SSE request
response = requests.post(url,
data=json.dumps(payload),
headers=headers,
stream=True)# Check if the request was successfulif response.status_code ==200:# Process the SSE streamfor line in response.iter_lines():if line:
decoded_line = line.decode('utf-8')yield decoded_line
else:print(f"Request failed with status code: {response.status_code}")# Close the connection
response.close()
gr.ChatInterface(chat).launch(server_name='0.0.0.0',
server_port=8001)
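Because gr.ChatInterface treats each yielded value as the complete assistant reply so far, the chat function above only ever displays the latest event. If the events should accumulate into one growing message, a small variation works (a sketch under the same endpoint, payload, and import assumptions as above; chat_accumulate is a hypothetical name):

def chat_accumulate(message, history):
    # Same request as chat(), but each yield contains everything received so far
    response = requests.post('http://localhost:8000/stream',
                             data=json.dumps({'msg': message}),
                             headers={'Content-Type': 'application/json',
                                      'Accept': 'text/event-stream'},
                             stream=True)
    buffer = ''
    for line in response.iter_lines():
        if line:
            buffer += line.decode('utf-8') + '\n'
            yield buffer  # ChatInterface replaces the displayed reply with this value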