#!/usr/bin/env python
# -*- coding: utf-8 -*-
# 版权信息:华为技术有限公司,版本所有(C) 2025-2099
"""
功 能:供应链 SCM Agent -- interface/roles/order/supply_manager_assistant_app_langgraph-供应经理助手fastapi接口
"""
import json
import os
import sys
from time import time
import uvicorn
from aipaas.logger_factory import logger
from fastapi import Request, FastAPI
from fastapi.responses import StreamingResponse
from langchain_core.messages import HumanMessage
from infrastructure.auth_fastapi import SoaAuth
from infrastructure.langfuse_telemetery.trace_langgraph import create_langfuse_callback
from scm_agent.src.application.roles.order.supply_manager_assistant_graph.main_graph.graph import \
get_supply_manager_assistant_main_graph
from scm_agent.src.common.agent_name import AgentName
from scm_agent.src.common.constants import Status
from scm_agent.src.infrastructures.agent_config_download.config_download import dowload_agent_config_langgraph
from scm_agent.src.infrastructures.agent_config_read.read_yaml_config import read_project_config
from scm_agent.src.infrastructures.agent_state.agent_state_helper import get_redis_key
from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_output_json
from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_stream_output_langgraph
from scm_agent.src.infrastructures.memory.postgre_checkpointer.postgre_checkpointer import FrameworkAdapter
from scm_agent.src.infrastructures.read_config import app_config
from scm_agent.src.interface.input_output_parameters import SupplyManagerAssistantChatInput, ConfigUpdateInput, \
ConfigUpdateOutput
# Keep local/loopback traffic off any configured HTTP proxy.
os.environ['NO_PROXY'] = '127.0.0.1,localhost'
# FastAPI app; FrameworkAdapter.lifespan_wrapper manages startup/shutdown
# resources (it populates request.app.state.presist_param, read by the chat
# endpoint below).
fastapi_app = FastAPI(lifespan=FrameworkAdapter.lifespan_wrapper)
# Deployment environment name from the "env" variable; may be None if unset.
env = os.environ.get("env")
# SOA authentication guard, applied to each endpoint via @soa.required.
soa = SoaAuth(env_type=env, skip_soa_auth=False, only_check_token=True)
agent_name = AgentName.SupplyManagerAssistantLangGraph
# Populated by preload_agent_config(); read by the chat endpoint's config.
project_config = {}
common_prompt_config = {}
def preload_agent_config(name):
    """Download and (re)load agent configuration into module globals.

    Args:
        name: assistant/skill name, as configured in app_config.

    Side effects:
        Rebinds the module-level ``project_config`` (environment-specific
        section of ``{agent_name}.yaml``) and ``common_prompt_config``
        (shared prompt settings).
    """
    global project_config, common_prompt_config
    # Fetch the latest config files for this agent.
    dowload_agent_config_langgraph(name)
    # Project config is keyed by deployment environment; keep only ours.
    project_config = read_project_config(agent_name, f"{agent_name}.yaml").get(env)
    # Shared prompt configuration used across agents.
    common_prompt_config = read_project_config("common", "prompt_config.yaml")
# Running under a PyCharm debug session: convenient spot for a breakpoint
# before the configuration is loaded at import time.
if 'PYCHARM_HOSTED' in os.environ or 'PYCHARM_DEBUG_PROCESS' in os.environ:
    logger.info("debug模式请在此打断点")
    # raise Exception("debug模式请在此打断点,注释此行即可")
# Load the agent configuration once at module import.
preload_agent_config(agent_name)
async def generator(graph, supply_manager_assistant_chat_input, initial_state, config):
    """Stream the supply-manager-assistant answer as SSE chunks.

    Emits the '<think>' framing and the echoed question first, then every
    custom-mode chunk produced by the LangGraph run.

    Exceptions raised while streaming MUST be caught here: once the first
    chunk is yielded the HTTP headers are already sent, so the endpoint's
    try/except can no longer intervene, and an unhandled error (e.g.
    `psycopg.OperationalError: the connection is closed` from a stale
    checkpointer connection) aborts the whole ASGI response.

    Args:
        graph: compiled LangGraph main graph.
        supply_manager_assistant_chat_input: request body; only `.question`
            is read here.
        initial_state: initial graph state (a messages dict).
        config: LangGraph run config (thread id, metadata, callbacks).

    Yields:
        SSE-formatted strings produced by str_to_stream_output_langgraph.
    """
    yield str_to_stream_output_langgraph('<think>')
    yield str_to_stream_output_langgraph('**问题**')
    question = supply_manager_assistant_chat_input.question.strip()
    yield str_to_stream_output_langgraph('\n' + question)
    try:
        async for chunk in graph.astream(
                input=initial_state,
                stream_mode="custom",
                config=config,
                subgraphs=True
        ):
            # With subgraphs=True each chunk is (namespace, payload); only
            # the payload is forwarded to the client.
            yield str_to_stream_output_langgraph(chunk[1])
    except Exception as e:
        # Log the root cause server-side, then tell the client in-stream
        # instead of crashing the ASGI application mid-response.
        logger.exception("stream error: %s", e)
        yield str_to_stream_output_langgraph(f'处理异常,异常原因: {e}')
@fastapi_app.post('/roles/supply_manager_assistant_chat_langgraph')
@soa.required
async def supply_manager_assistant_chat(request: Request,
                                        supply_manager_assistant_chat_input: SupplyManagerAssistantChatInput):
    """SSE chat endpoint for the supply-manager assistant (LangGraph)."""
    chat_input = supply_manager_assistant_chat_input
    # Checkpointer is created by the lifespan wrapper and kept on app state.
    checkpointer = request.app.state.presist_param.get("checkpointer")
    thread_id = get_redis_key(chat_input)
    # Langfuse tracing callback, tagged with the caller's identity.
    langfuse_callback = create_langfuse_callback(
        user_id=chat_input.user_id,
        session_id=chat_input.session_id,
        trace_name=AgentName.SupplyManagerAssistantLangGraph)
    config = {
        "configurable": {"thread_id": thread_id},
        "metadata": {
            "user_id": chat_input.user_id,
            "project_config": project_config,
            "common_prompt_config": common_prompt_config,
            "ctx_params": chat_input.ctxParams,
        },
        "callbacks": [langfuse_callback],
    }
    try:
        graph = get_supply_manager_assistant_main_graph(checkpointer)
        initial_state = {"messages": [HumanMessage(content=chat_input.question)]}
        sse_headers = {"Cache-Control": "no-cache", "Connection": "keep-alive"}
        return StreamingResponse(
            generator(graph, chat_input, initial_state, config),
            media_type="text/event-stream",
            headers=sse_headers,
        )
    except Exception as e:
        # Covers failures before streaming starts (graph build, state init).
        return str_to_output_json(f'处理异常,异常原因: {e}')
@fastapi_app.post('/config_update')
@soa.required
async def config_update(request: Request, config_update_input: ConfigUpdateInput):
    """Hot-reload agent configuration.

    Re-downloads and re-reads the config files for the agent named in the
    request, then reports status and elapsed time in the response body.
    """
    start_time = time()
    config_update_output = ConfigUpdateOutput()
    try:
        preload_agent_config(config_update_input.agent_name)
        config_update_output.status = Status.SUCCESS
    except Exception as e:
        # Previously the root cause was silently discarded; keep the generic
        # client-facing message but record the details in the server log.
        logger.exception("[SCM-Agent] Update config error: %s", e)
        config_update_output.error_message = "[SCM-Agent] Update config error."
    # Elapsed-time bookkeeping (seconds, stringified for the response).
    config_update_output.elapsed_time = str(time() - start_time)
    return config_update_output.to_dict()
@fastapi_app.get('/health')
@soa.required
async def health(request: Request, ):
    """Liveness probe: returns the JSON object {"success": true}.

    Fix: the original returned json.dumps(...) — a str — which FastAPI then
    JSON-serializes again, so clients received a double-encoded JSON string
    ("{\\"success\\": true}") instead of an object. Returning the dict lets
    FastAPI serialize it exactly once.
    """
    return {"success": True}
if __name__ == '__main__':
    # Local/dev entry point; the app is passed as an import string so
    # uvicorn can (re)import it. Port is pinned to 8080 here — the
    # app_config-driven port is intentionally commented out.
    uvicorn.run("supply_manager_assistant_app_langgraph:fastapi_app",
                host=app_config.get('host', '0.0.0.0'),
                # port=app_config.get('port', 8080),
                loop="asyncio",
                port=8080)
    # workers=app_config.get('workers', 4))
postgre_checkpointer.py目前没问题了,但是现在接口代码报错如下:ERROR: Exception in ASGI application
+ Exception Group Traceback (most recent call last):
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\uvicorn\protocols\http\httptools_impl.py", line 426, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\uvicorn\middleware\proxy_headers.py", line 84, in __call__
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\fastapi\applications.py", line 1054, in __call__
| await super().__call__(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\applications.py", line 123, in __call__
| await self.middleware_stack(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\middleware\errors.py", line 186, in __call__
| raise exc
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\middleware\errors.py", line 164, in __call__
| await self.app(scope, receive, _send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\middleware\exceptions.py", line 65, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 756, in __call__
| await self.middleware_stack(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 776, in app
| await route.handle(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 297, in handle
| await self.app(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 77, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\routing.py", line 75, in app
| await response(scope, receive, send)
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\responses.py", line 258, in __call__
| async with anyio.create_task_group() as task_group:
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\anyio\_backends\_asyncio.py", line 772, in __aexit__
| raise BaseExceptionGroup(
| ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\responses.py", line 261, in wrap
| await func()
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\starlette\responses.py", line 250, in stream_response
| async for chunk in self.body_iterator:
| File "D:\code\iscp-app-aigc-ai\scm_agent\src\interface\roles\order\supply_manager_assistant_app_langgraph.py", line 71, in generator
| async for chunk in graph.astream(
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\pregel\main.py", line 2883, in astream
| async with AsyncPregelLoop(
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\pregel\_loop.py", line 1186, in __aenter__
| saved = await self.checkpointer.aget_tuple(self.checkpoint_config)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\checkpoint\postgres\aio.py", line 192, in aget_tuple
| async with self._cursor() as cur:
| File "C:\Users\zwx1453293\AppData\Local\Programs\Python\Python311\Lib\contextlib.py", line 204, in __aenter__
| return await anext(self.gen)
| ^^^^^^^^^^^^^^^^^^^^^
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\langgraph\checkpoint\postgres\aio.py", line 388, in _cursor
| async with conn.cursor(binary=True, row_factory=dict_row) as cur:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\psycopg\connection_async.py", line 256, in cursor
| self._check_connection_ok()
| File "D:\code\iscp-app-aigc-ai\.venv\Lib\site-packages\psycopg\_connection_base.py", line 528, in _check_connection_ok
| raise e.OperationalError("the connection is closed")
| psycopg.OperationalError: the connection is closed
+------------------------------------
之前接口代码是没问题的
最新发布