0 Prerequisites
- Ollama is deployed with the qwen2.5:7b inference model (DeepSeek currently does not support function calling)
- Miniconda is installed
- A network connection that can reach overseas sites is available (the OpenWeather API requires it)
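If the model has not been pulled yet, it can be fetched and checked with the Ollama CLI (assuming Ollama itself is already installed and running):
ollama pull qwen2.5:7b
ollama list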
1 Setting up the development environment
1.1 Create the project's Python environment
Create the project's Python development environment with conda:
conda create -n mcp_demo python=3.10
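Then activate the environment so the later installs and runs use it:
conda activate mcp_demo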
1.2 Create the project in PyCharm
Create an mcp_demo project in PyCharm and point it at the Python environment created in the previous step:
- Interpreter type: select Custom environment
- Environment: select Existing
- Type: select Conda
- Environment: select the environment created in the previous step
1.3 Install the project dependencies
pip install mcp openai python-dotenv
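As an optional sanity check, confirm the packages import from the new environment (python-dotenv is imported as dotenv):
python -c "import mcp, openai, dotenv; print('ok')"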
2 Preparing external resources
This project calls a weather API to look up a city's weather, so you need to register on the weather API site and obtain an API key.
2.1 Visit OpenWeather
Current weather and forecast - OpenWeatherMap (https://openweathermap.org/)
Click the link above and register an account with an email address; the process is straightforward and is not covered in detail here.
2.2 Open the API keys page
After logging in, go to the My API keys page under your account.
2.3 Generate an API key
In the Create key field, enter learn and click Generate. A key named learn is created; this is the key used in the rest of this article.
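To verify the key works, you can call the same endpoint the server will use (a sketch; replace YOUR_KEY with the key generated above, and note that a freshly created key may take a while to become active):
curl "https://api.openweathermap.org/data/2.5/weather?q=beijing&appid=YOUR_KEY&units=metric&lang=zh_cn"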
3 Implementing the program logic
MCP uses a client/server architecture, and each client connects to exactly one server. To implement tool calling over the MCP protocol, both the client and the server therefore have to be implemented.
3.1 Server-side implementation
3.1.1 Create the weather_server.py file
3.1.2 Import the required packages
import json
import httpx
from mcp.server import FastMCP
3.1.3 Initialize the MCP server
mcp = FastMCP("WeatherServer")
3.1.4 Define the OpenWeather configuration
OPENWEATHER_API_KEY = "5082d4XXX163cb"
OPENWEATHER_BASE_URL = "https://api.openweathermap.org/data/2.5/weather"
USER_AGENT = "weather-app/1.0"
3.1.5 Define the method that fetches a city's weather
async def get_weather(city):
    """
    Fetch weather information from the OpenWeather API.
    :param city: city name (must be given in English, e.g. beijing)
    :return: a dict of weather data; on error, a dict containing an "error" message
    """
    params = {
        "q": city,
        "appid": OPENWEATHER_API_KEY,
        "units": "metric",
        "lang": "zh_cn",
    }
    headers = {"User-Agent": USER_AGENT}
    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(OPENWEATHER_BASE_URL, params=params, headers=headers, timeout=30)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            return {"error": f"HTTP request error: {e}"}
        except Exception as e:
            return {"error": f"Error: {e}"}
3.1.6 Define the weather data formatting method
def format_weather_data(data):
    """
    Format the weather data.
    :param data: weather data dict
    :return: a formatted string; on error, a string containing the error message
    """
    # If a string is passed in, parse it into a dict first
    if isinstance(data, str):
        data = json.loads(data)
    if "error" in data:
        return data["error"]
    weather = data["weather"][0]["description"]
    temperature = data["main"]["temp"]
    city = data["name"]
    country = data["sys"]["country"]
    humidity = data["main"]["humidity"]
    wind = data["wind"]["speed"]
    return f"City: {city}, {country}\nWeather: {weather}\nTemperature: {temperature}°C\nHumidity: {humidity}%\nWind speed: {wind} m/s"
3.1.7 Register it as an MCP tool
@mcp.tool()
async def get_weather_tool(city: str):
    """
    Get the weather information for a city.
    :param city: city name (must be given in English, e.g. beijing)
    :return: a formatted weather string; on error, a string containing the error message
    """
    weather_data = await get_weather(city)
    return format_weather_data(weather_data)
3.1.8 Define the main entry point
if __name__ == "__main__":
    mcp.run(transport="stdio")
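Before writing the client, the server can be smoke-tested on its own. A sketch, assuming the MCP SDK's optional CLI extra is installed (pip install "mcp[cli]"); it launches the MCP Inspector against the script:
mcp dev weather_server.py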
3.2 Client-side implementation
3.2.1 Create the weather_client.py file
3.2.2 Import the required packages
import asyncio
import json
import os
import sys
from contextlib import AsyncExitStack
from typing import Optional
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters, stdio_client
from openai import OpenAI
3.2.3 Configure the I/O encoding
sys.stdout.reconfigure(encoding='utf-8')
os.environ['PYTHONIOENCODING'] = 'utf-8'
3.2.4 Load the Ollama configuration
load_dotenv()
3.2.5 Define the configuration file
Create a .env file and fill in the Ollama-related settings:
BASE_URL=http://localhost:11434/v1/
MODEL=qwen2.5:7b
OPENAI_API_KEY=ollama
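To confirm the client will actually be able to reach the model, you can query the OpenAI-compatible endpoint exposed by Ollama (assuming Ollama is running on its default port 11434):
curl http://localhost:11434/v1/models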
3.2.6 Define the MCPClient class
class MCPClient:
3.2.6.1 Define the initialization method
    def __init__(self):
        """Initialize the MCP client"""
        self.exit_stack = AsyncExitStack()
        self.openai_api_key = os.getenv('OPENAI_API_KEY')
        self.base_url = os.getenv('BASE_URL')
        self.model = os.getenv('MODEL')
        if not self.openai_api_key:
            raise ValueError("❌ OpenAI API Key not found; please set OPENAI_API_KEY in the .env file")
        self.client = OpenAI(api_key=self.openai_api_key, base_url=self.base_url)
        self.session: Optional[ClientSession] = None
3.2.6.2 Define the method that connects to the server
    async def connect_to_server(self, server_script_path):
        """Connect to the MCP server"""
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not is_python and not is_js:
            raise ValueError("❌ Unsupported script type; please use a Python or JavaScript script")
        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None,
        )
        # Start the MCP server and establish communication over stdio
        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
        await self.session.initialize()
        # List the tools available on the MCP server
        response = await self.session.list_tools()
        tools = response.tools
        print("\nConnected to the server, which provides the following tools:", [tool.name for tool in tools])
3.2.6.3 Define the query-processing method
    async def process_query(self, query):
        """
        Use the LLM to process a query and call the available MCP tools (function calling).
        """
        messages = [{"role": "system", "content": "You are a helpful assistant that answers the user's questions."},
                    {"role": "user", "content": query}]
        response = await self.session.list_tools()
        available_tools = [{
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                "parameters": tool.inputSchema,
            }
        } for tool in response.tools]
        response = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            tools=available_tools
        )
        content = response.choices[0]
        if content.finish_reason == "tool_calls":
            tool_call = content.message.tool_calls[0]
            function_name = tool_call.function.name
            function_args = json.loads(tool_call.function.arguments)
            # Execute the tool through the MCP session
            result = await self.session.call_tool(function_name, function_args)
            print(f"\n\n[Calling tool {function_name} with args {function_args}]\n\n")
            # Append both the model's tool-call message and the tool's result to messages
            result_content = result.content[0].text
            messages.append(content.message.model_dump())
            messages.append({
                "tool_call_id": tool_call.id,
                "role": "tool",
                "name": function_name,
                "content": result_content,
            })
            # Return the tool result directly; messages is kept so it could be sent back
            # to the model for a final natural-language answer (see the sketch below)
            return result_content
        return content.message.content.strip()
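Here the tool result is returned as-is. Since messages already holds the assistant's tool-call message and the tool's output, a variation is to make one more model call so the model phrases the final answer itself. A minimal sketch (hypothetical; it would replace the final return inside the tool_calls branch, at the same indentation):
            # Optional second round trip: let the model turn the raw tool result into a final answer
            final_response = self.client.chat.completions.create(
                model=self.model,
                messages=messages,
            )
            return final_response.choices[0].message.content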
3.2.6.4 Define the interactive chat method
    async def chat_loop(self):
        """Run the interactive chat loop"""
        print("✅ MCP client started! Type 'quit' to exit")
        while True:
            try:
                query = input("Enter your question: ").strip()
                if query.lower() == 'quit':
                    break
                response = await self.process_query(query)
                print(f"openai: {response}")
            except Exception as e:
                print(f"Error: {e}")
3.2.6.5 Define the connection cleanup method
    async def cleanup(self):
        """Clean up resources"""
        await self.exit_stack.aclose()
3.2.7 Define the main workflow
async def main():
    if len(sys.argv) < 2:
        print("Please provide the path to the MCP server script as an argument")
        sys.exit(1)
    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        await client.cleanup()
3.2.8 Define the main entry point
if __name__ == "__main__":
    asyncio.run(main())
4 Testing and verification
4.1 Run the client
Note: pass the server script path as a run argument (see the example below). If the server is not in the same directory as the client, use its full path.
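For example, assuming both scripts sit in the project root and use the file names from section 3, the client can be started with:
python weather_client.py weather_server.py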
4.2 Enter a question
4.3 Check the answer
5 Complete code
5.1 Complete server-side code
import json
import httpx
from mcp.server import FastMCP
mcp = FastMCP("WeatherServer")
OPENWEATHER_API_KEY = "5082d4XXXXX51163cb"
OPENWEATHER_BASE_URL = "https://api.openweathermap.org/data/2.5/weather"
USER_AGENT = "weather-app/1.0"
async def get_weather(city):
    """
    Fetch weather information from the OpenWeather API.
    :param city: city name (must be given in English, e.g. beijing)
    :return: a dict of weather data; on error, a dict containing an "error" message
    """
    params = {
        "q": city,
        "appid": OPENWEATHER_API_KEY,
        "units": "metric",
        "lang": "zh_cn",
    }
    headers = {"User-Agent": USER_AGENT}
    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(OPENWEATHER_BASE_URL, params=params, headers=headers, timeout=30)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            return {"error": f"HTTP request error: {e}"}
        except Exception as e:
            return {"error": f"Error: {e}"}
def format_weather_data(data):
    """
    Format the weather data.
    :param data: weather data dict
    :return: a formatted string; on error, a string containing the error message
    """
    # If a string is passed in, parse it into a dict first
    if isinstance(data, str):
        data = json.loads(data)
    if "error" in data:
        return data["error"]
    weather = data["weather"][0]["description"]
    temperature = data["main"]["temp"]
    city = data["name"]
    country = data["sys"]["country"]
    humidity = data["main"]["humidity"]
    wind = data["wind"]["speed"]
    return f"City: {city}, {country}\nWeather: {weather}\nTemperature: {temperature}°C\nHumidity: {humidity}%\nWind speed: {wind} m/s"
@mcp.tool()
async def get_weather_tool(city: str):
    """
    Get the weather information for a city.
    :param city: city name (must be given in English, e.g. beijing)
    :return: a formatted weather string; on error, a string containing the error message
    """
    weather_data = await get_weather(city)
    return format_weather_data(weather_data)
if __name__ == "__main__":
    mcp.run(transport="stdio")
5.2 Complete client-side code
import asyncio
import json
import os
import sys
from contextlib import AsyncExitStack
from typing import Optional
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters, stdio_client
from openai import OpenAI
sys.stdout.reconfigure(encoding='utf-8')
os.environ['PYTHONIOENCODING'] = 'utf-8'
load_dotenv()
class MCPClient:
    def __init__(self):
        """Initialize the MCP client"""
        self.exit_stack = AsyncExitStack()
        self.openai_api_key = os.getenv('OPENAI_API_KEY')
        self.base_url = os.getenv('BASE_URL')
        self.model = os.getenv('MODEL')
        if not self.openai_api_key:
            raise ValueError("❌ OpenAI API Key not found; please set OPENAI_API_KEY in the .env file")
        self.client = OpenAI(api_key=self.openai_api_key, base_url=self.base_url)
        self.session: Optional[ClientSession] = None
    async def connect_to_server(self, server_script_path):
        """Connect to the MCP server"""
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not is_python and not is_js:
            raise ValueError("❌ Unsupported script type; please use a Python or JavaScript script")
        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None,
        )
        # Start the MCP server and establish communication over stdio
        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
        await self.session.initialize()
        # List the tools available on the MCP server
        response = await self.session.list_tools()
        tools = response.tools
        print("\nConnected to the server, which provides the following tools:", [tool.name for tool in tools])
    async def process_query(self, query):
        """
        Use the LLM to process a query and call the available MCP tools (function calling).
        """
        messages = [{"role": "system", "content": "You are a helpful assistant that answers the user's questions."},
                    {"role": "user", "content": query}]
        response = await self.session.list_tools()
        available_tools = [{
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                "parameters": tool.inputSchema,
            }
        } for tool in response.tools]
        response = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            tools=available_tools
        )
        content = response.choices[0]
        if content.finish_reason == "tool_calls":
            tool_call = content.message.tool_calls[0]
            function_name = tool_call.function.name
            function_args = json.loads(tool_call.function.arguments)
            # Execute the tool through the MCP session
            result = await self.session.call_tool(function_name, function_args)
            print(f"\n\n[Calling tool {function_name} with args {function_args}]\n\n")
            # Append both the model's tool-call message and the tool's result to messages
            result_content = result.content[0].text
            messages.append(content.message.model_dump())
            messages.append({
                "tool_call_id": tool_call.id,
                "role": "tool",
                "name": function_name,
                "content": result_content,
            })
            # Return the tool result directly (alternatively, messages could be sent back to the model for a final answer)
            return result_content
        return content.message.content.strip()
    async def chat_loop(self):
        """Run the interactive chat loop"""
        print("✅ MCP client started! Type 'quit' to exit")
        while True:
            try:
                query = input("Enter your question: ").strip()
                if query.lower() == 'quit':
                    break
                response = await self.process_query(query)
                print(f"openai: {response}")
            except Exception as e:
                print(f"Error: {e}")
    async def cleanup(self):
        """Clean up resources"""
        await self.exit_stack.aclose()
async def main():
    if len(sys.argv) < 2:
        print("Please provide the path to the MCP server script as an argument")
        sys.exit(1)
    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        await client.cleanup()
if __name__ == "__main__":
    asyncio.run(main())
Appendix
Project structure
Issue 1: UnicodeDecodeError: 'utf-8' codec can't decode byte 0xc7 in position 0: invalid continuation byte
- Check whether the code sets the standard I/O encoding (see section 3.2.3)
- Check that the project encoding in the IDE settings is UTF-8
Issue 2: openai: Error: All connection attempts failed
This is a network problem when reaching OpenWeather; make sure you can access overseas sites (for example via a proxy).