qwen_agent/agent/deep_assistant.py
2025-12-13 20:20:31 +08:00

79 lines
2.7 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import json
from langchain.chat_models import init_chat_model
# from deepagents import create_deep_agent
from langchain.agents import create_agent
from langchain_mcp_adapters.client import MultiServerMCPClient
from utils.fastapi_utils import detect_provider
from .guideline_middleware import GuidelineMiddleware
# Utility functions
def read_system_prompt():
    """Load the shared, stateless system prompt from disk.

    Returns:
        The prompt text with leading/trailing whitespace removed.
    """
    prompt_path = "./prompt/system_prompt_default.md"
    with open(prompt_path, "r", encoding="utf-8") as fh:
        raw_text = fh.read()
    return raw_text.strip()
def read_mcp_settings():
    """Load the MCP tool configuration from disk.

    Returns:
        The parsed JSON config — as consumed by get_tools_from_mcp,
        a list whose first element carries an "mcpServers" mapping.
    """
    # Pin the encoding to UTF-8 (matching read_system_prompt) so
    # non-ASCII values in the config parse correctly regardless of the
    # platform's default locale.
    with open("./mcp/mcp_settings.json", "r", encoding="utf-8") as f:
        return json.load(f)
async def get_tools_from_mcp(mcp):
    """Extract LangChain tools from an MCP configuration.

    Args:
        mcp: Expected to be a non-empty list whose first element is a
            dict with a dict-valued "mcpServers" mapping of
            server-name -> server-config dict.

    Returns:
        The list of tools fetched from the configured MCP servers, or
        an empty list when the config is malformed or the client fails.
    """
    # Defensive validation of the outer shape.
    if not isinstance(mcp, list) or len(mcp) == 0 or "mcpServers" not in mcp[0]:
        return []
    servers = mcp[0]["mcpServers"]
    # BUGFIX: this dict check must run BEFORE iterating .values() below;
    # previously it ran after the loop, so a list-valued "mcpServers"
    # raised AttributeError instead of returning [] as intended.
    if not isinstance(servers, dict):
        return []
    # Normalize each server entry in place: drop the legacy "type" key
    # and ensure a "transport" key, defaulting to "http" when a URL is
    # present and "stdio" otherwise.
    for cfg in servers.values():
        cfg.pop("type", None)
        if "transport" not in cfg:
            cfg["transport"] = "http" if "url" in cfg else "stdio"
    try:
        mcp_client = MultiServerMCPClient(servers)
        return await mcp_client.get_tools()
    except Exception:
        # Best-effort: swallow client errors so callers never crash here.
        return []
async def init_agent(bot_id: str, model_name="qwen3-next", api_key=None,
                     model_server=None, generate_cfg=None,
                     system_prompt=None, mcp=None, robot_type=None, language="jp", user_identifier=None):
    """Build a chat agent for the given bot.

    Falls back to the on-disk system prompt and MCP settings when the
    caller does not supply them, resolves the model provider/endpoint,
    then assembles a LangChain agent with MCP tools and the guideline
    middleware attached.

    Returns:
        The agent produced by ``create_agent``.
    """
    # Fall back to on-disk defaults when the caller supplies nothing.
    system = system_prompt or read_system_prompt()
    effective_mcp = mcp or read_mcp_settings()
    tools = await get_tools_from_mcp(effective_mcp)

    # Resolve provider and endpoint for the requested model/server pair.
    provider, base_url = detect_provider(model_name, model_server)

    # Assemble the chat-model keyword arguments; caller-supplied
    # generate_cfg entries override the defaults below.
    llm_kwargs = dict(
        model=model_name,
        model_provider=provider,
        temperature=0.8,
        base_url=base_url,
        api_key=api_key,
    )
    llm_kwargs.update(generate_cfg or {})
    llm = init_chat_model(**llm_kwargs)

    guideline = GuidelineMiddleware(bot_id, llm, system, robot_type, language, user_identifier)
    return create_agent(
        model=llm,
        system_prompt=system,
        tools=tools,
        middleware=[guideline],
    )