Merge branch 'developing' into bot_manager

朱潮 2026-04-11 21:02:18 +08:00
commit ccff7be046
4 changed files with 77 additions and 25 deletions

View File

@@ -45,10 +45,11 @@ from langchain_core.language_models import BaseChatModel
 from langgraph.pregel import Pregel
 # New-version imports: MemoryMiddleware and SkillsMiddleware have moved to deepagents.middleware
 from deepagents.middleware import MemoryMiddleware, SkillsMiddleware
-from langchain.agents.middleware import AgentMiddleware
+from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse
+from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware
+from langchain_core.messages import AIMessage, HumanMessage
 from langgraph.types import Checkpointer
 from deepagents_cli.config import settings, get_default_coding_instructions
-from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware
 from deepagents.middleware.filesystem import FilesystemMiddleware
 from deepagents.middleware.patch_tool_calls import PatchToolCallsMiddleware
 from langchain_anthropic.middleware import AnthropicPromptCachingMiddleware
@@ -63,6 +64,51 @@ from .custom_filesystem_middleware import CustomFilesystemMiddleware
 # _global_checkpointer = MemorySaver()
 logger = logging.getLogger('app')
+
+
+class EmptyResponseRetryMiddleware(AgentMiddleware):
+    """Automatically retry when the model returns empty content (no text, no tool calls)."""
+
+    MAX_RETRIES = 5
+
+    def _is_empty_response(self, result: ModelResponse) -> bool:
+        """Return True if the response carries neither text nor tool calls."""
+        if not result.result:
+            return True
+        msg = result.result[0]
+        if not isinstance(msg, AIMessage):
+            return False
+        content = msg.content or ""
+        has_text = bool(content.strip()) if isinstance(content, str) else bool(content)
+        return not has_text and len(msg.tool_calls) == 0
+
+    def wrap_model_call(self, request, handler):
+        result = handler(request)
+        retries = 0
+        while self._is_empty_response(result) and retries < self.MAX_RETRIES:
+            retries += 1
+            logger.warning(f"Empty response detected, retrying ({retries}/{self.MAX_RETRIES})")
+            # Nudge the model with an extra human message, then retry
+            retry_messages = list(request.messages) + [
+                HumanMessage(content="Please continue your response.")
+            ]
+            request = request.override(messages=retry_messages)
+            result = handler(request)
+        return result
+
+    async def awrap_model_call(self, request, handler):
+        result = await handler(request)
+        retries = 0
+        while self._is_empty_response(result) and retries < self.MAX_RETRIES:
+            retries += 1
+            logger.warning(f"Empty response detected, retrying ({retries}/{self.MAX_RETRIES})")
+            retry_messages = list(request.messages) + [
+                HumanMessage(content="Please continue your response.")
+            ]
+            request = request.override(messages=retry_messages)
+            result = await handler(request)
+        return result
+
+
 # Utility functions
 def read_system_prompt():
     """Read the generic, stateless system prompt."""
@@ -220,6 +266,8 @@ async def init_agent(config: AgentConfig):
     # Build the middleware list
     middleware = []
+    # Add the empty-response retry middleware (executes first, wraps at the outermost layer)
+    middleware.append(EmptyResponseRetryMiddleware())
     # Add ToolUseCleanupMiddleware to clean up orphaned tool_use blocks
     middleware.append(ToolUseCleanupMiddleware())
     # Add the middleware that caps tool output length
@@ -407,22 +455,22 @@ def create_custom_cli_agent(
         # Use LocalShellBackend for filesystem + shell execution
         backend = LocalShellBackend(
             root_dir=workspace_root,
-            virtual_mode=True,
+            virtual_mode=False,
             inherit_env=True,
             env=final_shell_env,
         )
     else:
         # No shell access - use plain FilesystemBackend
-        backend = FilesystemBackend(root_dir=workspace_root, virtual_mode=True)
+        backend = FilesystemBackend(root_dir=workspace_root, virtual_mode=False)
     # Set up composite backend with routing (mirrors the new-version implementation)
     large_results_backend = FilesystemBackend(
         root_dir=tempfile.mkdtemp(prefix="deepagents_large_results_"),
-        virtual_mode=True,
+        virtual_mode=False,
     )
     conversation_history_backend = FilesystemBackend(
         root_dir=tempfile.mkdtemp(prefix="deepagents_conversation_history_"),
-        virtual_mode=True,
+        virtual_mode=False,
     )
     composite_backend = CompositeBackend(
         default=backend,
@@ -438,7 +486,7 @@ def create_custom_cli_agent(
         agent_middleware.append(
             CustomSkillsMiddleware(
-                backend=FilesystemBackend(root_dir=workspace_root, virtual_mode=True),
+                backend=FilesystemBackend(root_dir=workspace_root, virtual_mode=False),
                 sources=skills_sources,
             )
         )
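
For orientation, `CompositeBackend` appears to dispatch each path either to a prefix-matched route or to the default backend. Here is a rough sketch of that dispatch idea in plain Python; the route prefixes are illustrative guesses, not taken from the diff, and the real deepagents implementation may differ:

```python
class PrefixRouter:
    """Toy model of CompositeBackend-style dispatch (assumed semantics)."""

    def __init__(self, default, routes):
        self.default = default  # used when no route prefix matches
        self.routes = routes    # mapping of path prefix -> backend

    def resolve(self, path: str):
        for prefix, backend in self.routes.items():
            if path.startswith(prefix):
                return backend
        return self.default

router = PrefixRouter(
    default="workspace-backend",
    routes={
        "/large_results/": "large-results-backend",   # hypothetical prefix
        "/conversation_history/": "history-backend",  # hypothetical prefix
    },
)
print(router.resolve("/large_results/out.json"))  # -> large-results-backend
print(router.resolve("/src/main.py"))             # -> workspace-backend
```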

View File

@@ -41,16 +41,20 @@ class LoggingCallbackHandler(BaseCallbackHandler):
         if hasattr(response, 'generations') and response.generations:
             for gen_idx, generation_list in enumerate(response.generations):
                 for msg_idx, generation in enumerate(generation_list):
-                    if hasattr(generation, 'text'):
-                        output_list = generation.text.split("\n")
-                        for i, output in enumerate(output_list):
-                            if output.strip():
-                                self.logger.info(f"{output}")
-                    elif hasattr(generation, 'message'):
-                        output_list = generation.message.split("\n")
-                        for i, output in enumerate(output_list):
-                            if output.strip():
-                                self.logger.info(f"{output}")
+                    # ChatGeneration: read the content from the text attribute
+                    if hasattr(generation, 'text') and generation.text:
+                        for line in generation.text.split("\n"):
+                            if line.strip():
+                                self.logger.info(f"  {line}")
+                    # If a message attribute is present, log extra info (tool_calls, etc.)
+                    if hasattr(generation, 'message') and generation.message:
+                        msg = generation.message
+                        content = msg.content if hasattr(msg, 'content') else ''
+                        if not content or (isinstance(content, str) and not content.strip()):
+                            self.logger.info("  [EMPTY content]")
+                        tool_calls = msg.tool_calls if hasattr(msg, 'tool_calls') else []
+                        if tool_calls:
+                            self.logger.info(f"  [tool_calls: {[tc.get('name', '') for tc in tool_calls]}]")
 
     def on_llm_error(
         self, error: Exception, **kwargs: Any
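
The `[EMPTY content]` branch above flags exactly the messages that `EmptyResponseRetryMiddleware` retries on. A small standalone check of that predicate against `langchain_core` messages (assuming `langchain_core` is installed):

```python
from langchain_core.messages import AIMessage

def is_empty(msg: AIMessage) -> bool:
    # Empty means: no non-whitespace text AND no tool calls.
    content = msg.content or ""
    has_text = bool(content.strip()) if isinstance(content, str) else bool(content)
    return not has_text and len(msg.tool_calls) == 0

print(is_empty(AIMessage(content="")))    # True  -> would be retried
print(is_empty(AIMessage(content="hi")))  # False
print(is_empty(AIMessage(                 # False: a tool call counts as content
    content="",
    tool_calls=[{"name": "search", "args": {}, "id": "call_1"}],
)))
```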

View File

@@ -60,7 +60,7 @@ from deepagents.backends import LocalShellBackend
 # Create a backend with custom environment variable support
 backend = LocalShellBackend(
     root_dir=workspace_root,
-    virtual_mode=True,
+    virtual_mode=False,
     env={"ASSISTANT_ID": "xxx", "USER_IDENTIFIER": "yyy"},  # custom environment variables
     inherit_env=True,  # inherit parent-process environment variables
 )
@@ -92,7 +92,7 @@ backend = LocalShellBackend(
 ```python
 # Current implementation
 composite_backend = CompositeBackend(
-    default=FilesystemBackend(root_dir=workspace_root, virtual_mode=True),
+    default=FilesystemBackend(root_dir=workspace_root, virtual_mode=False),
     routes={},
 )
 ```
@@ -120,14 +120,14 @@ from deepagents.backends import LocalShellBackend
 # Create a backend with custom environment variables
 shell_backend = LocalShellBackend(
     root_dir=workspace_root,
-    virtual_mode=True,
+    virtual_mode=False,
     env=shell_env,
     inherit_env=True,
 )
 # Or route via CompositeBackend
 composite_backend = CompositeBackend(
-    default=FilesystemBackend(root_dir=workspace_root, virtual_mode=True),
+    default=FilesystemBackend(root_dir=workspace_root, virtual_mode=False),
     routes={
         "/shell/": shell_backend,  # shell command route
     },
@@ -209,7 +209,7 @@ if enable_shell:
     final_shell_env = shell_env or {}
     shell_backend = LocalShellBackend(
         root_dir=workspace_root,
-        virtual_mode=True,
+        virtual_mode=False,
         env=final_shell_env,
         inherit_env=True,  # inherit os.environ
     )
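
The docs rely on `env` plus `inherit_env=True` throughout. Presumably the custom entries are layered over the inherited parent environment; here is a sketch under that assumption (the helper name `build_shell_env` is made up for illustration):

```python
import os

def build_shell_env(env: dict, inherit_env: bool) -> dict:
    # Assumed semantics: start from os.environ when inherit_env is set,
    # then let the custom entries win on key collisions.
    merged = dict(os.environ) if inherit_env else {}
    merged.update(env)
    return merged

shell_env = build_shell_env({"ASSISTANT_ID": "xxx", "USER_IDENTIFIER": "yyy"}, inherit_env=True)
print(shell_env["ASSISTANT_ID"])  # -> xxx
```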

View File

@@ -512,7 +512,7 @@ async def chat_completions(request: ChatRequest, authorization: Optional[str] =
     project_dir = create_project_directory(request.dataset_ids, bot_id, request.skills)
     # Collect the extra parameters as generate_cfg
-    exclude_fields = {'messages', 'model', 'model_server', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings', 'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking', 'skills', 'enable_memory', 'n', 'shell_env'}
+    exclude_fields = {'messages', 'model', 'model_server', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings', 'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking', 'skills', 'enable_memory', 'n', 'shell_env', 'max_tokens'}
     generate_cfg = {k: v for k, v in request.model_dump().items() if k not in exclude_fields}
     # Process the messages
     messages = process_messages(request.messages, request.language)
@@ -666,7 +666,7 @@ async def chat_warmup_v2(request: ChatRequestV2, authorization: Optional[str] =
     messages = process_messages(empty_messages, request.language or "ja")
     # Collect the extra parameters as generate_cfg
-    exclude_fields = {'messages', 'stream', 'tool_response', 'bot_id', 'language', 'user_identifier', 'session_id', 'n', 'model', 'model_server', 'api_key', 'shell_env'}
+    exclude_fields = {'messages', 'stream', 'tool_response', 'bot_id', 'language', 'user_identifier', 'session_id', 'n', 'model', 'model_server', 'api_key', 'shell_env', 'max_tokens'}
     generate_cfg = {k: v for k, v in request.model_dump().items() if k not in exclude_fields}
     # Extract model/model_server/api_key from the request (takes precedence over bot_config; "whatever" and empty values are excluded)
     req_data = request.model_dump()
@@ -773,7 +773,7 @@ async def chat_completions_v2(request: ChatRequestV2, authorization: Optional[st
     # Process the messages
     messages = process_messages(request.messages, request.language)
     # Collect the extra parameters as generate_cfg
-    exclude_fields = {'messages', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings', 'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking', 'skills', 'enable_memory', 'n', 'model', 'model_server', 'api_key', 'shell_env'}
+    exclude_fields = {'messages', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings', 'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking', 'skills', 'enable_memory', 'n', 'model', 'model_server', 'api_key', 'shell_env', 'max_tokens'}
     generate_cfg = {k: v for k, v in request.model_dump().items() if k not in exclude_fields}
     # Extract model/model_server/api_key from the request (takes precedence over bot_config; "whatever" and empty values are excluded)
     req_data = request.model_dump()
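
All three endpoints build `generate_cfg` the same way: dump the request model, then drop routing and control fields so only generation parameters pass through. A minimal reproduction with a hypothetical, trimmed-down request model (pydantic v2 assumed):

```python
from typing import Optional
from pydantic import BaseModel

class ChatRequestDemo(BaseModel):
    """Hypothetical stand-in for ChatRequest (illustration only)."""
    messages: list = []
    model: str = "whatever"
    max_tokens: Optional[int] = None
    temperature: float = 0.7
    top_p: float = 1.0

exclude_fields = {'messages', 'model', 'max_tokens'}
request = ChatRequestDemo(temperature=0.2, max_tokens=512)
generate_cfg = {k: v for k, v in request.model_dump().items() if k not in exclude_fields}
print(generate_cfg)  # -> {'temperature': 0.2, 'top_p': 1.0}
```

With `'max_tokens'` added to `exclude_fields`, it no longer leaks into `generate_cfg`, presumably so it can be handled explicitly elsewhere.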