From 45911632825d33c71d2e3090df98033ea6270aff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9C=B1=E6=BD=AE?= Date: Sat, 11 Apr 2026 19:15:53 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E7=A9=BA=E5=86=85=E5=AE=B9?= =?UTF-8?q?=E9=87=8D=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- agent/deep_assistant.py | 52 ++++++++++++++++++++++++++++++++++++++-- agent/logging_handler.py | 24 +++++++++++-------- 2 files changed, 64 insertions(+), 12 deletions(-) diff --git a/agent/deep_assistant.py b/agent/deep_assistant.py index 8726a86..b03ff75 100644 --- a/agent/deep_assistant.py +++ b/agent/deep_assistant.py @@ -45,10 +45,11 @@ from langchain_core.language_models import BaseChatModel from langgraph.pregel import Pregel # 新版本导入:MemoryMiddleware 和 SkillsMiddleware 已迁移到 deepagents.middleware from deepagents.middleware import MemoryMiddleware, SkillsMiddleware -from langchain.agents.middleware import AgentMiddleware +from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse +from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware +from langchain_core.messages import AIMessage, HumanMessage from langgraph.types import Checkpointer from deepagents_cli.config import settings, get_default_coding_instructions -from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware from deepagents.middleware.filesystem import FilesystemMiddleware from deepagents.middleware.patch_tool_calls import PatchToolCallsMiddleware from langchain_anthropic.middleware import AnthropicPromptCachingMiddleware @@ -63,6 +64,51 @@ from .custom_filesystem_middleware import CustomFilesystemMiddleware # _global_checkpointer = MemorySaver() logger = logging.getLogger('app') + + +class EmptyResponseRetryMiddleware(AgentMiddleware): + """当模型返回空内容(无文字、无工具调用)时自动重试""" + + MAX_RETRIES = 5 + + def _is_empty_response(self, result: ModelResponse) 
-> bool: + """判断是否为空响应""" + if not result.result: + return True + msg = result.result[0] + if not isinstance(msg, AIMessage): + return False + content = msg.content or "" + has_text = bool(content.strip()) if isinstance(content, str) else bool(content) + return not has_text and len(msg.tool_calls) == 0 + + def wrap_model_call(self, request, handler): + result = handler(request) + retries = 0 + while self._is_empty_response(result) and retries < self.MAX_RETRIES: + retries += 1 + logger.warning(f"Empty response detected, retrying ({retries}/{self.MAX_RETRIES})") + retry_messages = list(request.messages) + [ + HumanMessage(content="Please continue your response.") + ] + request = request.override(messages=retry_messages) + result = handler(request) + return result + + async def awrap_model_call(self, request, handler): + result = await handler(request) + retries = 0 + while self._is_empty_response(result) and retries < self.MAX_RETRIES: + retries += 1 + logger.warning(f"Empty response detected, retrying ({retries}/{self.MAX_RETRIES})") + retry_messages = list(request.messages) + [ + HumanMessage(content="Please continue your response.") + ] + request = request.override(messages=retry_messages) + result = await handler(request) + return result + + # Utility functions def read_system_prompt(): """读取通用的无状态系统prompt""" @@ -220,6 +266,8 @@ async def init_agent(config: AgentConfig): # 构建中间件列表 middleware = [] + # 添加空响应重试中间件(最先执行,最外层包裹) + middleware.append(EmptyResponseRetryMiddleware()) # 首先添加 ToolUseCleanupMiddleware 来清理孤立的 tool_use middleware.append(ToolUseCleanupMiddleware()) # 添加工具输出长度控制中间件 diff --git a/agent/logging_handler.py b/agent/logging_handler.py index de0d3db..26aa6b1 100644 --- a/agent/logging_handler.py +++ b/agent/logging_handler.py @@ -41,16 +41,20 @@ class LoggingCallbackHandler(BaseCallbackHandler): if hasattr(response, 'generations') and response.generations: for gen_idx, generation_list in enumerate(response.generations): for msg_idx, generation in 
enumerate(generation_list): - if hasattr(generation, 'text'): - output_list = generation.text.split("\n") - for i, output in enumerate(output_list): - if output.strip(): - self.logger.info(f"{output}") - elif hasattr(generation, 'message'): - output_list = generation.message.split("\n") - for i, output in enumerate(output_list): - if output.strip(): - self.logger.info(f"{output}") + # ChatGeneration: 使用 text 属性获取内容 + if hasattr(generation, 'text') and generation.text: + for line in generation.text.split("\n"): + if line.strip(): + self.logger.info(f" {line}") + # 如果有 message 属性,输出额外信息(tool_calls 等) + if hasattr(generation, 'message') and generation.message: + msg = generation.message + content = msg.content if hasattr(msg, 'content') else '' + if not content or (isinstance(content, str) and not content.strip()): + self.logger.info(f" [EMPTY content]") + tool_calls = msg.tool_calls if hasattr(msg, 'tool_calls') else [] + if tool_calls: + self.logger.info(f" [tool_calls: {[tc.get('name', '') for tc in tool_calls]}]") def on_llm_error( self, error: Exception, **kwargs: Any