Merge branch 'feature/moshui20260411-deepagents-0_5_2' into developing

This commit is contained in:
朱潮 2026-04-11 19:59:59 +08:00
commit 3c681c4760
3 changed files with 74 additions and 22 deletions

View File

@ -45,10 +45,11 @@ from langchain_core.language_models import BaseChatModel
from langgraph.pregel import Pregel
# 新版本导入MemoryMiddleware 和 SkillsMiddleware 已迁移到 deepagents.middleware
from deepagents.middleware import MemoryMiddleware, SkillsMiddleware
from langchain.agents.middleware import AgentMiddleware
from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse
from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware
from langchain_core.messages import AIMessage, HumanMessage
from langgraph.types import Checkpointer
from deepagents_cli.config import settings, get_default_coding_instructions
from langchain.agents.middleware import HumanInTheLoopMiddleware, InterruptOnConfig, TodoListMiddleware
from deepagents.middleware.filesystem import FilesystemMiddleware
from deepagents.middleware.patch_tool_calls import PatchToolCallsMiddleware
from langchain_anthropic.middleware import AnthropicPromptCachingMiddleware
@ -63,6 +64,51 @@ from .custom_filesystem_middleware import CustomFilesystemMiddleware
# _global_checkpointer = MemorySaver()
logger = logging.getLogger('app')
class EmptyResponseRetryMiddleware(AgentMiddleware):
    """Automatically retry when the model returns an empty response
    (no text content and no tool calls).

    Wraps both the sync and async model-call paths. When the model yields
    an AIMessage with neither text nor tool calls, the request is re-sent
    with an extra "please continue" user message appended, up to
    ``MAX_RETRIES`` times, after which the last (possibly empty) result is
    returned as-is.
    """

    # Upper bound on retry attempts per model call.
    MAX_RETRIES = 5

    # Nudge appended to the conversation before each retry attempt.
    _RETRY_PROMPT = "Please continue your response."

    def _is_empty_response(self, result: ModelResponse) -> bool:
        """Return True when the model produced neither text nor tool calls."""
        if not result.result:
            return True
        msg = result.result[0]
        if not isinstance(msg, AIMessage):
            # Non-AI messages are never treated as empty.
            return False
        content = msg.content or ""
        # content may be a plain string or a list of content blocks
        # (e.g. Anthropic-style); a non-empty list counts as text.
        has_text = bool(content.strip()) if isinstance(content, str) else bool(content)
        return not has_text and not msg.tool_calls

    def _with_retry_prompt(self, request: ModelRequest) -> ModelRequest:
        """Return a copy of *request* with the retry nudge appended."""
        retry_messages = list(request.messages) + [
            HumanMessage(content=self._RETRY_PROMPT)
        ]
        return request.override(messages=retry_messages)

    def wrap_model_call(self, request, handler):
        """Sync wrapper: call the model, retrying while the response is empty."""
        result = handler(request)
        retries = 0
        while self._is_empty_response(result) and retries < self.MAX_RETRIES:
            retries += 1
            # Lazy %-args: no string formatting when WARNING is disabled.
            logger.warning(
                "Empty response detected, retrying (%d/%d)", retries, self.MAX_RETRIES
            )
            request = self._with_retry_prompt(request)
            result = handler(request)
        return result

    async def awrap_model_call(self, request, handler):
        """Async wrapper: same retry policy as :meth:`wrap_model_call`."""
        result = await handler(request)
        retries = 0
        while self._is_empty_response(result) and retries < self.MAX_RETRIES:
            retries += 1
            logger.warning(
                "Empty response detected, retrying (%d/%d)", retries, self.MAX_RETRIES
            )
            request = self._with_retry_prompt(request)
            result = await handler(request)
        return result
# Utility functions
def read_system_prompt():
"""读取通用的无状态系统prompt"""
@ -220,6 +266,8 @@ async def init_agent(config: AgentConfig):
# 构建中间件列表
middleware = []
# 添加空响应重试中间件(最先执行,最外层包裹)
middleware.append(EmptyResponseRetryMiddleware())
# 首先添加 ToolUseCleanupMiddleware 来清理孤立的 tool_use
middleware.append(ToolUseCleanupMiddleware())
# 添加工具输出长度控制中间件
@ -405,22 +453,22 @@ def create_custom_cli_agent(
# Use LocalShellBackend for filesystem + shell execution
backend = LocalShellBackend(
root_dir=workspace_root,
virtual_mode=True,
virtual_mode=False,
inherit_env=True,
env=final_shell_env,
)
else:
# No shell access - use plain FilesystemBackend
backend = FilesystemBackend(root_dir=workspace_root, virtual_mode=True)
backend = FilesystemBackend(root_dir=workspace_root, virtual_mode=False)
# Set up composite backend with routing (参考新版本实现)
large_results_backend = FilesystemBackend(
root_dir=tempfile.mkdtemp(prefix="deepagents_large_results_"),
virtual_mode=True,
virtual_mode=False,
)
conversation_history_backend = FilesystemBackend(
root_dir=tempfile.mkdtemp(prefix="deepagents_conversation_history_"),
virtual_mode=True,
virtual_mode=False,
)
composite_backend = CompositeBackend(
default=backend,
@ -436,7 +484,7 @@ def create_custom_cli_agent(
agent_middleware.append(
CustomSkillsMiddleware(
backend=FilesystemBackend(root_dir=workspace_root, virtual_mode=True),
backend=FilesystemBackend(root_dir=workspace_root, virtual_mode=False),
sources=skills_sources,
)
)

View File

@ -41,16 +41,20 @@ class LoggingCallbackHandler(BaseCallbackHandler):
if hasattr(response, 'generations') and response.generations:
for gen_idx, generation_list in enumerate(response.generations):
for msg_idx, generation in enumerate(generation_list):
if hasattr(generation, 'text'):
output_list = generation.text.split("\n")
for i, output in enumerate(output_list):
if output.strip():
self.logger.info(f"{output}")
elif hasattr(generation, 'message'):
output_list = generation.message.split("\n")
for i, output in enumerate(output_list):
if output.strip():
self.logger.info(f"{output}")
# ChatGeneration: 使用 text 属性获取内容
if hasattr(generation, 'text') and generation.text:
for line in generation.text.split("\n"):
if line.strip():
self.logger.info(f" {line}")
# 如果有 message 属性输出额外信息tool_calls 等)
if hasattr(generation, 'message') and generation.message:
msg = generation.message
content = msg.content if hasattr(msg, 'content') else ''
if not content or (isinstance(content, str) and not content.strip()):
self.logger.info(f" [EMPTY content]")
tool_calls = msg.tool_calls if hasattr(msg, 'tool_calls') else []
if tool_calls:
self.logger.info(f" [tool_calls: {[tc.get('name', '') for tc in tool_calls]}")
def on_llm_error(
self, error: Exception, **kwargs: Any

View File

@ -60,7 +60,7 @@ from deepagents.backends import LocalShellBackend
# 创建 backend支持自定义环境变量
backend = LocalShellBackend(
root_dir=workspace_root,
virtual_mode=True,
virtual_mode=False,
env={"ASSISTANT_ID": "xxx", "USER_IDENTIFIER": "yyy"}, # 自定义环境变量
inherit_env=True, # 继承父进程环境变量
)
@ -92,7 +92,7 @@ backend = LocalShellBackend(
```python
# 当前实现
composite_backend = CompositeBackend(
default=FilesystemBackend(root_dir=workspace_root, virtual_mode=True),
default=FilesystemBackend(root_dir=workspace_root, virtual_mode=False),
routes={},
)
```
@ -120,14 +120,14 @@ from deepagents.backends import LocalShellBackend
# 创建带自定义环境变量的 backend
shell_backend = LocalShellBackend(
root_dir=workspace_root,
virtual_mode=True,
virtual_mode=False,
env=shell_env,
inherit_env=True,
)
# 或使用 CompositeBackend 路由
composite_backend = CompositeBackend(
default=FilesystemBackend(root_dir=workspace_root, virtual_mode=True),
default=FilesystemBackend(root_dir=workspace_root, virtual_mode=False),
routes={
"/shell/": shell_backend, # shell 命令路由
},
@ -209,7 +209,7 @@ if enable_shell:
final_shell_env = shell_env or {}
shell_backend = LocalShellBackend(
root_dir=workspace_root,
virtual_mode=True,
virtual_mode=False,
env=final_shell_env,
inherit_env=True, # 继承 os.environ
)