feat(deep-agent): add skills support and improve project structure

- Add skills parameter to ChatRequest for skill file processing
- Extract and unzip skill files to robot project skills directory
- Add robot_config.json with bot_id and environment variables
- Update symlink setup to skip if ~/.deepagents already exists
- Enhance system prompt with directory access restrictions
- Refactor _get_robot_dir to handle symlink paths correctly

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
朱潮 2025-12-31 13:21:58 +08:00
parent c808517f02
commit 766b9becda
7 changed files with 129 additions and 79 deletions

View File

@ -1,14 +1,5 @@
[
{
"mcpServers": {
"rag_retrieve": {
"transport": "stdio",
"command": "python",
"args": [
"./mcp/rag_retrieve_server.py",
"{bot_id}"
]
}
}
"mcpServers": {}
}
]

View File

@ -1,5 +1,8 @@
<env>
<env>
Working directory: {agent_dir_path}
Current User: {user_identifier}
Current Time: {datetime}
</env>
### Current Working Directory
@ -8,6 +11,13 @@ The filesystem backend is currently operating in: `{agent_dir_path}`
### File System and Paths
**CRITICAL - Directory Access Restriction:**
- You are **ONLY** allowed to access files and directories within `{agent_dir_path}`
- **NEVER** attempt to access files outside this directory (e.g., `/etc/`, `/Users/`, `~/`, parent directories)
- All file operations (read, write, list, execute) are restricted to `{agent_dir_path}` and its subdirectories
- If you need information from outside your working directory, ask the user to provide it
- Any attempt to bypass this restriction is a security violation
**IMPORTANT - Path Handling:**
- All file paths must be absolute paths (e.g., `{agent_dir_path}/file.txt`)
- Use the working directory from <env> to construct absolute paths
@ -56,7 +66,3 @@ When using the write_todos tool:
6. Update todo status promptly as you complete each item
The todo list is a planning tool - use it judiciously to avoid overwhelming the user with excessive task tracking.
## System Information
- **Current User**: {user_identifier}
- **Current Time**: {datetime}

View File

@ -286,10 +286,10 @@ async def chat_completions(request: ChatRequest, authorization: Optional[str] =
raise HTTPException(status_code=400, detail="bot_id is required")
# 创建项目目录如果有dataset_ids且不是agent类型
project_dir = create_project_directory(request.dataset_ids, bot_id, request.robot_type)
project_dir = create_project_directory(request.dataset_ids, bot_id, request.robot_type, request.skills)
# 收集额外参数作为 generate_cfg
exclude_fields = {'messages', 'model', 'model_server', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings' ,'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking'}
exclude_fields = {'messages', 'model', 'model_server', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings' ,'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking', 'skills'}
generate_cfg = {k: v for k, v in request.model_dump().items() if k not in exclude_fields}
# 处理消息
messages = process_messages(request.messages, request.language)
@ -336,10 +336,10 @@ async def chat_warmup_v1(request: ChatRequest, authorization: Optional[str] = He
raise HTTPException(status_code=400, detail="bot_id is required")
# 创建项目目录如果有dataset_ids且不是agent类型
project_dir = create_project_directory(request.dataset_ids, bot_id, request.robot_type)
project_dir = create_project_directory(request.dataset_ids, bot_id, request.robot_type, request.skills)
# 收集额外参数作为 generate_cfg
exclude_fields = {'messages', 'model', 'model_server', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings' ,'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking'}
exclude_fields = {'messages', 'model', 'model_server', 'dataset_ids', 'language', 'tool_response', 'system_prompt', 'mcp_settings' ,'stream', 'robot_type', 'bot_id', 'user_identifier', 'session_id', 'enable_thinking', 'skills'}
generate_cfg = {k: v for k, v in request.model_dump().items() if k not in exclude_fields}
# 创建一个空的消息列表用于预热实际消息不会在warmup中处理
@ -431,11 +431,12 @@ async def chat_warmup_v2(request: ChatRequestV2, authorization: Optional[str] =
# 从后端API获取机器人配置使用v2的鉴权方式
bot_config = await fetch_bot_config(bot_id)
# 创建项目目录从后端配置获取dataset_ids
# 创建项目目录从后端配置获取dataset_ids和skills
project_dir = create_project_directory(
bot_config.get("dataset_ids", []),
bot_id,
bot_config.get("robot_type", "general_agent")
bot_config.get("robot_type", "general_agent"),
bot_config.get("skills")
)
# 创建一个空的消息列表用于预热实际消息不会在warmup中处理
@ -533,11 +534,12 @@ async def chat_completions_v2(request: ChatRequestV2, authorization: Optional[st
# 从后端API获取机器人配置使用v2的鉴权方式
bot_config = await fetch_bot_config(bot_id)
# 创建项目目录从后端配置获取dataset_ids
# 创建项目目录从后端配置获取dataset_ids和skills
project_dir = create_project_directory(
bot_config.get("dataset_ids", []),
bot_id,
bot_config.get("robot_type", "general_agent")
bot_config.get("robot_type", "general_agent"),
bot_config.get("skills")
)
# 处理消息
messages = process_messages(request.messages, request.language)

View File

@ -54,6 +54,7 @@ class ChatRequest(BaseModel):
user_identifier: Optional[str] = ""
session_id: Optional[str] = None
enable_thinking: Optional[bool] = DEFAULT_THINKING_ENABLE
skills: Optional[List[str]] = None
class ChatRequestV2(BaseModel):

View File

@ -364,7 +364,7 @@ def format_messages_to_chat_history(messages: List[Dict[str, str]]) -> str:
return "\n".join(recent_chat_history)
def create_project_directory(dataset_ids: Optional[List[str]], bot_id: str, robot_type: str = "general_agent") -> Optional[str]:
def create_project_directory(dataset_ids: Optional[List[str]], bot_id: str, robot_type: str = "general_agent", skills: Optional[List[str]] = None) -> Optional[str]:
"""创建项目目录的公共逻辑"""
# 只有当 robot_type == "catalog_agent" 且 dataset_ids 不为空时才创建目录
@ -374,7 +374,7 @@ def create_project_directory(dataset_ids: Optional[List[str]], bot_id: str, robo
try:
from utils.multi_project_manager import create_robot_project
from pathlib import Path
return create_robot_project(dataset_ids, bot_id, Path("~", ".deepagents"))
return create_robot_project(dataset_ids, bot_id, project_path=Path("~", ".deepagents"), skills=skills)
except Exception as e:
logger.error(f"Error creating project directory: {e}")
return None

View File

@ -15,6 +15,7 @@ from datetime import datetime
logger = logging.getLogger('app')
from utils.file_utils import get_document_preview
from utils import settings
def generate_robot_directory_tree(robot_dir: str, robot_id: str, max_depth: int = 3) -> str:
@ -301,6 +302,26 @@ def generate_robot_readme(robot_id: str, dataset_ids: List[str], copy_results: L
return str(readme_path)
def _get_robot_dir(project_path: Path, bot_id: str) -> Path:
"""
获取 robot 目录路径处理软链接情况
Args:
project_path: 项目路径
bot_id: 机器人ID
Returns:
Path: robot 目录路径已展开
"""
resolved_path = project_path.expanduser().resolve()
if resolved_path.name == "robot":
# project_path 已经指向 robot 目录(如 ~/.deepagents -> projects/robot
return (project_path / bot_id).expanduser()
else:
# project_path 指向 projects 目录
return (project_path / "robot" / bot_id).expanduser()
def should_rebuild_robot_project(dataset_ids: List[str], bot_id: str, project_path: Path) -> bool:
"""
检查是否需要重建机器人项目
@ -316,7 +337,7 @@ def should_rebuild_robot_project(dataset_ids: List[str], bot_id: str, project_pa
Returns:
bool: 是否需要重建
"""
robot_dir = project_path / "robot" / bot_id
robot_dir = _get_robot_dir(project_path, bot_id)
# 如果机器人项目不存在,需要创建
if not robot_dir.exists():
@ -375,7 +396,7 @@ def should_rebuild_robot_project(dataset_ids: List[str], bot_id: str, project_pa
return False
def create_robot_project(dataset_ids: List[str], bot_id: str, force_rebuild: bool = False, project_path: Path = Path("projects")) -> str:
def create_robot_project(dataset_ids: List[str], bot_id: str, force_rebuild: bool = False, project_path: Path = Path("projects"), skills: Optional[List[str]] = None) -> str:
"""
创建机器人项目合并多个源项目的dataset文件夹
@ -383,20 +404,24 @@ def create_robot_project(dataset_ids: List[str], bot_id: str, force_rebuild: boo
dataset_ids: 源项目ID列表
bot_id: 机器人ID
force_rebuild: 是否强制重建
skills: 技能文件名列表 ["rag-retrieve", "device_controller.zip"]
Returns:
str: 机器人项目目录路径
"""
logger.info(f"Creating robot project: {bot_id} from sources: {dataset_ids}")
logger.info(f"Creating robot project: {bot_id} from sources: {dataset_ids}, skills: {skills}")
# 检查是否需要重建
if not force_rebuild and not should_rebuild_robot_project(dataset_ids, bot_id, project_path):
robot_dir = project_path / "robot" / bot_id
logger.info(f"Using existing robot project: {robot_dir}")
# 即使使用现有项目,也要处理 skills如果提供了
if skills:
_extract_skills_to_robot(robot_dir, skills, project_path)
return str(robot_dir)
# 创建机器人目录结构
robot_dir = project_path / "robot" / bot_id
robot_dir = _get_robot_dir(project_path, bot_id)
dataset_dir = robot_dir / "dataset"
# 清理已存在的目录(如果需要)
@ -435,6 +460,11 @@ def create_robot_project(dataset_ids: List[str], bot_id: str, force_rebuild: boo
config_file = robot_dir / "robot_config.json"
config_data = {
"dataset_ids": dataset_ids,
"bot_id": bot_id,
"env": {
"backend_host": settings.BACKEND_HOST,
"masterkey": settings.MASTERKEY
},
"created_at": datetime.now().isoformat(),
"total_folders": len(copy_results),
"successful_copies": sum(1 for r in copy_results if r["success"])
@ -455,6 +485,10 @@ def create_robot_project(dataset_ids: List[str], bot_id: str, force_rebuild: boo
logger.info(f" Config saved: {config_file}")
logger.info(f" README generated: {readme_path}")
# 处理 skills 解压
if skills:
_extract_skills_to_robot(robot_dir, skills, project_path)
return str(robot_dir)
@ -466,3 +500,54 @@ if __name__ == "__main__":
robot_dir = create_robot_project(test_dataset_ids, test_bot_id)
logger.info(f"Created robot project at: {robot_dir}")
def _extract_skills_to_robot(robot_dir: Path, skills: List[str], project_path: Path) -> None:
    """
    Extract skill zip archives into the robot project's ``skills`` folder.

    Args:
        robot_dir: Robot project directory.
        skills: Skill file names, e.g. ``["rag-retrieve", "device_controller.zip"]``.
        project_path: Project path; may be a symlink such as
            ``~/.deepagents -> projects/robot``.
    """
    import zipfile

    # The skills source directory lives at projects/skills. project_path may be
    # a symlink (~/.deepagents -> projects/robot), so resolve it first and take
    # the parent to locate the sibling "skills" directory.
    resolved_path = project_path.expanduser().resolve()
    skills_source_dir = resolved_path.parent / "skills"
    skills_target_dir = robot_dir / "skills"

    # Start from a clean target directory so stale skills are removed before
    # re-extracting.
    if skills_target_dir.exists():
        logger.info(f" Removing existing skills directory: {skills_target_dir}")
        shutil.rmtree(skills_target_dir)
    skills_target_dir.mkdir(parents=True, exist_ok=True)

    logger.info(f"Extracting skills to {skills_target_dir}")
    for skill in skills:
        # Normalize the file name (ensure a .zip suffix).
        skill_file = skill if skill.endswith(".zip") else skill + ".zip"

        skill_source_path = skills_source_dir / skill_file
        if not skill_source_path.exists():
            logger.warning(f" Skill file not found: {skill_source_path}")
            continue

        # Target folder name: archive name minus its final ".zip" extension.
        # Path.stem strips only the last suffix; the previous
        # str.replace(".zip", "") removed EVERY occurrence and would corrupt
        # names like "my.zip.tool.zip".
        folder_name = Path(skill_file).stem
        extract_target = skills_target_dir / folder_name

        # NOTE(security): extractall() does not guard against zip-slip
        # (entries with ".." or absolute paths). Skill archives must come from
        # a trusted source; validate member names before extracting otherwise.
        try:
            with zipfile.ZipFile(skill_source_path, 'r') as zip_ref:
                zip_ref.extractall(extract_target)
            logger.info(f" Extracted: {skill_file} -> {extract_target}")
        except Exception as e:
            # Best-effort: a broken archive must not abort the remaining skills.
            logger.error(f" Failed to extract {skill_file}: {e}")

View File

@ -24,47 +24,12 @@ def setup_deepagents_symlink():
# Create robot directory if it doesn't exist
robot_dir.mkdir(parents=True, exist_ok=True)
# If ~/.deepagents already exists and is not a symlink, backup and remove it
if deepagents_dir.exists() and not deepagents_dir.is_symlink():
backup_dir = deepagents_dir.parent / f"{deepagents_dir.name}.backup"
logger.warning(f"~/.deepagents directory exists but is not a symlink.")
logger.warning(f"Creating backup at {backup_dir}")
try:
# Create backup
import shutil
if backup_dir.exists():
shutil.rmtree(backup_dir)
shutil.move(str(deepagents_dir), str(backup_dir))
logger.info(f"Successfully backed up existing directory to {backup_dir}")
except Exception as backup_error:
logger.error(f"Failed to backup existing directory: {backup_error}")
logger.error("Please manually remove or backup ~/.deepagents to proceed")
return False
# If ~/.deepagents is already a symlink pointing to the right place, do nothing
if deepagents_dir.is_symlink():
target = deepagents_dir.resolve()
if target == robot_dir.resolve():
logger.info(f"~/.deepagents already points to {robot_dir}")
return True
else:
# Remove existing symlink pointing elsewhere
deepagents_dir.unlink()
logger.info(f"Removed existing symlink pointing to {target}")
# If ~/.deepagents already exists, do nothing
if deepagents_dir.exists():
logger.info(f"~/.deepagents already exists at {deepagents_dir}, skipping symlink creation")
return True
# Create the symbolic link
# Check again before creating to handle race conditions
if deepagents_dir.is_symlink() or deepagents_dir.exists():
logger.warning(f"Path {deepagents_dir} exists, attempting to remove before symlink")
if deepagents_dir.is_symlink():
deepagents_dir.unlink()
elif deepagents_dir.is_dir():
import shutil
shutil.rmtree(str(deepagents_dir))
else:
deepagents_dir.unlink()
os.symlink(robot_dir, deepagents_dir, target_is_directory=True)
logger.info(f"Created symbolic link: {deepagents_dir} -> {robot_dir}")
return True