import json
import logging
import os

from fastapi import APIRouter, HTTPException

from task_queue.task_status import task_status_store

logger = logging.getLogger('app')

router = APIRouter()


@router.get("/api/v1/projects")
|
||
async def list_all_projects():
|
||
"""获取所有项目列表"""
|
||
try:
|
||
# 获取机器人项目(projects/robot)
|
||
robot_dir = "projects/robot"
|
||
robot_projects = []
|
||
|
||
if os.path.exists(robot_dir):
|
||
for item in os.listdir(robot_dir):
|
||
item_path = os.path.join(robot_dir, item)
|
||
if os.path.isdir(item_path):
|
||
try:
|
||
# 读取机器人配置文件
|
||
config_path = os.path.join(item_path, "robot_config.json")
|
||
config_data = {}
|
||
if os.path.exists(config_path):
|
||
with open(config_path, 'r', encoding='utf-8') as f:
|
||
config_data = json.load(f)
|
||
|
||
# 统计文件数量
|
||
file_count = 0
|
||
if os.path.exists(os.path.join(item_path, "dataset")):
|
||
for root, dirs, files in os.walk(os.path.join(item_path, "dataset")):
|
||
file_count += len(files)
|
||
|
||
robot_projects.append({
|
||
"id": item,
|
||
"name": config_data.get("name", item),
|
||
"type": "robot",
|
||
"status": config_data.get("status", "active"),
|
||
"file_count": file_count,
|
||
"config": config_data,
|
||
"created_at": os.path.getctime(item_path),
|
||
"updated_at": os.path.getmtime(item_path)
|
||
})
|
||
except Exception as e:
|
||
logger.error(f"Error reading robot project {item}: {str(e)}")
|
||
robot_projects.append({
|
||
"id": item,
|
||
"name": item,
|
||
"type": "robot",
|
||
"status": "unknown",
|
||
"file_count": 0,
|
||
"created_at": os.path.getctime(item_path),
|
||
"updated_at": os.path.getmtime(item_path)
|
||
})
|
||
|
||
# 获取数据集(projects/data)
|
||
data_dir = "projects/data"
|
||
datasets = []
|
||
|
||
if os.path.exists(data_dir):
|
||
for item in os.listdir(data_dir):
|
||
item_path = os.path.join(data_dir, item)
|
||
if os.path.isdir(item_path):
|
||
try:
|
||
# 读取处理日志
|
||
log_path = os.path.join(item_path, "processing_log.json")
|
||
log_data = {}
|
||
if os.path.exists(log_path):
|
||
with open(log_path, 'r', encoding='utf-8') as f:
|
||
log_data = json.load(f)
|
||
|
||
# 统计文件数量
|
||
file_count = 0
|
||
for root, dirs, files in os.walk(item_path):
|
||
file_count += len([f for f in files if not f.endswith('.pkl')])
|
||
|
||
# 获取状态
|
||
status = "active"
|
||
if log_data.get("status"):
|
||
status = log_data["status"]
|
||
elif os.path.exists(os.path.join(item_path, "processed")):
|
||
status = "completed"
|
||
|
||
datasets.append({
|
||
"id": item,
|
||
"name": f"数据集 - {item[:8]}...",
|
||
"type": "dataset",
|
||
"status": status,
|
||
"file_count": file_count,
|
||
"log_data": log_data,
|
||
"created_at": os.path.getctime(item_path),
|
||
"updated_at": os.path.getmtime(item_path)
|
||
})
|
||
except Exception as e:
|
||
logger.error(f"Error reading dataset {item}: {str(e)}")
|
||
datasets.append({
|
||
"id": item,
|
||
"name": f"数据集 - {item[:8]}...",
|
||
"type": "dataset",
|
||
"status": "unknown",
|
||
"file_count": 0,
|
||
"created_at": os.path.getctime(item_path),
|
||
"updated_at": os.path.getmtime(item_path)
|
||
})
|
||
|
||
all_projects = robot_projects + datasets
|
||
|
||
return {
|
||
"success": True,
|
||
"message": "项目列表获取成功",
|
||
"total_projects": len(all_projects),
|
||
"robot_projects": robot_projects,
|
||
"datasets": datasets,
|
||
"projects": all_projects # 保持向后兼容
|
||
}
|
||
|
||
except Exception as e:
|
||
logger.error(f"Error listing projects: {str(e)}")
|
||
raise HTTPException(status_code=500, detail=f"获取项目列表失败: {str(e)}")
|
||
|
||
|
||
@router.get("/api/v1/projects/robot")
|
||
async def list_robot_projects():
|
||
"""获取机器人项目列表"""
|
||
try:
|
||
response = await list_all_projects()
|
||
return {
|
||
"success": True,
|
||
"message": "机器人项目列表获取成功",
|
||
"total_projects": len(response["robot_projects"]),
|
||
"projects": response["robot_projects"]
|
||
}
|
||
except Exception as e:
|
||
logger.error(f"Error listing robot projects: {str(e)}")
|
||
raise HTTPException(status_code=500, detail=f"获取机器人项目列表失败: {str(e)}")
|
||
|
||
|
||
@router.get("/api/v1/projects/datasets")
|
||
async def list_datasets():
|
||
"""获取数据集列表"""
|
||
try:
|
||
response = await list_all_projects()
|
||
return {
|
||
"success": True,
|
||
"message": "数据集列表获取成功",
|
||
"total_projects": len(response["datasets"]),
|
||
"projects": response["datasets"]
|
||
}
|
||
except Exception as e:
|
||
logger.error(f"Error listing datasets: {str(e)}")
|
||
raise HTTPException(status_code=500, detail=f"获取数据集列表失败: {str(e)}")
|
||
|
||
|
||
@router.get("/api/v1/projects/{dataset_id}/tasks")
|
||
async def get_project_tasks(dataset_id: str):
|
||
"""获取指定项目的所有任务"""
|
||
try:
|
||
tasks = task_status_store.get_by_unique_id(dataset_id)
|
||
|
||
return {
|
||
"success": True,
|
||
"message": "项目任务获取成功",
|
||
"dataset_id": dataset_id,
|
||
"total_tasks": len(tasks),
|
||
"tasks": tasks
|
||
}
|
||
|
||
except Exception as e:
|
||
logger.error(f"Error getting project tasks: {str(e)}")
|
||
raise HTTPException(status_code=500, detail=f"获取项目任务失败: {str(e)}") |