#!/usr/bin/env python3
"""
Configuration file cache module - provides cached asynchronous file reads.

Used to optimize file I/O performance under concurrent requests.
"""
import asyncio
import json
import logging
import os
from typing import Any, Dict, Optional, Tuple

logger = logging.getLogger('app')


class ConfigFileCache:
"""配置文件缓存类
|
||
|
||
提供基于文件修改时间的缓存机制,避免重复读取未修改的文件
|
||
"""
|
||
|
||
def __init__(self):
|
||
self._cache: Dict[str, Tuple[Any, float]] = {} # {file_path: (content, mtime)}
|
||
self._lock = asyncio.Lock()
|
||
|
||

    async def get_text_file(self, file_path: str) -> Optional[str]:
        """Get the contents of a text file, with caching.

        Args:
            file_path: Path to the file.

        Returns:
            The file contents as a string, or None if the file does not
            exist or could not be read.
        """
        if not os.path.exists(file_path):
            return None

        try:
            current_mtime = os.path.getmtime(file_path)
        except OSError as e:
            # The file may have been removed between the existence check
            # and the mtime lookup.
            logger.error(f"Failed to stat file {file_path}: {e}")
            return None

        # Check whether the cache entry is still valid (no lock needed).
        if file_path in self._cache:
            cached_content, cached_mtime = self._cache[file_path]
            if current_mtime == cached_mtime:
                logger.debug(f"Using cached file: {file_path}")
                return cached_content

        # Read the file and update the cache (lock required).
        async with self._lock:
            # Check the cache again: another coroutine may have updated it
            # while we were waiting for the lock.
            if file_path in self._cache:
                cached_content, cached_mtime = self._cache[file_path]
                if current_mtime == cached_mtime:
                    logger.debug(f"Using cached file: {file_path}")
                    return cached_content

            try:
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = f.read()
                self._cache[file_path] = (content, current_mtime)
                logger.debug(f"Cached file: {file_path}")
                return content
            except Exception as e:
                logger.error(f"Failed to read text file {file_path}: {e}")
                return None

    async def get_json_file(self, file_path: str) -> Optional[Dict]:
        """Get the contents of a JSON file, with caching.

        Args:
            file_path: Path to the JSON file.

        Returns:
            The parsed dictionary, or None if the file does not exist,
            could not be read, or could not be parsed as JSON.
        """
        content = await self.get_text_file(file_path)
        if content:
            try:
                return json.loads(content)
            except json.JSONDecodeError as e:
                logger.error(f"Failed to parse JSON {file_path}: {e}")
        return None

    def clear_cache(self, file_path: Optional[str] = None):
        """Clear cached entries.

        Args:
            file_path: Path whose cache entry should be cleared; if None,
                clear all cached entries.
        """
        if file_path:
            self._cache.pop(file_path, None)
            logger.debug(f"Cleared file cache: {file_path}")
        else:
            cleared_count = len(self._cache)
            self._cache.clear()
            logger.debug(f"Cleared all cache entries ({cleared_count} files)")

    def get_cache_stats(self) -> Dict:
        """Get cache statistics.

        Returns:
            A dictionary with cache statistics.
        """
        return {
            "cached_files": len(self._cache),
            "cached_paths": list(self._cache.keys())
        }


# Global cache instance
config_cache = ConfigFileCache()
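

# A minimal usage sketch, assuming the module-level config_cache instance is
# shared across the application. The "config/settings.json" path below is a
# hypothetical example, not a path this module defines.
if __name__ == "__main__":
    async def _demo():
        # The first read hits the disk and fills the cache; a later read with
        # an unchanged mtime would be served from memory.
        settings = await config_cache.get_json_file("config/settings.json")
        print("settings:", settings)
        print("cache stats:", config_cache.get_cache_stats())

    asyncio.run(_demo())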