配置监控和日志系统

This commit is contained in:
2025-08-17 22:10:51 +08:00
parent 799416335b
commit 23a5c907f7
8 changed files with 1706 additions and 1 deletions

216
log_manager.py Normal file
View File

@@ -0,0 +1,216 @@
#!/usr/bin/env python3
"""
Flask 提示词大师 - 日志管理脚本
用于日志轮转、清理和压缩
"""
import os
import sys
import gzip
import shutil
import logging
from datetime import datetime, timedelta
from pathlib import Path
# Make the project directory importable: prepend the directory containing
# this script to sys.path so sibling project modules resolve when the
# script is run directly (rather than as part of a package).
project_root = Path(__file__).parent
sys.path.insert(0, str(project_root))
def setup_logging():
    """Configure logging for the log-management script and return its logger.

    Ensures the ``logs`` directory exists, then routes records (INFO and up)
    to both a UTF-8 file ``logs/log_manager.log`` and the console.
    """
    log_dir = project_root / "logs"
    log_dir.mkdir(exist_ok=True)

    file_handler = logging.FileHandler(log_dir / "log_manager.log", encoding='utf-8')
    console_handler = logging.StreamHandler()
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[file_handler, console_handler],
    )
    return logging.getLogger(__name__)
class LogManager:
    """Manage application log files: rotation, gzip archiving, retention
    cleanup, and size statistics.

    Active logs live in ``<project_root>/logs``; rotated files are moved to
    the ``logs/archive`` subdirectory and optionally compressed.
    """

    # Logs produced by this script and the monitor itself. They are excluded
    # from rotation and statistics so maintenance never fights its own output.
    # (Previously this list was duplicated in rotate_logs and get_log_stats.)
    MANAGEMENT_LOGS = ('log_manager.log', 'monitor.log')

    def __init__(self):
        self.logger = setup_logging()
        self.log_dir = project_root / "logs"
        # setup_logging() happens to create log_dir; create it explicitly so
        # archive_dir.mkdir below cannot fail if that side effect changes.
        self.log_dir.mkdir(parents=True, exist_ok=True)
        self.archive_dir = self.log_dir / "archive"
        self.archive_dir.mkdir(exist_ok=True)
        # Retention policy
        self.retention_days = 30                # days to keep archived files
        self.max_file_size = 10 * 1024 * 1024   # rotate files larger than 10MB
        self.compress_files = True              # gzip files after archiving

    def rotate_logs(self):
        """Move oversized log files into the archive directory.

        Files larger than ``max_file_size`` are renamed with a timestamp,
        moved into ``archive_dir`` and (optionally) compressed.

        Returns:
            int: number of files rotated; 0 on failure.
        """
        try:
            self.logger.info("🔄 开始日志轮转...")
            log_files = list(self.log_dir.glob("*.log"))
            rotated_count = 0
            for log_file in log_files:
                if log_file.name in self.MANAGEMENT_LOGS:
                    continue  # never rotate our own management logs
                # Only rotate files that exceeded the size threshold.
                if log_file.stat().st_size > self.max_file_size:
                    # Timestamped archive name keeps successive rotations unique.
                    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
                    archive_name = f"{log_file.stem}_{timestamp}.log"
                    archive_path = self.archive_dir / archive_name
                    shutil.move(str(log_file), str(archive_path))
                    if self.compress_files:
                        self.compress_file(archive_path)
                    self.logger.info(f"📦 轮转日志文件: {log_file.name} -> {archive_name}")
                    rotated_count += 1
            self.logger.info(f"✅ 日志轮转完成,共轮转 {rotated_count} 个文件")
            return rotated_count
        except Exception as e:
            self.logger.error(f"❌ 日志轮转失败: {e}")
            return 0

    def compress_file(self, file_path):
        """Gzip-compress *file_path* in place (``X.log`` -> ``X.log.gz``).

        The source file is deleted only after compression succeeds.
        Fix: a partially written ``.gz`` is removed on failure, so a failed
        run no longer leaves a truncated archive next to the original.
        """
        compressed_path = file_path.with_suffix('.log.gz')
        try:
            with open(file_path, 'rb') as f_in:
                with gzip.open(compressed_path, 'wb') as f_out:
                    shutil.copyfileobj(f_in, f_out)
            # Remove the uncompressed original only after a successful write.
            file_path.unlink()
            self.logger.info(f"🗜️ 压缩文件: {file_path.name} -> {compressed_path.name}")
        except Exception as e:
            # Clean up any partial archive so retries start from a known state.
            if compressed_path.exists():
                compressed_path.unlink()
            self.logger.error(f"❌ 压缩文件失败 {file_path}: {e}")

    def cleanup_old_logs(self):
        """Delete archived files whose mtime is older than ``retention_days``.

        Returns:
            int: number of files deleted; 0 on failure.
        """
        try:
            self.logger.info("🧹 开始清理旧日志...")
            cutoff_date = datetime.now() - timedelta(days=self.retention_days)
            deleted_count = 0
            for file_path in self.archive_dir.iterdir():
                if file_path.is_file():
                    # Age is judged by last-modification time.
                    mtime = datetime.fromtimestamp(file_path.stat().st_mtime)
                    if mtime < cutoff_date:
                        file_path.unlink()
                        self.logger.info(f"🗑️ 删除旧日志: {file_path.name}")
                        deleted_count += 1
            self.logger.info(f"✅ 清理完成,共删除 {deleted_count} 个旧文件")
            return deleted_count
        except Exception as e:
            self.logger.error(f"❌ 清理旧日志失败: {e}")
            return 0

    def get_log_stats(self):
        """Collect size statistics for current and archived log files.

        Returns:
            dict | None: per-file sizes under ``current_logs`` and
            ``archived_logs`` plus ``total_size``, ``total_size_mb`` and
            ``file_count``; ``None`` on error.
        """
        try:
            stats = {
                'current_logs': {},
                'archived_logs': {},
                'total_size': 0,
                'file_count': 0,
            }
            # Active log files; management logs excluded, consistent with rotation.
            for log_file in self.log_dir.glob("*.log"):
                if log_file.name not in self.MANAGEMENT_LOGS:
                    size = log_file.stat().st_size
                    stats['current_logs'][log_file.name] = {
                        'size': size,
                        'size_mb': size / 1024 / 1024,
                    }
                    stats['total_size'] += size
                    stats['file_count'] += 1
            # Archived (possibly gzip-compressed) files.
            for file_path in self.archive_dir.iterdir():
                if file_path.is_file():
                    size = file_path.stat().st_size
                    stats['archived_logs'][file_path.name] = {
                        'size': size,
                        'size_mb': size / 1024 / 1024,
                    }
                    stats['total_size'] += size
                    stats['file_count'] += 1
            stats['total_size_mb'] = stats['total_size'] / 1024 / 1024
            return stats
        except Exception as e:
            self.logger.error(f"❌ 获取日志统计失败: {e}")
            return None

    def print_stats(self):
        """Pretty-print the statistics gathered by :meth:`get_log_stats`."""
        stats = self.get_log_stats()
        if not stats:
            return  # stats collection failed; error was already logged
        print("日志统计信息:")
        print(f" 总文件数: {stats['file_count']}")
        print(f" 总大小: {stats['total_size_mb']:.2f}MB")
        print()
        if stats['current_logs']:
            print("当前日志文件:")
            for name, info in stats['current_logs'].items():
                print(f" {name}: {info['size_mb']:.2f}MB")
            print()
        if stats['archived_logs']:
            print("归档日志文件:")
            for name, info in stats['archived_logs'].items():
                print(f" {name}: {info['size_mb']:.2f}MB")
def main():
    """Command-line entry point.

    With no argument: rotate, clean up, then print statistics.
    Sub-commands ``rotate``, ``cleanup`` and ``stats`` run a single step;
    anything else prints usage.
    """
    log_manager = LogManager()
    command = sys.argv[1] if len(sys.argv) > 1 else None

    # Map each sub-command to its single maintenance action.
    actions = {
        "rotate": log_manager.rotate_logs,
        "cleanup": log_manager.cleanup_old_logs,
        "stats": log_manager.print_stats,
    }

    if command is None:
        # Default: a full maintenance pass.
        log_manager.rotate_logs()
        log_manager.cleanup_old_logs()
        log_manager.print_stats()
    elif command in actions:
        actions[command]()
    else:
        print("用法:")
        print(" python log_manager.py # 轮转和清理")
        print(" python log_manager.py rotate # 只轮转")
        print(" python log_manager.py cleanup # 只清理")
        print(" python log_manager.py stats # 显示统计")


if __name__ == '__main__':
    main()