日志保存
This commit is contained in:
128
app/core/logging_config.py
Normal file
128
app/core/logging_config.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
import logging
|
||||||
|
import logging.handlers
|
||||||
|
import os
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
def setup_logging():
    """Configure the application-wide logging system.

    Creates a ``logs`` directory next to the working directory and installs:
      * a console handler on the root logger,
      * daily-rotating file handlers (midnight rollover, 30 backups, UTF-8)
        for application, error, scheduler, API-access and database logs.

    Scheduler/access/database handlers are attached to their dedicated
    loggers (``apscheduler``, ``app.utils.scheduler``, ``uvicorn.access``,
    ``sqlalchemy.engine``); those loggers still propagate to the root
    logger, so their records also reach the console and app.log.
    """
    # Ensure the log directory exists.
    log_dir = Path("logs")
    log_dir.mkdir(exist_ok=True)

    # Configure the root logger; drop any pre-existing handlers so that
    # calling setup_logging() twice does not duplicate output.
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)

    # Shared format for all non-access logs.
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    def _timed_handler(path, level, fmt):
        # Every file handler shares one rotation policy: roll at midnight,
        # keep 30 days of backups, write UTF-8.
        handler = logging.handlers.TimedRotatingFileHandler(
            path,
            when='midnight',
            interval=1,
            backupCount=30,
            encoding='utf-8'
        )
        handler.setLevel(level)
        handler.setFormatter(fmt)
        return handler

    # 1. Console handler.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(formatter)
    root_logger.addHandler(console_handler)

    # 2. Application log file (daily rotation).
    app_log_file = log_dir / "app.log"
    root_logger.addHandler(_timed_handler(app_log_file, logging.INFO, formatter))

    # 3. Error-only log file.
    error_log_file = log_dir / "error.log"
    root_logger.addHandler(_timed_handler(error_log_file, logging.ERROR, formatter))

    # 4. Dedicated scheduler log, shared by APScheduler itself and the
    #    application's scheduler module.
    scheduler_log_file = log_dir / "scheduler.log"
    scheduler_handler = _timed_handler(scheduler_log_file, logging.INFO, formatter)
    scheduler_logger = logging.getLogger('apscheduler')
    scheduler_logger.addHandler(scheduler_handler)
    scheduler_logger.setLevel(logging.INFO)
    app_scheduler_logger = logging.getLogger('app.utils.scheduler')
    app_scheduler_logger.addHandler(scheduler_handler)
    app_scheduler_logger.setLevel(logging.INFO)

    # 5. API access log: terse format, attached to uvicorn's access logger.
    access_log_file = log_dir / "access.log"
    access_formatter = logging.Formatter(
        '%(asctime)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    access_handler = _timed_handler(access_log_file, logging.INFO, access_formatter)
    uvicorn_access_logger = logging.getLogger("uvicorn.access")
    uvicorn_access_logger.addHandler(access_handler)
    uvicorn_access_logger.setLevel(logging.INFO)

    # 6. Database log: SQLAlchemy engine output, warnings and errors only.
    db_log_file = log_dir / "database.log"
    db_handler = _timed_handler(db_log_file, logging.INFO, formatter)
    sqlalchemy_logger = logging.getLogger('sqlalchemy.engine')
    sqlalchemy_logger.addHandler(db_handler)
    sqlalchemy_logger.setLevel(logging.WARNING)  # record only warnings and errors

    print(f"日志系统已配置,日志文件保存在: {log_dir.absolute()}")
    print("日志文件:")
    print(f" - 应用日志: {app_log_file}")
    print(f" - 错误日志: {error_log_file}")
    print(f" - 调度器日志: {scheduler_log_file}")
    print(f" - 访问日志: {access_log_file}")
    print(f" - 数据库日志: {db_log_file}")
|
||||||
|
|
||||||
|
def get_logger(name: str = None):
    """Return a named logger, falling back to this module's logger.

    A falsy *name* (None or empty string) yields the module-level logger.
    """
    target = name if name else __name__
    return logging.getLogger(target)
|
||||||
@@ -4,15 +4,16 @@ from contextlib import asynccontextmanager
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
from .core.config import settings
|
from .core.config import settings
|
||||||
|
from .core.logging_config import setup_logging, get_logger
|
||||||
from .core.database import init_db
|
from .core.database import init_db
|
||||||
from .api.account import router as account_router
|
from .api.account import router as account_router
|
||||||
from .api.database import router as database_router
|
from .api.database import router as database_router
|
||||||
from .api.task import router as task_router
|
from .api.task import router as task_router
|
||||||
from .utils.scheduler import task_scheduler
|
from .utils.scheduler import task_scheduler
|
||||||
|
|
||||||
# 配置日志
|
# 初始化日志系统
|
||||||
logging.basicConfig(level=logging.INFO)
|
setup_logging()
|
||||||
logger = logging.getLogger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
async def lifespan(app: FastAPI):
|
async def lifespan(app: FastAPI):
|
||||||
|
|||||||
@@ -2,14 +2,13 @@ from apscheduler.schedulers.background import BackgroundScheduler
|
|||||||
from apscheduler.executors.pool import ThreadPoolExecutor
|
from apscheduler.executors.pool import ThreadPoolExecutor
|
||||||
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
|
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
|
||||||
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
|
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
|
||||||
import logging
|
|
||||||
from ..core.config import settings
|
from ..core.config import settings
|
||||||
from ..core.database import SessionLocal
|
from ..core.database import SessionLocal
|
||||||
|
from ..core.logging_config import get_logger
|
||||||
from ..models.account import Account
|
from ..models.account import Account
|
||||||
|
|
||||||
# 配置日志
|
# 获取日志记录器
|
||||||
logging.basicConfig(level=logging.INFO)
|
logger = get_logger(__name__)
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
class TaskScheduler:
|
class TaskScheduler:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|||||||
Reference in New Issue
Block a user