1.优化daily的数据结构

2.修改定时任务
This commit is contained in:
whm
2025-11-18 12:09:26 +08:00
parent d8b6247094
commit 83925382ed
3 changed files with 76 additions and 104 deletions

View File

@@ -1,6 +1,7 @@
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from sqlalchemy import text
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from ..core.config import settings
from sqlalchemy.orm import Session
@@ -17,6 +18,8 @@ from ..models.daily import DailyData
from ..models.settlement_data import SettlementData
from typing import List
from ..utils.construction_monitor import ConstructionMonitorUtils
import time
import json
# 获取日志记录器
logger = get_logger(__name__)
@@ -216,7 +219,7 @@ def database_cleanup_task():
return "数据库清理完成"
# 每日自动写入获取最新工况信息
def scheduled_get_max_nyid_by_point_id(start: int = 0, end: int = 0):
def scheduled_get_max_nyid_by_point_id(start: int = 0,end: int = 0):
"""定时任务获取max NYID关联数据并批量创建DailyData记录"""
db: Session = None
try:
@@ -225,23 +228,25 @@ def scheduled_get_max_nyid_by_point_id(start: int = 0, end: int = 0):
db = SessionLocal()
logger.info("定时任务开始执行获取max NYID关联数据并处理")
# 核心新增清空DailyData表所有数据
delete_count = db.query(DailyData).delete()
db.commit()
logger.info(f"DailyData表清空完成共删除{delete_count}条历史记录")
# delete_count = db.query(DailyData).delete()
# db.commit()
db.execute(text(f"TRUNCATE TABLE {DailyData.__tablename__}"))
db.commit() # 必须提交事务
# logger.info(f"DailyData表清空完成共删除{delete_count}条历史记录")
# 1. 获取沉降数据(返回 List[List[dict]])
daily_service = DailyDataService()
result = daily_service.get_nyid_by_point_id(db, [], 1)
# 2. 计算到期数据
monitor = ConstructionMonitorUtils()
daily_data = monitor.get_due_data(result, start, end)
daily_data = monitor.get_due_data(result,start=start,end=end)
data = daily_data['data']
error_data = daily_data['error_data']
winters = daily_data['winter']
logger.info(f"首次获取数据完成,共{len(result)}条记录")
# 3. 循环处理冬休数据,追溯历史非冬休记录
max_num = 1
while winters:
@@ -250,86 +255,55 @@ def scheduled_get_max_nyid_by_point_id(start: int = 0, end: int = 0):
new_list = [w['point_id'] for w in winters]
# 获取更多历史记录
nyid_list = daily_service.get_nyid_by_point_id(db, new_list, max_num)
w_list = monitor.get_due_data(nyid_list, start, end)
w_list = monitor.get_due_data(nyid_list,start=start,end=end)
# 更新冬休、待处理、错误数据
winters = w_list['winter']
data.extend(w_list['data'])
# 过期数据一并处理
# data.extend(w_list['error_data'])
error_data.extend(w_list['error_data'])
print(w_list)
data.extend(error_data)
# 4. 初始化服务实例
level_service = LevelDataService()
checkpoint_db = CheckpointService()
section_db = SectionDataService()
account_service = AccountService()
# 5. 批量查询优化
logger.info("批量获取关联数据")
# 提取所有需要的ID列表
nyid_list = list(set(d['NYID'] for d in data if d.get('NYID')))
point_id_list = list(set(d['point_id'] for d in data if d.get('point_id')))
# 批量查询LevelData
logger.info(f"批量查询LevelDatanyid数量: {len(nyid_list)}")
level_results = level_service.get_by_nyids(db, nyid_list)
level_dict = {level.NYID: level for level in level_results}
# 批量查询CheckpointData
logger.info(f"批量查询CheckpointDatapoint_id数量: {len(point_id_list)}")
checkpoint_results = checkpoint_db.get_by_point_ids_batch(db, point_id_list)
checkpoint_dict = {cp.point_id: cp for cp in checkpoint_results}
# 提取所有section_id
section_id_list = list(set(
cp.section_id for cp in checkpoint_results
if cp.section_id and isinstance(cp, object)
))
# 批量查询SectionData
logger.info(f"批量查询SectionDatasection_id数量: {len(section_id_list)}")
section_results = section_db.get_by_section_ids_batch(db, section_id_list)
section_dict = {s.section_id: s for s in section_results}
# 提取所有account_id
account_id_list = list(set(
s.account_id for s in section_results
if s.account_id and isinstance(s, object)
))
# 批量查询AccountData
logger.info(f"批量查询AccountDataaccount_id数量: {len(account_id_list)}")
account_results = account_service.get_accounts_batch(db, account_id_list)
account_dict = {acc.id: acc for acc in account_results}
logger.info("批量查询完成,开始关联数据")
# 6. 关联数据到原记录
print(len(data))
# 5. 关联其他表数据(核心逻辑保留)
for d in data:
# 关联LevelData
level_instance = level_dict.get(d['NYID'])
# 处理 LevelData(假设返回列表,取第一条)
level_results = level_service.get_by_nyid(db, d['NYID'])
level_instance = level_results[0] if isinstance(level_results, list) and level_results else level_results
d['level_data'] = level_instance.to_dict() if level_instance else None
# 关联CheckpointData
checkpoint_instance = checkpoint_dict.get(d['point_id'])
# 处理 CheckpointData(返回单实例,直接使用)
checkpoint_instance = checkpoint_db.get_by_point_id(db, d['point_id'])
d['checkpoint_data'] = checkpoint_instance.to_dict() if checkpoint_instance else None
# 关联SectionData
section_id = d['checkpoint_data']['section_id'] if d.get('checkpoint_data') else None
section_instance = section_dict.get(section_id) if section_id else None
d['section_data'] = section_instance.to_dict() if section_instance else None
# 关联AccountData
account_id = d.get('section_data', {}).get('account_id') if d.get('section_data') else None
account_instance = account_dict.get(account_id) if account_id else None
d['account_data'] = account_instance.__dict__ if account_instance else None
# 7. 构造DailyData数据并批量创建
# 处理 SectionData根据checkpoint_data关联
if d['checkpoint_data']:
section_instance = section_db.get_by_section_id(db, d['checkpoint_data']['section_id'])
d['section_data'] = section_instance.to_dict() if section_instance else None
else:
d['section_data'] = None
# 处理 AccountData
if d.get('section_data') and d['section_data'].get('account_id'):
account_response = account_service.get_account(db, account_id=d['section_data']['account_id'])
d['account_data'] = account_response.__dict__ if account_response else None
else:
d['account_data'] = None
# 6. 构造DailyData数据并批量创建
# daily_create_data1 = set()
daily_create_data = []
nyids = []
for d in data:
# 过滤无效数据(避免缺失关键字段报错)
if all(key in d for key in ['NYID', 'point_id']) and d.get('level_data') and d.get('account_data') and d.get('section_data'):
if d['NYID'] in nyids:
continue
tem = {
'NYID': d['NYID'],
'point_id': d['point_id'],
@@ -338,8 +312,8 @@ def scheduled_get_max_nyid_by_point_id(start: int = 0, end: int = 0):
'section_id': d['section_data']['section_id'],
'remaining': (0-int(d['overdue'])) if 'overdue' in d else d['remaining'],
}
nyids.append(d['NYID'])
daily_create_data.append(tem)
# 批量创建记录
if daily_create_data:
created_records = daily_service.batch_create_by_account_nyid(db, daily_create_data)