from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.orm import Session
from typing import List, Optional
from ..core.database import get_db
from ..core.response_code import ResponseCode, ResponseMessage
from ..schemas.comprehensive_data import (
    BatchSectionDataImportRequest,
    BatchCheckpointDataImportRequest,
    BatchSettlementDataImportRequest,
    BatchLevelDataImportRequest,
    BatchOriginalDataImportRequest,
    BatchOriginalDataImportRequestNew,
    DataImportResponse,
    DataResponse,
    SectionDataQueryRequest,
    SettlementDataQueryRequest,
    OriginalDataQueryRequest,
    SettlementDataCheckpointQueryRequest,
    LevelDataQueryRequest,
    LinecodeRequest,
    NYIDRequest,
    SectionByAccountRequest,
    PointByAccountRequest,
    TodayDataRequest,
)
from ..services.daily import DailyDataService
from ..services.section_data import SectionDataService
from ..services.checkpoint import CheckpointService
from ..services.settlement_data import SettlementDataService
from ..services.level_data import LevelDataService
from ..services.original_data import OriginalDataService
from ..services.comprehensive import ComprehensiveDataService
import logging

router = APIRouter(prefix="/comprehensive_data", tags=["综合数据管理"])
logger = logging.getLogger(__name__)

# Module-level service instances, shared by all endpoints below.
# The service classes hold no per-request state, so one instance each suffices.
section_service = SectionDataService()
checkpoint_service = CheckpointService()
settlement_service = SettlementDataService()
level_service = LevelDataService()
original_service = OriginalDataService()
comprehensive_service = ComprehensiveDataService()


def _empty_import_data() -> dict:
    """Zeroed statistics payload used by the import endpoints' error responses."""
    return {'total_count': 0, 'success_count': 0, 'failed_count': 0, 'failed_items': []}


def _import_result_response(result: dict) -> DataImportResponse:
    """Translate a service-layer batch-import result dict into a DataImportResponse.

    The service is expected to supply 'success' and 'message', plus the count
    fields (each defaulted to 0/[] when absent).
    """
    return DataImportResponse(
        code=ResponseCode.SUCCESS if result.get('success') else ResponseCode.IMPORT_FAILED,
        message=result['message'],
        data={
            'total_count': result.get('total_count', 0),
            'success_count': result.get('success_count', 0),
            'failed_count': result.get('failed_count', 0),
            'failed_items': result.get('failed_items', []),
        },
    )


def _import_error_response(message: str) -> DataImportResponse:
    """Build a failed DataImportResponse with zeroed statistics."""
    return DataImportResponse(
        code=ResponseCode.IMPORT_FAILED,
        message=message,
        data=_empty_import_data(),
    )


def _bad_request_response(message: str) -> DataImportResponse:
    """Build a BAD_REQUEST DataImportResponse with zeroed statistics."""
    return DataImportResponse(
        code=ResponseCode.BAD_REQUEST,
        message=message,
        data=_empty_import_data(),
    )


def _orm_row_to_dict(instance) -> dict:
    """Copy a SQLAlchemy ORM instance's attribute dict, dropping internal state."""
    data = instance.__dict__.copy()
    data.pop('_sa_instance_state', None)
    return data


def _orm_rows_to_dicts(instances) -> list:
    """Convert a list of SQLAlchemy ORM instances to plain dicts.

    Strips every SQLAlchemy-internal attribute (names starting with '_sa_').
    """
    return [
        {k: v for k, v in inst.__dict__.items() if not k.startswith('_sa_')}
        for inst in instances
    ]


@router.post("/batch_import_sections", response_model=DataImportResponse)
def batch_import_sections(request: BatchSectionDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import section (断面) data."""
    try:
        logger.info(f"Starting batch import sections, count: {len(request.data)}")
        result = section_service.batch_import_sections(db, request.data)
        logger.info(f"Batch import sections completed: {result['message']}")
        return _import_result_response(result)
    except Exception as e:
        logger.error(f"Batch import sections failed: {str(e)}")
        return _import_error_response(f"{ResponseMessage.IMPORT_FAILED}: {str(e)}")


@router.post("/batch_import_checkpoints", response_model=DataImportResponse)
def batch_import_checkpoints(request: BatchCheckpointDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import checkpoint (观测点) data."""
    try:
        logger.info(f"Starting batch import checkpoints, count: {len(request.data)}")
        result = checkpoint_service.batch_import_checkpoints(db, request.data)
        logger.info(f"Batch import checkpoints completed: {result['message']}")
        return _import_result_response(result)
    except Exception as e:
        logger.error(f"Batch import checkpoints failed: {str(e)}")
        return _import_error_response(f"{ResponseMessage.IMPORT_FAILED}: {str(e)}")


@router.post("/batch_import_settlement_data", response_model=DataImportResponse)
def batch_import_settlement_data(request: BatchSettlementDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import settlement (沉降) data."""
    try:
        logger.info(f"Starting batch import settlement data, count: {len(request.data)}")
        result = settlement_service.batch_import_settlement_data(db, request.data)
        logger.info(f"Batch import settlement data completed: {result['message']}")
        return _import_result_response(result)
    except Exception as e:
        logger.error(f"Batch import settlement data failed: {str(e)}")
        return _import_error_response(f"{ResponseMessage.IMPORT_FAILED}: {str(e)}")


@router.post("/batch_import_level_data", response_model=DataImportResponse)
def batch_import_level_data(request: BatchLevelDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import level (水准) data."""
    try:
        logger.info(f"Starting batch import level data, count: {len(request.data)}")
        result = level_service.batch_import_level_data(db, request.data)
        return _import_result_response(result)
    except Exception as e:
        logger.error(f"Batch import level data failed: {str(e)}")
        return _import_error_response(f"{ResponseMessage.IMPORT_FAILED}: {str(e)}")


@router.post("/batch_import_original_data", response_model=DataImportResponse)
def batch_import_original_data(request: BatchOriginalDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import original data; each record must contain an 'account_id' field."""
    try:
        logger.info(f"Starting batch import original data, count: {len(request.data)}")
        # Reject an empty payload up front.
        if not request.data:
            return _bad_request_response("导入数据不能为空")
        # Only the first record is checked for 'account_id', matching the
        # assumption that all records in a batch carry the same key set.
        if 'account_id' not in request.data[0]:
            return _bad_request_response("数据中必须包含account_id字段")
        result = original_service.batch_import_original_data(db, request.data)
        logger.info(f"Batch import original data completed: {result['message']}")
        return _import_result_response(result)
    except Exception as e:
        logger.error(f"Batch import original data failed: {str(e)}")
        return _import_error_response(f"{ResponseMessage.IMPORT_FAILED}: {str(e)}")


@router.post("/batch_import_original_data_new", response_model=DataImportResponse)
def batch_import_original_data_new(request: BatchOriginalDataImportRequestNew, db: Session = Depends(get_db)):
    """Batch-import original data, new grouped format.

    Payload format: data: [[{}, {}, {}], [{}, {}]] — each inner list is one
    group whose records share the same NYID and account_id (groups may differ).

    Import logic (implemented in the service layer):
    1. Records are stored in per-account_id tables, created on demand.
    2. Before inserting, the table is checked for rows with the same NYID.
    3. On a NYID match, all existing rows with that NYID are deleted and the
       new rows inserted; otherwise the rows are inserted directly.
    """
    try:
        logger.info(f"Starting batch import original data (new), group count: {len(request.data)}")
        # Reject an empty payload up front.
        if not request.data:
            return _bad_request_response("导入数据不能为空")
        # The first group must be non-empty so we can inspect its first record.
        if not request.data[0]:
            return _bad_request_response("分组数据不能为空")
        if 'account_id' not in request.data[0][0]:
            return _bad_request_response("数据中必须包含account_id字段")
        result = original_service.batch_import_original_data_new(db, request.data)
        logger.info(f"Batch import original data (new) completed: {result['message']}")
        return _import_result_response(result)
    except Exception as e:
        logger.error(f"Batch import original data (new) failed: {str(e)}")
        return _import_error_response(f"{ResponseMessage.IMPORT_FAILED}: {str(e)}")


# Query section data together with its checkpoints.
@router.post("/get_section", response_model=DataResponse)
def get_section(request: SectionDataQueryRequest, db: Session = Depends(get_db)):
    """Query section data + checkpoints (paginated)."""
    try:
        logger.info(f"Querying section data with params: {request.dict()}")
        result = section_service.search_sections_with_checkpoints(
            db,
            id=request.id,
            section_id=request.section_id,
            mileage=request.mileage,
            work_site=request.work_site,
            number=request.number,
            status=request.status,
            account_id=request.account_id,
            skip=request.skip,
            limit=request.limit,
        )
        logger.info(f"Found {result['total']} sections with checkpoints, returning {len(result['data'])} records")
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=result['total'],
            data=result['data'],
        )
    except Exception as e:
        logger.error(f"Query section data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


# Query settlement data by checkpoint id (or, via account_id, the whole chain).
@router.post("/get_settlement", response_model=DataResponse)
def get_settlement(request: SettlementDataQueryRequest, db: Session = Depends(get_db)):
    """Query settlement data.

    When account_id is supplied the lookup chain is:
    account_id -> section data -> checkpoint data -> settlement data.
    Otherwise the original direct query logic applies.
    """
    try:
        logger.info(f"Querying settlement data with params: {request.dict()}")
        if request.account_id:
            logger.info(f"Using account_id to query: {request.account_id}")
            # 1. Resolve the account's sections.
            section_data_list = section_service.get_by_account_id(db, request.account_id)
            logger.info(f"Found {len(section_data_list)} sections for account_id: {request.account_id}")
            if not section_data_list:
                return DataResponse(
                    code=ResponseCode.SUCCESS,
                    message="未找到对应账号ID的断面数据",
                    total=0,
                    data=[],
                )
            # 2. Fetch all checkpoints in one batched query (avoids per-section queries).
            section_ids = [section_data.section_id for section_data in section_data_list]
            logger.info(f"Querying {len(section_ids)} sections for account_id: {request.account_id}")
            checkpoint_data_list = checkpoint_service.get_by_section_ids_batch(db, section_ids)
            logger.info(f"Found {len(checkpoint_data_list)} checkpoints total")
            # Collect unique point ids, preserving first-seen order.
            point_ids = []
            seen = set()
            for checkpoint in checkpoint_data_list:
                pid = checkpoint.point_id
                if pid and pid not in seen:
                    seen.add(pid)
                    point_ids.append(pid)
            logger.info(f"Total unique point_ids found: {len(point_ids)}")
            if not point_ids:
                return DataResponse(
                    code=ResponseCode.SUCCESS,
                    message="未找到观测点数据",
                    total=0,
                    data=[],
                )
            # 3. Batched settlement query (single database round trip).
            result = settlement_service.search_settlement_data_by_point_ids_formatted(
                db,
                point_ids=point_ids,
                id=request.id,
                nyid=request.NYID,
                sjName=request.sjName,
                workinfoname=request.workinfoname,
                skip=request.skip,
                limit=request.limit,
            )
            logger.info(f"Found {result['total']} settlement records using optimized batch query, returning {len(result['data'])} records")
        else:
            # No account_id: fall back to the direct query path.
            logger.info("Using original query logic without account_id")
            result = settlement_service.search_settlement_data_formatted(
                db,
                id=request.id,
                point_id=request.point_id,
                nyid=request.NYID,
                sjName=request.sjName,
                workinfoname=request.workinfoname,
                skip=request.skip,
                limit=request.limit,
            )
            logger.info(f"Found {result['total']} settlement records using original logic, returning {len(result['data'])} records")
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=result['total'],
            data=result['data'],
        )
    except Exception as e:
        logger.error(f"Query settlement data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


# Query settlement data joined with checkpoint data.
@router.post("/get_settlement_checkpoint", response_model=DataResponse)
def get_settlement_checkpoint(request: SettlementDataCheckpointQueryRequest, db: Session = Depends(get_db)):
    """Query settlement + checkpoint data, newest upload first; 'limit' caps the result size."""
    try:
        logger.info(f"Querying settlement data with params: {request.dict()}")
        result_data = settlement_service.search_settlement_checkpoint_data_formatted(
            db,
            id=request.id,
            point_id=request.point_id,
            nyid=request.NYID,
            sjName=request.sjName,
            workinfoname=request.workinfoname,
            linecode=request.linecode,
            limit=request.limit,
        )
        logger.info(f"Found {len(result_data)} settlement records")
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=len(result_data),
            data=result_data,
        )
    except Exception as e:
        logger.error(f"Query settlement data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


# Query original data by period id.
@router.post("/get_original", response_model=DataResponse)
def get_original(request: OriginalDataQueryRequest, db: Session = Depends(get_db)):
    """Query level + original data; account_id is optional (omitted = search all partition tables)."""
    try:
        logger.info(f"Querying original data with params: {request.dict()}")
        result = comprehensive_service.get_level_and_original_data(
            db,
            account_id=request.account_id,  # optional
            id=request.id,
            bfpcode=request.bfpcode,
            bffb=request.bffb,
            nyid=request.NYID,
            linecode=request.linecode,
            bfpl=request.bfpl,
        )
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message=result["message"],
            total=result["count"],
            data=result["data"],
        )
    except Exception as e:
        logger.error(f"Query original data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


@router.post("/get_settlement_by_linecode", response_model=DataResponse)
def get_settlement_by_linecode(request: LinecodeRequest, db: Session = Depends(get_db)):
    """Query settlement data by level-line code (linecode)."""
    try:
        linecode = request.linecode
        logger.info(f"接口请求:根据linecode={linecode}查询沉降数据")
        result = settlement_service.get_settlement_by_linecode(db, linecode)
        settlement_rows = result['settlement_data']
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message=f"查询成功,共获取{len(settlement_rows)}条沉降数据",
            total=len(settlement_rows),
            data=settlement_rows,
        )
    except Exception as e:
        logger.error(f"查询沉降数据失败:{str(e)}", exc_info=True)
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"查询失败:{str(e)}",
            total=0,
            data=[],
        )


@router.post("/get_settlement_by_nyid", response_model=DataResponse)
def get_settlement_by_nyid(request: NYIDRequest, db: Session = Depends(get_db)):
    """Query settlement data by NYID (period id)."""
    try:
        nyid = request.NYID
        logger.info(f"接口请求:根据nyid={nyid}查询沉降数据")
        # Convert ORM instances to plain dicts so they serialize cleanly.
        checkpoint_data = _orm_rows_to_dicts(settlement_service.get_by_nyid(db, nyid=nyid))
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message=f"查询成功,共获取{len(checkpoint_data)}条沉降数据,nyid={nyid}",
            total=len(checkpoint_data),
            data=checkpoint_data,
        )
    except Exception as e:
        logger.error(f"查询沉降数据失败:{str(e)}", exc_info=True)
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"查询失败:{str(e)}",
            total=0,
            data=[],
        )


@router.post("/get_settlement_by_point", response_model=DataResponse)
def get_settlement_by_point(request: NYIDRequest, db: Session = Depends(get_db)):
    """Query settlement data by point_id (NYID is logged but only point_id filters)."""
    # NOTE(review): this reads request.point_id off NYIDRequest — verify that
    # the schema actually declares point_id, or a dedicated request model fits better.
    try:
        nyid = request.NYID
        point_id = request.point_id
        logger.info(f"接口请求:根据nyid={nyid}和point_id={point_id}查询沉降数据")
        # Convert ORM instances to plain dicts so they serialize cleanly.
        checkpoint_data = _orm_rows_to_dicts(settlement_service.get_by_point_id(db, point_id=point_id))
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message=f"查询成功,共获取{len(checkpoint_data)}条沉降数据,nyid={nyid}",
            total=len(checkpoint_data),
            data=checkpoint_data,
        )
    except Exception as e:
        logger.error(f"查询沉降数据失败:{str(e)}", exc_info=True)
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"查询失败:{str(e)}",
            total=0,
            data=[],
        )


@router.post("/get_today_data", response_model=DataResponse)
def get_today_data(request: TodayDataRequest, db: Session = Depends(get_db)):
    """Return today's daily data for the given account (no scheduler task is triggered here)."""
    try:
        account_id = request.account_id
        daily_service = DailyDataService()
        daily_data = daily_service.get_daily_data_by_account(db, account_id=account_id)
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="定时时任务触发执行成功!任务已开始处理(具体结果查看系统日志)",
            total=1 if daily_data else 0,  # presence flag, not a record count
            data=daily_data,
        )
    except Exception as e:
        logger.error(f"接口触发定时任务失败:{str(e)}", exc_info=True)
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"定时任务触发失败:{str(e)}",
            total=0,
            data={},
        )


@router.post("/refresh_today_data", response_model=DataResponse)
def refresh_today_data(request: TodayDataRequest, db: Session = Depends(get_db)):
    """Trigger the scheduler's scheduled_get_max_nyid_by_point_id task, then return the refreshed daily data."""
    try:
        start = request.start if request.start is not None else 0
        end = request.end if request.end is not None else 0
        # Imported lazily to avoid pulling the scheduler in at module import time.
        from ..utils.scheduler import scheduled_get_max_nyid_by_point_id
        scheduled_get_max_nyid_by_point_id(start, end)
        daily_service = DailyDataService()
        daily_data = daily_service.get_daily_data_by_account(db, account_id=request.account_id)
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="定时时任务触发执行成功!任务已开始处理(具体结果查看系统日志)",
            total=len(daily_data),
            data=daily_data,
        )
    except Exception as e:
        logger.error(f"接口触发定时任务失败:{str(e)}", exc_info=True)
        # Bug fix: the original returned total=len(daily_data) here, which
        # raises NameError when the exception fires before daily_data is bound.
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"定时任务触发失败:{str(e)}",
            total=0,
            data={},
        )


# All section data for an account.
@router.post("/get_all_section_by_account", response_model=DataResponse)
def get_all_section_by_account(request: SectionByAccountRequest, db: Session = Depends(get_db)):
    """Return every section belonging to the given account_id."""
    try:
        result_data = section_service.get_by_account_id(db, account_id=request.account_id)
        data_list = [item.to_dict() for item in result_data] if result_data else []
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=len(data_list),
            data=data_list,
        )
    except Exception as e:
        logger.error(f"Query section data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


# All checkpoint data for a section.
@router.post("/get_all_checkpoint_by_section", response_model=DataResponse)
def get_all_checkpoint_by_section(request: SectionByAccountRequest, db: Session = Depends(get_db)):
    """Return every checkpoint belonging to the given section_id."""
    # NOTE(review): this reads request.section_id off SectionByAccountRequest —
    # confirm the schema declares section_id.
    try:
        result_data = checkpoint_service.get_by_section_id(db, section_id=request.section_id)
        data_list = [item.to_dict() for item in result_data] if result_data else []
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=len(data_list),
            data=data_list,
        )
    except Exception as e:
        logger.error(f"Query section data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


@router.post("/get_checkpoint_by_point", response_model=DataResponse)
def get_checkpoint_by_point(request: PointByAccountRequest, db: Session = Depends(get_db)):
    """Return the checkpoint identified by point_id (at most one record)."""
    try:
        result_data = checkpoint_service.get_by_point_id(db, point_id=request.point_id)
        data_list = [_orm_row_to_dict(result_data)] if result_data else []
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=len(data_list),
            data=data_list,
        )
    except Exception as e:
        logger.error(f"Query section data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )


# Settlement data for the latest NYID on a level line.
@router.post("/get_checkpoint_by_linecode", response_model=DataResponse)
def get_checkpoint_by_linecode(request: LevelDataQueryRequest, db: Session = Depends(get_db)):
    """Look up the latest level record for a linecode, then fetch settlement data for its NYID."""
    # Bug fix: this handler was also named get_checkpoint_by_point, silently
    # shadowing the handler above at module level; renamed to match its route.
    try:
        latest_level = level_service.get_last_by_linecode(db, linecode=request.linecode)
        nyid = latest_level.NYID if latest_level else None
        result_data = settlement_service.get_by_nyid(db, nyid=nyid)
        data_list = [_orm_row_to_dict(result_data)] if result_data else []
        return DataResponse(
            code=ResponseCode.SUCCESS,
            message="查询成功",
            total=len(data_list),
            data=data_list,
        )
    except Exception as e:
        logger.error(f"Query section data failed: {str(e)}")
        return DataResponse(
            code=ResponseCode.QUERY_FAILED,
            message=f"{ResponseMessage.QUERY_FAILED}: {str(e)}",
            total=0,
            data=[],
        )