Files
railway_cloud/app/api/comprehensive_data.py
2025-09-29 11:58:56 +08:00

167 lines
6.5 KiB
Python

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import List, Optional
from ..core.database import get_db
from ..schemas.comprehensive_data import (
BatchSectionDataImportRequest,
BatchCheckpointDataImportRequest,
BatchSettlementDataImportRequest,
BatchLevelDataImportRequest,
BatchOriginalDataImportRequest,
DataImportResponse,
DataResponse,
SectionDataQueryRequest,
SettlementDataQueryRequest,
OriginalDataQueryRequest,
LevelDataQueryRequest
)
from ..services.section_data import SectionDataService
from ..services.checkpoint import CheckpointService
from ..services.settlement_data import SettlementDataService
from ..services.level_data import LevelDataService
from ..services.original_data import OriginalDataService
import logging
router = APIRouter(prefix="/comprehensive_data", tags=["综合数据管理"])
logger = logging.getLogger(__name__)
# Instantiate the service-layer singletons shared by every endpoint below.
# NOTE(review): these are stateless wrappers around DB operations — presumably
# safe to share across requests; confirm the services hold no per-request state.
section_service = SectionDataService()
checkpoint_service = CheckpointService()
settlement_service = SettlementDataService()
level_service = LevelDataService()
original_service = OriginalDataService()
@router.post("/batch_import_sections", response_model=DataImportResponse)
def batch_import_sections(request: BatchSectionDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import section (断面) records.

    Converts each pydantic item to a plain dict and delegates to
    ``SectionDataService.batch_import_sections``; its result dict (which
    includes a ``message`` key) is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 wrapping any unexpected error; service-raised
            HTTPExceptions propagate with their original status code.
    """
    try:
        # Lazy %-args: the message is only formatted if the level is enabled.
        logger.info("Starting batch import sections, count: %d", len(request.data))
        # Convert pydantic models to plain dicts for the service layer.
        # NOTE(review): .dict() is the pydantic-v1 API — confirm before a v2 upgrade.
        data_list = [item.dict() for item in request.data]
        result = section_service.batch_import_sections(db, data_list)
        logger.info("Batch import sections completed: %s", result["message"])
        return DataImportResponse(**result)
    except HTTPException:
        # Bug fix: don't mask service-raised HTTP errors (e.g. 4xx) as 500.
        raise
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Batch import sections failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入断面数据失败: {str(e)}"
        ) from e
@router.post("/batch_import_checkpoints", response_model=DataImportResponse)
def batch_import_checkpoints(request: BatchCheckpointDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import checkpoint (观测点) records.

    Converts each pydantic item to a plain dict and delegates to
    ``CheckpointService.batch_import_checkpoints``; its result dict is
    returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 wrapping any unexpected error; service-raised
            HTTPExceptions propagate with their original status code.
    """
    try:
        logger.info("Starting batch import checkpoints, count: %d", len(request.data))
        # Convert pydantic models to plain dicts for the service layer.
        data_list = [item.dict() for item in request.data]
        result = checkpoint_service.batch_import_checkpoints(db, data_list)
        logger.info("Batch import checkpoints completed: %s", result["message"])
        return DataImportResponse(**result)
    except HTTPException:
        # Bug fix: don't mask service-raised HTTP errors as 500.
        raise
    except Exception as e:
        logger.exception("Batch import checkpoints failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入观测点数据失败: {str(e)}"
        ) from e
@router.post("/batch_import_settlement_data", response_model=DataImportResponse)
def batch_import_settlement_data(request: BatchSettlementDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import settlement (沉降) records.

    Converts each pydantic item to a plain dict and delegates to
    ``SettlementDataService.batch_import_settlement_data``; its result dict
    is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 wrapping any unexpected error; service-raised
            HTTPExceptions propagate with their original status code.
    """
    try:
        logger.info("Starting batch import settlement data, count: %d", len(request.data))
        # Convert pydantic models to plain dicts for the service layer.
        data_list = [item.dict() for item in request.data]
        result = settlement_service.batch_import_settlement_data(db, data_list)
        logger.info("Batch import settlement data completed: %s", result["message"])
        return DataImportResponse(**result)
    except HTTPException:
        # Bug fix: don't mask service-raised HTTP errors as 500.
        raise
    except Exception as e:
        logger.exception("Batch import settlement data failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入沉降数据失败: {str(e)}"
        ) from e
@router.post("/batch_import_level_data", response_model=DataImportResponse)
def batch_import_level_data(request: BatchLevelDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import level (水准) records.

    Converts each pydantic item to a plain dict and delegates to
    ``LevelDataService.batch_import_level_data``; its result dict is
    returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 wrapping any unexpected error; service-raised
            HTTPExceptions propagate with their original status code.
    """
    try:
        logger.info("Starting batch import level data, count: %d", len(request.data))
        # Convert pydantic models to plain dicts for the service layer.
        data_list = [item.dict() for item in request.data]
        result = level_service.batch_import_level_data(db, data_list)
        logger.info("Batch import level data completed: %s", result["message"])
        return DataImportResponse(**result)
    except HTTPException:
        # Bug fix: don't mask service-raised HTTP errors as 500.
        raise
    except Exception as e:
        logger.exception("Batch import level data failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入水准数据失败: {str(e)}"
        ) from e
@router.post("/batch_import_original_data", response_model=DataImportResponse)
def batch_import_original_data(request: BatchOriginalDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import original (原始) records.

    Converts each pydantic item to a plain dict and delegates to
    ``OriginalDataService.batch_import_original_data``; its result dict is
    returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 wrapping any unexpected error; service-raised
            HTTPExceptions propagate with their original status code.
    """
    try:
        logger.info("Starting batch import original data, count: %d", len(request.data))
        # Convert pydantic models to plain dicts for the service layer.
        data_list = [item.dict() for item in request.data]
        result = original_service.batch_import_original_data(db, data_list)
        logger.info("Batch import original data completed: %s", result["message"])
        return DataImportResponse(**result)
    except HTTPException:
        # Bug fix: don't mask service-raised HTTP errors as 500.
        raise
    except Exception as e:
        logger.exception("Batch import original data failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入原始数据失败: {str(e)}"
        ) from e
# 查询断面数据对应观察点数据
@router.post("/get_section", response_model=DataResponse)
def get_section(request: SectionDataQueryRequest, db: Session = Depends(get_db)):
    """Query section data together with its checkpoints.

    Delegates to ``SectionDataService.search_section_data`` with the
    optional filters carried by the request.
    """
    # Bug fix: the original discarded the service result (no `return`), so the
    # endpoint always produced None and failed DataResponse validation.
    return section_service.search_section_data(
        db,
        id=request.id,
        section_id=request.section_id,
        mileage=request.mileage,
        work_site=request.work_site,
    )
# 根据观测点id查询沉降数据
# Bug fix: the route was registered at "/get_section", duplicating the section
# endpoint's path and leaving this handler unreachable (FastAPI matches the
# first registration). Renamed to its own path.
@router.post("/get_settlement", response_model=DataResponse)
def get_settlenment(request: SettlementDataQueryRequest, db: Session = Depends(get_db)):
    """Query settlement data by checkpoint id.

    TODO: delegate to ``settlement_service`` once the query method is wired
    up; until then, fail explicitly instead of returning None (which would
    break DataResponse validation with an opaque 500).
    """
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Not implemented yet"
    )
# 查询水准数据
@router.post("/get_level", response_model=DataResponse)
def get_level(request: LevelDataQueryRequest, db: Session = Depends(get_db)):
    """Query level (水准) data.

    TODO: delegate to ``level_service`` once the query method is wired up;
    until then, fail explicitly instead of returning None (which would break
    DataResponse validation with an opaque 500).
    """
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Not implemented yet"
    )
# 根据期数id获取原始数据
# Bug fix: the original function had no @router.post decorator, so it was
# never registered as an endpoint — the comment and the sibling query routes
# show it is meant to be one.
@router.post("/get_original", response_model=DataResponse)
def get_original(request: OriginalDataQueryRequest, db: Session = Depends(get_db)):
    """Query original data plus its level data by period id.

    TODO: delegate to ``original_service`` once the query method is wired up;
    until then, fail explicitly instead of returning None (which would break
    DataResponse validation with an opaque 500).
    """
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Not implemented yet"
    )