"""Comprehensive data management API routes: batch-import endpoints for
section, checkpoint, settlement, level, and original monitoring data."""
from fastapi import APIRouter, Depends, HTTPException, status
|
|
from sqlalchemy.orm import Session
|
|
from typing import List, Optional
|
|
from ..core.database import get_db
|
|
from ..schemas.comprehensive_data import (
|
|
BatchSectionDataImportRequest,
|
|
BatchCheckpointDataImportRequest,
|
|
BatchSettlementDataImportRequest,
|
|
BatchLevelDataImportRequest,
|
|
BatchOriginalDataImportRequest,
|
|
DataImportResponse,
|
|
ComprehensiveDataImportRequest,
|
|
ComprehensiveDataImportResponse
|
|
)
|
|
from ..services.section_data import SectionDataService
|
|
from ..services.checkpoint import CheckpointService
|
|
from ..services.settlement_data import SettlementDataService
|
|
from ..services.level_data import LevelDataService
|
|
from ..services.original_data import OriginalDataService
|
|
import logging
|
|
|
|
# Router for all comprehensive-data endpoints; tag text is user-facing (Chinese: "comprehensive data management").
router = APIRouter(prefix="/comprehensive_data", tags=["综合数据管理"])
# Module-level logger following the standard getLogger(__name__) convention.
logger = logging.getLogger(__name__)

# Instantiate the service layer once at import time; these singletons are
# shared by every request handler below (they hold no per-request state —
# the db Session is passed in per call).
section_service = SectionDataService()
checkpoint_service = CheckpointService()
settlement_service = SettlementDataService()
level_service = LevelDataService()
original_service = OriginalDataService()
|
|
|
|
@router.post("/batch_import_sections", response_model=DataImportResponse)
def batch_import_sections(request: BatchSectionDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import section (断面) data.

    Converts each validated request item to a plain dict and delegates to
    ``SectionDataService.batch_import_sections``; the service's result dict
    is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        # Lazy %-style args avoid string formatting when INFO is disabled.
        logger.info("Starting batch import sections, count: %d", len(request.data))

        # Convert Pydantic models to dicts for the service layer.
        data_list = [item.dict() for item in request.data]

        result = section_service.batch_import_sections(db, data_list)

        logger.info("Batch import sections completed: %s", result["message"])
        return DataImportResponse(**result)

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Batch import sections failed: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入断面数据失败: {str(e)}"
        ) from e
|
|
|
|
@router.post("/batch_import_checkpoints", response_model=DataImportResponse)
def batch_import_checkpoints(request: BatchCheckpointDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import checkpoint (观测点) data.

    Converts each validated request item to a plain dict and delegates to
    ``CheckpointService.batch_import_checkpoints``; the service's result dict
    is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        # Lazy %-style args avoid string formatting when INFO is disabled.
        logger.info("Starting batch import checkpoints, count: %d", len(request.data))

        # Convert Pydantic models to dicts for the service layer.
        data_list = [item.dict() for item in request.data]

        result = checkpoint_service.batch_import_checkpoints(db, data_list)

        logger.info("Batch import checkpoints completed: %s", result["message"])
        return DataImportResponse(**result)

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Batch import checkpoints failed: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入观测点数据失败: {str(e)}"
        ) from e
|
|
|
|
@router.post("/batch_import_settlement_data", response_model=DataImportResponse)
def batch_import_settlement_data(request: BatchSettlementDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import settlement (沉降) data.

    Converts each validated request item to a plain dict and delegates to
    ``SettlementDataService.batch_import_settlement_data``; the service's
    result dict is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        # Lazy %-style args avoid string formatting when INFO is disabled.
        logger.info("Starting batch import settlement data, count: %d", len(request.data))

        # Convert Pydantic models to dicts for the service layer.
        data_list = [item.dict() for item in request.data]

        result = settlement_service.batch_import_settlement_data(db, data_list)

        logger.info("Batch import settlement data completed: %s", result["message"])
        return DataImportResponse(**result)

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Batch import settlement data failed: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入沉降数据失败: {str(e)}"
        ) from e
|
|
|
|
@router.post("/batch_import_level_data", response_model=DataImportResponse)
def batch_import_level_data(request: BatchLevelDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import level (水准) data.

    Converts each validated request item to a plain dict and delegates to
    ``LevelDataService.batch_import_level_data``; the service's result dict
    is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        # Lazy %-style args avoid string formatting when INFO is disabled.
        logger.info("Starting batch import level data, count: %d", len(request.data))

        # Convert Pydantic models to dicts for the service layer.
        data_list = [item.dict() for item in request.data]

        result = level_service.batch_import_level_data(db, data_list)

        logger.info("Batch import level data completed: %s", result["message"])
        return DataImportResponse(**result)

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Batch import level data failed: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入水准数据失败: {str(e)}"
        ) from e
|
|
|
|
@router.post("/batch_import_original_data", response_model=DataImportResponse)
def batch_import_original_data(request: BatchOriginalDataImportRequest, db: Session = Depends(get_db)):
    """Batch-import original (原始) data.

    Converts each validated request item to a plain dict and delegates to
    ``OriginalDataService.batch_import_original_data``; the service's result
    dict is returned as a ``DataImportResponse``.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        # Lazy %-style args avoid string formatting when INFO is disabled.
        logger.info("Starting batch import original data, count: %d", len(request.data))

        # Convert Pydantic models to dicts for the service layer.
        data_list = [item.dict() for item in request.data]

        result = original_service.batch_import_original_data(db, data_list)

        logger.info("Batch import original data completed: %s", result["message"])
        return DataImportResponse(**result)

    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Batch import original data failed: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"批量导入原始数据失败: {str(e)}"
        ) from e
|
|
|
|
# Legacy endpoint kept for backward compatibility.
@router.post("/data_settlement_import", response_model=ComprehensiveDataImportResponse)
def data_import(request: ComprehensiveDataImportRequest, db: Session = Depends(get_db)):
    """Import comprehensive data (legacy-compatible endpoint).

    Currently performs no import: it only answers with a success message
    directing callers to the new batch-import endpoints above. The request
    body and db session are accepted but unused, to preserve the old contract.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        logger.info("Using legacy data import interface")
        # Compatibility shim: point callers at the new batch-import endpoints.
        return ComprehensiveDataImportResponse(
            success=True,
            message="请使用新的批量导入接口"
        )
    except Exception as e:
        # logger.exception records the full traceback, unlike logger.error.
        logger.exception("Legacy data import failed: %s", e)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"数据导入失败: {str(e)}"
        ) from e