初始化

This commit is contained in:
lhx
2025-12-12 10:57:31 +08:00
commit f8e85beba1
38 changed files with 2146 additions and 0 deletions

4
app/api/__init__.py Normal file
View File

@@ -0,0 +1,4 @@
from .work_area import router as work_area_router
from .section_data import router as section_data_router
from .checkpoint import router as checkpoint_router
from .measurement_data import router as measurement_data_router

62
app/api/checkpoint.py Normal file
View File

@@ -0,0 +1,62 @@
"""观测点数据接口"""
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from pydantic import BaseModel
from typing import Optional
from app.core.database import get_tunnel_db
from app.core.logging_config import get_logger
from app.schemas.checkpoint import CheckpointBatchImport, CheckpointQuery
from app.schemas.common import BatchImportResponse
from app.servives.checkpoint_service import CheckpointService
router = APIRouter(prefix="/checkpoint", tags=["观测点数据"])
logger = get_logger(__name__)
class CheckpointByDepartmentQuery(BaseModel):
    """Request body for querying checkpoints (observation points) by department_id."""
    # Account making the request; used by the service to scope results.
    account_id: int
    # Department identifier the checkpoints are filtered by.
    department_id: str
    # 1-based page number for pagination.
    page: int = 1
    # Number of rows returned per page.
    page_size: int = 20
@router.post("/import", response_model=BatchImportResponse)
async def batch_import(
    request: CheckpointBatchImport,
    db: Session = Depends(get_tunnel_db)
):
    """Batch-import checkpoint (observation point) records.

    Args:
        request: payload carrying the caller's account_id and the rows to import.
        db: tunnel-database session injected by FastAPI.

    Returns:
        BatchImportResponse produced by CheckpointService.batch_import.
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("观测点数据导入请求: account_id=%s, 数据量=%s",
                request.account_id, len(request.data))
    return CheckpointService.batch_import(db, request.account_id, request.data)
@router.post("/query")
async def query(
    request: CheckpointQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query checkpoint records with the filters carried by CheckpointQuery.

    Args:
        request: filter + pagination parameters.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args avoid formatting the whole request when INFO is disabled.
    logger.info("观测点数据查询请求: %s", request)
    items, total = CheckpointService.query(db, request)
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }
@router.post("/query_by_department")
async def query_by_department(
    request: CheckpointByDepartmentQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query checkpoints by department_id, enriched with section mileage and
    surrounding-rock grade (per the service's contract).

    Args:
        request: account_id, department_id and pagination parameters.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args: skip formatting when INFO logging is disabled.
    logger.info("根据department_id查询观测点数据: %s", request)
    items, total = CheckpointService.query_by_department(
        db, request.account_id, request.department_id,
        request.page, request.page_size
    )
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }

66
app/api/measurement_data.py Normal file
View File

@@ -0,0 +1,66 @@
"""量测数据接口"""
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
from app.core.database import get_tunnel_db
from app.core.logging_config import get_logger
from app.schemas.measurement_data import MeasurementDataBatchImport, MeasurementDataQuery
from app.schemas.common import BatchImportResponse
from app.servives.measurement_data_service import MeasurementDataService
router = APIRouter(prefix="/measurement_data", tags=["量测数据"])
logger = get_logger(__name__)
class MeasurementByDepartmentQuery(BaseModel):
    """Request body for querying measurement data by department_id."""
    # Account making the request; used by the service to scope results.
    account_id: int
    # Department identifier the measurements are filtered by.
    department_id: str
    # Optional inclusive lower bound on monitoring time; None means unbounded.
    monitoring_time_start: Optional[datetime] = None
    # Optional inclusive upper bound on monitoring time; None means unbounded.
    monitoring_time_end: Optional[datetime] = None
    # 1-based page number for pagination.
    page: int = 1
    # Number of rows returned per page.
    page_size: int = 20
@router.post("/import", response_model=BatchImportResponse)
async def batch_import(
    request: MeasurementDataBatchImport,
    db: Session = Depends(get_tunnel_db)
):
    """Batch-import measurement records.

    Args:
        request: payload carrying the caller's account_id and the rows to import.
        db: tunnel-database session injected by FastAPI.

    Returns:
        BatchImportResponse produced by MeasurementDataService.batch_import.
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("量测数据导入请求: account_id=%s, 数据量=%s",
                request.account_id, len(request.data))
    return MeasurementDataService.batch_import(db, request.account_id, request.data)
@router.post("/query")
async def query(
    request: MeasurementDataQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query measurement records with the filters carried by MeasurementDataQuery.

    Args:
        request: filter + pagination parameters.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args avoid formatting the whole request when INFO is disabled.
    logger.info("量测数据查询请求: %s", request)
    items, total = MeasurementDataService.query(db, request)
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }
@router.post("/query_by_department")
async def query_by_department(
    request: MeasurementByDepartmentQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query measurement data by department_id, enriched with section mileage,
    surrounding-rock grade and checkpoint name (per the service's contract).

    Args:
        request: account_id, department_id, optional monitoring-time window and
            pagination parameters.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args: skip formatting when INFO logging is disabled.
    logger.info("根据department_id查询量测数据: %s", request)
    items, total = MeasurementDataService.query_by_department(
        db, request.account_id, request.department_id,
        request.page, request.page_size,
        request.monitoring_time_start, request.monitoring_time_end
    )
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }

56
app/api/section_data.py Normal file
View File

@@ -0,0 +1,56 @@
"""断面数据接口"""
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from app.core.database import get_tunnel_db
from app.core.logging_config import get_logger
from app.schemas.section_data import SectionDataBatchImport, SectionDataQuery
from app.schemas.common import BatchImportResponse
from app.servives.section_data_service import SectionDataService
router = APIRouter(prefix="/section_data", tags=["断面数据"])
logger = get_logger(__name__)
@router.post("/import", response_model=BatchImportResponse)
async def batch_import(
    request: SectionDataBatchImport,
    db: Session = Depends(get_tunnel_db)
):
    """Batch-import section (cross-section) records.

    Args:
        request: payload carrying the caller's account_id and the rows to import.
        db: tunnel-database session injected by FastAPI.

    Returns:
        BatchImportResponse produced by SectionDataService.batch_import.
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("断面数据导入请求: account_id=%s, 数据量=%s",
                request.account_id, len(request.data))
    return SectionDataService.batch_import(db, request.account_id, request.data)
@router.post("/query")
async def query(
    request: SectionDataQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query section records with the filters carried by SectionDataQuery.

    Args:
        request: filter + pagination parameters.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args avoid formatting the whole request when INFO is disabled.
    logger.info("断面数据查询请求: %s", request)
    items, total = SectionDataService.query(db, request)
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }
@router.post("/query_by_department")
async def query_by_department(
    request: SectionDataQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query section data scoped to a department.

    Only account_id, department_id and pagination are honoured: the incoming
    SectionDataQuery is rebuilt with just those fields, so any other filters
    the caller may have set are dropped before the service query runs.

    Args:
        request: full SectionDataQuery; only the department-scoped fields are used.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args: skip formatting when INFO logging is disabled.
    logger.info("根据department_id查询断面数据: %s", request)
    # Rebuild the params object so stray filters on the request cannot
    # narrow the department-wide result set.
    params = SectionDataQuery(
        account_id=request.account_id,
        department_id=request.department_id,
        page=request.page,
        page_size=request.page_size
    )
    # NOTE(review): the checkpoint/measurement routers call a dedicated
    # query_by_department service method; confirm SectionDataService
    # intentionally reuses the generic query() here.
    items, total = SectionDataService.query(db, params)
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }

35
app/api/work_area.py Normal file
View File

@@ -0,0 +1,35 @@
"""工区数据接口"""
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from app.core.database import get_tunnel_db
from app.core.logging_config import get_logger
from app.schemas.work_area import WorkAreaBatchImport, WorkAreaQuery
from app.schemas.common import BatchImportResponse, PageResponse
from app.servives.work_area_service import WorkAreaService
router = APIRouter(prefix="/work_area", tags=["工区数据"])
logger = get_logger(__name__)
@router.post("/import", response_model=BatchImportResponse)
async def batch_import(
    request: WorkAreaBatchImport,
    db: Session = Depends(get_tunnel_db)
):
    """Batch-import work-area records.

    Args:
        request: payload carrying the caller's account_id and the rows to import.
        db: tunnel-database session injected by FastAPI.

    Returns:
        BatchImportResponse produced by WorkAreaService.batch_import.
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("工区数据导入请求: account_id=%s, 数据量=%s",
                request.account_id, len(request.data))
    return WorkAreaService.batch_import(db, request.account_id, request.data)
@router.post("/query")
async def query(
    request: WorkAreaQuery,
    db: Session = Depends(get_tunnel_db)
):
    """Query work-area records with the filters carried by WorkAreaQuery.

    Args:
        request: filter + pagination parameters.
        db: tunnel-database session injected by FastAPI.

    Returns:
        dict with total row count, echoed pagination values, and the page of items.
    """
    # Lazy %-style args avoid formatting the whole request when INFO is disabled.
    logger.info("工区数据查询请求: %s", request)
    items, total = WorkAreaService.query(db, request)
    return {
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
        "items": items
    }