初始化
This commit is contained in:
176
app/servives/measurement_data_service.py
Normal file
176
app/servives/measurement_data_service.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""量测数据服务"""
|
||||
from typing import List, Dict, Tuple
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.logging_config import get_logger
|
||||
from app.models.measurement_data import MeasurementData
|
||||
from app.models.checkpoint import Checkpoint
|
||||
from app.models.section_data import SectionData
|
||||
from app.schemas.measurement_data import MeasurementDataCreate, MeasurementDataQuery
|
||||
from app.schemas.common import BatchImportResponse
|
||||
from .table_manager import TableManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
class MeasurementDataService:
    """Service layer for measurement data (量测数据).

    Provides batch import and paged queries against per-account dynamic
    tables whose names come from ``*.get_table_name(account_id)``.
    """

    @staticmethod
    def batch_import(db: Session, account_id: int,
                     data: List[MeasurementDataCreate]) -> BatchImportResponse:
        """Batch-import measurement rows into the account's table.

        Measurement data is inserted without duplicate checking.

        Args:
            db: active SQLAlchemy session.
            account_id: account whose per-account table receives the rows.
            data: rows to insert; an empty list is reported as success.

        Returns:
            BatchImportResponse describing the outcome.
        """
        table_name = MeasurementData.get_table_name(account_id)

        if not TableManager.ensure_table_exists(db, "measurement_data", account_id):
            return BatchImportResponse(
                success=False, total=len(data), inserted=0, skipped=0,
                message="创建表失败"
            )

        # 量测数据不检查重复,直接插入
        if data:
            try:
                # One prepared statement executed with a parameter list
                # (executemany) instead of a single hand-built multi-row
                # VALUES clause: avoids driver bound-parameter limits for
                # large imports and per-row SQL string assembly.
                insert_sql = text(f"""INSERT INTO {table_name}
                    (monitoring_time, upload_time, monitoring_value, deformation_value,
                     time_interval, cumulative_deformation, deformation_rate,
                     distance_working_face, point_id)
                    VALUES (:monitoring_time, :upload_time, :monitoring_value,
                            :deformation_value, :time_interval, :cumulative_deformation,
                            :deformation_rate, :distance_working_face, :point_id)""")
                rows = [
                    {
                        "monitoring_time": item.monitoring_time,
                        "upload_time": item.upload_time,
                        "monitoring_value": item.monitoring_value,
                        "deformation_value": item.deformation_value,
                        "time_interval": item.time_interval,
                        "cumulative_deformation": item.cumulative_deformation,
                        "deformation_rate": item.deformation_rate,
                        "distance_working_face": item.distance_working_face,
                        "point_id": item.point_id,
                    }
                    for item in data
                ]
                db.execute(insert_sql, rows)
                db.commit()
                logger.info(f"量测数据导入成功: account_id={account_id}, 插入={len(data)}")
            except Exception as e:
                db.rollback()
                logger.error(f"量测数据导入失败: {e}")
                return BatchImportResponse(
                    success=False, total=len(data), inserted=0, skipped=0,
                    message=f"插入失败: {str(e)}"
                )

        return BatchImportResponse(
            success=True, total=len(data), inserted=len(data), skipped=0,
            message="导入成功"
        )

    @staticmethod
    def query(db: Session, params: MeasurementDataQuery) -> Tuple[List[Dict], int]:
        """Query measurement data with optional filters and pagination.

        Args:
            db: active SQLAlchemy session.
            params: query object carrying account_id, optional point_id,
                optional monitoring-time range, page and page_size.

        Returns:
            (items, total): page of rows as dicts, and total matching count.
        """
        table_name = MeasurementData.get_table_name(params.account_id)

        if not TableManager.ensure_table_exists(db, "measurement_data", params.account_id):
            return [], 0

        conditions = []
        query_params = {}

        if params.point_id:
            conditions.append("point_id = :point_id")
            query_params["point_id"] = params.point_id
        if params.monitoring_time_start:
            conditions.append("monitoring_time >= :monitoring_time_start")
            query_params["monitoring_time_start"] = params.monitoring_time_start
        if params.monitoring_time_end:
            conditions.append("monitoring_time <= :monitoring_time_end")
            query_params["monitoring_time_end"] = params.monitoring_time_end

        where_clause = " AND ".join(conditions) if conditions else "1=1"

        count_sql = f"SELECT COUNT(*) FROM {table_name} WHERE {where_clause}"
        # scalar() may yield None on some drivers; normalize to int 0.
        total = db.execute(text(count_sql), query_params).scalar() or 0

        offset = (params.page - 1) * params.page_size
        query_params["limit"] = params.page_size
        query_params["offset"] = offset

        data_sql = f"SELECT * FROM {table_name} WHERE {where_clause} ORDER BY monitoring_time DESC LIMIT :limit OFFSET :offset"
        result = db.execute(text(data_sql), query_params)
        items = [dict(row._mapping) for row in result.fetchall()]

        return items, total

    @staticmethod
    def query_by_department(db: Session, account_id: int, department_id: str,
                            page: int = 1, page_size: int = 20,
                            monitoring_time_start=None, monitoring_time_end=None) -> Tuple[List[Dict], int]:
        """Query measurement data by department_id, joined (in Python) with
        section and checkpoint info.

        Resolves department -> sections -> checkpoints, then pages through
        the measurement rows of those checkpoints, enriching each row with
        point_name, section_mileage and rock_mass_classification.

        Returns:
            (items, total): enriched page of rows, and total matching count.
            ([], 0) when the department has no sections/checkpoints or a
            required table cannot be ensured.
        """
        measurement_table = MeasurementData.get_table_name(account_id)
        checkpoint_table = Checkpoint.get_table_name(account_id)
        section_table = SectionData.get_table_name(account_id)

        for table_type in ["measurement_data", "checkpoint", "section_data"]:
            if not TableManager.ensure_table_exists(db, table_type, account_id):
                return [], 0

        # Sections belonging to the department.
        section_sql = f"SELECT section_id, mileage, rock_mass_classification FROM {section_table} WHERE department_id = :department_id"
        section_result = db.execute(text(section_sql), {"department_id": department_id})
        section_map = {row[0]: {"mileage": row[1], "rock_mass_classification": row[2]} for row in section_result.fetchall()}

        if not section_map:
            return [], 0

        section_ids = list(section_map.keys())
        section_placeholders = ",".join([f":sid_{i}" for i in range(len(section_ids))])
        params = {f"sid_{i}": sid for i, sid in enumerate(section_ids)}

        # Checkpoints under those sections.
        checkpoint_sql = f"SELECT point_id, name, section_id FROM {checkpoint_table} WHERE section_id IN ({section_placeholders})"
        checkpoint_result = db.execute(text(checkpoint_sql), params)
        checkpoint_map = {row[0]: {"name": row[1], "section_id": row[2]} for row in checkpoint_result.fetchall()}

        if not checkpoint_map:
            return [], 0

        point_ids = list(checkpoint_map.keys())
        point_placeholders = ",".join([f":pid_{i}" for i in range(len(point_ids))])
        # Rebind params: only the point-id placeholders are needed from here on.
        params = {f"pid_{i}": pid for i, pid in enumerate(point_ids)}

        # Optional monitoring-time range filter.
        time_conditions = []
        if monitoring_time_start:
            time_conditions.append("monitoring_time >= :time_start")
            params["time_start"] = monitoring_time_start
        if monitoring_time_end:
            time_conditions.append("monitoring_time <= :time_end")
            params["time_end"] = monitoring_time_end

        time_clause = " AND " + " AND ".join(time_conditions) if time_conditions else ""

        # Total matching rows; scalar() may yield None — normalize to 0.
        count_sql = f"SELECT COUNT(*) FROM {measurement_table} WHERE point_id IN ({point_placeholders}){time_clause}"
        total = db.execute(text(count_sql), params).scalar() or 0

        # Paged data query.
        offset = (page - 1) * page_size
        params["limit"] = page_size
        params["offset"] = offset

        data_sql = f"SELECT * FROM {measurement_table} WHERE point_id IN ({point_placeholders}){time_clause} ORDER BY monitoring_time DESC LIMIT :limit OFFSET :offset"
        result = db.execute(text(data_sql), params)

        # Enrich each row with checkpoint and section attributes.
        items = []
        for row in result.fetchall():
            item = dict(row._mapping)
            checkpoint_info = checkpoint_map.get(item.get("point_id"), {})
            item["point_name"] = checkpoint_info.get("name")

            section_id = checkpoint_info.get("section_id")
            section_info = section_map.get(section_id, {})
            item["section_mileage"] = section_info.get("mileage")
            item["rock_mass_classification"] = section_info.get("rock_mass_classification")
            items.append(item)

        return items, total
|
||||
Reference in New Issue
Block a user