初始化
This commit is contained in:
5
app/servives/__init__.py
Normal file
5
app/servives/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from .table_manager import TableManager
|
||||
from .work_area_service import WorkAreaService
|
||||
from .section_data_service import SectionDataService
|
||||
from .checkpoint_service import CheckpointService
|
||||
from .measurement_data_service import MeasurementDataService
|
||||
186
app/servives/checkpoint_service.py
Normal file
186
app/servives/checkpoint_service.py
Normal file
@@ -0,0 +1,186 @@
|
||||
"""观测点数据服务"""
|
||||
from typing import List, Dict, Tuple
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.logging_config import get_logger
|
||||
from app.models.checkpoint import Checkpoint
|
||||
from app.models.section_data import SectionData
|
||||
from app.schemas.checkpoint import CheckpointCreate, CheckpointQuery
|
||||
from app.schemas.common import BatchImportResponse
|
||||
from .table_manager import TableManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
class CheckpointService:
    """Service layer for checkpoint (observation point) rows.

    All data lives in per-account tables named by ``Checkpoint.get_table_name``;
    every method first ensures the backing table exists via ``TableManager``.
    """

    @staticmethod
    def batch_import(db: Session, account_id: int, data: List[CheckpointCreate]) -> BatchImportResponse:
        """Bulk-insert checkpoint rows, skipping point_ids that already exist.

        De-duplicates both against the table and within the incoming batch.
        Returns a BatchImportResponse summarizing inserted/skipped counts.
        """
        table_name = Checkpoint.get_table_name(account_id)

        if not TableManager.ensure_table_exists(db, "checkpoint", account_id):
            return BatchImportResponse(
                success=False, total=len(data), inserted=0, skipped=0,
                message="创建表失败"
            )

        # Look up which of the incoming point_ids are already stored.
        incoming_ids = [rec.point_id for rec in data if rec.point_id]
        existing_ids = set()
        if incoming_ids:
            placeholders = ",".join(f":id_{n}" for n in range(len(incoming_ids)))
            lookup = {f"id_{n}": pid for n, pid in enumerate(incoming_ids)}
            rows = db.execute(
                text(f"SELECT point_id FROM {table_name} WHERE point_id IN ({placeholders})"),
                lookup,
            )
            existing_ids = {r[0] for r in rows.fetchall()}

        to_insert: List[CheckpointCreate] = []
        skipped_ids = []
        for rec in data:
            if rec.point_id in existing_ids:
                skipped_ids.append(rec.point_id)
            else:
                # Adding to the set also suppresses duplicates within this batch.
                existing_ids.add(rec.point_id)
                to_insert.append(rec)

        if to_insert:
            try:
                value_rows = []
                params = {}
                for n, rec in enumerate(to_insert):
                    value_rows.append(
                        f"(:name_{n}, :burial_date_{n}, :objstate_{n}, :monitoring_type_{n}, "
                        f":period_number_{n}, :first_time_{n}, :manufacturer_{n}, "
                        f":point_code_{n}, :point_id_{n}, :section_id_{n})"
                    )
                    params[f"name_{n}"] = rec.name
                    params[f"burial_date_{n}"] = rec.burial_date
                    params[f"objstate_{n}"] = rec.objstate
                    params[f"monitoring_type_{n}"] = rec.monitoring_type
                    params[f"period_number_{n}"] = rec.period_number
                    params[f"first_time_{n}"] = rec.first_time
                    params[f"manufacturer_{n}"] = rec.manufacturer
                    params[f"point_code_{n}"] = rec.point_code
                    params[f"point_id_{n}"] = rec.point_id
                    params[f"section_id_{n}"] = rec.section_id

                sql = f"""INSERT INTO {table_name}
                    (name, burial_date, objstate, monitoring_type, period_number,
                     first_time, manufacturer, point_code, point_id, section_id)
                    VALUES {','.join(value_rows)}"""
                db.execute(text(sql), params)
                db.commit()
                logger.info(f"观测点数据导入成功: account_id={account_id}, 插入={len(to_insert)}, 跳过={len(skipped_ids)}")
            except Exception as e:
                db.rollback()
                logger.error(f"观测点数据导入失败: {e}")
                return BatchImportResponse(
                    success=False, total=len(data), inserted=0, skipped=len(skipped_ids),
                    skipped_ids=skipped_ids, message=f"插入失败: {str(e)}"
                )

        return BatchImportResponse(
            success=True, total=len(data), inserted=len(to_insert), skipped=len(skipped_ids),
            skipped_ids=skipped_ids, message="导入成功"
        )

    @staticmethod
    def query(db: Session, params: CheckpointQuery) -> Tuple[List[Dict], int]:
        """Paged query of checkpoint rows; returns (items, total)."""
        table_name = Checkpoint.get_table_name(params.account_id)

        if not TableManager.ensure_table_exists(db, "checkpoint", params.account_id):
            return [], 0

        # Optional filters: (SQL clause, bind key, bind value); falsy values are skipped.
        candidate_filters = [
            ("section_id = :section_id", "section_id", params.section_id),
            ("point_id = :point_id", "point_id", params.point_id),
            ("name LIKE :name", "name", f"%{params.name}%" if params.name else None),
        ]
        conditions = []
        query_params: Dict = {}
        for clause, key, value in candidate_filters:
            if value:
                conditions.append(clause)
                query_params[key] = value

        where_clause = " AND ".join(conditions) if conditions else "1=1"

        total = db.execute(
            text(f"SELECT COUNT(*) FROM {table_name} WHERE {where_clause}"),
            query_params,
        ).scalar()

        query_params["limit"] = params.page_size
        query_params["offset"] = (params.page - 1) * params.page_size

        rows = db.execute(
            text(f"SELECT * FROM {table_name} WHERE {where_clause} LIMIT :limit OFFSET :offset"),
            query_params,
        )
        return [dict(r._mapping) for r in rows.fetchall()], total

    @staticmethod
    def query_by_department(db: Session, account_id: int, department_id: str,
                            page: int = 1, page_size: int = 20) -> Tuple[List[Dict], int]:
        """Paged checkpoint query scoped to a department, enriched with section info.

        Resolves the department's sections first, then pages over checkpoints
        whose section_id belongs to that set; each item gains ``section_mileage``
        and ``rock_mass_classification`` from its section.
        """
        checkpoint_table = Checkpoint.get_table_name(account_id)
        section_table = SectionData.get_table_name(account_id)

        for table_type in ("checkpoint", "section_data"):
            if not TableManager.ensure_table_exists(db, table_type, account_id):
                return [], 0

        # Map section_id -> section attributes for this department.
        section_sql = f"SELECT section_id, mileage, rock_mass_classification FROM {section_table} WHERE department_id = :department_id"
        section_rows = db.execute(text(section_sql), {"department_id": department_id})
        section_map = {
            r[0]: {"mileage": r[1], "rock_mass_classification": r[2]}
            for r in section_rows.fetchall()
        }
        if not section_map:
            return [], 0

        section_ids = list(section_map)
        placeholders = ",".join(f":sid_{n}" for n in range(len(section_ids)))
        params = {f"sid_{n}": sid for n, sid in enumerate(section_ids)}

        total = db.execute(
            text(f"SELECT COUNT(*) FROM {checkpoint_table} WHERE section_id IN ({placeholders})"),
            params,
        ).scalar()

        params["limit"] = page_size
        params["offset"] = (page - 1) * page_size
        rows = db.execute(
            text(f"SELECT * FROM {checkpoint_table} WHERE section_id IN ({placeholders}) LIMIT :limit OFFSET :offset"),
            params,
        )

        items = []
        for r in rows.fetchall():
            record = dict(r._mapping)
            section_info = section_map.get(record.get("section_id"), {})
            record["section_mileage"] = section_info.get("mileage")
            record["rock_mass_classification"] = section_info.get("rock_mass_classification")
            items.append(record)

        return items, total

    @staticmethod
    def get_by_point_ids(db: Session, account_id: int, point_ids: List[str]) -> Dict[str, Dict]:
        """Fetch {point_id: {"name", "section_id"}} for the given point_ids."""
        if not point_ids:
            return {}

        table_name = Checkpoint.get_table_name(account_id)
        if not TableManager.ensure_table_exists(db, "checkpoint", account_id):
            return {}

        placeholders = ",".join(f":id_{n}" for n in range(len(point_ids)))
        params = {f"id_{n}": pid for n, pid in enumerate(point_ids)}

        rows = db.execute(
            text(f"SELECT point_id, name, section_id FROM {table_name} WHERE point_id IN ({placeholders})"),
            params,
        )
        return {r[0]: {"name": r[1], "section_id": r[2]} for r in rows.fetchall()}
|
||||
176
app/servives/measurement_data_service.py
Normal file
176
app/servives/measurement_data_service.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""量测数据服务"""
|
||||
from typing import List, Dict, Tuple
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.logging_config import get_logger
|
||||
from app.models.measurement_data import MeasurementData
|
||||
from app.models.checkpoint import Checkpoint
|
||||
from app.models.section_data import SectionData
|
||||
from app.schemas.measurement_data import MeasurementDataCreate, MeasurementDataQuery
|
||||
from app.schemas.common import BatchImportResponse
|
||||
from .table_manager import TableManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
class MeasurementDataService:
    """Service layer for measurement rows stored in per-account tables."""

    @staticmethod
    def batch_import(db: Session, account_id: int, data: List[MeasurementDataCreate]) -> BatchImportResponse:
        """Bulk-insert measurement rows.

        Unlike checkpoints/sections, measurements are never de-duplicated:
        every incoming row is inserted as-is.
        """
        table_name = MeasurementData.get_table_name(account_id)

        if not TableManager.ensure_table_exists(db, "measurement_data", account_id):
            return BatchImportResponse(
                success=False, total=len(data), inserted=0, skipped=0,
                message="创建表失败"
            )

        if data:
            try:
                value_rows = []
                params = {}
                for n, rec in enumerate(data):
                    value_rows.append(
                        f"(:monitoring_time_{n}, :upload_time_{n}, :monitoring_value_{n}, "
                        f":deformation_value_{n}, :time_interval_{n}, :cumulative_deformation_{n}, "
                        f":deformation_rate_{n}, :distance_working_face_{n}, :point_id_{n})"
                    )
                    params[f"monitoring_time_{n}"] = rec.monitoring_time
                    params[f"upload_time_{n}"] = rec.upload_time
                    params[f"monitoring_value_{n}"] = rec.monitoring_value
                    params[f"deformation_value_{n}"] = rec.deformation_value
                    params[f"time_interval_{n}"] = rec.time_interval
                    params[f"cumulative_deformation_{n}"] = rec.cumulative_deformation
                    params[f"deformation_rate_{n}"] = rec.deformation_rate
                    params[f"distance_working_face_{n}"] = rec.distance_working_face
                    params[f"point_id_{n}"] = rec.point_id

                sql = f"""INSERT INTO {table_name}
                    (monitoring_time, upload_time, monitoring_value, deformation_value,
                     time_interval, cumulative_deformation, deformation_rate, distance_working_face, point_id)
                    VALUES {','.join(value_rows)}"""
                db.execute(text(sql), params)
                db.commit()
                logger.info(f"量测数据导入成功: account_id={account_id}, 插入={len(data)}")
            except Exception as e:
                db.rollback()
                logger.error(f"量测数据导入失败: {e}")
                return BatchImportResponse(
                    success=False, total=len(data), inserted=0, skipped=0,
                    message=f"插入失败: {str(e)}"
                )

        return BatchImportResponse(
            success=True, total=len(data), inserted=len(data), skipped=0,
            message="导入成功"
        )

    @staticmethod
    def query(db: Session, params: MeasurementDataQuery) -> Tuple[List[Dict], int]:
        """Paged query of measurement rows ordered by monitoring_time DESC."""
        table_name = MeasurementData.get_table_name(params.account_id)

        if not TableManager.ensure_table_exists(db, "measurement_data", params.account_id):
            return [], 0

        # Optional filters: (SQL clause, bind key, bind value); falsy values are skipped.
        candidate_filters = [
            ("point_id = :point_id", "point_id", params.point_id),
            ("monitoring_time >= :monitoring_time_start", "monitoring_time_start", params.monitoring_time_start),
            ("monitoring_time <= :monitoring_time_end", "monitoring_time_end", params.monitoring_time_end),
        ]
        conditions = []
        query_params: Dict = {}
        for clause, key, value in candidate_filters:
            if value:
                conditions.append(clause)
                query_params[key] = value

        where_clause = " AND ".join(conditions) if conditions else "1=1"

        total = db.execute(
            text(f"SELECT COUNT(*) FROM {table_name} WHERE {where_clause}"),
            query_params,
        ).scalar()

        query_params["limit"] = params.page_size
        query_params["offset"] = (params.page - 1) * params.page_size

        rows = db.execute(
            text(f"SELECT * FROM {table_name} WHERE {where_clause} ORDER BY monitoring_time DESC LIMIT :limit OFFSET :offset"),
            query_params,
        )
        return [dict(r._mapping) for r in rows.fetchall()], total

    @staticmethod
    def query_by_department(db: Session, account_id: int, department_id: str,
                            page: int = 1, page_size: int = 20,
                            monitoring_time_start=None, monitoring_time_end=None) -> Tuple[List[Dict], int]:
        """Paged measurement query scoped to a department.

        Resolves department -> sections -> checkpoints, then pages over
        measurements for those checkpoints (newest first, optional time
        window). Each item is enriched with the point name plus the owning
        section's mileage and rock_mass_classification.
        """
        measurement_table = MeasurementData.get_table_name(account_id)
        checkpoint_table = Checkpoint.get_table_name(account_id)
        section_table = SectionData.get_table_name(account_id)

        for table_type in ["measurement_data", "checkpoint", "section_data"]:
            if not TableManager.ensure_table_exists(db, table_type, account_id):
                return [], 0

        # Step 1: sections belonging to the department.
        section_sql = f"SELECT section_id, mileage, rock_mass_classification FROM {section_table} WHERE department_id = :department_id"
        section_rows = db.execute(text(section_sql), {"department_id": department_id})
        section_map = {
            r[0]: {"mileage": r[1], "rock_mass_classification": r[2]}
            for r in section_rows.fetchall()
        }
        if not section_map:
            return [], 0

        section_ids = list(section_map)
        section_placeholders = ",".join(f":sid_{n}" for n in range(len(section_ids)))
        params = {f"sid_{n}": sid for n, sid in enumerate(section_ids)}

        # Step 2: checkpoints belonging to those sections.
        checkpoint_sql = f"SELECT point_id, name, section_id FROM {checkpoint_table} WHERE section_id IN ({section_placeholders})"
        checkpoint_rows = db.execute(text(checkpoint_sql), params)
        checkpoint_map = {
            r[0]: {"name": r[1], "section_id": r[2]}
            for r in checkpoint_rows.fetchall()
        }
        if not checkpoint_map:
            return [], 0

        point_ids = list(checkpoint_map)
        point_placeholders = ",".join(f":pid_{n}" for n in range(len(point_ids)))
        params = {f"pid_{n}": pid for n, pid in enumerate(point_ids)}

        # Step 3: optional time window on monitoring_time.
        time_conditions = []
        if monitoring_time_start:
            time_conditions.append("monitoring_time >= :time_start")
            params["time_start"] = monitoring_time_start
        if monitoring_time_end:
            time_conditions.append("monitoring_time <= :time_end")
            params["time_end"] = monitoring_time_end
        time_clause = " AND " + " AND ".join(time_conditions) if time_conditions else ""

        total = db.execute(
            text(f"SELECT COUNT(*) FROM {measurement_table} WHERE point_id IN ({point_placeholders}){time_clause}"),
            params,
        ).scalar()

        params["limit"] = page_size
        params["offset"] = (page - 1) * page_size
        rows = db.execute(
            text(f"SELECT * FROM {measurement_table} WHERE point_id IN ({point_placeholders}){time_clause} ORDER BY monitoring_time DESC LIMIT :limit OFFSET :offset"),
            params,
        )

        items = []
        for r in rows.fetchall():
            record = dict(r._mapping)
            checkpoint_info = checkpoint_map.get(record.get("point_id"), {})
            record["point_name"] = checkpoint_info.get("name")

            section_info = section_map.get(checkpoint_info.get("section_id"), {})
            record["section_mileage"] = section_info.get("mileage")
            record["rock_mass_classification"] = section_info.get("rock_mass_classification")
            items.append(record)

        return items, total
|
||||
144
app/servives/section_data_service.py
Normal file
144
app/servives/section_data_service.py
Normal file
@@ -0,0 +1,144 @@
|
||||
"""断面数据服务"""
|
||||
from typing import List, Dict, Tuple
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.logging_config import get_logger
|
||||
from app.models.section_data import SectionData
|
||||
from app.schemas.section_data import SectionDataCreate, SectionDataQuery
|
||||
from app.schemas.common import BatchImportResponse
|
||||
from .table_manager import TableManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
class SectionDataService:
    """Service layer for section (断面) rows stored in per-account tables."""

    @staticmethod
    def batch_import(db: Session, account_id: int, data: List[SectionDataCreate]) -> BatchImportResponse:
        """Bulk-insert section rows, skipping section_ids that already exist.

        De-duplicates both against the table and within the incoming batch.
        Returns a BatchImportResponse summarizing inserted/skipped counts.
        """
        table_name = SectionData.get_table_name(account_id)

        if not TableManager.ensure_table_exists(db, "section_data", account_id):
            return BatchImportResponse(
                success=False, total=len(data), inserted=0, skipped=0,
                message="创建表失败"
            )

        # Look up which of the incoming section_ids are already stored.
        section_ids = [item.section_id for item in data if item.section_id]
        existing_ids = set()
        if section_ids:
            placeholders = ",".join([f":id_{i}" for i in range(len(section_ids))])
            params = {f"id_{i}": sid for i, sid in enumerate(section_ids)}
            result = db.execute(
                text(f"SELECT section_id FROM {table_name} WHERE section_id IN ({placeholders})"),
                params
            )
            existing_ids = {row[0] for row in result.fetchall()}

        to_insert = []
        skipped_ids = []
        for item in data:
            if item.section_id in existing_ids:
                skipped_ids.append(item.section_id)
            else:
                to_insert.append(item)
                # Adding to the set also suppresses duplicates within this batch.
                existing_ids.add(item.section_id)

        if to_insert:
            try:
                values = []
                params = {}
                for i, item in enumerate(to_insert):
                    values.append(f"(:project_{i}, :mileage_{i}, :name_{i}, :number_{i}, :status_{i}, "
                                  f":excavation_method_{i}, :rock_mass_classification_{i}, :width_{i}, "
                                  f":U0_{i}, :remarks_{i}, :department_id_{i}, :section_id_{i})")
                    params[f"project_{i}"] = item.project
                    params[f"mileage_{i}"] = item.mileage
                    params[f"name_{i}"] = item.name
                    params[f"number_{i}"] = item.number
                    params[f"status_{i}"] = item.status
                    params[f"excavation_method_{i}"] = item.excavation_method
                    params[f"rock_mass_classification_{i}"] = item.rock_mass_classification
                    params[f"width_{i}"] = item.width
                    # BUG FIX: the original used `if item.U0` (truthiness), which
                    # silently turned a legitimate U0 of 0 / Decimal("0") into NULL.
                    # Only a missing value (None) should map to NULL.
                    params[f"U0_{i}"] = float(item.U0) if item.U0 is not None else None
                    params[f"remarks_{i}"] = item.remarks
                    params[f"department_id_{i}"] = item.department_id
                    params[f"section_id_{i}"] = item.section_id

                sql = f"""INSERT INTO {table_name}
                    (project, mileage, name, number, status, excavation_method,
                     rock_mass_classification, width, U0, remarks, department_id, section_id)
                    VALUES {','.join(values)}"""
                db.execute(text(sql), params)
                db.commit()
                logger.info(f"断面数据导入成功: account_id={account_id}, 插入={len(to_insert)}, 跳过={len(skipped_ids)}")
            except Exception as e:
                db.rollback()
                logger.error(f"断面数据导入失败: {e}")
                return BatchImportResponse(
                    success=False, total=len(data), inserted=0, skipped=len(skipped_ids),
                    skipped_ids=skipped_ids, message=f"插入失败: {str(e)}"
                )

        return BatchImportResponse(
            success=True, total=len(data), inserted=len(to_insert), skipped=len(skipped_ids),
            skipped_ids=skipped_ids, message="导入成功"
        )

    @staticmethod
    def query(db: Session, params: SectionDataQuery) -> Tuple[List[Dict], int]:
        """Paged query of section rows; returns (items, total)."""
        table_name = SectionData.get_table_name(params.account_id)

        if not TableManager.ensure_table_exists(db, "section_data", params.account_id):
            return [], 0

        conditions = []
        query_params = {}

        if params.department_id:
            conditions.append("department_id = :department_id")
            query_params["department_id"] = params.department_id
        if params.section_id:
            conditions.append("section_id = :section_id")
            query_params["section_id"] = params.section_id
        if params.name:
            conditions.append("name LIKE :name")
            query_params["name"] = f"%{params.name}%"
        if params.number:
            conditions.append("number = :number")
            query_params["number"] = params.number

        where_clause = " AND ".join(conditions) if conditions else "1=1"

        count_sql = f"SELECT COUNT(*) FROM {table_name} WHERE {where_clause}"
        total = db.execute(text(count_sql), query_params).scalar()

        offset = (params.page - 1) * params.page_size
        query_params["limit"] = params.page_size
        query_params["offset"] = offset

        data_sql = f"SELECT * FROM {table_name} WHERE {where_clause} LIMIT :limit OFFSET :offset"
        result = db.execute(text(data_sql), query_params)
        items = [dict(row._mapping) for row in result.fetchall()]

        return items, total

    @staticmethod
    def get_by_section_ids(db: Session, account_id: int, section_ids: List[str]) -> Dict[str, Dict]:
        """Fetch {section_id: {"mileage", "rock_mass_classification"}} for the given ids."""
        if not section_ids:
            return {}

        table_name = SectionData.get_table_name(account_id)
        if not TableManager.ensure_table_exists(db, "section_data", account_id):
            return {}

        placeholders = ",".join([f":id_{i}" for i in range(len(section_ids))])
        params = {f"id_{i}": sid for i, sid in enumerate(section_ids)}

        sql = f"SELECT section_id, mileage, rock_mass_classification FROM {table_name} WHERE section_id IN ({placeholders})"
        result = db.execute(text(sql), params)

        return {row[0]: {"mileage": row[1], "rock_mass_classification": row[2]} for row in result.fetchall()}
|
||||
87
app/servives/table_manager.py
Normal file
87
app/servives/table_manager.py
Normal file
@@ -0,0 +1,87 @@
|
||||
"""
|
||||
动态表管理器
|
||||
处理分表创建,避免事务冲突
|
||||
"""
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.logging_config import get_logger
|
||||
from app.models.work_area import WorkArea
|
||||
from app.models.section_data import SectionData
|
||||
from app.models.checkpoint import Checkpoint
|
||||
from app.models.measurement_data import MeasurementData
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
class TableManager:
    """Dynamic per-account table manager.

    Creates the per-account tables on demand, using an independent connection
    for DDL so table creation does not interfere with the caller's transaction.
    NOTE(review): the class-level cache is not synchronized; under multiple
    threads the worst case is a redundant existence check, which is benign.
    """

    # Cache of "{table_type}_{account_id}" keys already verified/created.
    _created_tables = set()

    @classmethod
    def ensure_table_exists(cls, db: Session, table_type: str, account_id: int) -> bool:
        """Ensure the table for (table_type, account_id) exists; create it if not.

        Returns True when the table exists (or was created), False on any failure.
        """
        table_name = cls._get_table_name(table_type, account_id)
        cache_key = f"{table_type}_{account_id}"

        if cache_key in cls._created_tables:
            return True

        try:
            # BUG FIX: `SHOW TABLES LIKE` treats '_' and '%' as wildcards, and
            # every generated name contains underscores (e.g. "checkpoint_1"),
            # so the unescaped pattern could match a similarly named table and
            # wrongly skip creation. Escape LIKE metacharacters first.
            like_pattern = (
                table_name.replace("\\", "\\\\").replace("_", "\\_").replace("%", "\\%")
            )
            result = db.execute(text(f"SHOW TABLES LIKE '{like_pattern}'"))
            if result.fetchone():
                cls._created_tables.add(cache_key)
                return True

            create_sql = cls._get_create_sql(table_type, account_id)
            if not create_sql:
                logger.error(f"未知的表类型: {table_type}")
                return False

            # Run the DDL on a fresh connection so it cannot clash with the
            # session's in-flight transaction.
            connection = db.get_bind().connect()
            try:
                connection.execute(text(create_sql))
                connection.commit()
                cls._created_tables.add(cache_key)
                logger.info(f"动态创建表成功: {table_name}")
                return True
            finally:
                connection.close()

        except Exception as e:
            logger.error(f"创建表失败 {table_name}: {e}")
            return False

    @classmethod
    def _get_table_name(cls, table_type: str, account_id: int) -> str:
        """Resolve the concrete table name for a table type, or "" if unknown."""
        table_map = {
            "work_area": WorkArea.get_table_name,
            "section_data": SectionData.get_table_name,
            "checkpoint": Checkpoint.get_table_name,
            "measurement_data": MeasurementData.get_table_name,
        }
        return table_map.get(table_type, lambda x: "")(account_id)

    @classmethod
    def _get_create_sql(cls, table_type: str, account_id: int) -> str:
        """Resolve the CREATE TABLE statement for a table type, or "" if unknown."""
        sql_map = {
            "work_area": WorkArea.get_create_sql,
            "section_data": SectionData.get_create_sql,
            "checkpoint": Checkpoint.get_create_sql,
            "measurement_data": MeasurementData.get_create_sql,
        }
        return sql_map.get(table_type, lambda x: "")(account_id)

    @classmethod
    def clear_cache(cls):
        """Drop all cached table-existence entries (e.g. for tests)."""
        cls._created_tables.clear()
|
||||
120
app/servives/work_area_service.py
Normal file
120
app/servives/work_area_service.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""工区数据服务"""
|
||||
from typing import List, Dict, Any, Tuple
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.orm import Session
|
||||
from app.core.logging_config import get_logger
|
||||
from app.models.work_area import WorkArea
|
||||
from app.schemas.work_area import WorkAreaCreate, WorkAreaQuery
|
||||
from app.schemas.common import BatchImportResponse
|
||||
from .table_manager import TableManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
class WorkAreaService:
    """Service layer for work-area rows stored in per-account tables."""

    @staticmethod
    def batch_import(db: Session, account_id: int, data: List[WorkAreaCreate]) -> BatchImportResponse:
        """Bulk-insert work-area rows, skipping department_ids that already exist.

        De-duplicates both against the table and within the incoming batch.
        """
        table_name = WorkArea.get_table_name(account_id)

        if not TableManager.ensure_table_exists(db, "work_area", account_id):
            return BatchImportResponse(
                success=False, total=len(data), inserted=0, skipped=0,
                message="创建表失败"
            )

        # Look up which of the incoming department_ids are already stored.
        incoming_ids = [rec.department_id for rec in data if rec.department_id]
        existing_ids = set()
        if incoming_ids:
            placeholders = ",".join(f":id_{n}" for n in range(len(incoming_ids)))
            lookup = {f"id_{n}": did for n, did in enumerate(incoming_ids)}
            rows = db.execute(
                text(f"SELECT department_id FROM {table_name} WHERE department_id IN ({placeholders})"),
                lookup,
            )
            existing_ids = {r[0] for r in rows.fetchall()}

        to_insert: List[WorkAreaCreate] = []
        skipped_ids = []
        for rec in data:
            if rec.department_id in existing_ids:
                skipped_ids.append(rec.department_id)
            else:
                # Adding to the set also suppresses duplicates within this batch.
                existing_ids.add(rec.department_id)
                to_insert.append(rec)

        if to_insert:
            try:
                value_rows = []
                params = {}
                for n, rec in enumerate(to_insert):
                    value_rows.append(f"(:department_id_{n}, :parent_id_{n}, :type_{n}, :name_{n})")
                    params[f"department_id_{n}"] = rec.department_id
                    params[f"parent_id_{n}"] = rec.parent_id
                    params[f"type_{n}"] = rec.type
                    params[f"name_{n}"] = rec.name

                sql = f"INSERT INTO {table_name} (department_id, parent_id, type, name) VALUES {','.join(value_rows)}"
                db.execute(text(sql), params)
                db.commit()
                logger.info(f"工区数据导入成功: account_id={account_id}, 插入={len(to_insert)}, 跳过={len(skipped_ids)}")
            except Exception as e:
                db.rollback()
                logger.error(f"工区数据导入失败: {e}")
                return BatchImportResponse(
                    success=False, total=len(data), inserted=0, skipped=len(skipped_ids),
                    skipped_ids=skipped_ids, message=f"插入失败: {str(e)}"
                )

        return BatchImportResponse(
            success=True, total=len(data), inserted=len(to_insert), skipped=len(skipped_ids),
            skipped_ids=skipped_ids, message="导入成功"
        )

    @staticmethod
    def query(db: Session, params: WorkAreaQuery) -> Tuple[List[Dict], int]:
        """Paged query of work-area rows; returns (items, total)."""
        table_name = WorkArea.get_table_name(params.account_id)

        if not TableManager.ensure_table_exists(db, "work_area", params.account_id):
            return [], 0

        # Optional filters: (SQL clause, bind key, bind value); falsy values are skipped.
        candidate_filters = [
            ("department_id = :department_id", "department_id", params.department_id),
            ("parent_id = :parent_id", "parent_id", params.parent_id),
            ("type = :type", "type", params.type),
            ("name LIKE :name", "name", f"%{params.name}%" if params.name else None),
        ]
        conditions = []
        query_params: Dict = {}
        for clause, key, value in candidate_filters:
            if value:
                conditions.append(clause)
                query_params[key] = value

        where_clause = " AND ".join(conditions) if conditions else "1=1"

        total = db.execute(
            text(f"SELECT COUNT(*) FROM {table_name} WHERE {where_clause}"),
            query_params,
        ).scalar()

        query_params["limit"] = params.page_size
        query_params["offset"] = (params.page - 1) * params.page_size

        rows = db.execute(
            text(f"SELECT * FROM {table_name} WHERE {where_clause} LIMIT :limit OFFSET :offset"),
            query_params,
        )
        return [dict(r._mapping) for r in rows.fetchall()], total
|
||||
Reference in New Issue
Block a user