Files
railway_cloud/app/services/original_data.py
2025-10-16 10:08:54 +08:00

122 lines
5.0 KiB
Python

import logging
from typing import List, Optional, Dict, Any

from sqlalchemy.orm import Session

from ..models.original_data import OriginalData
from ..models.settlement_data import SettlementData
from .base import BaseService
class OriginalDataService(BaseService[OriginalData]):
    """Service layer for original survey readings (原始数据).

    Provides lookups on the columns used by the API layer and a
    transactional batch import with one automatic retry.
    """

    def __init__(self):
        super().__init__(OriginalData)

    def get_by_nyid(self, db: Session, nyid: str) -> List[OriginalData]:
        """Return all original-data rows for the given period id (期数ID)."""
        return self.get_by_field(db, "NYID", nyid)

    def get_by_bfpcode(self, db: Session, bfpcode: str) -> List[OriginalData]:
        """Return all rows for the given fore/back sight point name."""
        return self.get_by_field(db, "bfpcode", bfpcode)

    def search_original_data(self, db: Session,
                             id: Optional[int] = None,
                             bfpcode: Optional[str] = None,
                             bffb: Optional[str] = None,
                             nyid: Optional[str] = None,
                             bfpl: Optional[str] = None) -> List[OriginalData]:
        """Search original data by any combination of optional filters.

        Parameters left as ``None`` are ignored; the rest are combined by
        ``search_by_conditions``.  Note that the ``nyid`` keyword maps to
        the upper-case ``NYID`` column.
        """
        candidates = {
            "id": id,
            "bfpcode": bfpcode,
            "bffb": bffb,
            "NYID": nyid,
            "bfpl": bfpl,
        }
        conditions = {column: value for column, value in candidates.items()
                      if value is not None}
        return self.search_by_conditions(db, conditions)

    def _check_settlement_exists(self, db: Session, nyid: str) -> bool:
        """Return True if a settlement row exists for the given period id."""
        settlement = db.query(SettlementData).filter(
            SettlementData.NYID == nyid).first()
        return settlement is not None

    def batch_import_original_data(self, db: Session, data: List) -> Dict[str, Any]:
        """Batch-insert original-data rows (no duplicate checking).

        All rows are inserted in a single transaction; the first failing row
        aborts the batch (all-or-nothing).  On failure the transaction is
        rolled back and the whole batch is retried once.

        Returns a summary dict with a success flag, a message, total /
        success / failed counts and the failed items with error messages.
        """
        logger = logging.getLogger(__name__)
        total_count = len(data)
        success_count = 0
        failed_count = 0
        failed_items: List[Dict[str, Any]] = []
        for attempt in range(2):  # at most one retry
            try:
                # BUGFIX: the original called db.begin() here, which raises
                # InvalidRequestError when the Session has already (auto-)begun
                # a transaction.  SQLAlchemy sessions begin implicitly, so no
                # explicit begin is needed; commit()/rollback() below manage
                # the transaction boundary.
                success_count = 0
                failed_count = 0
                failed_items = []
                for item_data in data:
                    try:
                        # The referenced settlement period must already exist.
                        nyid = item_data.get('NYID')
                        if not self._check_settlement_exists(db, nyid):
                            logger.error(f"Settlement {nyid} not found")
                            raise Exception(f"Settlement {nyid} not found")
                        original_data = OriginalData(
                            bfpcode=item_data.get('bfpcode'),
                            mtime=item_data.get('mtime'),
                            bffb=item_data.get('bffb'),
                            bfpl=item_data.get('bfpl'),
                            bfpvalue=item_data.get('bfpvalue'),
                            NYID=nyid,
                            sort=item_data.get('sort')
                        )
                        db.add(original_data)
                        logger.info(f"Created original data: {item_data.get('bfpcode')}-{nyid}")
                        success_count += 1
                    except Exception as e:
                        failed_count += 1
                        failed_items.append({
                            'data': item_data,
                            'error': str(e)
                        })
                        logger.error(
                            f"Failed to process original data "
                            f"{item_data.get('bfpcode')}-{item_data.get('NYID')}: {str(e)}")
                        # Re-raise so the whole batch rolls back (all-or-nothing).
                        raise
                db.commit()
                logger.info(f"Batch import original data completed. "
                            f"Success: {success_count}, Failed: {failed_count}")
                break
            except Exception as e:
                db.rollback()
                logger.warning(f"Batch import attempt {attempt + 1} failed: {str(e)}")
                if attempt == 1:  # final retry also failed
                    logger.error("Batch import original data failed after retries")
                    return {
                        'success': False,
                        'message': f'批量导入失败: {str(e)}',
                        'total_count': total_count,
                        'success_count': 0,
                        'failed_count': total_count,
                        'failed_items': failed_items
                    }
        return {
            'success': failed_count == 0,
            'message': '批量导入完成' if failed_count == 0 else '部分导入失败',
            'total_count': total_count,
            'success_count': success_count,
            'failed_count': failed_count,
            'failed_items': failed_items
        }