"""
|
|
TS Pini (Total Station) data loader - Refactored version with async support.
|
|
|
|
This script processes Total Station survey data from multiple instrument types
|
|
(Leica, Trimble S7, S9) and manages complex monitoring with multi-level alarms.
|
|
|
|
**STATUS**: Essential refactoring - Base structure with coordinate transformations.
|
|
**TODO**: Complete alarm management, threshold checking, and additional monitoring.
|
|
|
|
Replaces the legacy TS_PiniScript.py (2,587 lines) with a modular, maintainable architecture.
|
|
"""

import asyncio
import logging
import sys
from datetime import datetime
from enum import IntEnum
from pathlib import Path

import utm
from pyproj import Transformer

from refactory_scripts.config import DatabaseConfig
from refactory_scripts.utils import execute_query, get_db_connection

logger = logging.getLogger(__name__)


class StationType(IntEnum):
    """Total Station instrument types."""

    LEICA = 1
    TRIMBLE_S7 = 4
    TRIMBLE_S9 = 7
    TRIMBLE_S7_INVERTED = 10  # x-y coordinates inverted


class CoordinateSystem(IntEnum):
    """Coordinate system types for transformations."""

    CH1903 = 6  # Swiss coordinate system (old)
    UTM = 7  # Universal Transverse Mercator
    CH1903_PLUS = 10  # Swiss coordinate system LV95 (new)
    LAT_LON = 0  # Default: already in lat/lon


class TSPiniLoader:
    """
    Loads Total Station Pini survey data with coordinate transformations and alarm management.

    This loader handles:
    - Multiple station types (Leica, Trimble S7/S9)
    - Coordinate system transformations (CH1903, UTM, lat/lon)
    - Target point (mira) management
    - Multi-level alarm system (TODO: complete implementation)
    - Additional monitoring for railways, walls, trusses (TODO)
    """

    # Folder name mappings for special cases
    FOLDER_MAPPINGS = {
        "[276_208_TS0003]": "TS0003",
        "[Neuchatel_CDP]": "TS7",
        "[TS0006_EP28]": "TS0006_EP28",
        "[TS0007_ChesaArcoiris]": "TS0007_ChesaArcoiris",
        "[TS0006_EP28_3]": "TS0006_EP28_3",
        "[TS0006_EP28_4]": "TS0006_EP28_4",
        "[TS0006_EP28_5]": "TS0006_EP28_5",
        "[TS18800]": "TS18800",
        "[Granges_19 100]": "Granges_19 100",
        "[Granges_19 200]": "Granges_19 200",
        "[Chesa_Arcoiris_2]": "Chesa_Arcoiris_2",
        "[TS0006_EP28_1]": "TS0006_EP28_1",
        "[TS_PS_Petites_Croisettes]": "TS_PS_Petites_Croisettes",
        "[_Chesa_Arcoiris_1]": "_Chesa_Arcoiris_1",
        "[TS_test]": "TS_test",
        "[TS-VIME]": "TS-VIME",
    }
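
    # Illustrative example (hypothetical filename): a CSV named
    # "export_[TS18800]_20240101.csv" resolves to station "TS18800"
    # regardless of its parent folder; see _extract_folder_name below.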

    def __init__(self, db_config: DatabaseConfig):
        """
        Initialize the TS Pini loader.

        Args:
            db_config: Database configuration object
        """
        self.db_config = db_config
        self.conn = None

    async def __aenter__(self):
        """Async context manager entry."""
        self.conn = await get_db_connection(self.db_config.as_dict())
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        if self.conn:
            self.conn.close()

    def _extract_folder_name(self, file_path: Path) -> str:
        """
        Extract and normalize folder name from file path.

        Handles special folder name mappings for specific projects.

        Args:
            file_path: Path to the CSV file

        Returns:
            Normalized folder name
        """
        # Get folder name from path
        folder_name = file_path.parent.name

        # Check for special mappings in filename
        filename = file_path.name
        for pattern, mapped_name in self.FOLDER_MAPPINGS.items():
            if pattern in filename:
                logger.debug(f"Mapped folder: {pattern} -> {mapped_name}")
                return mapped_name

        return folder_name

    async def _get_project_info(self, folder_name: str) -> dict | None:
        """
        Get project information from database based on folder name.

        Args:
            folder_name: Folder/station name

        Returns:
            Dictionary with project info or None if not found
        """
        query = """
            SELECT
                l.id as lavoro_id,
                s.id as site_id,
                st.type_id,
                s.upgeo_sist_coordinate,
                s.upgeo_utmzone,
                s.upgeo_utmhemisphere
            FROM upgeo_st as st
            LEFT JOIN upgeo_lavori as l ON st.lavoro_id = l.id
            LEFT JOIN sites as s ON s.id = l.site_id
            WHERE st.name = %s
        """

        result = await execute_query(self.conn, query, (folder_name,), fetch_one=True)

        if not result:
            logger.error(f"Project not found for folder: {folder_name}")
            return None

        return {
            "lavoro_id": result["lavoro_id"],
            "site_id": result["site_id"],
            "station_type": result["type_id"],
            "coordinate_system": int(result["upgeo_sist_coordinate"]),
            "utm_zone": result["upgeo_utmzone"],
            "utm_hemisphere": result["upgeo_utmhemisphere"] != "S",  # True for North
        }

    def _parse_csv_row(self, row: list[str], station_type: int) -> tuple[str, str, str, str, str]:
        """
        Parse CSV row based on station type.

        Different station types have different column orders.

        Args:
            row: List of CSV values
            station_type: Station type identifier

        Returns:
            Tuple of (mira_name, easting, northing, height, timestamp)
        """
        if station_type == StationType.LEICA:
            # Leica format: name, easting, northing, height, timestamp
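            # Hypothetical sample line, for illustration only:
            #   "P01,2600123.456,1200456.789,432.105,01.02.2024 12:34:56.000"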
            mira_name = row[0]
            easting = row[1]
            northing = row[2]
            height = row[3]
            # Convert timestamp: DD.MM.YYYY HH:MM:SS.fff -> YYYY-MM-DD HH:MM:SS
            timestamp = datetime.strptime(row[4], "%d.%m.%Y %H:%M:%S.%f").strftime("%Y-%m-%d %H:%M:%S")

        elif station_type in (StationType.TRIMBLE_S7, StationType.TRIMBLE_S9):
            # Trimble S7/S9 format: timestamp, name, northing, easting, height
            timestamp = row[0]
            mira_name = row[1]
            northing = row[2]
            easting = row[3]
            height = row[4]

        elif station_type == StationType.TRIMBLE_S7_INVERTED:
            # Trimble S7 inverted: timestamp, name, easting(row[2]), northing(row[3]), height
            timestamp = row[0]
            mira_name = row[1]
            northing = row[3]  # Inverted!
            easting = row[2]  # Inverted!
            height = row[4]

        else:
            raise ValueError(f"Unknown station type: {station_type}")

        return mira_name, easting, northing, height, timestamp

    def _transform_coordinates(
        self, easting: float, northing: float, coord_system: int, utm_zone: str | None = None, utm_hemisphere: bool = True
    ) -> tuple[float, float]:
        """
        Transform coordinates to lat/lon based on coordinate system.

        Args:
            easting: Easting coordinate
            northing: Northing coordinate
            coord_system: Coordinate system type
            utm_zone: UTM zone (required for UTM system)
            utm_hemisphere: True for Northern, False for Southern

        Returns:
            Tuple of (latitude, longitude)
        """
        if coord_system == CoordinateSystem.CH1903:
            # Old Swiss coordinate system transformation
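            # These constants match swisstopo's published approximate formulas
            # for Swiss grid -> WGS84. Note the 2600000/1200000 false origin:
            # the inputs are expected as LV95-style E/N values. lambda_ and phi_
            # come out in units of 10000 seconds of arc, so the "* 100 / 36"
            # below (= 10000/3600) converts them to decimal degrees.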
            y = easting
            x = northing
            y_ = (y - 2600000) / 1000000
            x_ = (x - 1200000) / 1000000

            lambda_ = 2.6779094 + 4.728982 * y_ + 0.791484 * y_ * x_ + 0.1306 * y_ * x_**2 - 0.0436 * y_**3
            phi_ = 16.9023892 + 3.238272 * x_ - 0.270978 * y_**2 - 0.002528 * x_**2 - 0.0447 * y_**2 * x_ - 0.0140 * x_**3

            lat = phi_ * 100 / 36
            lon = lambda_ * 100 / 36

        elif coord_system == CoordinateSystem.UTM:
            # UTM to lat/lon
            if not utm_zone:
                raise ValueError("UTM zone required for UTM coordinate system")

            # utm.to_latlon expects an integer zone number; the configured value
            # may arrive from the database as a string.
            lat, lon = utm.to_latlon(easting, northing, int(utm_zone), northern=utm_hemisphere)

        elif coord_system == CoordinateSystem.CH1903_PLUS:
            # New Swiss coordinate system via pyproj (EPSG:21781 -> EPSG:4326).
            # NOTE: EPSG:21781 is actually CH1903 / LV03; CH1903+ / LV95 proper
            # is EPSG:2056. The legacy transform is preserved here unchanged.
            # With default (authority) axis order, EPSG:4326 output is (lat, lon).
            transformer = Transformer.from_crs("EPSG:21781", "EPSG:4326")
            lat, lon = transformer.transform(easting, northing)

        else:
            # Already in lat/lon
            lon = easting
            lat = northing

        logger.debug(f"Transformed coordinates: ({easting}, {northing}) -> ({lat:.6f}, {lon:.6f})")
        return lat, lon

    async def _get_or_create_mira(self, mira_name: str, lavoro_id: int) -> int | None:
        """
        Get existing mira (target point) ID or create new one if allowed.

        Args:
            mira_name: Name of the target point
            lavoro_id: Project ID

        Returns:
            Mira ID or None if creation not allowed
        """
        # Check if mira exists
        query = """
            SELECT m.id as mira_id, m.name
            FROM upgeo_mire as m
            JOIN upgeo_lavori as l ON m.lavoro_id = l.id
            WHERE m.name = %s AND m.lavoro_id = %s
        """

        result = await execute_query(self.conn, query, (mira_name, lavoro_id), fetch_one=True)

        if result:
            return result["mira_id"]

        # Mira doesn't exist - check if we can create it
        logger.info(f"Mira '{mira_name}' not found, attempting to create...")

        # TODO: Implement mira creation logic
        # This requires checking company limits and updating counters
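        #
        # Minimal sketch of the eventual insert, assuming upgeo_mire only needs
        # (name, lavoro_id) and that the company-limit check has already passed
        # (both are assumptions; the real schema and counter updates live in the
        # legacy TS_PiniScript.py):
        #
        #     insert_query = "INSERT INTO upgeo_mire (name, lavoro_id) VALUES (%s, %s)"
        #     await execute_query(self.conn, insert_query, (mira_name, lavoro_id))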
        # For now, return None to skip
        logger.warning("Mira creation not yet implemented in refactored version")
        return None

    async def _insert_survey_data(
        self,
        mira_id: int,
        timestamp: str,
        northing: float,
        easting: float,
        height: float,
        lat: float,
        lon: float,
        coord_system: int,
    ) -> bool:
        """
        Insert survey data into ELABDATAUPGEO table.

        Args:
            mira_id: Target point ID
            timestamp: Survey timestamp
            northing: Northing coordinate
            easting: Easting coordinate
            height: Elevation
            lat: Latitude
            lon: Longitude
            coord_system: Coordinate system type

        Returns:
            True if insert was successful
        """
        query = """
            INSERT IGNORE INTO ELABDATAUPGEO
            (mira_id, EventTimestamp, north, east, elevation, lat, lon, sist_coordinate)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """

        params = (mira_id, timestamp, northing, easting, height, lat, lon, coord_system)

        try:
            await execute_query(self.conn, query, params)
            logger.debug(f"Inserted survey data for mira_id {mira_id} at {timestamp}")
            return True
        except Exception as e:
            logger.error(f"Failed to insert survey data: {e}")
            return False

    async def _process_thresholds_and_alarms(self, lavoro_id: int, processed_miras: list[int]) -> None:
        """
        Process thresholds and create alarms for monitored points.

        **TODO**: This is a stub for the complex alarm system.
        The complete implementation requires:
        - Multi-level threshold checking (3 levels: attention, intervention, immediate)
        - 5 dimensions: N, E, H, R2D, R3D
        - Email and SMS notifications
        - Time-series analysis
        - Railway/wall/truss specific monitoring

        Args:
            lavoro_id: Project ID
            processed_miras: List of mira IDs that were processed
        """
        logger.warning("Threshold and alarm processing is not yet implemented")
        logger.info(f"Would process alarms for {len(processed_miras)} miras in lavoro {lavoro_id}")

        # TODO: Implement alarm system
        # 1. Load threshold configurations from upgeo_lavori and upgeo_mire tables
        # 2. Query latest survey data for each mira
        # 3. Calculate displacements (N, E, H, R2D, R3D)
        # 4. Check against 3-level thresholds
        # 5. Create alarms if thresholds exceeded
        # 6. Handle additional monitoring (railways, walls, trusses)
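        #
        # Displacement math for step 3, implied by the five dimensions listed in
        # the docstring (baseline *_0 values would come from each mira's
        # reference survey; the names here are illustrative):
        #
        #     d_n, d_e, d_h = north - north_0, east - east_0, elevation - elevation_0
        #     r2d = math.hypot(d_n, d_e)                 # horizontal resultant
        #     r3d = math.sqrt(d_n**2 + d_e**2 + d_h**2)  # 3D resultant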

    async def process_file(self, file_path: str | Path) -> bool:
        """
        Process a Total Station CSV file and load data into the database.

        **Current Implementation**: Core data loading with coordinate transformations.
        **TODO**: Complete alarm and additional monitoring implementation.

        Args:
            file_path: Path to the CSV file to process

        Returns:
            True if processing was successful, False otherwise
        """
        file_path = Path(file_path)

        if not file_path.exists():
            logger.error(f"File not found: {file_path}")
            return False

        try:
            logger.info(f"Processing Total Station file: {file_path.name}")

            # Extract folder name
            folder_name = self._extract_folder_name(file_path)
            logger.info(f"Station/Project: {folder_name}")

            # Get project information
            project_info = await self._get_project_info(folder_name)
            if not project_info:
                return False

            station_type = project_info["station_type"]
            coord_system = project_info["coordinate_system"]
            lavoro_id = project_info["lavoro_id"]

            logger.info(f"Station type: {station_type}, Coordinate system: {coord_system}")

            # Read and parse CSV file
            with open(file_path, encoding="utf-8") as f:
                lines = [line.rstrip() for line in f]

            # Skip header
            if lines:
                lines = lines[1:]

            processed_count = 0
            processed_miras = []

            # Process each survey point
            for line in lines:
                if not line:
                    continue

                row = line.split(",")

                try:
                    # Parse row based on station type
                    mira_name, easting, northing, height, timestamp = self._parse_csv_row(row, station_type)

                    # Transform coordinates to lat/lon
                    lat, lon = self._transform_coordinates(
                        float(easting),
                        float(northing),
                        coord_system,
                        project_info.get("utm_zone"),
                        project_info.get("utm_hemisphere"),
                    )

                    # Get or create mira
                    mira_id = await self._get_or_create_mira(mira_name, lavoro_id)

                    if not mira_id:
                        logger.warning(f"Skipping mira '{mira_name}' - not found and creation not allowed")
                        continue

                    # Insert survey data
                    success = await self._insert_survey_data(
                        mira_id, timestamp, float(northing), float(easting), float(height), lat, lon, coord_system
                    )

                    if success:
                        processed_count += 1
                        if mira_id not in processed_miras:
                            processed_miras.append(mira_id)

                except Exception as e:
                    logger.error(f"Failed to process row: {e}")
                    logger.debug(f"Row data: {row}")
                    continue

            logger.info(f"Processed {processed_count} survey points for {len(processed_miras)} miras")

            # Process thresholds and alarms (TODO: complete implementation)
            if processed_miras:
                await self._process_thresholds_and_alarms(lavoro_id, processed_miras)

            return True

        except Exception as e:
            logger.error(f"Failed to process file {file_path}: {e}", exc_info=True)
            return False


async def main(file_path: str) -> int:
    """
    Main entry point for the TS Pini loader.

    Args:
        file_path: Path to the CSV file to process

    Returns:
        Exit code: 0 on success, 1 on failure
    """
    # Setup logging
    logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    logger.info("TS Pini Loader started")
    logger.info(f"Processing file: {file_path}")
    logger.warning("NOTE: Alarm system not yet fully implemented in this refactored version")

    try:
        # Load configuration
        db_config = DatabaseConfig()

        # Process file
        async with TSPiniLoader(db_config) as loader:
            success = await loader.process_file(file_path)

        if success:
            logger.info("Processing completed successfully")
            return 0
        else:
            logger.error("Processing failed")
            return 1

    except Exception as e:
        logger.error(f"Unexpected error: {e}", exc_info=True)
        return 1

    finally:
        logger.info("TS Pini Loader finished")


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python ts_pini_loader.py <path_to_csv_file>")
        print("\nNOTE: This is an essential refactoring of the legacy TS_PiniScript.py")
        print("      Core functionality (data loading, coordinates) is implemented.")
        print("      Alarm system and additional monitoring require completion.")
        sys.exit(1)

    exit_code = asyncio.run(main(sys.argv[1]))
    sys.exit(exit_code)