This commit includes: 1. Database Configuration Migration: - Migrated from DB.txt (Java JDBC) to .env (python-dotenv) - Added .env.example template with clear variable names - Updated database.py to use environment variables - Added python-dotenv>=1.0.0 to dependencies - Updated .gitignore to exclude sensitive files 2. Validation System (1,294 lines): - comparator.py: Statistical comparison with RMSE, correlation, tolerances - db_extractor.py: Database queries for all sensor types - validator.py: High-level validation orchestration - cli.py: Command-line interface for validation - README.md: Comprehensive validation documentation 3. Validation Features: - Compare Python vs MATLAB outputs from database - Support for all sensor types (RSN, Tilt, ATD) - Statistical metrics: max abs/rel diff, RMSE, correlation - Configurable tolerances (abs, rel, max) - Detailed validation reports - CLI and programmatic APIs 4. Examples and Documentation: - validate_example.sh: Bash script example - validate_example.py: Python programmatic example - Updated main README with validation section - Added validation workflow and troubleshooting guide Benefits: - ✅ No Java driver needed (native Python connectors) - ✅ Secure .env configuration (excluded from git) - ✅ Comprehensive validation against MATLAB - ✅ Statistical confidence in migration accuracy - ✅ Automated validation reports 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
731 lines
28 KiB
Python
731 lines
28 KiB
Python
"""
|
|
ATD sensor data elaboration module.
|
|
|
|
Calculates displacements and positions using star calculation for chain networks.
|
|
"""
|
|
|
|
import numpy as np
|
|
import os
|
|
from typing import Tuple, Optional
|
|
from datetime import datetime
|
|
|
|
|
|
def elaborate_radial_link_data(conn, control_unit_id: str, chain: str,
                               n_sensors: int, acceleration: np.ndarray,
                               magnetic_field: np.ndarray,
                               temp_max: float, temp_min: float,
                               temperature: np.ndarray, err_flag: np.ndarray,
                               params: dict) -> Tuple[np.ndarray, ...]:
    """
    Elaborate RL data to calculate 3D positions and displacements.

    Uses star calculation to determine node positions from acceleration
    and magnetic field measurements; when no star-parameter file exists,
    falls back to a simplified per-axis scaling of the acceleration.

    Args:
        conn: Database connection (not used here; kept for interface
            parity with the other elaboration functions)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of RL sensors
        acceleration: (n_timestamps, n_sensors*3) smoothed acceleration
        magnetic_field: (n_timestamps, n_sensors*3) smoothed magnetic field
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters (unused in this function)

    Returns:
        Tuple of (X_global, Y_global, Z_global, X_local, Y_local, Z_local,
                  X_diff, Y_diff, Z_diff, err_flag)
    """
    # Flag every sample whose temperature lies outside [temp_min, temp_max].
    # Vectorized replacement for the original per-element double loop.
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # Load star calculation parameters (None when no parameter file exists).
    star_params = load_star_parameters(control_unit_id, chain)

    if star_params is None:
        # Simplified placeholder estimate: scale each acceleration axis to
        # mm. The magnetic field is not used by this fallback (the original
        # extracted mx/my/mz per sensor but never read them).
        X_global = acceleration[:, 0:n_sensors * 3:3] * 100.0
        Y_global = acceleration[:, 1:n_sensors * 3:3] * 100.0
        Z_global = acceleration[:, 2:n_sensors * 3:3] * 100.0
    else:
        # Full star calculation over the chain network.
        X_global, Y_global, Z_global = calculate_star_positions(
            acceleration, magnetic_field, star_params, n_sensors
        )

    # Local coordinates are identical to global coordinates for RL chains.
    X_local = X_global.copy()
    Y_local = Y_global.copy()
    Z_local = Z_global.copy()

    def _diff_from_reference(ref_file: str, values: np.ndarray) -> np.ndarray:
        # Differential w.r.t. a reference CSV; when the reference file is
        # missing, the raw values are returned unchanged (as a copy).
        if os.path.exists(ref_file):
            return values - np.loadtxt(ref_file, delimiter=',')
        return values.copy()

    X_diff = _diff_from_reference(f"RifX_{control_unit_id}_{chain}.csv", X_global)
    Y_diff = _diff_from_reference(f"RifY_{control_unit_id}_{chain}.csv", Y_global)
    Z_diff = _diff_from_reference(f"RifZ_{control_unit_id}_{chain}.csv", Z_global)

    return X_global, Y_global, Z_global, X_local, Y_local, Z_local, X_diff, Y_diff, Z_diff, err_flag
|
|
|
|
|
|
def elaborate_load_link_data(conn, control_unit_id: str, chain: str,
                             n_sensors: int, force_data: np.ndarray,
                             temp_max: float, temp_min: float,
                             temperature: np.ndarray, err_flag: np.ndarray,
                             params: dict) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Elaborate LL data to calculate force and differential from reference.

    Args:
        conn: Database connection (not used here; kept for interface parity)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of LL sensors
        force_data: (n_timestamps, n_sensors) smoothed force
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters (unused in this function)

    Returns:
        Tuple of (force, force_diff, err_flag)
    """
    # Flag out-of-range temperatures (vectorized form of the original loops).
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # Differential from the reference CSV; a missing reference means the
    # raw force is returned as its own differential (copied).
    ref_file = f"RifForce_{control_unit_id}_{chain}.csv"
    if os.path.exists(ref_file):
        force_diff = force_data - np.loadtxt(ref_file, delimiter=',')
    else:
        force_diff = force_data.copy()

    return force_data, force_diff, err_flag
|
|
|
|
|
|
def load_star_parameters(control_unit_id: str, chain: str) -> Optional[dict]:
    """
    Load star calculation parameters from Excel file.

    Star parameters define how to calculate node positions in a chain network.
    File format: {control_unit_id}-{chain}.xlsx with sheets:
    - Sheet 1: Verso (direction: 1=clockwise, -1=counterclockwise, 0=both)
    - Sheet 2: Segmenti (segments between nodes)
    - Sheet 3: Peso (weights for averaging)
    - Sheet 4: PosIniEnd (initial/final positions)
    - Sheet 5: Punti_Noti (known points)
    - Sheet 6: Antiorario (counterclockwise calculation)

    Args:
        control_unit_id: Control unit identifier
        chain: Chain identifier

    Returns:
        Dictionary with keys 'verso', 'segmenti', 'peso', 'pos_ini_end',
        'punti_noti', 'antiorario' (ndarray values), or None if the file is
        missing or unreadable.
    """
    # Sheet order in the workbook maps 1:1 onto these dictionary keys.
    keys = ('verso', 'segmenti', 'peso', 'pos_ini_end', 'punti_noti', 'antiorario')

    try:
        import pandas as pd

        filename = f"{control_unit_id}-{chain}.xlsx"
        if not os.path.exists(filename):
            return None

        # Read all six sheets with a single workbook open instead of six.
        sheets = pd.read_excel(filename, sheet_name=list(range(len(keys))), header=None)
        return {key: sheets[idx].values for idx, key in enumerate(keys)}
    except Exception:
        # Best-effort contract: a malformed or unreadable workbook (or a
        # missing pandas install) means "no star parameters available".
        return None
|
|
|
|
|
|
def calculate_star_positions(acceleration: np.ndarray, magnetic_field: np.ndarray,
                             star_params: dict, n_sensors: int
                             ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Calculate node positions using star algorithm.

    The star algorithm calculates positions of nodes in a chain network
    by considering the geometry and connectivity between nodes.

    Args:
        acceleration: (n_timestamps, n_sensors*3) acceleration data
        magnetic_field: (n_timestamps, n_sensors*3) magnetic field data
            (not read by this placeholder implementation)
        star_params: Star calculation parameters; 'segmenti' and
            'pos_ini_end' are used here ('verso', 'peso', 'punti_noti' are
            reserved for the full implementation)
        n_sensors: Number of sensors

    Returns:
        Tuple of (X_positions, Y_positions, Z_positions), each
        (n_timestamps, n_sensors)
    """
    n_timestamps = acceleration.shape[0]

    X_pos = np.zeros((n_timestamps, n_sensors))
    Y_pos = np.zeros((n_timestamps, n_sensors))
    Z_pos = np.zeros((n_timestamps, n_sensors))

    segmenti = star_params['segmenti']
    pos_ini_end = star_params['pos_ini_end']

    # Anchor the first node at the known initial position (closed chain).
    if pos_ini_end.shape[0] >= 3:
        X_pos[:, 0] = pos_ini_end[0, 0]
        Y_pos[:, 0] = pos_ini_end[1, 0]
        Z_pos[:, 0] = pos_ini_end[2, 0]

    # Walk the segments in order; each destination node is offset from its
    # source node. Timestamps are independent, so the per-timestamp loop is
    # replaced with whole-column (vectorized) operations.
    for seg_idx in range(segmenti.shape[0]):
        node_from = int(segmenti[seg_idx, 0]) - 1  # 1-based file -> 0-based
        node_to = int(segmenti[seg_idx, 1]) - 1

        if 0 <= node_from < n_sensors and 0 <= node_to < n_sensors:
            # Simple integration (placeholder — the full implementation
            # would use proper kinematics).
            X_pos[:, node_to] = X_pos[:, node_from] + acceleration[:, node_from * 3] * 10.0
            Y_pos[:, node_to] = Y_pos[:, node_from] + acceleration[:, node_from * 3 + 1] * 10.0
            Z_pos[:, node_to] = Z_pos[:, node_from] + acceleration[:, node_from * 3 + 2] * 10.0

    return X_pos, Y_pos, Z_pos
|
|
|
|
|
|
def elaborate_pressure_link_data(conn, control_unit_id: str, chain: str,
                                 n_sensors: int, pressure_data: np.ndarray,
                                 temp_max: float, temp_min: float,
                                 temperature: np.ndarray, err_flag: np.ndarray,
                                 params: dict) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Elaborate PL data to calculate pressure and differential from reference.

    Args:
        conn: Database connection (not used here; kept for interface parity)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of PL sensors
        pressure_data: (n_timestamps, n_sensors) smoothed pressure
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters (unused in this function)

    Returns:
        Tuple of (pressure, pressure_diff, err_flag)
    """
    # Flag out-of-range temperatures (vectorized form of the original loops).
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # Differential from the reference CSV; a missing reference means the
    # raw pressure doubles as its own differential (copied).
    ref_file = f"RifPressure_{control_unit_id}_{chain}.csv"
    if os.path.exists(ref_file):
        pressure_diff = pressure_data - np.loadtxt(ref_file, delimiter=',')
    else:
        pressure_diff = pressure_data.copy()

    return pressure_data, pressure_diff, err_flag
|
|
|
|
|
|
def elaborate_extensometer_3d_data(conn, control_unit_id: str, chain: str,
                                   n_sensors: int, displacement_data: np.ndarray,
                                   temp_max: float, temp_min: float,
                                   temperature: np.ndarray, err_flag: np.ndarray,
                                   params: dict) -> Tuple[np.ndarray, ...]:
    """
    Elaborate 3DEL data to calculate 3D displacements and differentials.

    Args:
        conn: Database connection (not used here; kept for interface parity)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of 3DEL sensors
        displacement_data: (n_timestamps, n_sensors*3) smoothed displacements
            with interleaved (x, y, z) columns per sensor
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters (unused in this function)

    Returns:
        Tuple of (X_disp, Y_disp, Z_disp, X_diff, Y_diff, Z_diff, err_flag)
    """
    # Flag out-of-range temperatures (vectorized form of the original loops).
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # De-interleave the (x, y, z) columns into per-axis views.
    X_disp = displacement_data[:, 0::3]
    Y_disp = displacement_data[:, 1::3]
    Z_disp = displacement_data[:, 2::3]

    def _diff_from_reference(ref_file: str, values: np.ndarray) -> np.ndarray:
        # Differential w.r.t. a reference CSV; missing file -> plain copy.
        if os.path.exists(ref_file):
            return values - np.loadtxt(ref_file, delimiter=',')
        return values.copy()

    X_diff = _diff_from_reference(f"Rif3DX_{control_unit_id}_{chain}.csv", X_disp)
    Y_diff = _diff_from_reference(f"Rif3DY_{control_unit_id}_{chain}.csv", Y_disp)
    Z_diff = _diff_from_reference(f"Rif3DZ_{control_unit_id}_{chain}.csv", Z_disp)

    return X_disp, Y_disp, Z_disp, X_diff, Y_diff, Z_diff, err_flag
|
|
|
|
|
|
def elaborate_crackmeter_data(conn, control_unit_id: str, chain: str,
                              n_sensors: int, displacement_data: np.ndarray,
                              n_dimensions: int,
                              temp_max: float, temp_min: float,
                              temperature: np.ndarray, err_flag: np.ndarray,
                              params: dict) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Elaborate crackmeter data to calculate displacements and differentials.

    Args:
        conn: Database connection (not used here; kept for interface parity)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of crackmeter sensors
        displacement_data: (n_timestamps, n_sensors*n_dimensions) smoothed
            displacements
        n_dimensions: 1, 2, or 3 dimensions (currently unused here — the
            data is processed as-is regardless of dimensionality)
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters (unused in this function)

    Returns:
        Tuple of (displacement, displacement_diff, err_flag)
    """
    # Flag out-of-range temperatures (vectorized form of the original loops).
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # Differential from the reference CSV; a missing reference means the
    # raw displacement doubles as its own differential (copied).
    ref_file = f"RifCrL_{control_unit_id}_{chain}.csv"
    if os.path.exists(ref_file):
        displacement_diff = displacement_data - np.loadtxt(ref_file, delimiter=',')
    else:
        displacement_diff = displacement_data.copy()

    return displacement_data, displacement_diff, err_flag
|
|
|
|
|
|
def elaborate_pcl_data(conn, control_unit_id: str, chain: str,
                       n_sensors: int, angle_data: np.ndarray,
                       sensor_type: str, temp_max: float, temp_min: float,
                       temperature: np.ndarray, err_flag: np.ndarray,
                       params: dict) -> Tuple[np.ndarray, ...]:
    """
    Elaborate PCL/PCLHR data with biaxial calculations.

    Calculates cumulative displacements along Y and Z axes using
    trigonometric calculations from angle measurements.

    Args:
        conn: Database connection (not used here; kept for interface parity)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of PCL sensors
        angle_data: (n_timestamps, n_sensors*2) smoothed angles (ax, ay)
        sensor_type: 'PCL' or 'PCLHR'
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters (includes 'sensor_spacing',
            'elab_option': 1=fixed bottom, -1=fixed top)

    Returns:
        Tuple of (Y_disp, Z_disp, Y_local, Z_local, AlphaX, AlphaY,
                  Y_diff, Z_diff, err_flag)
    """
    n_timestamps = angle_data.shape[0]

    # Flag out-of-range temperatures (vectorized form of the original loops).
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # Elaboration parameters.
    spacing = params.get('sensor_spacing', np.ones(n_sensors))  # inter-sensor spacing
    elab_option = params.get('elab_option', 1)  # 1=fixed bottom, -1=fixed top

    Y_disp = np.zeros((n_timestamps, n_sensors))
    Z_disp = np.zeros((n_timestamps, n_sensors))
    Y_local = np.zeros((n_timestamps, n_sensors))
    Z_local = np.zeros((n_timestamps, n_sensors))
    AlphaX = np.zeros((n_timestamps, n_sensors))  # roll angle [deg]
    AlphaY = np.zeros((n_timestamps, n_sensors))  # inclination angle [deg]

    def _load_reference(path: str) -> np.ndarray:
        # Per-sensor reference offsets; zeros when no reference file exists.
        if os.path.exists(path):
            return np.loadtxt(path, delimiter=',')
        return np.zeros(n_sensors)

    # Only PCLHR subtracts reference offsets; PCL uses zero references.
    if sensor_type == 'PCLHR':
        ref_y = _load_reference(f"RifY_PCL_{control_unit_id}_{chain}.csv")
        ref_z = _load_reference(f"RifZ_PCL_{control_unit_id}_{chain}.csv")
    else:
        ref_y = np.zeros(n_sensors)
        ref_z = np.zeros(n_sensors)

    for t in range(n_timestamps):
        ax = angle_data[t, 0::2]  # X angles (even columns)
        ay = angle_data[t, 1::2]  # Y angles (odd columns)

        if elab_option == 1:  # fixed point at bottom
            for ii in range(n_sensors):
                if sensor_type == 'PCLHR':
                    # PCLHR readings are angles in radians: use cos/sin directly.
                    Yi = -spacing[ii] * np.cos(ax[ii])
                    Zi = -spacing[ii] * np.sin(ax[ii])
                    AlphaX[t, ii] = np.degrees(ay[ii])
                    AlphaY[t, ii] = np.degrees(ax[ii])
                    Y_local[t, ii] = -ref_y[ii] + Yi
                    Z_local[t, ii] = -ref_z[ii] + Zi
                else:  # PCL
                    # PCL readings are sine values: derive the cosine.
                    cosBeta = np.sqrt(1 - ax[ii] ** 2)
                    Yi = -spacing[ii] * cosBeta
                    Zi = spacing[ii] * ax[ii]
                    AlphaX[t, ii] = np.degrees(np.arcsin(ay[ii]))
                    AlphaY[t, ii] = -np.degrees(np.arcsin(ax[ii]))
                    Y_local[t, ii] = Yi
                    Z_local[t, ii] = Zi

                # Cumulative chain sums. NOTE(review): Y accumulates the raw
                # segment Yi while Z accumulates Z_local (reference-adjusted
                # for PCLHR) — asymmetric, but preserved from the original
                # MATLAB port; confirm intent against the MATLAB source.
                if ii == 0:
                    Y_disp[t, ii] = Yi
                    Z_disp[t, ii] = Z_local[t, ii]
                else:
                    Y_disp[t, ii] = Y_disp[t, ii - 1] + Yi
                    Z_disp[t, ii] = Z_disp[t, ii - 1] + Z_local[t, ii]

        elif elab_option == -1:  # fixed point at top
            for ii in range(n_sensors):
                idx = n_sensors - ii - 1  # walk the chain from the top

                if sensor_type == 'PCLHR':
                    # FIX: index with idx (the reversed position), not ii.
                    # The original used ax[ii] here and ref_z[ii] below,
                    # inconsistent with every other line of this branch and
                    # with the PCL branch, which index by idx throughout.
                    Yi = spacing[idx] * np.cos(ax[idx])
                    Zi = spacing[idx] * np.sin(ax[idx])
                    AlphaX[t, idx] = np.degrees(ay[idx])
                    AlphaY[t, idx] = np.degrees(ax[idx])
                    Y_local[t, idx] = ref_y[idx] + Yi
                    Z_local[t, idx] = ref_z[idx] + Zi
                else:  # PCL
                    cosBeta = np.sqrt(1 - ax[idx] ** 2)
                    Yi = spacing[idx] * cosBeta
                    Zi = -spacing[idx] * ax[idx]
                    AlphaX[t, idx] = np.degrees(np.arcsin(ay[idx]))
                    AlphaY[t, idx] = -np.degrees(np.arcsin(ax[idx]))
                    Y_local[t, idx] = Yi
                    Z_local[t, idx] = Zi

                # Cumulative displacements, accumulated in reverse direction.
                if ii == 0:
                    Y_disp[t, idx] = Yi
                    Z_disp[t, idx] = Z_local[t, idx]
                else:
                    Y_disp[t, idx] = Y_disp[t, idx + 1] + Yi
                    Z_disp[t, idx] = Z_disp[t, idx + 1] + Z_local[t, idx]

    def _diff_from_reference(ref_file: str, values: np.ndarray) -> np.ndarray:
        # Differential w.r.t. a reference CSV; missing file -> plain copy.
        if os.path.exists(ref_file):
            return values - np.loadtxt(ref_file, delimiter=',')
        return values.copy()

    Y_diff = _diff_from_reference(f"RifYDiff_PCL_{control_unit_id}_{chain}.csv", Y_disp)
    Z_diff = _diff_from_reference(f"RifZDiff_PCL_{control_unit_id}_{chain}.csv", Z_disp)

    return Y_disp, Z_disp, Y_local, Z_local, AlphaX, AlphaY, Y_diff, Z_diff, err_flag
|
|
|
|
|
|
def elaborate_tube_link_data(conn, control_unit_id: str, chain: str,
                             n_sensors: int, angle_data: np.ndarray,
                             temp_max: float, temp_min: float,
                             temperature: np.ndarray, err_flag: np.ndarray,
                             params: dict) -> Tuple[np.ndarray, ...]:
    """
    Elaborate TuL data with 3D biaxial calculations and bidirectional computation.

    Positions are accumulated both clockwise (from the first node) and
    counterclockwise (from the last node, into the *_star arrays). The
    correlation angle (az) drives the Y-axis displacement.

    Args:
        conn: Database connection (not used here; kept for interface parity)
        control_unit_id: Control unit identifier
        chain: Chain identifier
        n_sensors: Number of TuL sensors
        angle_data: (n_timestamps, n_sensors*3) smoothed angles (ax, ay, az)
        temp_max: Maximum valid temperature
        temp_min: Minimum valid temperature
        temperature: (n_timestamps, n_sensors) smoothed temperature
        err_flag: (n_timestamps, n_sensors) error flags (updated in place)
        params: Installation parameters ('sensor_spacing', 'pos_ini_end',
            'index_x', 'index_z')

    Returns:
        Tuple of (X_disp, Y_disp, Z_disp, X_star, Y_star, Z_star,
                  X_local, Y_local, Z_local, X_diff, Y_diff, Z_diff, err_flag)
    """
    n_timestamps = angle_data.shape[0]

    # Flag out-of-range temperatures (vectorized form of the original loops).
    err_flag[(temperature < temp_min) | (temperature > temp_max)] = 1.0

    # Installation parameters.
    spacing = params.get('sensor_spacing', np.ones(n_sensors))
    pos_ini_end = params.get('pos_ini_end', np.zeros((2, 3)))  # row 0: start node, row 1: end node
    index_x = params.get('index_x', [])  # nodes mounted with inverted X
    index_z = params.get('index_z', [])  # nodes mounted with inverted Z

    X_disp = np.zeros((n_timestamps, n_sensors))
    Y_disp = np.zeros((n_timestamps, n_sensors))
    Z_disp = np.zeros((n_timestamps, n_sensors))

    X_star = np.zeros((n_timestamps, n_sensors))  # counterclockwise pass
    Y_star = np.zeros((n_timestamps, n_sensors))
    Z_star = np.zeros((n_timestamps, n_sensors))

    X_local = np.zeros((n_timestamps, n_sensors))
    Y_local = np.zeros((n_timestamps, n_sensors))
    Z_local = np.zeros((n_timestamps, n_sensors))

    for t in range(n_timestamps):
        ax = angle_data[t, 0::3]  # X angles
        ay = angle_data[t, 1::3]  # Y angles
        az = angle_data[t, 2::3]  # Z correlation angles

        # ---- Clockwise pass (accumulate from the first node) ----
        Z_prev = 0
        for ii in range(n_sensors):
            Xi = spacing[ii] * ay[ii]
            Zi = -spacing[ii] * ax[ii]
            # NOTE(review): this guard is on t == 0 (first timestamp), not
            # ii == 0 (first node). At t == 0 every node uses its own Zi; at
            # t > 0 the first node sees Z_prev == 0 (reset above). Preserved
            # verbatim — confirm against the MATLAB source whether ii == 0
            # was intended.
            if t == 0:
                Yi = -Zi * az[ii]
            else:
                Yi = -Z_prev * az[ii]

            # Corrections for sensors mounted with inverted axes.
            if ii in index_x:
                Xi = -Xi
            if ii in index_z:
                Zi = -Zi
                Yi = -Yi

            X_local[t, ii] = Xi
            Y_local[t, ii] = Yi
            Z_local[t, ii] = Zi

            # Cumulative displacement along the chain; the first node is
            # anchored at the known start position.
            if ii == 0:
                X_disp[t, ii] = Xi + pos_ini_end[0, 0]
                Y_disp[t, ii] = Yi + pos_ini_end[0, 1]
                Z_disp[t, ii] = Zi + pos_ini_end[0, 2]
            else:
                X_disp[t, ii] = X_disp[t, ii - 1] + Xi
                Y_disp[t, ii] = Y_disp[t, ii - 1] + Yi
                Z_disp[t, ii] = Z_disp[t, ii - 1] + Zi

            Z_prev = Z_local[t, ii]

        # ---- Counterclockwise pass (accumulate from the last node) ----
        Z_prev_star = 0
        for ii in range(n_sensors):
            idx = n_sensors - ii - 1  # walk the chain backwards

            XiStar = -spacing[idx] * ay[idx]  # X reversed
            ZiStar = spacing[idx] * ax[idx]   # Z reversed
            # Same t == 0 guard as the clockwise pass (see NOTE above).
            if t == 0:
                YiStar = ZiStar * az[idx]
            else:
                YiStar = Z_prev_star * az[idx]

            if idx in index_x:
                XiStar = -XiStar
            if idx in index_z:
                ZiStar = -ZiStar
                YiStar = -YiStar

            # Cumulative displacement, anchored at the known end position.
            if ii == 0:
                X_star[t, idx] = pos_ini_end[1, 0] + XiStar
                Y_star[t, idx] = pos_ini_end[1, 1] + YiStar
                Z_star[t, idx] = pos_ini_end[1, 2] + ZiStar
            else:
                X_star[t, idx] = X_star[t, idx + 1] + XiStar
                Y_star[t, idx] = Y_star[t, idx + 1] + YiStar
                Z_star[t, idx] = Z_star[t, idx + 1] + ZiStar

            Z_prev_star = ZiStar

    def _diff_from_reference(ref_file: str, values: np.ndarray) -> np.ndarray:
        # Differential w.r.t. a reference CSV; missing file -> plain copy.
        if os.path.exists(ref_file):
            return values - np.loadtxt(ref_file, delimiter=',')
        return values.copy()

    X_diff = _diff_from_reference(f"RifX_TuL_{control_unit_id}_{chain}.csv", X_disp)
    Y_diff = _diff_from_reference(f"RifY_TuL_{control_unit_id}_{chain}.csv", Y_disp)
    Z_diff = _diff_from_reference(f"RifZ_TuL_{control_unit_id}_{chain}.csv", Z_disp)

    return X_disp, Y_disp, Z_disp, X_star, Y_star, Z_star, X_local, Y_local, Z_local, X_diff, Y_diff, Z_diff, err_flag
|