fix: Add timeout settings and retry logic to MySQL connector
Configuration improvements:

- Set read_timeout=300 (5 minutes) to handle long queries
- Set write_timeout=300 (5 minutes) for writes
- Set max_allowed_packet=64MB to handle larger data transfers

Retry logic:

- Added a retry mechanism with a maximum of 3 retries on fetch failure
- Auto-reconnect on connection loss before retrying
- Better error messages showing each retry attempt

This fixes the 'connection is lost' error that occurs during long-running
migrations by:

1. Giving MySQL queries more time to complete
2. Allowing larger packet sizes for bulk data
3. Automatically recovering from connection drops

Fixes: 'Connection is lost' error during full migration
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
"""Test setup and basic functionality."""
|
||||
import pytest
|
||||
from datetime import timedelta, time
|
||||
from config import get_settings, TABLE_CONFIGS, RAWDATACOR_COLUMNS, ELABDATADISP_FIELD_MAPPING
|
||||
from src.transformers.data_transformer import DataTransformer
|
||||
|
||||
@@ -61,6 +62,12 @@ class TestDataTransformation:
|
||||
assert pg_row["id"] == 1
|
||||
assert pg_row["unit_name"] == "TestUnit"
|
||||
assert pg_row["tool_name_id"] == "Tool1"
|
||||
assert pg_row["event_timestamp"] is not None
|
||||
assert pg_row["event_timestamp"].year == 2024
|
||||
assert pg_row["event_timestamp"].month == 1
|
||||
assert pg_row["event_timestamp"].day == 1
|
||||
assert pg_row["event_timestamp"].hour == 12
|
||||
assert pg_row["event_timestamp"].minute == 0
|
||||
assert isinstance(pg_row["measurements"], dict)
|
||||
assert "0" in pg_row["measurements"]
|
||||
assert pg_row["measurements"]["0"]["value"] == "100.5"
|
||||
@@ -110,6 +117,12 @@ class TestDataTransformation:
|
||||
# Verify
|
||||
assert pg_row["id_elab_data"] == 5000
|
||||
assert pg_row["state"] == "OK"
|
||||
assert pg_row["event_timestamp"] is not None
|
||||
assert pg_row["event_timestamp"].year == 2024
|
||||
assert pg_row["event_timestamp"].month == 1
|
||||
assert pg_row["event_timestamp"].day == 1
|
||||
assert pg_row["event_timestamp"].hour == 12
|
||||
assert pg_row["event_timestamp"].minute == 0
|
||||
assert isinstance(pg_row["measurements"], dict)
|
||||
assert "shifts" in pg_row["measurements"]
|
||||
assert "coordinates" in pg_row["measurements"]
|
||||
@@ -135,6 +148,105 @@ class TestDataTransformation:
|
||||
assert "state" in columns
|
||||
|
||||
|
||||
class TestTimeConversion:
    """Tests for DataTransformer's time-conversion helpers."""

    def test_convert_time_from_string(self):
        """An "HH:MM:SS" string is parsed into a time object."""
        converted = DataTransformer._convert_time("12:30:45")
        assert isinstance(converted, time)
        assert (converted.hour, converted.minute, converted.second) == (12, 30, 45)

    def test_convert_time_from_timedelta(self):
        """A timedelta (how MySQL drivers return TIME columns) converts cleanly."""
        converted = DataTransformer._convert_time(
            timedelta(hours=14, minutes=25, seconds=30)
        )
        assert isinstance(converted, time)
        assert (converted.hour, converted.minute, converted.second) == (14, 25, 30)

    def test_convert_time_from_time_object(self):
        """An existing time object is returned as a time with the same fields."""
        converted = DataTransformer._convert_time(time(10, 15, 20))
        assert isinstance(converted, time)
        assert (converted.hour, converted.minute, converted.second) == (10, 15, 20)

    def test_rawdatacor_with_timedelta(self):
        """RAWDATACOR transformation accepts a timedelta EventTime."""
        mysql_row = {
            "id": 1,
            "UnitName": "TestUnit",
            "ToolNameID": "Tool1",
            "NodeNum": 1,
            "EventDate": "2024-01-01",
            # MySQL TIME columns arrive as timedelta
            "EventTime": timedelta(hours=12, minutes=0, seconds=0),
            "BatLevel": 3.5,
            "Temperature": 25.5,
            "Val0": "100.5",
            "Val1": None,
            "Val2": "200.3",
            "Val0_unitmisure": "°C",
            "Val1_unitmisure": "bar",
            "Val2_unitmisure": "m/s",
        }

        # Pad out the remaining hex-named Val columns (Val3..ValF) with NULLs.
        for i in range(3, 16):
            col = f"Val{i:X}"
            mysql_row.update({col: None, f"{col}_unitmisure": None})

        pg_row = DataTransformer.transform_rawdatacor_row(mysql_row)

        ts = pg_row["event_timestamp"]
        assert ts is not None
        assert (ts.year, ts.month, ts.day) == (2024, 1, 1)
        assert (ts.hour, ts.minute) == (12, 0)

    def test_rawdatacor_with_null_eventtime(self):
        """A NULL EventTime falls back to the default timestamp."""
        mysql_row = {
            "id": 2140982,
            "UnitName": "OLD_ID0002",
            "ToolNameID": "DT0001",
            "NodeNum": 1,
            "EventDate": "2023-09-05",
            "EventTime": None,  # NULL EventTime
            "BatLevel": 12.90,
            "Temperature": 13.40,
            "Val0": "-1709",
            "Val1": None,
            "Val0_unitmisure": None,
            "Val1_unitmisure": None,
        }

        # Pad out the remaining hex-named Val columns (Val2..ValF) with NULLs.
        for i in range(2, 16):
            col = f"Val{i:X}"
            mysql_row.update({col: None, f"{col}_unitmisure": None})

        pg_row = DataTransformer.transform_rawdatacor_row(mysql_row)

        # Expect the 1970-01-01 00:00:00 epoch default.
        ts = pg_row["event_timestamp"]
        assert ts is not None
        assert (ts.year, ts.month, ts.day) == (1970, 1, 1)
        assert (ts.hour, ts.minute) == (0, 0)
|
||||
|
||||
class TestFieldMapping:
|
||||
"""Test field mapping configuration."""
|
||||
|
||||
|
||||
Reference in New Issue
Block a user