fix loading

2025-07-27 00:32:12 +02:00
parent a8df0f9584
commit 287d2de81e
15 changed files with 200 additions and 64 deletions


@@ -40,7 +40,14 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
     UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
-    for riga in [riga for riga in righe if ';|;' in riga]:
+    """
+    Loop over all the rows of the CSV file, excluding those that:
+    do not contain the ';|;' pattern, because they are not data but the header
+    contain the 'No RX' pattern, because those readings were never received or are in error
+    contain the '.-' pattern, because they carry a malformed number (a minus sign after the decimal point)
+    contain the 'File Creation' pattern, because it means the control unit raised an error
+    """
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
         timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
         EventDate, EventTime = timestamp.split(' ')
         if batlevel == '|':
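Note: a minimal standalone sketch of the row filter introduced above; the sample rows are invented for illustration and only approximate the real CSV layout.

    # Hypothetical sample rows (invented); only the second one survives the filter.
    righe = [
        "Timestamp;Battery;Temperature;Readings",          # header: no ';|;' separator
        "2025/07/26 10:00:00;3.61;21.5;|;1.02;0.98",       # valid data row
        "2025/07/26 10:10:00;3.61;21.5;|;No RX",           # reading not received
        "2025/07/26 10:20:00;3.61;21.5;|;1.-02",           # malformed negative decimal
        "2025/07/26 10:30:00;File Creation Error;|;",      # control-unit error marker
    ]
    dati = [riga for riga in righe
            if ';|;' in riga
            and 'No RX' not in riga
            and '.-' not in riga
            and 'File Creation' not in riga]
    assert dati == [righe[1]]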
@@ -70,7 +77,7 @@ async def make_ain_din_matrix(cfg: object, id: int, pool: object) -> list:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
     UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
-    node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName)
+    node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
     pattern = r'^(?:\d{4}\/\d{2}\/\d{2}|\d{2}\/\d{2}\/\d{4}) \d{2}:\d{2}:\d{2}(?:;\d+\.\d+){2}(?:;\d+){4}$'
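Note: the unchanged `pattern` in this hunk accepts either date order; a quick sanity check (sample strings invented; the two float fields are presumably battery level and temperature, followed by four integer fields):

    import re
    pattern = r'^(?:\d{4}\/\d{2}\/\d{2}|\d{2}\/\d{2}\/\d{4}) \d{2}:\d{2}:\d{2}(?:;\d+\.\d+){2}(?:;\d+){4}$'
    print(bool(re.match(pattern, "2025/07/26 10:00:00;3.61;21.5;1;0;1;0")))  # True (yyyy/mm/dd)
    print(bool(re.match(pattern, "26/07/2025 10:00:00;3.61;21.5;1;0;1;0")))  # True (dd/mm/yyyy)
    print(bool(re.match(pattern, "2025/07/26 10:00:00;3.61;21.5;1;0")))      # False: only two trailing integers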
@@ -104,10 +111,10 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
     UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
-    node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName)
+    node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
-    for riga in [riga for riga in righe if ';|;' in riga]:
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
         timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3)
         EventDate, EventTime = timestamp.split(' ')
         valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
@@ -132,10 +139,10 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
     UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
-    node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName)
+    node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
-    for riga in [riga for riga in righe if ';|;' in riga]:
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
         timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2)
         if timestamp == '':
             continue
@@ -194,17 +201,21 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
     UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
-    pattern = r'^-\d*dB$'
-    for riga in [riga for riga in righe if ';|;' in riga]:
-        timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
+    pattern = r';-?\d+dB$'
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
+        timestamp, rilevazioni = riga.split(';|;',1)
         EventDate, EventTime = timestamp.split(' ')
-        if batlevel == '|':
-            batlevel = temperature
-            temperature, rilevazioni = rilevazioni.split(';',1)
-        if re.match(pattern, rilevazioni):
-            valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;') # Strip leading '|;', strip any trailing ';', split on ';|;'
-            for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
-                valori = valori_nodo.split(';')
-                matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori))))
-        logger.info(f"GD id {id}: {pattern} {rilevazioni}")
+        if re.search(pattern, rilevazioni):
+            batlevel, temperature, rssi = rilevazioni.split(';')
+            logger.info(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {rssi}")
+        elif all(char == ';' for char in rilevazioni):
+            pass
+        elif ';|;' in rilevazioni:
+            unit_metrics, data = rilevazioni.split(';|;')
+            batlevel, temperature = unit_metrics.split(';')
+            logger.info(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {data}")
+        else:
+            logger.warning(f"GD id {id}: dati non trattati - {rilevazioni}")
     return matrice_valori
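Note: the rewritten make_gd_matrix now dispatches each record into one of four branches instead of assuming a fixed field order. A standalone sketch of that branching, with invented sample lines:

    import re
    pattern = r';-?\d+dB$'
    lines = [
        "2025/07/26 10:00:00;|;3.61;21.5;-71dB",        # battery;temperature;RSSI -> first branch
        "2025/07/26 10:10:00;|;;;;",                    # separators only -> silently skipped
        "2025/07/26 10:20:00;|;3.61;21.5;|;0.12;0.34",  # unit metrics + node data -> third branch
        "2025/07/26 10:30:00;|;garbled",                # anything else -> warning branch
    ]
    for riga in lines:
        timestamp, rilevazioni = riga.split(';|;', 1)
        if re.search(pattern, rilevazioni):
            batlevel, temperature, rssi = rilevazioni.split(';')
        elif all(char == ';' for char in rilevazioni):
            pass
        elif ';|;' in rilevazioni:
            unit_metrics, data = rilevazioni.split(';|;')
            batlevel, temperature = unit_metrics.split(';')
        else:
            print("unhandled:", rilevazioni)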


@@ -1,5 +1,5 @@
 from utils.database.loader_action import load_data, update_status, unlock
-from utils.database import DATA_LOADED
+from utils.database import WorkflowFlags
 from utils.csv.data_preparation import make_pipe_sep_matrix, make_ain_din_matrix, make_channels_matrix, make_tlp_matrix, make_gd_matrix, make_musa_matrix
 import logging
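Note: the import swap implies the plain DATA_LOADED constant became a member of a WorkflowFlags bitmask. A hypothetical sketch of what such a class could look like; the member names other than DATA_LOADED and all values are invented:

    from enum import IntFlag

    class WorkflowFlags(IntFlag):
        # Invented stage bits; only the DATA_LOADED name is confirmed by this diff.
        RECEIVED = 1
        DATA_LOADED = 2
        VALIDATED = 4

    status = WorkflowFlags.RECEIVED | WorkflowFlags.DATA_LOADED
    print(WorkflowFlags.DATA_LOADED in status)  # True: that stage bit is set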
@@ -32,13 +32,13 @@ async def main_loader(cfg: object, id: int, pool: object, action: str) -> None:
         logger.info("matrice valori creata")
         # Load the data into the database
         if await load_data(cfg, matrice_valori, pool):
-            await update_status(cfg, id, DATA_LOADED, pool)
+            await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
             await unlock(cfg, id, pool)
     else:
         logger.warning(f"Action '{action}' non riconosciuta.")

-async def get_next_csv_atomic(pool, table_name, status):
+async def get_next_csv_atomic(pool, table_name, status, next_status):
     """Atomically fetch the next CSV file to process"""
     async with pool.acquire() as conn:
         # IMPORTANT: disable autocommit for this transaction
@@ -47,14 +47,17 @@
         try:
             async with conn.cursor() as cur:
                 # Use SELECT FOR UPDATE for an atomic lock
                 await cur.execute(f"""
                     SELECT id, unit_type, tool_type, unit_name, tool_name
                     FROM {table_name}
-                    WHERE locked = 0 AND status = %s
+                    WHERE locked = 0
+                    AND ((status & %s) > 0 OR %s = 0)
+                    AND (status & %s) = 0
                     ORDER BY id
                     LIMIT 1
                     FOR UPDATE SKIP LOCKED
-                """, (status,))
+                """, (status, status, next_status))
                 result = await cur.fetchone()
                 if result:
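Note: the new WHERE clause turns the queue query into a bitmask check. A row qualifies when it has been through the prerequisite stage, `(status & %s) > 0`, with `%s = 0` meaning "no prerequisite", and has not yet been through the target stage, `(status & %s) = 0`; FOR UPDATE SKIP LOCKED then lets concurrent workers claim different rows without blocking each other. A pure-Python restatement of the predicate, using the invented flag values from the sketch above:

    def eligible(row_status: int, status: int, next_status: int, locked: int = 0) -> bool:
        # Mirrors: locked = 0 AND ((status & %s) > 0 OR %s = 0) AND (status & %s) = 0
        return (locked == 0
                and ((row_status & status) > 0 or status == 0)
                and (row_status & next_status) == 0)

    print(eligible(0b010, status=2, next_status=4))  # True: loaded, not yet validated
    print(eligible(0b110, status=2, next_status=4))  # False: already validated
    print(eligible(0b001, status=2, next_status=4))  # False: prerequisite bit missing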