2025-07-28 23:03:56 +02:00
parent d6f1998d78
commit acaad8a99f
3 changed files with 45 additions and 2 deletions

lista_gd.txt (new file, 1 addition)

@@ -0,0 +1 @@
select count(id) from RAWDATACOR where date(created_at) = '2025-07-27' and concat(UnitName,ToolNameID) in ('ID0005_DT0000','ID0005_DT0036','ID0005_DT0037','ID0005_DT0038','ID0005_DT0039','ID0005_DT0040','ID0005_DT0041','ID0005_DT0042','ID0005_DT0043','ID0006_DT0044','ID0006_DT0045','ID0006_DT0046','ID0006_DT0047','ID0006_DT0048','ID0006_DT0049','ID0007_DT0050','ID0007_DT0051','ID0007_DT0052','ID0007_DT0053','ID0010_DT0054','ID0010_DT0055','ID0012_DT0001','ID0012_DT0102','ID0012_DT0103','ID0012_DT0104','ID0013_DT0001','ID0034_DT0002','ID0039_DT0001','ID0039_DT0002','ID0039_DT0003','ID0039_DT0004','ID0039_DT0005','ID0039_DT0006','ID0039_DT0007','ID0039_DT0008','ID0190_DT0001','ID0190_DT0002','ID0190_DT0003','ID0190_DT0004','ID0190_DT0005','ID0198_DT0229','ID0198_DT0230','ID0199_DT0010','ID0215_DT0234','ID0215_DT0235','ID0215_DT0236','ID0215_DT0237','ID0226_DT0000','ID0226_DT0239','ID0226_DT0240','ID0226_DT0241','ID0226_DT0242','ID0226_DT0243','ID0226_DT0244','ID0226_DT0245','ID0226_DT0246','ID0226_DT0247','ID0226_DT0248','ID0226_DT0249','ID0226_DT0250','ID0226_DT0251')

(GD loader script, filename not shown)

@@ -1,6 +1,7 @@
#!.venv/bin/python
from utils.database.nodes_query import get_nodes_type
from utils.timestamp.date_check import normalizza_data, normalizza_orario
+from utils.database.loader_action import find_nearest_timestamp
import logging
import re
@@ -224,8 +225,11 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
            unit_metrics, data = rilevazioni.split(';|;')
            batlevel, temperature = unit_metrics.split(';')
            #logger.debug(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {data}")
+           dt_timestamp, dt_batlevel, dt_temperature = await find_nearest_timestamp(cfg, {"timestamp": f"{normalizza_data(EventDate)} {normalizza_orario(EventTime)}", "unit": UnitName, "tool": ToolNameID.replace("GD", "DT"), "node_num": 1}, pool)
+           EventDate, EventTime = dt_timestamp.strftime('%Y-%m-%d %H:%M:%S').split(' ')
            valori = data.split(';')
-           matrice_valori.append([UnitName, ToolNameID.replace("GD", "DT"), 2, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
+           matrice_valori.append([UnitName, ToolNameID.replace("GD", "DT"), 2, EventDate, EventTime, float(dt_batlevel), float(dt_temperature)] + valori + ([None] * (16 - len(valori))) + [batlevel, temperature, None])
        else:
            logger.warning(f"GD id {id}: dati non trattati - {rilevazioni}")

utils/database/loader_action.py

@@ -3,6 +3,7 @@ import logging
import asyncio
from utils.database import FLAG_TO_TIMESTAMP, BATCH_SIZE
+from datetime import datetime, timedelta

logger = logging.getLogger(__name__)
@@ -189,6 +190,43 @@ async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tup
                    inner join units as u on u.id = t.unit_id
                    inner join statustools as s on t.statustool_id = s.id
                    where t.name = "{tool}" and u.name = "{unit}"''')
-               return cur.fetchone()
+               return await cur.fetchone()
            except Exception as e:
                logger.error(f"Error: {e}")
+async def find_nearest_timestamp(cfg: object, unit_tool_data: dict, pool: object) -> tuple:
+    """
+    Finds the nearest timestamp in the raw data table based on a reference timestamp
+    and unit/tool/node information.
+
+    Args:
+        cfg (object): Configuration object containing database table name (`cfg.dbrawdata`).
+        unit_tool_data (dict): A dictionary containing:
+            - "timestamp" (str): The reference timestamp string in "%Y-%m-%d %H:%M:%S" format.
+            - "unit" (str): The UnitName to filter by.
+            - "tool" (str): The ToolNameID to filter by.
+            - "node_num" (int): The NodeNum to filter by.
+        pool (object): The database connection pool.
+
+    Returns:
+        tuple: A tuple containing the event timestamp, BatLevel, and Temperature of the
+            nearest record, or None if an error occurs or no record is found.
+    """
+    ref_timestamp = datetime.strptime(unit_tool_data["timestamp"], "%Y-%m-%d %H:%M:%S")
+    start_timestamp = ref_timestamp - timedelta(seconds=45)
+    end_timestamp = ref_timestamp + timedelta(seconds=45)
+    logger.info(f"Find nearest timestamp: {ref_timestamp}")
+    async with pool.acquire() as conn:
+        async with conn.cursor() as cur:
+            try:
+                await cur.execute(f'''SELECT TIMESTAMP(`EventDate`, `EventTime`) AS event_timestamp, BatLevel, Temperature
+                    FROM {cfg.dbrawdata}
+                    WHERE UnitName = "{unit_tool_data["unit"]}" AND ToolNameID = "{unit_tool_data["tool"]}" AND NodeNum = {unit_tool_data["node_num"]}
+                    AND TIMESTAMP(`EventDate`, `EventTime`) BETWEEN "{start_timestamp}" AND "{end_timestamp}"
+                    ORDER BY ABS(TIMESTAMPDIFF(SECOND, TIMESTAMP(`EventDate`, `EventTime`), "{ref_timestamp}"))
+                    LIMIT 1
+                    ''')
+                return await cur.fetchone()
+            except Exception as e:
+                logger.error(f"Error: {e}")