#!.venv/bin/python
"""Async helpers to bulk-load raw acquisition data into MySQL and track record status."""

import asyncio
import logging

logger = logging.getLogger(__name__)

# Status value -> timestamp column stamped by update_status():
# 0 = inserted_at, 1 = loaded_at, 2 = elaborated_at
timestamp_cols = ['inserted_at', 'loaded_at', 'elaborated_at']
async def load_data(cfg: object, matrice_valori: list, pool) -> bool:
    """Bulk-insert raw measurement rows into the raw-data table.

    Args:
        cfg: configuration object; reads ``cfg.dbrawdata`` (target table name —
            trusted config, since table names cannot be bound as parameters)
            and ``cfg.max_retries`` (deadlock retry budget).
        matrice_valori: list of value tuples, one per row, in the column order
            of the INSERT statement below (26 values each).
        pool: async connection pool (aiomysql-style API — pool.acquire() and
            conn.cursor() used as async context managers; assumed, confirm).

    Returns:
        True when the batch was committed (or there was nothing to load),
        False on any database error after rollback and, for deadlocks,
        after exhausting the retries.
    """
    # Same Logger instance as the module-level ``logger``; fetched locally so
    # the function is self-contained.
    log = logging.getLogger(__name__)

    if not matrice_valori:
        log.info("Nulla da caricare.")
        return True

    # MySQL 8.0.19+ row-alias syntax (``VALUES (...) AS new_data``).  The
    # ON DUPLICATE KEY branch rewrites each column only when its value
    # changed, and always refreshes ``Created_at`` via NOW().
    sql_insert_RAWDATA = f'''
        INSERT INTO {cfg.dbrawdata} (
            `UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,
            `Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,
            `Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`,
            `BatLevelModule`,`TemperatureModule`, `RssiModule`
        )
        VALUES (
            %s, %s, %s, %s, %s, %s, %s,
            %s, %s, %s, %s, %s, %s, %s, %s,
            %s, %s, %s, %s, %s, %s, %s, %s,
            %s, %s, %s
        ) as new_data
        ON DUPLICATE KEY UPDATE
            `BatLevel` = IF({cfg.dbrawdata}.`BatLevel` != new_data.`BatLevel`, new_data.`BatLevel`, {cfg.dbrawdata}.`BatLevel`),
            `Temperature` = IF({cfg.dbrawdata}.`Temperature` != new_data.Temperature, new_data.Temperature, {cfg.dbrawdata}.`Temperature`),
            `Val0` = IF({cfg.dbrawdata}.`Val0` != new_data.Val0, new_data.Val0, {cfg.dbrawdata}.`Val0`),
            `Val1` = IF({cfg.dbrawdata}.`Val1` != new_data.Val1, new_data.Val1, {cfg.dbrawdata}.`Val1`),
            `Val2` = IF({cfg.dbrawdata}.`Val2` != new_data.Val2, new_data.Val2, {cfg.dbrawdata}.`Val2`),
            `Val3` = IF({cfg.dbrawdata}.`Val3` != new_data.Val3, new_data.Val3, {cfg.dbrawdata}.`Val3`),
            `Val4` = IF({cfg.dbrawdata}.`Val4` != new_data.Val4, new_data.Val4, {cfg.dbrawdata}.`Val4`),
            `Val5` = IF({cfg.dbrawdata}.`Val5` != new_data.Val5, new_data.Val5, {cfg.dbrawdata}.`Val5`),
            `Val6` = IF({cfg.dbrawdata}.`Val6` != new_data.Val6, new_data.Val6, {cfg.dbrawdata}.`Val6`),
            `Val7` = IF({cfg.dbrawdata}.`Val7` != new_data.Val7, new_data.Val7, {cfg.dbrawdata}.`Val7`),
            `Val8` = IF({cfg.dbrawdata}.`Val8` != new_data.Val8, new_data.Val8, {cfg.dbrawdata}.`Val8`),
            `Val9` = IF({cfg.dbrawdata}.`Val9` != new_data.Val9, new_data.Val9, {cfg.dbrawdata}.`Val9`),
            `ValA` = IF({cfg.dbrawdata}.`ValA` != new_data.ValA, new_data.ValA, {cfg.dbrawdata}.`ValA`),
            `ValB` = IF({cfg.dbrawdata}.`ValB` != new_data.ValB, new_data.ValB, {cfg.dbrawdata}.`ValB`),
            `ValC` = IF({cfg.dbrawdata}.`ValC` != new_data.ValC, new_data.ValC, {cfg.dbrawdata}.`ValC`),
            `ValD` = IF({cfg.dbrawdata}.`ValD` != new_data.ValD, new_data.ValD, {cfg.dbrawdata}.`ValD`),
            `ValE` = IF({cfg.dbrawdata}.`ValE` != new_data.ValE, new_data.ValE, {cfg.dbrawdata}.`ValE`),
            `ValF` = IF({cfg.dbrawdata}.`ValF` != new_data.ValF, new_data.ValF, {cfg.dbrawdata}.`ValF`),
            `BatLevelModule` = IF({cfg.dbrawdata}.`BatLevelModule` != new_data.BatLevelModule, new_data.BatLevelModule, {cfg.dbrawdata}.`BatLevelModule`),
            `TemperatureModule` = IF({cfg.dbrawdata}.`TemperatureModule` != new_data.TemperatureModule, new_data.TemperatureModule, {cfg.dbrawdata}.`TemperatureModule`),
            `RssiModule` = IF({cfg.dbrawdata}.`RssiModule` != new_data.RssiModule, new_data.RssiModule, {cfg.dbrawdata}.`RssiModule`),
            `Created_at` = NOW()
    '''

    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            rc = False
            for attempt in range(cfg.max_retries):
                try:
                    await cur.executemany(sql_insert_RAWDATA, matrice_valori)
                    await conn.commit()
                    log.info("Data loaded.")
                    rc = True
                    # Success: stop retrying.  (The original ``finally:
                    # return rc`` both disabled the retry ``continue`` and
                    # swallowed the re-raised exception — a ``return`` in
                    # ``finally`` overrides any pending control flow.)
                    break
                except Exception as e:
                    await conn.rollback()
                    log.error(f"Error: {e}.")
                    # MySQL error 1213 = deadlock: retry with a growing delay.
                    # Guard ``e.args`` — some exceptions carry no args at all.
                    if e.args and e.args[0] == 1213:
                        log.warning(f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}")
                        if attempt < cfg.max_retries - 1:
                            # Linear backoff: 0s, 2s, 4s, ...
                            await asyncio.sleep(2 * attempt)
                            continue
                        log.error("Max retry attempts reached for deadlock")
                    # Non-deadlock error, or retries exhausted: give up.
                    break
            return rc


async def update_status(cfg: object, id: int, status: int, pool) -> None:
    """Advance a record's status and stamp the matching timestamp column.

    Args:
        cfg: configuration object; reads ``cfg.dbrectable`` (table name,
            trusted config).
        id: primary key of the row to update.
        status: new status value; also indexes the module-level
            ``timestamp_cols`` whitelist to pick which ``*_at`` column gets
            NOW() (0 = inserted_at, 1 = loaded_at, 2 = elaborated_at).
        pool: async connection pool (aiomysql-style API).

    Errors are rolled back and logged; nothing is raised or returned.
    """
    # Same Logger instance as the module-level ``logger``.
    log = logging.getLogger(__name__)
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            try:
                # Bind ``status`` and ``id`` as parameters instead of
                # interpolating them into the SQL text (avoids injection /
                # malformed statements).  The column name cannot be a
                # parameter, but it comes from the ``timestamp_cols``
                # whitelist, not from caller input.
                await cur.execute(
                    f'update {cfg.dbrectable} set status = %s, '
                    f'{timestamp_cols[status]} = now() where id = %s',
                    (status, id),
                )
                await conn.commit()
                log.info("Status updated.")
            except Exception as e:
                await conn.rollback()
                log.error(f'Error: {e}')


async def unlock(cfg: object, id: int, pool) -> None:
    """Clear the ``locked`` flag on one record of the receive table.

    Args:
        cfg: configuration object; reads ``cfg.dbrectable`` (table name,
            trusted config).
        id: primary key of the row to unlock.
        pool: async connection pool (aiomysql-style API).

    Errors are rolled back and logged; nothing is raised or returned.
    """
    # Same Logger instance as the module-level ``logger``.
    log = logging.getLogger(__name__)
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            try:
                # Bind ``id`` as a parameter instead of interpolating it into
                # the SQL text (avoids injection / malformed statements).
                await cur.execute(
                    f'update {cfg.dbrectable} set locked = 0 where id = %s',
                    (id,),
                )
                await conn.commit()
                log.info(f"id {id} unlocked.")
            except Exception as e:
                await conn.rollback()
                log.error(f'Error: {e}')


async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool) -> tuple:
    """Fetch the matlab call and forwarding settings for a unit/tool pair.

    Args:
        cfg: configuration object (currently unused; kept for signature
            consistency with the other helpers).
        unit: unit name to match against ``units.name``.
        tool: tool name to match against ``tools.name``.
        pool: async connection pool (aiomysql-style API).

    Returns:
        The first matching row (matcall, ftp_send, unit_id, statustools,
        api_send, inoltro_api, inoltro_api_url, inoltro_api_bearer_token,
        duedate), or None when no row matches or a database error occurred.
    """
    # Same Logger instance as the module-level ``logger``.
    log = logging.getLogger(__name__)
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            try:
                # Bind ``tool``/``unit`` as parameters instead of quoting
                # them into the SQL text (avoids injection via names).
                await cur.execute(
                    '''select m.matcall, t.ftp_send, t.unit_id, s.`desc` as statustools,
                              t.api_send, u.inoltro_api, u.inoltro_api_url,
                              u.inoltro_api_bearer_token,
                              IFNULL(u.duedate, "") as duedate
                       from matfuncs as m
                       inner join tools as t on t.matfunc = m.id
                       inner join units as u on u.id = t.unit_id
                       inner join statustools as s on t.statustool_id = s.id
                       where t.name = %s and u.name = %s''',
                    (tool, unit),
                )
                # BUG FIX: fetchone() is a coroutine and must be awaited —
                # the original returned the coroutine object, never a row.
                return await cur.fetchone()
            except Exception as e:
                log.error(f'Error: {e}')
                return None