More fixes
@@ -32,6 +32,8 @@ class Config:
         self.dbuser = c.get("db", "user")
         self.dbpass = c.get("db", "password")
         self.dbname = c.get("db", "dbName")
+        self.max_retries = c.getint("db", "maxRetries")
+
 
         # Tables
         self.dbusertable = c.get("tables", "userTableName")
@@ -21,6 +21,7 @@ class Config:
         self.dbuser = c.get("db", "user")
         self.dbpass = c.get("db", "password")
         self.dbname = c.get("db", "dbName")
+        self.max_retries = c.getint("db", "maxRetries")
 
         # Tables
         self.dbusertable = c.get("tables", "userTableName")
@@ -21,6 +21,7 @@ class Config:
         self.dbuser = c.get("db", "user")
         self.dbpass = c.get("db", "password")
         self.dbname = c.get("db", "dbName")
+        self.max_retries = c.getint("db", "maxRetries")
 
         # Tables
         self.dbusertable = c.get("tables", "userTableName")
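For context, the new `maxRetries` key is read with `configparser`'s `getint`, so the ini value must parse as an integer. A minimal sketch of how the key would be read (the ini content here is hypothetical):

# sketch: reading the new maxRetries key (ini content is hypothetical)
import configparser

c = configparser.ConfigParser()
c.read_string("""
[db]
maxRetries = 3
""")

# getint parses the value as an integer; a non-numeric value raises ValueError
max_retries = c.getint("db", "maxRetries")
print(max_retries)  # 3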
@@ -44,6 +44,9 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool) -> list:
     for riga in [riga for riga in righe if ';|;' in riga]:
         timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
         EventDate, EventTime = timestamp.split(' ')
+        if batlevel == '|':
+            batlevel = temperature
+            temperature, rilevazioni = rilevazioni.split(';',1)
         valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;')  # strip leading '|;', strip any trailing ';', split on ';|;'
         for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
             valori = valori_nodo.split(';')
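The new `if batlevel == '|'` branch handles rows where the battery field is absent and the node separator shifts one position left. A hedged walkthrough with a made-up sample row (the field layout is inferred from the diff; real rows may differ):

# made-up sample row: timestamp;batlevel;temperature;|;node1-values;|;node2-values
riga = "2024-01-01 12:00:00;3.6;21.5;|;1.0;2.0;|;3.0;4.0"

timestamp, batlevel, temperature, rilevazioni = riga.split(';', 3)
EventDate, EventTime = timestamp.split(' ')

# when the battery field is missing, '|' lands in batlevel and the fields shift
if batlevel == '|':
    batlevel = temperature
    temperature, rilevazioni = rilevazioni.split(';', 1)

valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;')
print(valori_nodi)  # ['1.0;2.0', '3.0;4.0'] -> one entry per node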
@@ -102,7 +105,7 @@ async def make_channels_matrix(cfg: object, id: int, pool) -> list:
     righe = ToolData.splitlines()
     matrice_valori = []
     for riga in [riga for riga in righe if ';|;' in riga]:
-        timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
+        timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3)
         EventDate, EventTime = timestamp.split(' ')
         valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
         valori_iter = iter(valori_splitted)
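The `replace(';|;',';')` collapses the node separator before splitting, so the header split no longer breaks when `';|;'` sits between the leading fields. A quick illustration with a made-up row (layout inferred from the diff):

# made-up channels-style row where header fields are separated by ';|;'
riga = "2024-01-01 12:00:00;|;3.6;|;21.5;|;1.0;2.0"
timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;', ';').split(';', 3)
print(timestamp, batlevel, temperature, rilevazioni)
# 2024-01-01 12:00:00 3.6 21.5 1.0;2.0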
@@ -113,3 +116,62 @@ async def make_channels_matrix(cfg: object, id: int, pool) -> list:
             matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori))))
 
     return matrice_valori
+
+
+async def make_musa_matrix(cfg: object, id: int, pool) -> list:
+    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName)
+    righe = ToolData.splitlines()
+    matrice_valori = []
+    for riga in [riga for riga in righe if ';|;' in riga]:
+        timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2)
+        if timestamp == '':
+            continue
+        EventDate, EventTime = timestamp.split(' ')
+        temperature = rilevazioni.split(';')[0]
+        logger.info(f'{temperature}, {rilevazioni}')
+        valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
+        valori_iter = iter(valori_splitted)
+
+        valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels]
+
+        for num_nodo, valori in enumerate(valori_nodi, start=1):
+            matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori))))
+
+    return matrice_valori
+
+
+async def make_tlp_matrix(cfg: object, id: int, pool) -> list:
+    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    righe = ToolData.splitlines()
+    valori_x_nodo = 2
+    matrice_valori = []
+    for riga in righe:
+        timestamp, batlevel, temperature, barometer, rilevazioni = riga.split(';',4)
+        EventDate, EventTime = timestamp.split(' ')
+        lista_rilevazioni = rilevazioni.strip(';').split(';')
+        lista_rilevazioni.append(barometer)
+        valori_nodi = [lista_rilevazioni[i:i + valori_x_nodo] for i in range(0, len(lista_rilevazioni), valori_x_nodo)]
+        for num_nodo, valori in enumerate(valori_nodi, start=1):
+            matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori))))
+    return matrice_valori
+
+
+async def make_gd_matrix(cfg: object, id: int, pool) -> list:
+    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    righe = ToolData.splitlines()
+    matrice_valori = []
+    pattern = r'^-\d*dB$'
+    for riga in [riga for riga in righe if ';|;' in riga]:
+        timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
+        EventDate, EventTime = timestamp.split(' ')
+        if batlevel == '|':
+            batlevel = temperature
+            temperature, rilevazioni = rilevazioni.split(';',1)
+        if re.match(pattern, rilevazioni):
+            valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;')  # strip leading '|;', strip any trailing ';', split on ';|;'
+            for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
+                valori = valori_nodo.split(';')
+                matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori))))
+
+    return matrice_valori
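`make_musa_matrix` distributes one flat value list across nodes with `itertools.islice`: because every `islice` call consumes the same shared iterator, each node takes the next `channels` values in order. A minimal standalone sketch (node counts and values are made up):

from itertools import islice

node_channels = [2, 3, 1]  # hypothetical channel count per node
valori = ['a', 'b', 'c', 'd', 'e', 'f']

valori_iter = iter(valori)
# each islice call consumes the next `channels` items from the shared iterator
valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels]
print(valori_nodi)  # [['a', 'b'], ['c', 'd', 'e'], ['f']]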
@@ -1,6 +1,6 @@
-from utils.database.loader_action import load_data, update_status
+from utils.database.loader_action import load_data, update_status, unlock
 from utils.database import DATA_LOADED
-from utils.csv.data_preparation import make_pipe_sep_matrix, make_ain_din_matrix, make_channels_matrix
+from utils.csv.data_preparation import make_pipe_sep_matrix, make_ain_din_matrix, make_channels_matrix, make_tlp_matrix, make_gd_matrix, make_musa_matrix
 
 import logging
 
@@ -10,7 +10,10 @@ async def main_loader(cfg: object, id: int, pool, action: str) -> None:
     type_matrix_mapping = {
         "pipe_separator": make_pipe_sep_matrix,
         "analogic_digital": make_ain_din_matrix,
-        "channels": make_channels_matrix
+        "channels": make_channels_matrix,
+        "tlp": make_tlp_matrix,
+        "gd": make_gd_matrix,
+        "musa": make_musa_matrix
     }
    if action in type_matrix_mapping:
        function_to_call = type_matrix_mapping[action]
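The mapping is a plain dict dispatch from action name to matrix builder. A minimal runnable sketch of the same pattern (the handlers and arguments here are stand-ins, not the real builders):

import asyncio

async def make_a(cfg, id, pool): return [["a-row"]]
async def make_b(cfg, id, pool): return [["b-row"]]

type_matrix_mapping = {"a": make_a, "b": make_b}

async def dispatch(action):
    builder = type_matrix_mapping.get(action)
    if builder is None:  # mirrors the else/logger.warning branch
        print(f"Action '{action}' not recognized.")
        return None
    return await builder(None, 0, None)

print(asyncio.run(dispatch("a")))  # [['a-row']]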
@@ -21,5 +21,6 @@ async def main_loader(cfg: object, id: int, pool, action: str) -> None:
         # Load the data into the database
         if await load_data(cfg, matrice_valori, pool):
             await update_status(cfg, id, DATA_LOADED, pool)
+            await unlock(cfg, id, pool)
     else:
         logger.warning(f"Action '{action}' non riconosciuta.")
@@ -1,5 +1,6 @@
 #!.venv/bin/python
 import logging
+import asyncio
 
 logger = logging.getLogger(__name__)
 
@@ -50,29 +51,52 @@ async def load_data(cfg: object, matrice_valori: list, pool) -> bool :
 
     async with pool.acquire() as conn:
         async with conn.cursor() as cur:
-            try:
-                await cur.executemany(sql_insert_RAWDATA, matrice_valori)
-                await conn.commit()
-                logging.info("Data loaded.")
-                rc = True
-            except Exception as e:
-                await conn.rollback()
-                logging.error(f"Error: {e}.")
-                rc = False
-            finally:
-                return rc
+            rc = False
+            for attempt in range(cfg.max_retries):
+                try:
+                    await cur.executemany(sql_insert_RAWDATA, matrice_valori)
+                    await conn.commit()
+                    logging.info("Data loaded.")
+                    rc = True
+                except Exception as e:
+                    await conn.rollback()
+                    logging.error(f"Error: {e}.")
+
+                    if e.args[0] == 1213:  # Deadlock detected
+                        logging.warning(f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}")
+
+                        if attempt < cfg.max_retries - 1:
+                            delay = (2 * attempt)
+                            await asyncio.sleep(delay)
+                            continue
+                        else:
+                            logging.error("Max retry attempts reached for deadlock")
+                            raise
+                finally:
+                    return rc
 
 async def update_status(cfg: object, id: int, status: int, pool) -> None:
     async with pool.acquire() as conn:
         async with conn.cursor() as cur:
             try:
-                await cur.execute(f'update {cfg.dbrectable} set locked = 0, status = {status}, {timestamp_cols[status]} = now() where id = {id}')
+                await cur.execute(f'update {cfg.dbrectable} set status = {status}, {timestamp_cols[status]} = now() where id = {id}')
                 await conn.commit()
                 logging.info("Status updated.")
             except Exception as e:
                 await conn.rollback()
                 logging.error(f'Error: {e}')
+
+async def unlock(cfg: object, id: int, pool) -> None:
+    async with pool.acquire() as conn:
+        async with conn.cursor() as cur:
+            try:
+                await cur.execute(f'update {cfg.dbrectable} set locked = 0 where id = {id}')
+                await conn.commit()
+                logging.info(f"id {id} unlocked.")
+            except Exception as e:
+                await conn.rollback()
+                logging.error(f'Error: {e}')
 
 async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool) -> tuple:
     async with pool.acquire() as conn:
         async with conn.cursor() as cur:
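The retry loop keys off MySQL error code 1213 (ER_LOCK_DEADLOCK) with linear backoff (0s, 2s, 4s, ...). A minimal standalone sketch of the same pattern, assuming a DB-API-style async cursor and connection; since a `return` inside `finally` suppresses a `continue` or `raise` issued in the `except` block, this sketch returns only after the loop:

import asyncio
import logging

async def insert_with_retry(cur, conn, sql, rows, max_retries: int) -> bool:
    # retry only on MySQL deadlocks (error code 1213), with linear backoff
    for attempt in range(max_retries):
        try:
            await cur.executemany(sql, rows)
            await conn.commit()
            logging.info("Data loaded.")
            return True
        except Exception as e:
            await conn.rollback()
            if e.args and e.args[0] == 1213:  # deadlock detected
                logging.warning(f"Deadlock, attempt {attempt + 1}/{max_retries}")
                if attempt < max_retries - 1:
                    await asyncio.sleep(2 * attempt)  # 0s, 2s, 4s, ...
                    continue
            logging.error(f"Error: {e}.")
            return False
    return False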
@@ -1,4 +1,4 @@
 from utils.csv.loaders import main_loader as pipe_sep_main_loader
 
-async def main_loader(cfg: object, id: int) -> None:
-    return pipe_sep_main_loader(cfg, id, "pipe_separator")
+async def main_loader(cfg: object, id: int, pool) -> None:
+    await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")
@@ -1,4 +1,4 @@
 from utils.csv.loaders import main_loader as pipe_sep_main_loader
 
-async def main_loader(cfg: object, id: int) -> None:
-    await pipe_sep_main_loader(cfg, id, "pipe_separator")
+async def main_loader(cfg: object, id: int, pool) -> None:
+    await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")
@@ -1,16 +1,4 @@
-#!.venv/bin/python
-# Import necessary modules
-from utils.database.loader_action import load_data, update_status
-from utils.database import DATA_LOADED
-from utils.csv.data_preparation import make_matrix
-import logging
+from utils.csv.loaders import main_loader as musa_main_loader
 
-logger = logging.getLogger(__name__)
-
-# Define the main function for loading data
-async def main_loader(cfg: object, id: int) -> None:
-    # Create a matrix of values from the data
-    matrice_valori = make_matrix(cfg, id)
-    # Load the data into the database
-    if load_data(cfg, matrice_valori):
-        update_status(cfg, id, DATA_LOADED)
+async def main_loader(cfg: object, id: int, pool) -> None:
+    await musa_main_loader(cfg, id, pool, "musa")
@@ -1,16 +1,4 @@
-#!.venv/bin/python
-# Import necessary modules
-from utils.database.loader_action import load_data, update_status
-from utils.database import DATA_LOADED
-from utils.csv.data_preparation import make_matrix
-import logging
+from utils.csv.loaders import main_loader as gd_main_loader
 
-logger = logging.getLogger(__name__)
-
-# Define the main function for loading data
-async def main_loader(cfg: object, id: int) -> None:
-    # Create a matrix of values from the data
-    matrice_valori = make_matrix(cfg, id)
-    # Load the data into the database
-    if load_data(cfg, matrice_valori):
-        update_status(cfg, id, DATA_LOADED)
+async def main_loader(cfg: object, id: int, pool) -> None:
+    await gd_main_loader(cfg, id, pool, "gd")
@@ -1,4 +1,4 @@
-from .tlp_tlp import main_loader as tlp_tlp_main_loader
+from utils.csv.loaders import main_loader as tlp_main_loader
 
 async def main_loader(cfg: object, id: int, pool) -> None:
-    await tlp_tlp_main_loader(cfg, id)
+    await tlp_main_loader(cfg, id, pool, "tlp")
@@ -1,16 +1,4 @@
-#!.venv/bin/python
-# Import necessary modules
-from utils.database.loader_action import load_data, update_status
-from utils.database import DATA_LOADED
-from utils.csv.data_preparation import make_matrix
-import logging
+from utils.csv.loaders import main_loader as tlp_main_loader
 
-logger = logging.getLogger(__name__)
-
-# Define the main function for loading data
-async def main_loader(cfg: object, id: int) -> None:
-    # Create a matrix of values from the data
-    matrice_valori = make_matrix(cfg, id)
-    # Load the data into the database
-    if load_data(cfg, matrice_valori):
-        update_status(cfg, id, DATA_LOADED)
+async def main_loader(cfg: object, id: int, pool) -> None:
+    await tlp_main_loader(cfg, id, pool, "tlp")