fix logging to use the new logger
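The diff replaces direct calls to the stdlib logging module with a module-level logger object throughout. Setting up that logger is not part of this commit; a typical arrangement (an assumption, not taken from the diff) looks like:

import logging

# Presumed per-module logger the commit switches to; the actual
# configuration (handlers, level, format) lives elsewhere in the codebase.
logger = logging.getLogger(__name__)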
@@ -47,7 +47,7 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
     that match the pattern '.-', because they are readings with a bad number - a negative after the decimal point
     that match the pattern 'File Creation', because it means the control unit reported an error
     """
-    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
         timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
         EventDate, EventTime = timestamp.split(' ')
         if batlevel == '|':
@@ -114,7 +114,7 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
     node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
-    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
         timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3)
         EventDate, EventTime = timestamp.split(' ')
         valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
@@ -142,7 +142,7 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
     node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
-    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
         timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2)
         if timestamp == '':
             continue
@@ -202,7 +202,7 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
     righe = ToolData.splitlines()
     matrice_valori = []
     pattern = r';-?\d+dB$'
-    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]:
+    for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
         timestamp, rilevazioni = riga.split(';|;',1)
         EventDate, EventTime = timestamp.split(' ')
         logger.info(f"GD id {id}: {pattern} {rilevazioni}")
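Each of the four hunks above adds the same riga.isprintable() guard, which rejects rows containing control or other non-printable characters before they reach the split() parsing. A standalone sketch of the shared filter predicate, with invented sample rows:

# Sketch of the row filter used by the make_*_matrix functions.
# The sample rows are made up for illustration.
righe = [
    '2024-01-01 00:00:00;|;3.65;21.5;1.23;4.56',    # well-formed row
    '2024-01-01 00:10:00;|;3.65;21.5;.-1.23;4.56',  # '.-' marks a bad negative reading
    '2024-01-01 00:20:00;|;No RX',                  # radio timeout marker
    '2024-01-01 00:30:00;|;3.65;\x00;1.23',         # NUL byte: isprintable() is False
]
kept = [riga for riga in righe
        if ';|;' in riga
        and 'No RX' not in riga
        and '.-' not in riga
        and 'File Creation' not in riga
        and riga.isprintable()]
print(kept)  # only the first row survives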
@@ -56,5 +56,5 @@ async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_
         return csv_data

     except Exception as e:
-        logging.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}")
+        logger.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}")
         return None
@@ -68,7 +68,7 @@ async def load_data(cfg: object, matrice_valori: list, pool: object) -> bool:
         async with conn.cursor() as cur:
             for attempt in range(cfg.max_retries):
                 try:
-                    logging.info(f"Loading data attempt {attempt + 1}.")
+                    logger.info(f"Loading data attempt {attempt + 1}.")

                     for i in range(0, len(matrice_valori), BATCH_SIZE):
                         batch = matrice_valori[i:i + BATCH_SIZE]
@@ -76,17 +76,18 @@ async def load_data(cfg: object, matrice_valori: list, pool: object) -> bool:
                         await cur.executemany(sql_insert_RAWDATA, batch)
                         await conn.commit()

-                        logging.info(f"Completed batch {i//BATCH_SIZE + 1}/{(len(matrice_valori)-1)//BATCH_SIZE + 1}")
+                        logger.info(f"Completed batch {i//BATCH_SIZE + 1}/{(len(matrice_valori)-1)//BATCH_SIZE + 1}")

-                    logging.info("Data loaded.")
+                    logger.info("Data loaded.")
                     rc = True
                     break
                 except Exception as e:
                     await conn.rollback()
-                    logging.error(f"Error: {e}.")
+                    logger.error(f"Error: {e}.")
+                    logger.error(f"Matrice valori da inserire: {batch}.")

                     if e.args[0] == 1213: # Deadlock detected
-                        logging.warning(
+                        logger.warning(
                             f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}"
                         )

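The progress message computes the total batch count with the ceiling-division idiom (n - 1) // BATCH_SIZE + 1. A quick check of the arithmetic with illustrative numbers (the real BATCH_SIZE is defined elsewhere in the module):

BATCH_SIZE = 1000                  # illustrative value
n = 2500                           # hypothetical number of rows
print((n - 1) // BATCH_SIZE + 1)   # 3, so batches log as 1/3, 2/3, 3/3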
@@ -95,7 +96,7 @@ async def load_data(cfg: object, matrice_valori: list, pool: object) -> bool:
                             await asyncio.sleep(delay)
                             continue
                         else:
-                            logging.error("Max retry attempts reached for deadlock")
+                            logger.error("Max retry attempts reached for deadlock")
                             raise
     return rc

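Taken together, the load_data hunks implement batched inserts with a commit per batch and a retry loop for MySQL deadlocks (error code 1213). A condensed sketch of the pattern, assuming an aiomysql-style pool; the real delay computation and SQL statement are outside these hunks:

import asyncio

async def load_data_sketch(cfg, matrice_valori, pool, sql_insert, logger, batch_size=1000):
    rc = False
    async with pool.acquire() as conn:                  # aiomysql-style pool assumed
        async with conn.cursor() as cur:
            for attempt in range(cfg.max_retries):
                try:
                    for i in range(0, len(matrice_valori), batch_size):
                        await cur.executemany(sql_insert, matrice_valori[i:i + batch_size])
                        await conn.commit()             # commit per batch keeps transactions short
                    rc = True
                    break
                except Exception as e:
                    await conn.rollback()
                    if e.args and e.args[0] == 1213:    # deadlock: back off and retry
                        await asyncio.sleep(2 ** attempt)  # hypothetical backoff formula
                        continue
                    raise
    return rc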
@@ -120,10 +121,10 @@ async def update_status(cfg: object, id: int, status: str, pool: object) -> None
                     """
                 )
                 await conn.commit()
-                logging.info(f"Status updated id {id}.")
+                logger.info(f"Status updated id {id}.")
             except Exception as e:
                 await conn.rollback()
-                logging.error(f"Error: {e}")
+                logger.error(f"Error: {e}")


 async def unlock(cfg: object, id: int, pool: object) -> None:
@@ -143,10 +144,10 @@ async def unlock(cfg: object, id: int, pool: object) -> None:
                     f"update {cfg.dbrectable} set locked = 0 where id = {id}"
                 )
                 await conn.commit()
-                logging.info(f"id {id} unlocked.")
+                logger.info(f"id {id} unlocked.")
             except Exception as e:
                 await conn.rollback()
-                logging.error(f"Error: {e}")
+                logger.error(f"Error: {e}")


 async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tuple:
@@ -172,4 +173,4 @@ async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tup
                     where t.name = "{tool}" and u.name = "{unit}"''')
                 return cur.fetchone()
             except Exception as e:
-                logging.error(f"Error: {e}")
+                logger.error(f"Error: {e}")
@@ -17,7 +17,7 @@ def on_file_received(self: object, file: str) -> None:
     """
     if not os.stat(file).st_size:
         os.remove(file)
-        logging.info(f'File {file} is empty: removed.')
+        logger.info(f'File {file} is empty: removed.')
     else:
         cfg = self.cfg
         path, filenameExt = os.path.split(file)
@@ -35,7 +35,7 @@ def on_file_received(self: object, file: str) -> None:
             conn = connetti_db(cfg)
         except mysql.connector.Error as e:
             print(f"Error: {e}")
-            logging.error(f'{e}')
+            logger.error(f'{e}')

         # Create a cursor
         cur = conn.cursor()
@@ -45,8 +45,8 @@ def on_file_received(self: object, file: str) -> None:
             conn.close()

         except Exception as e:
-            logging.error(f'File {file} not loaded. Held in user path.')
-            logging.error(f'{e}')
+            logger.error(f'File {file} not loaded. Held in user path.')
+            logger.error(f'{e}')
         else:
             os.remove(file)
-            logging.info(f'File {file} loaded: removed.')
+            logger.info(f'File {file} loaded: removed.')
@@ -72,7 +72,7 @@ async def send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, cs
                 send_ftp_info = await cur.fetchone()
                 logger.info(f"id {id} - {unit} - {tool}: estratti i dati per invio via ftp")
             except Exception as e:
-                logging.error(f"id {id} - {unit} - {tool} - errore nel query per invio ftp: {e}")
+                logger.error(f"id {id} - {unit} - {tool} - errore nel query per invio ftp: {e}")

     try:
         # Convert to bytes
@@ -95,17 +95,17 @@ async def send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, cs
         result = ftp.storbinary(f'STOR {send_ftp_info["ftp_filename"]}', csv_buffer)

         if result.startswith('226'):
-            logging.info(f"File {send_ftp_info["ftp_filename"]} inviato con successo")
+            logger.info(f"File {send_ftp_info["ftp_filename"]} inviato con successo")
             return True
         else:
-            logging.error(f"Errore nell'invio: {result}")
+            logger.error(f"Errore nell'invio: {result}")
             return False

     except all_errors as e:
-        logging.error(f"Errore FTP: {e}")
+        logger.error(f"Errore FTP: {e}")
         return False
     except Exception as e:
-        logging.error(f"Errore generico: {e}")
+        logger.error(f"Errore generico: {e}")
         return False
     finally:
         csv_buffer.close()
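The upload path converts the CSV text to a bytes buffer and treats a reply starting with '226' (Transfer complete) as success. A self-contained sketch of the same flow; host, credentials, and filename are placeholders:

import io
import logging
from ftplib import FTP, all_errors

logger = logging.getLogger(__name__)

def send_csv_sketch(host: str, user: str, password: str, filename: str, csv_text: str) -> bool:
    csv_buffer = io.BytesIO(csv_text.encode('utf-8'))   # convert to bytes, as in the hunk above
    try:
        with FTP(host) as ftp:
            ftp.login(user, password)
            result = ftp.storbinary(f'STOR {filename}', csv_buffer)
            return result.startswith('226')             # '226' means transfer complete
    except all_errors as e:
        logger.error(f"Errore FTP: {e}")
        return False
    finally:
        csv_buffer.close()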
@@ -41,14 +41,14 @@ def ftp_SITE_ADDU(self: object, line: str) -> None:
             conn = connetti_db(cfg)
         except mysql.connector.Error as e:
             print(f"Error: {e}")
-            logging.error(f'{e}')
+            logger.error(f'{e}')

         # Create a cursor
         cur = conn.cursor()
         cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('{user}', '{hash}', '{cfg.virtpath + user}', '{cfg.defperm}')")
         conn.commit()
         conn.close()
-        logging.info(f"User {user} created.")
+        logger.info(f"User {user} created.")
         self.respond('200 SITE ADDU successful.')
     except Exception as e:
         self.respond(f'501 SITE ADDU failed: {e}.')
@@ -72,7 +72,7 @@ def ftp_SITE_DISU(self: object, line: str) -> None:
             conn = connetti_db(cfg)
         except mysql.connector.Error as e:
             print(f"Error: {e}")
-            logging.error(f'{e}')
+            logger.error(f'{e}')

         # Create a cursor
         cur = conn.cursor()
@@ -80,7 +80,7 @@ def ftp_SITE_DISU(self: object, line: str) -> None:
         conn.commit()
         conn.close()

-        logging.info(f"User {user} deleted.")
+        logger.info(f"User {user} deleted.")
         self.respond('200 SITE DISU successful.')
     except Exception as e:
         self.respond('501 SITE DISU failed.')
@@ -102,7 +102,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None:
             conn = connetti_db(cfg)
         except mysql.connector.Error as e:
             print(f"Error: {e}")
-            logging.error(f'{e}')
+            logger.error(f'{e}')

         # Create a cursor
         cur = conn.cursor()
@@ -110,7 +110,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None:
             cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = null WHERE ftpuser = '{user}'")
             conn.commit()
         except Exception as e:
-            logging.error(f"Update DB failed: {e}")
+            logger.error(f"Update DB failed: {e}")

         cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE ftpuser = '{user}'")

@@ -123,7 +123,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None:

         conn.close()

-        logging.info(f"User {user} restored.")
+        logger.info(f"User {user} restored.")
         self.respond('200 SITE ENAU successful.')

     except Exception as e:
@@ -145,7 +145,7 @@ def ftp_SITE_LSTU(self: object, line: str) -> None:
             conn = connetti_db(cfg)
         except mysql.connector.Error as e:
             print(f"Error: {e}")
-            logging.error(f'{e}')
+            logger.error(f'{e}')

         # Create a cursor
         cur = conn.cursor()