diff --git a/utils/csv/data_preparation.py b/utils/csv/data_preparation.py index efa58d6..a449366 100644 --- a/utils/csv/data_preparation.py +++ b/utils/csv/data_preparation.py @@ -47,7 +47,7 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list: che hanno il pattern '.-' perché sono letture con un numero errato - negativo dopo la virgola che hanno il pattern 'File Creation' perché vuol dire che c'è stato un errore della centralina """ - for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]: + for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]: timestamp, batlevel, temperature, rilevazioni = riga.split(';',3) EventDate, EventTime = timestamp.split(' ') if batlevel == '|': @@ -114,7 +114,7 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list: node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool) righe = ToolData.splitlines() matrice_valori = [] - for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]: + for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]: timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3) EventDate, EventTime = timestamp.split(' ') valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|'] @@ -142,7 +142,7 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list: node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool) righe = ToolData.splitlines() matrice_valori = [] - for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File 
Creation' not in riga]: + for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]: timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2) if timestamp == '': continue @@ -202,7 +202,7 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list: righe = ToolData.splitlines() matrice_valori = [] pattern = r';-?\d+dB$' - for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga]: + for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]: timestamp, rilevazioni = riga.split(';|;',1) EventDate, EventTime = timestamp.split(' ') logger.info(f"GD id {id}: {pattern} {rilevazioni}") diff --git a/utils/database/elab_query.py b/utils/database/elab_query.py index 51d3857..5aeb918 100644 --- a/utils/database/elab_query.py +++ b/utils/database/elab_query.py @@ -56,5 +56,5 @@ async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_ return csv_data except Exception as e: - logging.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}") + logger.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}") return None \ No newline at end of file diff --git a/utils/database/loader_action.py b/utils/database/loader_action.py index e53bdc9..a016118 100644 --- a/utils/database/loader_action.py +++ b/utils/database/loader_action.py @@ -68,7 +68,7 @@ async def load_data(cfg: object, matrice_valori: list, pool: object) -> bool: async with conn.cursor() as cur: for attempt in range(cfg.max_retries): try: - logging.info(f"Loading data attempt {attempt + 1}.") + logger.info(f"Loading data attempt {attempt + 1}.") for i in range(0, len(matrice_valori), BATCH_SIZE): batch = matrice_valori[i:i + BATCH_SIZE] @@ -76,17 +76,18 @@ async def 
load_data(cfg: object, matrice_valori: list, pool: object) -> bool: await cur.executemany(sql_insert_RAWDATA, batch) await conn.commit() - logging.info(f"Completed batch {i//BATCH_SIZE + 1}/{(len(matrice_valori)-1)//BATCH_SIZE + 1}") + logger.info(f"Completed batch {i//BATCH_SIZE + 1}/{(len(matrice_valori)-1)//BATCH_SIZE + 1}") - logging.info("Data loaded.") + logger.info("Data loaded.") rc = True break except Exception as e: await conn.rollback() - logging.error(f"Error: {e}.") + logger.error(f"Error: {e}.") + logger.error(f"Matrice valori da inserire: {batch}.") if e.args[0] == 1213: # Deadlock detected - logging.warning( + logger.warning( f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}" ) @@ -95,7 +96,7 @@ async def load_data(cfg: object, matrice_valori: list, pool: object) -> bool: await asyncio.sleep(delay) continue else: - logging.error("Max retry attempts reached for deadlock") + logger.error("Max retry attempts reached for deadlock") raise return rc @@ -120,10 +121,10 @@ async def update_status(cfg: object, id: int, status: str, pool: object) -> None """ ) await conn.commit() - logging.info(f"Status updated id {id}.") + logger.info(f"Status updated id {id}.") except Exception as e: await conn.rollback() - logging.error(f"Error: {e}") + logger.error(f"Error: {e}") async def unlock(cfg: object, id: int, pool: object) -> None: @@ -143,10 +144,10 @@ async def unlock(cfg: object, id: int, pool: object) -> None: f"update {cfg.dbrectable} set locked = 0 where id = {id}" ) await conn.commit() - logging.info(f"id {id} unlocked.") + logger.info(f"id {id} unlocked.") except Exception as e: await conn.rollback() - logging.error(f"Error: {e}") + logger.error(f"Error: {e}") async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tuple: @@ -172,4 +173,4 @@ async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tup where t.name = "{tool}" and u.name = "{unit}"''') return cur.fetchone() except Exception as e: - 
logging.error(f"Error: {e}") + logger.error(f"Error: {e}") diff --git a/utils/ftp/file_management.py b/utils/ftp/file_management.py index a43e8e6..cddc43e 100644 --- a/utils/ftp/file_management.py +++ b/utils/ftp/file_management.py @@ -17,7 +17,7 @@ def on_file_received(self: object, file: str) -> None: """ if not os.stat(file).st_size: os.remove(file) - logging.info(f'File {file} is empty: removed.') + logger.info(f'File {file} is empty: removed.') else: cfg = self.cfg path, filenameExt = os.path.split(file) @@ -35,7 +35,7 @@ def on_file_received(self: object, file: str) -> None: conn = connetti_db(cfg) except mysql.connector.Error as e: print(f"Error: {e}") - logging.error(f'{e}') + logger.error(f'{e}') # Create a cursor cur = conn.cursor() @@ -45,8 +45,8 @@ def on_file_received(self: object, file: str) -> None: conn.close() except Exception as e: - logging.error(f'File {file} not loaded. Held in user path.') - logging.error(f'{e}') + logger.error(f'File {file} not loaded. Held in user path.') + logger.error(f'{e}') else: os.remove(file) - logging.info(f'File {file} loaded: removed.') \ No newline at end of file + logger.info(f'File {file} loaded: removed.') \ No newline at end of file diff --git a/utils/ftp/send_data.py b/utils/ftp/send_data.py index c906136..fdff40b 100644 --- a/utils/ftp/send_data.py +++ b/utils/ftp/send_data.py @@ -72,7 +72,7 @@ async def send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, cs send_ftp_info = await cur.fetchone() logger.info(f"id {id} - {unit} - {tool}: estratti i dati per invio via ftp") except Exception as e: - logging.error(f"id {id} - {unit} - {tool} - errore nel query per invio ftp: {e}") + logger.error(f"id {id} - {unit} - {tool} - errore nel query per invio ftp: {e}") try: # Converti in bytes @@ -95,17 +95,17 @@ async def send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, cs result = ftp.storbinary(f'STOR {send_ftp_info["ftp_filename"]}', csv_buffer) if result.startswith('226'): - 
logging.info(f"File {send_ftp_info["ftp_filename"]} inviato con successo") + logger.info(f"File {send_ftp_info['ftp_filename']} inviato con successo") return True else: - logging.error(f"Errore nell'invio: {result}") + logger.error(f"Errore nell'invio: {result}") return False except all_errors as e: - logging.error(f"Errore FTP: {e}") + logger.error(f"Errore FTP: {e}") return False except Exception as e: - logging.error(f"Errore generico: {e}") + logger.error(f"Errore generico: {e}") return False finally: csv_buffer.close() diff --git a/utils/ftp/user_admin.py b/utils/ftp/user_admin.py index f4e4d4e..2d097e4 100644 --- a/utils/ftp/user_admin.py +++ b/utils/ftp/user_admin.py @@ -41,14 +41,14 @@ def ftp_SITE_ADDU(self: object, line: str) -> None: conn = connetti_db(cfg) except mysql.connector.Error as e: print(f"Error: {e}") - logging.error(f'{e}') + logger.error(f'{e}') # Create a cursor cur = conn.cursor() cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('{user}', '{hash}', '{cfg.virtpath + user}', '{cfg.defperm}')") conn.commit() conn.close() - logging.info(f"User {user} created.") + logger.info(f"User {user} created.") self.respond('200 SITE ADDU successful.') except Exception as e: self.respond(f'501 SITE ADDU failed: {e}.') @@ -72,7 +72,7 @@ def ftp_SITE_DISU(self: object, line: str) -> None: conn = connetti_db(cfg) except mysql.connector.Error as e: print(f"Error: {e}") - logging.error(f'{e}') + logger.error(f'{e}') # Crea un cursore cur = conn.cursor() @@ -80,7 +80,7 @@ def ftp_SITE_DISU(self: object, line: str) -> None: conn.commit() conn.close() - logging.info(f"User {user} deleted.") + logger.info(f"User {user} deleted.") self.respond('200 SITE DISU successful.') except Exception as e: self.respond('501 SITE DISU failed.') @@ -102,7 +102,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None: conn = connetti_db(cfg) except mysql.connector.Error as e: print(f"Error: {e}") - logging.error(f'{e}') + 
logger.error(f'{e}') # Crea un cursore cur = conn.cursor() @@ -110,7 +110,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None: cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = null WHERE ftpuser = '{user}'") conn.commit() except Exception as e: - logging.error(f"Update DB failed: {e}") + logger.error(f"Update DB failed: {e}") cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE ftpuser = '{user}'") @@ -123,7 +123,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None: conn.close() - logging.info(f"User {user} restored.") + logger.info(f"User {user} restored.") self.respond('200 SITE ENAU successful.') except Exception as e: @@ -145,7 +145,7 @@ def ftp_SITE_LSTU(self: object, line: str) -> None: conn = connetti_db(cfg) except mysql.connector.Error as e: print(f"Error: {e}") - logging.error(f'{e}') + logger.error(f'{e}') # Crea un cursore cur = conn.cursor()