add src path

2025-07-31 23:10:23 +02:00
parent acaad8a99f
commit 6ff97316dc
57 changed files with 50 additions and 42 deletions

env/elab.ini (vendored)

@@ -1,5 +1,5 @@
 [logging]
-logFilename = ./elab_data.log
+logFilename = ../logs/elab_data.log
 [threads]
 max_num = 10

env/ftp.ini (vendored)

@@ -18,7 +18,7 @@
 path = /home/alex/aseftp/csvfs/
 [logging]
-logFilename = ./ftp_csv_rec.log
+logFilename = ../logs/ftp_csv_rec.log
 [unit]
 Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG

env/load.ini (vendored)

@@ -1,5 +1,5 @@
 [logging]
-logFilename = ./load_raw_data.log
+logFilename = ../logs/load_raw_data.log
 [threads]
 max_num = 5

env/send.ini (vendored)

@@ -1,5 +1,5 @@
 [logging]
-logFilename = ./send_data.log
+logFilename = ../logs/send_data.log
 [threads]
 max_num = 5
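
All four services now write their logs to a shared ../logs/ directory instead of their working directory. These are relative paths, so they resolve against the process working directory, which (per the commit title) is presumably src/. A minimal sketch of that resolution, assuming the services use the standard logging module and are launched from src/ (the directory-creation step is an illustrative precaution, not part of this commit):

import logging
import os

# logFilename as read from the [logging] section, e.g. env/elab.ini.
log_filename = "../logs/elab_data.log"

# "../logs" is resolved against os.getcwd(); if the directory does not
# exist, the file handler raises FileNotFoundError, so create it first.
os.makedirs(os.path.dirname(log_filename), exist_ok=True)

logging.basicConfig(
    filename=log_filename,
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
)
logging.info("log path resolved relative to %s", os.getcwd())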

@@ -1 +0,0 @@
-select count(id) from RAWDATACOR where date(created_at) = '2025-07-27' and concat(UnitName,ToolNameID) in ('ID0005_DT0000','ID0005_DT0036','ID0005_DT0037','ID0005_DT0038','ID0005_DT0039','ID0005_DT0040','ID0005_DT0041','ID0005_DT0042','ID0005_DT0043','ID0006_DT0044','ID0006_DT0045','ID0006_DT0046','ID0006_DT0047','ID0006_DT0048','ID0006_DT0049','ID0007_DT0050','ID0007_DT0051','ID0007_DT0052','ID0007_DT0053','ID0010_DT0054','ID0010_DT0055','ID0012_DT0001','ID0012_DT0102','ID0012_DT0103','ID0012_DT0104','ID0013_DT0001','ID0034_DT0002','ID0039_DT0001','ID0039_DT0002','ID0039_DT0003','ID0039_DT0004','ID0039_DT0005','ID0039_DT0006','ID0039_DT0007','ID0039_DT0008','ID0190_DT0001','ID0190_DT0002','ID0190_DT0003','ID0190_DT0004','ID0190_DT0005','ID0198_DT0229','ID0198_DT0230','ID0199_DT0010','ID0215_DT0234','ID0215_DT0235','ID0215_DT0236','ID0215_DT0237','ID0226_DT0000','ID0226_DT0239','ID0226_DT0240','ID0226_DT0241','ID0226_DT0242','ID0226_DT0243','ID0226_DT0244','ID0226_DT0245','ID0226_DT0246','ID0226_DT0247','ID0226_DT0248','ID0226_DT0249','ID0226_DT0250','ID0226_DT0251')

@@ -41,17 +41,16 @@ async def worker(worker_id: int, cfg: object, pool: object) -> None:
     while True:
         try:
             logger.info("Inizio elaborazione")
             record = await get_next_csv_atomic(pool, cfg.dbrectable, WorkflowFlags.DATA_LOADED, WorkflowFlags.DATA_ELABORATED)
             if record:
                 id, unit_type, tool_type, unit_name, tool_name = [x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record]
-                tool_elab_info = await get_matlab_command(cfg, tool_name, unit_name, pool)
+                if tool_type.lower() != "gd":  # GD tools must not be elaborated
+                    tool_elab_info = await get_matlab_command(cfg, tool_name.upper(), unit_name.upper(), pool)
                 if tool_elab_info:
                     if tool_elab_info['statustools'].lower() in cfg.elab_status:
                         logger.info(f"Elaborazione id {id} per {unit_name} {tool_name} ")
-                        matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name} {tool_name}"
+                        matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name.upper()} {tool_name.upper()}"
                         proc = await asyncio.create_subprocess_shell(
                             matlab_cmd,
                             cwd=cfg.matlab_func_path,

@@ -73,6 +72,14 @@ async def worker(worker_id: int, cfg: object, pool: object) -> None:
                         await asyncio.sleep(ELAB_PROCESSING_DELAY)
                     else:
                         logger.info(f"id {id} - {unit_name} - {tool_name} {tool_elab_info['statustools']}: MatLab calc by-passed.")
+                        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+                        await update_status(cfg, id, WorkflowFlags.DUMMY_ELABORATED, pool)
+                        await unlock(cfg, id, pool)
+                else:
+                    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+                    await update_status(cfg, id, WorkflowFlags.DUMMY_ELABORATED, pool)
+                    await unlock(cfg, id, pool)
             else:
                 logger.info("Nessun record disponibile")
                 await asyncio.sleep(NO_RECORD_SLEEP)
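
Condensed, the new worker flow appears to be: GD tools skip the MATLAB lookup entirely, and records that are by-passed (or have no elaboration command) are marked as both DATA_ELABORATED and DUMMY_ELABORATED before being unlocked, so they leave the queue instead of being picked up again. A simplified sketch of that decision path, using the project helpers with the signatures visible in this hunk (run_matlab is a hypothetical stand-in for the subprocess branch):

async def process_record(cfg, pool, record):
    # Sketch only: get_matlab_command, update_status and unlock are the
    # project's own coroutines, used here exactly as in the diff above.
    id, unit_type, tool_type, unit_name, tool_name = record
    if tool_type.lower() != "gd":  # GD tools must not be elaborated
        tool_elab_info = await get_matlab_command(cfg, tool_name.upper(), unit_name.upper(), pool)
        if tool_elab_info and tool_elab_info["statustools"].lower() in cfg.elab_status:
            await run_matlab(cfg, tool_elab_info, unit_name, tool_name)  # hypothetical wrapper
            return
    # GD tool, unknown tool, or status outside cfg.elab_status:
    # record a dummy elaboration and release the lock.
    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
    await update_status(cfg, id, WorkflowFlags.DUMMY_ELABORATED, pool)
    await unlock(cfg, id, pool)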

@@ -7,7 +7,7 @@ class Config:
     def __init__(self):
         c = ConfigParser()
-        c.read(["env/ftp.ini", "env/db.ini"])
+        c.read(["../env/ftp.ini", "../env/db.ini"])
         # FTP setting
         self.service_port = c.getint("ftpserver", "service_port")

@@ -7,7 +7,7 @@ class Config:
     def __init__(self):
         c = ConfigParser()
-        c.read(["env/load.ini", "env/db.ini"])
+        c.read(["../env/load.ini", "../env/db.ini"])
         # LOG setting
         self.logfilename = c.get("logging", "logFilename")

@@ -7,7 +7,7 @@ class Config:
     def __init__(self):
         c = ConfigParser()
-        c.read(["env/elab.ini", "env/db.ini"])
+        c.read(["../env/elab.ini", "../env/db.ini"])
         # LOG setting
         self.logfilename = c.get("logging", "logFilename")

@@ -7,7 +7,7 @@ class Config:
     def __init__(self):
         c = ConfigParser()
-        c.read(["env/send.ini", "env/db.ini"])
+        c.read(["../env/send.ini", "../env/db.ini"])
         # LOG setting
         self.logfilename = c.get("logging", "logFilename")

@@ -7,7 +7,7 @@ class Config:
     def __init__(self):
         c = ConfigParser()
-        c.read(["env/db.ini"])
+        c.read(["../env/db.ini"])
         # DB setting
         self.dbhost = c.get("db", "hostname")
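
Each Config class now loads its .ini files via ../env/..., which, like the new log paths, assumes the services are started from src/. ConfigParser.read resolves relative paths against the current working directory and silently skips files it cannot find, so a wrong CWD would yield empty settings rather than an error. A CWD-independent alternative is to anchor the paths to the module's own location; a sketch, assuming a layout where env/ sits one level above the directory containing the config module:

from configparser import ConfigParser
from pathlib import Path

# Resolve env/ relative to this file instead of the working directory,
# so the service can be launched from anywhere. Assumed layout:
#   project/src/config.py  and  project/env/*.ini
ENV_DIR = Path(__file__).resolve().parent.parent / "env"

c = ConfigParser()
c.read([ENV_DIR / "elab.ini", ENV_DIR / "db.ini"])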

@@ -4,6 +4,7 @@ class WorkflowFlags:
     DATA_ELABORATED = 2   # 0010
     SENT_RAW_DATA = 4     # 0100
     SENT_ELAB_DATA = 8    # 1000
+    DUMMY_ELABORATED = 16 # 10000

 # Flag -> timestamp column mapping

@@ -12,7 +13,8 @@ FLAG_TO_TIMESTAMP = {
     WorkflowFlags.DATA_LOADED: "loaded_at",
     WorkflowFlags.DATA_ELABORATED: "elaborated_at",
     WorkflowFlags.SENT_RAW_DATA: "sent_raw_at",
-    WorkflowFlags.SENT_ELAB_DATA: "sent_elab_at"
+    WorkflowFlags.SENT_ELAB_DATA: "sent_elab_at",
+    WorkflowFlags.DUMMY_ELABORATED: "elaborated_at"
 }

 # Size of the matrix splits used for loading
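
DUMMY_ELABORATED = 16 extends the one-bit-per-stage flag scheme, and it deliberately maps to the same elaborated_at timestamp column as DATA_ELABORATED: a dummy elaboration is recorded in the same place as a real one. A quick illustration of how such flags compose on an integer status column (assuming DATA_LOADED = 1 for the first bit, which is not shown in this hunk):

class WorkflowFlags:
    DATA_LOADED = 1        # 00001 (assumed)
    DATA_ELABORATED = 2    # 00010
    SENT_RAW_DATA = 4      # 00100
    SENT_ELAB_DATA = 8     # 01000
    DUMMY_ELABORATED = 16  # 10000

# A by-passed record accumulates both "elaborated" bits:
status = 0
status |= WorkflowFlags.DATA_ELABORATED
status |= WorkflowFlags.DUMMY_ELABORATED
assert status == 18  # 0b10010

# Each stage can still be tested independently:
assert status & WorkflowFlags.DUMMY_ELABORATED
assert not status & WorkflowFlags.SENT_RAW_DATA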

@@ -14,7 +14,7 @@ async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_
     Args:
         cfg (dict): Configuration dictionary (not directly used in the query but passed for consistency).
-        id (int): The ID of the record being processed (used for logging).
+        id_recv (int): The ID of the record being processed (used for logging).
         pool (object): The database connection pool.
         unit (str): The name of the unit to filter the data.
         tool (str): The ID of the tool to filter the data.

@@ -110,7 +110,7 @@ async def send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, cs
     finally:
         csv_buffer.close()

-def parse_ftp_parms(ftp_parms):
+def parse_ftp_parms(ftp_parms: str) -> dict:
     """
     Parses a string of FTP parameters into a dictionary.
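
The new annotation documents that parse_ftp_parms takes the raw parameter string and returns a dict; the actual wire format is not shown in this hunk. For illustration only, a sketch of such a parser, assuming semicolon-separated key=value pairs (the real separator and keys are defined elsewhere in the project):

def parse_ftp_parms(ftp_parms: str) -> dict:
    """Parse a string of FTP parameters into a dictionary.

    Hypothetical input format: "host=ftp.example.com;port=21;user=alex".
    """
    parms = {}
    for pair in ftp_parms.split(";"):
        if "=" in pair:  # ignore malformed fragments
            key, value = pair.split("=", 1)
            parms[key.strip()] = value.strip()
    return parms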