add src path
env/elab.ini (vendored, 2 changes)
@@ -1,5 +1,5 @@
 [logging]
-logFilename = ./elab_data.log
+logFilename = ../logs/elab_data.log

 [threads]
 max_num = 10
env/ftp.ini (vendored, 2 changes)
@@ -18,7 +18,7 @@
 path = /home/alex/aseftp/csvfs/

 [logging]
-logFilename = ./ftp_csv_rec.log
+logFilename = ../logs/ftp_csv_rec.log

 [unit]
 Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
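Note on the `Types` value above: it reads as a single regular-expression alternation of unit-type prefixes. A minimal sketch of matching it against an incoming name, assuming the value is compiled with Python's `re` module (the usage, not the pattern, is the assumption here):

    import re

    # Assumed usage: the Types value from env/ftp.ini compiled as one regex.
    types_pattern = re.compile(
        r"G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|RIFKL"
        r"|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA"
        r"|CO_[0-9]{4}_[0-9]|ISI CSV LOG"
    )

    print(bool(types_pattern.search("CO_2025_1_readings.csv")))  # True
    print(bool(types_pattern.search("unknown_unit.csv")))        # False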
env/load.ini (vendored, 2 changes)
@@ -1,5 +1,5 @@
 [logging]
-logFilename = ./load_raw_data.log
+logFilename = ../logs/load_raw_data.log

 [threads]
 max_num = 5
env/send.ini (vendored, 2 changes)
@@ -1,5 +1,5 @@
 [logging]
-logFilename = ./send_data.log
+logFilename = ../logs/send_data.log

 [threads]
 max_num = 5
(deleted file, name not captured)
@@ -1 +0,0 @@
-select count(id) from RAWDATACOR where date(created_at) = '2025-07-27' and concat(UnitName,ToolNameID) in ('ID0005_DT0000','ID0005_DT0036','ID0005_DT0037','ID0005_DT0038','ID0005_DT0039','ID0005_DT0040','ID0005_DT0041','ID0005_DT0042','ID0005_DT0043','ID0006_DT0044','ID0006_DT0045','ID0006_DT0046','ID0006_DT0047','ID0006_DT0048','ID0006_DT0049','ID0007_DT0050','ID0007_DT0051','ID0007_DT0052','ID0007_DT0053','ID0010_DT0054','ID0010_DT0055','ID0012_DT0001','ID0012_DT0102','ID0012_DT0103','ID0012_DT0104','ID0013_DT0001','ID0034_DT0002','ID0039_DT0001','ID0039_DT0002','ID0039_DT0003','ID0039_DT0004','ID0039_DT0005','ID0039_DT0006','ID0039_DT0007','ID0039_DT0008','ID0190_DT0001','ID0190_DT0002','ID0190_DT0003','ID0190_DT0004','ID0190_DT0005','ID0198_DT0229','ID0198_DT0230','ID0199_DT0010','ID0215_DT0234','ID0215_DT0235','ID0215_DT0236','ID0215_DT0237','ID0226_DT0000','ID0226_DT0239','ID0226_DT0240','ID0226_DT0241','ID0226_DT0242','ID0226_DT0243','ID0226_DT0244','ID0226_DT0245','ID0226_DT0246','ID0226_DT0247','ID0226_DT0248','ID0226_DT0249','ID0226_DT0250','ID0226_DT0251')
@@ -41,17 +41,16 @@ async def worker(worker_id: int, cfg: object, pool: object) -> None:
     while True:
         try:
             logger.info("Inizio elaborazione")

             record = await get_next_csv_atomic(pool, cfg.dbrectable, WorkflowFlags.DATA_LOADED, WorkflowFlags.DATA_ELABORATED)

             if record:
                 id, unit_type, tool_type, unit_name, tool_name = [x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record]
-                tool_elab_info = await get_matlab_command(cfg, tool_name, unit_name, pool)
                 if tool_type.lower() != "gd":  # GD tools must not be processed
+                    tool_elab_info = await get_matlab_command(cfg, tool_name.upper(), unit_name.upper(), pool)
                     if tool_elab_info:
                         if tool_elab_info['statustools'].lower() in cfg.elab_status:
                             logger.info(f"Elaborazione id {id} per {unit_name} {tool_name} ")

-                            matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name} {tool_name}"
+                            matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name.upper()} {tool_name.upper()}"
                             proc = await asyncio.create_subprocess_shell(
                                 matlab_cmd,
                                 cwd=cfg.matlab_func_path,
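One detail worth noting in the hunk above: the MATLAB runner is wrapped in GNU coreutils `timeout`, so a hung elaboration is killed after `cfg.matlab_timeout` seconds and the shell exits with status 124. A sketch of detecting that case with the standard asyncio subprocess API (the error handling itself is an assumption, not part of this diff):

    # Sketch: run the wrapped command and detect a timeout (GNU timeout exits 124).
    proc = await asyncio.create_subprocess_shell(
        matlab_cmd,
        cwd=cfg.matlab_func_path,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await proc.communicate()
    if proc.returncode == 124:
        logger.error(f"MatLab run timed out after {cfg.matlab_timeout}s: {matlab_cmd}")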
@@ -73,6 +72,14 @@ async def worker(worker_id: int, cfg: object, pool: object) -> None:
                             await asyncio.sleep(ELAB_PROCESSING_DELAY)
                         else:
                             logger.info(f"id {id} - {unit_name} - {tool_name} {tool_elab_info['statustools']}: MatLab calc by-passed.")
                             await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+                            await update_status(cfg, id, WorkflowFlags.DUMMY_ELABORATED, pool)
                             await unlock(cfg, id, pool)
+                else:
+                    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+                    await update_status(cfg, id, WorkflowFlags.DUMMY_ELABORATED, pool)
+                    await unlock(cfg, id, pool)
+
             else:
                 logger.info("Nessun record disponibile")
                 await asyncio.sleep(NO_RECORD_SLEEP)
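Taken together, the two worker hunks restructure the per-record flow: the `get_matlab_command` lookup moves inside the GD check, and GD tools now get both elaboration flags set without any MATLAB call. A condensed sketch of the resulting branch logic (names as in the diff; the placement of the new `else` is inferred from indentation, so treat this as a reading aid, not authoritative code):

    # Condensed sketch of the worker's per-record decision flow after this commit.
    if tool_type.lower() != "gd":
        tool_elab_info = await get_matlab_command(cfg, tool_name.upper(), unit_name.upper(), pool)
        if tool_elab_info and tool_elab_info['statustools'].lower() in cfg.elab_status:
            ...  # run MATLAB via asyncio.create_subprocess_shell
        else:
            ...  # by-pass MATLAB: set DATA_ELABORATED + DUMMY_ELABORATED, unlock
    else:
        # GD tools: no MATLAB call at all; mark both flags and unlock directly
        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
        await update_status(cfg, id, WorkflowFlags.DUMMY_ELABORATED, pool)
        await unlock(cfg, id, pool)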
@@ -7,7 +7,7 @@ class Config:
     def __init__(self):

         c = ConfigParser()
-        c.read(["env/ftp.ini", "env/db.ini"])
+        c.read(["../env/ftp.ini", "../env/db.ini"])

         # FTP setting
         self.service_port = c.getint("ftpserver", "service_port")
@@ -7,7 +7,7 @@ class Config:
     def __init__(self):

         c = ConfigParser()
-        c.read(["env/load.ini", "env/db.ini"])
+        c.read(["../env/load.ini", "../env/db.ini"])

         # LOG setting
         self.logfilename = c.get("logging", "logFilename")
@@ -7,7 +7,7 @@ class Config:
     def __init__(self):

         c = ConfigParser()
-        c.read(["env/elab.ini", "env/db.ini"])
+        c.read(["../env/elab.ini", "../env/db.ini"])

         # LOG setting
         self.logfilename = c.get("logging", "logFilename")
@@ -7,7 +7,7 @@ class Config:
     def __init__(self):

         c = ConfigParser()
-        c.read(["env/send.ini", "env/db.ini"])
+        c.read(["../env/send.ini", "../env/db.ini"])

         # LOG setting
         self.logfilename = c.get("logging", "logFilename")
@@ -7,7 +7,7 @@ class Config:
     def __init__(self):

         c = ConfigParser()
-        c.read(["env/db.ini"])
+        c.read(["../env/db.ini"])

         # DB setting
         self.dbhost = c.get("db", "hostname")
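All five Config classes above make the same change: `ConfigParser.read` resolves relative paths against the process working directory, so once the entry points run from a src/ directory (the "add src path" of this commit), the env/ files sit one level up. A CWD-independent alternative, sketched here as a suggestion rather than what the commit does, anchors the lookup to the module file instead:

    from configparser import ConfigParser
    from pathlib import Path

    # Sketch: resolve env/ relative to this source file, not the working
    # directory. Assumes the module lives in src/ with env/ as a sibling.
    ENV_DIR = Path(__file__).resolve().parent.parent / "env"

    c = ConfigParser()
    c.read([ENV_DIR / "send.ini", ENV_DIR / "db.ini"])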
@@ -4,6 +4,7 @@ class WorkflowFlags:
     DATA_ELABORATED = 2  # 0010
     SENT_RAW_DATA = 4  # 0100
     SENT_ELAB_DATA = 8  # 1000
+    DUMMY_ELABORATED = 16  # 10000


 # Flag -> timestamp column mapping
@@ -12,7 +13,8 @@ FLAG_TO_TIMESTAMP = {
     WorkflowFlags.DATA_LOADED: "loaded_at",
     WorkflowFlags.DATA_ELABORATED: "elaborated_at",
     WorkflowFlags.SENT_RAW_DATA: "sent_raw_at",
-    WorkflowFlags.SENT_ELAB_DATA: "sent_elab_at"
+    WorkflowFlags.SENT_ELAB_DATA: "sent_elab_at",
+    WorkflowFlags.DUMMY_ELABORATED: "elaborated_at"
 }

 # Size of the matrix splits for loading
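Since the workflow flags are powers of two, a record's status can carry several of them at once, and `FLAG_TO_TIMESTAMP` deliberately maps both elaboration flags to the same column. A short illustrative sketch of the bitwise idiom (not code from this repo):

    # Sketch: combining and testing power-of-two workflow flags.
    status = 0
    status |= WorkflowFlags.DATA_ELABORATED   # 0b00010
    status |= WorkflowFlags.DUMMY_ELABORATED  # 0b10010 after both

    if status & WorkflowFlags.DUMMY_ELABORATED:
        column = FLAG_TO_TIMESTAMP[WorkflowFlags.DUMMY_ELABORATED]  # "elaborated_at"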
@@ -14,7 +14,7 @@ async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_

     Args:
         cfg (dict): Configuration dictionary (not directly used in the query but passed for consistency).
-        id (int): The ID of the record being processed (used for logging).
+        id_recv (int): The ID of the record being processed (used for logging).
         pool (object): The database connection pool.
         unit (str): The name of the unit to filter the data.
         tool (str): The ID of the tool to filter the data.
@@ -110,7 +110,7 @@ async def send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, cs
     finally:
         csv_buffer.close()

-def parse_ftp_parms(ftp_parms):
+def parse_ftp_parms(ftp_parms: str) -> dict:
     """
     Parses a string of FTP parameters into a dictionary.

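The hunk above only shows the new signature of `parse_ftp_parms`; its body is outside the diff. For illustration, a hypothetical implementation matching that signature, assuming a semicolon-separated key=value input format (the format is a guess, not taken from this repo):

    def parse_ftp_parms(ftp_parms: str) -> dict:
        """Hypothetical sketch: assumes "host=x;port=21;user=y" style input."""
        parms = {}
        for pair in ftp_parms.split(";"):
            if "=" in pair:
                key, value = pair.split("=", 1)
                parms[key.strip()] = value.strip()
        return parms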