Compare commits

...

63 Commits

Author SHA1 Message Date
35527c89cd fix ftp 2025-09-15 22:32:12 +02:00
8cd5a21275 fix flag elab 2025-09-15 22:06:01 +02:00
2d2668c92c setting vscode 2025-09-12 20:54:21 +02:00
adfe2e7809 fix cread user dir 2025-09-12 20:52:11 +02:00
1a99b55dbb add flag stop elab 2025-09-11 21:28:42 +02:00
54cb20b6af pylint 2 2025-09-03 21:22:35 +02:00
39dba8f54a fix pylint 2025-09-03 21:05:19 +02:00
9b3f1171f3 gitignore 2025-09-03 20:48:54 +02:00
f7e2efa03e resync toml 2025-09-03 20:39:55 +02:00
4e548c883c versionato toml 2025-09-03 20:36:07 +02:00
1ce6c7fd09 fix alias + add username sender 2025-08-27 22:43:36 +02:00
730869ef1f mod alterna valori ping pong 2025-08-23 16:58:52 +02:00
d1582b8f9e add multi file logs filter errors 2025-08-22 21:15:10 +02:00
f33ae140fc ini e email 2025-08-21 19:03:23 +02:00
d3f7e9090a std ini file e load config 2025-08-21 16:21:47 +02:00
05816ee95d add doc in load ftp user 2025-08-20 21:55:59 +02:00
55383e51b8 docs db __init__ 2025-08-19 22:08:57 +02:00
fb0383b6b6 fix 2025-08-19 14:19:09 +02:00
ea5cdac7c0 rename old_script -> old_scripts 2025-08-19 14:15:15 +02:00
c6d486d0bd refactory old script 2025-08-19 12:36:27 +02:00
b79f07b407 add funcs docs 2025-08-19 12:01:15 +02:00
2b976d06b3 util ftp renamed connect 2025-08-11 22:59:38 +02:00
dbe2e7f5a7 fix send ftp e api 2025-08-10 16:47:04 +02:00
cfb185e029 fix status val 2025-08-09 20:14:20 +02:00
3a3b63e360 reorg elab_query 2025-08-09 19:09:40 +02:00
5fc40093e2 add alias for tools and units types and names 2025-08-03 21:46:15 +02:00
fdefd0a430 pini 2025-08-02 19:22:48 +02:00
6ff97316dc add src path 2025-07-31 23:10:23 +02:00
acaad8a99f fix GD 2025-07-28 23:03:56 +02:00
d6f1998d78 GD RSSI + normalizza orario 2025-07-27 23:20:18 +02:00
dc20713cad gestione GD 2025-07-27 19:25:42 +02:00
cee070d237 fix logging to use the new 2025-07-27 17:56:57 +02:00
287d2de81e fix caricamenti 2025-07-27 00:32:12 +02:00
a8df0f9584 fix 2025-07-21 22:07:46 +02:00
a4079ee089 add ftp parm from db 2025-07-18 17:25:56 +02:00
c23027918c add send ftp 2025-07-18 15:26:41 +02:00
f003ba68ed estrtto codice duplicato e devinito modulo orchestrator_utils 2025-07-12 18:16:55 +02:00
7edaef3563 add func parm type 2025-07-12 17:33:38 +02:00
b1ce9061b1 add comment 2025-07-11 22:06:45 +02:00
0022d0e326 dict cursor e pool conn 2025-07-06 23:27:13 +02:00
301aa53c72 elab matlab 2025-07-06 21:52:41 +02:00
2c67956505 fix 2025-06-25 21:47:01 +02:00
2c4b356df1 fix 2025-06-16 22:50:42 +02:00
726d04ace3 rimoso autocommit nei pool 2025-06-13 08:34:59 +02:00
40a1ac23ee tolto tabelle 2025-06-12 15:41:34 +02:00
f1736e6bad add ftp user define 2025-06-12 15:39:46 +02:00
e939846812 nuove tipologie 2025-06-11 22:13:30 +02:00
ce6b55d2f9 cambio timeout 2025-06-03 19:37:11 +02:00
991eb6900d altre fix 2025-06-01 21:33:03 +02:00
c40378b654 fix async 2025-05-27 23:50:25 +02:00
670972bd45 fix x channels 2025-05-26 22:38:19 +02:00
95c8ced201 loc ok 2025-05-25 23:23:00 +02:00
43acf4f415 reorg parsers 2025-05-17 19:02:50 +02:00
976116e2f3 reorg ini e log 2025-05-13 22:48:08 +02:00
12ac522b98 load async worker 2025-05-11 16:40:46 +02:00
cbcadbf015 load csv async 2025-05-11 11:20:18 +02:00
1dfb1a2efa evol 5 2025-05-11 10:01:23 +02:00
e9dc7c1192 evol4 2025-05-03 15:40:58 +02:00
138474aa0b loc rel1 2025-05-02 19:10:49 +02:00
a752210a33 query channels ain din 2025-05-01 15:34:55 +02:00
fd5429ee0d evol 3 2025-05-01 00:58:07 +02:00
cc7a136cf3 refactory 2025-04-28 22:29:35 +02:00
40ef173694 evol 2 2025-04-27 17:21:36 +02:00
102 changed files with 7090 additions and 1822 deletions

16
.gitignore vendored Normal file

@@ -0,0 +1,16 @@
*.pyc
*.toml
.python-version
uv.lock
*.log*
.vscode/settings.json
README.md
prova*.*
.codegpt
build/
LoadCSVData.pl
matlab_elab.py
doc_carri.txt
ase.egg-info/
site/
site.zip

2
.pylintrc Normal file

@@ -0,0 +1,2 @@
# Or, if you want to be more permissive
max-line-length=140

14
.vscode/launch.json vendored Normal file

@@ -0,0 +1,14 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of the existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python Debugger: Python File",
"type": "debugpy",
"request": "launch",
"program": "${file}"
}
]
}

4
.vscode/setting.json vendored Normal file

@@ -0,0 +1,4 @@
{
"flake8.args": ["--max-line-length=140"],
"python.linting.flake8Args": ["--config","flake8.cfg"]
}


@@ -1,193 +0,0 @@
#!.venv/bin/python
import sys
import os
import pika
import logging
import csv
import re
import mysql.connector as mysql
import shutil
from utils.time import timestamp_fmt as ts
from utils.time import date_refmt as df
from utils.config import set_config as setting
class sqlraw:
def __init__(self, cfg):
self.config = {"host": cfg.dbhost, "user": cfg.dbuser, "password": cfg.dbpass}
self.dbname = cfg.dbname
self.table = cfg.table
self.sql_head = (
"INSERT IGNORE INTO "
+ self.dbname
+ "."
+ self.table
+ " (`UnitName`,`ToolName`,`eventDT`,`BatteryLevel`,`Temperature`,`NodeNum`,"
+ "`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,"
+ "`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`) VALUES "
)
def add_data(self, values):
self.sql = self.sql_head + "(" + "),(".join(values) + ");"
def write_db(self):
try:
conn = mysql.connect(**self.config, database=self.dbname)
except Exception as err:
logging.error(
f"PID {os.getpid():>5} >> Error to connet to DB {self.dbname} - System error {err}."
)
sys.exit(1)
cur = conn.cursor()
try:
cur.execute(self.sql)
except Exception as err:
logging.error(
f"PID {os.getpid():>5} >> Error write into DB {self.dbname} - System error {err}."
)
print(err)
sys.exit(1)
finally:
conn.close()
def callback_ase(ch, method, properties, body, config): # body è di tipo byte
logging.info(
"PID {0:>5} >> Read message {1}".format(os.getpid(), body.decode("utf-8"))
)
msg = body.decode("utf-8").split(";")
sql = sqlraw(config)
stmlst = []
commonData = '"{0}","{1}"'.format(msg[1], msg[2])
tooltype = msg[3]
with open(msg[6], "r") as csvfile:
lines = csvfile.read().splitlines()
for line in lines:
fields = line.split(";|;")
if mG501 := re.match(
r"^(\d\d\d\d\/\d\d\/\d\d\s\d\d:\d\d:\d\d);(.+);(.+)$", fields[0]
):
rowData = ',"{0}",{1},{2}'.format(
mG501.group(1), mG501.group(2), mG501.group(3)
)
fields.pop(0)
elif mG201 := re.match(
r"^(\d\d\/\d\d\/\d\d\d\d\s\d\d:\d\d:\d\d)$", fields[0]
):
mbtG201 = re.match(r"^(.+);(.+)$", fields[1])
rowData = ',"{0}",{1},{2}'.format(
df.dateTimeFmt(mG201.group(1)), mbtG201.group(1), mbtG201.group(2)
)
fields.pop(0)
fields.pop(0)
else:
continue
nodeNum = 0
for field in fields:
nodeNum += 1
vals = field.split(";")
stmlst.append(
commonData
+ rowData
+ ",{0},".format(nodeNum)
+ ",".join('"{0}"'.format(d) for d in vals)
+ ","
+ ",".join(["null"] * (config.valueNum - len(vals)))
)
if config.maxInsertRow < len(stmlst):
sql.add_data(stmlst)
try:
sql.write_db()
stmlst.clear()
except:
print("errore nell'inserimento")
sys.exit(1)
if len(stmlst) > 0:
sql.add_data(stmlst)
try:
sql.write_db()
ch.basic_ack(delivery_tag=method.delivery_tag)
except:
print("errore nell'inserimento")
sys.exit(1)
newFilename = msg[6].replace("received", "loaded")
newPath, filenameExt = os.path.split(newFilename)
try:
os.makedirs(newPath)
logging.info("PID {:>5} >> path {} created.".format(os.getpid(), newPath))
except FileExistsError:
logging.info(
"PID {:>5} >> path {} already exists.".format(os.getpid(), newPath)
)
try:
shutil.move(msg[6], newFilename)
logging.info(
"PID {:>5} >> {} moved into {}.".format(
os.getpid(), filenameExt, newFilename
)
)
except OSError:
logging.error(
"PID {:>5} >> Error to move {} into {}.".format(
os.getpid(), filenameExt, newFilename
)
)
def main():
cfg = setting.config()
logging.basicConfig(
format="%(asctime)s %(message)s",
filename="/var/log/" + cfg.elablog,
level=logging.INFO,
)
parameters = pika.URLParameters(
"amqp://"
+ cfg.mquser
+ ":"
+ cfg.mqpass
+ "@"
+ cfg.mqhost
+ ":"
+ cfg.mqport
+ "/%2F"
)
connection = pika.BlockingConnection(parameters)
channel = connection.channel()
channel.queue_declare(queue=cfg.csv_queue, durable=True)
channel.basic_qos(prefetch_count=1)
channel.basic_consume(
queue=cfg.csv_queue,
on_message_callback=lambda ch, method, properties, body: callback_ase(
ch, method, properties, body, config=cfg
),
)
# channel.basic_consume(queue=cfg.csv_queue, on_message_callback=callback,arguments=cfg)
try:
channel.start_consuming()
except KeyboardInterrupt:
logging.info(
"PID {0:>5} >> Info: {1}.".format(
os.getpid(), "Shutdown requested...exiting"
)
)
if __name__ == "__main__":
main()


@@ -1,316 +0,0 @@
#!.venv/bin/python
"""This module implements an FTP server with custom commands for managing virtual users and handling CSV file uploads."""
import sys
import os
# import ssl
import re
import logging
import mysql.connector
from hashlib import sha256
from pathlib import Path
from utils.time import timestamp_fmt as ts
from utils.config import set_config as setting
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed
def conn_db(cfg):
"""Establishes a connection to the MySQL database.
Args:
cfg: The configuration object containing database connection details.
Returns:
A MySQL database connection object.
"""
return mysql.connector.connect(user=cfg.dbuser, password=cfg.dbpass, host=cfg.dbhost, port=cfg.dbport)
def extract_value(patterns, primary_source, secondary_source, default='Not Defined'):
"""Extracts the first match for a list of patterns from the primary source.
Falls back to the secondary source if no match is found.
"""
for source in (primary_source, secondary_source):
for pattern in patterns:
matches = re.findall(pattern, source, re.IGNORECASE)
if matches:
return matches[0] # Return the first match immediately
return default # Return default if no matches are found
class DummySha256Authorizer(DummyAuthorizer):
"""Custom authorizer that uses SHA256 for password hashing and manages users from a database."""
def __init__(self, cfg):
"""Initializes the authorizer, adds the admin user, and loads users from the database.
Args:
cfg: The configuration object.
"""
super().__init__()
self.add_user(
cfg.adminuser[0], cfg.adminuser[1], cfg.adminuser[2], perm=cfg.adminuser[3])
# Define the database connection
conn = conn_db(cfg)
# Create a cursor
cur = conn.cursor()
cur.execute(f'SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE disabled_at IS NULL')
for ftpuser, hash, virtpath, perm in cur.fetchall():
self.add_user(ftpuser, hash, virtpath, perm)
"""
Create the user's directory if it does not exist.
"""
try:
Path(cfg.virtpath + ftpuser).mkdir(parents=True, exist_ok=True)
except Exception as e:
self.responde(f'551 Error in create virtual user path: {e}')
def validate_authentication(self, username, password, handler):
# Validate the user's password against the stored hash
hash = sha256(password.encode("UTF-8")).hexdigest()
try:
if self.user_table[username]["pwd"] != hash:
raise KeyError
except KeyError:
raise AuthenticationFailed
class ASEHandler(FTPHandler):
"""Custom FTP handler that extends FTPHandler with custom commands and file handling."""
def __init__(self, conn, server, ioloop=None):
"""Initializes the handler, adds custom commands, and sets up command permissions.
Args:
conn: The connection object.
server: The FTP server object.
ioloop: The I/O loop object.
"""
super().__init__(conn, server, ioloop)
self.proto_cmds = FTPHandler.proto_cmds.copy()
# Add custom FTP commands for managing virtual users - command in lowercase
self.proto_cmds.update(
{'SITE ADDU': dict(perm='M', auth=True, arg=True,
help='Syntax: SITE <SP> ADDU USERNAME PASSWORD (add virtual user).')}
)
self.proto_cmds.update(
{'SITE DISU': dict(perm='M', auth=True, arg=True,
help='Syntax: SITE <SP> DISU USERNAME (disable virtual user).')}
)
self.proto_cmds.update(
{'SITE ENAU': dict(perm='M', auth=True, arg=True,
help='Syntax: SITE <SP> ENAU USERNAME (enable virtual user).')}
)
self.proto_cmds.update(
{'SITE LSTU': dict(perm='M', auth=True, arg=None,
help='Syntax: SITE <SP> LSTU (list virtual users).')}
)
def on_file_received(self, file):
"""Handles the event when a file is successfully received.
Args:
file: The path to the received file.
"""
if not os.stat(file).st_size:
os.remove(file)
logging.info(f'File {file} was empty: removed.')
else:
cfg = self.cfg
path, filenameExt = os.path.split(file)
filename, fileExtension = os.path.splitext(filenameExt)
if (fileExtension.upper() in (cfg.fileext)):
with open(file, 'r') as csvfile:
lines = csvfile.readlines()
unit_name = extract_value(cfg.units_name, filename, str(lines[0:9]))
unit_type = extract_value(cfg.units_type, filename, str(lines[0:9]))
tool_name = extract_value(cfg.tools_name, filename, str(lines[0:9]))
tool_type = extract_value(cfg.tools_type, filename, str(lines[0:9]))
conn = conn_db(cfg)
# Create a cursor
cur = conn.cursor()
try:
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (filename, unit_name, unit_type, tool_name, tool_type, tool_data) VALUES (%s, %s, %s, %s, %s, %s)", (filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines)))
conn.commit()
conn.close()
except Exception as e:
logging.error(f'File {file} not loaded. Held in user path.')
logging.error(f'{e}')
else:
os.remove(file)
logging.info(f'File {file} loaded: removed.')
def on_incomplete_file_received(self, file):
"""Removes partially uploaded files.
Args:
file: The path to the incomplete file.
"""
os.remove(file)
def ftp_SITE_ADDU(self, line):
"""Adds a virtual user, creates their directory, and saves their details to the database.
"""
cfg = self.cfg
try:
parms = line.split()
user = os.path.basename(parms[0]) # Extract the username
password = parms[1] # Get the password
hash = sha256(password.encode("UTF-8")).hexdigest() # Hash the password
except IndexError:
self.respond('501 SITE ADDU failed. Command needs 2 arguments')
else:
try:
# Create the user's directory
Path(cfg.virtpath + user).mkdir(parents=True, exist_ok=True)
except Exception as e:
self.respond(f'551 Error in create virtual user path: {e}')
else:
try:
# Add the user to the authorizer
self.authorizer.add_user(str(user),
hash, cfg.virtpath + "/" + user, perm=cfg.defperm)
# Save the user to the database
# Define the database connection
conn = conn_db(cfg)
# Create a cursor
cur = conn.cursor()
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('{user}', '{hash}', '{cfg.virtpath + user}', '{cfg.defperm}')")
conn.commit()
conn.close()
logging.info(f"User {user} created.")
self.respond('200 SITE ADDU successful.')
except Exception as e:
self.respond(f'501 SITE ADDU failed: {e}.')
print(e)
def ftp_SITE_DISU(self, line):
"""Removes a virtual user from the authorizer and marks them as deleted in the database."""
cfg = self.cfg
parms = line.split()
user = os.path.basename(parms[0]) # Extract the username
try:
# Remove the user from the authorizer
self.authorizer.remove_user(str(user))
# Delete the user from database
conn = conn_db(cfg)
# Crea un cursore
cur = conn.cursor()
cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = now() WHERE ftpuser = '{user}'")
conn.commit()
conn.close()
logging.info(f"User {user} deleted.")
self.respond('200 SITE DISU successful.')
except Exception as e:
self.respond('501 SITE DISU failed.')
print(e)
def ftp_SITE_ENAU(self, line):
"""Restores a virtual user by updating their status in the database and adding them back to the authorizer."""
cfg = self.cfg
parms = line.split()
user = os.path.basename(parms[0]) # Extract the username
try:
# Restore the user into database
conn = conn_db(cfg)
# Crea un cursore
cur = conn.cursor()
try:
cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = null WHERE ftpuser = '{user}'")
conn.commit()
except Exception as e:
logging.error(f"Update DB failed: {e}")
cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE ftpuser = '{user}'")
ftpuser, hash, virtpath, perm = cur.fetchone()
self.authorizer.add_user(ftpuser, hash, virtpath, perm)
try:
Path(cfg.virtpath + ftpuser).mkdir(parents=True, exist_ok=True)
except Exception as e:
self.responde(f'551 Error in create virtual user path: {e}')
conn.close()
logging.info(f"User {user} restored.")
self.respond('200 SITE ENAU successful.')
except Exception as e:
self.respond('501 SITE ENAU failed.')
print(e)
def ftp_SITE_LSTU(self, line):
"""Lists all virtual users from the database."""
cfg = self.cfg
users_list = []
try:
# Connect to the SQLite database to fetch users
conn = conn_db(cfg)
# Crea un cursore
cur = conn.cursor()
self.push("214-The following virtual users are defined:\r\n")
cur.execute(f'SELECT ftpuser, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE disabled_at IS NULL ')
[users_list.append(f'Username: {ftpuser}\tPerms: {perm}\r\n') for ftpuser, perm in cur.fetchall()]
self.push(''.join(users_list))
self.respond("214 LSTU SITE command successful.")
except Exception as e:
self.respond(f'501 list users failed: {e}')
def main():
"""Main function to start the FTP server."""
# Load the configuration settings
cfg = setting.config()
try:
# Initialize the authorizer and handler
authorizer = DummySha256Authorizer(cfg)
handler = ASEHandler
handler.cfg = cfg
handler.authorizer = authorizer
handler.masquerade_address = cfg.proxyaddr
# Set the range of passive ports for the FTP server
_range = list(range(cfg.firstport, cfg.firstport + cfg.portrangewidth))
handler.passive_ports = _range
# Configure logging
logging.basicConfig(
format="%(asctime)s %(message)s",
filename=cfg.logfilename,
level=logging.INFO,
)
# Create and start the FTP server
server = FTPServer(("0.0.0.0", 2121), handler)
server.serve_forever()
except KeyboardInterrupt:
logging.info(
"Info: Shutdown requested...exiting"
)
except Exception:
print(
f"{ts.timestamp("log")} - PID {os.getpid():>5} >> Error: {sys.exc_info()[1]}."
)
if __name__ == "__main__":
main()


@@ -1,38 +0,0 @@
CREATE TABLE public.dataraw
(
id serial4 NOT NULL,
unit_name text NULL,
unit_type text NULL,
tool_name text NULL,
tool_type text NULL,
unit_ip text NULL,
unit_subnet text NULL,
unit_gateway text NULL,
event_timestamp timestamp NULL,
battery_level float8 NULL,
temperature float8 NULL,
nodes_jsonb jsonb NULL,
created_at timestamp DEFAULT CURRENT_TIMESTAMP NULL,
updated_at timestamp NULL,
CONSTRAINT dataraw_pk PRIMARY KEY (id),
CONSTRAINT dataraw_unique UNIQUE (unit_name, tool_name, event_timestamp)
);
CREATE OR REPLACE FUNCTION public.update_updated_at_column()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$function$
;
CREATE TRIGGER update_updated_at BEFORE
UPDATE
ON dataraw FOR EACH ROW
EXECUTE PROCEDURE
update_updated_at_column();

34
dbddl/rawdatacor.ddl Normal file

@@ -0,0 +1,34 @@
CREATE TABLE `RAWDATACOR` (
`id` int NOT NULL AUTO_INCREMENT,
`UnitName` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ToolNameID` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`NodeNum` int NOT NULL,
`EventDate` date NOT NULL,
`EventTime` time NOT NULL,
`BatLevel` decimal(4,2) NOT NULL,
`Temperature` decimal(5,2) NOT NULL,
`Val0` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val1` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val2` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val3` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val4` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val5` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val6` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val7` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val8` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`Val9` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ValA` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ValB` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ValC` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ValD` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ValE` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ValF` varchar(8) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`BatLevelModule` decimal(4,2) DEFAULT NULL,
`TemperatureModule` decimal(5,2) DEFAULT NULL,
`RssiModule` int DEFAULT NULL,
PRIMARY KEY (`id`,`EventDate`),
UNIQUE KEY `idx_ToolNodeDateTime` (`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`),
KEY `UnitToolName` (`UnitName`,`ToolNameID`) USING BTREE,
KEY `ToolNameNameNode` (`ToolNameID`,`NodeNum`,`UnitName`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci


@@ -1,17 +1,24 @@
DROP TABLE public.received;
DROP TABLE ase_lar.received;
CREATE TABLE `received` (
`id` int NOT NULL AUTO_INCREMENT,
`username` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`filename` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`unit_name` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`unit_type` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`tool_name` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`tool_type` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`tool_data` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`tool_info` json DEFAULT NULL,
`locked` int DEFAULT '0',
`status` int DEFAULT '0',
`inserted_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`loaded_at` timestamp NULL DEFAULT NULL,
`start_elab_at` timestamp NULL DEFAULT NULL,
`elaborated_at` timestamp NULL DEFAULT NULL,
`sent_raw_at` timestamp NULL DEFAULT NULL,
`sent_elab_at` timestamp NULL DEFAULT NULL,
`last_update_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
CREATE TABLE public.received
(
id serial4 NOT NULL,
filename text NULL,
unit_name text NULL,
unit_type text NULL,
tool_name text NULL,
tool_type text NULL,
tool_data text NULL,
"locked" int2 DEFAULT 0 NULL,
status int2 DEFAULT 0 NULL,
created_at timestamptz DEFAULT CURRENT_TIMESTAMP NULL,
loaded_at timestamptz NULL,
CONSTRAINT received_pk PRIMARY KEY (id)
);
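
Both orchestrators added later in this diff pull work from this received table through get_next_csv_atomic (imported from utils.csv.loaders; its body is not shown here). Purely as an illustrative sketch, and assuming that status is a bitmask of WorkflowFlags and that the locked column guards concurrent workers, an atomic claim with aiomysql might look like this:

# Illustrative only -- not the repository's get_next_csv_atomic.
# Assumes `pool` is an aiomysql pool, `status` is a flag bitmask and
# `locked` marks rows already taken by another worker.
async def claim_next(pool, table: str, done_flag: int, todo_flag: int):
    """Claim one row that already carries done_flag but not yet todo_flag."""
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await conn.begin()
            # Pick one ready, unlocked row; skip rows other workers hold (MySQL 8+).
            await cur.execute(
                f"SELECT id, tool_type, unit_name, tool_name FROM {table} "
                "WHERE locked = 0 AND (status & %s) AND NOT (status & %s) "
                "ORDER BY id LIMIT 1 FOR UPDATE SKIP LOCKED",
                (done_flag, todo_flag),
            )
            row = await cur.fetchone()
            if row is None:
                await conn.rollback()
                return None
            # Mark the row as taken before releasing the transaction.
            await cur.execute(f"UPDATE {table} SET locked = 1 WHERE id = %s", (row[0],))
            await conn.commit()
            return row

The actual helper may use a different locking strategy; the point is only that the locked and status columns above are what make a safe multi-worker claim possible.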


@@ -1,14 +1,13 @@
DROP TABLE public.virtusers
DROP TABLE ase_lar.virtusers
CREATE TABLE public.virtusers
(
id serial4 NOT NULL,
ftpuser text NOT NULL,
hash text NOT NULL,
virtpath text NOT NULL,
perm text NOT NULL,
defined_at timestamptz DEFAULT CURRENT_TIMESTAMP NULL,
deleted_at timestamptz NULL,
CONSTRAINT virtusers_pk PRIMARY KEY (id),
CONSTRAINT virtusers_unique UNIQUE (ftpuser)
);
CREATE TABLE `virtusers` (
`id` int NOT NULL AUTO_INCREMENT,
`ftpuser` varchar(20) COLLATE utf8mb4_general_ci NOT NULL,
`hash` varchar(100) COLLATE utf8mb4_general_ci NOT NULL,
`virtpath` varchar(100) COLLATE utf8mb4_general_ci NOT NULL,
`perm` varchar(20) COLLATE utf8mb4_general_ci NOT NULL,
`defined_at` datetime DEFAULT CURRENT_TIMESTAMP,
`disabled_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `virtusers_unique` (`ftpuser`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;


@@ -1,207 +0,0 @@
#!/usr/bin/env python3
# Copyright (C) 2007 Giampaolo Rodola' <g.rodola@gmail.com>.
# Use of this source code is governed by MIT license that can be
# found in the LICENSE file.
"""A basic unix daemon using the python-daemon library:
http://pypi.python.org/pypi/python-daemon
Example usages:
$ python unix_daemon.py start
$ python unix_daemon.py stop
$ python unix_daemon.py status
$ python unix_daemon.py # foreground (no daemon)
$ python unix_daemon.py --logfile /var/log/ftpd.log start
$ python unix_daemon.py --pidfile /var/run/ftpd.pid start
This is just a proof of concept which demonstrates how to daemonize
the FTP server.
You might want to use this as an example and provide the necessary
customizations.
Parts you might want to customize are:
- UMASK, WORKDIR, HOST, PORT constants
- get_server() function (to define users and customize FTP handler)
Authors:
- Ben Timby - btimby <at> gmail.com
- Giampaolo Rodola' - g.rodola <at> gmail.com
"""
import atexit
import errno
import optparse
import os
import signal
import sys
import time
from pyftpdlib.authorizers import UnixAuthorizer
from pyftpdlib.filesystems import UnixFilesystem
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
# overridable options
HOST = ""
PORT = 21
PID_FILE = "/var/run/pyftpdlib.pid"
LOG_FILE = "/var/log/pyftpdlib.log"
WORKDIR = os.getcwd()
UMASK = 0
def pid_exists(pid):
"""Return True if a process with the given PID is currently running."""
try:
os.kill(pid, 0)
except OSError as err:
return err.errno == errno.EPERM
else:
return True
def get_pid():
"""Return the PID saved in the pid file if possible, else None."""
try:
with open(PID_FILE) as f:
return int(f.read().strip())
except IOError as err:
if err.errno != errno.ENOENT:
raise
def stop():
"""Keep attempting to stop the daemon for 5 seconds, first using
SIGTERM, then using SIGKILL.
"""
pid = get_pid()
if not pid or not pid_exists(pid):
sys.exit("daemon not running")
sig = signal.SIGTERM
i = 0
while True:
sys.stdout.write('.')
sys.stdout.flush()
try:
os.kill(pid, sig)
except OSError as err:
if err.errno == errno.ESRCH:
print("\nstopped (pid %s)" % pid)
return
else:
raise
i += 1
if i == 25:
sig = signal.SIGKILL
elif i == 50:
sys.exit("\ncould not kill daemon (pid %s)" % pid)
time.sleep(0.1)
def status():
"""Print daemon status and exit."""
pid = get_pid()
if not pid or not pid_exists(pid):
print("daemon not running")
else:
print("daemon running with pid %s" % pid)
sys.exit(0)
def get_server():
"""Return a pre-configured FTP server instance."""
handler = FTPHandler
handler.authorizer = UnixAuthorizer()
handler.abstracted_fs = UnixFilesystem
server = FTPServer((HOST, PORT), handler)
return server
def daemonize():
"""A wrapper around python-daemonize context manager."""
def _daemonize():
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
# decouple from parent environment
os.chdir(WORKDIR)
os.setsid()
os.umask(0)
# do second fork
pid = os.fork()
if pid > 0:
# exit from second parent
sys.exit(0)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = open(LOG_FILE, 'r')
so = open(LOG_FILE, 'a+')
se = open(LOG_FILE, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# write pidfile
pid = str(os.getpid())
with open(PID_FILE, 'w') as f:
f.write("%s\n" % pid)
atexit.register(lambda: os.remove(PID_FILE))
pid = get_pid()
if pid and pid_exists(pid):
sys.exit('daemon already running (pid %s)' % pid)
# instance FTPd before daemonizing, so that in case of problems we
# get an exception here and exit immediately
server = get_server()
_daemonize()
server.serve_forever()
def main():
global PID_FILE, LOG_FILE
USAGE = "python [-p PIDFILE] [-l LOGFILE]\n\n" \
"Commands:\n - start\n - stop\n - status"
parser = optparse.OptionParser(usage=USAGE)
parser.add_option('-l', '--logfile', dest='logfile',
help='the log file location')
parser.add_option('-p', '--pidfile', dest='pidfile', default=PID_FILE,
help='file to store/retreive daemon pid')
options, args = parser.parse_args()
if options.pidfile:
PID_FILE = options.pidfile
if options.logfile:
LOG_FILE = options.logfile
if not args:
server = get_server()
server.serve_forever()
else:
if len(args) != 1:
sys.exit('too many commands')
elif args[0] == 'start':
daemonize()
elif args[0] == 'stop':
stop()
elif args[0] == 'restart':
try:
stop()
finally:
daemonize()
elif args[0] == 'status':
status()
else:
sys.exit('invalid command')
if __name__ == '__main__':
sys.exit(main())

91
docs/gen_ref_pages.py Normal file

@@ -0,0 +1,91 @@
"""Genera le pagine di riferimento per l'API."""
from pathlib import Path
import mkdocs_gen_files
nav = mkdocs_gen_files.Nav()
# Files and directories to exclude
EXCLUDE_PATTERNS = {
".env",
".env.*",
"__pycache__",
".git",
".pytest_cache",
".venv",
"venv",
"node_modules",
"docs", # Escludi tutta la directory docs
"build",
"dist",
"*.egg-info",
".mypy_cache",
".coverage",
"htmlcov"
}
def should_exclude(path: Path) -> bool:
"""Check whether a path should be excluded."""
# Exclude .env files
if path.name.startswith('.env'):
return True
# Exclude this script itself
if path.name == "gen_ref_pages.py":
return True
# Exclude anything under old_script
if "old_script" in path.parts:
return True
# Exclude the entire docs directory
if "docs" in path.parts:
return True
# Exclude common patterns
for pattern in EXCLUDE_PATTERNS:
if pattern in str(path):
return True
return False
# Look for Python files under the current directory
for path in sorted(Path(".").rglob("*.py")):
# Skip excluded files
if should_exclude(path):
continue
# Skip paths with a dot-prefixed part
if any(part.startswith('.') for part in path.parts):
continue
# Skip paths with a part starting with "prova"
if any(part.startswith('prova') for part in path.parts):
continue
if any(part.startswith('matlab_elab') for part in path.parts):
continue
module_path = path.with_suffix("")
doc_path = path.with_suffix(".md")
full_doc_path = Path("reference", doc_path)
parts = tuple(module_path.parts)
if parts[-1] == "__init__":
parts = parts[:-1]
doc_path = doc_path.with_name("index.md")
full_doc_path = full_doc_path.with_name("index.md")
elif parts[-1] == "__main__":
continue
nav[parts] = doc_path.as_posix()
with mkdocs_gen_files.open(full_doc_path, "w") as fd:
ident = ".".join(parts)
fd.write(f"::: {ident}")
mkdocs_gen_files.set_edit_path(full_doc_path, path)
with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
nav_file.writelines(nav.build_literate_nav())

36
docs/index.md Normal file

@@ -0,0 +1,36 @@
# Welcome to the documentation
This is the automatically generated documentation of the ASE Python application for handling the CSV files received via FTP.
## Features
- Reception of CSV files via FTP and storage in the database.
- Loading of the data into the database, with dedicated modules for:
  - unit and sensor type
  - unit and sensor name
- Execution of the MatLab elaboration.
- FTP user management
- Bulk loading of FTP users from the database
## Setup
- customization of the env files:
  - env/db.ini
  - env/ftp.ini
  - env/load.ini
  - env/elab.ini
- run the FTP server -> "python ftp_csv_receiver.py"
- run the CSV loading orchestrator -> "python load_orchestrator.py"
- run the MatLab elaboration orchestrator -> "python elab_orchestrator.py"
systemd services can be created to run these features automatically.
The virtualenv is used, so python must be run with the appropriate settings.
## Installation
Install the ase-x.x.x-py3-none-any.whl package:
- pip install ase-x.x.x-py3-none-any.whl

6
env/config.ini vendored Normal file

@@ -0,0 +1,6 @@
[mysql]
host = 10.211.114.173
database = ase_lar
user = root
password = batt1l0

16
env/db.ini vendored Normal file

@@ -0,0 +1,16 @@
# to generate adminuser password hash:
# python3 -c 'from hashlib import sha256;print(sha256("????password???".encode("UTF-8")).hexdigest())'
[db]
hostname = 10.211.114.173
port = 3306
user = root
password = batt1l0
dbName = ase_lar
maxRetries = 10
[tables]
userTableName = virtusers
recTableName = received
rawTableName = RAWDATACOR
nodesTableName = nodes
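
For orientation only, a rough sketch of reading these [db] and [tables] options from Python; the option handling is an assumption, since the project's own helpers (utils.config and utils.database.connection.connetti_db, which appears later in this diff) may read them differently:

# Sketch under assumptions -- the real loader lives in utils.config / connetti_db.
from configparser import ConfigParser

import mysql.connector

cfg = ConfigParser()
cfg.read("env/db.ini")

conn = mysql.connector.connect(
    host=cfg.get("db", "hostname"),
    port=cfg.getint("db", "port"),
    user=cfg.get("db", "user"),
    password=cfg.get("db", "password"),
)
cur = conn.cursor()
# Count the FTP virtual users defined in the user table named above
cur.execute(
    f"SELECT COUNT(*) FROM {cfg.get('db', 'dbName')}.{cfg.get('tables', 'userTableName')}"
)
print(cur.fetchone()[0])
conn.close()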

20
env/elab.ini vendored Normal file

@@ -0,0 +1,20 @@
[logging]
logFilename = ../logs/elab_data.log
[threads]
max_num = 10
[tool]
# statuses in lowercase
elab_status = active|manual upload
[matlab]
#runtime = /usr/local/MATLAB/MATLAB_Runtime/v93
#func_path = /usr/local/matlab_func/
runtime = /home/alex/matlab_sym/
func_path = /home/alex/matlab_sym/
timeout = 1800
error = ""
error_path = /tmp/
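
The orchestrator in src/elab_orchestrator.py (later in this diff) checks a tool's status against elab_status, so the pipe-separated value above presumably ends up as a list of lowercase statuses. A minimal sketch of that reading, assuming plain configparser (the project's loader in utils.config may differ):

# Assumption: elab.ini is standard INI readable by configparser.
from configparser import ConfigParser

cfg = ConfigParser()
cfg.read("env/elab.ini")

# "active|manual upload" -> ["active", "manual upload"] (already lowercase, per the comment above)
elab_status = [s.strip().lower() for s in cfg.get("tool", "elab_status").split("|")]
matlab_timeout = cfg.getint("matlab", "timeout")

print(elab_status, matlab_timeout)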

59
env/email.ini vendored Normal file

@@ -0,0 +1,59 @@
[smtp]
address = smtp.aseltd.eu
port = 587
user = alert@aseltd.eu
password = Ase#2013!20@bat
[address]
from = ASE Alert System<alert@aseltd.eu>
to1 = andrea.carri@aseltd.eu,alessandro.battilani@gmail.com,alessandro.valletta@aseltd.eu,alberto.sillani@aseltd.eu,majd.saidani@aseltd.eu
to = alessandro.battilani@aseltd.eu
cc = alessandro.battilani@gmail.com
bcc =
[msg]
subject = ASE Alert System
body = <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Alert from ASE</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body style="margin: 0; padding: 0;">
<table bgcolor="#ffffff" border="0" cellpadding="0" cellspacing="0" width="100%%">
<tr>
<td align="center">
<img src="https://www2.aseltd.eu/static/img/logo_ASE_small.png" alt="ASE" style="display: block;" />
</td>
</tr>
<tr>
<td align="center">
<h1 style="margin: 5px;">Alert from ASE:</h1>
</td>
</tr>
<tr>
<td align="center">
<h3 style="margin: 5px;">Matlab function {matlab_cmd} failed on unit => {unit} - tool => {tool}</h3>
</td>
</tr>
<tr>
<td align="center">
<h4 style="margin: 5px;">{matlab_error}</h4>
</td>
</tr>
<tr>
<td style="padding: 20px; padding-bottom: 0px; color: red">
{MatlabErrors}
</td>
</tr>
<tr>
<td style="padding: 20px;">
{MatlabWarnings}
</td>
</tr>
</table>
</body>
</html>
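
The [msg] body above is an HTML template whose {placeholder} fields get filled in when an elaboration fails (see send_error_email in src/elab_orchestrator.py later in this diff). Below is a hedged sketch of how such a template could be filled and sent with the standard library only, assuming env/email.ini parses as standard INI (multi-line value indented in the real file); the placeholder values are made up and the project's real sender in utils.connect.send_email may work differently:

# Sketch with invented values; the real sender is utils.connect.send_email.
import smtplib
from configparser import ConfigParser
from email.mime.text import MIMEText

cfg = ConfigParser()
cfg.read("env/email.ini")

body = cfg.get("msg", "body").format(
    matlab_cmd="run_example.sh",          # hypothetical Matlab wrapper name
    unit="ID0003",
    tool="DT0002",
    matlab_error="Matlab elab failed: 1.",
    MatlabErrors="example error lines",
    MatlabWarnings="example warning lines",
)

msg = MIMEText(body, "html")
msg["Subject"] = cfg.get("msg", "subject")
msg["From"] = cfg.get("address", "from")
msg["To"] = cfg.get("address", "to")

with smtplib.SMTP(cfg.get("smtp", "address"), cfg.getint("smtp", "port")) as srv:
    srv.starttls()
    srv.login(cfg.get("smtp", "user"), cfg.get("smtp", "password"))
    srv.send_message(msg)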

37
env/ftp.ini vendored Normal file

@@ -0,0 +1,37 @@
# to generate adminuser password hash:
# python3 -c 'from hashlib import sha256;print(sha256("????password???".encode("UTF-8")).hexdigest())'
[ftpserver]
service_port = 2121
firstPort = 40000
proxyAddr = 0.0.0.0
portRangeWidth = 500
virtpath = /home/alex/aseftp/
adminuser = admin|87b164c8d4c0af8fbab7e05db6277aea8809444fb28244406e489b66c92ba2bd|/home/alex/aseftp/|elradfmwMT
servertype = FTPHandler
certfile = /home/alex/aseftp/keycert.pem
fileext = .CSV|.TXT
defaultUserPerm = elmw
#servertype = FTPHandler/TLS_FTPHandler
[csvfs]
path = /home/alex/aseftp/csvfs/
[logging]
logFilename = ../logs/ftp_csv_rec.log
[unit]
Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
Names = ID[0-9]{4}|IX[0-9]{4}|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
Alias = HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE|MESSPUNKTEPINI_:STAZIONETOTALE|CO_:SOROTECPINI
[tool]
Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK
Names = LOC[0-9]{4}|DT[0-9]{4}|GD[0-9]{4}|[0-9]{18}|MEASUREMENTS_|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
Alias = CO_:CO|HEALTH-:HEALTH|READINGS-:READINGS|MESSPUNKTEPINI_:MESSPUNKTEPINI
[csv]
Infos = IP|Subnet|Gateway
[ts_pini]:
path_match = [276_208_TS0003]:TS0003|[Neuchatel_CDP]:TS7|[TS0006_EP28]:=|[TS0007_ChesaArcoiris]:=|[TS0006_EP28_3]:=|[TS0006_EP28_4]:TS0006_EP28_4|[TS0006_EP28_5]:TS0006_EP28_5|[TS18800]:=|[Granges_19 100]:=|[Granges_19 200]:=|[Chesa_Arcoiris_2]:=|[TS0006_EP28_1]:=|[TS_PS_Petites_Croisettes]:=|[_Chesa_Arcoiris_1]:=|[TS_test]:=|[TS-VIME]:=
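
The Types and Names options above read as regex alternations, and each Alias entry looks like a PREFIX:CANONICAL pair, with pairs separated by '|'. One plausible way to interpret an Alias value (an assumption; the actual parsing lives in the utils.config loaders) is sketched below:

# Assumption: Alias maps a matched prefix to a canonical name.
alias_raw = "HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE"
alias = dict(pair.split(":", 1) for pair in alias_raw.split("|"))

def canonical(name: str) -> str:
    """Return the aliased name when the value starts with a known prefix."""
    for prefix, target in alias.items():
        if name.upper().startswith(prefix):
            return target
    return name

print(canonical("HEALTH-1234"))   # -> SISGEO
print(canonical("ID0003"))        # unchanged -> ID0003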

5
env/load.ini vendored Normal file

@@ -0,0 +1,5 @@
[logging]:
logFilename = ../logs/load_raw_data.log
[threads]:
max_num = 5

5
env/send.ini vendored Normal file

@@ -0,0 +1,5 @@
[logging]
logFilename = ../logs/send_data.log
[threads]
max_num = 30


@@ -1,43 +0,0 @@
# to generete adminuser password hash:
# python3 -c 'from hashlib import sha256;print(sha256("????password???".encode("UTF-8")).hexdigest())'
[ftpserver]
firstPort = 40000
logFilename = ./ftppylog.log
proxyAddr = 0.0.0.0
portRangeWidth = 500
virtpath = /home/alex/aseftp/
adminuser = admin|87b164c8d4c0af8fbab7e05db6277aea8809444fb28244406e489b66c92ba2bd|/home/alex/aseftp/|elradfmwMT
servertype = FTPHandler
certfile = /home/alex/aseftp/keycert.pem
fileext = .CSV|.TXT
defaultUserPerm = elmw
#servertype = FTPHandler/TLS_FTPHandler
[csvfs]
path = /home/alex/aseftp/csvfs/
[csvelab]
logFilename = csvElab.log
[db]
hostname = 10.211.114.173
port = 3306
user = root
password = batt1l0
dbName = ase_lar
dbSchema = public
userTableName = virtusers
recTableName = received
rawTableName = dataraw
[unit]
Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1
Names = ID[0-9]{4}|IX[0-9]{4}
[tool]
Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA
Names = LOC[0-9]{4}|DT[0-9]{4}|GD[0-9]{4}|[0-9]{18}|measurement
[csv]
Infos = IP|Subnet|Gateway

66
mkdocs.yml Normal file

@@ -0,0 +1,66 @@
site_name: Ase receiver
site_description: Automatic documentation of the ASE Python app
theme:
name: material
features:
- navigation.tabs
- navigation.sections
- toc.integrate
- navigation.top
- search.suggest
- search.highlight
- content.tabs.link
- content.code.annotation
- content.code.copy
plugins:
- offline
- search
- mkdocstrings:
handlers:
python:
paths: ["."]
options:
docstring_style: google
show_source: true
show_root_heading: true
show_root_toc_entry: true
show_symbol_type_heading: true
show_symbol_type_toc: true
filters:
- "!^docs" # Escludi tutto ciò che inizia con "docs"
- gen-files:
scripts:
- docs/gen_ref_pages.py
- literate-nav:
nav_file: SUMMARY.md
nav:
- Home: index.md
- API Reference: reference/
markdown_extensions:
- pymdownx.highlight:
anchor_linenums: true
- pymdownx.inlinehilite
- pymdownx.snippets
- pymdownx.superfences
- pymdownx.tabbed:
alternate_style: true
- admonition
- pymdownx.details
- attr_list
- md_in_html
# Exclude files from the build
exclude_docs: |
.env*
__pycache__/
.git/
.pytest_cache/
.venv/
venv/
test/
.vscode/


@@ -1,65 +0,0 @@
#!/usr/bin/env python3
import paho.mqtt.client as mqtt
import time
import ssl
version = '5' # or '3'
mytransport = 'tcp' # or 'websockets'
if version == '5':
mqttc = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2,
client_id="myPy",
transport=mytransport,
protocol=mqtt.MQTTv5)
if version == '3':
mqttc = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2,
client_id="myPy",
transport=mytransport,
protocol=mqtt.MQTTv311,
clean_session=True)
mqttc.username_pw_set("alex", "BatManu#171017")
'''client.tls_set(certfile=None,
keyfile=None,
cert_reqs=ssl.CERT_REQUIRED)'''
def on_message(client, obj, message, properties=None):
print(" Received message " + str(message.payload)
+ " on topic '" + message.topic
+ "' with QoS " + str(message.qos))
'''
def on_connect(client, obj, flags, reason_code, properties):
print("reason_code: " + str(reason_code))
def on_publish(client, obj, mid, reason_code, properties):
print("mid: " + str(mid))
def on_log(client, obj, level, string):
print(string)
'''
mqttc.on_message = on_message;
'''
client.on_connect = mycallbacks.on_connect;
client.on_publish = mycallbacks.on_publish;
client.on_subscribe = mycallbacks.on_subscribe;
'''
broker = 'mqtt'
myport = 1883
if version == '5':
from paho.mqtt.properties import Properties
from paho.mqtt.packettypes import PacketTypes
properties=Properties(PacketTypes.CONNECT)
properties.SessionExpiryInterval=30*60 # in seconds
mqttc.connect(broker,
port=myport,
clean_start=mqtt.MQTT_CLEAN_START_FIRST_ONLY,
properties=properties,
keepalive=60);
elif version == '3':
mqttc.connect(broker,port=myport,keepalive=60);
mqttc.loop_start();


@@ -1,53 +0,0 @@
import mysql.connector
import utils.datefmt.date_check as date_check
righe = ["17/03/2022 15:10;13.7;14.8;|;401;832;17373;-8;920;469;9;|;839;133;17116;675;941;228;10;|;-302;-1252;17165;288;75;-940;10;|;739;76;17203;562;879;604;9;|;1460;751;16895;672;1462;132;10;|;-1088;-1883;16675;244;1071;518;10;|;-29;-1683;16923;384;1039;505;11;|;1309;-1095;17066;-36;324;-552;10;|;-36;-713;16701;-121;372;122;10;|;508;-1318;16833;475;1154;405;10;|;1178;878;17067;636;1114;428;10;|;1613;-573;17243;291;-234;-473;9;|;-107;-259;17287;94;421;369;10;|;-900;-647;16513;168;1330;252;10;|;1372;286;17035;202;263;469;10;|;238;-2006;17142;573;1201;492;9;|;2458;589;17695;356;187;208;11;|;827;-1085;17644;308;233;66;10;|;1;-1373;17214;557;1279;298;9;|;-281;-244;17071;209;517;-36;10;|;-486;-961;17075;467;440;367;10;|;1264;-339;16918;374;476;116;8;|;661;-1330;16789;-37;478;15;9;|;1208;-724;16790;558;1303;335;8;|;-236;-1404;16678;309;426;376;8;|;367;-1402;17308;-32;428;-957;7;|;-849;-360;17640;1;371;635;7;|;-784;90;17924;533;128;-661;5;|;-723;-1062;16413;270;-79;702;7;|;458;-1235;16925;354;-117;194;5;|;-411;-1116;17403;280;777;530;1;;;;;;;;;;;;;;",
"17/03/2022 15:13;13.6;14.8;|;398;836;17368;-3;924;472;9;|;838;125;17110;675;938;230;10;|;-298;-1253;17164;290;75;-942;10;|;749;78;17221;560;883;601;9;|;1463;752;16904;673;1467;134;10;|;-1085;-1884;16655;239;1067;520;10;|;-27;-1680;16923;393;1032;507;10;|;1308;-1095;17065;-43;328;-548;10;|;-38;-712;16704;-124;373;122;10;|;512;-1318;16830;473;1155;408;10;|;1181;879;17070;637;1113;436;10;|;1610;-567;17239;287;-240;-462;10;|;-108;-250;17297;94;420;370;10;|;-903;-652;16518;169;1326;257;9;|;1371;282;17047;198;263;471;10;|;244;-2006;17137;570;1205;487;9;|;2461;589;17689;354;199;210;11;|;823;-1081;17642;310;235;68;10;|;1;-1370;17214;560;1278;290;9;|;-280;-245;17062;209;517;-31;9;|;-484;-963;17074;463;440;374;10;|;1271;-340;16912;374;477;125;8;|;668;-1331;16786;-37;478;7;9;|;1209;-724;16784;557;1301;329;8;|;-237;-1406;16673;316;425;371;8;|;371;-1401;17307;-30;429;-961;7;|;-854;-356;17647;7;368;631;7;|;-781;85;17934;531;130;-664;5;|;-726;-1062;16400;274;-79;707;6;|;460;-1233;16931;355;-113;196;5;|;-413;-1119;17405;280;780;525;1",
"17/03/2022 15:28;13.6;14.3;|;396;832;17379;-3;919;470;10;|;837;128;17114;670;945;233;10;|;-304;-1246;17167;292;77;-931;10;|;744;70;17211;567;888;601;9;|;1459;748;16893;672;1480;141;10;|;-1084;-1887;16658;236;1068;522;10;|;-29;-1686;16912;388;1035;500;10;|;1312;-1092;17062;-35;328;-545;10;|;-40;-709;16701;-120;374;121;10;|;515;-1327;16826;475;1148;402;10;|;1179;881;17063;635;1114;430;9;|;1613;-568;17246;293;-230;-461;9;|;-103;-265;17289;96;420;363;10;|;-896;-656;16522;167;1320;250;10;|;1368;288;17039;195;263;471;9;|;239;-2003;17129;578;1203;490;9;|;2461;586;17699;356;202;209;11;|;823;-1092;17649;310;237;65;10;|;-7;-1369;17215;550;1279;288;9;|;-290;-249;17072;208;515;-33;9;|;-488;-965;17071;472;439;372;10;|;1270;-342;16923;377;476;120;8;|;671;-1337;16788;-33;482;14;9;|;1206;-725;16783;556;1306;344;9;|;-232;-1404;16681;309;423;379;8;|;364;-1400;17305;-28;432;-952;7;|;-854;-363;17644;1;369;626;8;|;-782;89;17931;529;134;-661;5;|;-723;-1057;16407;269;-82;700;6;|;459;-1235;16929;358;-119;193;5;|;-414;-1122;17400;282;775;526;2"]
#Dividi la riga principale usando il primo delimitatore ';'
sql_insert_RAWDATA = '''
INSERT IGNORE INTO ase_lar.RAWDATACOR (
`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,
`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,
`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`,
`BatLevelModule`,`TemperatureModule`, `RssiModule`
)
VALUES (
%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s
)
'''
def make_matrix(righe):
UnitName = 'ID0003'
ToolNameID = 'DT0002'
matrice_valori = []
for riga in righe:
timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
EventDate, EventTime = timestamp.split(' ')
valori_nodi = rilevazioni.rstrip(';').split(';|;')[1:] # Toglie eventuali ';' finali, dividi per '|' e prendi gli elementi togliendo il primo che è vuoto
for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
valori = valori_nodo.split(';')[1:-1]
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori
matrice_valori = make_matrix(righe)
with mysql.connector.connect(user='root', password='batt1l0', host='10.211.114.173', port=3306) as conn:
cur = conn.cursor()
try:
cur.executemany(sql_insert_RAWDATA, matrice_valori)
conn.commit()
except Exception as e:
conn.rollback()
print(f'Error: {e}')


@@ -1,62 +0,0 @@
#import mysql.connector
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.orm import declarative_base, Session
from sqlalchemy.exc import IntegrityError
import pandas as pd
import utils.datefmt.date_check as date_check
righe = ["17/03/2022 15:10;13.7;14.8;|;401;832;17373;-8;920;469;9;|;839;133;17116;675;941;228;10;|;-302;-1252;17165;288;75;-940;10;|;739;76;17203;562;879;604;9;|;1460;751;16895;672;1462;132;10;|;-1088;-1883;16675;244;1071;518;10;|;-29;-1683;16923;384;1039;505;11;|;1309;-1095;17066;-36;324;-552;10;|;-36;-713;16701;-121;372;122;10;|;508;-1318;16833;475;1154;405;10;|;1178;878;17067;636;1114;428;10;|;1613;-573;17243;291;-234;-473;9;|;-107;-259;17287;94;421;369;10;|;-900;-647;16513;168;1330;252;10;|;1372;286;17035;202;263;469;10;|;238;-2006;17142;573;1201;492;9;|;2458;589;17695;356;187;208;11;|;827;-1085;17644;308;233;66;10;|;1;-1373;17214;557;1279;298;9;|;-281;-244;17071;209;517;-36;10;|;-486;-961;17075;467;440;367;10;|;1264;-339;16918;374;476;116;8;|;661;-1330;16789;-37;478;15;9;|;1208;-724;16790;558;1303;335;8;|;-236;-1404;16678;309;426;376;8;|;367;-1402;17308;-32;428;-957;7;|;-849;-360;17640;1;371;635;7;|;-784;90;17924;533;128;-661;5;|;-723;-1062;16413;270;-79;702;7;|;458;-1235;16925;354;-117;194;5;|;-411;-1116;17403;280;777;530;1",
"19/03/2022 15:13;13.6;14.8;|;398;836;17368;-3;924;472;9;|;838;125;17110;675;938;230;10;|;-298;-1253;17164;290;75;-942;10;|;749;78;17221;560;883;601;9;|;1463;752;16904;673;1467;134;10;|;-1085;-1884;16655;239;1067;520;10;|;-27;-1680;16923;393;1032;507;10;|;1308;-1095;17065;-43;328;-548;10;|;-38;-712;16704;-124;373;122;10;|;512;-1318;16830;473;1155;408;10;|;1181;879;17070;637;1113;436;10;|;1610;-567;17239;287;-240;-462;10;|;-108;-250;17297;94;420;370;10;|;-903;-652;16518;169;1326;257;9;|;1371;282;17047;198;263;471;10;|;244;-2006;17137;570;1205;487;9;|;2461;589;17689;354;199;210;11;|;823;-1081;17642;310;235;68;10;|;1;-1370;17214;560;1278;290;9;|;-280;-245;17062;209;517;-31;9;|;-484;-963;17074;463;440;374;10;|;1271;-340;16912;374;477;125;8;|;668;-1331;16786;-37;478;7;9;|;1209;-724;16784;557;1301;329;8;|;-237;-1406;16673;316;425;371;8;|;371;-1401;17307;-30;429;-961;7;|;-854;-356;17647;7;368;631;7;|;-781;85;17934;531;130;-664;5;|;-726;-1062;16400;274;-79;707;6;|;460;-1233;16931;355;-113;196;5;|;-413;-1119;17405;280;780;525;1",
"19/03/2022 15:28;13.6;14.3;|;396;832;17379;-3;919;470;10;|;837;128;17114;670;945;233;10;|;-304;-1246;17167;292;77;-931;10;|;744;70;17211;567;888;601;9;|;1459;748;16893;672;1480;141;10;|;-1084;-1887;16658;236;1068;522;10;|;-29;-1686;16912;388;1035;500;10;|;1312;-1092;17062;-35;328;-545;10;|;-40;-709;16701;-120;374;121;10;|;515;-1327;16826;475;1148;402;10;|;1179;881;17063;635;1114;430;9;|;1613;-568;17246;293;-230;-461;9;|;-103;-265;17289;96;420;363;10;|;-896;-656;16522;167;1320;250;10;|;1368;288;17039;195;263;471;9;|;239;-2003;17129;578;1203;490;9;|;2461;586;17699;356;202;209;11;|;823;-1092;17649;310;237;65;10;|;-7;-1369;17215;550;1279;288;9;|;-290;-249;17072;208;515;-33;9;|;-488;-965;17071;472;439;372;10;|;1270;-342;16923;377;476;120;8;|;671;-1337;16788;-33;482;14;9;|;1206;-725;16783;556;1306;344;9;|;-232;-1404;16681;309;423;379;8;|;364;-1400;17305;-28;432;-952;7;|;-854;-363;17644;1;369;626;8;|;-782;89;17931;529;134;-661;5;|;-723;-1057;16407;269;-82;700;6;|;459;-1235;16929;358;-119;193;5;|;-414;-1122;17400;282;775;526;2"]
'''
righe = ["17/03/2022 15:10;13.7;14.8;|;401;832;17373;-8;920;469;9;|;839;133;17116;675;941;228;10;|;-302;-1252;17165;288;75;-940;10;|;739;76;17203;562;879;604;9;|;1460;751;16895;672;1462;132;10;|;-1088;-1883;16675;244;1071;518;10;|;-29;-1683;16923;384;1039;505;11;|;1309;-1095;17066;-36;324;-552;10;|;-36;-713;16701;-121;372;122;10;|;508;-1318;16833;475;1154;405;10;|;1178;878;17067;636;1114;428;10;|;1613;-573;17243;291;-234;-473;9;|;-107;-259;17287;94;421;369;10;|;-900;-647;16513;168;1330;252;10;|;1372;286;17035;202;263;469;10;|;238;-2006;17142;573;1201;492;9;|;2458;589;17695;356;187;208;11;|;827;-1085;17644;308;233;66;10;|;1;-1373;17214;557;1279;298;9;|;-281;-244;17071;209;517;-36;10;|;-486;-961;17075;467;440;367;10;|;1264;-339;16918;374;476;116;8;|;661;-1330;16789;-37;478;15;9;|;1208;-724;16790;558;1303;335;8;|;-236;-1404;16678;309;426;376;8;|;367;-1402;17308;-32;428;-957;7;|;-849;-360;17640;1;371;635;7;|;-784;90;17924;533;128;-661;5;|;-723;-1062;16413;270;-79;702;7;|;458;-1235;16925;354;-117;194;5;|;-411;-1116;17403;280;777;530;1",
"17/03/2022 15:13;13.6;14.8;|;398;836;17368;-3;924;472;9;|;838;125;17110;675;938;230;10;|;-298;-1253;17164;290;75;-942;10;|;749;78;17221;560;883;601;9;|;1463;752;16904;673;1467;134;10;|;-1085;-1884;16655;239;1067;520;10;|;-27;-1680;16923;393;1032;507;10;|;1308;-1095;17065;-43;328;-548;10;|;-38;-712;16704;-124;373;122;10;|;512;-1318;16830;473;1155;408;10;|;1181;879;17070;637;1113;436;10;|;1610;-567;17239;287;-240;-462;10;|;-108;-250;17297;94;420;370;10;|;-903;-652;16518;169;1326;257;9;|;1371;282;17047;198;263;471;10;|;244;-2006;17137;570;1205;487;9;|;2461;589;17689;354;199;210;11;|;823;-1081;17642;310;235;68;10;|;1;-1370;17214;560;1278;290;9;|;-280;-245;17062;209;517;-31;9;|;-484;-963;17074;463;440;374;10;|;1271;-340;16912;374;477;125;8;|;668;-1331;16786;-37;478;7;9;|;1209;-724;16784;557;1301;329;8;|;-237;-1406;16673;316;425;371;8;|;371;-1401;17307;-30;429;-961;7;|;-854;-356;17647;7;368;631;7;|;-781;85;17934;531;130;-664;5;|;-726;-1062;16400;274;-79;707;6;|;460;-1233;16931;355;-113;196;5;|;-413;-1119;17405;280;780;525;1",
"17/03/2022 15:28;13.6;14.3;|;396;832;17379;-3;919;470;10;|;837;128;17114;670;945;233;10;|;-304;-1246;17167;292;77;-931;10;|;744;70;17211;567;888;601;9;|;1459;748;16893;672;1480;141;10;|;-1084;-1887;16658;236;1068;522;10;|;-29;-1686;16912;388;1035;500;10;|;1312;-1092;17062;-35;328;-545;10;|;-40;-709;16701;-120;374;121;10;|;515;-1327;16826;475;1148;402;10;|;1179;881;17063;635;1114;430;9;|;1613;-568;17246;293;-230;-461;9;|;-103;-265;17289;96;420;363;10;|;-896;-656;16522;167;1320;250;10;|;1368;288;17039;195;263;471;9;|;239;-2003;17129;578;1203;490;9;|;2461;586;17699;356;202;209;11;|;823;-1092;17649;310;237;65;10;|;-7;-1369;17215;550;1279;288;9;|;-290;-249;17072;208;515;-33;9;|;-488;-965;17071;472;439;372;10;|;1270;-342;16923;377;476;120;8;|;671;-1337;16788;-33;482;14;9;|;1206;-725;16783;556;1306;344;9;|;-232;-1404;16681;309;423;379;8;|;364;-1400;17305;-28;432;-952;7;|;-854;-363;17644;1;369;626;8;|;-782;89;17931;529;134;-661;5;|;-723;-1057;16407;269;-82;700;6;|;459;-1235;16929;358;-119;193;5;|;-414;-1122;17400;282;775;526;2"]
'''
#Dividi la riga principale usando il primo delimitatore ';'
UnitName = ''
ToolNameID = ''
matrice_valori = []
for riga in righe:
timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
EventDate, EventTime = timestamp.split(' ')
valori_nodi = rilevazioni.split('|')[1:-1] # Dividi per '|' e prendi gli elementi interni togliendo primo e ultimo
for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
valori = valori_nodo.split(';')[1:-1]
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (16 - len(valori))))
# Crea un DataFrame pandas per visualizzare la matrice in forma tabellare
colonne = ['UnitName', 'ToolNameID', 'NodeNum', 'EventDate', 'EventTime', 'BatLevel', 'Temperature', 'Val0', 'Val1', 'Val2', 'Val3', 'Val4', 'Val5', 'Val6', 'Val7', 'Val8', 'Val9', 'ValA', 'ValB', 'ValC', 'ValD', 'ValE', 'ValF']
df = pd.DataFrame(matrice_valori, columns=colonne)
# Stampa il DataFrame
#print(df.to_string())
engine = create_engine('mysql+mysqlconnector://root:batt1l0@10.211.114.173/ase_lar')
metadata = MetaData()
table = Table('RAWDATACOR', metadata, autoload_with=engine)
Base = declarative_base()
class RawDataCor(Base):
__table__ = table
with Session(engine) as session:
for index, row in df.iterrows():
try:
nuova_riga = RawDataCor(**row.to_dict())
session.add(nuova_riga)
session.commit()
except IntegrityError:
session.rollback() # Ignora l'errore di chiave duplicata
print(f"Riga con chiavi duplicate ignorata: {row.to_dict()}")
except Exception as e:
session.rollback()
print(f"Errore inatteso durante l'inserimento: {e}, riga: {row.to_dict()}")
#df.to_sql('RAWDATACOR', con=engine, if_exists='', index=False)

31
pyproject.toml Normal file

@@ -0,0 +1,31 @@
[project]
name = "ase"
version = "0.9.0"
description = "ASE backend"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"aiomysql>=0.2.0",
"cryptography>=45.0.3",
"mysql-connector-python>=9.3.0",
"pyftpdlib>=2.0.1",
"pyproj>=3.7.1",
"utm>=0.8.1",
]
[dependency-groups]
dev = [
"mkdocs>=1.6.1",
"mkdocs-gen-files>=0.5.0",
"mkdocs-literate-nav>=0.6.2",
"mkdocs-material>=9.6.15",
"mkdocstrings[python]>=0.29.1",
"ruff>=0.12.11",
]
[tool.setuptools]
package-dir = {"" = "src"}
[tool.setuptools.packages.find]
exclude = ["test","build"]
where = ["src"]


@@ -1,4 +0,0 @@
for (( i=1; i<=29000; i++ ))
do
./transform_file.py
done

121
src/elab_orchestrator.py Executable file

@@ -0,0 +1,121 @@
#!.venv/bin/python
"""
Orchestrator of the workers that launch the elaborations
"""
# Import necessary libraries
import logging
import asyncio
# Import custom modules for configuration and database connection
from utils.config import loader_matlab_elab as setting
from utils.database import WorkflowFlags
from utils.database.action_query import get_tool_info, check_flag_elab
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
from utils.database.loader_action import update_status, unlock
from utils.connect.send_email import send_error_email
from utils.general import read_error_lines_from_logs
# Initialize the logger for this module
logger = logging.getLogger()
# Delay between one CSV processing run and the next (in seconds)
ELAB_PROCESSING_DELAY = 0.2
# Wait time when there are no records to elaborate
NO_RECORD_SLEEP = 60
async def worker(worker_id: int, cfg: object, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'elaborazione dei dati caricati.
Il worker preleva un record dal database che indica dati pronti per
l'elaborazione, esegue un comando Matlab associato e attende
prima di iniziare un nuovo ciclo.
Args:
worker_id (int): L'ID univoco del worker.
cfg (object): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Imposta il context per questo worker
worker_context.set(f"W{worker_id:02d}")
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.info("Avviato")
while True:
try:
logger.info("Inizio elaborazione")
if not await check_flag_elab(pool):
record = await get_next_csv_atomic(pool, cfg.dbrectable, WorkflowFlags.DATA_LOADED, WorkflowFlags.DATA_ELABORATED)
if record:
rec_id, _, tool_type, unit_name, tool_name = [x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record]
if tool_type.lower() != "gd": # i tool GD non devono essere elaborati ???
tool_elab_info = await get_tool_info(WorkflowFlags.DATA_ELABORATED, unit_name.upper(), tool_name.upper(), pool)
if tool_elab_info:
if tool_elab_info['statustools'].lower() in cfg.elab_status:
logger.info("Elaborazione ID %s per %s %s", rec_id, unit_name, tool_name)
await update_status(cfg, rec_id, WorkflowFlags.START_ELAB, pool)
matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name.upper()} {tool_name.upper()}"
proc = await asyncio.create_subprocess_shell(
matlab_cmd,
cwd=cfg.matlab_func_path,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
logger.error("Errore durante l'elaborazione")
logger.error(stderr.decode().strip())
if proc.returncode == 124:
error_type = f"Matlab elab excessive duration: killed after {cfg.matlab_timeout} seconds."
else:
error_type = f"Matlab elab failed: {proc.returncode}."
# to be verified: where the logs should be read from
# with open(f"{cfg.matlab_error_path}{unit_name}{tool_name}_output_error.txt", "w") as f:
# f.write(stderr.decode().strip())
# errors = [line for line in stderr.decode().strip() if line.startswith("Error")]
# warnings = [line for line in stderr.decode().strip() if not line.startswith("Error")]
errors, warnings = await read_error_lines_from_logs(cfg.matlab_error_path, f"_{unit_name}_{tool_name}*_*_output_error.txt")
await send_error_email(unit_name.upper(), tool_name.upper(), tool_elab_info['matcall'], error_type, errors, warnings)
else:
logger.info(stdout.decode().strip())
await update_status(cfg, rec_id, WorkflowFlags.DATA_ELABORATED, pool)
await unlock(cfg, rec_id, pool)
await asyncio.sleep(ELAB_PROCESSING_DELAY)
else:
logger.info("ID %s %s - %s %s: MatLab calc by-passed.", rec_id, unit_name, tool_name, tool_elab_info['statustools'])
await update_status(cfg, rec_id, WorkflowFlags.DATA_ELABORATED, pool)
await update_status(cfg, rec_id, WorkflowFlags.DUMMY_ELABORATED, pool)
await unlock(cfg, rec_id, pool)
else:
await update_status(cfg, rec_id, WorkflowFlags.DATA_ELABORATED, pool)
await update_status(cfg, rec_id, WorkflowFlags.DUMMY_ELABORATED, pool)
await unlock(cfg, rec_id, pool)
else:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
else:
logger.info("Flag fermo elaborazione attivato")
await asyncio.sleep(NO_RECORD_SLEEP)
except Exception as e: # pylint: disable=broad-except
logger.error("Errore durante l'esecuzione: %s", e, exc_info=debug_mode)
await asyncio.sleep(1)
async def main():
"""Funzione principale che avvia l'elab_orchestrator."""
await run_orchestrator(setting.Config, worker)
if __name__ == "__main__":
asyncio.run(main())
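
The worker relies on the exit-status convention of GNU timeout, which returns 124 when it kills the wrapped command; any other non-zero code is a failure of the Matlab call itself. A minimal standalone sketch of that check (placeholder command and names, not the repository's Matlab invocation):

import asyncio

async def run_with_timeout(cmd: str, timeout_s: int) -> int:
    # Wrap the command with GNU timeout, as the orchestrator does for the Matlab call.
    proc = await asyncio.create_subprocess_shell(
        f"timeout {timeout_s} {cmd}",
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    _, stderr = await proc.communicate()
    if proc.returncode == 124:
        print(f"killed after {timeout_s} seconds")
    elif proc.returncode != 0:
        print(f"failed with {proc.returncode}: {stderr.decode().strip()}")
    return proc.returncode

asyncio.run(run_with_timeout("sleep 5", 2))  # expected output: killed after 2 seconds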

183
src/ftp_csv_receiver.py Executable file
View File

@@ -0,0 +1,183 @@
#!.venv/bin/python
"""
This module implements an FTP server with custom commands for
managing virtual users and handling CSV file uploads.
"""
import os
import logging
from hashlib import sha256
from pathlib import Path
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed
from utils.config import loader_ftp_csv as setting
from utils.database.connection import connetti_db
from utils.connect import user_admin, file_management
# Configure logging (moved inside main function)
logger = logging.getLogger(__name__)
class DummySha256Authorizer(DummyAuthorizer):
"""Custom authorizer that uses SHA256 for password hashing and manages users from a database."""
def __init__(self: object, cfg: dict) -> None:
"""Initializes the authorizer, adds the admin user, and loads users from the database.
Args:
cfg: The configuration object.
"""
super().__init__()
self.add_user(
cfg.adminuser[0], cfg.adminuser[1], cfg.adminuser[2], perm=cfg.adminuser[3]
)
# Define the database connection
conn = connetti_db(cfg)
# Create a cursor
cur = conn.cursor()
cur.execute(
f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE disabled_at IS NULL"
)
for ftpuser, user_hash, virtpath, perm in cur.fetchall():
# Create the user's directory if it does not exist.
try:
Path(cfg.virtpath + ftpuser).mkdir(parents=True, exist_ok=True)
self.add_user(ftpuser, user_hash, virtpath, perm)
except Exception as e: # pylint: disable=broad-except
logger.error("Error creating virtual user path for %s: %s", ftpuser, e)
def validate_authentication(
self: object, username: str, password: str, handler: object
) -> None:
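"""Checks the supplied password against the stored SHA256 hash.
Raises:
AuthenticationFailed: If the user is unknown or the password does not match.
"""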
# Validate the user's password against the stored user_hash
user_hash = sha256(password.encode("UTF-8")).hexdigest()
try:
if self.user_table[username]["pwd"] != user_hash:
raise KeyError
except KeyError:
raise AuthenticationFailed
class ASEHandler(FTPHandler):
"""Custom FTP handler that extends FTPHandler with custom commands and file handling."""
def __init__(
self: object, conn: object, server: object, ioloop: object = None
) -> None:
"""Initializes the handler, adds custom commands, and sets up command permissions.
Args:
conn (object): The connection object.
server (object): The FTP server object.
ioloop (object): The I/O loop object.
"""
super().__init__(conn, server, ioloop)
self.proto_cmds = FTPHandler.proto_cmds.copy()
# Add custom SITE commands for managing virtual users (FTP commands are case-insensitive, so clients may send them in lowercase as load_ftp_users.py does)
self.proto_cmds.update(
{
"SITE ADDU": dict(
perm="M",
auth=True,
arg=True,
help="Syntax: SITE <SP> ADDU USERNAME PASSWORD (add virtual user).",
)
}
)
self.proto_cmds.update(
{
"SITE DISU": dict(
perm="M",
auth=True,
arg=True,
help="Syntax: SITE <SP> DISU USERNAME (disable virtual user).",
)
}
)
self.proto_cmds.update(
{
"SITE ENAU": dict(
perm="M",
auth=True,
arg=True,
help="Syntax: SITE <SP> ENAU USERNAME (enable virtual user).",
)
}
)
self.proto_cmds.update(
{
"SITE LSTU": dict(
perm="M",
auth=True,
arg=None,
help="Syntax: SITE <SP> LSTU (list virtual users).",
)
}
)
def on_file_received(self: object, file: str) -> None:
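"""Delegates handling of a completed upload to file_management.
Args:
file: The path to the received file.
"""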
return file_management.on_file_received(self, file)
def on_incomplete_file_received(self: object, file: str) -> None:
"""Removes partially uploaded files.
Args:
file: The path to the incomplete file.
"""
os.remove(file)
def ftp_SITE_ADDU(self: object, line: str) -> None:
return user_admin.ftp_SITE_ADDU(self, line)
def ftp_SITE_DISU(self: object, line: str) -> None:
return user_admin.ftp_SITE_DISU(self, line)
def ftp_SITE_ENAU(self: object, line: str) -> None:
return user_admin.ftp_SITE_ENAU(self, line)
def ftp_SITE_LSTU(self: object, line: str) -> None:
return user_admin.ftp_SITE_LSTU(self, line)
def main():
"""Main function to start the FTP server."""
# Load the configuration settings
cfg = setting.Config()
try:
# Initialize the authorizer and handler
authorizer = DummySha256Authorizer(cfg)
handler = ASEHandler
handler.cfg = cfg
handler.authorizer = authorizer
handler.masquerade_address = cfg.proxyaddr
# Set the range of passive ports for the FTP server
_range = list(range(cfg.firstport, cfg.firstport + cfg.portrangewidth))
handler.passive_ports = _range
# Configure logging
logging.basicConfig(
format="%(asctime)s - PID: %(process)d.%(name)s.%(levelname)s: %(message)s ",
# Use cfg.logfilename directly without checking its existence
filename=cfg.logfilename,
level=logging.INFO,
)
# Create and start the FTP server
server = FTPServer(("0.0.0.0", cfg.service_port), handler)
server.serve_forever()
except Exception as e:
logger.error("Exit with error: %s.", e)
if __name__ == "__main__":
main()
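
For reference, a sketch (host, port and credentials are assumptions) of how the stored password hash is produced and how a client holding the "M" permission drives the custom SITE commands:

from ftplib import FTP
from hashlib import sha256

# Value stored in the users table for the password "secret"
print(sha256("secret".encode("UTF-8")).hexdigest())

ftp = FTP()
ftp.connect("localhost", 2121)            # cfg.service_port (assumed value)
ftp.login("admin", "admin-password")      # the admin account from cfg.adminuser
print(ftp.sendcmd("SITE ADDU alice secret"))   # add a virtual user
print(ftp.sendcmd("SITE LSTU"))                # list virtual users
print(ftp.sendcmd("SITE DISU alice"))          # disable the user again
ftp.quit()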

152
src/load_ftp_users.py Normal file
View File

@@ -0,0 +1,152 @@
#!.venv/bin/python
"""
Script that fetches data from MySQL and sends FTP SITE commands
"""
from ftplib import FTP
import logging
import sys
from typing import List, Tuple
import mysql.connector
from utils.database.connection import connetti_db
from utils.config import users_loader as setting
# Logging configuration
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# FTP server configuration
FTP_CONFIG = {
'host': 'localhost',
'user': 'admin',
'password': 'batt1l0',
'port': 2121
}
def connect_ftp() -> FTP:
"""
Establishes a connection to the FTP server using the predefined configuration.
Returns:
FTP: An active FTP connection object.
"""
try:
ftp = FTP()
ftp.connect(FTP_CONFIG['host'], FTP_CONFIG['port'])
ftp.login(FTP_CONFIG['user'], FTP_CONFIG['password'])
logger.info("Connessione FTP stabilita")
return ftp
except Exception as e: # pylint: disable=broad-except
logger.error("Errore connessione FTP: %s", e)
sys.exit(1)
def fetch_data_from_db(connection: mysql.connector.MySQLConnection) -> List[Tuple]:
"""
Fetches username and password data from the 'ftp_accounts' table in the database.
Args:
connection (mysql.connector.MySQLConnection): The database connection object.
Returns:
List[Tuple]: A list of tuples, where each tuple contains (username, password).
"""
try:
cursor = connection.cursor()
# Adjust this query to your needs
query = """
SELECT username, password
FROM ase_lar.ftp_accounts
"""
cursor.execute(query)
results = cursor.fetchall()
logger.info("Prelevate %s righe dal database", len(results))
return results
except mysql.connector.Error as e:
logger.error("Errore query database: %s", e)
return []
finally:
cursor.close()
def send_site_command(ftp: FTP, command: str) -> bool:
"""
Sends a SITE command to the FTP server.
Args:
ftp (FTP): The FTP connection object.
command (str): The SITE command string to send (e.g., "ADDU username password").
Returns:
bool: True if the command was sent successfully, False otherwise.
"""
try:
# The SITE command is sent with sendcmd
response = ftp.sendcmd(f"SITE {command}")
logger.info("Comando SITE %s inviato. Risposta: %s", command, response)
return True
except Exception as e: # pylint: disable=broad-except
logger.error("Errore invio comando SITE %s: %s", command, e)
return False
def main():
"""
Main function to connect to the database, fetch FTP user data, and send SITE ADDU commands to the FTP server.
"""
logger.info("Avvio script caricamento utenti FTP")
cfg = setting.Config()
# Connections
db_connection = connetti_db(cfg)
ftp_connection = connect_ftp()
try:
# Fetch data from the database
data = fetch_data_from_db(db_connection)
if not data:
logger.warning("Nessun dato trovato nel database")
return
success_count = 0
error_count = 0
# Process each row
for row in data:
username, password = row
# Build the full SITE command
ftp_site_command = f'addu {username} {password}'
logger.info("Sending ftp command: %s", ftp_site_command)
# Send the SITE command
if send_site_command(ftp_connection, ftp_site_command):
success_count += 1
else:
error_count += 1
logger.info("Elaborazione completata. Successi: %s, Errori: %s", success_count, error_count)
except Exception as e: # pylint: disable=broad-except
logger.error("Errore generale: %s", e)
finally:
# Close the connections
try:
ftp_connection.quit()
logger.info("Connessione FTP chiusa")
except Exception as e: # pylint: disable=broad-except
logger.error("Errore chiusura connessione FTP: %s", e)
try:
db_connection.close()
logger.info("Connessione MySQL chiusa")
except Exception as e: # pylint: disable=broad-except
logger.error("Errore chiusura connessione MySQL: %s", e)
if __name__ == "__main__":
main()

136
src/load_orchestrator.py Executable file
View File

@@ -0,0 +1,136 @@
#!.venv/bin/python
"""
Orchestrator for the workers that load data into the raw data table (dataraw)
"""
# Import necessary libraries
import logging
import importlib
import asyncio
# Import custom modules for configuration and database connection
from utils.config import loader_load_data as setting
from utils.database import WorkflowFlags
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
# Initialize the logger for this module
logger = logging.getLogger()
# Delay between one CSV processing run and the next (in seconds)
CSV_PROCESSING_DELAY = 0.2
# Wait time when there are no records to process
NO_RECORD_SLEEP = 60
async def worker(worker_id: int, cfg: dict, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'elaborazione dei file CSV.
Il worker preleva un record CSV dal database, ne elabora il contenuto
e attende prima di iniziare un nuovo ciclo.
Args:
worker_id (int): L'ID univoco del worker.
cfg (dict): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Set the logging context for this worker
worker_context.set(f"W{worker_id:02d}")
logger.info("Avviato")
while True:
try:
logger.info("Inizio elaborazione")
record = await get_next_csv_atomic(
pool,
cfg.dbrectable,
WorkflowFlags.CSV_RECEIVED,
WorkflowFlags.DATA_LOADED,
)
if record:
success = await load_csv(record, cfg, pool)
if not success:
logger.error("Errore durante l'elaborazione")
await asyncio.sleep(CSV_PROCESSING_DELAY)
else:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
except Exception as e: # pylint: disable=broad-except
logger.error("Errore durante l'esecuzione: %s", e, exc_info=1)
await asyncio.sleep(1)
async def load_csv(record: tuple, cfg: object, pool: object) -> bool:
"""Carica ed elabora un record CSV utilizzando il modulo di parsing appropriato.
Args:
record: Una tupla contenente i dettagli del record CSV da elaborare
(rec_id, unit_type, tool_type, unit_name, tool_name).
cfg: L'oggetto di configurazione contenente i parametri del sistema.
pool (object): Il pool di connessioni al database.
Returns:
True se l'elaborazione del CSV è avvenuta con successo, False altrimenti.
"""
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.debug("Inizio ricerca nuovo CSV da elaborare")
rec_id, unit_type, tool_type, unit_name, tool_name = [
x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record
]
logger.info(
"Trovato CSV da elaborare: ID=%s, Tipo=%s_%s, Nome=%s_%s",
rec_id,
unit_type,
tool_type,
unit_name,
tool_name,
)
# Build the candidate module names to load dynamically
module_names = [
f"utils.parsers.by_name.{unit_name}_{tool_name}",
f"utils.parsers.by_name.{unit_name}_{tool_type}",
f"utils.parsers.by_name.{unit_name}_all",
f"utils.parsers.by_type.{unit_type}_{tool_type}",
]
modulo = None
for module_name in module_names:
try:
logger.debug("Caricamento dinamico del modulo: %s", module_name)
modulo = importlib.import_module(module_name)
logger.info("Funzione 'main_loader' caricata dal modulo %s", module_name)
break
except (ImportError, AttributeError) as e:
logger.debug(
"Modulo %s non presente o non valido. %s",
module_name,
e,
exc_info=debug_mode,
)
if not modulo:
logger.error("Nessun modulo trovato %s", module_names)
return False
# Get the 'main_loader' function from the module
funzione = getattr(modulo, "main_loader")
# Run the function
logger.info("Elaborazione con modulo %s per ID=%s", modulo, rec_id)
await funzione(cfg, rec_id, pool)
logger.info("Elaborazione completata per ID=%s", rec_id)
return True
async def main():
"""Funzione principale che avvia il load_orchestrator."""
await run_orchestrator(setting.Config, worker)
if __name__ == "__main__":
asyncio.run(main())
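
The dynamically resolved parser modules are expected to expose an async main_loader(cfg, rec_id, pool) coroutine, as the call above shows. A sketch of that contract (module path and body are illustrative only):

import logging

logger = logging.getLogger()

# e.g. utils/parsers/by_type/<unit_type>_<tool_type>.py
async def main_loader(cfg: object, rec_id: int, pool: object) -> None:
    """Parse the CSV behind record rec_id and load its rows into the raw data table."""
    logger.info("Parsing record %s", rec_id)
    # ... read the CSV file referenced by rec_id, normalize the rows,
    # insert them into cfg.dbrawdata and update the workflow status ...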

2587
src/old_scripts/TS_PiniScript.py Executable file

File diff suppressed because one or more lines are too long

15
src/old_scripts/dbconfig.py Executable file
View File

@@ -0,0 +1,15 @@
from configparser import ConfigParser
def read_db_config(filename='../env/config.ini', section='mysql'):
parser = ConfigParser()
parser.read(filename)
db = {}
if parser.has_section(section):
items = parser.items(section)
for item in items:
db[item[0]] = item[1]
else:
raise Exception('{0} not found in the {1} file'.format(section, filename))
return db
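
A matching ../env/config.ini sketch; the keys in the [mysql] section are assumptions and are passed verbatim to mysql.connector.MySQLConnection as keyword arguments:

[mysql]
host = localhost
user = ase
password = change-me
database = ase_lar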

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from decimal import Decimal
from datetime import datetime
import ezodf
def getDataFromCsv(pathFile):
try:
folder_path, file_with_extension = os.path.split(pathFile)
unit_name = os.path.basename(folder_path)#unitname
tool_name, _ = os.path.splitext(file_with_extension)#toolname
tool_name = tool_name.replace("HIRPINIA_", "")
tool_name = tool_name.split("_")[0]
print(unit_name, tool_name)
datiRaw = []
doc = ezodf.opendoc(pathFile)
for sheet in doc.sheets:
node_num = sheet.name.replace("S-", "")
print(f"Sheet Name: {sheet.name}")
rows_to_skip = 2
for i, row in enumerate(sheet.rows()):
if i < rows_to_skip:
continue
row_data = [cell.value for cell in row]
date_time = datetime.strptime(row_data[0], "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S").split(" ")
date = date_time[0]
time = date_time[1]
val0 = row_data[2]
val1 = row_data[4]
val2 = row_data[6]
val3 = row_data[8]
datiRaw.append((unit_name, tool_name, node_num, date, time, -1, -273, val0, val1, val2, val3))
try:
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor(dictionary=True)
queryRaw = "insert ignore into RAWDATACOR(UnitName,ToolNameID,NodeNum,EventDate,EventTime,BatLevel,Temperature,Val0,Val1,Val2,Val3) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.executemany(queryRaw, datiRaw)
conn.commit()
except Error as e:
print('Error:', e)
finally:
queryMatlab = "select m.matcall from tools as t join units as u on u.id=t.unit_id join matfuncs as m on m.id=t.matfunc where u.name=%s and t.name=%s"
cursor.execute(queryMatlab, [unit_name, tool_name])
resultMatlab = cursor.fetchall()
if(resultMatlab):
print("Avvio "+str(resultMatlab[0]["matcall"]))
os.system("cd /usr/local/matlab_func/; ./run_"+str(resultMatlab[0]["matcall"])+".sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+str(unit_name)+" "+str(tool_name)+"")
cursor.close()
conn.close()
except Exception as e:
print(f"An unexpected error occurred: {str(e)}\n")
def main():
print("Avviato.")
getDataFromCsv(sys.argv[1])
print("Finito.")
if __name__ == '__main__':
main()

View File

@@ -0,0 +1,305 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from decimal import Decimal
from datetime import datetime
def insertData(dati):
#print(dati)
#print(len(dati))
if(len(dati) > 0):
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor()
if(len(dati) == 2):
u = ""
t = ""
rawdata = dati[0]
elabdata = dati[1]
if(len(rawdata) > 0):
for r in rawdata:
#print(r)
#print(len(r))
if(len(r) == 6):#nodo1
unitname = r[0]
toolname = r[1]
nodenum = r[2]
pressure = Decimal(r[3])*100
date = r[4]
time = r[5]
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [unitname, toolname, nodenum])
result = cursor.fetchall()
if(result):
if(result[0][8] is None):
datetimeOld = datetime.strptime(str(result[0][4]) + " " + str(result[0][5]), "%Y-%m-%d %H:%M:%S")
datetimeNew = datetime.strptime(str(date) + " " + str(time), "%Y-%m-%d %H:%M:%S")
dateDiff = datetimeNew - datetimeOld
if(dateDiff.total_seconds() / 3600 >= 5):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "UPDATE RAWDATACOR SET val0=%s, EventDate=%s, EventTime=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND val0 is NULL ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [pressure, date, time, unitname, toolname, nodenum])
conn.commit()
except Error as e:
print('Error:', e)
elif(result[0][8] is not None):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
except Error as e:
print('Error:', e)
else:#altri 2->5
unitname = r[0]
toolname = r[1]
nodenum = r[2]
freqinhz = r[3]
therminohms = r[4]
freqindigit = r[5]
date = r[6]
time = r[7]
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [unitname, toolname, nodenum])
result = cursor.fetchall()
if(result):
if(result[0][8] is None):
query = "UPDATE RAWDATACOR SET val0=%s, val1=%s, val2=%s, EventDate=%s, EventTime=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND val0 is NULL ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [freqinhz, therminohms, freqindigit, date, time, unitname, toolname, nodenum])
conn.commit()
except Error as e:
print('Error:', e)
elif(result[0][8] is not None):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, val1, val2, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, freqinhz, therminohms, freqindigit, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, val1, val2, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, freqinhz, therminohms, freqindigit, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
except Error as e:
print('Error:', e)
if(len(elabdata) > 0):
for e in elabdata:
#print(e)
#print(len(e))
if(len(e) == 6):#nodo1
unitname = e[0]
toolname = e[1]
nodenum = e[2]
pressure = Decimal(e[3])*100
date = e[4]
time = e[5]
try:
query = "INSERT INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [unitname, toolname, nodenum, date, time, pressure])
conn.commit()
except Error as e:
print('Error:', e)
else:#altri 2->5
unitname = e[0]
toolname = e[1]
u = unitname
t = toolname
nodenum = e[2]
pch = e[3]
tch = e[4]
date = e[5]
time = e[6]
try:
query = "INSERT INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift, T_node) VALUES(%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [unitname, toolname, nodenum, date, time, pch, tch])
conn.commit()
except Error as e:
print('Error:', e)
#os.system("cd /usr/local/matlab_func/; ./run_ATD_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+u+" "+t+"")
else:
for r in dati:
#print(r)
unitname = r[0]
toolname = r[1]
nodenum = r[2]
date = r[3]
time = r[4]
battery = r[5]
temperature = r[6]
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [unitname, toolname, nodenum])
result = cursor.fetchall()
if(result):
if(result[0][25] is None or result[0][25] == -1.00):
datetimeOld = datetime.strptime(str(result[0][4]) + " " + str(result[0][5]), "%Y-%m-%d %H:%M:%S")
datetimeNew = datetime.strptime(str(date) + " " + str(time), "%Y-%m-%d %H:%M:%S")
dateDiff = datetimeNew - datetimeOld
#print(dateDiff.total_seconds() / 3600)
if(dateDiff.total_seconds() / 3600 >= 5):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "UPDATE RAWDATACOR SET BatLevelModule=%s, TemperatureModule=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND (BatLevelModule is NULL or BatLevelModule = -1.00) ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [battery, temperature, unitname, toolname, nodenum])
conn.commit()
except Error as e:
print('Error:', e)
elif(result[0][25] is not None and result[0][25] != -1.00):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
conn.commit()
except Error as e:
print('Error:', e)
except Error as e:
print('Error:', e)
cursor.close()
conn.close()
def getDataFromCsv(pathFile):
with open(pathFile, 'r') as file:
data = file.readlines()
data = [row.rstrip() for row in data]
serial_number = data[0].split(",")[1]
data = data[10:]  # drop the header lines
dati = []
rawDatiReadings = []#tmp
elabDatiReadings = []#tmp
datiReadings = []
i = 0
unit = ""
tool = ""
#row = data[0]  # kept from when only a single row was handled
for row in data:  # handle multiple rows
row = row.split(",")
if i == 0:
query = "SELECT unit_name, tool_name FROM sisgeo_tools WHERE serial_number='"+serial_number+"'"
try:
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor()
cursor.execute(query)
result = cursor.fetchall()
except Error as e:
print('Error:', e)
unit = result[0][0]
tool = result[0][1]
#print(result[0][0])
#print(result[0][1])
if("health" in pathFile):
datetime = str(row[0]).replace("\"", "").split(" ")
date = datetime[0]
time = datetime[1]
battery = row[1]
temperature = row[2]
dati.append((unit, tool, 1, date, time, battery, temperature))
dati.append((unit, tool, 2, date, time, battery, temperature))
dati.append((unit, tool, 3, date, time, battery, temperature))
dati.append((unit, tool, 4, date, time, battery, temperature))
dati.append((unit, tool, 5, date, time, battery, temperature))
else:
datetime = str(row[0]).replace("\"", "").split(" ")
date = datetime[0]
time = datetime[1]
atmpressure = row[1]#nodo1
#raw
freqinhzch1 = row[2]#nodo2
freqindigitch1 = row[3]#nodo2
thermResInOhmsch1 = row[4]#nodo2
freqinhzch2 = row[5]#nodo3
freqindigitch2 = row[6]#nodo3
thermResInOhmsch2 = row[7]#nodo3
freqinhzch3 = row[8]#nodo4
freqindigitch3 = row[9]#nodo4
thermResInOhmsch3 = row[10]#nodo4
freqinhzch4 = row[11]#nodo5
freqindigitch4 = row[12]#nodo5
thermResInOhmsch4 = row[13]#nodo5
#elab
pch1 = row[18]#nodo2
tch1 = row[19]#nodo2
pch2 = row[20]#nodo3
tch2 = row[21]#nodo3
pch3 = row[22]#nodo4
tch3 = row[23]#nodo4
pch4 = row[24]#nodo5
tch4 = row[25]#nodo5
rawDatiReadings.append((unit, tool, 1, atmpressure, date, time))
rawDatiReadings.append((unit, tool, 2, freqinhzch1, thermResInOhmsch1, freqindigitch1, date, time))
rawDatiReadings.append((unit, tool, 3, freqinhzch2, thermResInOhmsch2, freqindigitch2, date, time))
rawDatiReadings.append((unit, tool, 4, freqinhzch3, thermResInOhmsch3, freqindigitch3, date, time))
rawDatiReadings.append((unit, tool, 5, freqinhzch4, thermResInOhmsch4, freqindigitch4, date, time))
elabDatiReadings.append((unit, tool, 1, atmpressure, date, time))
elabDatiReadings.append((unit, tool, 2, pch1, tch1, date, time))
elabDatiReadings.append((unit, tool, 3, pch2, tch2, date, time))
elabDatiReadings.append((unit, tool, 4, pch3, tch3, date, time))
elabDatiReadings.append((unit, tool, 5, pch4, tch4, date, time))
#[raw],[elab]  # layout used when there was only one row
#dati = [
# [
# (unit, tool, 1, atmpressure, date, time),
# (unit, tool, 2, freqinhzch1, thermResInOhmsch1, freqindigitch1, date, time),
# (unit, tool, 3, freqinhzch2, thermResInOhmsch2, freqindigitch2, date, time),
# (unit, tool, 4, freqinhzch3, thermResInOhmsch3, freqindigitch3, date, time),
# (unit, tool, 5, freqinhzch4, thermResInOhmsch4, freqindigitch4, date, time),
# ], [
# (unit, tool, 1, atmpressure, date, time),
# (unit, tool, 2, pch1, tch1, date, time),
# (unit, tool, 3, pch2, tch2, date, time),
# (unit, tool, 4, pch3, tch3, date, time),
# (unit, tool, 5, pch4, tch4, date, time),
# ]
# ]
i+=1
#print(dati)
if(len(rawDatiReadings) > 0 or len(elabDatiReadings) > 0):
datiReadings = [rawDatiReadings, elabDatiReadings]
if(len(datiReadings) > 0):
return datiReadings
return dati
def main():
insertData(getDataFromCsv(sys.argv[1]))
if __name__ == '__main__':
main()

306
src/old_scripts/sorotecPini.py Executable file
View File

@@ -0,0 +1,306 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from datetime import datetime
import math
import shutil
def removeDuplicates(lst):
return list(set([i for i in lst]))
def getDataFromCsvAndInsert(pathFile):
try:
print(pathFile)
folder_name = pathFile.split("/")[-2]  # folder name
with open(pathFile, 'r') as file:
data = file.readlines()
data = [row.rstrip() for row in data]
if(len(data) > 0 and data is not None):
if(folder_name == "ID0247"):
unit_name = "ID0247"
tool_name = "DT0001"
data = data[4:]  # drop the 4 header lines
data = [element for element in data if element != ""]
try:
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor()
queryElab = "insert ignore into ELABDATADISP(UnitName,ToolNameID,NodeNum,EventDate,EventTime,load_value) values (%s,%s,%s,%s,%s,%s)"
queryRaw = "insert ignore into RAWDATACOR(UnitName,ToolNameID,NodeNum,EventDate,EventTime,BatLevel,Temperature,Val0) values (%s,%s,%s,%s,%s,%s,%s,%s)"
if("_1_" in pathFile):
print("File tipo 1.\n")
#print(unit_name, tool_name)
dataToInsertElab = []
dataToInsertRaw = []
for row in data:
rowSplitted = row.replace("\"","").split(";")
eventTimestamp = rowSplitted[0].split(" ")
date = eventTimestamp[0].split("-")
date = date[2]+"-"+date[1]+"-"+date[0]
time = eventTimestamp[1]
an3 = rowSplitted[1]
an4 = rowSplitted[2]#V unit battery
OUTREG2 = rowSplitted[3]
E8_181_CH2 = rowSplitted[4]#2
E8_181_CH3 = rowSplitted[5]#3
E8_181_CH4 = rowSplitted[6]#4
E8_181_CH5 = rowSplitted[7]#5
E8_181_CH6 = rowSplitted[8]#6
E8_181_CH7 = rowSplitted[9]#7
E8_181_CH8 = rowSplitted[10]#8
E8_182_CH1 = rowSplitted[11]#9
E8_182_CH2 = rowSplitted[12]#10
E8_182_CH3 = rowSplitted[13]#11
E8_182_CH4 = rowSplitted[14]#12
E8_182_CH5 = rowSplitted[15]#13
E8_182_CH6 = rowSplitted[16]#14
E8_182_CH7 = rowSplitted[17]#15
E8_182_CH8 = rowSplitted[18]#16
E8_183_CH1 = rowSplitted[19]#17
E8_183_CH2 = rowSplitted[20]#18
E8_183_CH3 = rowSplitted[21]#19
E8_183_CH4 = rowSplitted[22]#20
E8_183_CH5 = rowSplitted[23]#21
E8_183_CH6 = rowSplitted[24]#22
E8_183_CH7 = rowSplitted[25]#23
E8_183_CH8 = rowSplitted[26]#24
E8_184_CH1 = rowSplitted[27]#25
E8_184_CH2 = rowSplitted[28]#26
E8_184_CH3 = rowSplitted[29]#27 mv/V
E8_184_CH4 = rowSplitted[30]#28 mv/V
E8_184_CH5 = rowSplitted[31]#29 mv/V
E8_184_CH6 = rowSplitted[32]#30 mv/V
E8_184_CH7 = rowSplitted[33]#31 mv/V
E8_184_CH8 = rowSplitted[34]#32 mv/V
E8_181_CH1 = rowSplitted[35]#1
an1 = rowSplitted[36]
an2 = rowSplitted[37]
#print(unit_name, tool_name, 1, E8_181_CH1)
#print(unit_name, tool_name, 2, E8_181_CH2)
#print(unit_name, tool_name, 3, E8_181_CH3)
#print(unit_name, tool_name, 4, E8_181_CH4)
#print(unit_name, tool_name, 5, E8_181_CH5)
#print(unit_name, tool_name, 6, E8_181_CH6)
#print(unit_name, tool_name, 7, E8_181_CH7)
#print(unit_name, tool_name, 8, E8_181_CH8)
#print(unit_name, tool_name, 9, E8_182_CH1)
#print(unit_name, tool_name, 10, E8_182_CH2)
#print(unit_name, tool_name, 11, E8_182_CH3)
#print(unit_name, tool_name, 12, E8_182_CH4)
#print(unit_name, tool_name, 13, E8_182_CH5)
#print(unit_name, tool_name, 14, E8_182_CH6)
#print(unit_name, tool_name, 15, E8_182_CH7)
#print(unit_name, tool_name, 16, E8_182_CH8)
#print(unit_name, tool_name, 17, E8_183_CH1)
#print(unit_name, tool_name, 18, E8_183_CH2)
#print(unit_name, tool_name, 19, E8_183_CH3)
#print(unit_name, tool_name, 20, E8_183_CH4)
#print(unit_name, tool_name, 21, E8_183_CH5)
#print(unit_name, tool_name, 22, E8_183_CH6)
#print(unit_name, tool_name, 23, E8_183_CH7)
#print(unit_name, tool_name, 24, E8_183_CH8)
#print(unit_name, tool_name, 25, E8_184_CH1)
#print(unit_name, tool_name, 26, E8_184_CH2)
#print(unit_name, tool_name, 27, E8_184_CH3)
#print(unit_name, tool_name, 28, E8_184_CH4)
#print(unit_name, tool_name, 29, E8_184_CH5)
#print(unit_name, tool_name, 30, E8_184_CH6)
#print(unit_name, tool_name, 31, E8_184_CH7)
#print(unit_name, tool_name, 32, E8_184_CH8)
#---------------------------------------------------------------------------------------
dataToInsertRaw.append((unit_name, tool_name, 1, date, time, an4, -273, E8_181_CH1))
dataToInsertRaw.append((unit_name, tool_name, 2, date, time, an4, -273, E8_181_CH2))
dataToInsertRaw.append((unit_name, tool_name, 3, date, time, an4, -273, E8_181_CH3))
dataToInsertRaw.append((unit_name, tool_name, 4, date, time, an4, -273, E8_181_CH4))
dataToInsertRaw.append((unit_name, tool_name, 5, date, time, an4, -273, E8_181_CH5))
dataToInsertRaw.append((unit_name, tool_name, 6, date, time, an4, -273, E8_181_CH6))
dataToInsertRaw.append((unit_name, tool_name, 7, date, time, an4, -273, E8_181_CH7))
dataToInsertRaw.append((unit_name, tool_name, 8, date, time, an4, -273, E8_181_CH8))
dataToInsertRaw.append((unit_name, tool_name, 9, date, time, an4, -273, E8_182_CH1))
dataToInsertRaw.append((unit_name, tool_name, 10, date, time, an4, -273, E8_182_CH2))
dataToInsertRaw.append((unit_name, tool_name, 11, date, time, an4, -273, E8_182_CH3))
dataToInsertRaw.append((unit_name, tool_name, 12, date, time, an4, -273, E8_182_CH4))
dataToInsertRaw.append((unit_name, tool_name, 13, date, time, an4, -273, E8_182_CH5))
dataToInsertRaw.append((unit_name, tool_name, 14, date, time, an4, -273, E8_182_CH6))
dataToInsertRaw.append((unit_name, tool_name, 15, date, time, an4, -273, E8_182_CH7))
dataToInsertRaw.append((unit_name, tool_name, 16, date, time, an4, -273, E8_182_CH8))
dataToInsertRaw.append((unit_name, tool_name, 17, date, time, an4, -273, E8_183_CH1))
dataToInsertRaw.append((unit_name, tool_name, 18, date, time, an4, -273, E8_183_CH2))
dataToInsertRaw.append((unit_name, tool_name, 19, date, time, an4, -273, E8_183_CH3))
dataToInsertRaw.append((unit_name, tool_name, 20, date, time, an4, -273, E8_183_CH4))
dataToInsertRaw.append((unit_name, tool_name, 21, date, time, an4, -273, E8_183_CH5))
dataToInsertRaw.append((unit_name, tool_name, 22, date, time, an4, -273, E8_183_CH6))
dataToInsertRaw.append((unit_name, tool_name, 23, date, time, an4, -273, E8_183_CH7))
dataToInsertRaw.append((unit_name, tool_name, 24, date, time, an4, -273, E8_183_CH8))
dataToInsertRaw.append((unit_name, tool_name, 25, date, time, an4, -273, E8_184_CH1))
dataToInsertRaw.append((unit_name, tool_name, 26, date, time, an4, -273, E8_184_CH2))
#---------------------------------------------------------------------------------------
dataToInsertElab.append((unit_name, tool_name, 1, date, time, E8_181_CH1))
dataToInsertElab.append((unit_name, tool_name, 2, date, time, E8_181_CH2))
dataToInsertElab.append((unit_name, tool_name, 3, date, time, E8_181_CH3))
dataToInsertElab.append((unit_name, tool_name, 4, date, time, E8_181_CH4))
dataToInsertElab.append((unit_name, tool_name, 5, date, time, E8_181_CH5))
dataToInsertElab.append((unit_name, tool_name, 6, date, time, E8_181_CH6))
dataToInsertElab.append((unit_name, tool_name, 7, date, time, E8_181_CH7))
dataToInsertElab.append((unit_name, tool_name, 8, date, time, E8_181_CH8))
dataToInsertElab.append((unit_name, tool_name, 9, date, time, E8_182_CH1))
dataToInsertElab.append((unit_name, tool_name, 10, date, time, E8_182_CH2))
dataToInsertElab.append((unit_name, tool_name, 11, date, time, E8_182_CH3))
dataToInsertElab.append((unit_name, tool_name, 12, date, time, E8_182_CH4))
dataToInsertElab.append((unit_name, tool_name, 13, date, time, E8_182_CH5))
dataToInsertElab.append((unit_name, tool_name, 14, date, time, E8_182_CH6))
dataToInsertElab.append((unit_name, tool_name, 15, date, time, E8_182_CH7))
dataToInsertElab.append((unit_name, tool_name, 16, date, time, E8_182_CH8))
dataToInsertElab.append((unit_name, tool_name, 17, date, time, E8_183_CH1))
dataToInsertElab.append((unit_name, tool_name, 18, date, time, E8_183_CH2))
dataToInsertElab.append((unit_name, tool_name, 19, date, time, E8_183_CH3))
dataToInsertElab.append((unit_name, tool_name, 20, date, time, E8_183_CH4))
dataToInsertElab.append((unit_name, tool_name, 21, date, time, E8_183_CH5))
dataToInsertElab.append((unit_name, tool_name, 22, date, time, E8_183_CH6))
dataToInsertElab.append((unit_name, tool_name, 23, date, time, E8_183_CH7))
dataToInsertElab.append((unit_name, tool_name, 24, date, time, E8_183_CH8))
dataToInsertElab.append((unit_name, tool_name, 25, date, time, E8_184_CH1))
dataToInsertElab.append((unit_name, tool_name, 26, date, time, E8_184_CH2))
#---------------------------------------------------------------------------------------
cursor.executemany(queryElab, dataToInsertElab)
cursor.executemany(queryRaw, dataToInsertRaw)
conn.commit()
#print(dataToInsertElab)
#print(dataToInsertRaw)
elif("_2_" in pathFile):
print("File tipo 2.\n")
#print(unit_name, tool_name)
dataToInsertElab = []
dataToInsertRaw = []
for row in data:
rowSplitted = row.replace("\"","").split(";")
eventTimestamp = rowSplitted[0].split(" ")
date = eventTimestamp[0].split("-")
date = date[2]+"-"+date[1]+"-"+date[0]
time = eventTimestamp[1]
an2 = rowSplitted[1]
an3 = rowSplitted[2]
an1 = rowSplitted[3]
OUTREG2 = rowSplitted[4]
E8_181_CH1 = rowSplitted[5]#33 mv/V
E8_181_CH2 = rowSplitted[6]#34 mv/V
E8_181_CH3 = rowSplitted[7]#35 mv/V
E8_181_CH4 = rowSplitted[8]#36 mv/V
E8_181_CH5 = rowSplitted[9]#37 mv/V
E8_181_CH6 = rowSplitted[10]#38 mv/V
E8_181_CH7 = rowSplitted[11]#39 mv/V
E8_181_CH8 = rowSplitted[12]#40 mv/V
E8_182_CH1 = rowSplitted[13]#41
E8_182_CH2 = rowSplitted[14]#42
E8_182_CH3 = rowSplitted[15]#43
E8_182_CH4 = rowSplitted[16]#44
E8_182_CH5 = rowSplitted[17]#45 mv/V
E8_182_CH6 = rowSplitted[18]#46 mv/V
E8_182_CH7 = rowSplitted[19]#47 mv/V
E8_182_CH8 = rowSplitted[20]#48 mv/V
E8_183_CH1 = rowSplitted[21]#49
E8_183_CH2 = rowSplitted[22]#50
E8_183_CH3 = rowSplitted[23]#51
E8_183_CH4 = rowSplitted[24]#52
E8_183_CH5 = rowSplitted[25]#53 mv/V
E8_183_CH6 = rowSplitted[26]#54 mv/V
E8_183_CH7 = rowSplitted[27]#55 mv/V
E8_183_CH8 = rowSplitted[28]#56
E8_184_CH1 = rowSplitted[29]#57
E8_184_CH2 = rowSplitted[30]#58
E8_184_CH3 = rowSplitted[31]#59
E8_184_CH4 = rowSplitted[32]#60
E8_184_CH5 = rowSplitted[33]#61
E8_184_CH6 = rowSplitted[34]#62
E8_184_CH7 = rowSplitted[35]#63 mv/V
E8_184_CH8 = rowSplitted[36]#64 mv/V
an4 = rowSplitted[37]#V unit battery
#print(unit_name, tool_name, 33, E8_181_CH1)
#print(unit_name, tool_name, 34, E8_181_CH2)
#print(unit_name, tool_name, 35, E8_181_CH3)
#print(unit_name, tool_name, 36, E8_181_CH4)
#print(unit_name, tool_name, 37, E8_181_CH5)
#print(unit_name, tool_name, 38, E8_181_CH6)
#print(unit_name, tool_name, 39, E8_181_CH7)
#print(unit_name, tool_name, 40, E8_181_CH8)
#print(unit_name, tool_name, 41, E8_182_CH1)
#print(unit_name, tool_name, 42, E8_182_CH2)
#print(unit_name, tool_name, 43, E8_182_CH3)
#print(unit_name, tool_name, 44, E8_182_CH4)
#print(unit_name, tool_name, 45, E8_182_CH5)
#print(unit_name, tool_name, 46, E8_182_CH6)
#print(unit_name, tool_name, 47, E8_182_CH7)
#print(unit_name, tool_name, 48, E8_182_CH8)
#print(unit_name, tool_name, 49, E8_183_CH1)
#print(unit_name, tool_name, 50, E8_183_CH2)
#print(unit_name, tool_name, 51, E8_183_CH3)
#print(unit_name, tool_name, 52, E8_183_CH4)
#print(unit_name, tool_name, 53, E8_183_CH5)
#print(unit_name, tool_name, 54, E8_183_CH6)
#print(unit_name, tool_name, 55, E8_183_CH7)
#print(unit_name, tool_name, 56, E8_183_CH8)
#print(unit_name, tool_name, 57, E8_184_CH1)
#print(unit_name, tool_name, 58, E8_184_CH2)
#print(unit_name, tool_name, 59, E8_184_CH3)
#print(unit_name, tool_name, 60, E8_184_CH4)
#print(unit_name, tool_name, 61, E8_184_CH5)
#print(unit_name, tool_name, 62, E8_184_CH6)
#print(unit_name, tool_name, 63, E8_184_CH7)
#print(unit_name, tool_name, 64, E8_184_CH8)
#print(rowSplitted)
#---------------------------------------------------------------------------------------
dataToInsertRaw.append((unit_name, tool_name, 41, date, time, an4, -273, E8_182_CH1))
dataToInsertRaw.append((unit_name, tool_name, 42, date, time, an4, -273, E8_182_CH2))
dataToInsertRaw.append((unit_name, tool_name, 43, date, time, an4, -273, E8_182_CH3))
dataToInsertRaw.append((unit_name, tool_name, 44, date, time, an4, -273, E8_182_CH4))
dataToInsertRaw.append((unit_name, tool_name, 49, date, time, an4, -273, E8_183_CH1))
dataToInsertRaw.append((unit_name, tool_name, 50, date, time, an4, -273, E8_183_CH2))
dataToInsertRaw.append((unit_name, tool_name, 51, date, time, an4, -273, E8_183_CH3))
dataToInsertRaw.append((unit_name, tool_name, 52, date, time, an4, -273, E8_183_CH4))
dataToInsertRaw.append((unit_name, tool_name, 56, date, time, an4, -273, E8_183_CH8))
dataToInsertRaw.append((unit_name, tool_name, 57, date, time, an4, -273, E8_184_CH1))
dataToInsertRaw.append((unit_name, tool_name, 58, date, time, an4, -273, E8_184_CH2))
dataToInsertRaw.append((unit_name, tool_name, 59, date, time, an4, -273, E8_184_CH3))
dataToInsertRaw.append((unit_name, tool_name, 60, date, time, an4, -273, E8_184_CH4))
dataToInsertRaw.append((unit_name, tool_name, 61, date, time, an4, -273, E8_184_CH5))
dataToInsertRaw.append((unit_name, tool_name, 62, date, time, an4, -273, E8_184_CH6))
#---------------------------------------------------------------------------------------
dataToInsertElab.append((unit_name, tool_name, 41, date, time, E8_182_CH1))
dataToInsertElab.append((unit_name, tool_name, 42, date, time, E8_182_CH2))
dataToInsertElab.append((unit_name, tool_name, 43, date, time, E8_182_CH3))
dataToInsertElab.append((unit_name, tool_name, 44, date, time, E8_182_CH4))
dataToInsertElab.append((unit_name, tool_name, 49, date, time, E8_183_CH1))
dataToInsertElab.append((unit_name, tool_name, 50, date, time, E8_183_CH2))
dataToInsertElab.append((unit_name, tool_name, 51, date, time, E8_183_CH3))
dataToInsertElab.append((unit_name, tool_name, 52, date, time, E8_183_CH4))
dataToInsertElab.append((unit_name, tool_name, 56, date, time, E8_183_CH8))
dataToInsertElab.append((unit_name, tool_name, 57, date, time, E8_184_CH1))
dataToInsertElab.append((unit_name, tool_name, 58, date, time, E8_184_CH2))
dataToInsertElab.append((unit_name, tool_name, 59, date, time, E8_184_CH3))
dataToInsertElab.append((unit_name, tool_name, 60, date, time, E8_184_CH4))
dataToInsertElab.append((unit_name, tool_name, 61, date, time, E8_184_CH5))
dataToInsertElab.append((unit_name, tool_name, 62, date, time, E8_184_CH6))
#---------------------------------------------------------------------------------------
cursor.executemany(queryElab, dataToInsertElab)
cursor.executemany(queryRaw, dataToInsertRaw)
conn.commit()
#print(dataToInsertElab)
#print(dataToInsertRaw)
except Error as e:
print('Error:', e)
finally:
cursor.close()
conn.close()
except Exception as e:
print(f"An unexpected error occurred: {str(e)}\n")
def main():
getDataFromCsvAndInsert(sys.argv[1])
if __name__ == '__main__':
main()

171
src/old_scripts/vulinkScript.py Executable file
View File

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from datetime import datetime
import json
def checkBatteryLevel(db_conn, db_cursor, unit, date_time, battery_perc):
print(date_time, battery_perc)
if(float(battery_perc) < 25):  # below 25%
query = "select unit_name, date_time from alarms where unit_name=%s and date_time < %s and type_id=2 order by date_time desc limit 1"
db_cursor.execute(query, [unit, date_time])
result = db_cursor.fetchall()
if(len(result) > 0):
alarm_date_time = result[0]["date_time"]#datetime not str
format1 = "%Y-%m-%d %H:%M"
dt1 = datetime.strptime(date_time, format1)
time_difference = abs(dt1 - alarm_date_time)
if time_difference.total_seconds() > 24 * 60 * 60:
print("The difference is above 24 hours. Creo allarme battery")
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
db_conn.commit()
else:
print("Creo allarme battery")
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
db_conn.commit()
def checkSogliePh(db_conn, db_cursor, unit, tool, node_num, date_time, ph_value, soglie_str):
soglie = json.loads(soglie_str)
soglia = next((item for item in soglie if item.get("type") == "PH Link"), None)
ph = soglia["data"]["ph"]
ph_uno = soglia["data"]["ph_uno"]
ph_due = soglia["data"]["ph_due"]
ph_tre = soglia["data"]["ph_tre"]
ph_uno_value = soglia["data"]["ph_uno_value"]
ph_due_value = soglia["data"]["ph_due_value"]
ph_tre_value = soglia["data"]["ph_tre_value"]
ph_uno_sms = soglia["data"]["ph_uno_sms"]
ph_due_sms = soglia["data"]["ph_due_sms"]
ph_tre_sms = soglia["data"]["ph_tre_sms"]
ph_uno_email = soglia["data"]["ph_uno_email"]
ph_due_email = soglia["data"]["ph_due_email"]
ph_tre_email = soglia["data"]["ph_tre_email"]
alert_uno = 0
alert_due = 0
alert_tre = 0
ph_value_prev = 0
#print(unit, tool, node_num, date_time)
query = "select XShift, EventDate, EventTime from ELABDATADISP where UnitName=%s and ToolNameID=%s and NodeNum=%s and concat(EventDate, ' ', EventTime) < %s order by concat(EventDate, ' ', EventTime) desc limit 1"
db_cursor.execute(query, [unit, tool, node_num, date_time])
resultPhPrev = db_cursor.fetchall()
if(len(resultPhPrev) > 0):
ph_value_prev = float(resultPhPrev[0]["XShift"])
#ph_value = random.uniform(7, 10)
print(tool, unit, node_num, date_time, ph_value)
#print(ph_value_prev, ph_value)
if(ph == 1):
if(ph_tre == 1 and ph_tre_value != '' and float(ph_value) > float(ph_tre_value)):
if(ph_value_prev <= float(ph_tre_value)):
alert_tre = 1
if(ph_due == 1 and ph_due_value != '' and float(ph_value) > float(ph_due_value)):
if(ph_value_prev <= float(ph_due_value)):
alert_due = 1
if(ph_uno == 1 and ph_uno_value != '' and float(ph_value) > float(ph_uno_value)):
if(ph_value_prev <= float(ph_uno_value)):
alert_uno = 1
#print(ph_value, ph, " livelli:", ph_uno, ph_due, ph_tre, " value:", ph_uno_value, ph_due_value, ph_tre_value, " sms:", ph_uno_sms, ph_due_sms, ph_tre_sms, " email:", ph_uno_email, ph_due_email, ph_tre_email)
if(alert_tre == 1):
print("level3",tool, unit, node_num, date_time, ph_value)
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 3, "pH", ph_tre_email, ph_tre_sms])
db_conn.commit()
elif(alert_due == 1):
print("level2",tool, unit, node_num, date_time, ph_value)
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 2, "pH", ph_due_email, ph_due_sms])
db_conn.commit()
elif(alert_uno == 1):
print("level1",tool, unit, node_num, date_time, ph_value)
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 1, "pH", ph_uno_email, ph_uno_sms])
db_conn.commit()
def getDataFromCsv(pathFile):
try:
folder_path, file_with_extension = os.path.split(pathFile)
file_name, _ = os.path.splitext(file_with_extension)#toolname
serial_number = file_name.split("_")[0]
query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
query_node_depth = "SELECT depth, t.soglie, n.num as node_num FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"
query_nodes = "SELECT t.soglie, n.num as node_num, n.nodetype_id FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor(dictionary=True)
cursor.execute(query, [serial_number])
result = cursor.fetchall()
unit = result[0]["unit_name"]
tool = result[0]["tool_name"]
cursor.execute(query_node_depth, [unit, tool])
resultNode = cursor.fetchall()
cursor.execute(query_nodes, [unit, tool])
resultAllNodes = cursor.fetchall()
#print(resultAllNodes)
node_num_piezo = next((item for item in resultAllNodes if item.get('nodetype_id') == 2), None)["node_num"]
node_num_baro = next((item for item in resultAllNodes if item.get('nodetype_id') == 3), None)["node_num"]
node_num_conductivity = next((item for item in resultAllNodes if item.get('nodetype_id') == 94), None)["node_num"]
node_num_ph = next((item for item in resultAllNodes if item.get('nodetype_id') == 97), None)["node_num"]
#print(node_num_piezo, node_num_baro, node_num_conductivity, node_num_ph)
# 2 piezo
# 3 baro
# 94 conductivity
# 97 ph
node_depth = float(resultNode[0]["depth"]) #node piezo depth
with open(pathFile, 'r', encoding='ISO-8859-1') as file:
data = file.readlines()
data = [row.rstrip() for row in data]
data = data[10:]  # drop the 10 header lines
for row in data:
row = row.split(",")
date_time = datetime.strptime(row[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
date_time = date_time.split(" ")
date = date_time[0]
time = date_time[1]
temperature_unit = float(row[2])
battery_perc = float(row[3])
pressure_baro = float(row[4])*1000  # kPa -> Pa for the elab pressure column
conductivity = float(row[6])
ph = float(row[11])
temperature_piezo = float(row[14])
pressure = float(row[16])*1000
depth = (node_depth * -1) + float(row[17])  # add to the node elevation (node depth taken as negative)
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_baro, date, time, battery_perc, temperature_unit, pressure_baro])
cursor.execute(queryInsElab, [unit, tool, node_num_baro, date, time, pressure_baro])
conn.commit()
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_conductivity, date, time, battery_perc, temperature_unit, conductivity])
cursor.execute(queryInsElab, [unit, tool, node_num_conductivity, date, time, conductivity])
conn.commit()
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_ph, date, time, battery_perc, temperature_unit, ph])
cursor.execute(queryInsElab, [unit, tool, node_num_ph, date, time, ph])
conn.commit()
checkSogliePh(conn, cursor, unit, tool, node_num_ph, date_time[0]+" "+date_time[1], ph, resultNode[0]["soglie"])
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_piezo, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
cursor.execute(queryInsElab, [unit, tool, node_num_piezo, date, time, temperature_piezo, depth, pressure])
conn.commit()
checkBatteryLevel(conn, cursor, unit, date_time[0]+" "+date_time[1], battery_perc)
except Error as e:
print('Error:', e)
def main():
getDataFromCsv(sys.argv[1])
if __name__ == '__main__':
main()

80
src/send_orchestrator.py Executable file
View File

@@ -0,0 +1,80 @@
#!.venv/bin/python
"""
Orchestrator for the workers that send data to customers
"""
# Import necessary libraries
import logging
import asyncio
# Import custom modules for configuration and database connection
from utils.config import loader_send_data as setting
from utils.database import WorkflowFlags
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
from utils.connect.send_data import process_workflow_record
from utils.general import alterna_valori
# from utils.ftp.send_data import ftp_send_elab_csv_to_customer, api_send_elab_csv_to_customer, \
# ftp_send_raw_csv_to_customer, api_send_raw_csv_to_customer
# Initialize the logger for this module
logger = logging.getLogger()
# Delay between one CSV processing run and the next (in seconds)
ELAB_PROCESSING_DELAY = 0.2
# Wait time when there are no records to process
NO_RECORD_SLEEP = 30
async def worker(worker_id: int, cfg: dict, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'invio dei dati.
Il worker preleva un record dal database che indica dati pronti per
l'invio (sia raw che elaborati), li processa e attende prima di
iniziare un nuovo ciclo.
Args:
worker_id (int): L'ID univoco del worker.
cfg (dict): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Set the logging context for this worker
worker_context.set(f"W{worker_id:02d}")
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.info("Avviato")
alternatore = alterna_valori(
[WorkflowFlags.CSV_RECEIVED, WorkflowFlags.SENT_RAW_DATA],
[WorkflowFlags.DATA_ELABORATED, WorkflowFlags.SENT_ELAB_DATA],
)
while True:
try:
logger.info("Inizio elaborazione")
status, fase = next(alternatore)
record = await get_next_csv_atomic(pool, cfg.dbrectable, status, fase)
if record:
await process_workflow_record(record, fase, cfg, pool)
await asyncio.sleep(ELAB_PROCESSING_DELAY)
else:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
except Exception as e: # pylint: disable=broad-except
logger.error("Errore durante l'esecuzione: %s", e, exc_info=debug_mode)
await asyncio.sleep(1)
async def main():
"""Funzione principale che avvia il send_orchestrator."""
await run_orchestrator(setting.Config, worker)
if __name__ == "__main__":
asyncio.run(main())
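
alterna_valori is not shown in this diff; a plausible minimal sketch is an endless generator that alternates between the (status, fase) pairs it was given, which is what the next(alternatore) call above relies on:

from itertools import cycle

def alterna_valori(*pairs):
    """Yield the given pairs forever, in round-robin order."""
    yield from cycle(pairs)

alternatore = alterna_valori(("CSV_RECEIVED", "SENT_RAW_DATA"),
                             ("DATA_ELABORATED", "SENT_ELAB_DATA"))
print(next(alternatore))  # ('CSV_RECEIVED', 'SENT_RAW_DATA')
print(next(alternatore))  # ('DATA_ELABORATED', 'SENT_ELAB_DATA')
print(next(alternatore))  # ('CSV_RECEIVED', 'SENT_RAW_DATA') again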

View File

@@ -0,0 +1,3 @@
"""Config ini setting"""
from pathlib import Path
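# With this file at src/utils/config/__init__.py, four .parent hops resolve to
# the repository root, so the env/ directory with the .ini files sits beside src/
# (assumed project layout).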
ENV_PARENT_PATH = Path(__file__).resolve().parent.parent.parent.parent

View File

@@ -0,0 +1,25 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
def __init__(self):
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/email.ini"])
# email setting
self.from_addr = c.get("address", "from")
self.to_addr = c.get("address", "to")
self.cc_addr = c.get("address", "cc")
self.bcc_addr = c.get("address", "bcc")
self.subject = c.get("msg", "subject")
self.body = c.get("msg", "body")
self.smtp_addr = c.get("smtp", "address")
self.smtp_port = c.getint("smtp", "port")
self.smtp_user = c.get("smtp", "user")
self.smtp_passwd = c.get("smtp", "password")
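
The class reads env/email.ini; a minimal example with placeholder values for the options it consumes:

[address]
from = sender@example.com
to = ops@example.com
cc =
bcc =

[msg]
subject = Matlab elaboration error
body = See the attached error report.

[smtp]
address = smtp.example.com
port = 587
user = sender@example.com
password = change-me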

View File

@@ -2,16 +2,21 @@
"""
from configparser import ConfigParser
import json
from . import ENV_PARENT_PATH
class config:
class Config:
def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'ftp.ini' and 'db.ini' for FTP server, CSV, logging, and database.
"""
c = ConfigParser()
c.read(["/etc/aseftp/ftpcsvreceiver.ini", "./ftpcsvreceiver.ini",
"./ftpReceiver/ftpcsvreceiver.ini"])
c.read([f"{ENV_PARENT_PATH}/env/ftp.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# FTP setting
self.service_port = c.getint("ftpserver", "service_port")
self.firstport = c.getint("ftpserver", "firstPort")
self.logfilename = c.get("ftpserver", "logFilename")
self.proxyaddr = c.get("ftpserver", "proxyAddr")
self.portrangewidth = c.getint("ftpserver", "portRangeWidth")
self.virtpath = c.get("ftpserver", "virtpath")
@@ -24,8 +29,8 @@ class config:
# CSV FILE setting
self.csvfs = c.get("csvfs", "path")
# LOADER setting
self.elablog = c.get("csvelab", "logFilename")
# LOG setting
self.logfilename = c.get("logging", "logFilename")
# DB setting
self.dbhost = c.get("db", "hostname")
@@ -33,19 +38,41 @@ class config:
self.dbuser = c.get("db", "user")
self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName")
self.dbschema = c.get("db", "dbSchema")
self.dbusertable = c.get("db", "userTableName")
self.dbrectable = c.get("db", "recTableName")
self.dbdataraw = c.get("db", "rawTableName")
self.max_retries = c.getint("db", "maxRetries")
# Tables
self.dbusertable = c.get("tables", "userTableName")
self.dbrectable = c.get("tables", "recTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbnodes = c.get("tables", "nodesTableName")
# unit setting
self.units_name = c.get("unit", "Names").split('|')
self.units_type = c.get("unit", "Types").split('|')
self.units_alias = {
key: value
for item in c.get("unit", "Alias").split('|')
for key, value in [item.split(':', 1)]
}
#self.units_header = {key: int(value) for pair in c.get("unit", "Headers").split('|') for key, value in [pair.split(':')]}
# tool setting
self.tools_name = c.get("tool", "Names").split('|')
self.tools_type = c.get("tool", "Types").split('|')
self.tools_alias = {
key: key if value == '=' else value
for item in c.get("tool", "Alias").split('|')
for key, value in [item.split(':', 1)]
}
# csv info
self.csv_infos = c.get("csv", "Infos").split('|')
# TS pini path match
self.ts_pini_path_match = {
key: key[1:-1] if value == '=' else value
for item in c.get("ts_pini", "path_match").split('|')
for key, value in [item.split(':', 1)]
}
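For reference, the Alias options above are pipe-separated key:value pairs, and for tools a value of '=' means the key maps to itself. A hypothetical fragment of env/ftp.ini and the dictionaries it would produce (values invented for illustration):

[unit]
Alias = CO_:CONCRETE|GW:GATEWAY
[tool]
Alias = TLP:=|VUL:VULINK

# resulting attributes (illustrative):
# cfg.units_alias == {'CO_': 'CONCRETE', 'GW': 'GATEWAY'}
# cfg.tools_alias == {'TLP': 'TLP', 'VUL': 'VULINK'}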

View File

@@ -0,0 +1,36 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'load.ini' and 'db.ini' for logging, worker, database, and table configurations.
"""
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/load.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# LOG setting
self.logfilename = c.get("logging", "logFilename")
# Worker setting
self.max_threads = c.getint("threads", "max_num")
# DB setting
self.dbhost = c.get("db", "hostname")
self.dbport = c.getint("db", "port")
self.dbuser = c.get("db", "user")
self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName")
self.max_retries = c.getint("db", "maxRetries")
# Tables
self.dbusertable = c.get("tables", "userTableName")
self.dbrectable = c.get("tables", "recTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbnodes = c.get("tables", "nodesTableName")

View File

@@ -0,0 +1,46 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'elab.ini' and 'db.ini' for logging, worker, database, table, tool, and Matlab configurations.
"""
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/elab.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# LOG setting
self.logfilename = c.get("logging", "logFilename")
# Worker setting
self.max_threads = c.getint("threads", "max_num")
# DB setting
self.dbhost = c.get("db", "hostname")
self.dbport = c.getint("db", "port")
self.dbuser = c.get("db", "user")
self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName")
self.max_retries = c.getint("db", "maxRetries")
# Tables
self.dbusertable = c.get("tables", "userTableName")
self.dbrectable = c.get("tables", "recTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbnodes = c.get("tables", "nodesTableName")
# Tool
self.elab_status = c.get("tool", "elab_status").split('|')
# Matlab
self.matlab_runtime = c.get("matlab", "runtime")
self.matlab_func_path = c.get("matlab", "func_path")
self.matlab_timeout = c.getint("matlab", "timeout")
self.matlab_error = c.get("matlab", "error")
self.matlab_error_path = c.get("matlab", "error_path")

View File

@@ -0,0 +1,36 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'send.ini' and 'db.ini' for logging, worker, database, and table configurations.
"""
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/send.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# LOG setting
self.logfilename = c.get("logging", "logFilename")
# Worker setting
self.max_threads = c.getint("threads", "max_num")
# DB setting
self.dbhost = c.get("db", "hostname")
self.dbport = c.getint("db", "port")
self.dbuser = c.get("db", "user")
self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName")
self.max_retries = c.getint("db", "maxRetries")
# Tables
self.dbusertable = c.get("tables", "userTableName")
self.dbrectable = c.get("tables", "recTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbnodes = c.get("tables", "nodesTableName")

View File

@@ -0,0 +1,22 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
"""
Handles configuration loading for database settings to load ftp users.
"""
def __init__(self):
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/db.ini"])
# DB setting
self.dbhost = c.get("db", "hostname")
self.dbport = c.getint("db", "port")
self.dbuser = c.get("db", "user")
self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName")
self.max_retries = c.getint("db", "maxRetries")

View File

@@ -0,0 +1,91 @@
import os
from datetime import datetime
import logging
import re
import mysql.connector
from utils.database.connection import connetti_db
from utils.csv.parser import extract_value
logger = logging.getLogger(__name__)
def on_file_received(self: object, file: str) -> None:
"""
Processes a received file, extracts relevant information, and inserts it into the database.
If the file is empty, it is removed. Otherwise, it extracts unit and tool
information from the filename and the first few lines of the CSV, handles
aliases, and then inserts the data into the configured database table.
Args:
file (str): The path to the received file."""
if not os.stat(file).st_size:
os.remove(file)
logger.info(f'File {file} is empty: removed.')
else:
cfg = self.cfg
path, filenameExt = os.path.split(file)
filename, fileExtension = os.path.splitext(filenameExt)
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
new_filename = f"{filename}_{timestamp}{fileExtension}"
os.rename(file, f"{path}/{new_filename}")
if (fileExtension.upper() in (cfg.fileext)):
with open(f"{path}/{new_filename}", 'r', encoding='utf-8', errors='ignore') as csvfile:
lines = csvfile.readlines()
unit_name = extract_value(cfg.units_name, filename, str(lines[0:10]))
unit_type = extract_value(cfg.units_type, filename, str(lines[0:10]))
tool_name = extract_value(cfg.tools_name, filename, str(lines[0:10]))
tool_type = extract_value(cfg.tools_type, filename, str(lines[0:10]))
tool_info = "{}"
# if an alias exists in units_alias, use its value; check both the full unit_type and its first 3 characters (for CO_xxxxx)
upper_unit_type = unit_type.upper()
unit_type = cfg.units_alias.get(upper_unit_type) or \
cfg.units_alias.get(upper_unit_type[:3]) or \
upper_unit_type
upper_tool_type = tool_type.upper()
tool_type = cfg.tools_alias.get(upper_tool_type) or \
cfg.tools_alias.get(upper_tool_type[:3]) or \
upper_tool_type
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
logger.error(f'{e}')
return  # without a database connection the file cannot be registered
# Create a cursor
cur = conn.cursor()
# to be extracted into a separate module
if (unit_type.upper() == "ISI CSV LOG" and tool_type.upper() == "VULINK" ):
serial_number = filename.split('_')[0]
tool_info = f'{{"serial_number": {serial_number}}}'
try:
cur.execute(f"SELECT unit_name, tool_name FROM {cfg.dbname}.vulink_tools WHERE serial_number = '{serial_number}'")
unit_name, tool_name = cur.fetchone()
except Exception as e:
logger.warning(f'{tool_type} serial number {serial_number} not found in table vulink_tools. {e}')
# to be extracted into a separate module
if (unit_type.upper() == "STAZIONETOTALE" and tool_type.upper() == "INTEGRITY MONITOR" ):
escaped_keys = [re.escape(key) for key in cfg.ts_pini_path_match.keys()]
stazione = extract_value(escaped_keys, filename)
if stazione:
tool_info = f'{{"Stazione": "{cfg.ts_pini_path_match.get(stazione)}"}}'
try:
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (username, filename, unit_name, unit_type, tool_name, tool_type, tool_data, tool_info) VALUES (%s,%s, %s, %s, %s, %s, %s, %s)", (self.username, new_filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines), tool_info))
conn.commit()
conn.close()
except Exception as e:
logger.error(f'File {new_filename} not loaded. Held in user path.')
logger.error(f'{e}')
"""
else:
os.remove(file)
logger.info(f'File {new_filename} removed.')
"""

View File

@@ -0,0 +1,473 @@
from ftplib import FTP, FTP_TLS, all_errors
from io import BytesIO
import logging
import aiomysql
from datetime import datetime
from utils.database.loader_action import update_status, unlock
from utils.database.action_query import get_data_as_csv, get_tool_info, get_elab_timestamp
from utils.database import WorkflowFlags
logger = logging.getLogger(__name__)
class FTPConnection:
"""
Manages an FTP or FTP_TLS connection, providing a context manager for automatic disconnection.
"""
def __init__(self, host, port=21, use_tls=False, user='', passwd='',
passive=True, timeout=None, debug=0, context=None):
self.use_tls = use_tls
if use_tls:
self.ftp = FTP_TLS(context=context, timeout=timeout) if context else FTP_TLS(timeout=timeout)
else:
self.ftp = FTP(timeout=timeout)
if debug > 0:
self.ftp.set_debuglevel(debug)
self.ftp.connect(host, port)
self.ftp.login(user, passwd)
self.ftp.set_pasv(passive)
if use_tls:
self.ftp.prot_p()
def __getattr__(self, name):
"""Delega tutti i metodi non definiti all'oggetto FTP sottostante"""
return getattr(self.ftp, name)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.ftp.quit()
async def ftp_send_raw_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, pool: object) -> bool:
"""Placeholder: sending raw CSV data via FTP is not implemented yet; always returns True."""
return True
async def ftp_send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, csv_data: str, pool: object) -> bool:
"""
Sends elaborated CSV data to a customer via FTP.
Retrieves FTP connection details from the database based on the unit name,
then establishes an FTP connection and uploads the CSV data.
Args:
cfg (dict): Configuration dictionary (not directly used in this function but passed for consistency).
id (int): The ID of the record being processed (used for logging).
unit (str): The name of the unit associated with the data.
tool (str): The name of the tool associated with the data.
csv_data (str): The CSV data as a string to be sent.
pool (object): The database connection pool.
Returns:
bool: True if the CSV data was sent successfully, False otherwise.
"""
query = """
select ftp_addrs, ftp_user, ftp_passwd, ftp_parm, ftp_filename, ftp_target, duedate from units
where name = '%s'";'
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
try:
await cur.execute(query, (unit,))
send_ftp_info = await cur.fetchone()
logger.info(f"id {id} - {unit} - {tool}: estratti i dati per invio via ftp")
except Exception as e:
logger.error(f"id {id} - {unit} - {tool} - errore nella query per invio ftp: {e}")
return False
try:
# Convert the CSV string to bytes
csv_bytes = csv_data.encode('utf-8')
csv_buffer = BytesIO(csv_bytes)
ftp_parms = await parse_ftp_parms(send_ftp_info["ftp_parm"])
use_tls = 'ssl_version' in ftp_parms
passive = ftp_parms.get('passive', True)
port = ftp_parms.get('port', 21)
# FTP connection
with FTPConnection(host=send_ftp_info["ftp_addrs"], port=port, use_tls=use_tls, user=send_ftp_info["ftp_user"], passwd=send_ftp_info["ftp_passwd"], passive=passive) as ftp:
# Change to the target directory
if send_ftp_info["ftp_target"] != "/":
ftp.cwd(send_ftp_info["ftp_target"])
# Upload the file
result = ftp.storbinary(f'STOR {send_ftp_info["ftp_filename"]}', csv_buffer)
if result.startswith('226'):
logger.info(f"File {send_ftp_info['ftp_filename']} inviato con successo")
return True
else:
logger.error(f"Errore nell'invio: {result}")
return False
except all_errors as e:
logger.error(f"Errore FTP: {e}")
return False
except Exception as e:
logger.error(f"Errore generico: {e}")
return False
finally:
csv_buffer.close()
async def parse_ftp_parms(ftp_parms: str) -> dict:
"""
Parses a string of FTP parameters into a dictionary.
Args:
ftp_parms (str): A string containing key-value pairs separated by commas,
with keys and values separated by '=>'.
Returns:
dict: A dictionary where keys are parameter names (lowercase) and values are their parsed values.
"""
# Strip whitespace and split on commas
pairs = ftp_parms.split(',')
result = {}
for pair in pairs:
if '=>' in pair:
key, value = pair.split('=>', 1)
key = key.strip().lower()
value = value.strip().lower()
# Convert values to the appropriate types
if value.isdigit():
value = int(value)
elif value == '':
value = None
result[key] = value
return result
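As an example of the ftp_parm format this parser expects (a hypothetical value; the real ones are stored in the units table):

# parse_ftp_parms("port => 2121, passive => 1, ssl_version => TLS")
#   -> {'port': 2121, 'passive': 1, 'ssl_version': 'tls'}
# per the caller above, the mere presence of the 'ssl_version' key switches the connection to FTP_TLS.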
async def process_workflow_record(record: tuple, fase: int, cfg: dict, pool: object):
"""
Elabora un singolo record del workflow in base alla fase specificata.
Args:
record: Tupla contenente i dati del record
fase: Fase corrente del workflow
cfg: Configurazione
pool: Pool di connessioni al database
"""
# Estrazione e normalizzazione dei dati del record
id, unit_type, tool_type, unit_name, tool_name = [
x.lower().replace(" ", "_") if isinstance(x, str) else x
for x in record
]
try:
# Retrieve the main tool information
tool_elab_info = await get_tool_info(fase, unit_name.upper(), tool_name.upper(), pool)
if tool_elab_info:
timestamp_matlab_elab = await get_elab_timestamp(id, pool)
# Check whether processing can proceed
if not _should_process(tool_elab_info, timestamp_matlab_elab):
logger.info(f"id {id} - {unit_name} - {tool_name} {tool_elab_info['duedate']}: "
"invio dati non eseguito - due date raggiunta.")
await update_status(cfg, id, fase, pool)
return
# Route according to the phase
success = await _route_by_phase(fase, tool_elab_info, cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
if success:
await update_status(cfg, id, fase, pool)
else:
await update_status(cfg, id, fase, pool)
except Exception as e:
logger.error(f"Errore durante elaborazione id {id} - {unit_name} - {tool_name}: {e}")
raise
finally:
await unlock(cfg, id, pool)
def _should_process(tool_elab_info: dict, timestamp_matlab_elab: datetime) -> bool:
"""
Determines if a record should be processed based on its due date.
Args:
tool_elab_info (dict): A dictionary containing information about the tool and its due date.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
Returns:
bool: True if the record should be processed, False otherwise.
"""
duedate = tool_elab_info.get("duedate")
# If there is no due date (empty or null), the record can be processed
if not duedate or duedate in ('0000-00-00 00:00:00', ''):
return True
# If timestamp_matlab_elab is None, use the current timestamp
comparison_timestamp = timestamp_matlab_elab if timestamp_matlab_elab is not None else datetime.now()
# Convert duedate to datetime if it is a string
if isinstance(duedate, str):
duedate = datetime.strptime(duedate, '%Y-%m-%d %H:%M:%S')
# Make sure comparison_timestamp is a datetime
if isinstance(comparison_timestamp, str):
comparison_timestamp = datetime.strptime(comparison_timestamp, '%Y-%m-%d %H:%M:%S')
return duedate > comparison_timestamp
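A quick worked example of this check, with invented values:

# duedate = '2025-06-30 00:00:00', timestamp_matlab_elab = 2025-07-15 10:00:00
#   -> duedate > comparison_timestamp is False, so the record is skipped (due date reached)
# duedate = '' or '0000-00-00 00:00:00'
#   -> the record is always processed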
async def _route_by_phase(fase: int, tool_elab_info: dict, cfg: dict, id: int, unit_name: str, tool_name: str,
timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Routes the processing of a workflow record based on the current phase.
This function acts as a dispatcher, calling the appropriate handler function
for sending elaborated data or raw data based on the `fase` (phase) parameter.
Args:
fase (int): The current phase of the workflow (e.g., WorkflowFlags.SENT_ELAB_DATA, WorkflowFlags.SENT_RAW_DATA).
tool_elab_info (dict): A dictionary containing information about the tool and its elaboration status.
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the data sending operation was successful or no action was needed, False otherwise.
"""
if fase == WorkflowFlags.SENT_ELAB_DATA:
return await _handle_elab_data_phase(tool_elab_info, cfg, id, unit_name,
tool_name, timestamp_matlab_elab, pool)
elif fase == WorkflowFlags.SENT_RAW_DATA:
return await _handle_raw_data_phase(tool_elab_info, cfg, id, unit_name,
tool_name, pool)
else:
logger.info(f"id {id} - {unit_name} - {tool_name}: nessuna azione da eseguire.")
return True
async def _handle_elab_data_phase(tool_elab_info: dict, cfg: dict, id: int, unit_name: str, tool_name: str,
timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Handles the phase of sending elaborated data.
This function checks if elaborated data needs to be sent via FTP or API
based on the `tool_elab_info` and calls the appropriate sending function.
Args:
tool_elab_info (dict): A dictionary containing information about the tool and its elaboration status,
including flags for FTP and API sending.
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the data sending operation was successful or no action was needed, False otherwise.
"""
# FTP send for elaborated data
if tool_elab_info.get('ftp_send'):
return await _send_elab_data_ftp(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
# API send for elaborated data
elif _should_send_elab_api(tool_elab_info):
return await _send_elab_data_api(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
return True
async def _handle_raw_data_phase(tool_elab_info: dict, cfg: dict, id: int, unit_name: str, tool_name: str, pool: object) -> bool:
"""
Handles the phase of sending raw data.
This function checks if raw data needs to be sent via FTP or API
based on the `tool_elab_info` and calls the appropriate sending function.
Args:
tool_elab_info (dict): A dictionary containing information about the tool and its raw data sending status,
including flags for FTP and API sending.
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
pool (object): The database connection pool.
Returns:
bool: True if the data sending operation was successful or no action was needed, False otherwise.
"""
# FTP send for raw data
if tool_elab_info.get('ftp_send_raw'):
return await _send_raw_data_ftp(cfg, id, unit_name, tool_name, pool)
# API send for raw data
elif _should_send_raw_api(tool_elab_info):
return await _send_raw_data_api(cfg, id, unit_name, tool_name, pool)
return True
def _should_send_elab_api(tool_elab_info: dict) -> bool:
"""Verifica se i dati elaborati devono essere inviati via API."""
return (tool_elab_info.get('inoltro_api') and
tool_elab_info.get('api_send') and
tool_elab_info.get('inoltro_api_url', '').strip())
def _should_send_raw_api(tool_elab_info: dict) -> bool:
"""Verifica se i dati raw devono essere inviati via API."""
return (tool_elab_info.get('inoltro_api_raw') and
tool_elab_info.get('api_send_raw') and
tool_elab_info.get('inoltro_api_url_raw', '').strip())
async def _send_elab_data_ftp(cfg: dict, id: int, unit_name: str, tool_name: str, timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Sends elaborated data via FTP.
This function retrieves the elaborated CSV data and attempts to send it
to the customer via FTP. It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the FTP sending was successful, False otherwise.
"""
try:
elab_csv = await get_data_as_csv(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
if not elab_csv:
return False
print(elab_csv)
# if await send_elab_csv_to_customer(cfg, id, unit_name, tool_name, elab_csv, pool):
if True: # Placeholder for testing
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio FTP fallito.")
return False
except Exception as e:
logger.error(f"Errore invio FTP elab data id {id}: {e}")
return False
async def _send_elab_data_api(cfg: dict, id: int, unit_name: str, tool_name: str, timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Sends elaborated data via API.
This function retrieves the elaborated CSV data and attempts to send it
to the customer via an API. It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the API sending was successful, False otherwise.
"""
try:
elab_csv = await get_data_as_csv(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
if not elab_csv:
return False
print(elab_csv)
# if await send_elab_csv_to_customer(cfg, id, unit_name, tool_name, elab_csv, pool):
if True: # Placeholder for testing
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio API fallito.")
return False
except Exception as e:
logger.error(f"Errore invio API elab data id {id}: {e}")
return False
async def _send_raw_data_ftp(cfg: dict, id: int, unit_name: str, tool_name: str, pool: object) -> bool:
"""
Sends raw data via FTP.
This function attempts to send raw CSV data to the customer via FTP.
It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
pool (object): The database connection pool.
Returns:
bool: True if the FTP sending was successful, False otherwise.
"""
try:
# if await ftp_send_raw_csv_to_customer(cfg, id, unit_name, tool_name, pool):
if True: # Placeholder for testing
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio FTP raw fallito.")
return False
except Exception as e:
logger.error(f"Errore invio FTP raw data id {id}: {e}")
return False
async def _send_raw_data_api(cfg: dict, id: int, unit_name: str, tool_name: str, pool: object) -> bool:
"""
Sends raw data via API.
This function attempts to send raw CSV data to the customer via an API.
It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
pool (object): The database connection pool.
Returns:
bool: True if the API sending was successful, False otherwise.
"""
try:
# if await api_send_raw_csv_to_customer(cfg, id, unit_name, tool_name, pool):
if True: # Placeholder for testing
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio API raw fallito.")
return False
except Exception as e:
logger.error(f"Errore invio API raw data id {id}: {e}")
return False

View File

@@ -0,0 +1,47 @@
import smtplib
import logging
from email.message import EmailMessage
from utils.config import loader_email as setting
cfg = setting.Config()
logger = logging.getLogger(__name__)
async def send_error_email(unit_name: str, tool_name: str, matlab_cmd: str, matlab_error: str, errors: list, warnings: list) -> None:
"""
Sends an error email containing details about a MATLAB processing failure.
The email includes information about the unit, tool, MATLAB command, error message,
and lists of specific errors and warnings encountered.
Args:
unit_name (str): The name of the unit involved in the processing.
tool_name (str): The name of the tool involved in the processing.
matlab_cmd (str): The MATLAB command that was executed.
matlab_error (str): The main MATLAB error message.
errors (list): A list of detailed error messages from MATLAB.
warnings (list): A list of detailed warning messages from MATLAB.
"""
# Create the message object
msg = EmailMessage()
msg['Subject'] = cfg.subject
msg['From'] = cfg.from_addr
msg['To'] = cfg.to_addr
msg['Cc'] = cfg.cc_addr
msg['Bcc'] = cfg.bcc_addr
MatlabErrors = "<br/>".join(errors)
MatlabWarnings = "<br/>".join(dict.fromkeys(warnings))
# Set the message content as HTML
msg.add_alternative(cfg.body.format(unit=unit_name, tool=tool_name, matlab_cmd=matlab_cmd, matlab_error=matlab_error,
MatlabErrors=MatlabErrors, MatlabWarnings=MatlabWarnings), subtype='html')
try:
# Connect to the SMTP server
with smtplib.SMTP(cfg.smtp_addr, cfg.smtp_port) as server:
server.starttls() # Start TLS encryption for a secure connection
server.login(cfg.smtp_user, cfg.smtp_passwd) # Authenticate with the server
server.send_message(msg) # Send the email
logger.info("Email inviata con successo!")
except Exception as e:
logger.error(f"Errore durante l'invio dell'email: {e}")

View File

@@ -0,0 +1,159 @@
import os
import mysql.connector
import logging
from hashlib import sha256
from pathlib import Path
from utils.database.connection import connetti_db
logger = logging.getLogger(__name__)
def ftp_SITE_ADDU(self: object, line: str) -> None:
"""
Adds a virtual user, creates their directory, and saves their details to the database.
Args:
line (str): A string containing the username and password separated by a space.
"""
cfg = self.cfg
try:
parms = line.split()
user = os.path.basename(parms[0]) # Extract the username
password = parms[1] # Get the password
hash = sha256(password.encode("UTF-8")).hexdigest() # Hash the password
except IndexError:
self.respond('501 SITE ADDU failed. Command needs 2 arguments')
else:
try:
# Create the user's directory
Path(cfg.virtpath + user).mkdir(parents=True, exist_ok=True)
except Exception as e:
self.respond(f'551 Error in create virtual user path: {e}')
else:
try:
# Add the user to the authorizer
self.authorizer.add_user(str(user),
hash, cfg.virtpath + "/" + user, perm=cfg.defperm)
# Save the user to the database
# Define the database connection
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
print(f"Error: {e}")
logger.error(f'{e}')
# Create a cursor
cur = conn.cursor()
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('{user}', '{hash}', '{cfg.virtpath + user}', '{cfg.defperm}')")
conn.commit()
conn.close()
logger.info(f"User {user} created.")
self.respond('200 SITE ADDU successful.')
except Exception as e:
self.respond(f'501 SITE ADDU failed: {e}.')
print(e)
def ftp_SITE_DISU(self: object, line: str) -> None:
"""
Removes a virtual user from the authorizer and marks them as deleted in the database.
Args:
line (str): A string containing the username to be disabled.
"""
cfg = self.cfg
parms = line.split()
user = os.path.basename(parms[0]) # Extract the username
try:
# Remove the user from the authorizer
self.authorizer.remove_user(str(user))
# Delete the user from database
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
print(f"Error: {e}")
logger.error(f'{e}')
# Create a cursor
cur = conn.cursor()
cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = now() WHERE ftpuser = '{user}'")
conn.commit()
conn.close()
logger.info(f"User {user} deleted.")
self.respond('200 SITE DISU successful.')
except Exception as e:
self.respond('501 SITE DISU failed.')
print(e)
def ftp_SITE_ENAU(self: object, line: str) -> None:
"""
Restores a virtual user by updating their status in the database and adding them back to the authorizer.
Args:
line (str): A string containing the username to be enabled.
"""
cfg = self.cfg
parms = line.split()
user = os.path.basename(parms[0]) # Extract the username
try:
# Restore the user into database
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
print(f"Error: {e}")
logger.error(f'{e}')
# Create a cursor
cur = conn.cursor()
try:
cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = null WHERE ftpuser = '{user}'")
conn.commit()
except Exception as e:
logger.error(f"Update DB failed: {e}")
cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE ftpuser = '{user}'")
ftpuser, hash, virtpath, perm = cur.fetchone()
self.authorizer.add_user(ftpuser, hash, virtpath, perm)
try:
Path(cfg.virtpath + ftpuser).mkdir(parents=True, exist_ok=True)
except Exception as e:
self.respond(f'551 Error in create virtual user path: {e}')
conn.close()
logger.info(f"User {user} restored.")
self.respond('200 SITE ENAU successful.')
except Exception as e:
self.respond('501 SITE ENAU failed.')
print(e)
def ftp_SITE_LSTU(self: object, line: str) -> None:
"""
Lists all virtual users from the database.
Args:
line (str): An empty string (no arguments needed for this command).
"""
cfg = self.cfg
users_list = []
try:
# Connect to the database to fetch users
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
print(f"Error: {e}")
logger.error(f'{e}')
# Create a cursor
cur = conn.cursor()
self.push("214-The following virtual users are defined:\r\n")
cur.execute(f'SELECT ftpuser, perm, disabled_at FROM {cfg.dbname}.{cfg.dbusertable}')
for ftpuser, perm, disabled_at in cur.fetchall():
users_list.append(f'Username: {ftpuser}\tPerms: {perm}\tDisabled: {disabled_at}\r\n')
self.push(''.join(users_list))
self.respond("214 LSTU SITE command successful.")
except Exception as e:
self.respond(f'501 list users failed: {e}')

View File

@@ -0,0 +1,236 @@
#!.venv/bin/python
from utils.database.nodes_query import get_nodes_type
from utils.timestamp.date_check import normalizza_data, normalizza_orario
from utils.database.loader_action import find_nearest_timestamp
import logging
import re
from itertools import islice
from datetime import datetime, timedelta
logger = logging.getLogger(__name__)
async def get_data(cfg: object, id: int, pool: object) -> tuple:
"""
Retrieves the filename, unit name, tool name, and tool data for a given record ID from the database.
Args:
cfg (object): Configuration object containing database table name.
id (int): The ID of the record to retrieve.
pool (object): The database connection pool.
Returns:
tuple: A tuple containing filename, unit_name, tool_name, and tool_data.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute(f'select filename, unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}')
filename, unit_name, tool_name, tool_data = await cur.fetchone()
return filename, unit_name, tool_name, tool_data
async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
"""
Processes pipe-separated data from a CSV record into a structured matrix.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record.
pool (object): The database connection pool.
Returns:
list: A list of lists, where each inner list represents a row in the matrix.
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines()
matrice_valori = []
"""
Ciclo su tutte le righe del file CSV, escludendo quelle che:
non hanno il pattern ';|;' perché non sono dati ma è la header
che hanno il pattern 'No RX' perché sono letture non pervenute o in errore
che hanno il pattern '.-' perché sono letture con un numero errato - negativo dopo la virgola
che hanno il pattern 'File Creation' perché vuol dire che c'è stato un errore della centralina
"""
for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
EventDate, EventTime = timestamp.split(' ')
if batlevel == '|':
batlevel = temperature
temperature, rilevazioni = rilevazioni.split(';',1)
''' some readings are missing the temperature and battery level '''
if temperature == '':
temperature = 0
if batlevel == '':
batlevel = 0
valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;') # Strip leading '|;', strip any trailing ';', split on ';|;'
for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
valori = valori_nodo.split(';')
matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori
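To illustrate the input format assumed by this parser, an invented sample row with two nodes of two values each:

# "2025/07/01 12:00:00;3.61;21.5;|;0.123;0.456;|;0.789;0.321"
#   -> EventDate '2025/07/01', EventTime '12:00:00', batlevel '3.61', temperature '21.5'
#   -> valori_nodi == ['0.123;0.456', '0.789;0.321']
#   -> two output rows (NodeNum 1 and 2), each padded with None up to the 19 value columns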
async def make_ain_din_matrix(cfg: object, id: int, pool: object) -> list:
"""
Processes analog and digital input data from a CSV record into a structured matrix.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record.
pool (object): The database connection pool.
Returns:
list: A list of lists, where each inner list represents a row in the matrix.
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines()
matrice_valori = []
pattern = r'^(?:\d{4}\/\d{2}\/\d{2}|\d{2}\/\d{2}\/\d{4}) \d{2}:\d{2}:\d{2}(?:;\d+\.\d+){2}(?:;\d+){4}$'
if node_ains or node_dins:
for riga in [riga for riga in righe if re.match(pattern, riga)]:
timestamp, batlevel, temperature, analog_input1, analog_input2, digital_input1, digital_input2 = riga.split(';')
EventDate, EventTime = timestamp.split(' ')
if any(node_ains):
for node_num, analog_act in enumerate([analog_input1, analog_input2], start=1):
matrice_valori.append([UnitName, ToolNameID, node_num, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + [analog_act] + ([None] * (19 - 1)))
else:
logger.info(f"Nessun Ingresso analogico per {UnitName} {ToolNameID}")
if any(node_dins):
start_node = 3 if any(node_ains) else 1
for node_num, digital_act in enumerate([digital_input1, digital_input2], start=start_node):
matrice_valori.append([UnitName, ToolNameID, node_num, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + [digital_act] + ([None] * (19 - 1)))
else:
logger.info(f"Nessun Ingresso digitale per {UnitName} {ToolNameID}")
return matrice_valori
async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
"""
Processes channel-based data from a CSV record into a structured matrix.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record.
pool (object): The database connection pool.
Returns:
list: A list of lists, where each inner list represents a row in the matrix.
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines()
matrice_valori = []
for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3)
EventDate, EventTime = timestamp.split(' ')
valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
valori_iter = iter(valori_splitted)
valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels]
for num_nodo, valori in enumerate(valori_nodi, start=1):
matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori
async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
"""
Processes 'Musa' specific data from a CSV record into a structured matrix.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record.
pool (object): The database connection pool.
Returns:
list: A list of lists, where each inner list represents a row in the matrix.
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines()
matrice_valori = []
for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2)
if timestamp == '':
continue
EventDate, EventTime = timestamp.split(' ')
temperature = rilevazioni.split(';')[0]
logger.info(f'{temperature}, {rilevazioni}')
valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
valori_iter = iter(valori_splitted)
valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels]
for num_nodo, valori in enumerate(valori_nodi, start=1):
matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori
async def make_tlp_matrix(cfg: object, id: int, pool: object) -> list:
"""
Processes 'TLP' specific data from a CSV record into a structured matrix.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record.
pool (object): The database connection pool.
Returns:
list: A list of lists, where each inner list represents a row in the matrix.
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines()
valori_x_nodo = 2
matrice_valori = []
for riga in righe:
timestamp, batlevel, temperature, barometer, rilevazioni = riga.split(';',4)
EventDate, EventTime = timestamp.split(' ')
lista_rilevazioni = rilevazioni.strip(';').split(';')
lista_rilevazioni.append(barometer)
valori_nodi = [lista_rilevazioni[i:i + valori_x_nodo] for i in range(0, len(lista_rilevazioni), valori_x_nodo)]
for num_nodo, valori in enumerate(valori_nodi, start=1):
matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori
async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
"""
Processes 'GD' specific data from a CSV record into a structured matrix.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record.
pool (object): The database connection pool.
Returns:
list: A list of lists, where each inner list represents a row in the matrix.
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines()
matrice_valori = []
pattern = r';-?\d+dB$'
for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, rilevazioni = riga.split(';|;',1)
EventDate, EventTime = timestamp.split(' ')
#logger.debug(f"GD id {id}: {pattern} {rilevazioni}")
if re.search(pattern, rilevazioni):
if len(matrice_valori) == 0:
matrice_valori.append(['RSSI'])
batlevel, temperature, rssi = rilevazioni.split(';')
#logger.debug(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {rssi}")
gd_timestamp = datetime.strptime(f"{normalizza_data(EventDate)} {normalizza_orario(EventTime)}", "%Y-%m-%d %H:%M:%S")
start_timestamp = gd_timestamp - timedelta(seconds=45)
end_timestamp = gd_timestamp + timedelta(seconds=45)
matrice_valori.append([UnitName, ToolNameID.replace("GD", "DT"), 1, f"{start_timestamp:%Y-%m-%d %H:%M:%S}", f"{end_timestamp:%Y-%m-%d %H:%M:%S}", f"{gd_timestamp:%Y-%m-%d %H:%M:%S}", batlevel, temperature, int(rssi[:-2])])
elif all(char == ';' for char in rilevazioni):
pass
elif ';|;' in rilevazioni:
unit_metrics, data = rilevazioni.split(';|;')
batlevel, temperature = unit_metrics.split(';')
#logger.debug(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {data}")
dt_timestamp, dt_batlevel, dt_temperature = await find_nearest_timestamp(cfg, {"timestamp": f"{normalizza_data(EventDate)} {normalizza_orario(EventTime)}", "unit": UnitName, "tool": ToolNameID.replace("GD", "DT"), "node_num": 1}, pool)
EventDate, EventTime = dt_timestamp.strftime('%Y-%m-%d %H:%M:%S').split(' ')
valori = data.split(';')
matrice_valori.append([UnitName, ToolNameID.replace("GD", "DT"), 2, EventDate, EventTime, float(dt_batlevel), float(dt_temperature)] + valori + ([None] * (16 - len(valori))) + [batlevel, temperature, None])
else:
logger.warning(f"GD id {id}: dati non trattati - {rilevazioni}")
return matrice_valori

141
src/utils/csv/loaders.py Normal file
View File

@@ -0,0 +1,141 @@
import asyncio
import tempfile
import os
from utils.database.loader_action import load_data, update_status, unlock
from utils.database import WorkflowFlags
from utils.csv.data_preparation import make_pipe_sep_matrix, make_ain_din_matrix, make_channels_matrix, make_tlp_matrix, make_gd_matrix, make_musa_matrix, get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object, action: str) -> None:
"""
Main loader function to process CSV data based on the specified action.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
action (str): The type of data processing to perform (e.g., "pipe_separator", "analogic_digital").
"""
type_matrix_mapping = {
"pipe_separator": make_pipe_sep_matrix,
"analogic_digital": make_ain_din_matrix,
"channels": make_channels_matrix,
"tlp": make_tlp_matrix,
"gd": make_gd_matrix,
"musa": make_musa_matrix
}
if action in type_matrix_mapping:
function_to_call = type_matrix_mapping[action]
# Create a matrix of values from the data
matrice_valori = await function_to_call(cfg, id, pool)
logger.info("matrice valori creata")
# Load the data into the database
if await load_data(cfg, matrice_valori, pool, type=action):
await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
await unlock(cfg, id, pool)
else:
logger.warning(f"Action '{action}' non riconosciuta.")
async def get_next_csv_atomic(pool: object, table_name: str, status: int, next_status: int) -> tuple:
"""
Retrieves the next available CSV record for processing in an atomic manner.
This function acquires a database connection from the pool, begins a transaction,
and attempts to select and lock a single record from the specified table that
matches the given status and has not yet reached the next_status. It uses
`SELECT FOR UPDATE SKIP LOCKED` to ensure atomicity and prevent other workers
from processing the same record concurrently.
Args:
pool (object): The database connection pool.
table_name (str): The name of the table to query.
status (int): The current status flag that the record must have.
next_status (int): The status flag that the record should NOT have yet.
Returns:
tuple: The next available received record if found, otherwise None.
"""
async with pool.acquire() as conn:
# IMPORTANT: begin an explicit transaction (no autocommit) for this block
await conn.begin()
try:
async with conn.cursor() as cur:
# Use SELECT FOR UPDATE for an atomic lock
await cur.execute(f"""
SELECT id, unit_type, tool_type, unit_name, tool_name
FROM {table_name}
WHERE locked = 0
AND ((status & %s) > 0 OR %s = 0)
AND (status & %s) = 0
ORDER BY id
LIMIT 1
FOR UPDATE SKIP LOCKED
""", (status, status, next_status))
result = await cur.fetchone()
if result:
await cur.execute(f"""
UPDATE {table_name}
SET locked = 1
WHERE id = %s
""", (result[0],))
# Explicit commit to release the lock
await conn.commit()
return result
except Exception as e:
# Rollback on error
await conn.rollback()
raise e
async def main_old_script_loader(cfg: object, id: int, pool: object, script_name: str) -> None:
"""
This function retrieves CSV data, writes it to a temporary file,
executes an external Python script to process it,
and then updates the workflow status in the database.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
script_name (str): The name of the script to execute (without the .py extension).
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
# Create a temporary file
with tempfile.NamedTemporaryFile(mode='w', prefix= filename, suffix='.csv', delete=False) as temp_file:
temp_file.write(ToolData)
temp_filename = temp_file.name
try:
# Use asyncio.subprocess for truly asynchronous execution
process = await asyncio.create_subprocess_exec(
'python3', f'old_scripts/{script_name}.py', temp_filename,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
result_stdout = stdout.decode('utf-8')
result_stderr = stderr.decode('utf-8')
finally:
# Clean up the temporary file
os.unlink(temp_filename)
if process.returncode != 0:
logger.error(f"Errore nell'esecuzione del programma {script_name}.py: {result_stderr}")
raise Exception(f"Errore nel programma: {result_stderr}")
else:
logger.info(f"Programma {script_name}.py eseguito con successo.")
logger.debug(f"Stdout: {result_stdout}")
await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
await unlock(cfg, id, pool)

26
src/utils/csv/parser.py Normal file
View File

@@ -0,0 +1,26 @@
import re
def extract_value(patterns: list, primary_source: str, secondary_source: str = None, default: str='Not Defined') -> str:
"""
Extracts a value from a given source (or sources) based on a list of regex patterns.
It iterates through the provided patterns and attempts to find a match in the
primary source first, then in the secondary source if provided. The first
successful match is returned. If no match is found after checking all sources
with all patterns, a default value is returned.
Args:
patterns (list): A list of regular expression strings to search for.
primary_source (str): The main string to search within.
secondary_source (str, optional): An additional string to search within if no match is found in the primary source. Defaults to None.
default (str, optional): The value to return if no match is found. Defaults to 'Not Defined'.
Returns:
str: The first matched value, or the default value if no match is found.
"""
for source in [source for source in (primary_source, secondary_source) if source is not None]:
for pattern in patterns:
matches = re.findall(pattern, source, re.IGNORECASE)
if matches:
return matches[0] # Return the first match immediately
return default # Return default if no matches are found
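A small usage example (hypothetical patterns and filename):

# extract_value([r'VULINK', r'DEX'], 'ABC123_VuLink_Log')
#   -> 'VuLink'  (first pattern that matches, case-insensitive; the primary source is searched first)
# extract_value([r'XYZ'], 'ABC123_VuLink_Log')
#   -> 'Not Defined'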

View File

@@ -0,0 +1,36 @@
class WorkflowFlags:
"""
Defines integer flags representing different stages in a data processing workflow.
Each flag is a power of 2, allowing them to be combined using bitwise operations
to represent multiple states simultaneously.
"""
CSV_RECEIVED = 0 # 0000
DATA_LOADED = 1 # 0001
START_ELAB = 2 # 0010
DATA_ELABORATED = 4 # 0100
SENT_RAW_DATA = 8 # 1000
SENT_ELAB_DATA = 16 # 10000
DUMMY_ELABORATED = 32 # 100000 (Used for testing or specific dummy elaborations)
# Mapping: flag -> timestamp column
FLAG_TO_TIMESTAMP = {
WorkflowFlags.CSV_RECEIVED: "inserted_at",
WorkflowFlags.DATA_LOADED: "loaded_at",
WorkflowFlags.START_ELAB: "start_elab_at",
WorkflowFlags.DATA_ELABORATED: "elaborated_at",
WorkflowFlags.SENT_RAW_DATA: "sent_raw_at",
WorkflowFlags.SENT_ELAB_DATA: "sent_elab_at",
WorkflowFlags.DUMMY_ELABORATED: "elaborated_at" # Shares the same timestamp column as DATA_ELABORATED
}
"""
A dictionary mapping each WorkflowFlag to the corresponding database column
name that stores the timestamp when that workflow stage was reached.
"""
# Size of the matrix batches used when loading data
BATCH_SIZE = 1000
"""
The number of records to process in a single batch when loading data into the database.
This helps manage memory usage and improve performance for large datasets.
"""

View File

@@ -0,0 +1,147 @@
import logging
import aiomysql
import csv
from io import StringIO
from utils.database import WorkflowFlags
logger = logging.getLogger(__name__)
sub_select = {
WorkflowFlags.DATA_ELABORATED:
"""m.matcall, s.`desc` AS statustools""",
WorkflowFlags.SENT_RAW_DATA:
"""t.ftp_send, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, s.`desc` AS statustools, IFNULL(u.duedate, "") AS duedate""",
WorkflowFlags.SENT_ELAB_DATA:
"""t.ftp_send_raw, IFNULL(u.ftp_mode_raw, "") AS ftp_mode_raw,
IFNULL(u.ftp_addrs_raw, "") AS ftp_addrs_raw, IFNULL(u.ftp_user_raw, "") AS ftp_user_raw,
IFNULL(u.ftp_passwd_raw, "") AS ftp_passwd_raw, IFNULL(u.ftp_filename_raw, "") AS ftp_filename_raw,
IFNULL(u.ftp_parm_raw, "") AS ftp_parm_raw, IFNULL(u.ftp_target_raw, "") AS ftp_target_raw,
t.unit_id, s.`desc` AS statustools, u.inoltro_ftp_raw, u.inoltro_api_raw,
IFNULL(u.inoltro_api_url_raw, "") AS inoltro_api_url_raw,
IFNULL(u.inoltro_api_bearer_token_raw, "") AS inoltro_api_bearer_token_raw,
t.api_send_raw, IFNULL(u.duedate, "") AS duedate
"""
}
async def get_tool_info(next_status: int, unit: str, tool: str, pool: object) -> tuple:
"""
Retrieves tool-specific information from the database based on the next workflow status,
unit name, and tool name.
This function dynamically selects columns based on the `next_status` provided,
joining `matfuncs`, `tools`, `units`, and `statustools` tables.
Args:
next_status (int): The next workflow status flag (e.g., WorkflowFlags.DATA_ELABORATED).
This determines which set of columns to select from the database.
unit (str): The name of the unit associated with the tool.
tool (str): The name of the tool.
pool (object): The database connection pool.
Returns:
tuple: A dictionary-like object (aiomysql.DictCursor result) containing the tool information,
or None if no information is found for the given unit and tool.
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
try:
await cur.execute(f"""
SELECT {sub_select[next_status]}
FROM matfuncs AS m
INNER JOIN tools AS t ON t.matfunc = m.id
INNER JOIN units AS u ON u.id = t.unit_id
INNER JOIN statustools AS s ON t.statustool_id = s.id
WHERE t.name = '{tool}' AND u.name = '{unit}';
""")
result = await cur.fetchone()
if not result:
logger.warning(f"{unit} - {tool}: Tool info not found.")
return None
else:
return result
except Exception as e:
logger.error(f"Error: {e}")
async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_timestamp: float, pool: object) -> str:
"""
Retrieves elaborated data from the database and formats it as a CSV string.
The query selects data from the `ElabDataView` based on `UnitName`, `ToolNameID`,
and a `updated_at` timestamp, then orders it. The first row of the CSV will be
the column headers.
Args:
cfg (dict): Configuration dictionary (not directly used in the query but passed for consistency).
id_recv (int): The ID of the record being processed (used for logging).
unit (str): The name of the unit to filter the data.
tool (str): The ToolNameID used to filter the data.
matlab_timestamp (float): A timestamp used to filter data updated after this time.
pool (object): The database connection pool.
Returns:
str: A string containing the elaborated data in CSV format.
"""
query = """
select * from (
select 'ToolNameID', 'EventDate', 'EventTime', 'NodeNum', 'NodeType', 'NodeDepth',
'XShift', 'YShift', 'ZShift' , 'X', 'Y', 'Z', 'HShift', 'HShiftDir', 'HShift_local',
'speed', 'speed_local', 'acceleration', 'acceleration_local', 'T_node', 'water_level', 'pressure', 'load_value', 'AlfaX', 'AlfaY', 'CalcErr'
union all
select ToolNameID, EventDate, EventTime, NodeNum, NodeType, NodeDepth,
XShift, YShift, ZShift , X, Y, Z, HShift, HShiftDir, HShift_local,
speed, speed_local, acceleration, acceleration_local, T_node, water_level, pressure, load_value, AlfaX, AlfaY, calcerr
from ElabDataView
where UnitName = %s and ToolNameID = %s and updated_at > %s
order by ToolNameID DESC, concat(EventDate, EventTime), convert(`NodeNum`, UNSIGNED INTEGER) DESC
) resulting_set
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(query, (unit, tool, matlab_timestamp))
results = await cur.fetchall()
logger.info(f"id {id_recv} - {unit} - {tool}: estratti i dati per invio CSV")
logger.info(f"Numero di righe estratte: {len(results)}")
# Build the CSV in memory
output = StringIO()
writer = csv.writer(output, delimiter=",", lineterminator="\n", quoting=csv.QUOTE_MINIMAL)
for row in results:
writer.writerow(row)
csv_data = output.getvalue()
output.close()
return csv_data
except Exception as e:
logger.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}")
return None
async def get_elab_timestamp(id_recv: int, pool: object) -> float:
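"""Returns the start_elab_at timestamp for the given received record id, or None if the query fails."""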
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f"""SELECT start_elab_at from received where id = {id_recv}""")
results = await cur.fetchone()
return results[0]
except Exception as e:
logger.error(f"id {id_recv} - Errore nella query timestamp elaborazione: {e}")
return None
async def check_flag_elab(pool: object) -> None:
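"""Reads the stop_elab flag from the admin_panel table and returns it, or None if the query fails."""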
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute("SELECT stop_elab from admin_panel")
results = await cur.fetchone()
return results[0]
except Exception as e:
logger.error(f"Errore nella query check flag stop elaborazioni: {e}")
return None

View File

@@ -0,0 +1,34 @@
import logging
import mysql.connector
from mysql.connector import Error
logger = logging.getLogger(__name__)
def connetti_db(cfg: object) -> object:
"""
Establishes a connection to a MySQL database.
Args:
cfg: A configuration object containing database connection parameters.
It should have the following attributes:
- dbuser: The database username.
- dbpass: The database password.
- dbhost: The database host address.
- dbport: The database port number.
- dbname: The name of the database to connect to.
Returns:
A MySQL connection object if the connection is successful, otherwise None.
"""
try:
conn = mysql.connector.connect(user=cfg.dbuser,
password=cfg.dbpass,
host=cfg.dbhost,
port=cfg.dbport,
database=cfg.dbname)
conn.autocommit = True
logger.info("Connected")
return conn
except Error as e:
logger.error(f"Database connection error: {e}")
raise # Re-raise the exception to be handled by the caller

View File

@@ -0,0 +1,232 @@
#!.venv/bin/python
import logging
import asyncio
from utils.database import FLAG_TO_TIMESTAMP, BATCH_SIZE
from datetime import datetime, timedelta
logger = logging.getLogger(__name__)
async def load_data(cfg: object, matrice_valori: list, pool: object, type: str) -> bool:
"""Carica una lista di record di dati grezzi nel database.
Esegue un'operazione di inserimento massivo (executemany) per caricare i dati.
Utilizza la clausola 'ON DUPLICATE KEY UPDATE' per aggiornare i record esistenti.
Implementa una logica di re-tentativo in caso di deadlock.
Args:
cfg (object): L'oggetto di configurazione contenente i nomi delle tabelle e i parametri di re-tentativo.
matrice_valori (list): Una lista di tuple, dove ogni tupla rappresenta una riga da inserire.
pool (object): Il pool di connessioni al database.
type (str): tipo di caricamento dati. Per GD fa l'update del tool DT corrispondente
Returns:
bool: True se il caricamento ha avuto successo, False altrimenti.
"""
if not matrice_valori:
logger.info("Nulla da caricare.")
return True
if type == "gd" and matrice_valori[0][0] == "RSSI":
matrice_valori.pop(0)
sql_load_RAWDATA = f"""
UPDATE {cfg.dbrawdata} t1
JOIN (
SELECT id
FROM {cfg.dbrawdata}
WHERE UnitName = %s AND ToolNameID = %s AND NodeNum = %s
AND TIMESTAMP(`EventDate`, `EventTime`) BETWEEN %s AND %s
ORDER BY ABS(TIMESTAMPDIFF(SECOND, TIMESTAMP(`EventDate`, `EventTime`), %s))
LIMIT 1
) t2 ON t1.id = t2.id
SET t1.BatLevelModule = %s, t1.TemperatureModule = %s, t1.RssiModule = %s
"""
else:
sql_load_RAWDATA = f"""
INSERT INTO {cfg.dbrawdata} (
`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,
`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,
`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`,
`BatLevelModule`,`TemperatureModule`, `RssiModule`
)
VALUES (
%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s
) as new_data
ON DUPLICATE KEY UPDATE
`BatLevel` = IF({cfg.dbrawdata}.`BatLevel` != new_data.`BatLevel`, new_data.`BatLevel`, {cfg.dbrawdata}.`BatLevel`),
`Temperature` = IF({cfg.dbrawdata}.`Temperature` != new_data.Temperature, new_data.Temperature, {cfg.dbrawdata}.`Temperature`),
`Val0` = IF({cfg.dbrawdata}.`Val0` != new_data.Val0 AND new_data.`Val0` IS NOT NULL, new_data.Val0, {cfg.dbrawdata}.`Val0`),
`Val1` = IF({cfg.dbrawdata}.`Val1` != new_data.Val1 AND new_data.`Val1` IS NOT NULL, new_data.Val1, {cfg.dbrawdata}.`Val1`),
`Val2` = IF({cfg.dbrawdata}.`Val2` != new_data.Val2 AND new_data.`Val2` IS NOT NULL, new_data.Val2, {cfg.dbrawdata}.`Val2`),
`Val3` = IF({cfg.dbrawdata}.`Val3` != new_data.Val3 AND new_data.`Val3` IS NOT NULL, new_data.Val3, {cfg.dbrawdata}.`Val3`),
`Val4` = IF({cfg.dbrawdata}.`Val4` != new_data.Val4 AND new_data.`Val4` IS NOT NULL, new_data.Val4, {cfg.dbrawdata}.`Val4`),
`Val5` = IF({cfg.dbrawdata}.`Val5` != new_data.Val5 AND new_data.`Val5` IS NOT NULL, new_data.Val5, {cfg.dbrawdata}.`Val5`),
`Val6` = IF({cfg.dbrawdata}.`Val6` != new_data.Val6 AND new_data.`Val6` IS NOT NULL, new_data.Val6, {cfg.dbrawdata}.`Val6`),
`Val7` = IF({cfg.dbrawdata}.`Val7` != new_data.Val7 AND new_data.`Val7` IS NOT NULL, new_data.Val7, {cfg.dbrawdata}.`Val7`),
`Val8` = IF({cfg.dbrawdata}.`Val8` != new_data.Val8 AND new_data.`Val8` IS NOT NULL, new_data.Val8, {cfg.dbrawdata}.`Val8`),
`Val9` = IF({cfg.dbrawdata}.`Val9` != new_data.Val9 AND new_data.`Val9` IS NOT NULL, new_data.Val9, {cfg.dbrawdata}.`Val9`),
`ValA` = IF({cfg.dbrawdata}.`ValA` != new_data.ValA AND new_data.`ValA` IS NOT NULL, new_data.ValA, {cfg.dbrawdata}.`ValA`),
`ValB` = IF({cfg.dbrawdata}.`ValB` != new_data.ValB AND new_data.`ValB` IS NOT NULL, new_data.ValB, {cfg.dbrawdata}.`ValB`),
`ValC` = IF({cfg.dbrawdata}.`ValC` != new_data.ValC AND new_data.`ValC` IS NOT NULL, new_data.ValC, {cfg.dbrawdata}.`ValC`),
`ValD` = IF({cfg.dbrawdata}.`ValD` != new_data.ValD AND new_data.`ValD` IS NOT NULL, new_data.ValD, {cfg.dbrawdata}.`ValD`),
`ValE` = IF({cfg.dbrawdata}.`ValE` != new_data.ValE AND new_data.`ValE` IS NOT NULL, new_data.ValE, {cfg.dbrawdata}.`ValE`),
`ValF` = IF({cfg.dbrawdata}.`ValF` != new_data.ValF AND new_data.`ValF` IS NOT NULL, new_data.ValF, {cfg.dbrawdata}.`ValF`),
`BatLevelModule` = IF({cfg.dbrawdata}.`BatLevelModule` != new_data.BatLevelModule, new_data.BatLevelModule, {cfg.dbrawdata}.`BatLevelModule`),
`TemperatureModule` = IF({cfg.dbrawdata}.`TemperatureModule` != new_data.TemperatureModule, new_data.TemperatureModule, {cfg.dbrawdata}.`TemperatureModule`),
`RssiModule` = IF({cfg.dbrawdata}.`RssiModule` != new_data.RssiModule, new_data.RssiModule, {cfg.dbrawdata}.`RssiModule`),
`Created_at` = NOW()
"""
#logger.info(f"Query insert: {sql_load_RAWDATA}.")
#logger.info(f"Matrice valori da inserire: {matrice_valori}.")
rc = False
async with pool.acquire() as conn:
async with conn.cursor() as cur:
for attempt in range(cfg.max_retries):
try:
logger.info(f"Loading data attempt {attempt + 1}.")
for i in range(0, len(matrice_valori), BATCH_SIZE):
batch = matrice_valori[i:i + BATCH_SIZE]
await cur.executemany(sql_load_RAWDATA, batch)
await conn.commit()
logger.info(f"Completed batch {i//BATCH_SIZE + 1}/{(len(matrice_valori)-1)//BATCH_SIZE + 1}")
logger.info("Data loaded.")
rc = True
break
except Exception as e:
await conn.rollback()
logger.error(f"Error: {e}.")
# logger.error(f"Matrice valori da inserire: {batch}.")
if e.args[0] == 1213: # Deadlock detected
logger.warning(
f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}"
)
if attempt < cfg.max_retries - 1:
delay = 2 * attempt
await asyncio.sleep(delay)
continue
else:
logger.error("Max retry attempts reached for deadlock")
raise
return rc
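A sketch of one row for the INSERT branch, to be awaited inside a coroutine; the unit/tool names and readings are placeholders, and any type value other than "gd" selects the INSERT path:

async def load_one_example_row(cfg, pool):
    row = ("ID0001", "DT0001", 1,              # UnitName, ToolNameID, NodeNum
           "2025-09-15", "12:00:00",           # EventDate, EventTime
           12.4, 21.3,                         # BatLevel, Temperature
           *(None,) * 16,                      # Val0..ValF
           None, None, None)                   # BatLevelModule, TemperatureModule, RssiModule
    return await load_data(cfg, [row], pool, "csv")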
async def update_status(cfg: object, id: int, status: int, pool: object) -> None:
"""Updates the status of a record in the CSV records table.
Args:
cfg (object): The configuration object holding the table name.
id (int): The ID of the record to update.
status (int): The status flag to OR into the current value.
pool (object): The database connection pool.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(
f"""update {cfg.dbrectable} set
status = status | {status},
{FLAG_TO_TIMESTAMP[status]} = now()
where id = {id}
"""
)
await conn.commit()
logger.info(f"Status updated id {id}.")
except Exception as e:
await conn.rollback()
logger.error(f"Error: {e}")
async def unlock(cfg: object, id: int, pool: object) -> None:
"""Sblocca un record nella tabella dei record CSV.
Imposta il campo 'locked' a 0 per un dato ID.
Args:
cfg (object): L'oggetto di configurazione contenente il nome della tabella.
id (int): L'ID del record da sbloccare.
pool (object): Il pool di connessioni al database.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(
f"update {cfg.dbrectable} set locked = 0 where id = {id}"
)
await conn.commit()
logger.info(f"id {id} unlocked.")
except Exception as e:
await conn.rollback()
logger.error(f"Error: {e}")
async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tuple:
"""Recupera le informazioni per l'esecuzione di un comando Matlab dal database.
Args:
cfg (object): L'oggetto di configurazione.
unit (str): Il nome dell'unità.
tool (str): Il nome dello strumento.
pool (object): Il pool di connessioni al database.
Returns:
tuple: Una tupla contenente le informazioni del comando Matlab, o None in caso di errore.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f'''select m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, IFNULL(u.duedate, "") as duedate
from matfuncs as m
inner join tools as t on t.matfunc = m.id
inner join units as u on u.id = t.unit_id
inner join statustools as s on t.statustool_id = s.id
where t.name = "{tool}" and u.name = "{unit}"''')
return await cur.fetchone()
except Exception as e:
logger.error(f"Error: {e}")
async def find_nearest_timestamp(cfg: object, unit_tool_data: dict, pool: object) -> tuple:
"""
Finds the nearest timestamp in the raw data table based on a reference timestamp
and unit/tool/node information.
Args:
cfg (object): Configuration object containing database table name (`cfg.dbrawdata`).
unit_tool_data (dict): A dictionary containing:
- "timestamp" (str): The reference timestamp string in "%Y-%m-%d %H:%M:%S" format.
- "unit" (str): The UnitName to filter by.
- "tool" (str): The ToolNameID to filter by.
- "node_num" (int): The NodeNum to filter by.
pool (object): The database connection pool.
Returns:
tuple: A tuple containing the event timestamp, BatLevel, and Temperature of the
nearest record, or None if an error occurs or no record is found.
"""
ref_timestamp = datetime.strptime(unit_tool_data["timestamp"], "%Y-%m-%d %H:%M:%S")
start_timestamp = ref_timestamp - timedelta(seconds=45)
end_timestamp = ref_timestamp + timedelta(seconds=45)
logger.info(f"Find nearest timestamp: {ref_timestamp}")
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f'''SELECT TIMESTAMP(`EventDate`, `EventTime`) AS event_timestamp, BatLevel, Temperature
FROM {cfg.dbrawdata}
WHERE UnitName = "{unit_tool_data["unit"]}" AND ToolNameID = "{unit_tool_data["tool"]}" AND NodeNum = {unit_tool_data["node_num"]}
AND TIMESTAMP(`EventDate`, `EventTime`) BETWEEN "{start_timestamp}" AND "{end_timestamp}"
ORDER BY ABS(TIMESTAMPDIFF(SECOND, TIMESTAMP(`EventDate`, `EventTime`), "{ref_timestamp}"))
LIMIT 1
''')
return await cur.fetchone()
except Exception as e:
logger.error(f"Error: {e}")

View File

@@ -0,0 +1,46 @@
import aiomysql
import logging
logger = logging.getLogger(__name__)
async def get_nodes_type(cfg: object, tool: str, unit: str, pool: object) -> tuple:
"""Recupera le informazioni sui nodi (tipo, canali, input) per un dato strumento e unità.
Args:
cfg (object): L'oggetto di configurazione.
tool (str): Il nome dello strumento.
unit (str): Il nome dell'unità.
pool (object): Il pool di connessioni al database.
Returns:
tuple: Una tupla contenente quattro liste: canali, tipi, ain, din.
Se non vengono trovati risultati, restituisce (None, None, None, None).
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
await cur.execute(f"""
SELECT t.name AS name, n.seq AS seq, n.num AS num, n.channels AS channels, y.type AS type, n.ain AS ain, n.din AS din
FROM {cfg.dbname}.{cfg.dbnodes} AS n
INNER JOIN tools AS t ON t.id = n.tool_id
INNER JOIN units AS u ON u.id = t.unit_id
INNER JOIN nodetypes AS y ON n.nodetype_id = y.id
WHERE y.type NOT IN ('Anchor Link', 'None') AND t.name = '{tool}' AND u.name = '{unit}'
ORDER BY n.num;
""")
results = await cur.fetchall()
logger.info(f"{unit} - {tool}: {cur.rowcount} rows selected to get node type/Ain/Din/channels.")
if not results:
logger.info(f"{unit} - {tool}: Node/Channels/Ain/Din not defined.")
return None, None, None, None
else:
channels, types, ains, dins = [], [], [], []
for row in results:
channels.append(row['channels'])
types.append(row['type'])
ains.append(row['ain'])
dins.append(row['din'])
return channels, types, ains, dins
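Usage sketch, with placeholder unit/tool names:

async def describe_tool_nodes(cfg, pool):
    channels, types, ains, dins = await get_nodes_type(cfg, "DT0001", "ID0001", pool)
    if channels is None:
        return 0
    # e.g. total number of configured channels across all nodes
    return sum(channels)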

78
src/utils/general.py Normal file
View File

@@ -0,0 +1,78 @@
import glob
import os
from itertools import cycle, chain
import logging
logger = logging.getLogger()
def alterna_valori(*valori: any, ping_pong: bool = False) -> any:
"""
Generates a cyclic sequence of values, with an optional "ping-pong" mode.
Args:
*valori (any): One or more values to cycle over.
ping_pong (bool, optional): If True, the sequence runs forward and then backward.
For example, (1, 2, 3) yields 1, 2, 3, 2, 1, 2, 3, ...
If False, the sequence simply repeats in order.
Defaults to False.
Yields:
any: The next value in the cyclic sequence.
"""
if not valori:
return
if ping_pong:
# Build the ping-pong sequence: the values followed by the reversed values (without repeating first and last)
forward = valori
backward = valori[-2:0:-1] # Excludes the last and the first element
ping_pong_sequence = chain(forward, backward)
yield from cycle(ping_pong_sequence)
else:
yield from cycle(valori)
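For example, with ping_pong=True the generator alternates forward and backward over the values:

from itertools import islice

seq = alterna_valori(1, 2, 3, ping_pong=True)
print(list(islice(seq, 8)))  # [1, 2, 3, 2, 1, 2, 3, 2]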
async def read_error_lines_from_logs(base_path: str, pattern: str) -> tuple[list[str], list[str]]:
"""
Reads error and warning lines from log files matching a given pattern within a base path.
This asynchronous function searches for log files, reads their content, and categorizes
lines starting with 'Error' as errors and all other non-empty lines as warnings.
Args:
base_path (str): The base directory where log files are located.
pattern (str): The glob-style pattern to match log filenames (e.g., "*.txt", "prefix_*_output_error.txt").
Returns:
tuple[list[str], list[str]]: A tuple containing two lists:
- The first list contains all extracted error messages.
- The second list contains all extracted warning messages."""
# Build the full search path from the base path and the pattern
search_pattern = os.path.join(base_path, pattern)
# Find all files matching the pattern
matching_files = glob.glob(search_pattern)
if not matching_files:
logger.warning(f"No file found for pattern: {search_pattern}")
return [], []
errors = []
warnings = []
for file_path in matching_files:
try:
with open(file_path, 'r', encoding='utf-8') as file:
lines = file.readlines()
non_empty_lines = [line.strip() for line in lines if line.strip()]
errors.extend(line for line in non_empty_lines if line.startswith('Error'))
warnings.extend(line for line in non_empty_lines if not line.startswith('Error'))
except Exception as e:
logger.error(f"Error while reading file {file_path}: {e}")
# dict.fromkeys() keeps insertion order while dropping duplicate warning lines across all files
return errors, list(dict.fromkeys(warnings))
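Usage sketch (the function is declared async even though the file reads themselves are synchronous); the path and pattern are placeholders:

import asyncio

async def report_matlab_logs():
    errors, warnings = await read_error_lines_from_logs(
        "/var/log/elab", "*_output_error.txt")
    return len(errors), len(warnings)

asyncio.run(report_matlab_logs())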

View File

@@ -0,0 +1,104 @@
import logging
import asyncio
import os
import aiomysql
import contextvars
from typing import Callable, Coroutine, Any
# Context variable that identifies the current worker
worker_context = contextvars.ContextVar("worker_id", default="^-^")
# Custom formatter that includes the worker_id in every record
class WorkerFormatter(logging.Formatter):
"""Custom log formatter that adds the worker ID to each record."""
def format(self, record: logging.LogRecord) -> str:
"""Formatta il record di log includendo l'ID del worker.
Args:
record (str): Il record di log da formattare.
Returns:
La stringa formattata del record di log.
"""
record.worker_id = worker_context.get()
return super().format(record)
def setup_logging(log_filename: str, log_level_str: str):
"""Configura il logging globale.
Args:
log_filename (str): Percorso del file di log.
log_level_str (str): Livello di log (es. "INFO", "DEBUG").
"""
logger = logging.getLogger()
handler = logging.FileHandler(log_filename)
formatter = WorkerFormatter(
"%(asctime)s - PID: %(process)d.Worker-%(worker_id)s.%(name)s.%(funcName)s.%(levelname)s: %(message)s"
)
handler.setFormatter(formatter)
# Remove any existing handlers and add ours
if logger.hasHandlers():
logger.handlers.clear()
logger.addHandler(handler)
log_level = getattr(logging, log_level_str.upper(), logging.INFO)
logger.setLevel(log_level)
logger.info("Logging configurato correttamente")
async def run_orchestrator(
config_class: Any,
worker_coro: Callable[[int, Any, Any], Coroutine[Any, Any, None]],
):
"""Funzione principale che inizializza e avvia un orchestratore.
Args:
config_class: La classe di configurazione da istanziare.
worker_coro: La coroutine del worker da eseguire in parallelo.
"""
logger = logging.getLogger()
logger.info("Avvio del sistema...")
cfg = config_class()
logger.info("Configurazione caricata correttamente")
debug_mode = False
try:
log_level = os.getenv("LOG_LEVEL", "INFO").upper()
setup_logging(cfg.logfilename, log_level)
debug_mode = logger.getEffectiveLevel() == logging.DEBUG
logger.info(f"Avvio di {cfg.max_threads} worker concorrenti")
pool = await aiomysql.create_pool(
host=cfg.dbhost,
user=cfg.dbuser,
password=cfg.dbpass,
db=cfg.dbname,
minsize=cfg.max_threads,
maxsize=cfg.max_threads * 4,
pool_recycle=3600,
)
tasks = [
asyncio.create_task(worker_coro(i, cfg, pool))
for i in range(cfg.max_threads)
]
logger.info("Sistema avviato correttamente. In attesa di nuovi task...")
try:
await asyncio.gather(*tasks, return_exceptions=debug_mode)
finally:
pool.close()
await pool.wait_closed()
except KeyboardInterrupt:
logger.info("Info: Shutdown richiesto... chiusura in corso")
except Exception as e:
logger.error(f"Errore principale: {e}", exc_info=debug_mode)

View File

@@ -0,0 +1 @@
"""Parser delle centraline con le tipologie di unit e tool"""

View File

@@ -0,0 +1 @@
"""Parser delle centraline con nomi di unit e tool"""

View File

@@ -0,0 +1 @@
"""Parser delle centraline"""

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'cr1000x_cr1000x' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'd2w_d2w' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as channels_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g201_g201' type.
This function wraps `channels_main_loader`, passing "channels"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await channels_main_loader(cfg, id, pool,"channels")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g301_g301' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g801_iptm' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as analog_dig_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g801_loc' type.
This function wraps `analog_dig_main_loader`, passing "analogic_digital"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await analog_dig_main_loader(cfg, id, pool, "analogic_digital")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g801_mums' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as musa_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g801_musa' type.
This function wraps `musa_main_loader`, passing "musa"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await musa_main_loader(cfg, id, pool, "musa")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as channels_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g801_mux' type.
This function wraps `channels_main_loader`, passing "channels"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await channels_main_loader(cfg, id, pool, "channels")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g802_dsas' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as gd_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g802_gd' type.
This function wraps `gd_main_loader`, passing "gd"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await gd_main_loader(cfg, id, pool, "gd")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as analog_dig_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g802_loc' type.
This function wraps `analog_dig_main_loader`, passing "analogic_digital"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await analog_dig_main_loader(cfg, id, pool, "analogic_digital")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g802_modb' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g802_mums' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as channels_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'g802_mux' type.
This function wraps `channels_main_loader`, passing "channels"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await channels_main_loader(cfg, id, pool, "channels")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as tlp_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'gs1_gs1' type.
This function wraps `tlp_main_loader`, passing "tlp"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await tlp_main_loader(cfg, id, pool, "tlp")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as hirpinia_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'hirpinia_hirpinia' type.
This function wraps `main_old_script_loader`, passing "hirpiniaLoadScript"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await hirpinia_main_loader(cfg, id, pool, "hirpiniaLoadScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'hortus_hortus' type.
This function wraps `pipe_sep_main_loader`, passing "pipe_separator"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await pipe_sep_main_loader(cfg, id, pool, "pipe_separator")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as vulink_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'isi_csv_log_vulink' type.
This function wraps `vulink_main_loader`, passing "vulinkScript"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await vulink_main_loader(cfg, id, pool, "vulinkScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as sisgeo_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'sisgeo_health' type.
This function wraps `main_old_script_loader`, passing "sisgeoLoadScript"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await sisgeo_main_loader(cfg, id, pool, "sisgeoLoadScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as sisgeo_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'sisgeo_readings' type.
This function wraps `main_old_script_loader`, passing "sisgeoLoadScript"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await sisgeo_main_loader(cfg, id, pool, "sisgeoLoadScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as sorotecPini_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'sorotecpini_co' type.
This function wraps `sorotecPini_main_loader`, passing "sorotecPini"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await sorotecPini_main_loader(cfg, id, pool, "sorotecPini")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as ts_pini_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'stazionetotale_integrity_monitor' type.
This function wraps `main_old_script_loader`, passing "TS_PiniScript"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await ts_pini_main_loader(cfg, id, pool, "TS_PiniScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as ts_pini_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'stazionetotale_messpunktepini' type.
This function wraps `ts_pini_main_loader`, passing "TS_PiniScript"
as the processing script name.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await ts_pini_main_loader(cfg, id, pool, "TS_PiniScript")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as analog_dig_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'tlp_loc' type.
This function wraps `analog_dig_main_loader`, passing "analogic_digital"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await analog_dig_main_loader(cfg, id, pool, "analogic_digital")

View File

@@ -0,0 +1,15 @@
from utils.csv.loaders import main_loader as tlp_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Loads and processes CSV data for the 'tlp_tlp' type.
This function wraps `tlp_main_loader`, passing "tlp"
as the processing type.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
"""
await tlp_main_loader(cfg, id, pool, "tlp")

View File

View File

@@ -0,0 +1,37 @@
from datetime import datetime
def normalizza_data(data_string: str) -> str | None:
"""
Normalizes a date string to the YYYY-MM-DD format, trying several input formats.
Args:
data_string (str): The date string to normalize.
Returns:
str: The normalized date in YYYY-MM-DD format,
or None if the string cannot be interpreted as a date.
"""
formato_desiderato = "%Y-%m-%d"
formati_input = ["%Y/%m/%d", "%Y-%m-%d", "%d-%m-%Y","%d/%m/%Y", ] # Order matters: try the most likely format first
for formato_input in formati_input:
try:
data_oggetto = datetime.strptime(data_string, formato_input)
return data_oggetto.strftime(formato_desiderato)
except ValueError:
continue # Try the next format if the current one fails
return None # No format succeeded
def normalizza_orario(orario_str: str) -> str:
"""Normalizes a time string to HH:MM:SS, accepting HH:MM:SS or HH:MM as input.
Returns the original string unchanged if it cannot be parsed."""
try:
# Try HH:MM:SS first
dt = datetime.strptime(orario_str, "%H:%M:%S")
return dt.strftime("%H:%M:%S")
except ValueError:
try:
# If that fails, try HH:MM
dt = datetime.strptime(orario_str, "%H:%M")
return dt.strftime("%H:%M:%S")
except ValueError:
return orario_str # Return the original if it cannot be parsed
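A few worked examples of the two normalizers:

print(normalizza_data("15/09/2025"))   # -> "2025-09-15"
print(normalizza_data("2025/09/15"))   # -> "2025-09-15"
print(normalizza_orario("22:06"))      # -> "22:06:00"
print(normalizza_orario("not a time")) # -> "not a time" (returned unchanged)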

View File

@@ -1,546 +0,0 @@
import unittest
import os
import sys
from unittest.mock import patch, MagicMock, mock_open, call, ANY
from hashlib import sha256
from pathlib import Path
from types import SimpleNamespace # Used to create mock config objects
# Add the parent directory to sys.path to allow importing FtpCsvReceiver
# Adjust this path if your test file is located differently
script_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(script_dir)
# If FtpCsvReceiver.py is in the same directory as the test file, you might not need this
# If it's in the parent directory (like /home/alex/devel/ASE/), use this:
sys.path.insert(0, parent_dir)
# Now import the components to test
# We need to import AFTER modifying sys.path if necessary
# Also, mock dependencies BEFORE importing the module that uses them
# Mock mysql.connector BEFORE importing FtpCsvReceiver
mock_mysql_connector = MagicMock()
sys.modules['mysql.connector'] = mock_mysql_connector
# Mock the custom utils modules as well if they aren't available in the test environment
mock_utils_time = MagicMock()
mock_utils_config = MagicMock()
sys.modules['utils.time'] = mock_utils_time
sys.modules['utils.config'] = mock_utils_config
# Mock the setting.config() call specifically
mock_config_instance = MagicMock()
mock_utils_config.set_config.config.return_value = mock_config_instance
# Mock pyftpdlib classes if needed for specific tests, but often mocking methods is enough
# sys.modules['pyftpdlib.handlers'] = MagicMock()
# sys.modules['pyftpdlib.authorizers'] = MagicMock()
# sys.modules['pyftpdlib.servers'] = MagicMock()
# Import the module AFTER mocking dependencies
import FtpCsvReceiver
from FtpCsvReceiver import (
extract_value,
DummySha256Authorizer,
ASEHandler,
conn_db, # Import even though we mock mysql.connector
)
# --- Test Configuration Setup ---
def create_mock_cfg():
"""Creates a mock configuration object for testing."""
cfg = SimpleNamespace()
cfg.adminuser = ['admin', sha256(b'adminpass').hexdigest(), '/fake/admin/path', 'elradfmwMT']
cfg.dbhost = 'mockhost'
cfg.dbport = 3306
cfg.dbuser = 'mockuser'
cfg.dbpass = 'mockpass'
cfg.dbname = 'mockdb'
cfg.dbusertable = 'mock_virtusers'
cfg.dbrectable = 'mock_received'
cfg.virtpath = '/fake/ftp/root/'
cfg.defperm = 'elmw'
cfg.fileext = ['.CSV', '.TXT']
# Add patterns as lists of strings
cfg.units_name = [r'ID\d{4}', r'IX\d{4}']
cfg.units_type = [r'G801', r'G201']
cfg.tools_name = [r'LOC\d{4}', r'DT\d{4}']
cfg.tools_type = [r'MUX', r'MUMS']
# Add other necessary config values
cfg.logfilename = 'test_ftp.log'
cfg.proxyaddr = '0.0.0.0'
cfg.firstport = 40000
cfg.portrangewidth = 10
return cfg
# --- Test Cases ---
class TestExtractValue(unittest.TestCase):
def test_extract_from_primary(self):
patterns = [r'ID(\d+)']
primary = "File_ID1234_data.csv"
secondary = "Some other text"
self.assertEqual(extract_value(patterns, primary, secondary), "ID1234")
def test_extract_from_secondary(self):
patterns = [r'Type(A|B)']
primary = "Filename_without_type.txt"
secondary = "Log data: TypeB found"
self.assertEqual(extract_value(patterns, primary, secondary), "TypeB")
def test_no_match(self):
patterns = [r'XYZ\d+']
primary = "File_ID1234_data.csv"
secondary = "Log data: TypeB found"
self.assertEqual(extract_value(patterns, primary, secondary, default="NotFound"), "NotFound")
def test_case_insensitive(self):
patterns = [r'id(\d+)']
primary = "File_ID1234_data.csv"
secondary = "Some other text"
self.assertEqual(extract_value(patterns, primary, secondary), "ID1234") # Note: re.findall captures original case
def test_multiple_patterns(self):
patterns = [r'Type(A|B)', r'ID(\d+)']
primary = "File_ID1234_data.csv"
secondary = "Log data: TypeB found"
# Should match the first pattern found in the primary source
self.assertEqual(extract_value(patterns, primary, secondary), "ID1234")
def test_multiple_patterns_secondary_match(self):
patterns = [r'XYZ\d+', r'Type(A|B)']
primary = "File_ID1234_data.csv"
secondary = "Log data: TypeB found"
# Should match the second pattern in the secondary source
self.assertEqual(extract_value(patterns, primary, secondary), "TypeB")
class TestDummySha256Authorizer(unittest.TestCase):
def setUp(self):
self.mock_cfg = create_mock_cfg()
# Mock the database connection and cursor
self.mock_conn = MagicMock()
self.mock_cursor = MagicMock()
mock_mysql_connector.connect.return_value = self.mock_conn
self.mock_conn.cursor.return_value = self.mock_cursor
@patch('FtpCsvReceiver.Path') # Mock Path object
def test_init_loads_users(self, mock_path_constructor):
# Mock Path instance methods
mock_path_instance = MagicMock()
mock_path_constructor.return_value = mock_path_instance
# Simulate database result
db_users = [
('user1', sha256(b'pass1').hexdigest(), '/fake/ftp/root/user1', 'elr'),
('user2', sha256(b'pass2').hexdigest(), '/fake/ftp/root/user2', 'elmw'),
]
self.mock_cursor.fetchall.return_value = db_users
authorizer = DummySha256Authorizer(self.mock_cfg)
# Verify DB connection
mock_mysql_connector.connect.assert_called_once_with(
user=self.mock_cfg.dbuser, password=self.mock_cfg.dbpass,
host=self.mock_cfg.dbhost, port=self.mock_cfg.dbport
)
# Verify query
self.mock_cursor.execute.assert_called_once_with(
f'SELECT ftpuser, hash, virtpath, perm FROM {self.mock_cfg.dbname}.{self.mock_cfg.dbusertable} WHERE deleted_at IS NULL'
)
# Verify admin user added
self.assertIn('admin', authorizer.user_table)
self.assertEqual(authorizer.user_table['admin']['pwd'], self.mock_cfg.adminuser[1])
# Verify DB users added
self.assertIn('user1', authorizer.user_table)
self.assertEqual(authorizer.user_table['user1']['pwd'], db_users[0][1])
self.assertEqual(authorizer.user_table['user1']['home'], db_users[0][2])
self.assertEqual(authorizer.user_table['user1']['perm'], db_users[0][3])
self.assertIn('user2', authorizer.user_table)
# Verify directories were "created"
expected_path_calls = [
call(self.mock_cfg.virtpath + 'user1'),
call(self.mock_cfg.virtpath + 'user2'),
]
mock_path_constructor.assert_has_calls(expected_path_calls, any_order=True)
self.assertEqual(mock_path_instance.mkdir.call_count, 2)
mock_path_instance.mkdir.assert_called_with(parents=True, exist_ok=True)
@patch('FtpCsvReceiver.Path')
def test_init_mkdir_exception(self, mock_path_constructor):
# Simulate database result
db_users = [('user1', sha256(b'pass1').hexdigest(), '/fake/ftp/root/user1', 'elr')]
self.mock_cursor.fetchall.return_value = db_users
# Mock Path to raise an exception
mock_path_instance = MagicMock()
mock_path_constructor.return_value = mock_path_instance
mock_path_instance.mkdir.side_effect = OSError("Permission denied")
# We expect initialization to continue, but maybe log an error (though the code uses self.responde which isn't available here)
# For a unit test, we just check that the user is still added
authorizer = DummySha256Authorizer(self.mock_cfg)
self.assertIn('user1', authorizer.user_table)
mock_path_instance.mkdir.assert_called_once()
def test_validate_authentication_success(self):
self.mock_cursor.fetchall.return_value = [] # No DB users for simplicity
authorizer = DummySha256Authorizer(self.mock_cfg)
# Test admin user
authorizer.validate_authentication('admin', 'adminpass', None) # Handler not used in this method
def test_validate_authentication_wrong_password(self):
self.mock_cursor.fetchall.return_value = []
authorizer = DummySha256Authorizer(self.mock_cfg)
with self.assertRaises(FtpCsvReceiver.AuthenticationFailed):
authorizer.validate_authentication('admin', 'wrongpass', None)
def test_validate_authentication_unknown_user(self):
self.mock_cursor.fetchall.return_value = []
authorizer = DummySha256Authorizer(self.mock_cfg)
with self.assertRaises(FtpCsvReceiver.AuthenticationFailed):
authorizer.validate_authentication('unknown', 'somepass', None)
class TestASEHandler(unittest.TestCase):
def setUp(self):
self.mock_cfg = create_mock_cfg()
self.mock_conn = MagicMock() # Mock FTP connection object
self.mock_server = MagicMock() # Mock FTP server object
self.mock_authorizer = MagicMock(spec=DummySha256Authorizer) # Mock authorizer
# Instantiate the handler
# We need to manually set cfg and authorizer as done in main()
self.handler = ASEHandler(self.mock_conn, self.mock_server)
self.handler.cfg = self.mock_cfg
self.handler.authorizer = self.mock_authorizer
self.handler.respond = MagicMock() # Mock the respond method
self.handler.push = MagicMock() # Mock the push method
# Mock database for handler methods
self.mock_db_conn = MagicMock()
self.mock_db_cursor = MagicMock()
# Patch conn_db globally for this test class
self.patcher_conn_db = patch('FtpCsvReceiver.conn_db', return_value=self.mock_db_conn)
self.mock_conn_db = self.patcher_conn_db.start()
self.mock_db_conn.cursor.return_value = self.mock_db_cursor
# Mock logging
self.patcher_logging = patch('FtpCsvReceiver.logging')
self.mock_logging = self.patcher_logging.start()
def tearDown(self):
# Stop the patchers
self.patcher_conn_db.stop()
self.patcher_logging.stop()
# Reset mocks if needed between tests (though setUp does this)
mock_mysql_connector.reset_mock()
@patch('FtpCsvReceiver.os.path.split', return_value=('/fake/ftp/root/user1', 'ID1234_data.CSV'))
@patch('FtpCsvReceiver.os.path.splitext', return_value=('ID1234_data', '.CSV'))
@patch('FtpCsvReceiver.os.stat')
@patch('FtpCsvReceiver.open', new_callable=mock_open, read_data='G801,col2,col3\nval1,val2,val3')
@patch('FtpCsvReceiver.os.remove')
@patch('FtpCsvReceiver.extract_value') # Mock extract_value for focused testing
def test_on_file_received_success(self, mock_extract, mock_os_remove, mock_file_open, mock_os_stat, mock_splitext, mock_split):
mock_os_stat.return_value.st_size = 100 # Non-empty file
test_file_path = '/fake/ftp/root/user1/ID1234_data.CSV'
# Setup mock return values for extract_value
mock_extract.side_effect = ['ID1234', 'G801', 'LOC5678', 'MUX']
self.handler.on_file_received(test_file_path)
# Verify file stats checked
mock_os_stat.assert_called_once_with(test_file_path)
# Verify file opened
mock_file_open.assert_called_once_with(test_file_path, 'r')
# Verify path splitting
mock_split.assert_called_once_with(test_file_path)
mock_splitext.assert_called_once_with('ID1234_data.CSV')
# Verify extract_value calls
expected_extract_calls = [
call(self.mock_cfg.units_name, 'ID1234_data', ANY), # ANY for the lines string
call(self.mock_cfg.units_type, 'ID1234_data', ANY),
call(self.mock_cfg.tools_name, 'ID1234_data', ANY),
call(self.mock_cfg.tools_type, 'ID1234_data', ANY),
]
mock_extract.assert_has_calls(expected_extract_calls)
# Verify DB connection
self.mock_conn_db.assert_called_once_with(self.mock_cfg)
# Verify DB insert
expected_sql = f"INSERT INTO {self.mock_cfg.dbname}.{self.mock_cfg.dbrectable } (filename, unit_name, unit_type, tool_name, tool_type, tool_data) VALUES (%s, %s, %s, %s, %s, %s)"
expected_data = ('ID1234_data', 'ID1234', 'G801', 'LOC5678', 'MUX', 'G801,col2,col3\nval1,val2,val3')
self.mock_db_cursor.execute.assert_called_once_with(expected_sql, expected_data)
self.mock_db_conn.commit.assert_called_once()
self.mock_db_conn.close.assert_called_once()
# Verify file removed
mock_os_remove.assert_called_once_with(test_file_path)
# Verify logging
self.mock_logging.info.assert_called_with(f'File {test_file_path} loaded: removed.')
@patch('FtpCsvReceiver.os.path.split', return_value=('/fake/ftp/root/user1', 'data.WRONGEXT'))
@patch('FtpCsvReceiver.os.path.splitext', return_value=('data', '.WRONGEXT'))
@patch('FtpCsvReceiver.os.stat')
@patch('FtpCsvReceiver.os.remove')
def test_on_file_received_wrong_extension(self, mock_os_remove, mock_os_stat, mock_splitext, mock_split):
mock_os_stat.return_value.st_size = 100
test_file_path = '/fake/ftp/root/user1/data.WRONGEXT'
self.handler.on_file_received(test_file_path)
# Verify only stat, split, and splitext were called
mock_os_stat.assert_called_once_with(test_file_path)
mock_split.assert_called_once_with(test_file_path)
mock_splitext.assert_called_once_with('data.WRONGEXT')
# Verify DB, open, remove were NOT called
self.mock_conn_db.assert_not_called()
mock_os_remove.assert_not_called()
self.mock_logging.info.assert_not_called() # No logging in this path
@patch('FtpCsvReceiver.os.stat')
@patch('FtpCsvReceiver.os.remove')
def test_on_file_received_empty_file(self, mock_os_remove, mock_os_stat):
mock_os_stat.return_value.st_size = 0 # Empty file
test_file_path = '/fake/ftp/root/user1/empty.CSV'
self.handler.on_file_received(test_file_path)
# Verify stat called
mock_os_stat.assert_called_once_with(test_file_path)
# Verify file removed
mock_os_remove.assert_called_once_with(test_file_path)
# Verify logging
self.mock_logging.info.assert_called_with(f'File {test_file_path} was empty: removed.')
# Verify DB not called
self.mock_conn_db.assert_not_called()
@patch('FtpCsvReceiver.os.path.split', return_value=('/fake/ftp/root/user1', 'ID1234_data.CSV'))
@patch('FtpCsvReceiver.os.path.splitext', return_value=('ID1234_data', '.CSV'))
@patch('FtpCsvReceiver.os.stat')
@patch('FtpCsvReceiver.open', new_callable=mock_open, read_data='G801,col2,col3\nval1,val2,val3')
@patch('FtpCsvReceiver.os.remove')
@patch('FtpCsvReceiver.extract_value', side_effect=['ID1234', 'G801', 'LOC5678', 'MUX'])
def test_on_file_received_db_error(self, mock_extract, mock_os_remove, mock_file_open, mock_os_stat, mock_splitext, mock_split):
mock_os_stat.return_value.st_size = 100
test_file_path = '/fake/ftp/root/user1/ID1234_data.CSV'
db_error = Exception("DB connection failed")
self.mock_db_cursor.execute.side_effect = db_error # Simulate DB error
self.handler.on_file_received(test_file_path)
# Verify DB interaction attempted
self.mock_conn_db.assert_called_once_with(self.mock_cfg)
self.mock_db_cursor.execute.assert_called_once()
# Verify commit/close not called after error
self.mock_db_conn.commit.assert_not_called()
self.mock_db_conn.close.assert_not_called() # Should close be called in finally? Original code doesn't.
# Verify file was NOT removed
mock_os_remove.assert_not_called()
# Verify error logging
self.mock_logging.error.assert_any_call(f'File {test_file_path} not loaded. Held in user path.')
self.mock_logging.error.assert_any_call(f'{db_error}')
@patch('FtpCsvReceiver.os.remove')
def test_on_incomplete_file_received(self, mock_os_remove):
test_file_path = '/fake/ftp/root/user1/incomplete.part'
self.handler.on_incomplete_file_received(test_file_path)
mock_os_remove.assert_called_once_with(test_file_path)
@patch('FtpCsvReceiver.Path')
@patch('FtpCsvReceiver.os.path.basename', return_value='newuser')
def test_ftp_SITE_ADDU_success(self, mock_basename, mock_path_constructor):
mock_path_instance = MagicMock()
mock_path_constructor.return_value = mock_path_instance
password = 'newpassword'
expected_hash = sha256(password.encode("UTF-8")).hexdigest()
expected_home = self.mock_cfg.virtpath + 'newuser'
self.handler.ftp_SITE_ADDU(f'newuser {password}')
# Verify path creation
mock_path_constructor.assert_called_once_with(expected_home)
mock_path_instance.mkdir.assert_called_once_with(parents=True, exist_ok=True)
# Verify authorizer call
self.handler.authorizer.add_user.assert_called_once_with(
'newuser', expected_hash, expected_home + '/', perm=self.mock_cfg.defperm # Note: Original code adds trailing slash here
)
# Verify DB interaction
self.mock_conn_db.assert_called_once_with(self.mock_cfg)
expected_sql = f"INSERT INTO {self.mock_cfg.dbname}.{self.mock_cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('newuser', '{expected_hash}', '{expected_home}', '{self.mock_cfg.defperm}')"
self.mock_db_cursor.execute.assert_called_once_with(expected_sql)
self.mock_db_conn.commit.assert_called_once()
self.mock_db_conn.close.assert_called_once()
# Verify response
self.handler.respond.assert_called_once_with('200 SITE ADDU successful.')
# Verify logging
self.mock_logging.info.assert_called_with('User newuser created.')
def test_ftp_SITE_ADDU_missing_args(self):
self.handler.ftp_SITE_ADDU('newuser') # Missing password
self.handler.respond.assert_called_once_with('501 SITE ADDU failed. Command needs 2 arguments')
self.handler.authorizer.add_user.assert_not_called()
self.mock_conn_db.assert_not_called()
@patch('FtpCsvReceiver.Path')
@patch('FtpCsvReceiver.os.path.basename', return_value='newuser')
def test_ftp_SITE_ADDU_mkdir_error(self, mock_basename, mock_path_constructor):
mock_path_instance = MagicMock()
mock_path_constructor.return_value = mock_path_instance
error = OSError("Cannot create dir")
mock_path_instance.mkdir.side_effect = error
self.handler.ftp_SITE_ADDU('newuser newpassword')
self.handler.respond.assert_called_once_with(f'551 Error in create virtual user path: {error}')
self.handler.authorizer.add_user.assert_not_called()
self.mock_conn_db.assert_not_called()
@patch('FtpCsvReceiver.Path')
@patch('FtpCsvReceiver.os.path.basename', return_value='newuser')
def test_ftp_SITE_ADDU_db_error(self, mock_basename, mock_path_constructor):
mock_path_instance = MagicMock()
mock_path_constructor.return_value = mock_path_instance
error = Exception("DB insert failed")
self.mock_db_cursor.execute.side_effect = error
self.handler.ftp_SITE_ADDU('newuser newpassword')
# Verify mkdir called
mock_path_instance.mkdir.assert_called_once()
# Verify authorizer called (happens before DB)
self.handler.authorizer.add_user.assert_called_once()
# Verify DB interaction attempted
self.mock_conn_db.assert_called_once()
self.mock_db_cursor.execute.assert_called_once()
# Verify response
self.handler.respond.assert_called_once_with(f'501 SITE ADDU failed: {error}.')
@patch('FtpCsvReceiver.os.path.basename', return_value='olduser')
def test_ftp_SITE_DELU_success(self, mock_basename):
self.handler.ftp_SITE_DELU('olduser')
# Verify authorizer call
self.handler.authorizer.remove_user.assert_called_once_with('olduser')
# Verify DB interaction
self.mock_conn_db.assert_called_once_with(self.mock_cfg)
expected_sql = f"UPDATE {self.mock_cfg.dbname}.{self.mock_cfg.dbusertable} SET deleted_at = now() WHERE ftpuser = 'olduser'"
self.mock_db_cursor.execute.assert_called_once_with(expected_sql)
self.mock_db_conn.commit.assert_called_once()
self.mock_db_conn.close.assert_called_once()
# Verify response
self.handler.respond.assert_called_once_with('200 SITE DELU successful.')
# Verify logging
self.mock_logging.info.assert_called_with('User olduser deleted.')
@patch('FtpCsvReceiver.os.path.basename', return_value='olduser')
def test_ftp_SITE_DELU_error(self, mock_basename):
error = Exception("DB update failed")
self.mock_db_cursor.execute.side_effect = error
self.handler.ftp_SITE_DELU('olduser')
# Verify authorizer call (happens first)
self.handler.authorizer.remove_user.assert_called_once_with('olduser')
# Verify DB interaction attempted
self.mock_conn_db.assert_called_once()
self.mock_db_cursor.execute.assert_called_once()
# Verify response
self.handler.respond.assert_called_once_with('501 SITE DELU failed.')
@patch('FtpCsvReceiver.Path')
@patch('FtpCsvReceiver.os.path.basename', return_value='restoreme')
def test_ftp_SITE_RESU_success(self, mock_basename, mock_path_constructor):
mock_path_instance = MagicMock()
mock_path_constructor.return_value = mock_path_instance
user_data = ('restoreme', 'somehash', '/fake/ftp/root/restoreme', 'elmw')
self.mock_db_cursor.fetchone.return_value = user_data
self.handler.ftp_SITE_RESU('restoreme')
# Verify DB interaction
self.mock_conn_db.assert_called_once_with(self.mock_cfg)
expected_update_sql = f"UPDATE {self.mock_cfg.dbname}.{self.mock_cfg.dbusertable} SET deleted_at = null WHERE ftpuser = 'restoreme'"
expected_select_sql = f"SELECT ftpuser, hash, virtpath, perm FROM {self.mock_cfg.dbname}.{self.mock_cfg.dbusertable} WHERE ftpuser = 'restoreme'"
expected_db_calls = [
call(expected_update_sql),
call(expected_select_sql)
]
self.mock_db_cursor.execute.assert_has_calls(expected_db_calls)
self.mock_db_conn.commit.assert_called_once() # For the update
self.mock_db_cursor.fetchone.assert_called_once()
# Verify authorizer call
self.handler.authorizer.add_user.assert_called_once_with(*user_data)
# Verify path creation
mock_path_constructor.assert_called_once_with(self.mock_cfg.virtpath + 'restoreme')
mock_path_instance.mkdir.assert_called_once_with(parents=True, exist_ok=True)
# Verify DB close
self.mock_db_conn.close.assert_called_once()
# Verify response
self.handler.respond.assert_called_once_with('200 SITE RESU successful.')
# Verify logging
self.mock_logging.info.assert_called_with('User restoreme restored.')
@patch('FtpCsvReceiver.os.path.basename', return_value='restoreme')
def test_ftp_SITE_RESU_db_error(self, mock_basename):
error = Exception("DB fetch failed")
# Simulate error on the SELECT statement
self.mock_db_cursor.execute.side_effect = [None, error] # First call (UPDATE) ok, second (SELECT) fails
self.handler.ftp_SITE_RESU('restoreme')
# Verify DB interaction attempted
self.mock_conn_db.assert_called_once()
self.assertEqual(self.mock_db_cursor.execute.call_count, 2) # Both UPDATE and SELECT attempted
self.mock_db_conn.commit.assert_called_once() # Commit for UPDATE happened
# Verify response
self.handler.respond.assert_called_once_with('501 SITE RESU failed.')
# Verify authorizer not called, mkdir not called
self.handler.authorizer.add_user.assert_not_called()
def test_ftp_SITE_LSTU_success(self):
user_list_data = [
('userA', 'elr'),
('userB', 'elmw'),
]
self.mock_db_cursor.fetchall.return_value = user_list_data
self.handler.ftp_SITE_LSTU('') # No argument needed
# Verify DB interaction
self.mock_conn_db.assert_called_once_with(self.mock_cfg)
expected_sql = f'SELECT ftpuser, perm FROM {self.mock_cfg.dbname}.{self.mock_cfg.dbusertable} WHERE deleted_at IS NULL '
self.mock_db_cursor.execute.assert_called_once_with(expected_sql)
self.mock_db_cursor.fetchall.assert_called_once()
# Verify push calls
expected_push_calls = [
call("214-The following virtual users are defined:\r\n"),
call('Username: userA\tPerms: elr\r\nUsername: userB\tPerms: elmw\r\n')
]
self.handler.push.assert_has_calls(expected_push_calls)
# Verify final response
self.handler.respond.assert_called_once_with("214 LSTU SITE command successful.")
def test_ftp_SITE_LSTU_db_error(self):
error = Exception("DB select failed")
self.mock_db_cursor.execute.side_effect = error
self.handler.ftp_SITE_LSTU('')
# Verify DB interaction attempted
self.mock_conn_db.assert_called_once()
self.mock_db_cursor.execute.assert_called_once()
# Verify response
self.handler.respond.assert_called_once_with(f'501 list users failed: {error}')
# Verify push not called
self.handler.push.assert_not_called()
if __name__ == '__main__':
unittest.main(argv=['first-arg-is-ignored'], exit=False)

View File

@@ -1,165 +0,0 @@
#!/usr/bin/env python3
import sys
import os
import re
from datetime import datetime
import json
import mysql.connector as mysql
import logging
from utils.time import timestamp_fmt as ts
from utils.config import set_config as setting
def conn_db(cfg):
return mysql.connect(user=cfg.dbuser, password=cfg.dbpass, host=cfg.dbhost, port=cfg.dbport )
def extract_value(patterns, source, default='Not Defined'):
ip = {}
for pattern in patterns:
s_pattern = rf'{pattern}:\s*(\d{{1,3}}(?:\.\d{{1,3}}){{3}})'
matches = re.search(s_pattern, source, re.IGNORECASE)
if matches:
ip.update({pattern: matches.group(1)})
else:
ip.update({pattern: default})
return ip
def write_db(records, cfg):
insert_values = [
(
record["unit_name"], record["unit_type"], record["tool_name"], record["tool_type"],
record["unit_ip"], record["unit_subnet"], record["unit_gateway"], record["event_timestamp"],
record["battery_level"], record["temperature"], record["nodes_jsonb"]
)
for record in records
]
query = f"""
INSERT IGNORE INTO {cfg.dbname}.{cfg.dbdataraw} (
unit_name, unit_type, tool_name, tool_type, unit_ip, unit_subnet, unit_gateway,
event_timestamp, battery_level, temperature, nodes_jsonb
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
"""
try:
with conn_db(cfg) as conn:
conn.autocommit = True
with conn.cursor() as cur:
try:
cur.executemany(query, insert_values)
cur.close()
conn.commit()
except Exception as e:
logging.error(f'Records not inserted: {e}')
logging.info('Exit')
exit()
except Exception as e:
logging.error(f'Records not inserted: {e}')
exit()
def elab_csv(cfg):
try:
with conn_db(cfg) as conn:
cur = conn.cursor()
cur.execute(f'select id, unit_name, unit_type, tool_name, tool_type, tool_data from {cfg.dbname}.{cfg.dbrectable} where locked = 0 and status = 0 limit 1')
id, unit_name, unit_type, tool_name, tool_type, tool_data = cur.fetchone()
cur.execute(f'update {cfg.dbname}.{cfg.dbrectable} set locked = 1 where id = {id}')
data_list = str(tool_data).strip("('{\"").strip("\"}\',)").split('","')
# Estrarre le informazioni degli ip dalla header
infos = extract_value(cfg.csv_infos, str(data_list[:9]))
except Exception as e:
logging.error(f'{e}')
records = []
# Definizione dei pattern
timestamp_pattern1 = r'(\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2});'
timestamp_pattern2 = r'(\d{2}/\d{2}/\d{4} \d{2}:\d{2}:\d{2});'
# Formato desiderato per il timestamp
output_format = "%Y-%m-%d %H:%M:%S"
for line in list(set(data_list)):
if (match := re.search(timestamp_pattern1, line)):
timestamp = datetime.strptime(match.group(1), "%Y/%m/%d %H:%M:%S").strftime(output_format)
elif (match := re.search(timestamp_pattern2, line)):
timestamp = datetime.strptime(match.group(1), "%d/%m/%Y %H:%M:%S").strftime(output_format)
else:
continue
line_without_timestamp = (line[match.end():]).strip('|;')
match_values = re.findall(r'[-+]?\d*\.\d+|\d+', line_without_timestamp)
battery_level, temperature = match_values[0], match_values[1]
remainder = ";".join(line_without_timestamp.split(";")[2:]).strip('|;')
# Rimuovi spazi bianchi o caratteri di nuova riga
nodes = remainder.strip().replace('\\n', '').split(";|;")
# Estrai i valori di ciascun nodo e formatta i dati come JSON
node_list = []
for i, node_data in enumerate(nodes, start=1):
node_dict = {"num": i}
# Dividi ogni nodo in valori separati da ";"
node_values = node_data.split(';')
for j, value in enumerate(node_values, start=0):
# Imposta i valori a -9999 se trovi "Dis."
node_dict['val' + str(j)] = -9999 if (value == "Dis." or value == "Err1" or value == "Err2" or value == "---" or value == "NotAv" or value == "No RX" or value == "DMUXe" or value == "CH n. Error" or value == "-") else float(value)
node_list.append(node_dict)
# Prepara i dati per l'inserimento/aggiornamento
record = {
"unit_name": unit_name.upper(),
"unit_type": unit_type.upper(),
"tool_name": tool_name.upper(),
"tool_type": tool_type.upper(),
"unit_ip": infos['IP'],
"unit_subnet": infos['Subnet'],
"unit_gateway": infos['Gateway'],
"event_timestamp": timestamp,
"battery_level": float(battery_level),
"temperature": float(temperature),
"nodes_jsonb": json.dumps(node_list) # Converti la lista di dizionari in una stringa JSON
}
records.append(record)
# Se abbiamo raggiunto 500 record, esegui l'inserimento in batch
if len(records) >= 500:
logging.info("Raggiunti 500 record scrivo sul DB")
write_db(records, cfg)
records = []
write_db(records, cfg)
def main():
# Load the configuration settings
cfg = setting.config()
try:
# Configura la connessione al database PostgreSQL
# Configure logging
logging.basicConfig(
format="%(asctime)s %(message)s",
filename=cfg.elablog,
level=logging.INFO,
)
elab_csv(cfg)
except KeyboardInterrupt:
logging.info(
"Info: {}.".format("Shutdown requested...exiting")
)
except Exception:
print(
"{} - PID {:>5} >> Error: {}.".format(
ts.timestamp("log"), os.getpid(), sys.exc_info()[1]
)
)
if __name__ == "__main__":
main()

View File

@@ -1 +0,0 @@
"""Config ini setting"""

View File

@@ -1,24 +0,0 @@
from datetime import datetime
def conforma_data(data_string):
"""
Conforma una stringa di data al formato YYYY-MM-DD, provando diversi formati di input.
Args:
data_string (str): La stringa di data da conformare.
Returns:
str: La data conformata nel formato YYYY-MM-DD,
o None se la stringa non può essere interpretata come una data.
"""
formato_desiderato = "%Y-%m-%d"
formati_input = ["%Y/%m/%d", "%Y-%m-%d", "%d-%m-%Y","%d/%m/%Y", ] # Ordine importante: prova prima il più probabile
for formato_input in formati_input:
try:
data_oggetto = datetime.strptime(data_string, formato_input)
return data_oggetto.strftime(formato_desiderato)
except ValueError:
continue # Prova il formato successivo se quello attuale fallisce
return None # Se nessun formato ha avuto successo

View File

@@ -1 +0,0 @@
locals

View File

@@ -1 +0,0 @@
"""Utilità per i formati timestamp"""

View File

@@ -1,28 +0,0 @@
"""Funzioni per formato data
"""
from datetime import datetime
from re import search
def dateFmt(date):
t = date.replace("/", "-")
if search('^\d\d\d\d-\d\d-\d\d$', t):
d = datetime.strptime(t, "%Y-%m-%d")
elif search('^\d\d-\d\d-\d\d$', t):
d = datetime.strptime(t, "%y-%m-%d")
elif search('^\d\d-\d\d-\d\d\d\d$', t):
d = datetime.strptime(t, "%d-%m-%Y")
return datetime.strftime(d, "%Y-%m-%d")
def dateTimeFmt(date):
t = date.replace("/", "-")
if search('^\d\d\d\d-\d\d-\d\d$', t):
d = datetime.strptime(t, "%Y-%m-%d %H:%M:%S")
elif search('^\d\d-\d\d-\d\d$', t):
d = datetime.strptime(t, "%y-%m-%d %H:%M:%S")
elif search('^\d\d-\d\d-\d\d\d\d$', t):
d = datetime.strptime(t, "%d-%m-%Y %H:%M:%S")
return datetime.strftime(d, "%Y-%m-%d")

Some files were not shown because too many files have changed in this diff.