Compare commits

..

34 Commits

Author SHA1 Message Date
35527c89cd fix ftp 2025-09-15 22:32:12 +02:00
8cd5a21275 fix flag elab 2025-09-15 22:06:01 +02:00
2d2668c92c setting vscode 2025-09-12 20:54:21 +02:00
adfe2e7809 fix cread user dir 2025-09-12 20:52:11 +02:00
1a99b55dbb add flag stop elab 2025-09-11 21:28:42 +02:00
54cb20b6af pylint 2 2025-09-03 21:22:35 +02:00
39dba8f54a fix pylint 2025-09-03 21:05:19 +02:00
9b3f1171f3 gitignore 2025-09-03 20:48:54 +02:00
f7e2efa03e resync toml 2025-09-03 20:39:55 +02:00
4e548c883c versionato toml 2025-09-03 20:36:07 +02:00
1ce6c7fd09 fix alias + add username sender 2025-08-27 22:43:36 +02:00
730869ef1f mod alterna valori ping pong 2025-08-23 16:58:52 +02:00
d1582b8f9e add multi file logs filter errors 2025-08-22 21:15:10 +02:00
f33ae140fc ini e email 2025-08-21 19:03:23 +02:00
d3f7e9090a std ini file e load config 2025-08-21 16:21:47 +02:00
05816ee95d add doc in load ftp user 2025-08-20 21:55:59 +02:00
55383e51b8 docs db __init__ 2025-08-19 22:08:57 +02:00
fb0383b6b6 fix 2025-08-19 14:19:09 +02:00
ea5cdac7c0 rename old_script -> old_scripts 2025-08-19 14:15:15 +02:00
c6d486d0bd refactory old script 2025-08-19 12:36:27 +02:00
b79f07b407 add funcs docs 2025-08-19 12:01:15 +02:00
2b976d06b3 util ftp renamed connect 2025-08-11 22:59:38 +02:00
dbe2e7f5a7 fix send ftp e api 2025-08-10 16:47:04 +02:00
cfb185e029 fix status val 2025-08-09 20:14:20 +02:00
3a3b63e360 reorg elab_query 2025-08-09 19:09:40 +02:00
5fc40093e2 add alias for tools and units types and names 2025-08-03 21:46:15 +02:00
fdefd0a430 pini 2025-08-02 19:22:48 +02:00
6ff97316dc add src path 2025-07-31 23:10:23 +02:00
acaad8a99f fix GD 2025-07-28 23:03:56 +02:00
d6f1998d78 GD RSSI + normalizza orario 2025-07-27 23:20:18 +02:00
dc20713cad gestione GD 2025-07-27 19:25:42 +02:00
cee070d237 fix logging to use the new 2025-07-27 17:56:57 +02:00
287d2de81e fix caricamenti 2025-07-27 00:32:12 +02:00
a8df0f9584 fix 2025-07-21 22:07:46 +02:00
92 changed files with 3279 additions and 992 deletions

16
.gitignore vendored Normal file

@@ -0,0 +1,16 @@
*.pyc
*.toml
.python-version
uv.lock
*.log*
.vscode/settings.json
README.md
prova*.*
.codegpt
build/
LoadCSVData.pl
matlab_elab.py
doc_carri.txt
ase.egg-info/
site/
site.zip

2
.pylintrc Normal file

@@ -0,0 +1,2 @@
# Oppure se vuoi essere più permissivo
max-line-length=140

4
.vscode/setting.json vendored Normal file

@@ -0,0 +1,4 @@
{
"flake8.args": ["--max-line-length=140"],
"python.linting.flake8Args": ["--config","flake8.cfg"]
}

View File

@@ -2,18 +2,23 @@ DROP TABLE ase_lar.received;
CREATE TABLE `received` (
`id` int NOT NULL AUTO_INCREMENT,
-`filename` varchar(100) COLLATE utf8mb4_general_ci NOT NULL,
-`unit_name` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
-`unit_type` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
-`tool_name` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
-`tool_type` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
+`username` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+`filename` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+`unit_name` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+`unit_type` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+`tool_name` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+`tool_type` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`tool_data` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+`tool_info` json DEFAULT NULL,
`locked` int DEFAULT '0',
`status` int DEFAULT '0',
+`matlab_timestamp` timestamp NULL DEFAULT NULL,
`inserted_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
-`loaded_at` datetime DEFAULT NULL,
+`loaded_at` timestamp NULL DEFAULT NULL,
+`start_elab_at` timestamp NULL DEFAULT NULL,
`elaborated_at` timestamp NULL DEFAULT NULL,
-`sent_at` timestamp NULL DEFAULT NULL,
+`sent_raw_at` timestamp NULL DEFAULT NULL,
+`sent_elab_at` timestamp NULL DEFAULT NULL,
+`last_update_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=694 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
+) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
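With these columns, `received` works as a small job queue: `status` records how far a file has progressed through the workflow and `locked` keeps concurrent orchestrator workers from picking up the same row. A minimal sketch of how a worker could claim one pending row atomically, assuming an aiomysql-style pool (an illustration only, not the project's `get_next_csv_atomic`; the helper name and SQL are assumptions):

async def claim_next_received(pool, status_wanted: int):
    """Sketch only: grab one unclaimed `received` row matching a workflow status."""
    async with pool.acquire() as conn:  # pool is assumed to be an aiomysql pool
        async with conn.cursor() as cur:
            await conn.begin()
            # Lock one candidate row so parallel workers cannot pick the same one.
            await cur.execute(
                "SELECT id, unit_type, tool_type, unit_name, tool_name "
                "FROM received WHERE status = %s AND locked = 0 "
                "ORDER BY id LIMIT 1 FOR UPDATE",
                (status_wanted,),
            )
            row = await cur.fetchone()
            if row:
                # Mark the row as taken before releasing the transaction.
                await cur.execute("UPDATE received SET locked = 1 WHERE id = %s", (row[0],))
            await conn.commit()
            return row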

91
docs/gen_ref_pages.py Normal file

@@ -0,0 +1,91 @@
"""Genera le pagine di riferimento per l'API."""
from pathlib import Path
import mkdocs_gen_files
nav = mkdocs_gen_files.Nav()
# File e directory da escludere
EXCLUDE_PATTERNS = {
".env",
".env.*",
"__pycache__",
".git",
".pytest_cache",
".venv",
"venv",
"node_modules",
"docs", # Escludi tutta la directory docs
"build",
"dist",
"*.egg-info",
".mypy_cache",
".coverage",
"htmlcov"
}
def should_exclude(path: Path) -> bool:
"""Verifica se un percorso deve essere escluso."""
# Escludi file .env
if path.name.startswith('.env'):
return True
# Escludi lo script stesso
if path.name == "gen_ref_pages.py":
return True
# Escludi la directory old_script
if "old_script" in path.parts:
return True
# Escludi tutta la directory docs
if "docs" in path.parts:
return True
# Escludi pattern comuni
for pattern in EXCLUDE_PATTERNS:
if pattern in str(path):
return True
return False
# Cerca i file Python nella directory corrente
for path in sorted(Path(".").rglob("*.py")):
# Salta i file esclusi
if should_exclude(path):
continue
# Salta i file che iniziano con un punto
if any(part.startswith('.') for part in path.parts):
continue
# Salta i file che iniziano con prova
if any(part.startswith('prova') for part in path.parts):
continue
if any(part.startswith('matlab_elab') for part in path.parts):
continue
module_path = path.with_suffix("")
doc_path = path.with_suffix(".md")
full_doc_path = Path("reference", doc_path)
parts = tuple(module_path.parts)
if parts[-1] == "__init__":
parts = parts[:-1]
doc_path = doc_path.with_name("index.md")
full_doc_path = full_doc_path.with_name("index.md")
elif parts[-1] == "__main__":
continue
nav[parts] = doc_path.as_posix()
with mkdocs_gen_files.open(full_doc_path, "w") as fd:
ident = ".".join(parts)
fd.write(f"::: {ident}")
mkdocs_gen_files.set_edit_path(full_doc_path, path)
with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
nav_file.writelines(nav.build_literate_nav())

36
docs/index.md Normal file

@@ -0,0 +1,36 @@
# Welcome to the documentation
This is the automatically generated documentation for the ASE Python application that manages the CSV files received via FTP.
## Features
- Reception of CSV files via FTP and storage in the database.
- Loading of the data into the database, with dedicated modules for:
- unit and sensor type
- unit and sensor name
- Execution of the MatLab elaboration.
- FTP user management
- Bulk loading of FTP users from the database
## Setup
- customization of the env files:
- env/db.ini
- env/ftp.ini
- env/load.ini
- env/elab.ini
- run the FTP server -> "python ftp_csv_receiver.py"
- run the CSV load orchestrator -> "python load_orchestrator.py"
- run the MatLab elaboration orchestrator -> "python elab_orchestrator.py"
systemd services can be created to run these components automatically.
The application runs inside a virtualenv, so python must be invoked with the appropriate settings.
## Installation
Install the ase-x.x.x-py3-none-any.whl package:
- pip install ase-x.x.x-py3-none-any.whl

View File

@@ -1,105 +0,0 @@
#!.venv/bin/python
# Import necessary libraries
import logging
import asyncio
from datetime import datetime
# Import custom modules for configuration and database connection
from utils.config import loader_matlab_elab as setting
from utils.database import DATA_LOADED, DATA_ELABORATED, DATA_SENT
from utils.database.matlab_query import get_matlab_command
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
from utils.database.loader_action import update_status, unlock
from utils.database.elab_query import get_data_as_csv
#from utils.ftp.elab_send import send_csv_to_customer
# Initialize the logger for this module
logger = logging.getLogger()
# Delay tra un processamento CSV e il successivo (in secondi)
ELAB_PROCESSING_DELAY = 0.2
# Tempo di attesa se non ci sono record da elaborare
NO_RECORD_SLEEP = 60
async def worker(worker_id: int, cfg: object, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'elaborazione dei dati caricati.
Il worker preleva un record dal database che indica dati pronti per
l'elaborazione, esegue un comando Matlab associato e attende
prima di iniziare un nuovo ciclo.
Args:
worker_id (int): L'ID univoco del worker.
cfg (object): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Imposta il context per questo worker
worker_context.set(f"W{worker_id:02d}")
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.info("Avviato")
while True:
try:
logger.info("Inizio elaborazione")
record = await get_next_csv_atomic(pool, cfg.dbrectable, DATA_LOADED)
if record:
id, unit_type, tool_type, unit_name, tool_name = [x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record]
tool_elab_info = await get_matlab_command(cfg, tool_name, unit_name, pool)
if tool_elab_info:
if tool_elab_info['statustools'].lower() in cfg.elab_status:
logger.info(f"Elaborazione id {id} per {unit_name} {tool_name} ")
matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name} {tool_name}"
timestamp_matlab_elab = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
proc = await asyncio.create_subprocess_shell(
matlab_cmd,
cwd=cfg.matlab_func_path,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
logger.error("Errore durante l'elaborazione")
logger.error(stderr.decode().strip())
with open(f"{cfg.matlab_error_path}{unit_name}{tool_name}_output_error.txt", "w") as f:
f.write(stderr.decode().strip())
else:
logger.info(stdout.decode().strip())
await update_status(cfg, id, DATA_ELABORATED, pool)
if not tool_elab_info["duedate"] or tool_elab_info["duedate"] in ('0000-00-00 00:00:00', '') or tool_elab_info["duedate"] > timestamp_matlab_elab:
if tool_elab_info['ftp_send']:
if elab_csv := await get_data_as_csv(cfg, id, unit_name, tool_name, timestamp_matlab_elab, pool):
print(elab_csv)
#if await send_csv_to_customer(cfg, id, unit_name, tool_name, elab_csv, pool):
#await update_status(cfg, id, DATA_SENT, pool)
await update_status(cfg, id, DATA_SENT, pool)
else:
logger.info(f"id {id} - {unit_name} - {tool_name} {tool_elab_info['duedate']}: ftp put didn't executed because due date reached.")
await unlock(cfg, id, pool)
await asyncio.sleep(ELAB_PROCESSING_DELAY)
else:
logger.info(f"id {id} - {unit_name} - {tool_name} {tool_elab_info['statustools']}: MatLab calc by-passed.")
else:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
except Exception as e:
logger.error(f"Errore durante l'esecuzione: {e}", exc_info=debug_mode)
await asyncio.sleep(1)
async def main():
"""Funzione principale che avvia l'elab_orchestrator."""
await run_orchestrator(setting.Config, worker)
if __name__ == "__main__":
asyncio.run(main())

6
env/config.ini vendored Normal file

@@ -0,0 +1,6 @@
[mysql]
host = 10.211.114.173
database = ase_lar
user = root
password = batt1l0
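For illustration, a [mysql] section like this can be read with configparser and turned into a connection using mysql-connector-python (already listed among the project dependencies). The project's own utils.config loaders and connetti_db are not shown in this diff, so the snippet below is only a hedged sketch, not their implementation:

from configparser import ConfigParser

import mysql.connector


def connect_from_ini(path: str = "env/config.ini"):
    """Open a MySQL connection from the [mysql] section of an ini file (illustrative)."""
    cfg = ConfigParser()
    cfg.read(path)
    db = cfg["mysql"]
    return mysql.connector.connect(
        host=db["host"],
        user=db["user"],
        password=db["password"],
        database=db["database"],
    )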

12
env/elab.ini vendored

@@ -1,20 +1,20 @@
[logging]
-logFilename = ./elab_data.log
+logFilename = ../logs/elab_data.log
[threads]
max_num = 10
[tool]
# stati in minuscolo
-elab_status = active|manual update
+elab_status = active|manual upload
[matlab]
#runtime = /usr/local/MATLAB/MATLAB_Runtime/v93
#func_path = /usr/local/matlab_func/
runtime = /home/alex/matlab_sym/
func_path = /home/alex/matlab_sym/
timeout = 1800
error = ""
error_path = /tmp/

59
env/email.ini vendored Normal file

@@ -0,0 +1,59 @@
[smtp]
address = smtp.aseltd.eu
port = 587
user = alert@aseltd.eu
password = Ase#2013!20@bat
[address]
from = ASE Alert System<alert@aseltd.eu>
to1 = andrea.carri@aseltd.eu,alessandro.battilani@gmail.com,alessandro.valletta@aseltd.eu,alberto.sillani@aseltd.eu,majd.saidani@aseltd.eu
to = alessandro.battilani@aseltd.eu
cc = alessandro.battilani@gmail.com
bcc =
[msg]
subject = ASE Alert System
body = <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Alert from ASE</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body style="margin: 0; padding: 0;">
<table bgcolor="#ffffff" border="0" cellpadding="0" cellspacing="0" width="100%%">
<tr>
<td align="center">
<img src="https://www2.aseltd.eu/static/img/logo_ASE_small.png" alt="ASE" style="display: block;" />
</td>
</tr>
<tr>
<td align="center">
<h1 style="margin: 5px;">Alert from ASE:</h1>
</td>
</tr>
<tr>
<td align="center">
<h3 style="margin: 5px;">Matlab function {matlab_cmd} failed on unit => {unit} - tool => {tool}</h3>
</td>
</tr>
<tr>
<td align="center">
<h4 style="margin: 5px;">{matlab_error}</h4>
</td>
</tr>
<tr>
<td style="padding: 20px; padding-bottom: 0px; color: red">
{MatlabErrors}
</td>
</tr>
<tr>
<td style="padding: 20px;">
{MatlabWarnings}
</td>
</tr>
</table>
</body>
</html>

12
env/ftp.ini vendored

@@ -2,6 +2,7 @@
# python3 -c 'from hashlib import sha256;print(sha256("????password???".encode("UTF-8")).hexdigest())'
[ftpserver]
+service_port = 2121
firstPort = 40000
proxyAddr = 0.0.0.0
portRangeWidth = 500
@@ -17,15 +18,20 @@
path = /home/alex/aseftp/csvfs/
[logging]
-logFilename = ./ftp_csv_rec.log
+logFilename = ../logs/ftp_csv_rec.log
[unit]
-Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
+Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
Names = ID[0-9]{4}|IX[0-9]{4}|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
+Alias = HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE|MESSPUNKTEPINI_:STAZIONETOTALE|CO_:SOROTECPINI
[tool]
-Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK
+Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK
Names = LOC[0-9]{4}|DT[0-9]{4}|GD[0-9]{4}|[0-9]{18}|MEASUREMENTS_|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
+Alias = CO_:CO|HEALTH-:HEALTH|READINGS-:READINGS|MESSPUNKTEPINI_:MESSPUNKTEPINI
[csv]
Infos = IP|Subnet|Gateway
+[ts_pini]
+path_match = [276_208_TS0003]:TS0003|[Neuchatel_CDP]:TS7|[TS0006_EP28]:=|[TS0007_ChesaArcoiris]:=|[TS0006_EP28_3]:=|[TS0006_EP28_4]:TS0006_EP28_4|[TS0006_EP28_5]:TS0006_EP28_5|[TS18800]:=|[Granges_19 100]:=|[Granges_19 200]:=|[Chesa_Arcoiris_2]:=|[TS0006_EP28_1]:=|[TS_PS_Petites_Croisettes]:=|[_Chesa_Arcoiris_1]:=|[TS_test]:=|[TS-VIME]:=
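The new Alias keys map a matched unit/tool pattern to a canonical name using pipe-separated PATTERN:ALIAS pairs. How the receiver applies them is not part of this hunk, so the following is just one plausible way to parse and use such a mapping (helper names are hypothetical):

def parse_alias(raw: str) -> dict[str, str]:
    """Turn 'PATTERN:ALIAS|PATTERN:ALIAS' into a lookup dict."""
    return dict(item.split(":", 1) for item in raw.split("|") if item)


def apply_alias(name: str, aliases: dict[str, str]) -> str:
    """Return the alias whose pattern prefixes the name, or the name unchanged."""
    for pattern, alias in aliases.items():
        if name.upper().startswith(pattern.upper()):
            return alias
    return name


tool_aliases = parse_alias("CO_:CO|HEALTH-:HEALTH|READINGS-:READINGS|MESSPUNKTEPINI_:MESSPUNKTEPINI")
print(apply_alias("HEALTH-1234", tool_aliases))  # -> HEALTH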

6
env/load.ini vendored

@@ -1,5 +1,5 @@
[logging]
-logFilename = ./load_raw_data.log
+logFilename = ../logs/load_raw_data.log
[threads]
max_num = 5

5
env/send.ini vendored Normal file

@@ -0,0 +1,5 @@
[logging]
logFilename = ../logs/send_data.log
[threads]
max_num = 30

66
mkdocs.yml Normal file

@@ -0,0 +1,66 @@
site_name: Ase receiver
site_description: Documentazione automatica della app Python ASE
theme:
name: material
features:
- navigation.tabs
- navigation.sections
- toc.integrate
- navigation.top
- search.suggest
- search.highlight
- content.tabs.link
- content.code.annotation
- content.code.copy
plugins:
- offline
- search
- mkdocstrings:
handlers:
python:
paths: ["."]
options:
docstring_style: google
show_source: true
show_root_heading: true
show_root_toc_entry: true
show_symbol_type_heading: true
show_symbol_type_toc: true
filters:
- "!^docs" # Escludi tutto ciò che inizia con "docs"
- gen-files:
scripts:
- docs/gen_ref_pages.py
- literate-nav:
nav_file: SUMMARY.md
nav:
- Home: index.md
- API Reference: reference/
markdown_extensions:
- pymdownx.highlight:
anchor_linenums: true
- pymdownx.inlinehilite
- pymdownx.snippets
- pymdownx.superfences
- pymdownx.tabbed:
alternate_style: true
- admonition
- pymdownx.details
- attr_list
- md_in_html
# Escludi file dalla build
exclude_docs: |
.env*
__pycache__/
.git/
.pytest_cache/
.venv/
venv/
test/
.vscode/

31
pyproject.toml Normal file

@@ -0,0 +1,31 @@
[project]
name = "ase"
version = "0.9.0"
description = "ASE backend"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"aiomysql>=0.2.0",
"cryptography>=45.0.3",
"mysql-connector-python>=9.3.0",
"pyftpdlib>=2.0.1",
"pyproj>=3.7.1",
"utm>=0.8.1",
]
[dependency-groups]
dev = [
"mkdocs>=1.6.1",
"mkdocs-gen-files>=0.5.0",
"mkdocs-literate-nav>=0.6.2",
"mkdocs-material>=9.6.15",
"mkdocstrings[python]>=0.29.1",
"ruff>=0.12.11",
]
[tool.setuptools]
package-dir = {"" = "src"}
[tool.setuptools.packages.find]
exclude = ["test","build"]
where = ["src"]

121
src/elab_orchestrator.py Executable file

@@ -0,0 +1,121 @@
#!.venv/bin/python
"""
Orchestratore dei worker che lanciano le elaborazioni
"""
# Import necessary libraries
import logging
import asyncio
# Import custom modules for configuration and database connection
from utils.config import loader_matlab_elab as setting
from utils.database import WorkflowFlags
from utils.database.action_query import get_tool_info, check_flag_elab
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
from utils.database.loader_action import update_status, unlock
from utils.connect.send_email import send_error_email
from utils.general import read_error_lines_from_logs
# Initialize the logger for this module
logger = logging.getLogger()
# Delay tra un processamento CSV e il successivo (in secondi)
ELAB_PROCESSING_DELAY = 0.2
# Tempo di attesa se non ci sono record da elaborare
NO_RECORD_SLEEP = 60
async def worker(worker_id: int, cfg: object, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'elaborazione dei dati caricati.
Il worker preleva un record dal database che indica dati pronti per
l'elaborazione, esegue un comando Matlab associato e attende
prima di iniziare un nuovo ciclo.
Args:
worker_id (int): L'ID univoco del worker.
cfg (object): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Imposta il context per questo worker
worker_context.set(f"W{worker_id:02d}")
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.info("Avviato")
while True:
try:
logger.info("Inizio elaborazione")
if not await check_flag_elab(pool):
record = await get_next_csv_atomic(pool, cfg.dbrectable, WorkflowFlags.DATA_LOADED, WorkflowFlags.DATA_ELABORATED)
if record:
rec_id, _, tool_type, unit_name, tool_name = [x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record]
if tool_type.lower() != "gd": # i tool GD non devono essere elaborati ???
tool_elab_info = await get_tool_info(WorkflowFlags.DATA_ELABORATED, unit_name.upper(), tool_name.upper(), pool)
if tool_elab_info:
if tool_elab_info['statustools'].lower() in cfg.elab_status:
logger.info("Elaborazione ID %s per %s %s", rec_id, unit_name, tool_name)
await update_status(cfg, rec_id, WorkflowFlags.START_ELAB, pool)
matlab_cmd = f"timeout {cfg.matlab_timeout} ./run_{tool_elab_info['matcall']}.sh {cfg.matlab_runtime} {unit_name.upper()} {tool_name.upper()}"
proc = await asyncio.create_subprocess_shell(
matlab_cmd,
cwd=cfg.matlab_func_path,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
logger.error("Errore durante l'elaborazione")
logger.error(stderr.decode().strip())
if proc.returncode == 124:
error_type = f"Matlab elab excessive duration: killed after {cfg.matlab_timeout} seconds."
else:
error_type = f"Matlab elab failed: {proc.returncode}."
# da verificare i log dove prenderli
# with open(f"{cfg.matlab_error_path}{unit_name}{tool_name}_output_error.txt", "w") as f:
# f.write(stderr.decode().strip())
# errors = [line for line in stderr.decode().strip() if line.startswith("Error")]
# warnings = [line for line in stderr.decode().strip() if not line.startswith("Error")]
errors, warnings = await read_error_lines_from_logs(cfg.matlab_error_path, f"_{unit_name}_{tool_name}*_*_output_error.txt")
await send_error_email(unit_name.upper(), tool_name.upper(), tool_elab_info['matcall'], error_type, errors, warnings)
else:
logger.info(stdout.decode().strip())
await update_status(cfg, rec_id, WorkflowFlags.DATA_ELABORATED, pool)
await unlock(cfg, rec_id, pool)
await asyncio.sleep(ELAB_PROCESSING_DELAY)
else:
logger.info("ID %s %s - %s %s: MatLab calc by-passed.", rec_id, unit_name, tool_name, tool_elab_info['statustools'])
await update_status(cfg, rec_id, WorkflowFlags.DATA_ELABORATED, pool)
await update_status(cfg, rec_id, WorkflowFlags.DUMMY_ELABORATED, pool)
await unlock(cfg, rec_id, pool)
else:
await update_status(cfg, rec_id, WorkflowFlags.DATA_ELABORATED, pool)
await update_status(cfg, rec_id, WorkflowFlags.DUMMY_ELABORATED, pool)
await unlock(cfg, rec_id, pool)
else:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
else:
logger.info("Flag fermo elaborazione attivato")
await asyncio.sleep(NO_RECORD_SLEEP)
except Exception as e: # pylint: disable=broad-except
logger.error("Errore durante l'esecuzione: %s", e, exc_info=debug_mode)
await asyncio.sleep(1)
async def main():
"""Funzione principale che avvia l'elab_orchestrator."""
await run_orchestrator(setting.Config, worker)
if __name__ == "__main__":
asyncio.run(main())
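When a MATLAB run fails, the worker calls read_error_lines_from_logs(cfg.matlab_error_path, pattern) and emails the result. That helper lives in utils.general, which is not included in this diff; the sketch below only illustrates the contract implied by the call site (glob the *_output_error.txt files and split their lines into errors and warnings) and is an assumption, not the actual implementation:

import asyncio
from pathlib import Path


async def read_error_lines_from_logs(base_path: str, pattern: str) -> tuple[list[str], list[str]]:
    """Assumed contract: glob the MATLAB error files and split lines into (errors, warnings)."""
    def _scan() -> tuple[list[str], list[str]]:
        errors, warnings = [], []
        for log_file in Path(base_path).glob(pattern):
            for line in log_file.read_text(errors="replace").splitlines():
                if line.startswith("Error"):
                    errors.append(line)
                elif line.strip():
                    warnings.append(line)
        return errors, warnings

    # Read the files off the event loop so the worker keeps serving other records.
    return await asyncio.to_thread(_scan)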

View File

@@ -1,27 +1,32 @@
#!.venv/bin/python
-"""This module implements an FTP server with custom commands for managing virtual users and handling CSV file uploads."""
+"""
+This module implements an FTP server with custom commands for
+managing virtual users and handling CSV file uploads.
+"""
import os
import logging
from hashlib import sha256
from pathlib import Path
-from utils.config import loader_ftp_csv as setting
-from utils.database.connection import connetti_db
-from utils.ftp import user_admin, file_management
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed
+from utils.config import loader_ftp_csv as setting
+from utils.database.connection import connetti_db
+from utils.connect import user_admin, file_management
# Configure logging (moved inside main function)
logger = logging.getLogger(__name__)
class DummySha256Authorizer(DummyAuthorizer):
"""Custom authorizer that uses SHA256 for password hashing and manages users from a database."""
-def __init__(self: object, cfg: object) -> None:
+def __init__(self: object, cfg: dict) -> None:
"""Initializes the authorizer, adds the admin user, and loads users from the database.
Args:
@@ -29,39 +34,45 @@ class DummySha256Authorizer(DummyAuthorizer):
"""
super().__init__()
self.add_user(
-cfg.adminuser[0], cfg.adminuser[1], cfg.adminuser[2], perm=cfg.adminuser[3])
+cfg.adminuser[0], cfg.adminuser[1], cfg.adminuser[2], perm=cfg.adminuser[3]
+)
# Define the database connection
conn = connetti_db(cfg)
# Create a cursor
cur = conn.cursor()
-cur.execute(f'SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE disabled_at IS NULL')
+cur.execute(
+f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE disabled_at IS NULL"
+)
-for ftpuser, hash, virtpath, perm in cur.fetchall():
+for ftpuser, user_hash, virtpath, perm in cur.fetchall():
-self.add_user(ftpuser, hash, virtpath, perm)
-"""
-Create the user's directory if it does not exist.
-"""
+# Create the user's directory if it does not exist.
try:
Path(cfg.virtpath + ftpuser).mkdir(parents=True, exist_ok=True)
-except Exception as e:
-self.responde(f'551 Error in create virtual user path: {e}')
+self.add_user(ftpuser, user_hash, virtpath, perm)
+except Exception as e: # pylint: disable=broad-except
+self.responde(f"551 Error in create virtual user path: {e}")
-def validate_authentication(self: object, username: str, password: str, handler: object) -> None:
-# Validate the user's password against the stored hash
-hash = sha256(password.encode("UTF-8")).hexdigest()
+def validate_authentication(
+self: object, username: str, password: str, handler: object
+) -> None:
+# Validate the user's password against the stored user_hash
+user_hash = sha256(password.encode("UTF-8")).hexdigest()
try:
-if self.user_table[username]["pwd"] != hash:
+if self.user_table[username]["pwd"] != user_hash:
raise KeyError
except KeyError:
raise AuthenticationFailed
class ASEHandler(FTPHandler):
"""Custom FTP handler that extends FTPHandler with custom commands and file handling."""
-def __init__(self: object, conn: object, server: object, ioloop:object=None) -> None:
+def __init__(
+self: object, conn: object, server: object, ioloop: object = None
+) -> None:
"""Initializes the handler, adds custom commands, and sets up command permissions.
Args:
@@ -73,20 +84,44 @@ class ASEHandler(FTPHandler):
self.proto_cmds = FTPHandler.proto_cmds.copy()
# Add custom FTP commands for managing virtual users - command in lowercase
self.proto_cmds.update(
-{'SITE ADDU': dict(perm='M', auth=True, arg=True,
-help='Syntax: SITE <SP> ADDU USERNAME PASSWORD (add virtual user).')}
+{
+"SITE ADDU": dict(
+perm="M",
+auth=True,
+arg=True,
+help="Syntax: SITE <SP> ADDU USERNAME PASSWORD (add virtual user).",
+)
+}
)
self.proto_cmds.update(
-{'SITE DISU': dict(perm='M', auth=True, arg=True,
-help='Syntax: SITE <SP> DISU USERNAME (disable virtual user).')}
+{
+"SITE DISU": dict(
+perm="M",
+auth=True,
+arg=True,
+help="Syntax: SITE <SP> DISU USERNAME (disable virtual user).",
+)
+}
)
self.proto_cmds.update(
-{'SITE ENAU': dict(perm='M', auth=True, arg=True,
-help='Syntax: SITE <SP> ENAU USERNAME (enable virtual user).')}
+{
+"SITE ENAU": dict(
+perm="M",
+auth=True,
+arg=True,
+help="Syntax: SITE <SP> ENAU USERNAME (enable virtual user).",
+)
+}
)
self.proto_cmds.update(
-{'SITE LSTU': dict(perm='M', auth=True, arg=None,
-help='Syntax: SITE <SP> LSTU (list virtual users).')}
+{
+"SITE LSTU": dict(
+perm="M",
+auth=True,
+arg=None,
+help="Syntax: SITE <SP> LSTU (list virtual users).",
+)
+}
)
def on_file_received(self: object, file: str) -> None:
@@ -111,6 +146,7 @@ class ASEHandler(FTPHandler):
def ftp_SITE_LSTU(self: object, line: str) -> None:
return user_admin.ftp_SITE_LSTU(self, line)
def main():
"""Main function to start the FTP server."""
# Load the configuration settings
@@ -136,13 +172,12 @@ def main():
)
# Create and start the FTP server
-server = FTPServer(("0.0.0.0", 2121), handler)
+server = FTPServer(("0.0.0.0", cfg.service_port), handler)
server.serve_forever()
except Exception as e:
-logger.error(
-f"Exit with error: {e}."
-)
+logger.error("Exit with error: %s.", e)
if __name__ == "__main__":
main()
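The authorizer never stores plain passwords: SITE ADDU persists a SHA-256 digest (the comment at the top of env/ftp.ini shows the one-liner used to generate it), and validate_authentication hashes whatever the client sends and compares it with the stored value. A small self-contained sketch of that round trip:

from hashlib import sha256


def password_digest(password: str) -> str:
    """Digest stored for a virtual user (what SITE ADDU would persist)."""
    return sha256(password.encode("UTF-8")).hexdigest()


def check_password(stored_digest: str, presented: str) -> bool:
    """Login check: hash the presented password and compare with the stored digest."""
    return stored_digest == password_digest(presented)


stored = password_digest("example-password")
assert check_password(stored, "example-password")
assert not check_password(stored, "wrong-password")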

View File

@@ -1,15 +1,16 @@
-#!/usr/bin/env python3
+#!.venv/bin/python
"""
Script per prelevare dati da MySQL e inviare comandi SITE FTP
"""
-import mysql.connector
-from utils.database.connection import connetti_db
-from utils.config import users_loader as setting
from ftplib import FTP
import logging
import sys
from typing import List, Tuple
+import mysql.connector
+from utils.database.connection import connetti_db
+from utils.config import users_loader as setting
# Configurazione logging
logging.basicConfig(
@@ -27,19 +28,30 @@ FTP_CONFIG = {
}
def connect_ftp() -> FTP:
-"""Connessione al server FTP"""
+"""
+Establishes a connection to the FTP server using the predefined configuration.
+Returns:
+FTP: An active FTP connection object.
+"""
try:
ftp = FTP()
ftp.connect(FTP_CONFIG['host'], FTP_CONFIG['port'])
ftp.login(FTP_CONFIG['user'], FTP_CONFIG['password'])
logger.info("Connessione FTP stabilita")
return ftp
-except Exception as e:
-logger.error(f"Errore connessione FTP: {e}")
+except Exception as e: # pylint: disable=broad-except
+logger.error("Errore connessione FTP: %s", e)
sys.exit(1)
def fetch_data_from_db(connection: mysql.connector.MySQLConnection) -> List[Tuple]:
-"""Preleva i dati dal database"""
+"""
+Fetches username and password data from the 'ftp_accounts' table in the database.
+Args:
+connection (mysql.connector.MySQLConnection): The database connection object.
+Returns:
+List[Tuple]: A list of tuples, where each tuple contains (username, password).
+"""
try:
cursor = connection.cursor()
@@ -52,28 +64,38 @@ def fetch_data_from_db(connection: mysql.connector.MySQLConnection) -> List[Tupl
cursor.execute(query)
results = cursor.fetchall()
-logger.info(f"Prelevate {len(results)} righe dal database")
+logger.info("Prelevate %s righe dal database", len(results))
return results
except mysql.connector.Error as e:
-logger.error(f"Errore query database: {e}")
+logger.error("Errore query database: %s", e)
return []
finally:
cursor.close()
def send_site_command(ftp: FTP, command: str) -> bool:
-"""Invia un comando SITE al server FTP"""
+"""
+Sends a SITE command to the FTP server.
+Args:
+ftp (FTP): The FTP connection object.
+command (str): The SITE command string to send (e.g., "ADDU username password").
+Returns:
+bool: True if the command was sent successfully, False otherwise.
+"""
try:
# Il comando SITE viene inviato usando sendcmd
response = ftp.sendcmd(f"SITE {command}")
-logger.info(f"Comando SITE '{command}' inviato. Risposta: {response}")
+logger.info("Comando SITE %s inviato. Risposta: %s", command, response)
return True
-except Exception as e:
-logger.error(f"Errore invio comando SITE '{command}': {e}")
+except Exception as e: # pylint: disable=broad-except
+logger.error("Errore invio comando SITE %s: %s", command, e)
return False
def main():
-"""Funzione principale"""
+"""
+Main function to connect to the database, fetch FTP user data, and send SITE ADDU commands to the FTP server.
+"""
logger.info("Avvio script caricamento utenti FTP")
cfg = setting.Config()
@@ -99,7 +121,7 @@ def main():
# Costruisci il comando SITE completo
ftp_site_command = f'addu {username} {password}'
-logger.info(f"Sending ftp command: {ftp_site_command}")
+logger.info("Sending ftp command: %s", ftp_site_command)
# Invia comando SITE
if send_site_command(ftp_connection, ftp_site_command):
@@ -107,24 +129,24 @@ def main():
else:
error_count += 1
-logger.info(f"Elaborazione completata. Successi: {success_count}, Errori: {error_count}")
+logger.info("Elaborazione completata. Successi: %s, Errori: %s", success_count, error_count)
-except Exception as e:
-logger.error(f"Errore generale: {e}")
+except Exception as e: # pylint: disable=broad-except
+logger.error("Errore generale: %s", e)
finally:
# Chiudi connessioni
try:
ftp_connection.quit()
logger.info("Connessione FTP chiusa")
-except Exception as e:
-logger.error(f"Errore chiusura connessione FTP: {e}")
+except Exception as e: # pylint: disable=broad-except
+logger.error("Errore chiusura connessione FTP: %s", e)
try:
db_connection.close()
logger.info("Connessione MySQL chiusa")
-except Exception as e:
-logger.error(f"Errore chiusura connessione MySQL: {e}")
+except Exception as e: # pylint: disable=broad-except
+logger.error("Errore chiusura connessione MySQL: %s", e)
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,7 @@
#!.venv/bin/python
+"""
+Orchestratore dei worker che caricano i dati su dataraw
+"""
# Import necessary libraries
import logging
@@ -7,7 +10,7 @@ import asyncio
# Import custom modules for configuration and database connection
from utils.config import loader_load_data as setting
-from utils.database import CSV_RECEIVED
+from utils.database import WorkflowFlags
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
@@ -19,7 +22,8 @@ CSV_PROCESSING_DELAY = 0.2
# Tempo di attesa se non ci sono record da elaborare
NO_RECORD_SLEEP = 60
-async def worker(worker_id: int, cfg: object, pool: object) -> None:
+async def worker(worker_id: int, cfg: dict, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'elaborazione dei file CSV.
Il worker preleva un record CSV dal database, ne elabora il contenuto
@@ -27,20 +31,23 @@ async def worker(worker_id: int, cfg: object, pool: object) -> None:
Args:
worker_id (int): L'ID univoco del worker.
-cfg (object): L'oggetto di configurazione.
+cfg (dict): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Imposta il context per questo worker
worker_context.set(f"W{worker_id:02d}")
-debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.info("Avviato")
while True:
try:
logger.info("Inizio elaborazione")
-record = await get_next_csv_atomic(pool, cfg.dbrectable, CSV_RECEIVED)
+record = await get_next_csv_atomic(
+pool,
+cfg.dbrectable,
+WorkflowFlags.CSV_RECEIVED,
+WorkflowFlags.DATA_LOADED,
+)
if record:
success = await load_csv(record, cfg, pool)
@@ -51,8 +58,8 @@ async def worker(worker_id: int, cfg: object, pool: object) -> None:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
-except Exception as e:
-logger.error(f"Errore durante l'esecuzione: {e}", exc_info=debug_mode)
+except Exception as e: # pylint: disable=broad-except
+logger.error("Errore durante l'esecuzione: %s", e, exc_info=1)
await asyncio.sleep(1)
@@ -60,21 +67,28 @@ async def load_csv(record: tuple, cfg: object, pool: object) -> bool:
"""Carica ed elabora un record CSV utilizzando il modulo di parsing appropriato.
Args:
-record: Una tupla contenente i dettagli del record CSV da elaborare (id, unit_type, tool_type, unit_name, tool_name).
+record: Una tupla contenente i dettagli del record CSV da elaborare
+(rec_id, unit_type, tool_type, unit_name, tool_name).
cfg: L'oggetto di configurazione contenente i parametri del sistema.
pool (object): Il pool di connessioni al database.
Returns:
True se l'elaborazione del CSV è avvenuta con successo, False altrimenti.
"""
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.debug("Inizio ricerca nuovo CSV da elaborare")
-id, unit_type, tool_type, unit_name, tool_name = [
+rec_id, unit_type, tool_type, unit_name, tool_name = [
x.lower().replace(" ", "_") if isinstance(x, str) else x for x in record
]
logger.info(
-f"Trovato CSV da elaborare: ID={id}, Tipo={unit_type}_{tool_type}, Nome={unit_name}_{tool_name}"
+"Trovato CSV da elaborare: ID=%s, Tipo=%s_%s, Nome=%s_%s",
+rec_id,
+unit_type,
+tool_type,
+unit_name,
+tool_name,
)
# Costruisce il nome del modulo da caricare dinamicamente
@@ -87,27 +101,29 @@ async def load_csv(record: tuple, cfg: object, pool: object) -> bool:
modulo = None
for module_name in module_names:
try:
-logger.debug(f"Caricamento dinamico del modulo: {module_name}")
+logger.debug("Caricamento dinamico del modulo: %s", module_name)
modulo = importlib.import_module(module_name)
-logger.info(f"Funzione 'main_loader' caricata dal modulo {module_name}")
+logger.info("Funzione 'main_loader' caricata dal modulo %s", module_name)
break
except (ImportError, AttributeError) as e:
logger.debug(
-f"Modulo {module_name} non presente o non valido. {e}",
+"Modulo %s non presente o non valido. %s",
+module_name,
+e,
exc_info=debug_mode,
)
if not modulo:
-logger.error(f"Nessun modulo trovato {module_names}")
+logger.error("Nessun modulo trovato %s", module_names)
return False
# Ottiene la funzione 'main_loader' dal modulo
funzione = getattr(modulo, "main_loader")
# Esegui la funzione
-logger.info(f"Elaborazione con modulo {modulo} per ID={id}")
-await funzione(cfg, id, pool)
-logger.info(f"Elaborazione completata per ID={id}")
+logger.info("Elaborazione con modulo %s per ID=%s", modulo, rec_id)
+await funzione(cfg, rec_id, pool)
+logger.info("Elaborazione completata per ID=%s", rec_id)
return True
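Both orchestrators now take their workflow states from WorkflowFlags in utils.database instead of the old module-level constants. Its definition is not part of this diff; the sketch below shows one plausible shape, assuming a bitmask-style flag so that several stages (for example DATA_ELABORATED followed by DUMMY_ELABORATED) can be recorded on the same status value:

from enum import IntFlag, auto


class WorkflowFlags(IntFlag):
    """Hypothetical shape of the workflow flags; the real definition lives in utils.database."""
    CSV_RECEIVED = auto()      # row inserted by the FTP receiver
    DATA_LOADED = auto()       # raw CSV parsed and loaded
    START_ELAB = auto()        # MATLAB elaboration started
    DATA_ELABORATED = auto()   # MATLAB elaboration finished
    DUMMY_ELABORATED = auto()  # elaboration skipped / by-passed
    DATA_SENT = auto()         # results forwarded to the customer


status = WorkflowFlags.CSV_RECEIVED | WorkflowFlags.DATA_LOADED
print(bool(status & WorkflowFlags.DATA_LOADED))  # True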

View File

@@ -35,7 +35,7 @@ def getDataFromCsvAndInsert(pathFile):
# print('Error:', e) # print('Error:', e)
folder_name = pathFile.split("/")[-2]#cartella folder_name = pathFile.split("/")[-2]#cartella
if "[276_208_TS0003]" in pathFile: if "[276_208_TS0003]" in pathFile:
folder_name = "TS0003" folder_name = "TS0003"
elif "[Neuchatel_CDP]" in pathFile: elif "[Neuchatel_CDP]" in pathFile:
@@ -319,7 +319,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -334,7 +334,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][63]), int(resultSoglie[0][64])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][63]), int(resultSoglie[0][64])])
conn.commit() conn.commit()
elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -366,7 +366,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -381,7 +381,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][17]), int(resultSoglie[0][18])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][17]), int(resultSoglie[0][18])])
conn.commit() conn.commit()
elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -413,7 +413,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -428,7 +428,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][69]), int(resultSoglie[0][70])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][69]), int(resultSoglie[0][70])])
conn.commit() conn.commit()
elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -460,7 +460,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -475,7 +475,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][23]), int(resultSoglie[0][24])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][23]), int(resultSoglie[0][24])])
conn.commit() conn.commit()
elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -511,7 +511,7 @@ def getDataFromCsvAndInsert(pathFile):
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
#print(abs(zPenultimo), ultimaDataDatoPenultimo) #print(abs(zPenultimo), ultimaDataDatoPenultimo)
if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ):
#print("creo") #print("creo")
@@ -527,7 +527,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][75]), int(resultSoglie[0][76])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][75]), int(resultSoglie[0][76])])
conn.commit() conn.commit()
elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -560,7 +560,7 @@ def getDataFromCsvAndInsert(pathFile):
result = cursor.fetchall()
if(len(result) <= 0):
#print(abs(zPenultimo), ultimaDataDatoPenultimo)
if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ):
#print("creo")
@@ -576,7 +576,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][29]), int(resultSoglie[0][30])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][29]), int(resultSoglie[0][30])])
conn.commit() conn.commit()
elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -608,7 +608,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -623,7 +623,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][81]), int(resultSoglie[0][82])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][81]), int(resultSoglie[0][82])])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -655,7 +655,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -670,7 +670,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][35]), int(resultSoglie[0][36])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][35]), int(resultSoglie[0][36])])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -702,7 +702,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -717,7 +717,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][87]), int(resultSoglie[0][88])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][87]), int(resultSoglie[0][88])])
conn.commit() conn.commit()
elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -749,7 +749,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -764,7 +764,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][41]), int(resultSoglie[0][42])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][41]), int(resultSoglie[0][42])])
conn.commit() conn.commit()
elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -830,8 +830,8 @@ def getDataFromCsvAndInsert(pathFile):
fdate = drange.split(",")[0]
ldate = drange.split(",")[1]
params = [progetto_id, lavoro_id, coppiaMira[0], fdate, ldate]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisura, l.areaAttenzioneInizioCoppieInc, l.areaInterventoInizioCoppieInc, l.areaInterventoImmediatoInizioCoppieInc,
@@ -856,10 +856,10 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoCoppieSpostLat,
l.sms_livello_dueCoppieSpostLat,
l.sms_livello_treCoppieSpostLat
from sites as s
join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id
@@ -874,8 +874,8 @@ def getDataFromCsvAndInsert(pathFile):
arrayCoppie[lavoro_name][coppia[1]][coppiaMira[1]].append(resultDataCoppie)
else:
params = [progetto_id, lavoro_id, coppiaMira[0]]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisura, l.areaAttenzioneInizioCoppieInc, l.areaInterventoInizioCoppieInc, l.areaInterventoImmediatoInizioCoppieInc,
@@ -900,10 +900,10 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoCoppieSpostLat,
l.sms_livello_dueCoppieSpostLat,
l.sms_livello_treCoppieSpostLat
from sites as s
join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id
@@ -939,18 +939,18 @@ def getDataFromCsvAndInsert(pathFile):
fdate = drange.split(",")[0]
ldate = drange.split(",")[1]
params = [progetto_id, lavoro_id, coppiaMira[0], fdate, ldate]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisuraMuro, l.areaAttenzioneInizioCoppieIncMuro, l.areaInterventoInizioCoppieIncMuro, l.areaInterventoImmediatoInizioCoppieIncMuro,
l.areaAttenzioneInizioCoppieAssestMuro, l.areaInterventoInizioCoppieAssestMuro, l.areaInterventoImmediatoInizioCoppieAssestMuro,
l.areaAttenzioneInizioCoppieSpostLatMuro, l.areaInterventoInizioCoppieSpostLatMuro, l.areaInterventoImmediatoInizioCoppieSpostLatMuro,
l.reportVarInclinMuro, l.reportAssestMuro, l.reportSpostLatMuro, l.parametroLettureMuro
from sites as s
join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id
@@ -965,18 +965,18 @@ def getDataFromCsvAndInsert(pathFile):
arrayCoppieMuro[lavoro_name][coppia[1]][coppiaMira[1]].append(resultDataCoppie)
else:
params = [progetto_id, lavoro_id, coppiaMira[0]]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisuraMuro, l.areaAttenzioneInizioCoppieIncMuro, l.areaInterventoInizioCoppieIncMuro, l.areaInterventoImmediatoInizioCoppieIncMuro,
l.areaAttenzioneInizioCoppieAssestMuro, l.areaInterventoInizioCoppieAssestMuro, l.areaInterventoImmediatoInizioCoppieAssestMuro,
l.areaAttenzioneInizioCoppieSpostLatMuro, l.areaInterventoInizioCoppieSpostLatMuro, l.areaInterventoImmediatoInizioCoppieSpostLatMuro,
l.reportVarInclinMuro, l.reportAssestMuro, l.reportSpostLatMuro, l.parametroLettureMuro
from sites as s
join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id
@@ -997,15 +997,15 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(sql, (lavoro_id, mira_id, mira_id))
result_coppie = cursor.fetchall()
for coppia in result_coppie:
sql = """SELECT lavoro_id, num, lista
FROM upgeo_mire_tralicci
WHERE lavoro_id = %s AND JSON_CONTAINS(lista, CAST(%s AS JSON), '$')
ORDER BY num ASC"""
cursor.execute(sql, (lavoro_id, coppia[0]))
result_tralicci = cursor.fetchall()
for traliccio in result_tralicci:
sql = """SELECT id, name, multipleDateRange
FROM upgeo_mire
WHERE abilitato = 1 AND lavoro_id = %s AND (id = %s OR id = %s)"""
cursor.execute(sql, (coppia[1], coppia[3], coppia[4]))
result_coppia_mire = cursor.fetchall()
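The hunk above resolves, for each coppia, the traliccio groups whose JSON lista contains the mira id and then loads the paired mire. A hedged sketch of the same parameterized JSON_CONTAINS lookup, assuming a DB-API cursor (e.g. mysql-connector); the wrapper function name is illustrative and not part of the diff:

def tralicci_for_mira(cursor, lavoro_id, mira_id):
    # Bind both parameters; JSON_CONTAINS needs the id cast to JSON, as in the query above.
    sql = """SELECT lavoro_id, num, lista
             FROM upgeo_mire_tralicci
             WHERE lavoro_id = %s AND JSON_CONTAINS(lista, CAST(%s AS JSON), '$')
             ORDER BY num ASC"""
    cursor.execute(sql, (lavoro_id, mira_id))
    return cursor.fetchall()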
@@ -1023,14 +1023,14 @@ def getDataFromCsvAndInsert(pathFile):
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio,
s.multipleDateRange AS fasi_lavorazione, l.soglieCoppieUnitaMisuraTraliccio,
l.areaAttenzioneInizioCoppieIncTraliccio, l.areaInterventoInizioCoppieIncTraliccio,
l.areaInterventoImmediatoInizioCoppieIncTraliccio,
l.areaAttenzioneInizioCoppieAssestTraliccio,
l.areaInterventoInizioCoppieAssestTraliccio,
l.areaInterventoImmediatoInizioCoppieAssestTraliccio,
l.areaAttenzioneInizioCoppieSpostLatTraliccio,
l.areaInterventoInizioCoppieSpostLatTraliccio,
l.areaInterventoImmediatoInizioCoppieSpostLatTraliccio,
l.reportVarInclinTraliccio, l.reportAssestTraliccio,
l.reportSpostLatTraliccio, l.parametroLettureTraliccio
FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id
@@ -1060,14 +1060,14 @@ def getDataFromCsvAndInsert(pathFile):
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio,
s.multipleDateRange AS fasi_lavorazione, l.soglieCoppieUnitaMisuraTraliccio,
l.areaAttenzioneInizioCoppieIncTraliccio, l.areaInterventoInizioCoppieIncTraliccio,
l.areaInterventoImmediatoInizioCoppieIncTraliccio,
l.areaAttenzioneInizioCoppieAssestTraliccio,
l.areaInterventoInizioCoppieAssestTraliccio,
l.areaInterventoImmediatoInizioCoppieAssestTraliccio,
l.areaAttenzioneInizioCoppieSpostLatTraliccio,
l.areaInterventoInizioCoppieSpostLatTraliccio,
l.areaInterventoImmediatoInizioCoppieSpostLatTraliccio,
l.reportVarInclinTraliccio, l.reportAssestTraliccio,
l.reportSpostLatTraliccio, l.parametroLettureTraliccio
FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id
@@ -1092,15 +1092,15 @@ def getDataFromCsvAndInsert(pathFile):
print(4, lavoro_id, mira_id)
print()
sql = """
SELECT
mire.id AS mira_id,
mire.name AS mira_name,
mire.multipleDateRange,
mire.progressiva_id,
progressivebinari.name AS progressiva_name,
progressivebinari.offsetInizialeSghembo
FROM upgeo_mire AS mire
JOIN upgeo_mire_progressivebinari AS progressivebinari
ON mire.progressiva_id = progressivebinari.id
WHERE mire.abilitato = 1 AND mire.lavoro_id = %s AND mire.id = %s
ORDER BY progressivebinari.id
@@ -1120,23 +1120,23 @@ def getDataFromCsvAndInsert(pathFile):
fdate, ldate = range_item.split(",")
params = [progressiva_mira[5], progetto_id, lavoro_id, progressiva_mira[0], fdate, ldate]
sql = """
SELECT
d.id AS fake_id, d.id AS id, l.name AS lavoro_name, l.id AS lavoro_id,
s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon,
d.operatore_id, d.strumento_id, d.nota_id, uo.name AS operatore_name,
us.description AS strumento_desc, un.description AS nota_desc,
d.sist_coordinate, l.areaAttenzioneInizio, l.areaInterventoInizio,
l.areaInterventoImmediatoInizio, s.multipleDateRange AS fasi_lavorazione,
m.progressiva_pos, l.passoLong, l.passoTrasv, l.passoSghembo,
l.areaAttenzioneInizioBinariTrasv, l.areaInterventoInizioBinariTrasv,
l.areaInterventoImmediatoInizioBinariTrasv, l.areaAttenzioneInizioBinariLongVert,
l.areaInterventoInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariLongVert,
l.areaAttenzioneInizioBinariLongOriz, l.areaInterventoInizioBinariLongOriz,
l.areaInterventoImmediatoInizioBinariLongOriz, l.areaAttenzioneInizioBinariSghembo,
l.areaInterventoInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariSghembo,
l.reportBinariSpostTrasv, l.reportBinariSpostLongVert, l.reportBinariSpostLongOriz,
l.reportBinariSghembo, l.reportVarInclin, l.reportAssest, l.reportSpostLat,
%s AS offsetInizialeSghembo, l.parametroLettureBinari,
l.email_livello_unoBinariTrasv,
l.email_livello_dueBinariTrasv,
@@ -1162,14 +1162,14 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoBinariSghembo,
l.sms_livello_dueBinariSghembo,
l.sms_livello_treBinariSghembo
FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id
JOIN upgeo_mire AS m ON m.lavoro_id = l.id
JOIN ELABDATAUPGEO AS d ON d.mira_id = m.id
LEFT JOIN upgeo_operatori AS uo ON uo.id = d.operatore_id
LEFT JOIN upgeo_strumenti AS us ON us.id = d.strumento_id
LEFT JOIN upgeo_note AS un ON un.id = d.nota_id
WHERE s.upgeo = 1 AND s.id = %s AND l.id = %s AND m.id = %s
AND d.EventTimestamp BETWEEN %s AND %s"""
if(resultSoglie[0][94] != ''):
sql += " and d.EventTimestamp >= %s"
@@ -1184,23 +1184,23 @@ def getDataFromCsvAndInsert(pathFile):
else:
params = [progressiva_mira[5], progetto_id, lavoro_id, progressiva_mira[0]]
sql = """
SELECT
d.id AS fake_id, d.id AS id, l.name AS lavoro_name, l.id AS lavoro_id,
s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon,
d.operatore_id, d.strumento_id, d.nota_id, uo.name AS operatore_name,
us.description AS strumento_desc, un.description AS nota_desc,
d.sist_coordinate, l.areaAttenzioneInizio, l.areaInterventoInizio,
l.areaInterventoImmediatoInizio, s.multipleDateRange AS fasi_lavorazione,
m.progressiva_pos, l.passoLong, l.passoTrasv, l.passoSghembo,
l.areaAttenzioneInizioBinariTrasv, l.areaInterventoInizioBinariTrasv,
l.areaInterventoImmediatoInizioBinariTrasv, l.areaAttenzioneInizioBinariLongVert,
l.areaInterventoInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariLongVert,
l.areaAttenzioneInizioBinariLongOriz, l.areaInterventoInizioBinariLongOriz,
l.areaInterventoImmediatoInizioBinariLongOriz, l.areaAttenzioneInizioBinariSghembo,
l.areaInterventoInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariSghembo,
l.reportBinariSpostTrasv, l.reportBinariSpostLongVert, l.reportBinariSpostLongOriz,
l.reportBinariSghembo, l.reportVarInclin, l.reportAssest, l.reportSpostLat,
%s AS offsetInizialeSghembo, l.parametroLettureBinari,
l.email_livello_unoBinariTrasv,
l.email_livello_dueBinariTrasv,
@@ -1226,10 +1226,10 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoBinariSghembo,
l.sms_livello_dueBinariSghembo,
l.sms_livello_treBinariSghembo
FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id
JOIN upgeo_mire AS m ON m.lavoro_id = l.id
JOIN ELABDATAUPGEO AS d ON d.mira_id = m.id
LEFT JOIN upgeo_operatori AS uo ON uo.id = d.operatore_id
LEFT JOIN upgeo_strumenti AS us ON us.id = d.strumento_id
LEFT JOIN upgeo_note AS un ON un.id = d.nota_id
@@ -1475,7 +1475,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), 1, dato_date, 41]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), 1, dato_date, 41])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or if not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or (abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ): (abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
@@ -1489,8 +1489,8 @@ def getDataFromCsvAndInsert(pathFile):
if(abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 2, sms_livello_dueBinariTrasv, email_livello_dueBinariTrasv])
conn.commit()
elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
@@ -1509,7 +1509,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 3, sms_livello_treBinariTrasv, email_livello_treBinariTrasv]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 3, sms_livello_treBinariTrasv, email_livello_treBinariTrasv])
conn.commit() conn.commit()
elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or (abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ): (abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
@@ -1607,7 +1607,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0]) print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0])
print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000) print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and
abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000): abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000):
zdx = nearestElementDx[1] zdx = nearestElementDx[1]
zdxPrev = nearestElementDxPrev[1] zdxPrev = nearestElementDxPrev[1]
@@ -1626,7 +1626,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or if not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1641,7 +1641,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 2, "R", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 2, "R", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1660,7 +1660,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 3, "R", sms_livello_treBinariLongVert, email_livello_treBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 3, "R", sms_livello_treBinariLongVert, email_livello_treBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1678,7 +1678,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0]) print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0])
print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000) print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and
abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000): abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000):
zsx = nearestElementSx[1] zsx = nearestElementSx[1]
zsxPrev = nearestElementSxPrev[1] zsxPrev = nearestElementSxPrev[1]
@@ -1697,7 +1697,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or if not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1712,7 +1712,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 2, "L", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 2, "L", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1731,7 +1731,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 3, "L", sms_livello_treBinariLongVert, email_livello_treBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 3, "L", sms_livello_treBinariLongVert, email_livello_treBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1825,7 +1825,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0]) print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0])
print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000) print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and
abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000): abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000):
ndx = nearestElementDx[1] ndx = nearestElementDx[1]
ndx0 = arrDx[0][1] ndx0 = arrDx[0][1]
@@ -1856,7 +1856,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#not already present if(len(resultAlarm) <= 0):#not already present
if not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or if not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1871,7 +1871,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 2, "R", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 2, "R", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1890,7 +1890,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 3, "R", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 3, "R", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1908,7 +1908,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0]) print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0])
print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000) print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and
abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000): abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000):
nsx = nearestElementSx[1] nsx = nearestElementSx[1]
nsx0 = arrSx[0][1] nsx0 = arrSx[0][1]
@@ -1939,7 +1939,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#not already present if(len(resultAlarm) <= 0):#not already present
if not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or if not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1954,7 +1954,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 2, "L", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 2, "L", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1973,7 +1973,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 3, "L", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 3, "L", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -2066,9 +2066,9 @@ def getDataFromCsvAndInsert(pathFile):
max_millis = max(nearestElementDx[0], nearestElementSx[0], nearestElementDxPenultimo[0], nearestElementSxPenultimo[0]) max_millis = max(nearestElementDx[0], nearestElementSx[0], nearestElementDxPenultimo[0], nearestElementSxPenultimo[0])
dato_date = datetime.fromtimestamp(max_millis / 1000).strftime("%Y-%m-%d %H:%M:%S") dato_date = datetime.fromtimestamp(max_millis / 1000).strftime("%Y-%m-%d %H:%M:%S")
if ( if (
abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and
abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000 and abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000 and
abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and
abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000): abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000):
zdx = nearestElementDx[1] zdx = nearestElementDx[1]
zdxPrev = nearestElementDxPrev[1] zdxPrev = nearestElementDxPrev[1]
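As an aside on the hunk above: dato_date is the newest of four epoch-millisecond timestamps, converted to a MySQL-style string. A minimal reproduction of that conversion (the sample values are invented):
from datetime import datetime

# Invented millisecond timestamps standing in for nearestElementDx[0], nearestElementSxPenultimo[0], etc.
readings_ms = [1726430400000, 1726430415000, 1726426800000, 1726426815000]
max_millis = max(readings_ms)
dato_date = datetime.fromtimestamp(max_millis / 1000).strftime("%Y-%m-%d %H:%M:%S")
print(dato_date)  # local-time string such as "2024-09-15 22:00:15"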
@@ -2095,7 +2095,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 42]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 42])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#not already present if(len(resultAlarm) <= 0):#not already present
if not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or if not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or (abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ): (abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
@@ -2110,7 +2110,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 2, sms_livello_dueBinariSghembo, email_livello_dueBinariSghembo]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 2, sms_livello_dueBinariSghembo, email_livello_dueBinariSghembo])
conn.commit() conn.commit()
elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or (abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ): (abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
@@ -2129,7 +2129,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 3, sms_livello_treBinariSghembo, email_livello_treBinariSghembo]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 3, sms_livello_treBinariSghembo, email_livello_treBinariSghembo])
conn.commit() conn.commit()
elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or (abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ): (abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
@@ -2417,7 +2417,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], 1, dato_date, 11]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], 1, dato_date, 11])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#not already present if(len(resultAlarm) <= 0):#not already present
if not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or if not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or (abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ): (abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
@@ -2432,7 +2432,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 2, soglieCoppieUnitaMisura, sms_livello_dueCoppieInc, email_livello_dueCoppieInc]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 2, soglieCoppieUnitaMisura, sms_livello_dueCoppieInc, email_livello_dueCoppieInc])
conn.commit() conn.commit()
elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or (abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ): (abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
@@ -2451,7 +2451,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 3, soglieCoppieUnitaMisura, sms_livello_treCoppieInc, email_livello_treCoppieInc]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 3, soglieCoppieUnitaMisura, sms_livello_treCoppieInc, email_livello_treCoppieInc])
conn.commit() conn.commit()
elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or (abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ): (abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
@@ -2470,7 +2470,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 12]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 12])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#not already present if(len(resultAlarm) <= 0):#not already present
if not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or if not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or (abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ): (abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
@@ -2485,7 +2485,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 2, "mm", sms_livello_dueCoppieAssest, email_livello_dueCoppieAssest]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 2, "mm", sms_livello_dueCoppieAssest, email_livello_dueCoppieAssest])
conn.commit() conn.commit()
elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or (abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ): (abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
@@ -2504,7 +2504,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 3, "mm", sms_livello_treCoppieAssest, email_livello_treCoppieAssest]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 3, "mm", sms_livello_treCoppieAssest, email_livello_treCoppieAssest])
conn.commit() conn.commit()
elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or (abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ): (abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
@@ -2523,7 +2523,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 13]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 13])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#not already present if(len(resultAlarm) <= 0):#not already present
if not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or if not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or (abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
@@ -2538,7 +2538,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 2, "mm", sms_livello_dueCoppieSpostLat, email_livello_dueCoppieSpostLat]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 2, "mm", sms_livello_dueCoppieSpostLat, email_livello_dueCoppieSpostLat])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or (abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
@@ -2557,7 +2557,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 3, "mm", sms_livello_treCoppieSpostLat, email_livello_treCoppieSpostLat]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 3, "mm", sms_livello_treCoppieSpostLat, email_livello_treCoppieSpostLat])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or (abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
@@ -2565,14 +2565,17 @@ def getDataFromCsvAndInsert(pathFile):
conn.commit() conn.commit()
cursor.close() cursor.close()
conn.close() conn.close()
"""
if "[276_208_TS0003]" in pathFile or "[Neuchatel_CDP]" in pathFile or "[TS0006_EP28]" in pathFile or "[TS0007_ChesaArcoiris]" in pathFile or "[TS0006_EP28_3]" in pathFile or "[TS0006_EP28_4]" in pathFile or "[TS0006_EP28_5]" in pathFile or "[TS18800]" in pathFile or "[Granges_19 100]" in pathFile or "[Granges_19 200]" in pathFile or "[Chesa_Arcoiris_2]" in pathFile or "[TS0006_EP28_1]" in pathFile or "[TS_PS_Petites_Croisettes]" in pathFile or "[_Chesa_Arcoiris_1]" in pathFile or "[TS-VIME]" in pathFile:#sposto il file nella cartella della stazione corretta if "[276_208_TS0003]" in pathFile or "[Neuchatel_CDP]" in pathFile or "[TS0006_EP28]" in pathFile or "[TS0007_ChesaArcoiris]" in pathFile or "[TS0006_EP28_3]" in pathFile or "[TS0006_EP28_4]" in pathFile or "[TS0006_EP28_5]" in pathFile or "[TS18800]" in pathFile or "[Granges_19 100]" in pathFile or "[Granges_19 200]" in pathFile or "[Chesa_Arcoiris_2]" in pathFile or "[TS0006_EP28_1]" in pathFile or "[TS_PS_Petites_Croisettes]" in pathFile or "[_Chesa_Arcoiris_1]" in pathFile or "[TS-VIME]" in pathFile:#sposto il file nella cartella della stazione corretta
orig_folder = pathFile.split("/")[-2] orig_folder = pathFile.split("/")[-2]
new_pathFile = pathFile.replace(orig_folder,"home/"+folder_name) new_pathFile = pathFile.replace(orig_folder,"home/"+folder_name)
shutil.move(pathFile, new_pathFile) shutil.move(pathFile, new_pathFile)
if not os.path.exists(pathFile): if not os.path.exists(pathFile):
print(f"File moved successfully from {pathFile} to {new_pathFile}\n") print(f"File moved successfully from {pathFile} to {new_pathFile}\n")
else: else:
print("File move operation failed.\n") print("File move operation failed.\n")
"""
#except Exception as e: #except Exception as e:
# print(f"An unexpected error occurred: {str(e)}\n") # print(f"An unexpected error occurred: {str(e)}\n")
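The hunks above repeat the same three-band threshold test (attention, intervention, immediate intervention) for every monitored quantity, and insert an alarm row only when the penultimate reading was still outside every band, i.e. when the value has just crossed a threshold. A possible way to factor that check out is sketched below; classify_band and its parameter names are illustrative and are not part of the repository:

def classify_band(value, attention, intervention, immediate, max_value):
    """Return the alarm level (1, 2 or 3) whose band abs(value) falls into, or None.
    Mirrors the chained conditions in the diff: level 1 covers [attention, intervention],
    level 2 [intervention, immediate], level 3 [immediate, max_value]; bounds are compared
    as absolute values and adjacent bands share their boundary, as in the original code."""
    v = abs(value)
    for level, lower, upper in (
            (1, abs(float(attention)), abs(float(intervention))),
            (2, abs(float(intervention)), abs(float(immediate))),
            (3, abs(float(immediate)), abs(max_value))):
        if lower <= v <= upper:
            return level
    return None

# An alarm is only raised when the previous reading was outside every band:
previous_was_outside = classify_band(0.8, 1.5, 3.0, 5.0, 1000) is None   # True
current_level = classify_band(-3.4, 1.5, 3.0, 5.0, 1000)                 # 2

The elif not (...) chains in the diff are the negation of this membership test applied to the *_penultimo values.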

View File

@@ -1,6 +1,6 @@
from configparser import ConfigParser from configparser import ConfigParser
def read_db_config(filename='/home/battilo/scripts/config.ini', section='mysql'): def read_db_config(filename='../env/config.ini', section='mysql'):
parser = ConfigParser() parser = ConfigParser()
parser.read(filename) parser.read(filename)
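The diff above only shows the changed default path (from the absolute /home/battilo/scripts/config.ini to the relative ../env/config.ini); the rest of read_db_config is unchanged and not displayed. For context, a typical ConfigParser-based body compatible with the MySQLConnection(**db_config) calls elsewhere in this changeset would look like the sketch below, which is an assumption rather than the repository's actual code:

from configparser import ConfigParser

def read_db_config(filename='../env/config.ini', section='mysql'):
    """Read one INI section into a dict of keyword arguments for MySQLConnection."""
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        # Assumed error handling; the real function may differ.
        raise Exception(f'{section} section not found in {filename}')
    return dict(parser.items(section))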

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from decimal import Decimal
from datetime import datetime
import ezodf
def getDataFromCsv(pathFile):
try:
folder_path, file_with_extension = os.path.split(pathFile)
unit_name = os.path.basename(folder_path)#unitname
tool_name, _ = os.path.splitext(file_with_extension)#toolname
tool_name = tool_name.replace("HIRPINIA_", "")
tool_name = tool_name.split("_")[0]
print(unit_name, tool_name)
datiRaw = []
doc = ezodf.opendoc(pathFile)
for sheet in doc.sheets:
node_num = sheet.name.replace("S-", "")
print(f"Sheet Name: {sheet.name}")
rows_to_skip = 2
for i, row in enumerate(sheet.rows()):
if i < rows_to_skip:
continue
row_data = [cell.value for cell in row]
date_time = datetime.strptime(row_data[0], "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S").split(" ")
date = date_time[0]
time = date_time[1]
val0 = row_data[2]
val1 = row_data[4]
val2 = row_data[6]
val3 = row_data[8]
datiRaw.append((unit_name, tool_name, node_num, date, time, -1, -273, val0, val1, val2, val3))
try:
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor(dictionary=True)
queryRaw = "insert ignore into RAWDATACOR(UnitName,ToolNameID,NodeNum,EventDate,EventTime,BatLevel,Temperature,Val0,Val1,Val2,Val3) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.executemany(queryRaw, datiRaw)
conn.commit()
except Error as e:
print('Error:', e)
finally:
queryMatlab = "select m.matcall from tools as t join units as u on u.id=t.unit_id join matfuncs as m on m.id=t.matfunc where u.name=%s and t.name=%s"
cursor.execute(queryMatlab, [unit_name, tool_name])
resultMatlab = cursor.fetchall()
if(resultMatlab):
print("Avvio "+str(resultMatlab[0]["matcall"]))
os.system("cd /usr/local/matlab_func/; ./run_"+str(resultMatlab[0]["matcall"])+".sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+str(unit_name)+" "+str(tool_name)+"")
cursor.close()
conn.close()
except Exception as e:
print(f"An unexpected error occurred: {str(e)}\n")
def main():
print("Avviato.")
getDataFromCsv(sys.argv[1])
print("Finito.")
if __name__ == '__main__':
main()
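The importer above derives the unit from the parent folder and the tool from the HIRPINIA_-prefixed file name before loading each S-<n> sheet. A small, self-contained illustration of that path parsing (the path and names are invented):

import os

pathFile = "/home/ftpuser/ID0123/HIRPINIA_DT0007_20250915.ods"   # invented example path

folder_path, file_with_extension = os.path.split(pathFile)
unit_name = os.path.basename(folder_path)                        # "ID0123"
tool_name, _ = os.path.splitext(file_with_extension)             # "HIRPINIA_DT0007_20250915"
tool_name = tool_name.replace("HIRPINIA_", "").split("_")[0]     # "DT0007"
print(unit_name, tool_name)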

View File

@@ -0,0 +1,305 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from decimal import Decimal
from datetime import datetime
def insertData(dati):
#print(dati)
#print(len(dati))
if(len(dati) > 0):
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor()
if(len(dati) == 2):
u = ""
t = ""
rawdata = dati[0]
elabdata = dati[1]
if(len(rawdata) > 0):
for r in rawdata:
#print(r)
#print(len(r))
if(len(r) == 6):#node1
unitname = r[0]
toolname = r[1]
nodenum = r[2]
pressure = Decimal(r[3])*100
date = r[4]
time = r[5]
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [unitname, toolname, nodenum])
result = cursor.fetchall()
if(result):
if(result[0][8] is None):
datetimeOld = datetime.strptime(str(result[0][4]) + " " + str(result[0][5]), "%Y-%m-%d %H:%M:%S")
datetimeNew = datetime.strptime(str(date) + " " + str(time), "%Y-%m-%d %H:%M:%S")
dateDiff = datetimeNew - datetimeOld
if(dateDiff.total_seconds() / 3600 >= 5):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "UPDATE RAWDATACOR SET val0=%s, EventDate=%s, EventTime=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND val0 is NULL ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [pressure, date, time, unitname, toolname, nodenum])
conn.commit()
except Error as e:
print('Error:', e)
elif(result[0][8] is not None):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
except Error as e:
print('Error:', e)
else:#other nodes 2->5
unitname = r[0]
toolname = r[1]
nodenum = r[2]
freqinhz = r[3]
therminohms = r[4]
freqindigit = r[5]
date = r[6]
time = r[7]
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [unitname, toolname, nodenum])
result = cursor.fetchall()
if(result):
if(result[0][8] is None):
query = "UPDATE RAWDATACOR SET val0=%s, val1=%s, val2=%s, EventDate=%s, EventTime=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND val0 is NULL ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [freqinhz, therminohms, freqindigit, date, time, unitname, toolname, nodenum])
conn.commit()
except Error as e:
print('Error:', e)
elif(result[0][8] is not None):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, val1, val2, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, freqinhz, therminohms, freqindigit, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, val1, val2, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, freqinhz, therminohms, freqindigit, -1, -273])
conn.commit()
except Error as e:
print('Error:', e)
except Error as e:
print('Error:', e)
if(len(elabdata) > 0):
for e in elabdata:
#print(e)
#print(len(e))
if(len(e) == 6):#node1
unitname = e[0]
toolname = e[1]
nodenum = e[2]
pressure = Decimal(e[3])*100
date = e[4]
time = e[5]
try:
query = "INSERT INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [unitname, toolname, nodenum, date, time, pressure])
conn.commit()
except Error as e:
print('Error:', e)
else:#other nodes 2->5
unitname = e[0]
toolname = e[1]
u = unitname
t = toolname
nodenum = e[2]
pch = e[3]
tch = e[4]
date = e[5]
time = e[6]
try:
query = "INSERT INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift, T_node) VALUES(%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [unitname, toolname, nodenum, date, time, pch, tch])
conn.commit()
except Error as e:
print('Error:', e)
#os.system("cd /usr/local/matlab_func/; ./run_ATD_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+u+" "+t+"")
else:
for r in dati:
#print(r)
unitname = r[0]
toolname = r[1]
nodenum = r[2]
date = r[3]
time = r[4]
battery = r[5]
temperature = r[6]
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [unitname, toolname, nodenum])
result = cursor.fetchall()
if(result):
if(result[0][25] is None or result[0][25] == -1.00):
datetimeOld = datetime.strptime(str(result[0][4]) + " " + str(result[0][5]), "%Y-%m-%d %H:%M:%S")
datetimeNew = datetime.strptime(str(date) + " " + str(time), "%Y-%m-%d %H:%M:%S")
dateDiff = datetimeNew - datetimeOld
#print(dateDiff.total_seconds() / 3600)
if(dateDiff.total_seconds() / 3600 >= 5):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "UPDATE RAWDATACOR SET BatLevelModule=%s, TemperatureModule=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND (BatLevelModule is NULL or BatLevelModule = -1.00) ORDER BY EventDate desc,EventTime desc limit 1"
try:
cursor.execute(query, [battery, temperature, unitname, toolname, nodenum])
conn.commit()
except Error as e:
print('Error:', e)
elif(result[0][25] is not None and result[0][25] != -1.00):
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
conn.commit()
except Error as e:
print('Error:', e)
else:
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
try:
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
conn.commit()
except Error as e:
print('Error:', e)
except Error as e:
print('Error:', e)
cursor.close()
conn.close()
def getDataFromCsv(pathFile):
with open(pathFile, 'r') as file:
data = file.readlines()
data = [row.rstrip() for row in data]
serial_number = data[0].split(",")[1]
data = data[10:] #remove header rows
dati = []
rawDatiReadings = []#tmp
elabDatiReadings = []#tmp
datiReadings = []
i = 0
unit = ""
tool = ""
#row = data[0]#used when there was no for loop and only a single row
for row in data:#handles multiple rows
row = row.split(",")
if i == 0:
query = "SELECT unit_name, tool_name FROM sisgeo_tools WHERE serial_number='"+serial_number+"'"
try:
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor()
cursor.execute(query)
result = cursor.fetchall()
except Error as e:
print('Error:', e)
unit = result[0][0]
tool = result[0][1]
#print(result[0][0])
#print(result[0][1])
if("health" in pathFile):
datetime = str(row[0]).replace("\"", "").split(" ")
date = datetime[0]
time = datetime[1]
battery = row[1]
temperature = row[2]
dati.append((unit, tool, 1, date, time, battery, temperature))
dati.append((unit, tool, 2, date, time, battery, temperature))
dati.append((unit, tool, 3, date, time, battery, temperature))
dati.append((unit, tool, 4, date, time, battery, temperature))
dati.append((unit, tool, 5, date, time, battery, temperature))
else:
datetime = str(row[0]).replace("\"", "").split(" ")
date = datetime[0]
time = datetime[1]
atmpressure = row[1]#node1
#raw
freqinhzch1 = row[2]#node2
freqindigitch1 = row[3]#node2
thermResInOhmsch1 = row[4]#node2
freqinhzch2 = row[5]#node3
freqindigitch2 = row[6]#node3
thermResInOhmsch2 = row[7]#node3
freqinhzch3 = row[8]#node4
freqindigitch3 = row[9]#node4
thermResInOhmsch3 = row[10]#node4
freqinhzch4 = row[11]#node5
freqindigitch4 = row[12]#node5
thermResInOhmsch4 = row[13]#node5
#elab
pch1 = row[18]#node2
tch1 = row[19]#node2
pch2 = row[20]#node3
tch2 = row[21]#node3
pch3 = row[22]#node4
tch3 = row[23]#node4
pch4 = row[24]#node5
tch4 = row[25]#node5
rawDatiReadings.append((unit, tool, 1, atmpressure, date, time))
rawDatiReadings.append((unit, tool, 2, freqinhzch1, thermResInOhmsch1, freqindigitch1, date, time))
rawDatiReadings.append((unit, tool, 3, freqinhzch2, thermResInOhmsch2, freqindigitch2, date, time))
rawDatiReadings.append((unit, tool, 4, freqinhzch3, thermResInOhmsch3, freqindigitch3, date, time))
rawDatiReadings.append((unit, tool, 5, freqinhzch4, thermResInOhmsch4, freqindigitch4, date, time))
elabDatiReadings.append((unit, tool, 1, atmpressure, date, time))
elabDatiReadings.append((unit, tool, 2, pch1, tch1, date, time))
elabDatiReadings.append((unit, tool, 3, pch2, tch2, date, time))
elabDatiReadings.append((unit, tool, 4, pch3, tch3, date, time))
elabDatiReadings.append((unit, tool, 5, pch4, tch4, date, time))
#[raw],[elab]#layout used when there was only a single row
#dati = [
# [
# (unit, tool, 1, atmpressure, date, time),
# (unit, tool, 2, freqinhzch1, thermResInOhmsch1, freqindigitch1, date, time),
# (unit, tool, 3, freqinhzch2, thermResInOhmsch2, freqindigitch2, date, time),
# (unit, tool, 4, freqinhzch3, thermResInOhmsch3, freqindigitch3, date, time),
# (unit, tool, 5, freqinhzch4, thermResInOhmsch4, freqindigitch4, date, time),
# ], [
# (unit, tool, 1, atmpressure, date, time),
# (unit, tool, 2, pch1, tch1, date, time),
# (unit, tool, 3, pch2, tch2, date, time),
# (unit, tool, 4, pch3, tch3, date, time),
# (unit, tool, 5, pch4, tch4, date, time),
# ]
# ]
i+=1
#print(dati)
if(len(rawDatiReadings) > 0 or len(elabDatiReadings) > 0):
datiReadings = [rawDatiReadings, elabDatiReadings]
if(len(datiReadings) > 0):
return datiReadings
return dati
def main():
insertData(getDataFromCsv(sys.argv[1]))
if __name__ == '__main__':
main()
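In the script above, an incoming raw or health reading either completes the most recent half-filled RAWDATACOR row (UPDATE) or starts a new one (INSERT), depending on whether the stored reading is at least five hours older. That decision, isolated as a small helper for clarity (the function name is illustrative, not from the repository):

from datetime import datetime

def should_insert_new_row(last_date, last_time, new_date, new_time, gap_hours=5):
    """True when the incoming reading is at least gap_hours newer than the stored one,
    so the pending row belongs to an older acquisition and must not be completed in place."""
    old = datetime.strptime(f"{last_date} {last_time}", "%Y-%m-%d %H:%M:%S")
    new = datetime.strptime(f"{new_date} {new_time}", "%Y-%m-%d %H:%M:%S")
    return (new - old).total_seconds() / 3600 >= gap_hours

print(should_insert_new_row("2025-09-15", "08:00:00", "2025-09-15", "14:00:00"))  # True
print(should_insert_new_row("2025-09-15", "08:00:00", "2025-09-15", "10:30:00"))  # False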

306
src/old_scripts/sorotecPini.py Executable file
View File

@@ -0,0 +1,306 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from datetime import datetime
import math
import shutil
def removeDuplicates(lst):
return list(set([i for i in lst]))
def getDataFromCsvAndInsert(pathFile):
try:
print(pathFile)
folder_name = pathFile.split("/")[-2]#folder
with open(pathFile, 'r') as file:
data = file.readlines()
data = [row.rstrip() for row in data]
if(len(data) > 0 and data is not None):
if(folder_name == "ID0247"):
unit_name = "ID0247"
tool_name = "DT0001"
data.pop(0) #remove header
data.pop(0)
data.pop(0)
data.pop(0)
data = [element for element in data if element != ""]
try:
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor()
queryElab = "insert ignore into ELABDATADISP(UnitName,ToolNameID,NodeNum,EventDate,EventTime,load_value) values (%s,%s,%s,%s,%s,%s)"
queryRaw = "insert ignore into RAWDATACOR(UnitName,ToolNameID,NodeNum,EventDate,EventTime,BatLevel,Temperature,Val0) values (%s,%s,%s,%s,%s,%s,%s,%s)"
if("_1_" in pathFile):
print("File tipo 1.\n")
#print(unit_name, tool_name)
dataToInsertElab = []
dataToInsertRaw = []
for row in data:
rowSplitted = row.replace("\"","").split(";")
eventTimestamp = rowSplitted[0].split(" ")
date = eventTimestamp[0].split("-")
date = date[2]+"-"+date[1]+"-"+date[0]
time = eventTimestamp[1]
an3 = rowSplitted[1]
an4 = rowSplitted[2]#V unit battery
OUTREG2 = rowSplitted[3]
E8_181_CH2 = rowSplitted[4]#2
E8_181_CH3 = rowSplitted[5]#3
E8_181_CH4 = rowSplitted[6]#4
E8_181_CH5 = rowSplitted[7]#5
E8_181_CH6 = rowSplitted[8]#6
E8_181_CH7 = rowSplitted[9]#7
E8_181_CH8 = rowSplitted[10]#8
E8_182_CH1 = rowSplitted[11]#9
E8_182_CH2 = rowSplitted[12]#10
E8_182_CH3 = rowSplitted[13]#11
E8_182_CH4 = rowSplitted[14]#12
E8_182_CH5 = rowSplitted[15]#13
E8_182_CH6 = rowSplitted[16]#14
E8_182_CH7 = rowSplitted[17]#15
E8_182_CH8 = rowSplitted[18]#16
E8_183_CH1 = rowSplitted[19]#17
E8_183_CH2 = rowSplitted[20]#18
E8_183_CH3 = rowSplitted[21]#19
E8_183_CH4 = rowSplitted[22]#20
E8_183_CH5 = rowSplitted[23]#21
E8_183_CH6 = rowSplitted[24]#22
E8_183_CH7 = rowSplitted[25]#23
E8_183_CH8 = rowSplitted[26]#24
E8_184_CH1 = rowSplitted[27]#25
E8_184_CH2 = rowSplitted[28]#26
E8_184_CH3 = rowSplitted[29]#27 mv/V
E8_184_CH4 = rowSplitted[30]#28 mv/V
E8_184_CH5 = rowSplitted[31]#29 mv/V
E8_184_CH6 = rowSplitted[32]#30 mv/V
E8_184_CH7 = rowSplitted[33]#31 mv/V
E8_184_CH8 = rowSplitted[34]#32 mv/V
E8_181_CH1 = rowSplitted[35]#1
an1 = rowSplitted[36]
an2 = rowSplitted[37]
#print(unit_name, tool_name, 1, E8_181_CH1)
#print(unit_name, tool_name, 2, E8_181_CH2)
#print(unit_name, tool_name, 3, E8_181_CH3)
#print(unit_name, tool_name, 4, E8_181_CH4)
#print(unit_name, tool_name, 5, E8_181_CH5)
#print(unit_name, tool_name, 6, E8_181_CH6)
#print(unit_name, tool_name, 7, E8_181_CH7)
#print(unit_name, tool_name, 8, E8_181_CH8)
#print(unit_name, tool_name, 9, E8_182_CH1)
#print(unit_name, tool_name, 10, E8_182_CH2)
#print(unit_name, tool_name, 11, E8_182_CH3)
#print(unit_name, tool_name, 12, E8_182_CH4)
#print(unit_name, tool_name, 13, E8_182_CH5)
#print(unit_name, tool_name, 14, E8_182_CH6)
#print(unit_name, tool_name, 15, E8_182_CH7)
#print(unit_name, tool_name, 16, E8_182_CH8)
#print(unit_name, tool_name, 17, E8_183_CH1)
#print(unit_name, tool_name, 18, E8_183_CH2)
#print(unit_name, tool_name, 19, E8_183_CH3)
#print(unit_name, tool_name, 20, E8_183_CH4)
#print(unit_name, tool_name, 21, E8_183_CH5)
#print(unit_name, tool_name, 22, E8_183_CH6)
#print(unit_name, tool_name, 23, E8_183_CH7)
#print(unit_name, tool_name, 24, E8_183_CH8)
#print(unit_name, tool_name, 25, E8_184_CH1)
#print(unit_name, tool_name, 26, E8_184_CH2)
#print(unit_name, tool_name, 27, E8_184_CH3)
#print(unit_name, tool_name, 28, E8_184_CH4)
#print(unit_name, tool_name, 29, E8_184_CH5)
#print(unit_name, tool_name, 30, E8_184_CH6)
#print(unit_name, tool_name, 31, E8_184_CH7)
#print(unit_name, tool_name, 32, E8_184_CH8)
#---------------------------------------------------------------------------------------
dataToInsertRaw.append((unit_name, tool_name, 1, date, time, an4, -273, E8_181_CH1))
dataToInsertRaw.append((unit_name, tool_name, 2, date, time, an4, -273, E8_181_CH2))
dataToInsertRaw.append((unit_name, tool_name, 3, date, time, an4, -273, E8_181_CH3))
dataToInsertRaw.append((unit_name, tool_name, 4, date, time, an4, -273, E8_181_CH4))
dataToInsertRaw.append((unit_name, tool_name, 5, date, time, an4, -273, E8_181_CH5))
dataToInsertRaw.append((unit_name, tool_name, 6, date, time, an4, -273, E8_181_CH6))
dataToInsertRaw.append((unit_name, tool_name, 7, date, time, an4, -273, E8_181_CH7))
dataToInsertRaw.append((unit_name, tool_name, 8, date, time, an4, -273, E8_181_CH8))
dataToInsertRaw.append((unit_name, tool_name, 9, date, time, an4, -273, E8_182_CH1))
dataToInsertRaw.append((unit_name, tool_name, 10, date, time, an4, -273, E8_182_CH2))
dataToInsertRaw.append((unit_name, tool_name, 11, date, time, an4, -273, E8_182_CH3))
dataToInsertRaw.append((unit_name, tool_name, 12, date, time, an4, -273, E8_182_CH4))
dataToInsertRaw.append((unit_name, tool_name, 13, date, time, an4, -273, E8_182_CH5))
dataToInsertRaw.append((unit_name, tool_name, 14, date, time, an4, -273, E8_182_CH6))
dataToInsertRaw.append((unit_name, tool_name, 15, date, time, an4, -273, E8_182_CH7))
dataToInsertRaw.append((unit_name, tool_name, 16, date, time, an4, -273, E8_182_CH8))
dataToInsertRaw.append((unit_name, tool_name, 17, date, time, an4, -273, E8_183_CH1))
dataToInsertRaw.append((unit_name, tool_name, 18, date, time, an4, -273, E8_183_CH2))
dataToInsertRaw.append((unit_name, tool_name, 19, date, time, an4, -273, E8_183_CH3))
dataToInsertRaw.append((unit_name, tool_name, 20, date, time, an4, -273, E8_183_CH4))
dataToInsertRaw.append((unit_name, tool_name, 21, date, time, an4, -273, E8_183_CH5))
dataToInsertRaw.append((unit_name, tool_name, 22, date, time, an4, -273, E8_183_CH6))
dataToInsertRaw.append((unit_name, tool_name, 23, date, time, an4, -273, E8_183_CH7))
dataToInsertRaw.append((unit_name, tool_name, 24, date, time, an4, -273, E8_183_CH8))
dataToInsertRaw.append((unit_name, tool_name, 25, date, time, an4, -273, E8_184_CH1))
dataToInsertRaw.append((unit_name, tool_name, 26, date, time, an4, -273, E8_184_CH2))
#---------------------------------------------------------------------------------------
dataToInsertElab.append((unit_name, tool_name, 1, date, time, E8_181_CH1))
dataToInsertElab.append((unit_name, tool_name, 2, date, time, E8_181_CH2))
dataToInsertElab.append((unit_name, tool_name, 3, date, time, E8_181_CH3))
dataToInsertElab.append((unit_name, tool_name, 4, date, time, E8_181_CH4))
dataToInsertElab.append((unit_name, tool_name, 5, date, time, E8_181_CH5))
dataToInsertElab.append((unit_name, tool_name, 6, date, time, E8_181_CH6))
dataToInsertElab.append((unit_name, tool_name, 7, date, time, E8_181_CH7))
dataToInsertElab.append((unit_name, tool_name, 8, date, time, E8_181_CH8))
dataToInsertElab.append((unit_name, tool_name, 9, date, time, E8_182_CH1))
dataToInsertElab.append((unit_name, tool_name, 10, date, time, E8_182_CH2))
dataToInsertElab.append((unit_name, tool_name, 11, date, time, E8_182_CH3))
dataToInsertElab.append((unit_name, tool_name, 12, date, time, E8_182_CH4))
dataToInsertElab.append((unit_name, tool_name, 13, date, time, E8_182_CH5))
dataToInsertElab.append((unit_name, tool_name, 14, date, time, E8_182_CH6))
dataToInsertElab.append((unit_name, tool_name, 15, date, time, E8_182_CH7))
dataToInsertElab.append((unit_name, tool_name, 16, date, time, E8_182_CH8))
dataToInsertElab.append((unit_name, tool_name, 17, date, time, E8_183_CH1))
dataToInsertElab.append((unit_name, tool_name, 18, date, time, E8_183_CH2))
dataToInsertElab.append((unit_name, tool_name, 19, date, time, E8_183_CH3))
dataToInsertElab.append((unit_name, tool_name, 20, date, time, E8_183_CH4))
dataToInsertElab.append((unit_name, tool_name, 21, date, time, E8_183_CH5))
dataToInsertElab.append((unit_name, tool_name, 22, date, time, E8_183_CH6))
dataToInsertElab.append((unit_name, tool_name, 23, date, time, E8_183_CH7))
dataToInsertElab.append((unit_name, tool_name, 24, date, time, E8_183_CH8))
dataToInsertElab.append((unit_name, tool_name, 25, date, time, E8_184_CH1))
dataToInsertElab.append((unit_name, tool_name, 26, date, time, E8_184_CH2))
#---------------------------------------------------------------------------------------
cursor.executemany(queryElab, dataToInsertElab)
cursor.executemany(queryRaw, dataToInsertRaw)
conn.commit()
#print(dataToInsertElab)
#print(dataToInsertRaw)
elif("_2_" in pathFile):
print("File tipo 2.\n")
#print(unit_name, tool_name)
dataToInsertElab = []
dataToInsertRaw = []
for row in data:
rowSplitted = row.replace("\"","").split(";")
eventTimestamp = rowSplitted[0].split(" ")
date = eventTimestamp[0].split("-")
date = date[2]+"-"+date[1]+"-"+date[0]
time = eventTimestamp[1]
an2 = rowSplitted[1]
an3 = rowSplitted[2]
an1 = rowSplitted[3]
OUTREG2 = rowSplitted[4]
E8_181_CH1 = rowSplitted[5]#33 mv/V
E8_181_CH2 = rowSplitted[6]#34 mv/V
E8_181_CH3 = rowSplitted[7]#35 mv/V
E8_181_CH4 = rowSplitted[8]#36 mv/V
E8_181_CH5 = rowSplitted[9]#37 mv/V
E8_181_CH6 = rowSplitted[10]#38 mv/V
E8_181_CH7 = rowSplitted[11]#39 mv/V
E8_181_CH8 = rowSplitted[12]#40 mv/V
E8_182_CH1 = rowSplitted[13]#41
E8_182_CH2 = rowSplitted[14]#42
E8_182_CH3 = rowSplitted[15]#43
E8_182_CH4 = rowSplitted[16]#44
E8_182_CH5 = rowSplitted[17]#45 mv/V
E8_182_CH6 = rowSplitted[18]#46 mv/V
E8_182_CH7 = rowSplitted[19]#47 mv/V
E8_182_CH8 = rowSplitted[20]#48 mv/V
E8_183_CH1 = rowSplitted[21]#49
E8_183_CH2 = rowSplitted[22]#50
E8_183_CH3 = rowSplitted[23]#51
E8_183_CH4 = rowSplitted[24]#52
E8_183_CH5 = rowSplitted[25]#53 mv/V
E8_183_CH6 = rowSplitted[26]#54 mv/V
E8_183_CH7 = rowSplitted[27]#55 mv/V
E8_183_CH8 = rowSplitted[28]#56
E8_184_CH1 = rowSplitted[29]#57
E8_184_CH2 = rowSplitted[30]#58
E8_184_CH3 = rowSplitted[31]#59
E8_184_CH4 = rowSplitted[32]#60
E8_184_CH5 = rowSplitted[33]#61
E8_184_CH6 = rowSplitted[34]#62
E8_184_CH7 = rowSplitted[35]#63 mv/V
E8_184_CH8 = rowSplitted[36]#64 mv/V
an4 = rowSplitted[37]#V unit battery
#print(unit_name, tool_name, 33, E8_181_CH1)
#print(unit_name, tool_name, 34, E8_181_CH2)
#print(unit_name, tool_name, 35, E8_181_CH3)
#print(unit_name, tool_name, 36, E8_181_CH4)
#print(unit_name, tool_name, 37, E8_181_CH5)
#print(unit_name, tool_name, 38, E8_181_CH6)
#print(unit_name, tool_name, 39, E8_181_CH7)
#print(unit_name, tool_name, 40, E8_181_CH8)
#print(unit_name, tool_name, 41, E8_182_CH1)
#print(unit_name, tool_name, 42, E8_182_CH2)
#print(unit_name, tool_name, 43, E8_182_CH3)
#print(unit_name, tool_name, 44, E8_182_CH4)
#print(unit_name, tool_name, 45, E8_182_CH5)
#print(unit_name, tool_name, 46, E8_182_CH6)
#print(unit_name, tool_name, 47, E8_182_CH7)
#print(unit_name, tool_name, 48, E8_182_CH8)
#print(unit_name, tool_name, 49, E8_183_CH1)
#print(unit_name, tool_name, 50, E8_183_CH2)
#print(unit_name, tool_name, 51, E8_183_CH3)
#print(unit_name, tool_name, 52, E8_183_CH4)
#print(unit_name, tool_name, 53, E8_183_CH5)
#print(unit_name, tool_name, 54, E8_183_CH6)
#print(unit_name, tool_name, 55, E8_183_CH7)
#print(unit_name, tool_name, 56, E8_183_CH8)
#print(unit_name, tool_name, 57, E8_184_CH1)
#print(unit_name, tool_name, 58, E8_184_CH2)
#print(unit_name, tool_name, 59, E8_184_CH3)
#print(unit_name, tool_name, 60, E8_184_CH4)
#print(unit_name, tool_name, 61, E8_184_CH5)
#print(unit_name, tool_name, 62, E8_184_CH6)
#print(unit_name, tool_name, 63, E8_184_CH7)
#print(unit_name, tool_name, 64, E8_184_CH8)
#print(rowSplitted)
#---------------------------------------------------------------------------------------
dataToInsertRaw.append((unit_name, tool_name, 41, date, time, an4, -273, E8_182_CH1))
dataToInsertRaw.append((unit_name, tool_name, 42, date, time, an4, -273, E8_182_CH2))
dataToInsertRaw.append((unit_name, tool_name, 43, date, time, an4, -273, E8_182_CH3))
dataToInsertRaw.append((unit_name, tool_name, 44, date, time, an4, -273, E8_182_CH4))
dataToInsertRaw.append((unit_name, tool_name, 49, date, time, an4, -273, E8_183_CH1))
dataToInsertRaw.append((unit_name, tool_name, 50, date, time, an4, -273, E8_183_CH2))
dataToInsertRaw.append((unit_name, tool_name, 51, date, time, an4, -273, E8_183_CH3))
dataToInsertRaw.append((unit_name, tool_name, 52, date, time, an4, -273, E8_183_CH4))
dataToInsertRaw.append((unit_name, tool_name, 56, date, time, an4, -273, E8_183_CH8))
dataToInsertRaw.append((unit_name, tool_name, 57, date, time, an4, -273, E8_184_CH1))
dataToInsertRaw.append((unit_name, tool_name, 58, date, time, an4, -273, E8_184_CH2))
dataToInsertRaw.append((unit_name, tool_name, 59, date, time, an4, -273, E8_184_CH3))
dataToInsertRaw.append((unit_name, tool_name, 60, date, time, an4, -273, E8_184_CH4))
dataToInsertRaw.append((unit_name, tool_name, 61, date, time, an4, -273, E8_184_CH5))
dataToInsertRaw.append((unit_name, tool_name, 62, date, time, an4, -273, E8_184_CH6))
#---------------------------------------------------------------------------------------
dataToInsertElab.append((unit_name, tool_name, 41, date, time, E8_182_CH1))
dataToInsertElab.append((unit_name, tool_name, 42, date, time, E8_182_CH2))
dataToInsertElab.append((unit_name, tool_name, 43, date, time, E8_182_CH3))
dataToInsertElab.append((unit_name, tool_name, 44, date, time, E8_182_CH4))
dataToInsertElab.append((unit_name, tool_name, 49, date, time, E8_183_CH1))
dataToInsertElab.append((unit_name, tool_name, 50, date, time, E8_183_CH2))
dataToInsertElab.append((unit_name, tool_name, 51, date, time, E8_183_CH3))
dataToInsertElab.append((unit_name, tool_name, 52, date, time, E8_183_CH4))
dataToInsertElab.append((unit_name, tool_name, 56, date, time, E8_183_CH8))
dataToInsertElab.append((unit_name, tool_name, 57, date, time, E8_184_CH1))
dataToInsertElab.append((unit_name, tool_name, 58, date, time, E8_184_CH2))
dataToInsertElab.append((unit_name, tool_name, 59, date, time, E8_184_CH3))
dataToInsertElab.append((unit_name, tool_name, 60, date, time, E8_184_CH4))
dataToInsertElab.append((unit_name, tool_name, 61, date, time, E8_184_CH5))
dataToInsertElab.append((unit_name, tool_name, 62, date, time, E8_184_CH6))
#---------------------------------------------------------------------------------------
cursor.executemany(queryElab, dataToInsertElab)
cursor.executemany(queryRaw, dataToInsertRaw)
conn.commit()
#print(dataToInsertElab)
#print(dataToInsertRaw)
except Error as e:
print('Error:', e)
finally:
cursor.close()
conn.close()
except Exception as e:
print(f"An unexpected error occurred: {str(e)}\n")
def main():
getDataFromCsvAndInsert(sys.argv[1])
if __name__ == '__main__':
main()
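Note: both file types in sorotecPini.py rebuild the timestamp by hand (split on the separator, then reorder the day-month-year date). As a rough illustration of that normalization step only, assuming the export writes timestamps as "DD-MM-YYYY hh:mm:ss" (the helper name and format strings below are assumptions, not part of the script):

from datetime import datetime

def normalize_timestamp(raw: str) -> tuple[str, str]:
    # Assumed input format "DD-MM-YYYY hh:mm:ss"
    dt = datetime.strptime(raw.strip('"'), "%d-%m-%Y %H:%M:%S")
    # Return MySQL-friendly date and time strings, matching the EventDate/EventTime columns
    return dt.strftime("%Y-%m-%d"), dt.strftime("%H:%M:%S")

print(normalize_timestamp("15-09-2025 22:30:00"))  # ('2025-09-15', '22:30:00')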

171
src/old_scripts/vulinkScript.py Executable file
View File

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from datetime import datetime
import json
def checkBatteryLevel(db_conn, db_cursor, unit, date_time, battery_perc):
print(date_time, battery_perc)
if(float(battery_perc) < 25):  # below 25%
query = "select unit_name, date_time from alarms where unit_name=%s and date_time < %s and type_id=2 order by date_time desc limit 1"
db_cursor.execute(query, [unit, date_time])
result = db_cursor.fetchall()
if(len(result) > 0):
alarm_date_time = result[0]["date_time"]#datetime not str
format1 = "%Y-%m-%d %H:%M"
dt1 = datetime.strptime(date_time, format1)
time_difference = abs(dt1 - alarm_date_time)
if time_difference.total_seconds() > 24 * 60 * 60:
print("The difference is above 24 hours. Creo allarme battery")
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
db_conn.commit()
else:
print("Creo allarme battery")
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
db_conn.commit()
def checkSogliePh(db_conn, db_cursor, unit, tool, node_num, date_time, ph_value, soglie_str):
soglie = json.loads(soglie_str)
soglia = next((item for item in soglie if item.get("type") == "PH Link"), None)
ph = soglia["data"]["ph"]
ph_uno = soglia["data"]["ph_uno"]
ph_due = soglia["data"]["ph_due"]
ph_tre = soglia["data"]["ph_tre"]
ph_uno_value = soglia["data"]["ph_uno_value"]
ph_due_value = soglia["data"]["ph_due_value"]
ph_tre_value = soglia["data"]["ph_tre_value"]
ph_uno_sms = soglia["data"]["ph_uno_sms"]
ph_due_sms = soglia["data"]["ph_due_sms"]
ph_tre_sms = soglia["data"]["ph_tre_sms"]
ph_uno_email = soglia["data"]["ph_uno_email"]
ph_due_email = soglia["data"]["ph_due_email"]
ph_tre_email = soglia["data"]["ph_tre_email"]
alert_uno = 0
alert_due = 0
alert_tre = 0
ph_value_prev = 0
#print(unit, tool, node_num, date_time)
query = "select XShift, EventDate, EventTime from ELABDATADISP where UnitName=%s and ToolNameID=%s and NodeNum=%s and concat(EventDate, ' ', EventTime) < %s order by concat(EventDate, ' ', EventTime) desc limit 1"
db_cursor.execute(query, [unit, tool, node_num, date_time])
resultPhPrev = db_cursor.fetchall()
if(len(resultPhPrev) > 0):
ph_value_prev = float(resultPhPrev[0]["XShift"])
#ph_value = random.uniform(7, 10)
print(tool, unit, node_num, date_time, ph_value)
#print(ph_value_prev, ph_value)
if(ph == 1):
if(ph_tre == 1 and ph_tre_value != '' and float(ph_value) > float(ph_tre_value)):
if(ph_value_prev <= float(ph_tre_value)):
alert_tre = 1
if(ph_due == 1 and ph_due_value != '' and float(ph_value) > float(ph_due_value)):
if(ph_value_prev <= float(ph_due_value)):
alert_due = 1
if(ph_uno == 1 and ph_uno_value != '' and float(ph_value) > float(ph_uno_value)):
if(ph_value_prev <= float(ph_uno_value)):
alert_uno = 1
#print(ph_value, ph, " livelli:", ph_uno, ph_due, ph_tre, " value:", ph_uno_value, ph_due_value, ph_tre_value, " sms:", ph_uno_sms, ph_due_sms, ph_tre_sms, " email:", ph_uno_email, ph_due_email, ph_tre_email)
if(alert_tre == 1):
print("level3",tool, unit, node_num, date_time, ph_value)
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 3, "pH", ph_tre_email, ph_tre_sms])
db_conn.commit()
elif(alert_due == 1):
print("level2",tool, unit, node_num, date_time, ph_value)
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 2, "pH", ph_due_email, ph_due_sms])
db_conn.commit()
elif(alert_uno == 1):
print("level1",tool, unit, node_num, date_time, ph_value)
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 1, "pH", ph_uno_email, ph_uno_sms])
db_conn.commit()
def getDataFromCsv(pathFile):
try:
folder_path, file_with_extension = os.path.split(pathFile)
file_name, _ = os.path.splitext(file_with_extension)#toolname
serial_number = file_name.split("_")[0]
query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
query_node_depth = "SELECT depth, t.soglie, n.num as node_num FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"
query_nodes = "SELECT t.soglie, n.num as node_num, n.nodetype_id FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
db_config = read_db_config()
conn = MySQLConnection(**db_config)
cursor = conn.cursor(dictionary=True)
cursor.execute(query, [serial_number])
result = cursor.fetchall()
unit = result[0]["unit_name"]
tool = result[0]["tool_name"]
cursor.execute(query_node_depth, [unit, tool])
resultNode = cursor.fetchall()
cursor.execute(query_nodes, [unit, tool])
resultAllNodes = cursor.fetchall()
#print(resultAllNodes)
node_num_piezo = next((item for item in resultAllNodes if item.get('nodetype_id') == 2), None)["node_num"]
node_num_baro = next((item for item in resultAllNodes if item.get('nodetype_id') == 3), None)["node_num"]
node_num_conductivity = next((item for item in resultAllNodes if item.get('nodetype_id') == 94), None)["node_num"]
node_num_ph = next((item for item in resultAllNodes if item.get('nodetype_id') == 97), None)["node_num"]
#print(node_num_piezo, node_num_baro, node_num_conductivity, node_num_ph)
# 2 piezo
# 3 baro
# 94 conductivity
# 97 ph
node_depth = float(resultNode[0]["depth"]) #node piezo depth
with open(pathFile, 'r', encoding='ISO-8859-1') as file:
data = file.readlines()
data = [row.rstrip() for row in data]
del data[:10]  # remove the 10 header lines
for row in data:
row = row.split(",")
date_time = datetime.strptime(row[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
date_time = date_time.split(" ")
date = date_time[0]
time = date_time[1]
temperature_unit = float(row[2])
battery_perc = float(row[3])
pressure_baro = float(row[4])*1000  # (kPa): multiplied by 1000 to get Pa for elab->pressure
conductivity = float(row[6])
ph = float(row[11])
temperature_piezo = float(row[14])
pressure = float(row[16])*1000
depth = (node_depth * -1) + float(row[17])  # added to the node elevation (node depth taken with inverted sign)
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_baro, date, time, battery_perc, temperature_unit, pressure_baro])
cursor.execute(queryInsElab, [unit, tool, node_num_baro, date, time, pressure_baro])
conn.commit()
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_conductivity, date, time, battery_perc, temperature_unit, conductivity])
cursor.execute(queryInsElab, [unit, tool, node_num_conductivity, date, time, conductivity])
conn.commit()
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_ph, date, time, battery_perc, temperature_unit, ph])
cursor.execute(queryInsElab, [unit, tool, node_num_ph, date, time, ph])
conn.commit()
checkSogliePh(conn, cursor, unit, tool, node_num_ph, date_time[0]+" "+date_time[1], ph, resultNode[0]["soglie"])
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(queryInsRaw, [unit, tool, node_num_piezo, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
cursor.execute(queryInsElab, [unit, tool, node_num_piezo, date, time, temperature_piezo, depth, pressure])
conn.commit()
checkBatteryLevel(conn, cursor, unit, date_time[0]+" "+date_time[1], battery_perc)
except Error as e:
print('Error:', e)
def main():
getDataFromCsv(sys.argv[1])
if __name__ == '__main__':
main()
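Note: the piezometer branch combines the node installation depth with the sensor reading (depth = (node_depth * -1) + float(row[17])) and converts pressures from kPa to Pa. A minimal worked example of that arithmetic with made-up numbers; reading row[17] as a depth-to-water in metres is an assumption:

node_depth = 12.5          # installation depth of the piezo node, metres (from the nodes table)
depth_to_water = 3.2       # row[17] in the CSV, metres (assumed meaning)
pressure_kpa = 101.3       # row[16] in the CSV, kPa

water_level = (node_depth * -1) + depth_to_water   # -12.5 + 3.2 = -9.3
pressure_pa = pressure_kpa * 1000                   # 101300.0 Pa, as stored in ELABDATADISP

print(water_level, pressure_pa)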

80
src/send_orchestrator.py Executable file
View File

@@ -0,0 +1,80 @@
#!.venv/bin/python
"""
Orchestrator for the workers that send data to customers
"""
# Import necessary libraries
import logging
import asyncio
# Import custom modules for configuration and database connection
from utils.config import loader_send_data as setting
from utils.database import WorkflowFlags
from utils.csv.loaders import get_next_csv_atomic
from utils.orchestrator_utils import run_orchestrator, worker_context
from utils.connect.send_data import process_workflow_record
from utils.general import alterna_valori
# from utils.ftp.send_data import ftp_send_elab_csv_to_customer, api_send_elab_csv_to_customer, \
# ftp_send_raw_csv_to_customer, api_send_raw_csv_to_customer
# Initialize the logger for this module
logger = logging.getLogger()
# Delay between one CSV processing run and the next (in seconds)
ELAB_PROCESSING_DELAY = 0.2
# Wait time when there are no records to process
NO_RECORD_SLEEP = 30
async def worker(worker_id: int, cfg: dict, pool: object) -> None:
"""Esegue il ciclo di lavoro per l'invio dei dati.
Il worker preleva un record dal database che indica dati pronti per
l'invio (sia raw che elaborati), li processa e attende prima di
iniziare un nuovo ciclo.
Args:
worker_id (int): L'ID univoco del worker.
cfg (dict): L'oggetto di configurazione.
pool (object): Il pool di connessioni al database.
"""
# Imposta il context per questo worker
worker_context.set(f"W{worker_id:02d}")
debug_mode = logging.getLogger().getEffectiveLevel() == logging.DEBUG
logger.info("Avviato")
alternatore = alterna_valori(
[WorkflowFlags.CSV_RECEIVED, WorkflowFlags.SENT_RAW_DATA],
[WorkflowFlags.DATA_ELABORATED, WorkflowFlags.SENT_ELAB_DATA],
)
while True:
try:
logger.info("Inizio elaborazione")
status, fase = next(alternatore)
record = await get_next_csv_atomic(pool, cfg.dbrectable, status, fase)
if record:
await process_workflow_record(record, fase, cfg, pool)
await asyncio.sleep(ELAB_PROCESSING_DELAY)
else:
logger.info("Nessun record disponibile")
await asyncio.sleep(NO_RECORD_SLEEP)
except Exception as e: # pylint: disable=broad-except
logger.error("Errore durante l'esecuzione: %s", e, exc_info=debug_mode)
await asyncio.sleep(1)
async def main():
"""Funzione principale che avvia il send_orchestrator."""
await run_orchestrator(setting.Config, worker)
if __name__ == "__main__":
asyncio.run(main())
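Note: alterna_valori (imported from utils.general, whose diff is not shown here) is used as a ping-pong between the raw-data pair and the elaborated-data pair of workflow flags. A minimal sketch of such a generator, assuming it simply cycles over the sequences it is given (the string stand-ins below replace the real WorkflowFlags members):

from itertools import cycle

def alterna_valori(*coppie):
    """Yield the given (status, fase) pairs in a round-robin, forever (assumed behaviour)."""
    yield from cycle(coppie)

alternatore = alterna_valori(("CSV_RECEIVED", "SENT_RAW_DATA"),
                             ("DATA_ELABORATED", "SENT_ELAB_DATA"))
print(next(alternatore))  # ('CSV_RECEIVED', 'SENT_RAW_DATA')
print(next(alternatore))  # ('DATA_ELABORATED', 'SENT_ELAB_DATA')
print(next(alternatore))  # ('CSV_RECEIVED', 'SENT_RAW_DATA') again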

View File

@@ -0,0 +1,3 @@
"""Config ini setting"""
from pathlib import Path
ENV_PARENT_PATH = Path(__file__).resolve().parent.parent.parent.parent

View File

@@ -0,0 +1,25 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
def __init__(self):
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/email.ini"])
# email setting
self.from_addr = c.get("address", "from")
self.to_addr = c.get("address", "to")
self.cc_addr = c.get("address", "cc")
self.bcc_addr = c.get("address", "bcc")
self.subject = c.get("msg", "subject")
self.body = c.get("msg", "body")
self.smtp_addr = c.get("smtp", "address")
self.smtp_port = c.getint("smtp", "port")
self.smtp_user = c.get("smtp", "user")
self.smtp_passwd = c.get("smtp", "password")
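Note: the getters above imply an env/email.ini with [address], [msg] and [smtp] sections. A hedged sketch of what such a file could contain; only the section and key names come from the code, all values are placeholders:

from configparser import ConfigParser

sample_ini = """
[address]
from = alerts@example.com
to = ops@example.com
cc =
bcc =

[msg]
subject = Matlab elaboration error
body = <html><body>{unit} {tool}: {matlab_error}</body></html>

[smtp]
address = smtp.example.com
port = 587
user = alerts@example.com
password = changeme
"""

c = ConfigParser()
c.read_string(sample_ini)
print(c.get("smtp", "address"), c.getint("smtp", "port"))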

View File

@@ -2,14 +2,20 @@
""" """
from configparser import ConfigParser from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config: class Config:
def __init__(self): def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'ftp.ini' and 'db.ini' for FTP server, CSV, logging, and database.
"""
c = ConfigParser() c = ConfigParser()
c.read(["env/ftp.ini", "env/db.ini"]) c.read([f"{ENV_PARENT_PATH}/env/ftp.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# FTP setting # FTP setting
self.service_port = c.getint("ftpserver", "service_port")
self.firstport = c.getint("ftpserver", "firstPort") self.firstport = c.getint("ftpserver", "firstPort")
self.proxyaddr = c.get("ftpserver", "proxyAddr") self.proxyaddr = c.get("ftpserver", "proxyAddr")
self.portrangewidth = c.getint("ftpserver", "portRangeWidth") self.portrangewidth = c.getint("ftpserver", "portRangeWidth")
@@ -45,11 +51,28 @@ class Config:
# unit setting # unit setting
self.units_name = [part for part in c.get("unit", "Names").split('|')] self.units_name = [part for part in c.get("unit", "Names").split('|')]
self.units_type = [part for part in c.get("unit", "Types").split('|')] self.units_type = [part for part in c.get("unit", "Types").split('|')]
self.units_alias = {
key: value
for item in c.get("unit", "Alias").split('|')
for key, value in [item.split(':', 1)]
}
#self.units_header = {key: int(value) for pair in c.get("unit", "Headers").split('|') for key, value in [pair.split(':')]} #self.units_header = {key: int(value) for pair in c.get("unit", "Headers").split('|') for key, value in [pair.split(':')]}
# tool setting # tool setting
self.tools_name = [part for part in c.get("tool", "Names").split('|')] self.tools_name = [part for part in c.get("tool", "Names").split('|')]
self.tools_type = [part for part in c.get("tool", "Types").split('|')] self.tools_type = [part for part in c.get("tool", "Types").split('|')]
self.tools_alias = {
key: key if value == '=' else value
for item in c.get("tool", "Alias").split('|')
for key, value in [item.split(':', 1)]
}
# csv info # csv info
self.csv_infos = [part for part in c.get("csv", "Infos").split('|')] self.csv_infos = [part for part in c.get("csv", "Infos").split('|')]
# TS pini path match
self.ts_pini_path_match = {
key: key[1:-1] if value == '=' else value
for item in c.get("ts_pini", "path_match").split('|')
for key, value in [item.split(':', 1)]
}
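Note: the new Alias and path_match options are pipe-separated key:value pairs, where a value of '=' means "keep the key itself" (tools) or "the key without its first and last character" (ts_pini paths). A small standalone demo of the same comprehensions, with invented ini values:

raw_tool_alias = "TLT:TILTMETER|PZ:PIEZOMETER|DT0001:="   # invented example value
tools_alias = {
    key: key if value == '=' else value
    for item in raw_tool_alias.split('|')
    for key, value in [item.split(':', 1)]
}
print(tools_alias)  # {'TLT': 'TILTMETER', 'PZ': 'PIEZOMETER', 'DT0001': 'DT0001'}

raw_path_match = "/ST001/:Stazione Nord|/ST002/:="        # invented example value
ts_pini_path_match = {
    key: key[1:-1] if value == '=' else value
    for item in raw_path_match.split('|')
    for key, value in [item.split(':', 1)]
}
print(ts_pini_path_match)  # {'/ST001/': 'Stazione Nord', '/ST002/': 'ST002'}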

View File

@@ -2,12 +2,17 @@
""" """
from configparser import ConfigParser from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config: class Config:
def __init__(self): def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'load.ini' and 'db.ini' for logging, worker, database, and table configurations.
"""
c = ConfigParser() c = ConfigParser()
c.read(["env/load.ini", "env/db.ini"]) c.read([f"{ENV_PARENT_PATH}/env/load.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# LOG setting # LOG setting
self.logfilename = c.get("logging", "logFilename") self.logfilename = c.get("logging", "logFilename")
@@ -29,4 +34,3 @@ class Config:
self.dbrawdata = c.get("tables", "rawTableName") self.dbrawdata = c.get("tables", "rawTableName")
self.dbrawdata = c.get("tables", "rawTableName") self.dbrawdata = c.get("tables", "rawTableName")
self.dbnodes = c.get("tables", "nodesTableName") self.dbnodes = c.get("tables", "nodesTableName")

View File

@@ -2,12 +2,17 @@
""" """
from configparser import ConfigParser from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config: class Config:
def __init__(self): def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'elab.ini' and 'db.ini' for logging, worker, database, table, tool, and Matlab configurations.
"""
c = ConfigParser() c = ConfigParser()
c.read(["env/elab.ini", "env/db.ini"]) c.read([f"{ENV_PARENT_PATH}/env/elab.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# LOG setting # LOG setting
self.logfilename = c.get("logging", "logFilename") self.logfilename = c.get("logging", "logFilename")

View File

@@ -0,0 +1,36 @@
"""set configurations
"""
from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config:
def __init__(self):
"""
Initializes the Config class by reading configuration files.
It loads settings from 'send.ini' and 'db.ini' for logging, worker, database, and table configurations.
"""
c = ConfigParser()
c.read([f"{ENV_PARENT_PATH}/env/send.ini", f"{ENV_PARENT_PATH}/env/db.ini"])
# LOG setting
self.logfilename = c.get("logging", "logFilename")
# Worker setting
self.max_threads = c.getint("threads", "max_num")
# DB setting
self.dbhost = c.get("db", "hostname")
self.dbport = c.getint("db", "port")
self.dbuser = c.get("db", "user")
self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName")
self.max_retries = c.getint("db", "maxRetries")
# Tables
self.dbusertable = c.get("tables", "userTableName")
self.dbrectable = c.get("tables", "recTableName")
self.dbrawdata = c.get("tables", "rawTableName")
self.dbnodes = c.get("tables", "nodesTableName")

View File

@@ -2,12 +2,16 @@
""" """
from configparser import ConfigParser from configparser import ConfigParser
from . import ENV_PARENT_PATH
class Config: class Config:
"""
Handles configuration loading for database settings to load ftp users.
"""
def __init__(self): def __init__(self):
c = ConfigParser() c = ConfigParser()
c.read(["env/db.ini"]) c.read([f"{ENV_PARENT_PATH}/env/db.ini"])
# DB setting # DB setting
self.dbhost = c.get("db", "hostname") self.dbhost = c.get("db", "hostname")
@@ -16,5 +20,3 @@ class Config:
self.dbpass = c.get("db", "password") self.dbpass = c.get("db", "password")
self.dbname = c.get("db", "dbName") self.dbname = c.get("db", "dbName")
self.max_retries = c.getint("db", "maxRetries") self.max_retries = c.getint("db", "maxRetries")

View File

@@ -0,0 +1,91 @@
import os
from datetime import datetime
import logging
import re
import mysql.connector
from utils.database.connection import connetti_db
from utils.csv.parser import extract_value
logger = logging.getLogger(__name__)
def on_file_received(self: object, file: str) -> None:
"""
Processes a received file, extracts relevant information, and inserts it into the database.
If the file is empty, it is removed. Otherwise, it extracts unit and tool
information from the filename and the first few lines of the CSV, handles
aliases, and then inserts the data into the configured database table.
Args:
file (str): The path to the received file."""
if not os.stat(file).st_size:
os.remove(file)
logger.info(f'File {file} is empty: removed.')
else:
cfg = self.cfg
path, filenameExt = os.path.split(file)
filename, fileExtension = os.path.splitext(filenameExt)
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
new_filename = f"{filename}_{timestamp}{fileExtension}"
os.rename(file, f"{path}/{new_filename}")
if (fileExtension.upper() in (cfg.fileext)):
with open(f"{path}/{new_filename}", 'r', encoding='utf-8', errors='ignore') as csvfile:
lines = csvfile.readlines()
unit_name = extract_value(cfg.units_name, filename, str(lines[0:10]))
unit_type = extract_value(cfg.units_type, filename, str(lines[0:10]))
tool_name = extract_value(cfg.tools_name, filename, str(lines[0:10]))
tool_type = extract_value(cfg.tools_type, filename, str(lines[0:10]))
tool_info = "{}"
# if an alias exists in units_alias, take the alias value... check both the full unit_type and its first 3 characters (for CO_xxxxx)
upper_unit_type = unit_type.upper()
unit_type = cfg.units_alias.get(upper_unit_type) or \
cfg.units_alias.get(upper_unit_type[:3]) or \
upper_unit_type
upper_tool_type = tool_type.upper()
tool_type = cfg.tools_alias.get(upper_tool_type) or \
cfg.tools_alias.get(upper_tool_type[:3]) or \
upper_tool_type
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
logger.error(f'{e}')
# Create a cursor
cur = conn.cursor()
# to be extracted into a separate module
if (unit_type.upper() == "ISI CSV LOG" and tool_type.upper() == "VULINK" ):
serial_number = filename.split('_')[0]
tool_info = f'{{"serial_number": {serial_number}}}'
try:
cur.execute(f"SELECT unit_name, tool_name FROM {cfg.dbname}.vulink_tools WHERE serial_number = '{serial_number}'")
unit_name, tool_name = cur.fetchone()
except Exception as e:
logger.warning(f'{tool_type} serial number {serial_number} not found in table vulink_tools. {e}')
# to be extracted into a separate module
if (unit_type.upper() == "STAZIONETOTALE" and tool_type.upper() == "INTEGRITY MONITOR" ):
escaped_keys = [re.escape(key) for key in cfg.ts_pini_path_match.keys()]
stazione = extract_value(escaped_keys, filename)
if stazione:
tool_info = f'{{"Stazione": "{cfg.ts_pini_path_match.get(stazione)}"}}'
try:
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (username, filename, unit_name, unit_type, tool_name, tool_type, tool_data, tool_info) VALUES (%s,%s, %s, %s, %s, %s, %s, %s)", (self.username, new_filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines), tool_info))
conn.commit()
conn.close()
except Exception as e:
logger.error(f'File {new_filename} not loaded. Held in user path.')
logger.error(f'{e}')
"""
else:
os.remove(file)
logger.info(f'File {new_filename} removed.')
"""

View File

@@ -0,0 +1,473 @@
from ftplib import FTP, FTP_TLS, all_errors
from io import BytesIO
import logging
import aiomysql
from datetime import datetime
from utils.database.loader_action import update_status, unlock
from utils.database.action_query import get_data_as_csv, get_tool_info, get_elab_timestamp
from utils.database import WorkflowFlags
logger = logging.getLogger(__name__)
class FTPConnection:
"""
Manages an FTP or FTP_TLS connection, providing a context manager for automatic disconnection.
"""
def __init__(self, host, port=21, use_tls=False, user='', passwd='',
passive=True, timeout=None, debug=0, context=None):
self.use_tls = use_tls
if use_tls:
self.ftp = FTP_TLS(context=context, timeout=timeout) if context else FTP_TLS(timeout=timeout)
else:
self.ftp = FTP(timeout=timeout)
if debug > 0:
self.ftp.set_debuglevel(debug)
self.ftp.connect(host, port)
self.ftp.login(user, passwd)
self.ftp.set_pasv(passive)
if use_tls:
self.ftp.prot_p()
def __getattr__(self, name):
"""Delega tutti i metodi non definiti all'oggetto FTP sottostante"""
return getattr(self.ftp, name)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.ftp.quit()
async def ftp_send_raw_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, pool: object) -> bool:
"""Placeholder: sending raw CSV data via FTP is not implemented yet; always reports success."""
return True
async def ftp_send_elab_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, csv_data: str, pool: object) -> bool:
"""
Sends elaborated CSV data to a customer via FTP.
Retrieves FTP connection details from the database based on the unit name,
then establishes an FTP connection and uploads the CSV data.
Args:
cfg (dict): Configuration dictionary (not directly used in this function but passed for consistency).
id (int): The ID of the record being processed (used for logging).
unit (str): The name of the unit associated with the data.
tool (str): The name of the tool associated with the data.
csv_data (str): The CSV data as a string to be sent.
pool (object): The database connection pool.
Returns:
bool: True if the CSV data was sent successfully, False otherwise.
"""
query = """
select ftp_addrs, ftp_user, ftp_passwd, ftp_parm, ftp_filename, ftp_target, duedate from units
where name = '%s'";'
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
try:
await cur.execute(query, (unit,))
send_ftp_info = await cur.fetchone()
logger.info(f"id {id} - {unit} - {tool}: estratti i dati per invio via ftp")
except Exception as e:
logger.error(f"id {id} - {unit} - {tool} - errore nella query per invio ftp: {e}")
try:
# Convert to bytes
csv_bytes = csv_data.encode('utf-8')
csv_buffer = BytesIO(csv_bytes)
ftp_parms = await parse_ftp_parms(send_ftp_info["ftp_parm"])
use_tls = 'ssl_version' in ftp_parms
passive = ftp_parms.get('passive', True)
port = ftp_parms.get('port', 21)
# FTP connection
with FTPConnection(host=send_ftp_info["ftp_addrs"], port=port, use_tls=use_tls, user=send_ftp_info["ftp_user"], passwd=send_ftp_info["ftp_passwd"], passive=passive) as ftp:
# Change directory
if send_ftp_info["ftp_target"] != "/":
ftp.cwd(send_ftp_info["ftp_target"])
# Send the file
result = ftp.storbinary(f'STOR {send_ftp_info["ftp_filename"]}', csv_buffer)
if result.startswith('226'):
logger.info(f"File {send_ftp_info['ftp_filename']} inviato con successo")
return True
else:
logger.error(f"Errore nell'invio: {result}")
return False
except all_errors as e:
logger.error(f"Errore FTP: {e}")
return False
except Exception as e:
logger.error(f"Errore generico: {e}")
return False
finally:
csv_buffer.close()
async def parse_ftp_parms(ftp_parms: str) -> dict:
"""
Parses a string of FTP parameters into a dictionary.
Args:
ftp_parms (str): A string containing key-value pairs separated by commas,
with keys and values separated by '=>'.
Returns:
dict: A dictionary where keys are parameter names (lowercase) and values are their parsed values.
"""
# Strip whitespace and split on commas
pairs = ftp_parms.split(',')
result = {}
for pair in pairs:
if '=>' in pair:
key, value = pair.split('=>', 1)
key = key.strip().lower()
value = value.strip().lower()
# Convert values to the appropriate types
if value.isdigit():
value = int(value)
elif value == '':
value = None
result[key] = value
return result
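For reference, a round-trip of parse_ftp_parms with a made-up ftp_parm string (the keys port/passive/ssl_version are the ones the caller looks for; the column value itself is invented):

import asyncio

async def _demo():
    # Invented example of the units.ftp_parm column content
    parms = await parse_ftp_parms("port => 2121, passive => 1, ssl_version => tlsv1_2")
    print(parms)                   # {'port': 2121, 'passive': 1, 'ssl_version': 'tlsv1_2'}
    print('ssl_version' in parms)  # True -> the caller switches to FTP_TLS

asyncio.run(_demo())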
async def process_workflow_record(record: tuple, fase: int, cfg: dict, pool: object):
"""
Processes a single workflow record according to the given phase.
Args:
record: Tuple containing the record data
fase: Current workflow phase
cfg: Configuration
pool: Database connection pool
"""
# Extract and normalize the record fields
id, unit_type, tool_type, unit_name, tool_name = [
x.lower().replace(" ", "_") if isinstance(x, str) else x
for x in record
]
try:
# Fetch the main tool information
tool_elab_info = await get_tool_info(fase, unit_name.upper(), tool_name.upper(), pool)
if tool_elab_info:
timestamp_matlab_elab = await get_elab_timestamp(id, pool)
# Check whether processing is allowed
if not _should_process(tool_elab_info, timestamp_matlab_elab):
logger.info(f"id {id} - {unit_name} - {tool_name} {tool_elab_info['duedate']}: "
"invio dati non eseguito - due date raggiunta.")
await update_status(cfg, id, fase, pool)
return
# Route based on the phase
success = await _route_by_phase(fase, tool_elab_info, cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
if success:
await update_status(cfg, id, fase, pool)
else:
await update_status(cfg, id, fase, pool)
except Exception as e:
logger.error(f"Errore durante elaborazione id {id} - {unit_name} - {tool_name}: {e}")
raise
finally:
await unlock(cfg, id, pool)
def _should_process(tool_elab_info: dict, timestamp_matlab_elab: datetime) -> bool:
"""
Determines if a record should be processed based on its due date.
Args:
tool_elab_info (dict): A dictionary containing information about the tool and its due date.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
Returns:
bool: True if the record should be processed, False otherwise.
"""
duedate = tool_elab_info.get("duedate")
# If there is no duedate, or it is empty/null, the record can be processed
if not duedate or duedate in ('0000-00-00 00:00:00', ''):
return True
# If timestamp_matlab_elab is None/null, use the current timestamp
comparison_timestamp = timestamp_matlab_elab if timestamp_matlab_elab is not None else datetime.now()
# Convert duedate to datetime if it is a string
if isinstance(duedate, str):
duedate = datetime.strptime(duedate, '%Y-%m-%d %H:%M:%S')
# Make sure comparison_timestamp is a datetime
if isinstance(comparison_timestamp, str):
comparison_timestamp = datetime.strptime(comparison_timestamp, '%Y-%m-%d %H:%M:%S')
return duedate > comparison_timestamp
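A quick worked example of the due-date gate, using the _should_process defined above with invented values: a duedate after the elaboration timestamp lets the record through, one already passed blocks it.

from datetime import datetime

elab_ts = datetime(2025, 9, 15, 22, 0, 0)
print(_should_process({"duedate": "2026-01-01 00:00:00"}, elab_ts))  # True  (duedate still in the future)
print(_should_process({"duedate": "2025-01-01 00:00:00"}, elab_ts))  # False (duedate already passed)
print(_should_process({"duedate": None}, elab_ts))                   # True  (no duedate configured)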
async def _route_by_phase(fase: int, tool_elab_info: dict, cfg: dict, id: int, unit_name: str, tool_name: str,
timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Routes the processing of a workflow record based on the current phase.
This function acts as a dispatcher, calling the appropriate handler function
for sending elaborated data or raw data based on the `fase` (phase) parameter.
Args:
fase (int): The current phase of the workflow (e.g., WorkflowFlags.SENT_ELAB_DATA, WorkflowFlags.SENT_RAW_DATA).
tool_elab_info (dict): A dictionary containing information about the tool and its elaboration status.
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the data sending operation was successful or no action was needed, False otherwise.
"""
if fase == WorkflowFlags.SENT_ELAB_DATA:
return await _handle_elab_data_phase(tool_elab_info, cfg, id, unit_name,
tool_name, timestamp_matlab_elab, pool)
elif fase == WorkflowFlags.SENT_RAW_DATA:
return await _handle_raw_data_phase(tool_elab_info, cfg, id, unit_name,
tool_name, pool)
else:
logger.info(f"id {id} - {unit_name} - {tool_name}: nessuna azione da eseguire.")
return True
async def _handle_elab_data_phase(tool_elab_info: dict, cfg: dict, id: int, unit_name: str, tool_name: str,
timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Handles the phase of sending elaborated data.
This function checks if elaborated data needs to be sent via FTP or API
based on the `tool_elab_info` and calls the appropriate sending function.
Args:
tool_elab_info (dict): A dictionary containing information about the tool and its elaboration status,
including flags for FTP and API sending.
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the data sending operation was successful or no action was needed, False otherwise.
"""
# FTP send for elaborated data
if tool_elab_info.get('ftp_send'):
return await _send_elab_data_ftp(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
# API send for elaborated data
elif _should_send_elab_api(tool_elab_info):
return await _send_elab_data_api(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
return True
async def _handle_raw_data_phase(tool_elab_info: dict, cfg: dict, id: int, unit_name: str, tool_name: str, pool: object) -> bool:
"""
Handles the phase of sending raw data.
This function checks if raw data needs to be sent via FTP or API
based on the `tool_elab_info` and calls the appropriate sending function.
Args:
tool_elab_info (dict): A dictionary containing information about the tool and its raw data sending status,
including flags for FTP and API sending.
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
pool (object): The database connection pool.
Returns:
bool: True if the data sending operation was successful or no action was needed, False otherwise.
"""
# FTP send for raw data
if tool_elab_info.get('ftp_send_raw'):
return await _send_raw_data_ftp(cfg, id, unit_name, tool_name, pool)
# API send for raw data
elif _should_send_raw_api(tool_elab_info):
return await _send_raw_data_api(cfg, id, unit_name, tool_name, pool)
return True
def _should_send_elab_api(tool_elab_info: dict) -> bool:
"""Verifica se i dati elaborati devono essere inviati via API."""
return (tool_elab_info.get('inoltro_api') and
tool_elab_info.get('api_send') and
tool_elab_info.get('inoltro_api_url', '').strip())
def _should_send_raw_api(tool_elab_info: dict) -> bool:
"""Verifica se i dati raw devono essere inviati via API."""
return (tool_elab_info.get('inoltro_api_raw') and
tool_elab_info.get('api_send_raw') and
tool_elab_info.get('inoltro_api_url_raw', '').strip())
async def _send_elab_data_ftp(cfg: dict, id: int, unit_name: str, tool_name: str, timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Sends elaborated data via FTP.
This function retrieves the elaborated CSV data and attempts to send it
to the customer via FTP. It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the FTP sending was successful, False otherwise.
"""
try:
elab_csv = await get_data_as_csv(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
if not elab_csv:
return False
print(elab_csv)
# if await send_elab_csv_to_customer(cfg, id, unit_name, tool_name, elab_csv, pool):
if True: # Placeholder per test
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio FTP fallito.")
return False
except Exception as e:
logger.error(f"Errore invio FTP elab data id {id}: {e}")
return False
async def _send_elab_data_api(cfg: dict, id: int, unit_name: str, tool_name: str, timestamp_matlab_elab: datetime, pool: object) -> bool:
"""
Sends elaborated data via API.
This function retrieves the elaborated CSV data and attempts to send it
to the customer via an API. It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
timestamp_matlab_elab (datetime): The timestamp of the last MATLAB elaboration.
pool (object): The database connection pool.
Returns:
bool: True if the API sending was successful, False otherwise.
"""
try:
elab_csv = await get_data_as_csv(cfg, id, unit_name, tool_name,
timestamp_matlab_elab, pool)
if not elab_csv:
return False
print(elab_csv)
# if await send_elab_csv_to_customer(cfg, id, unit_name, tool_name, elab_csv, pool):
if True: # Placeholder per test
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio API fallito.")
return False
except Exception as e:
logger.error(f"Errore invio API elab data id {id}: {e}")
return False
async def _send_raw_data_ftp(cfg: dict, id: int, unit_name: str, tool_name: str, pool: object) -> bool:
"""
Sends raw data via FTP.
This function attempts to send raw CSV data to the customer via FTP.
It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
pool (object): The database connection pool.
Returns:
bool: True if the FTP sending was successful, False otherwise.
"""
try:
# if await ftp_send_raw_csv_to_customer(cfg, id, unit_name, tool_name, pool):
if True: # Placeholder per test
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio FTP raw fallito.")
return False
except Exception as e:
logger.error(f"Errore invio FTP raw data id {id}: {e}")
return False
async def _send_raw_data_api(cfg: dict, id: int, unit_name: str, tool_name: str, pool: object) -> bool:
"""
Sends raw data via API.
This function attempts to send raw CSV data to the customer via an API.
It logs success or failure.
Args:
cfg (dict): The configuration dictionary.
id (int): The ID of the record being processed.
unit_name (str): The name of the unit associated with the data.
tool_name (str): The name of the tool associated with the data.
pool (object): The database connection pool.
Returns:
bool: True if the API sending was successful, False otherwise.
"""
try:
# if await api_send_raw_csv_to_customer(cfg, id, unit_name, tool_name, pool):
if True: # Placeholder per test
return True
else:
logger.error(f"id {id} - {unit_name} - {tool_name}: invio API raw fallito.")
return False
except Exception as e:
logger.error(f"Errore invio API raw data id {id}: {e}")
return False
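Note: FTPConnection wraps ftplib and delegates unknown attributes to the underlying FTP object, so it can be used as a drop-in context manager. A hedged usage sketch against a hypothetical server; host, credentials, target directory and filename are all placeholders:

from io import BytesIO

# Hypothetical upload: every connection detail below is a placeholder
csv_buffer = BytesIO(b"EventDate;EventTime;Value\n2025-09-15;22:30:00;1.23\n")
with FTPConnection(host="ftp.example.com", port=21, use_tls=False,
                   user="demo", passwd="demo", passive=True, timeout=30) as ftp:
    ftp.cwd("/incoming")                        # delegated to ftplib.FTP.cwd
    result = ftp.storbinary("STOR sample.csv", csv_buffer)
    print(result)                               # e.g. '226 Transfer complete' on success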

View File

@@ -0,0 +1,47 @@
import smtplib
import logging
from email.message import EmailMessage
from utils.config import loader_email as setting
cfg = setting.Config()
logger = logging.getLogger(__name__)
async def send_error_email(unit_name: str, tool_name: str, matlab_cmd: str, matlab_error: str, errors: list, warnings: list) -> None:
"""
Sends an error email containing details about a MATLAB processing failure.
The email includes information about the unit, tool, MATLAB command, error message,
and lists of specific errors and warnings encountered.
Args:
unit_name (str): The name of the unit involved in the processing.
tool_name (str): The name of the tool involved in the processing.
matlab_cmd (str): The MATLAB command that was executed.
matlab_error (str): The main MATLAB error message.
errors (list): A list of detailed error messages from MATLAB.
warnings (list): A list of detailed warning messages from MATLAB.
"""
# Create the message object
msg = EmailMessage()
msg['Subject'] = cfg.subject
msg['From'] = cfg.from_addr
msg['To'] = cfg.to_addr
msg['Cc'] = cfg.cc_addr
msg['Bcc'] = cfg.bcc_addr
MatlabErrors = "<br/>".join(errors)
MatlabWarnings = "<br/>".join(dict.fromkeys(warnings))
# Set the message content as HTML
msg.add_alternative(cfg.body.format(unit=unit_name, tool=tool_name, matlab_cmd=matlab_cmd, matlab_error=matlab_error,
MatlabErrors=MatlabErrors, MatlabWarnings=MatlabWarnings), subtype='html')
try:
# Connect to the SMTP server
with smtplib.SMTP(cfg.smtp_addr, cfg.smtp_port) as server:
server.starttls()  # Start TLS encryption for a secure connection
server.login(cfg.smtp_user, cfg.smtp_passwd)  # Authenticate with the server
server.send_message(msg)  # Send the email
logger.info("Email inviata con successo!")
except Exception as e:
logger.error(f"Errore durante l'invio dell'email: {e}")

View File

@@ -41,14 +41,14 @@ def ftp_SITE_ADDU(self: object, line: str) -> None:
conn = connetti_db(cfg) conn = connetti_db(cfg)
except mysql.connector.Error as e: except mysql.connector.Error as e:
print(f"Error: {e}") print(f"Error: {e}")
logging.error(f'{e}') logger.error(f'{e}')
# Create a cursor # Create a cursor
cur = conn.cursor() cur = conn.cursor()
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('{user}', '{hash}', '{cfg.virtpath + user}', '{cfg.defperm}')") cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbusertable} (ftpuser, hash, virtpath, perm) VALUES ('{user}', '{hash}', '{cfg.virtpath + user}', '{cfg.defperm}')")
conn.commit() conn.commit()
conn.close() conn.close()
logging.info(f"User {user} created.") logger.info(f"User {user} created.")
self.respond('200 SITE ADDU successful.') self.respond('200 SITE ADDU successful.')
except Exception as e: except Exception as e:
self.respond(f'501 SITE ADDU failed: {e}.') self.respond(f'501 SITE ADDU failed: {e}.')
@@ -72,7 +72,7 @@ def ftp_SITE_DISU(self: object, line: str) -> None:
conn = connetti_db(cfg) conn = connetti_db(cfg)
except mysql.connector.Error as e: except mysql.connector.Error as e:
print(f"Error: {e}") print(f"Error: {e}")
logging.error(f'{e}') logger.error(f'{e}')
# Crea un cursore # Crea un cursore
cur = conn.cursor() cur = conn.cursor()
@@ -80,7 +80,7 @@ def ftp_SITE_DISU(self: object, line: str) -> None:
conn.commit() conn.commit()
conn.close() conn.close()
logging.info(f"User {user} deleted.") logger.info(f"User {user} deleted.")
self.respond('200 SITE DISU successful.') self.respond('200 SITE DISU successful.')
except Exception as e: except Exception as e:
self.respond('501 SITE DISU failed.') self.respond('501 SITE DISU failed.')
@@ -102,7 +102,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None:
conn = connetti_db(cfg) conn = connetti_db(cfg)
except mysql.connector.Error as e: except mysql.connector.Error as e:
print(f"Error: {e}") print(f"Error: {e}")
logging.error(f'{e}') logger.error(f'{e}')
# Crea un cursore # Crea un cursore
cur = conn.cursor() cur = conn.cursor()
@@ -110,7 +110,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None:
cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = null WHERE ftpuser = '{user}'") cur.execute(f"UPDATE {cfg.dbname}.{cfg.dbusertable} SET disabled_at = null WHERE ftpuser = '{user}'")
conn.commit() conn.commit()
except Exception as e: except Exception as e:
logging.error(f"Update DB failed: {e}") logger.error(f"Update DB failed: {e}")
cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE ftpuser = '{user}'") cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE ftpuser = '{user}'")
@@ -123,7 +123,7 @@ def ftp_SITE_ENAU(self: object, line: str) -> None:
conn.close() conn.close()
logging.info(f"User {user} restored.") logger.info(f"User {user} restored.")
self.respond('200 SITE ENAU successful.') self.respond('200 SITE ENAU successful.')
except Exception as e: except Exception as e:
@@ -145,7 +145,7 @@ def ftp_SITE_LSTU(self: object, line: str) -> None:
conn = connetti_db(cfg) conn = connetti_db(cfg)
except mysql.connector.Error as e: except mysql.connector.Error as e:
print(f"Error: {e}") print(f"Error: {e}")
logging.error(f'{e}') logger.error(f'{e}')
# Crea un cursore # Crea un cursore
cur = conn.cursor() cur = conn.cursor()

View File

@@ -1,10 +1,12 @@
#!.venv/bin/python #!.venv/bin/python
from utils.database.nodes_query import get_nodes_type from utils.database.nodes_query import get_nodes_type
import utils.timestamp.date_check as date_check from utils.timestamp.date_check import normalizza_data, normalizza_orario
from utils.database.loader_action import find_nearest_timestamp
import logging import logging
import re import re
from itertools import islice from itertools import islice
from datetime import datetime, timedelta
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -21,10 +23,10 @@ async def get_data(cfg: object, id: int, pool: object) -> tuple:
""" """
async with pool.acquire() as conn: async with pool.acquire() as conn:
async with conn.cursor() as cur: async with conn.cursor() as cur:
await cur.execute(f'select unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}') await cur.execute(f'select filename, unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}')
unit_name, tool_name, tool_data = await cur.fetchone() filename, unit_name, tool_name, tool_data = await cur.fetchone()
return unit_name, tool_name, tool_data return filename, unit_name, tool_name, tool_data
async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list: async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
""" """
@@ -37,10 +39,17 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
for riga in [riga for riga in righe if ';|;' in riga]: """
Ciclo su tutte le righe del file CSV, escludendo quelle che:
non hanno il pattern ';|;' perché non sono dati ma è la header
che hanno il pattern 'No RX' perché sono letture non pervenute o in errore
che hanno il pattern '.-' perché sono letture con un numero errato - negativo dopo la virgola
che hanno il pattern 'File Creation' perché vuol dire che c'è stato un errore della centralina
"""
for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, temperature, rilevazioni = riga.split(';',3) timestamp, batlevel, temperature, rilevazioni = riga.split(';',3)
EventDate, EventTime = timestamp.split(' ') EventDate, EventTime = timestamp.split(' ')
if batlevel == '|': if batlevel == '|':
@@ -54,7 +63,7 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;') # Toglie '|;' iniziali, toglie eventuali ';' finali, dividi per ';|;' valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;') # Toglie '|;' iniziali, toglie eventuali ';' finali, dividi per ';|;'
for num_nodo, valori_nodo in enumerate(valori_nodi, start=1): for num_nodo, valori_nodo in enumerate(valori_nodi, start=1):
valori = valori_nodo.split(';') valori = valori_nodo.split(';')
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori)))) matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori return matrice_valori
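
For illustration, a minimal standalone sketch of the row filter described in the comment above; the sample rows are invented and only show which lines survive:

    righe = [
        "Date;Time;Bat;Temp",                       # header, no ';|;' -> skipped
        "2025/07/27 12:00:00;3.6;21.0;|;1;2;3",     # kept
        "2025/07/27 12:10:00;3.6;21.0;|;No RX",     # 'No RX' -> skipped
        "2025/07/27 12:20:00;3.6;21.0;|;1.-5;2;3",  # '.-' -> skipped
    ]
    kept = [r for r in righe
            if ';|;' in r and 'No RX' not in r and '.-' not in r
            and 'File Creation' not in r and r.isprintable()]
    # -> only the 12:00:00 row is processed further
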
@@ -69,8 +78,8 @@ async def make_ain_din_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName) node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
pattern = r'^(?:\d{4}\/\d{2}\/\d{2}|\d{2}\/\d{2}\/\d{4}) \d{2}:\d{2}:\d{2}(?:;\d+\.\d+){2}(?:;\d+){4}$' pattern = r'^(?:\d{4}\/\d{2}\/\d{2}|\d{2}\/\d{2}\/\d{4}) \d{2}:\d{2}:\d{2}(?:;\d+\.\d+){2}(?:;\d+){4}$'
@@ -80,13 +89,13 @@ async def make_ain_din_matrix(cfg: object, id: int, pool: object) -> list:
EventDate, EventTime = timestamp.split(' ') EventDate, EventTime = timestamp.split(' ')
if any(node_ains): if any(node_ains):
for node_num, analog_act in enumerate([analog_input1, analog_input2], start=1): for node_num, analog_act in enumerate([analog_input1, analog_input2], start=1):
matrice_valori.append([UnitName, ToolNameID, node_num, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + [analog_act] + ([None] * (19 - 1))) matrice_valori.append([UnitName, ToolNameID, node_num, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + [analog_act] + ([None] * (19 - 1)))
else: else:
logger.info(f"Nessun Ingresso analogico per {UnitName} {ToolNameID}") logger.info(f"Nessun Ingresso analogico per {UnitName} {ToolNameID}")
if any(node_dins): if any(node_dins):
start_node = 3 if any(node_ains) else 1 start_node = 3 if any(node_ains) else 1
for node_num, digital_act in enumerate([digital_input1, digital_input2], start=start_node): for node_num, digital_act in enumerate([digital_input1, digital_input2], start=start_node):
matrice_valori.append([UnitName, ToolNameID, node_num, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + [digital_act] + ([None] * (19 - 1))) matrice_valori.append([UnitName, ToolNameID, node_num, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + [digital_act] + ([None] * (19 - 1)))
else: else:
logger.info(f"Nessun Ingresso digitale per {UnitName} {ToolNameID}") logger.info(f"Nessun Ingresso digitale per {UnitName} {ToolNameID}")
@@ -103,11 +112,11 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName) node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
for riga in [riga for riga in righe if ';|;' in riga]: for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3) timestamp, batlevel, temperature, rilevazioni = riga.replace(';|;',';').split(';',3)
EventDate, EventTime = timestamp.split(' ') EventDate, EventTime = timestamp.split(' ')
valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|'] valori_splitted = [valore for valore in rilevazioni.split(';') if valore != '|']
@@ -116,7 +125,7 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels] valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels]
for num_nodo, valori in enumerate(valori_nodi, start=1): for num_nodo, valori in enumerate(valori_nodi, start=1):
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori)))) matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori return matrice_valori
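
For reference, a standalone sketch of the islice-based split used above to hand each node its own number of channels; the channel counts and values are invented:

    from itertools import islice

    node_channels = [3, 2, 4]                        # hypothetical channels per node
    valori_splitted = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
    valori_iter = iter(valori_splitted)
    valori_nodi = [list(islice(valori_iter, n)) for n in node_channels]
    # -> [['a', 'b', 'c'], ['d', 'e'], ['f', 'g', 'h', 'i']]
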
@@ -131,11 +140,11 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = get_nodes_type(cfg, ToolNameID, UnitName) node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
for riga in [riga for riga in righe if ';|;' in riga]: for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2) timestamp, batlevel, rilevazioni = riga.replace(';|;',';').split(';',2)
if timestamp == '': if timestamp == '':
continue continue
@@ -148,7 +157,7 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels] valori_nodi = [list(islice(valori_iter, channels)) for channels in node_channels]
for num_nodo, valori in enumerate(valori_nodi, start=1): for num_nodo, valori in enumerate(valori_nodi, start=1):
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori)))) matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori return matrice_valori
@@ -164,7 +173,7 @@ async def make_tlp_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
valori_x_nodo = 2 valori_x_nodo = 2
matrice_valori = [] matrice_valori = []
@@ -175,7 +184,7 @@ async def make_tlp_matrix(cfg: object, id: int, pool: object) -> list:
lista_rilevazioni.append(barometer) lista_rilevazioni.append(barometer)
valori_nodi = [lista_rilevazioni[i:i + valori_x_nodo] for i in range(0, len(lista_rilevazioni), valori_x_nodo)] valori_nodi = [lista_rilevazioni[i:i + valori_x_nodo] for i in range(0, len(lista_rilevazioni), valori_x_nodo)]
for num_nodo, valori in enumerate(valori_nodi, start=1): for num_nodo, valori in enumerate(valori_nodi, start=1):
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori)))) matrice_valori.append([UnitName, ToolNameID, num_nodo, normalizza_data(EventDate), normalizza_orario(EventTime), batlevel, temperature] + valori + ([None] * (19 - len(valori))))
return matrice_valori return matrice_valori
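
The TLP branch appends the barometer reading and then groups the values two per node; a small sketch of that chunking, with invented values:

    lista_rilevazioni = ['1.10', '2.20', '3.30', '4.40', '980.2']   # last value: barometer
    valori_x_nodo = 2
    valori_nodi = [lista_rilevazioni[i:i + valori_x_nodo]
                   for i in range(0, len(lista_rilevazioni), valori_x_nodo)]
    # -> [['1.10', '2.20'], ['3.30', '4.40'], ['980.2']]
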
@@ -191,20 +200,37 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
pattern = r'^-\d*dB$' pattern = r';-?\d+dB$'
for riga in [riga for riga in righe if ';|;' in riga]: for riga in [riga for riga in righe if ';|;' in riga and 'No RX' not in riga and '.-' not in riga and 'File Creation' not in riga and riga.isprintable()]:
timestamp, batlevel, temperature, rilevazioni = riga.split(';',3) timestamp, rilevazioni = riga.split(';|;',1)
EventDate, EventTime = timestamp.split(' ') EventDate, EventTime = timestamp.split(' ')
if batlevel == '|': #logger.debug(f"GD id {id}: {pattern} {rilevazioni}")
batlevel = temperature if re.search(pattern, rilevazioni):
temperature, rilevazioni = rilevazioni.split(';',1) if len(matrice_valori) == 0:
if re.match(pattern, rilevazioni): matrice_valori.append(['RSSI'])
valori_nodi = rilevazioni.lstrip('|;').rstrip(';').split(';|;') # Toglie '|;' iniziali, toglie eventuali ';' finali, dividi per ';|;' batlevel, temperature, rssi = rilevazioni.split(';')
for num_nodo, valori_nodo in enumerate(valori_nodi, start=1): #logger.debug(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {rssi}")
valori = valori_nodo.split(';')
matrice_valori.append([UnitName, ToolNameID, num_nodo, date_check.conforma_data(EventDate), EventTime, batlevel, temperature] + valori + ([None] * (19 - len(valori)))) gd_timestamp = datetime.strptime(f"{normalizza_data(EventDate)} {normalizza_orario(EventTime)}", "%Y-%m-%d %H:%M:%S")
start_timestamp = gd_timestamp - timedelta(seconds=45)
end_timestamp = gd_timestamp + timedelta(seconds=45)
matrice_valori.append([UnitName, ToolNameID.replace("GD", "DT"), 1, f"{start_timestamp:%Y-%m-%d %H:%M:%S}", f"{end_timestamp:%Y-%m-%d %H:%M:%S}", f"{gd_timestamp:%Y-%m-%d %H:%M:%S}", batlevel, temperature, int(rssi[:-2])])
elif all(char == ';' for char in rilevazioni):
pass
elif ';|;' in rilevazioni:
unit_metrics, data = rilevazioni.split(';|;')
batlevel, temperature = unit_metrics.split(';')
#logger.debug(f"GD id {id}: {EventDate}, {EventTime}, {batlevel}, {temperature}, {data}")
dt_timestamp, dt_batlevel, dt_temperature = await find_nearest_timestamp(cfg, {"timestamp": f"{normalizza_data(EventDate)} {normalizza_orario(EventTime)}", "unit": UnitName, "tool": ToolNameID.replace("GD", "DT"), "node_num": 1}, pool)
EventDate, EventTime = dt_timestamp.strftime('%Y-%m-%d %H:%M:%S').split(' ')
valori = data.split(';')
matrice_valori.append([UnitName, ToolNameID.replace("GD", "DT"), 2, EventDate, EventTime, float(dt_batlevel), float(dt_temperature)] + valori + ([None] * (16 - len(valori))) + [batlevel, temperature, None])
else:
logger.warning(f"GD id {id}: dati non trattati - {rilevazioni}")
return matrice_valori return matrice_valori
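
The GD handling above hinges on two things: an RSSI line matched by the pattern `;-?\d+dB$`, and a ±45 second window used to pair the GD report with the corresponding DT record. A brief sketch; the sample payload and timestamp are invented:

    import re
    from datetime import datetime, timedelta

    pattern = r';-?\d+dB$'
    rilevazioni = "3.65;21.4;-87dB"          # hypothetical "batlevel;temperature;rssi" payload
    assert re.search(pattern, rilevazioni)   # trailing ';-87dB' marks an RSSI line

    gd_timestamp = datetime.strptime("2025-07-27 12:00:10", "%Y-%m-%d %H:%M:%S")
    start_timestamp = gd_timestamp - timedelta(seconds=45)
    end_timestamp = gd_timestamp + timedelta(seconds=45)
    # the matching DT row is then looked up between start_timestamp and end_timestamp
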

141
src/utils/csv/loaders.py Normal file
View File

@@ -0,0 +1,141 @@
import asyncio
import tempfile
import os
from utils.database.loader_action import load_data, update_status, unlock
from utils.database import WorkflowFlags
from utils.csv.data_preparation import make_pipe_sep_matrix, make_ain_din_matrix, make_channels_matrix, make_tlp_matrix, make_gd_matrix, make_musa_matrix, get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object, action: str) -> None:
"""
Main loader function to process CSV data based on the specified action.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
action (str): The type of data processing to perform (e.g., "pipe_separator", "analogic_digital").
"""
type_matrix_mapping = {
"pipe_separator": make_pipe_sep_matrix,
"analogic_digital": make_ain_din_matrix,
"channels": make_channels_matrix,
"tlp": make_tlp_matrix,
"gd": make_gd_matrix,
"musa": make_musa_matrix
}
if action in type_matrix_mapping:
function_to_call = type_matrix_mapping[action]
# Create a matrix of values from the data
matrice_valori = await function_to_call(cfg, id, pool)
logger.info("matrice valori creata")
# Load the data into the database
if await load_data(cfg, matrice_valori, pool, type=action):
await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
await unlock(cfg, id, pool)
else:
logger.warning(f"Action '{action}' non riconosciuta.")
async def get_next_csv_atomic(pool: object, table_name: str, status: int, next_status: int) -> tuple:
"""
Retrieves the next available CSV record for processing in an atomic manner.
This function acquires a database connection from the pool, begins a transaction,
and attempts to select and lock a single record from the specified table that
matches the given status and has not yet reached the next_status. It uses
`SELECT FOR UPDATE SKIP LOCKED` to ensure atomicity and prevent other workers
from processing the same record concurrently.
Args:
pool (object): The database connection pool.
table_name (str): The name of the table to query.
status (int): The current status flag that the record must have.
next_status (int): The status flag that the record should NOT have yet.
Returns:
tuple: The next available received record if found, otherwise None.
"""
async with pool.acquire() as conn:
# IMPORTANTE: Disabilita autocommit per questa transazione
await conn.begin()
try:
async with conn.cursor() as cur:
# Usa SELECT FOR UPDATE per lock atomico
await cur.execute(f"""
SELECT id, unit_type, tool_type, unit_name, tool_name
FROM {table_name}
WHERE locked = 0
AND ((status & %s) > 0 OR %s = 0)
AND (status & %s) = 0
ORDER BY id
LIMIT 1
FOR UPDATE SKIP LOCKED
""", (status, status, next_status))
result = await cur.fetchone()
if result:
await cur.execute(f"""
UPDATE {table_name}
SET locked = 1
WHERE id = %s
""", (result[0],))
# Commit esplicito per rilasciare il lock
await conn.commit()
return result
except Exception as e:
# Rollback in caso di errore
await conn.rollback()
raise e
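
A sketch of how the bitmask predicate in the query behaves, plus a hypothetical call inside an async context (the table name and flag choice are examples):

    # Claim the next record that is DATA_LOADED but not yet DATA_ELABORATED.
    row = await get_next_csv_atomic(pool, "received",
                                    status=WorkflowFlags.DATA_LOADED,
                                    next_status=WorkflowFlags.DATA_ELABORATED)

    # The WHERE clause corresponds to this predicate on each unlocked row's status column;
    # the "or status == 0" branch lets CSV_RECEIVED (value 0) act as "no filter".
    def eligible(record_status: int, status: int, next_status: int) -> bool:
        return ((record_status & status) > 0 or status == 0) and (record_status & next_status) == 0
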
async def main_old_script_loader(cfg: object, id: int, pool: object, script_name: str) -> None:
"""
This function retrieves CSV data, writes it to a temporary file,
executes an external Python script to process it,
and then updates the workflow status in the database.
Args:
cfg (object): The configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
script_name (str): The name of the script to execute (without the .py extension).
"""
filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
# Creare un file temporaneo
with tempfile.NamedTemporaryFile(mode='w', prefix= filename, suffix='.csv', delete=False) as temp_file:
temp_file.write(ToolData)
temp_filename = temp_file.name
try:
# Usa asyncio.subprocess per vero async
process = await asyncio.create_subprocess_exec(
'python3', f'old_scripts/{script_name}.py', temp_filename,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
result_stdout = stdout.decode('utf-8')
result_stderr = stderr.decode('utf-8')
finally:
# Pulire il file temporaneo
os.unlink(temp_filename)
if process.returncode != 0:
logger.error(f"Errore nell'esecuzione del programma {script_name}.py: {result_stderr}")
raise Exception(f"Errore nel programma: {result_stderr}")
else:
logger.info(f"Programma {script_name}.py eseguito con successo.")
logger.debug(f"Stdout: {result_stdout}")
await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
await unlock(cfg, id, pool)

26
src/utils/csv/parser.py Normal file
View File

@@ -0,0 +1,26 @@
import re
def extract_value(patterns: list, primary_source: str, secondary_source: str = None, default: str='Not Defined') -> str:
"""
Extracts a value from a given source (or sources) based on a list of regex patterns.
It iterates through the provided patterns and attempts to find a match in the
primary source first, then in the secondary source if provided. The first
successful match is returned. If no match is found after checking all sources
with all patterns, a default value is returned.
Args:
patterns (list): A list of regular expression strings to search for.
primary_source (str): The main string to search within.
secondary_source (str, optional): An additional string to search within if no match is found in the primary source. Defaults to None.
default (str, optional): The value to return if no match is found. Defaults to 'Not Defined'.
Returns:
str: The first matched value, or the default value if no match is found.
"""
for source in [source for source in (primary_source, secondary_source) if source is not None]:
for pattern in patterns:
matches = re.findall(pattern, source, re.IGNORECASE)
if matches:
return matches[0] # Return the first match immediately
return default # Return default if no matches are found
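
Hypothetical usage of extract_value(); the patterns and filenames below are invented for illustration:

    unit = extract_value([r'(ASE\d+)'], 'ASE042_T7_20250727.csv')        # -> 'ASE042'
    tool = extract_value([r'(GD\d+)'], 'ASE042_T7_20250727.csv',
                         secondary_source='header: GD07;...')            # -> 'GD07'
    missing = extract_value([r'(XYZ\d+)'], 'nothing_useful_here.csv')    # -> 'Not Defined'
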

View File

@@ -0,0 +1,36 @@
class WorkflowFlags:
"""
Defines integer flags representing different stages in a data processing workflow.
Each flag is a power of 2, allowing them to be combined using bitwise operations
to represent multiple states simultaneously.
"""
CSV_RECEIVED = 0 # 0000
DATA_LOADED = 1 # 0001
START_ELAB = 2 # 0010
DATA_ELABORATED = 4 # 0100
SENT_RAW_DATA = 8 # 1000
SENT_ELAB_DATA = 16 # 10000
DUMMY_ELABORATED = 32 # 100000 (Used for testing or specific dummy elaborations)
# Mappatura flag -> colonna timestamp
FLAG_TO_TIMESTAMP = {
WorkflowFlags.CSV_RECEIVED: "inserted_at",
WorkflowFlags.DATA_LOADED: "loaded_at",
WorkflowFlags.START_ELAB: "start_elab_at",
WorkflowFlags.DATA_ELABORATED: "elaborated_at",
WorkflowFlags.SENT_RAW_DATA: "sent_raw_at",
WorkflowFlags.SENT_ELAB_DATA: "sent_elab_at",
WorkflowFlags.DUMMY_ELABORATED: "elaborated_at" # Shares the same timestamp column as DATA_ELABORATED
}
"""
A dictionary mapping each WorkflowFlag to the corresponding database column
name that stores the timestamp when that workflow stage was reached.
"""
# Dimensione degli split della matrice per il caricamento
BATCH_SIZE = 1000
"""
The number of records to process in a single batch when loading data into the database.
This helps manage memory usage and improve performance for large datasets.
"""

View File

@@ -0,0 +1,147 @@
import logging
import aiomysql
import csv
from io import StringIO
from utils.database import WorkflowFlags
logger = logging.getLogger(__name__)
sub_select = {
WorkflowFlags.DATA_ELABORATED:
"""m.matcall, s.`desc` AS statustools""",
WorkflowFlags.SENT_RAW_DATA:
"""t.ftp_send, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, s.`desc` AS statustools, IFNULL(u.duedate, "") AS duedate""",
WorkflowFlags.SENT_ELAB_DATA:
"""t.ftp_send_raw, IFNULL(u.ftp_mode_raw, "") AS ftp_mode_raw,
IFNULL(u.ftp_addrs_raw, "") AS ftp_addrs_raw, IFNULL(u.ftp_user_raw, "") AS ftp_user_raw,
IFNULL(u.ftp_passwd_raw, "") AS ftp_passwd_raw, IFNULL(u.ftp_filename_raw, "") AS ftp_filename_raw,
IFNULL(u.ftp_parm_raw, "") AS ftp_parm_raw, IFNULL(u.ftp_target_raw, "") AS ftp_target_raw,
t.unit_id, s.`desc` AS statustools, u.inoltro_ftp_raw, u.inoltro_api_raw,
IFNULL(u.inoltro_api_url_raw, "") AS inoltro_api_url_raw,
IFNULL(u.inoltro_api_bearer_token_raw, "") AS inoltro_api_bearer_token_raw,
t.api_send_raw, IFNULL(u.duedate, "") AS duedate
"""
}
async def get_tool_info(next_status: int, unit: str, tool: str, pool: object) -> tuple:
"""
Retrieves tool-specific information from the database based on the next workflow status,
unit name, and tool name.
This function dynamically selects columns based on the `next_status` provided,
joining `matfuncs`, `tools`, `units`, and `statustools` tables.
Args:
next_status (int): The next workflow status flag (e.g., WorkflowFlags.DATA_ELABORATED).
This determines which set of columns to select from the database.
unit (str): The name of the unit associated with the tool.
tool (str): The name of the tool.
pool (object): The database connection pool.
Returns:
tuple: A dictionary-like object (aiomysql.DictCursor result) containing the tool information,
or None if no information is found for the given unit and tool.
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
try:
await cur.execute(f"""
SELECT {sub_select[next_status]}
FROM matfuncs AS m
INNER JOIN tools AS t ON t.matfunc = m.id
INNER JOIN units AS u ON u.id = t.unit_id
INNER JOIN statustools AS s ON t.statustool_id = s.id
WHERE t.name = '{tool}' AND u.name = '{unit}';
""")
result = await cur.fetchone()
if not result:
logger.warning(f"{unit} - {tool}: Tool info not found.")
return None
else:
return result
except Exception as e:
logger.error(f"Error: {e}")
async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_timestamp: float, pool: object) -> str:
"""
Retrieves elaborated data from the database and formats it as a CSV string.
The query selects data from the `ElabDataView` based on `UnitName`, `ToolNameID`,
and a `updated_at` timestamp, then orders it. The first row of the CSV will be
the column headers.
Args:
cfg (dict): Configuration dictionary (not directly used in the query but passed for consistency).
id_recv (int): The ID of the record being processed (used for logging).
pool (object): The database connection pool.
unit (str): The name of the unit to filter the data.
tool (str): The ID of the tool to filter the data.
matlab_timestamp (float): A timestamp used to filter data updated after this time.
Returns:
str: A string containing the elaborated data in CSV format.
"""
query = """
select * from (
select 'ToolNameID', 'EventDate', 'EventTime', 'NodeNum', 'NodeType', 'NodeDepth',
'XShift', 'YShift', 'ZShift' , 'X', 'Y', 'Z', 'HShift', 'HShiftDir', 'HShift_local',
'speed', 'speed_local', 'acceleration', 'acceleration_local', 'T_node', 'water_level', 'pressure', 'load_value', 'AlfaX', 'AlfaY', 'CalcErr'
union all
select ToolNameID, EventDate, EventTime, NodeNum, NodeType, NodeDepth,
XShift, YShift, ZShift , X, Y, Z, HShift, HShiftDir, HShift_local,
speed, speed_local, acceleration, acceleration_local, T_node, water_level, pressure, load_value, AlfaX, AlfaY, calcerr
from ElabDataView
where UnitName = %s and ToolNameID = %s and updated_at > %s
order by ToolNameID DESC, concat(EventDate, EventTime), convert(`NodeNum`, UNSIGNED INTEGER) DESC
) resulting_set
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(query, (unit, tool, matlab_timestamp))
results = await cur.fetchall()
logger.info(f"id {id_recv} - {unit} - {tool}: estratti i dati per invio CSV")
logger.info(f"Numero di righe estratte: {len(results)}")
# Creare CSV in memoria
output = StringIO()
writer = csv.writer(output, delimiter=",", lineterminator="\n", quoting=csv.QUOTE_MINIMAL)
for row in results:
writer.writerow(row)
csv_data = output.getvalue()
output.close()
return csv_data
except Exception as e:
logger.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}")
return None
async def get_elab_timestamp(id_recv: int, pool: object) -> float:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f"""SELECT start_elab_at from received where id = {id_recv}""")
results = await cur.fetchone()
return results[0]
except Exception as e:
logger.error(f"id {id_recv} - Errore nella query timestamp elaborazione: {e}")
return None
async def check_flag_elab(pool: object) -> None:
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute("SELECT stop_elab from admin_panel")
results = await cur.fetchone()
return results[0]
except Exception as e:
logger.error(f"Errore nella query check flag stop elaborazioni: {e}")
return None

View File

@@ -0,0 +1,232 @@
#!.venv/bin/python
import logging
import asyncio
from utils.database import FLAG_TO_TIMESTAMP, BATCH_SIZE
from datetime import datetime, timedelta
logger = logging.getLogger(__name__)
async def load_data(cfg: object, matrice_valori: list, pool: object, type: str) -> bool:
"""Carica una lista di record di dati grezzi nel database.
Esegue un'operazione di inserimento massivo (executemany) per caricare i dati.
Utilizza la clausola 'ON DUPLICATE KEY UPDATE' per aggiornare i record esistenti.
Implementa una logica di re-tentativo in caso di deadlock.
Args:
cfg (object): L'oggetto di configurazione contenente i nomi delle tabelle e i parametri di re-tentativo.
matrice_valori (list): Una lista di tuple, dove ogni tupla rappresenta una riga da inserire.
pool (object): Il pool di connessioni al database.
type (str): tipo di caricamento dati. Per GD fa l'update del tool DT corrispondente
Returns:
bool: True se il caricamento ha avuto successo, False altrimenti.
"""
if not matrice_valori:
logger.info("Nulla da caricare.")
return True
if type == "gd" and matrice_valori[0][0] == "RSSI":
matrice_valori.pop(0)
sql_load_RAWDATA = f"""
UPDATE {cfg.dbrawdata} t1
JOIN (
SELECT id
FROM {cfg.dbrawdata}
WHERE UnitName = %s AND ToolNameID = %s AND NodeNum = %s
AND TIMESTAMP(`EventDate`, `EventTime`) BETWEEN %s AND %s
ORDER BY ABS(TIMESTAMPDIFF(SECOND, TIMESTAMP(`EventDate`, `EventTime`), %s))
LIMIT 1
) t2 ON t1.id = t2.id
SET t1.BatLevelModule = %s, t1.TemperatureModule = %s, t1.RssiModule = %s
"""
else:
sql_load_RAWDATA = f"""
INSERT INTO {cfg.dbrawdata} (
`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,
`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,
`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`,
`BatLevelModule`,`TemperatureModule`, `RssiModule`
)
VALUES (
%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s
) as new_data
ON DUPLICATE KEY UPDATE
`BatLevel` = IF({cfg.dbrawdata}.`BatLevel` != new_data.`BatLevel`, new_data.`BatLevel`, {cfg.dbrawdata}.`BatLevel`),
`Temperature` = IF({cfg.dbrawdata}.`Temperature` != new_data.Temperature, new_data.Temperature, {cfg.dbrawdata}.`Temperature`),
`Val0` = IF({cfg.dbrawdata}.`Val0` != new_data.Val0 AND new_data.`Val0` IS NOT NULL, new_data.Val0, {cfg.dbrawdata}.`Val0`),
`Val1` = IF({cfg.dbrawdata}.`Val1` != new_data.Val1 AND new_data.`Val1` IS NOT NULL, new_data.Val1, {cfg.dbrawdata}.`Val1`),
`Val2` = IF({cfg.dbrawdata}.`Val2` != new_data.Val2 AND new_data.`Val2` IS NOT NULL, new_data.Val2, {cfg.dbrawdata}.`Val2`),
`Val3` = IF({cfg.dbrawdata}.`Val3` != new_data.Val3 AND new_data.`Val3` IS NOT NULL, new_data.Val3, {cfg.dbrawdata}.`Val3`),
`Val4` = IF({cfg.dbrawdata}.`Val4` != new_data.Val4 AND new_data.`Val4` IS NOT NULL, new_data.Val4, {cfg.dbrawdata}.`Val4`),
`Val5` = IF({cfg.dbrawdata}.`Val5` != new_data.Val5 AND new_data.`Val5` IS NOT NULL, new_data.Val5, {cfg.dbrawdata}.`Val5`),
`Val6` = IF({cfg.dbrawdata}.`Val6` != new_data.Val6 AND new_data.`Val6` IS NOT NULL, new_data.Val6, {cfg.dbrawdata}.`Val6`),
`Val7` = IF({cfg.dbrawdata}.`Val7` != new_data.Val7 AND new_data.`Val7` IS NOT NULL, new_data.Val7, {cfg.dbrawdata}.`Val7`),
`Val8` = IF({cfg.dbrawdata}.`Val8` != new_data.Val8 AND new_data.`Val8` IS NOT NULL, new_data.Val8, {cfg.dbrawdata}.`Val8`),
`Val9` = IF({cfg.dbrawdata}.`Val9` != new_data.Val9 AND new_data.`Val9` IS NOT NULL, new_data.Val9, {cfg.dbrawdata}.`Val9`),
`ValA` = IF({cfg.dbrawdata}.`ValA` != new_data.ValA AND new_data.`ValA` IS NOT NULL, new_data.ValA, {cfg.dbrawdata}.`ValA`),
`ValB` = IF({cfg.dbrawdata}.`ValB` != new_data.ValB AND new_data.`ValB` IS NOT NULL, new_data.ValB, {cfg.dbrawdata}.`ValB`),
`ValC` = IF({cfg.dbrawdata}.`ValC` != new_data.ValC AND new_data.`ValC` IS NOT NULL, new_data.ValC, {cfg.dbrawdata}.`ValC`),
`ValD` = IF({cfg.dbrawdata}.`ValD` != new_data.ValD AND new_data.`ValD` IS NOT NULL, new_data.ValD, {cfg.dbrawdata}.`ValD`),
`ValE` = IF({cfg.dbrawdata}.`ValE` != new_data.ValE AND new_data.`ValE` IS NOT NULL, new_data.ValE, {cfg.dbrawdata}.`ValE`),
`ValF` = IF({cfg.dbrawdata}.`ValF` != new_data.ValF AND new_data.`ValF` IS NOT NULL, new_data.ValF, {cfg.dbrawdata}.`ValF`),
`BatLevelModule` = IF({cfg.dbrawdata}.`BatLevelModule` != new_data.BatLevelModule, new_data.BatLevelModule, {cfg.dbrawdata}.`BatLevelModule`),
`TemperatureModule` = IF({cfg.dbrawdata}.`TemperatureModule` != new_data.TemperatureModule, new_data.TemperatureModule, {cfg.dbrawdata}.`TemperatureModule`),
`RssiModule` = IF({cfg.dbrawdata}.`RssiModule` != new_data.RssiModule, new_data.RssiModule, {cfg.dbrawdata}.`RssiModule`),
`Created_at` = NOW()
"""
#logger.info(f"Query insert: {sql_load_RAWDATA}.")
#logger.info(f"Matrice valori da inserire: {matrice_valori}.")
rc = False
async with pool.acquire() as conn:
async with conn.cursor() as cur:
for attempt in range(cfg.max_retries):
try:
logger.info(f"Loading data attempt {attempt + 1}.")
for i in range(0, len(matrice_valori), BATCH_SIZE):
batch = matrice_valori[i:i + BATCH_SIZE]
await cur.executemany(sql_load_RAWDATA, batch)
await conn.commit()
logger.info(f"Completed batch {i//BATCH_SIZE + 1}/{(len(matrice_valori)-1)//BATCH_SIZE + 1}")
logger.info("Data loaded.")
rc = True
break
except Exception as e:
await conn.rollback()
logger.error(f"Error: {e}.")
# logger.error(f"Matrice valori da inserire: {batch}.")
if e.args[0] == 1213: # Deadlock detected
logger.warning(
f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}"
)
if attempt < cfg.max_retries - 1:
delay = 2 * attempt
await asyncio.sleep(delay)
continue
else:
logger.error("Max retry attempts reached for deadlock")
raise
return rc
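
A small arithmetic sketch of the batching above, assuming the BATCH_SIZE of 1000 defined in utils.database (row contents invented):

    matrice_valori = [("row",)] * 2500
    n_batches = (len(matrice_valori) - 1) // BATCH_SIZE + 1      # -> 3
    batches = [matrice_valori[i:i + BATCH_SIZE]
               for i in range(0, len(matrice_valori), BATCH_SIZE)]
    # executemany() runs once per batch; the log reads "Completed batch 1/3" ... "3/3"
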
async def update_status(cfg: object, id: int, status: str, pool: object) -> None:
"""Aggiorna lo stato di un record nella tabella dei record CSV.
Args:
cfg (object): L'oggetto di configurazione contenente il nome della tabella.
id (int): L'ID del record da aggiornare.
status (int): Il nuovo stato da impostare.
pool (object): Il pool di connessioni al database.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(
f"""update {cfg.dbrectable} set
status = status | {status},
{FLAG_TO_TIMESTAMP[status]} = now()
where id = {id}
"""
)
await conn.commit()
logger.info(f"Status updated id {id}.")
except Exception as e:
await conn.rollback()
logger.error(f"Error: {e}")
async def unlock(cfg: object, id: int, pool: object) -> None:
"""Sblocca un record nella tabella dei record CSV.
Imposta il campo 'locked' a 0 per un dato ID.
Args:
cfg (object): L'oggetto di configurazione contenente il nome della tabella.
id (int): L'ID del record da sbloccare.
pool (object): Il pool di connessioni al database.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(
f"update {cfg.dbrectable} set locked = 0 where id = {id}"
)
await conn.commit()
logger.info(f"id {id} unlocked.")
except Exception as e:
await conn.rollback()
logger.error(f"Error: {e}")
async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tuple:
"""Recupera le informazioni per l'esecuzione di un comando Matlab dal database.
Args:
cfg (object): L'oggetto di configurazione.
unit (str): Il nome dell'unità.
tool (str): Il nome dello strumento.
pool (object): Il pool di connessioni al database.
Returns:
tuple: Una tupla contenente le informazioni del comando Matlab, o None in caso di errore.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f'''select m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, IFNULL(u.duedate, "") as duedate
from matfuncs as m
inner join tools as t on t.matfunc = m.id
inner join units as u on u.id = t.unit_id
inner join statustools as s on t.statustool_id = s.id
where t.name = "{tool}" and u.name = "{unit}"''')
return await cur.fetchone()
except Exception as e:
logger.error(f"Error: {e}")
async def find_nearest_timestamp(cfg: object, unit_tool_data: dict, pool: object) -> tuple:
"""
Finds the nearest timestamp in the raw data table based on a reference timestamp
and unit/tool/node information.
Args:
cfg (object): Configuration object containing database table name (`cfg.dbrawdata`).
unit_tool_data (dict): A dictionary containing:
- "timestamp" (str): The reference timestamp string in "%Y-%m-%d %H:%M:%S" format.
- "unit" (str): The UnitName to filter by.
- "tool" (str): The ToolNameID to filter by.
- "node_num" (int): The NodeNum to filter by.
pool (object): The database connection pool.
Returns:
tuple: A tuple containing the event timestamp, BatLevel, and Temperature of the
nearest record, or None if an error occurs or no record is found.
"""
ref_timestamp = datetime.strptime(unit_tool_data["timestamp"], "%Y-%m-%d %H:%M:%S")
start_timestamp = ref_timestamp - timedelta(seconds=45)
end_timestamp = ref_timestamp + timedelta(seconds=45)
logger.info(f"Find nearest timestamp: {ref_timestamp}")
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f'''SELECT TIMESTAMP(`EventDate`, `EventTime`) AS event_timestamp, BatLevel, Temperature
FROM {cfg.dbrawdata}
WHERE UnitName = "{unit_tool_data["unit"]}" AND ToolNameID = "{unit_tool_data["tool"]}" AND NodeNum = {unit_tool_data["node_num"]}
AND TIMESTAMP(`EventDate`, `EventTime`) BETWEEN "{start_timestamp}" AND "{end_timestamp}"
ORDER BY ABS(TIMESTAMPDIFF(SECOND, TIMESTAMP(`EventDate`, `EventTime`), "{ref_timestamp}"))
LIMIT 1
''')
return await cur.fetchone()
except Exception as e:
logger.error(f"Error: {e}")

78
src/utils/general.py Normal file
View File

@@ -0,0 +1,78 @@
import glob
import os
from itertools import cycle, chain
import logging
logger = logging.getLogger()
def alterna_valori(*valori: any, ping_pong: bool = False) -> any:
"""
Genera una sequenza ciclica di valori, con opzione per una sequenza "ping-pong".
Args:
*valori (any): Uno o più valori da ciclare.
ping_pong (bool, optional): Se True, la sequenza sarà valori -> valori al contrario.
Ad esempio, per (1, 2, 3) diventa 1, 2, 3, 2, 1, 2, 3, ...
Se False, la sequenza è semplicemente ciclica.
Defaults to False.
Yields:
any: Il prossimo valore nella sequenza ciclica.
"""
if not valori:
return
if ping_pong:
# Crea la sequenza ping-pong: valori + valori al contrario (senza ripetere primo e ultimo)
forward = valori
backward = valori[-2:0:-1] # Esclude ultimo e primo elemento
ping_pong_sequence = chain(forward, backward)
yield from cycle(ping_pong_sequence)
else:
yield from cycle(valori)
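
Example output of alterna_valori with illustrative values, plain cycle versus ping-pong:

    gen = alterna_valori(1, 2, 3)
    [next(gen) for _ in range(5)]                # -> [1, 2, 3, 1, 2]

    pp = alterna_valori(1, 2, 3, ping_pong=True)
    [next(pp) for _ in range(6)]                 # -> [1, 2, 3, 2, 1, 2]
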
async def read_error_lines_from_logs(base_path: str, pattern: str) -> tuple[list[str], list[str]]:
"""
Reads error and warning lines from log files matching a given pattern within a base path.
This asynchronous function searches for log files, reads their content, and categorizes
lines starting with 'Error' as errors and all other non-empty lines as warnings.
Args:
base_path (str): The base directory where log files are located.
pattern (str): The glob-style pattern to match log filenames (e.g., "*.txt", "prefix_*_output_error.txt").
Returns:
tuple[list[str], list[str]]: A tuple containing two lists:
- The first list contains all extracted error messages.
- The second list contains all extracted warning messages."""
# Costruisce il path completo con il pattern
search_pattern = os.path.join(base_path, pattern)
# Trova tutti i file che corrispondono al pattern
matching_files = glob.glob(search_pattern)
if not matching_files:
logger.warning(f"Nessun file trovato per il pattern: {search_pattern}")
return [], []
errors = []
warnings = []
for file_path in matching_files:
try:
with open(file_path, 'r', encoding='utf-8') as file:
lines = file.readlines()
# Usando dict.fromkeys() per mantenere l'ordine e togliere le righe duplicate per i warnings
non_empty_lines = [line.strip() for line in lines if line.strip()]
errors = [line for line in non_empty_lines if line.startswith('Error')]
warnings = list(dict.fromkeys(line for line in non_empty_lines if not line.startswith('Error')))
except Exception as e:
logger.error(f"Errore durante la lettura del file {file_path}: {e}")
return errors, warnings
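
Hypothetical usage inside an async context (base path and pattern invented), plus the dict.fromkeys() idiom used above for order-preserving de-duplication:

    errors, warnings = await read_error_lines_from_logs("/var/log/elab", "*_output_error.txt")

    list(dict.fromkeys(["W1", "W2", "W1", "W3"]))   # -> ["W1", "W2", "W3"]
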

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as hirpinia_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'hirpinia_hirpinia'.
Questa funzione è un wrapper per `main_old_script_loader` e passa il nome
dello script di elaborazione come "hirpiniaLoadScript".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await hirpinia_main_loader(cfg, id, pool, "hirpiniaLoadScript")

View File

@@ -1,6 +1,7 @@
from utils.csv.loaders import main_loader as pipe_sep_main_loader from utils.csv.loaders import main_loader as pipe_sep_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None: async def main_loader(cfg: object, id: int, pool: object) -> None:
""" """
Carica ed elabora i dati CSV specifici per il tipo 'hortus_hortus'. Carica ed elabora i dati CSV specifici per il tipo 'hortus_hortus'.

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as vulink_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'isi_csv_log_vulink'.
Questa funzione è un wrapper per `vulink_main_loader` e passa il nome
dello script di elaborazione come "vulinkScript".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await vulink_main_loader(cfg, id, pool, "vulinkScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as sisgeo_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'sisgeo_health'.
Questa funzione è un wrapper per `main_old_script_loader` e passa il nome
dello script di elaborazione come "sisgeoLoadScript".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await sisgeo_main_loader(cfg, id, pool, "sisgeoLoadScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as sisgeo_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'sisgeo_readings'.
Questa funzione è un wrapper per `main_old_script_loader` e passa il nome
dello script di elaborazione come "sisgeoLoadScript".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await sisgeo_main_loader(cfg, id, pool, "sisgeoLoadScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as sorotecPini_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'sorotecpini_co'.
Questa funzione è un wrapper per `sorotecPini_main_loader` e passa il nome
dello script di elaborazione come "sorotecPini".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await sorotecPini_main_loader(cfg, id, pool, "sorotecPini")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as ts_pini_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'stazionetotale_integrity_monitor'.
Questa funzione è un wrapper per `main_old_script_loader` e passa il nome
dello script di elaborazione come "TS_PiniScript".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await ts_pini_main_loader(cfg, id, pool, "TS_PiniScript")

View File

@@ -0,0 +1,16 @@
from utils.csv.loaders import main_old_script_loader as ts_pini_main_loader
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""
Carica ed elabora i dati CSV specifici per il tipo 'stazionetotale_messpunktepini'.
Questa funzione è un wrapper per `ts_pini_main_loader` e passa il nome
dello script di elaborazione come "TS_PiniScript".
Args:
cfg (object): L'oggetto di configurazione.
id (int): L'ID del record CSV da elaborare.
pool (object): Il pool di connessioni al database.
"""
await ts_pini_main_loader(cfg, id, pool, "TS_PiniScript")

View File

@@ -0,0 +1,37 @@
from datetime import datetime
def normalizza_data(data_string: str)->str:
"""
Normalizza una stringa di data al formato YYYY-MM-DD, provando diversi formati di input.
Args:
data_string (str): La stringa di data da normalizzare.
Returns:
str: La data normalizzata nel formato YYYY-MM-DD,
o None se la stringa non può essere interpretata come una data.
"""
formato_desiderato = "%Y-%m-%d"
formati_input = ["%Y/%m/%d", "%Y-%m-%d", "%d-%m-%Y","%d/%m/%Y", ] # Ordine importante: prova prima il più probabile
for formato_input in formati_input:
try:
data_oggetto = datetime.strptime(data_string, formato_input)
return data_oggetto.strftime(formato_desiderato)
except ValueError:
continue # Prova il formato successivo se quello attuale fallisce
return None # Se nessun formato ha avuto successo
def normalizza_orario(orario_str):
try:
# Prova prima con HH:MM:SS
dt = datetime.strptime(orario_str, "%H:%M:%S")
return dt.strftime("%H:%M:%S")
except ValueError:
try:
# Se fallisce, prova con HH:MM
dt = datetime.strptime(orario_str, "%H:%M")
return dt.strftime("%H:%M:%S")
except ValueError:
return orario_str # Restituisce originale se non parsabile
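
Example conversions, for illustration:

    normalizza_data("27/07/2025")   # -> "2025-07-27"
    normalizza_data("2025/07/27")   # -> "2025-07-27"
    normalizza_data("not a date")   # -> None
    normalizza_orario("12:05")      # -> "12:05:00"
    normalizza_orario("12:05:09")   # -> "12:05:09"
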

View File

@@ -1 +0,0 @@
"""Config ini setting"""

View File

@@ -1,74 +0,0 @@
from utils.database.loader_action import load_data, update_status, unlock
from utils.database import DATA_LOADED
from utils.csv.data_preparation import make_pipe_sep_matrix, make_ain_din_matrix, make_channels_matrix, make_tlp_matrix, make_gd_matrix, make_musa_matrix
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object, action: str) -> None:
"""
Main loader function to process CSV data based on the specified action.
Args:
cfg (object): Configuration object.
id (int): The ID of the CSV record to process.
pool (object): The database connection pool.
action (str): The type of data processing to perform (e.g., "pipe_separator", "analogic_digital").
"""
type_matrix_mapping = {
"pipe_separator": make_pipe_sep_matrix,
"analogic_digital": make_ain_din_matrix,
"channels": make_channels_matrix,
"tlp": make_tlp_matrix,
"gd": make_gd_matrix,
"musa": make_musa_matrix
}
if action in type_matrix_mapping:
function_to_call = type_matrix_mapping[action]
# Create a matrix of values from the data
matrice_valori = await function_to_call(cfg, id, pool)
logger.info("matrice valori creata")
# Load the data into the database
if await load_data(cfg, matrice_valori, pool):
await update_status(cfg, id, DATA_LOADED, pool)
await unlock(cfg, id, pool)
else:
logger.warning(f"Action '{action}' non riconosciuta.")
async def get_next_csv_atomic(pool, table_name, status):
"""Preleva atomicamente il prossimo CSV da elaborare"""
async with pool.acquire() as conn:
# IMPORTANTE: Disabilita autocommit per questa transazione
await conn.begin()
try:
async with conn.cursor() as cur:
# Usa SELECT FOR UPDATE per lock atomico
await cur.execute(f"""
SELECT id, unit_type, tool_type, unit_name, tool_name
FROM {table_name}
WHERE locked = 0 AND status = %s
ORDER BY id
LIMIT 1
FOR UPDATE SKIP LOCKED
""", (status,))
result = await cur.fetchone()
if result:
await cur.execute(f"""
UPDATE {table_name}
SET locked = 1
WHERE id = %s
""", (result[0],))
# Commit esplicito per rilasciare il lock
await conn.commit()
return result
except Exception as e:
# Rollback in caso di errore
await conn.rollback()
raise e

View File

@@ -1,10 +0,0 @@
import re
def extract_value(patterns: list, primary_source: str, secondary_source: str, default='Not Defined') -> str:
for source in (primary_source, secondary_source):
for pattern in patterns:
matches = re.findall(pattern, source, re.IGNORECASE)
if matches:
return matches[0] # Return the first match immediately
return default # Return default if no matches are found

View File

@@ -1,4 +0,0 @@
CSV_RECEIVED = 0
DATA_LOADED = 1
DATA_ELABORATED = 2
DATA_SENT = 3

View File

@@ -1,60 +0,0 @@
import csv
from io import StringIO
import logging
logger = logging.getLogger(__name__)
async def get_data_as_csv(cfg: dict, id_recv: int, unit: str, tool: str, matlab_timestamp: float, pool: object) -> str:
"""
Retrieves elaborated data from the database and formats it as a CSV string.
The query selects data from the `ElabDataView` based on `UnitName`, `ToolNameID`,
and a `updated_at` timestamp, then orders it. The first row of the CSV will be
the column headers.
Args:
cfg (dict): Configuration dictionary (not directly used in the query but passed for consistency).
id (int): The ID of the record being processed (used for logging).
pool (object): The database connection pool.
unit (str): The name of the unit to filter the data.
tool (str): The ID of the tool to filter the data.
matlab_timestamp (float): A timestamp used to filter data updated after this time.
Returns:
str: A string containing the elaborated data in CSV format.
"""
query = """
select * from (
select 'ToolNameID', 'EventDate', 'EventTime', 'NodeNum', 'NodeType', 'NodeDepth',
'XShift', 'YShift', 'ZShift' , 'X', 'Y', 'Z', 'HShift', 'HShiftDir', 'HShift_local',
'speed', 'speed_local', 'acceleration', 'acceleration_local', 'T_node', 'water_level', 'pressure', 'load_value', 'AlfaX', 'AlfaY', 'CalcErr'
union all
select ToolNameID, EventDate, EventTime, NodeNum, NodeType, NodeDepth,
XShift, YShift, ZShift , X, Y, Z, HShift, HShiftDir, HShift_local,
speed, speed_local, acceleration, acceleration_local, T_node, water_level, pressure, load_value, AlfaX, AlfaY, calcerr
from ElabDataView
where UnitName = %s and ToolNameID = %s and updated_at > %s
order by ToolNameID DESC, concat(EventDate, EventTime), convert(`NodeNum`, UNSIGNED INTEGER) DESC
) resulting_set
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(query, (unit, tool, matlab_timestamp))
results = await cur.fetchall()
logger.info(f"id {id_recv} - {unit} - {tool}: estratti i dati per invio CSV")
logger.info(f"Numero di righe estratte: {len(results)}")
# Creare CSV in memoria
output = StringIO()
writer = csv.writer(output, delimiter=";", lineterminator="\n", quoting=csv.QUOTE_MINIMAL)
for row in results:
writer.writerow(row)
csv_data = output.getvalue()
output.close()
return csv_data
except Exception as e:
logging.error(f"id {id_recv} - {unit} - {tool} - errore nel query creazione csv: {e}")
return None

View File

@@ -1,164 +0,0 @@
#!.venv/bin/python
import logging
import asyncio
logger = logging.getLogger(__name__)
timestamp_cols = ["inserted_at", "loaded_at", "elaborated_at", "sent_at"]
async def load_data(cfg: object, matrice_valori: list, pool: object) -> bool:
"""Carica una lista di record di dati grezzi nel database.
Esegue un'operazione di inserimento massivo (executemany) per caricare i dati.
Utilizza la clausola 'ON DUPLICATE KEY UPDATE' per aggiornare i record esistenti.
Implementa una logica di re-tentativo in caso di deadlock.
Args:
cfg (object): L'oggetto di configurazione contenente i nomi delle tabelle e i parametri di re-tentativo.
matrice_valori (list): Una lista di tuple, dove ogni tupla rappresenta una riga da inserire.
pool (object): Il pool di connessioni al database.
Returns:
bool: True se il caricamento ha avuto successo, False altrimenti.
"""
if not matrice_valori:
logger.info("Nulla da caricare.")
return True
sql_insert_RAWDATA = f"""
INSERT INTO {cfg.dbrawdata} (
`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,
`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,
`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`,
`BatLevelModule`,`TemperatureModule`, `RssiModule`
)
VALUES (
%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s
) as new_data
ON DUPLICATE KEY UPDATE
`BatLevel` = IF({cfg.dbrawdata}.`BatLevel` != new_data.`BatLevel`, new_data.`BatLevel`, {cfg.dbrawdata}.`BatLevel`),
`Temperature` = IF({cfg.dbrawdata}.`Temperature` != new_data.Temperature, new_data.Temperature, {cfg.dbrawdata}.`Temperature`),
`Val0` = IF({cfg.dbrawdata}.`Val0` != new_data.Val0 AND new_data.`Val0` IS NOT NULL, new_data.Val0, {cfg.dbrawdata}.`Val0`),
`Val1` = IF({cfg.dbrawdata}.`Val1` != new_data.Val1 AND new_data.`Val1` IS NOT NULL, new_data.Val1, {cfg.dbrawdata}.`Val1`),
`Val2` = IF({cfg.dbrawdata}.`Val2` != new_data.Val2 AND new_data.`Val2` IS NOT NULL, new_data.Val2, {cfg.dbrawdata}.`Val2`),
`Val3` = IF({cfg.dbrawdata}.`Val3` != new_data.Val3 AND new_data.`Val3` IS NOT NULL, new_data.Val3, {cfg.dbrawdata}.`Val3`),
`Val4` = IF({cfg.dbrawdata}.`Val4` != new_data.Val4 AND new_data.`Val4` IS NOT NULL, new_data.Val4, {cfg.dbrawdata}.`Val4`),
`Val5` = IF({cfg.dbrawdata}.`Val5` != new_data.Val5 AND new_data.`Val5` IS NOT NULL, new_data.Val5, {cfg.dbrawdata}.`Val5`),
`Val6` = IF({cfg.dbrawdata}.`Val6` != new_data.Val6 AND new_data.`Val6` IS NOT NULL, new_data.Val6, {cfg.dbrawdata}.`Val6`),
`Val7` = IF({cfg.dbrawdata}.`Val7` != new_data.Val7 AND new_data.`Val7` IS NOT NULL, new_data.Val7, {cfg.dbrawdata}.`Val7`),
`Val8` = IF({cfg.dbrawdata}.`Val8` != new_data.Val8 AND new_data.`Val8` IS NOT NULL, new_data.Val8, {cfg.dbrawdata}.`Val8`),
`Val9` = IF({cfg.dbrawdata}.`Val9` != new_data.Val9 AND new_data.`Val9` IS NOT NULL, new_data.Val9, {cfg.dbrawdata}.`Val9`),
`ValA` = IF({cfg.dbrawdata}.`ValA` != new_data.ValA AND new_data.`ValA` IS NOT NULL, new_data.ValA, {cfg.dbrawdata}.`ValA`),
`ValB` = IF({cfg.dbrawdata}.`ValB` != new_data.ValB AND new_data.`ValB` IS NOT NULL, new_data.ValB, {cfg.dbrawdata}.`ValB`),
`ValC` = IF({cfg.dbrawdata}.`ValC` != new_data.ValC AND new_data.`ValC` IS NOT NULL, new_data.ValC, {cfg.dbrawdata}.`ValC`),
`ValD` = IF({cfg.dbrawdata}.`ValD` != new_data.ValD AND new_data.`ValD` IS NOT NULL, new_data.ValD, {cfg.dbrawdata}.`ValD`),
`ValE` = IF({cfg.dbrawdata}.`ValE` != new_data.ValE AND new_data.`ValE` IS NOT NULL, new_data.ValE, {cfg.dbrawdata}.`ValE`),
`ValF` = IF({cfg.dbrawdata}.`ValF` != new_data.ValF AND new_data.`ValF` IS NOT NULL, new_data.ValF, {cfg.dbrawdata}.`ValF`),
`BatLevelModule` = IF({cfg.dbrawdata}.`BatLevelModule` != new_data.BatLevelModule, new_data.BatLevelModule, {cfg.dbrawdata}.`BatLevelModule`),
`TemperatureModule` = IF({cfg.dbrawdata}.`TemperatureModule` != new_data.TemperatureModule, new_data.TemperatureModule, {cfg.dbrawdata}.`TemperatureModule`),
`RssiModule` = IF({cfg.dbrawdata}.`RssiModule` != new_data.RssiModule, new_data.RssiModule, {cfg.dbrawdata}.`RssiModule`),
`Created_at` = NOW()
"""
rc = False
async with pool.acquire() as conn:
async with conn.cursor() as cur:
for attempt in range(cfg.max_retries):
try:
logging.info(f"Loading data attempt {attempt + 1}.")
await cur.executemany(sql_insert_RAWDATA, matrice_valori)
await conn.commit()
logging.info("Data loaded.")
rc = True
break
except Exception as e:
await conn.rollback()
logging.error(f"Error: {e}.")
if e.args[0] == 1213: # Deadlock detected
logging.warning(
f"Deadlock detected, attempt {attempt + 1}/{cfg.max_retries}"
)
if attempt < cfg.max_retries - 1:
delay = 2 * attempt
await asyncio.sleep(delay)
continue
else:
logging.error("Max retry attempts reached for deadlock")
raise
return rc
async def update_status(cfg: object, id: int, status: int, pool: object) -> None:
"""Aggiorna lo stato di un record nella tabella dei record CSV.
Args:
cfg (object): L'oggetto di configurazione contenente il nome della tabella.
id (int): L'ID del record da aggiornare.
status (int): Il nuovo stato da impostare.
pool (object): Il pool di connessioni al database.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(
f"update {cfg.dbrectable} set status = {status}, {timestamp_cols[status]} = now() where id = {id}"
)
await conn.commit()
logging.info(f"Status updated id {id}.")
except Exception as e:
await conn.rollback()
logging.error(f"Error: {e}")
async def unlock(cfg: object, id: int, pool: object) -> None:
"""Sblocca un record nella tabella dei record CSV.
Imposta il campo 'locked' a 0 per un dato ID.
Args:
cfg (object): L'oggetto di configurazione contenente il nome della tabella.
id (int): L'ID del record da sbloccare.
pool (object): Il pool di connessioni al database.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(
f"update {cfg.dbrectable} set locked = 0 where id = {id}"
)
await conn.commit()
logging.info(f"id {id} unlocked.")
except Exception as e:
await conn.rollback()
logging.error(f"Error: {e}")
async def get_matlab_cmd(cfg: object, unit: str, tool: str, pool: object) -> tuple:
"""Recupera le informazioni per l'esecuzione di un comando Matlab dal database.
Args:
cfg (object): L'oggetto di configurazione.
unit (str): Il nome dell'unità.
tool (str): Il nome dello strumento.
pool (object): Il pool di connessioni al database.
Returns:
tuple: Una tupla contenente le informazioni del comando Matlab, o None in caso di errore.
"""
async with pool.acquire() as conn:
async with conn.cursor() as cur:
try:
await cur.execute(f'''select m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, IFNULL(u.duedate, "") as duedate
from matfuncs as m
inner join tools as t on t.matfunc = m.id
inner join units as u on u.id = t.unit_id
inner join statustools as s on t.statustool_id = s.id
where t.name = "{tool}" and u.name = "{unit}"''')
return await cur.fetchone()
except Exception as e:
logging.error(f"Error: {e}")

View File

@@ -1,39 +0,0 @@
import logging
import aiomysql
logger = logging.getLogger(__name__)
async def get_matlab_command(cfg: object, tool: str, unit: str, pool: object) -> tuple:
"""Recupera le informazioni per l'esecuzione di un comando Matlab dal database.
Interroga il database per ottenere i dettagli necessari all'avvio di uno script
Matlab, basandosi sul nome dello strumento (tool) e dell'unità (unit).
Args:
cfg (object): L'oggetto di configurazione.
tool (str): Il nome dello strumento.
unit (str): Il nome dell'unità.
pool (object): Il pool di connessioni al database.
Returns:
tuple: Una tupla contenente le informazioni del comando Matlab,
o None se non viene trovato alcun comando.
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
await cur.execute(f"""
SELECT m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, IFNULL(u.duedate, "") as duedate from matfuncs as m
INNER JOIN tools as t on t.matfunc = m.id
INNER JOIN units as u on u.id = t.unit_id
INNER JOIN statustools as s on t.statustool_id = s.id
where t.name = '{tool}' AND u.name = '{unit}';
""")
result = await cur.fetchone()
if not result:
logger.error(f"{unit} - {tool}: Matlab command not found.")
return None
else:
return result

View File

@@ -1,139 +0,0 @@
from ftplib import FTP, FTP_TLS, all_errors
from io import BytesIO
import logging
import aiomysql
logger = logging.getLogger(__name__)
class FTPConnection:
"""
Manages an FTP or FTP_TLS connection, providing a context manager for automatic disconnection.
"""
def __init__(self, host, port=21, use_tls=False, user='', passwd='',
passive=True, timeout=None, debug=0, context=None):
self.use_tls = use_tls
if use_tls:
self.ftp = FTP_TLS(context=context, timeout=timeout) if context else FTP_TLS(timeout=timeout)
else:
self.ftp = FTP(timeout=timeout)
if debug > 0:
self.ftp.set_debuglevel(debug)
self.ftp.connect(host, port)
self.ftp.login(user, passwd)
self.ftp.set_pasv(passive)
if use_tls:
self.ftp.prot_p()
def __getattr__(self, name):
"""Delega tutti i metodi non definiti all'oggetto FTP sottostante"""
return getattr(self.ftp, name)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.ftp.quit()
async def send_csv_to_customer(cfg: dict, id: int, unit: str, tool: str, csv_data: str, pool: object) -> bool:
"""
Sends elaborated CSV data to a customer via FTP.
Retrieves FTP connection details from the database based on the unit name,
then establishes an FTP connection and uploads the CSV data.
Args:
cfg (dict): Configuration dictionary (not directly used in this function but passed for consistency).
id (int): The ID of the record being processed (used for logging).
unit (str): The name of the unit associated with the data.
tool (str): The name of the tool associated with the data.
csv_data (str): The CSV data as a string to be sent.
pool (object): The database connection pool.
Returns:
bool: True if the CSV data was sent successfully, False otherwise.
"""
query = """
select ftp_addrs, ftp_user, ftp_passwd, ftp_parm, ftp_filename, ftp_target, duedate from units
where name = '%s'";'
"""
async with pool.acquire() as conn:
async with conn.cursor(aiomysql.DictCursor) as cur:
try:
await cur.execute(query, (unit,))
send_ftp_info = await cur.fetchone()
logger.info(f"id {id} - {unit} - {tool}: estratti i dati per invio via ftp")
except Exception as e:
logging.error(f"id {id} - {unit} - {tool} - errore nel query per invio ftp: {e}")
try:
# Convert the CSV payload to bytes
csv_bytes = csv_data.encode('utf-8')
csv_buffer = BytesIO(csv_bytes)
ftp_parms = parse_ftp_parms(send_ftp_info["ftp_parm"])
use_tls = 'ssl_version' in ftp_parms
passive = ftp_parms.get('passive', True)
port = ftp_parms.get('port', 21)
# Open the FTP connection
with FTPConnection(host=send_ftp_info["ftp_addrs"], port=port, use_tls=use_tls, user=send_ftp_info["ftp_user"], passwd=send_ftp_info["ftp_passwd"], passive=passive) as ftp:
# Change to the target directory
if send_ftp_info["ftp_target"] != "/":
ftp.cwd(send_ftp_info["ftp_target"])
# Upload the file
result = ftp.storbinary(f'STOR {send_ftp_info["ftp_filename"]}', csv_buffer)
if result.startswith('226'):
logging.info(f"File {send_ftp_info['ftp_filename']} sent successfully")
return True
else:
logging.error(f"Errore nell'invio: {result}")
return False
except all_errors as e:
logging.error(f"Errore FTP: {e}")
return False
except Exception as e:
logging.error(f"Errore generico: {e}")
return False
finally:
csv_buffer.close()
def parse_ftp_parms(ftp_parms):
"""
Parses a string of FTP parameters into a dictionary.
Args:
ftp_parms (str): A string containing key-value pairs separated by commas,
with keys and values separated by '=>'.
Returns:
dict: A dictionary where keys are parameter names (lowercase) and values are their parsed values.
"""
# Strip whitespace and split on commas
pairs = ftp_parms.split(',')
result = {}
for pair in pairs:
if '=>' in pair:
key, value = pair.split('=>', 1)
key = key.strip().lower()
value = value.strip().lower()
# Coerce values to the appropriate types
if value.isdigit():
value = int(value)
elif value == '':
value = None
result[key] = value
return result
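For reference, a small usage sketch of the two helpers above (host, credentials and the ftp_parm string are made up for illustration):

parms = parse_ftp_parms("port => 2121, passive => 1, ssl_version => tlsv1_2")
# parms == {'port': 2121, 'passive': 1, 'ssl_version': 'tlsv1_2'}
with FTPConnection(host="ftp.example.com", port=parms.get("port", 21),
                   use_tls="ssl_version" in parms, user="customer", passwd="secret",
                   passive=bool(parms.get("passive", True))) as ftp:
    ftp.storbinary("STOR data.csv", BytesIO(b"ID;Value\n1;2\n"))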

View File

@@ -1,52 +0,0 @@
import os
import logging
import mysql.connector
from utils.database.connection import connetti_db
from utils.csv.parser import extract_value
logger = logging.getLogger(__name__)
def on_file_received(self: object, file: str) -> None:
"""Handles the event when a file is successfully received.
Args:
file: The path to the received file.
"""
if not os.stat(file).st_size:
os.remove(file)
logging.info(f'File {file} is empty: removed.')
else:
cfg = self.cfg
path, filenameExt = os.path.split(file)
filename, fileExtension = os.path.splitext(filenameExt)
if fileExtension.upper() in cfg.fileext:
with open(file, 'r', encoding='utf-8', errors='ignore') as csvfile:
lines = csvfile.readlines()
unit_name = extract_value(cfg.units_name, filename, str(lines[0:10]))
unit_type = extract_value(cfg.units_type, filename, str(lines[0:10]))
tool_name = extract_value(cfg.tools_name, filename, str(lines[0:10]))
tool_type = extract_value(cfg.tools_type, filename, str(lines[0:10]))
try:
conn = connetti_db(cfg)
except mysql.connector.Error as e:
print(f"Error: {e}")
logging.error(f'{e}')
return
# Create a cursor
cur = conn.cursor()
try:
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (filename, unit_name, unit_type, tool_name, tool_type, tool_data) VALUES (%s, %s, %s, %s, %s, %s)", (filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines)))
conn.commit()
conn.close()
except Exception as e:
logging.error(f'File {file} not loaded. Held in user path.')
logging.error(f'{e}')
else:
os.remove(file)
logging.info(f'File {file} loaded: removed.')
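The metadata extraction above delegates to extract_value from utils/csv/parser.py, which does not appear in this diff. Purely as an illustration of the call shape used here (the real helper may differ), an alias-based lookup along these lines would satisfy it:

import re

def extract_value(aliases: dict, filename: str, header: str) -> str:
    # Hypothetical sketch: return the canonical name whose alias occurs in the
    # filename or in the first CSV lines; not the project's actual parser.
    haystack = f"{filename} {header}".upper()
    for alias, canonical in aliases.items():
        if re.search(re.escape(alias.upper()), haystack):
            return canonical
    return "UNKNOWN"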

View File

@@ -1,35 +0,0 @@
import asyncio
import subprocess
import tempfile
import os
from utils.database.loader_action import DATA_LOADED, update_status, unlock
from utils.csv.data_preparation import get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None:
"""Runs the legacy TS_PiniScript.py on the record's CSV payload, then marks the record as loaded and unlocks it."""
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
# Create a temporary file for the CSV payload
with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as temp_file:
temp_file.write(ToolData)
temp_filename = temp_file.name
try:
# Run the legacy script on the temporary file without blocking the event loop
result = await asyncio.to_thread(subprocess.run, ['python3', 'old_script/TS_PiniScript.py', temp_filename], capture_output=True, text=True)
print(result.stdout)
print(result.stderr)
finally:
# Clean up the temporary file
os.unlink(temp_filename)
if result.returncode != 0:
logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result.stderr}")
raise Exception(f"Errore nel programma: {result.stderr}")
else:
logger.info(f"Programma TS_PiniScript.py eseguito con successo: {result.stdout}")
await update_status(cfg, id, DATA_LOADED, pool)
await unlock(cfg, id, pool)
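Since main_loader runs inside the event loop, an alternative to pushing subprocess.run onto a worker thread is asyncio's own subprocess API. A minimal sketch, same script path, shown only as an option:

proc = await asyncio.create_subprocess_exec(
    'python3', 'old_script/TS_PiniScript.py', temp_filename,
    stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
    raise Exception(f"Script error: {stderr.decode()}")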

View File

@@ -1,24 +0,0 @@
from datetime import datetime
def conforma_data(data_string: str) -> str:
"""
Normalizes a date string to the YYYY-MM-DD format, trying several input formats.
Args:
data_string (str): The date string to normalize.
Returns:
str: The normalized date in the YYYY-MM-DD format,
or None if the string cannot be interpreted as a date.
"""
formato_desiderato = "%Y-%m-%d"
formati_input = ["%Y/%m/%d", "%Y-%m-%d", "%d-%m-%Y","%d/%m/%Y", ] # Ordine importante: prova prima il più probabile
for formato_input in formati_input:
try:
data_oggetto = datetime.strptime(data_string, formato_input)
return data_oggetto.strftime(formato_desiderato)
except ValueError:
continue  # Try the next format if this one fails
return None  # No input format matched
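A few worked examples of conforma_data (input values chosen for illustration):

conforma_data("2025/09/15")   # -> "2025-09-15"
conforma_data("15-09-2025")   # -> "2025-09-15"  (matches the %d-%m-%Y fallback)
conforma_data("15 Sep 2025")  # -> None, since no configured format matches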