pini
@@ -8,6 +8,7 @@ CREATE TABLE `received` (
   `tool_name` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
   `tool_type` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
   `tool_data` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
+  `tool_info` json DEFAULT NULL,
   `locked` int DEFAULT '0',
   `status` int DEFAULT '0',
   `matlab_timestamp` timestamp NULL DEFAULT NULL,
env/config.ini (vendored): new file, +6
@@ -0,0 +1,6 @@
+[mysql]
+host = 10.211.114.173
+database = ase_lar
+user = root
+password = batt1l0
+
env/ftp.ini (vendored): 9 lines changed
@@ -21,12 +21,17 @@
 logFilename = ../logs/ftp_csv_rec.log
 
 [unit]
-Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
+Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
 Names = ID[0-9]{4}|IX[0-9]{4}|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
+Alias = HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE|MESSPUNKTEPINI_:STAZIONETOTALE|CO_:SOROTECPINI
 
 [tool]
-Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK
+Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK
 Names = LOC[0-9]{4}|DT[0-9]{4}|GD[0-9]{4}|[0-9]{18}|MEASUREMENTS_|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
+Alias = HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE|MESSPUNKTEPINI_:STAZIONETOTALE|CO_:SOROTECPINI
 
 [csv]
 Infos = IP|Subnet|Gateway
 
+[ts_pini]:
+path_match = [276_208_TS0003]:TS0003|[Neuchatel_CDP]:TS7|[TS0006_EP28]:=|[TS0007_ChesaArcoiris]:=|[TS0006_EP28_3]:=|[TS0006_EP28_4]:TS0006_EP28_4|[TS0006_EP28_5]:TS0006_EP28_5|[TS18800]:=|[Granges_19 100]:=|[Granges_19 200]:=|[Chesa_Arcoiris_2]:=|[TS0006_EP28_1]:=|[TS_PS_Petites_Croisettes]:=|[_Chesa_Arcoiris_1]:=|[TS_test]:=|[TS-VIME]:=
env/load.ini (vendored): 4 lines changed
@@ -1,5 +1,5 @@
-[logging]
+[logging]:
 logFilename = ../logs/load_raw_data.log
 
-[threads]
+[threads]:
 max_num = 5
@@ -2565,14 +2565,17 @@ def getDataFromCsvAndInsert(pathFile):
     conn.commit()
     cursor.close()
     conn.close()
+    """
     if "[276_208_TS0003]" in pathFile or "[Neuchatel_CDP]" in pathFile or "[TS0006_EP28]" in pathFile or "[TS0007_ChesaArcoiris]" in pathFile or "[TS0006_EP28_3]" in pathFile or "[TS0006_EP28_4]" in pathFile or "[TS0006_EP28_5]" in pathFile or "[TS18800]" in pathFile or "[Granges_19 100]" in pathFile or "[Granges_19 200]" in pathFile or "[Chesa_Arcoiris_2]" in pathFile or "[TS0006_EP28_1]" in pathFile or "[TS_PS_Petites_Croisettes]" in pathFile or "[_Chesa_Arcoiris_1]" in pathFile or "[TS-VIME]" in pathFile:  # move the file into the correct station folder
         orig_folder = pathFile.split("/")[-2]
         new_pathFile = pathFile.replace(orig_folder, "home/"+folder_name)
 
         shutil.move(pathFile, new_pathFile)
         if not os.path.exists(pathFile):
             print(f"File moved successfully from {pathFile} to {new_pathFile}\n")
         else:
             print("File move operation failed.\n")
+    """
     #except Exception as e:
     #    print(f"An unexpected error occurred: {str(e)}\n")
 
@@ -1,6 +1,6 @@
 from configparser import ConfigParser
 
-def read_db_config(filename='/home/battilo/scripts/config.ini', section='mysql'):
+def read_db_config(filename='../env/config.ini', section='mysql'):
     parser = ConfigParser()
     parser.read(filename)
 
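For context, a minimal sketch of how this helper consumes the new env/config.ini: the signature comes from the hunk above, while the body shown here is the conventional ConfigParser pattern and is an assumption, not code from this commit.

    from configparser import ConfigParser

    def read_db_config(filename='../env/config.ini', section='mysql'):
        # Parse the INI file and return the requested section as a plain dict,
        # e.g. {'host': '10.211.114.173', 'database': 'ase_lar', ...}
        parser = ConfigParser()
        parser.read(filename)
        if not parser.has_section(section):
            raise Exception(f'{section} not found in {filename}')
        return dict(parser.items(section))

    # Callers such as vulinkScript.py below then do: MySQLConnection(**read_db_config())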
src/old_script/vulinkScript.py: new executable file, +171
@@ -0,0 +1,171 @@
+#!/usr/bin/env python3
+import sys
+import os
+from mysql.connector import MySQLConnection, Error
+from dbconfig import read_db_config
+from datetime import datetime
+import json
+
+
+def checkBatteryLevel(db_conn, db_cursor, unit, date_time, battery_perc):
+    print(date_time, battery_perc)
+    if(float(battery_perc) < 25):  # below 25%
+        query = "select unit_name, date_time from alarms where unit_name=%s and date_time < %s and type_id=2 order by date_time desc limit 1"
+        db_cursor.execute(query, [unit, date_time])
+        result = db_cursor.fetchall()
+        if(len(result) > 0):
+            alarm_date_time = result[0]["date_time"]  # datetime, not str
+            format1 = "%Y-%m-%d %H:%M"
+            dt1 = datetime.strptime(date_time, format1)
+            time_difference = abs(dt1 - alarm_date_time)
+            if time_difference.total_seconds() > 24 * 60 * 60:
+                print("The difference is above 24 hours. Creating battery alarm")
+                queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
+                db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
+                db_conn.commit()
+        else:
+            print("Creating battery alarm")
+            queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
+            db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
+            db_conn.commit()
+
+
+def checkSogliePh(db_conn, db_cursor, unit, tool, node_num, date_time, ph_value, soglie_str):
+    soglie = json.loads(soglie_str)
+    soglia = next((item for item in soglie if item.get("type") == "PH Link"), None)
+    ph = soglia["data"]["ph"]
+    ph_uno = soglia["data"]["ph_uno"]
+    ph_due = soglia["data"]["ph_due"]
+    ph_tre = soglia["data"]["ph_tre"]
+    ph_uno_value = soglia["data"]["ph_uno_value"]
+    ph_due_value = soglia["data"]["ph_due_value"]
+    ph_tre_value = soglia["data"]["ph_tre_value"]
+    ph_uno_sms = soglia["data"]["ph_uno_sms"]
+    ph_due_sms = soglia["data"]["ph_due_sms"]
+    ph_tre_sms = soglia["data"]["ph_tre_sms"]
+    ph_uno_email = soglia["data"]["ph_uno_email"]
+    ph_due_email = soglia["data"]["ph_due_email"]
+    ph_tre_email = soglia["data"]["ph_tre_email"]
+    alert_uno = 0
+    alert_due = 0
+    alert_tre = 0
+    ph_value_prev = 0
+    #print(unit, tool, node_num, date_time)
+    query = "select XShift, EventDate, EventTime from ELABDATADISP where UnitName=%s and ToolNameID=%s and NodeNum=%s and concat(EventDate, ' ', EventTime) < %s order by concat(EventDate, ' ', EventTime) desc limit 1"
+    db_cursor.execute(query, [unit, tool, node_num, date_time])
+    resultPhPrev = db_cursor.fetchall()
+    if(len(resultPhPrev) > 0):
+        ph_value_prev = float(resultPhPrev[0]["XShift"])
+    #ph_value = random.uniform(7, 10)
+    print(tool, unit, node_num, date_time, ph_value)
+    #print(ph_value_prev, ph_value)
+    if(ph == 1):
+        if(ph_tre == 1 and ph_tre_value != '' and float(ph_value) > float(ph_tre_value)):
+            if(ph_value_prev <= float(ph_tre_value)):
+                alert_tre = 1
+        if(ph_due == 1 and ph_due_value != '' and float(ph_value) > float(ph_due_value)):
+            if(ph_value_prev <= float(ph_due_value)):
+                alert_due = 1
+        if(ph_uno == 1 and ph_uno_value != '' and float(ph_value) > float(ph_uno_value)):
+            if(ph_value_prev <= float(ph_uno_value)):
+                alert_uno = 1
+    #print(ph_value, ph, " livelli:", ph_uno, ph_due, ph_tre, " value:", ph_uno_value, ph_due_value, ph_tre_value, " sms:", ph_uno_sms, ph_due_sms, ph_tre_sms, " email:", ph_uno_email, ph_due_email, ph_tre_email)
+    if(alert_tre == 1):
+        print("level3", tool, unit, node_num, date_time, ph_value)
+        queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+        db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 3, "pH", ph_tre_email, ph_tre_sms])
+        db_conn.commit()
+    elif(alert_due == 1):
+        print("level2", tool, unit, node_num, date_time, ph_value)
+        queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+        db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 2, "pH", ph_due_email, ph_due_sms])
+        db_conn.commit()
+    elif(alert_uno == 1):
+        print("level1", tool, unit, node_num, date_time, ph_value)
+        queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+        db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 1, "pH", ph_uno_email, ph_uno_sms])
+        db_conn.commit()
+
+
+def getDataFromCsv(pathFile):
+    try:
+        folder_path, file_with_extension = os.path.split(pathFile)
+        file_name, _ = os.path.splitext(file_with_extension)  # toolname
+        serial_number = file_name.split("_")[0]
+        query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
+        query_node_depth = "SELECT depth, t.soglie, n.num as node_num FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"
+        query_nodes = "SELECT t.soglie, n.num as node_num, n.nodetype_id FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
+        db_config = read_db_config()
+        conn = MySQLConnection(**db_config)
+        cursor = conn.cursor(dictionary=True)
+        cursor.execute(query, [serial_number])
+        result = cursor.fetchall()
+        unit = result[0]["unit_name"]
+        tool = result[0]["tool_name"]
+        cursor.execute(query_node_depth, [unit, tool])
+        resultNode = cursor.fetchall()
+        cursor.execute(query_nodes, [unit, tool])
+        resultAllNodes = cursor.fetchall()
+        #print(resultAllNodes)
+        node_num_piezo = next((item for item in resultAllNodes if item.get('nodetype_id') == 2), None)["node_num"]
+        node_num_baro = next((item for item in resultAllNodes if item.get('nodetype_id') == 3), None)["node_num"]
+        node_num_conductivity = next((item for item in resultAllNodes if item.get('nodetype_id') == 94), None)["node_num"]
+        node_num_ph = next((item for item in resultAllNodes if item.get('nodetype_id') == 97), None)["node_num"]
+        #print(node_num_piezo, node_num_baro, node_num_conductivity, node_num_ph)
+        # 2 piezo
+        # 3 baro
+        # 94 conductivity
+        # 97 ph
+        node_depth = float(resultNode[0]["depth"])  # node piezo depth
+        with open(pathFile, 'r', encoding='ISO-8859-1') as file:
+            data = file.readlines()
+        data = [row.rstrip() for row in data]
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        data.pop(0)  # remove header line
+        for row in data:
+            row = row.split(",")
+            date_time = datetime.strptime(row[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
+            date_time = date_time.split(" ")
+            date = date_time[0]
+            time = date_time[1]
+            temperature_unit = float(row[2])
+            battery_perc = float(row[3])
+            pressure_baro = float(row[4])*1000  # (kPa): multiply by 1000 to get Pa for elab->pressure
+            conductivity = float(row[6])
+            ph = float(row[11])
+            temperature_piezo = float(row[14])
+            pressure = float(row[16])*1000
+            depth = (node_depth * -1) + float(row[17])  # added to the node depth (node depth taken *-1)
+            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
+            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
+            cursor.execute(queryInsRaw, [unit, tool, node_num_baro, date, time, battery_perc, temperature_unit, pressure_baro])
+            cursor.execute(queryInsElab, [unit, tool, node_num_baro, date, time, pressure_baro])
+            conn.commit()
+            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
+            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
+            cursor.execute(queryInsRaw, [unit, tool, node_num_conductivity, date, time, battery_perc, temperature_unit, conductivity])
+            cursor.execute(queryInsElab, [unit, tool, node_num_conductivity, date, time, conductivity])
+            conn.commit()
+            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
+            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
+            cursor.execute(queryInsRaw, [unit, tool, node_num_ph, date, time, battery_perc, temperature_unit, ph])
+            cursor.execute(queryInsElab, [unit, tool, node_num_ph, date, time, ph])
+            conn.commit()
+            checkSogliePh(conn, cursor, unit, tool, node_num_ph, date_time[0]+" "+date_time[1], ph, resultNode[0]["soglie"])
+            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
+            cursor.execute(queryInsRaw, [unit, tool, node_num_piezo, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
+            cursor.execute(queryInsElab, [unit, tool, node_num_piezo, date, time, temperature_piezo, depth, pressure])
+            conn.commit()
+            checkBatteryLevel(conn, cursor, unit, date_time[0]+" "+date_time[1], battery_perc)
+    except Error as e:
+        print('Error:', e)
+
+
+def main():
+    getDataFromCsv(sys.argv[1])
+
+
+if __name__ == '__main__':
+    main()
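One editorial aside on the header handling in getDataFromCsv: assuming the CSV header really is always exactly ten lines, the ten consecutive data.pop(0) calls can be collapsed into a single slice delete with the same effect.

    # Equivalent to the ten data.pop(0) calls above (assumes a fixed ten-line header).
    del data[:10]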
@@ -46,11 +46,29 @@ class Config:
         # unit setting
         self.units_name = [part for part in c.get("unit", "Names").split('|')]
         self.units_type = [part for part in c.get("unit", "Types").split('|')]
+        self.units_alias = {
+            key: value
+            for item in c.get("unit", "Alias").split('|')
+            for key, value in [item.split(':', 1)]
+        }
         #self.units_header = {key: int(value) for pair in c.get("unit", "Headers").split('|') for key, value in [pair.split(':')]}
 
         # tool setting
         self.tools_name = [part for part in c.get("tool", "Names").split('|')]
         self.tools_type = [part for part in c.get("tool", "Types").split('|')]
+        self.tools_alias = {
+            key: value
+            for item in c.get("tool", "Alias").split('|')
+            for key, value in [item.split(':', 1)]
+        }
 
         # csv info
         self.csv_infos = [part for part in c.get("csv", "Infos").split('|')]
+
+        # TS pini path match
+        self.ts_pini_path_match = {
+            key: key[1:-1] if value == '=' else value
+            for item in c.get("ts_pini", "path_match").split('|')
+            for key, value in [item.split(':', 1)]
+        }
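To make the two new comprehension shapes concrete, a small self-contained example of what they produce, with values taken from the env/ftp.ini hunk above:

    # Alias entries are 'KEY:VALUE' pairs joined by '|'.
    alias_raw = "HEALTH-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE"
    aliases = {key: value
               for item in alias_raw.split('|')
               for key, value in [item.split(':', 1)]}
    assert aliases == {"HEALTH-": "SISGEO", "INTEGRITY MONITOR": "STAZIONETOTALE"}

    # For path_match, a value of '=' means "reuse the key itself, brackets stripped".
    pm_raw = "[276_208_TS0003]:TS0003|[TS0006_EP28]:="
    pm = {key: key[1:-1] if value == '=' else value
          for item in pm_raw.split('|')
          for key, value in [item.split(':', 1)]}
    assert pm == {"[276_208_TS0003]": "TS0003", "[TS0006_EP28]": "TS0006_EP28"}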
@@ -23,10 +23,10 @@ async def get_data(cfg: object, id: int, pool: object) -> tuple:
     """
     async with pool.acquire() as conn:
         async with conn.cursor() as cur:
-            await cur.execute(f'select unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}')
-            unit_name, tool_name, tool_data = await cur.fetchone()
+            await cur.execute(f'select filename, unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}')
+            filename, unit_name, tool_name, tool_data = await cur.fetchone()
 
-    return unit_name, tool_name, tool_data
+    return filename, unit_name, tool_name, tool_data
 
 async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
     """
@@ -39,7 +39,7 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
     Returns:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
     """
@@ -78,7 +78,7 @@ async def make_ain_din_matrix(cfg: object, id: int, pool: object) -> list:
     Returns:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
@@ -112,7 +112,7 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
     Returns:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
@@ -140,7 +140,7 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
     Returns:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
@@ -173,7 +173,7 @@ async def make_tlp_matrix(cfg: object, id: int, pool: object) -> list:
     Returns:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     righe = ToolData.splitlines()
     valori_x_nodo = 2
     matrice_valori = []
@@ -200,7 +200,7 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
     Returns:
         list: A list of lists, where each inner list represents a row in the matrix.
     """
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     righe = ToolData.splitlines()
     matrice_valori = []
     pattern = r';-?\d+dB$'
@@ -1,8 +1,8 @@
 import re
 
-def extract_value(patterns: list, primary_source: str, secondary_source: str, default='Not Defined') -> str:
+def extract_value(patterns: list, primary_source: str, secondary_source: str = None, default='Not Defined') -> str:
 
-    for source in (primary_source, secondary_source):
+    for source in [source for source in (primary_source, secondary_source) if source is not None]:
         for pattern in patterns:
             matches = re.findall(pattern, source, re.IGNORECASE)
             if matches:
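A short usage sketch of the new optional parameter (pattern and filename here are hypothetical; the return value assumes the function yields its first match, as the surrounding code suggests). Without the None filter, a filename-only call would crash, since re.findall raises TypeError on a None source.

    patterns = [r'TS\d{4}']                           # hypothetical pattern list
    extract_value(patterns, 'TS0006_EP28_data.csv')   # filename only, now valid -> 'TS0006'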
@@ -1,6 +1,6 @@
 import os
 import logging
+import re
 import mysql.connector
 
 from utils.database.connection import connetti_db
@@ -30,17 +30,38 @@ def on_file_received(self: object, file: str) -> None:
     unit_type = extract_value(cfg.units_type, filename, str(lines[0:10]))
     tool_name = extract_value(cfg.tools_name, filename, str(lines[0:10]))
     tool_type = extract_value(cfg.tools_type, filename, str(lines[0:10]))
+    tool_info = "{}"
+
+    unit_type = cfg.units_alias.get(unit_type.upper(), unit_type)
+
     try:
         conn = connetti_db(cfg)
     except mysql.connector.Error as e:
-        print(f"Error: {e}")
         logger.error(f'{e}')
 
     # Create a cursor
     cur = conn.cursor()
 
+    # to be extracted into a module
+    if (unit_type.upper() == "ISI CSV LOG" and tool_type.upper() == "VULINK"):
+        serial_number = filename.split('_')[0]
+        tool_info = f'{{"serial_number": {serial_number}}}'
+        try:
+            cur.execute(f"SELECT unit_name, tool_name FROM {cfg.dbname}.vulink_tools WHERE serial_number = '{serial_number}'")
+            unit_name, tool_name = cur.fetchone()
+        except Exception as e:
+            logger.warning(f'{tool_type} serial number {serial_number} not found in table vulink_tools. {e}')
+
+    # to be extracted into a module
+    if (unit_type.upper() == "STAZIONETOTALE" and tool_type.upper() == "INTEGRITY MONITOR"):
+        escaped_keys = [re.escape(key) for key in cfg.ts_pini_path_match.keys()]
+        stazione = extract_value(escaped_keys, filename)
+        if stazione:
+            tool_info = f'{{"Stazione": "{cfg.ts_pini_path_match.get(stazione)}"}}'
+
     try:
-        cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (filename, unit_name, unit_type, tool_name, tool_type, tool_data) VALUES (%s, %s, %s, %s, %s, %s)", (filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines)))
+        cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (filename, unit_name, unit_type, tool_name, tool_type, tool_data, tool_info) VALUES (%s, %s, %s, %s, %s, %s, %s)", (filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines), tool_info))
         conn.commit()
         conn.close()
 
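An editorial sketch, not part of the commit: the vulink_tools lookup above interpolates serial_number straight into the SQL string; with mysql.connector the same query can bind it as a parameter, so a malformed filename cannot alter the statement.

    # Bound-parameter variant of the lookup (same result for well-formed input).
    cur.execute(
        f"SELECT unit_name, tool_name FROM {cfg.dbname}.vulink_tools WHERE serial_number = %s",
        (serial_number,),
    )
    unit_name, tool_name = cur.fetchone()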
@@ -0,0 +1,45 @@
+import asyncio
+import tempfile
+import os
+
+from utils.database import WorkflowFlags
+from utils.database.loader_action import update_status, unlock
+from utils.csv.data_preparation import get_data
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+async def main_loader(cfg: object, id: int, pool: object) -> None:
+
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    # Create a temporary file
+    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
+        temp_file.write(ToolData)
+        temp_filename = temp_file.name
+
+    try:
+        # Use asyncio.subprocess for truly async execution
+        process = await asyncio.create_subprocess_exec(
+            'python3', 'old_script/TS_PiniScript.py', temp_filename,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await process.communicate()
+
+        result_stdout = stdout.decode('utf-8')
+        result_stderr = stderr.decode('utf-8')
+
+    finally:
+        # Clean up the temporary file
+        os.unlink(temp_filename)
+
+    if process.returncode != 0:
+        logger.error(f"Error while running TS_PiniScript.py: {result_stderr}")
+        raise Exception(f"Error in the program: {result_stderr}")
+    else:
+        logger.info("TS_PiniScript.py completed successfully.")
+        logger.debug(f"Stdout: {result_stdout}")
+        await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
+        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+        await unlock(cfg, id, pool)
@@ -1,8 +1,9 @@
-import subprocess
+import asyncio
 import tempfile
 import os
 
-from utils.database.loader_action import DATA_LOADED, update_status, unlock
+from utils.database import WorkflowFlags
+from utils.database.loader_action import update_status, unlock
 from utils.csv.data_preparation import get_data
 
 import logging
@@ -11,25 +12,34 @@ logger = logging.getLogger(__name__)
 
 async def main_loader(cfg: object, id: int, pool: object) -> None:
 
-    UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
     # Create a temporary file
-    with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as temp_file:
+    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
         temp_file.write(ToolData)
         temp_filename = temp_file.name
 
     try:
-        # Run the program with the temporary file
-        result = await subprocess.run(['python3', 'old_script/TS_PiniScript.py', temp_filename], capture_output=True, text=True)
-        print(result.stdout)
-        print(result.stderr)
+        # Use asyncio.subprocess for truly async execution
+        process = await asyncio.create_subprocess_exec(
+            'python3', 'old_script/vulinkScript.py', temp_filename,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await process.communicate()
+
+        result_stdout = stdout.decode('utf-8')
+        result_stderr = stderr.decode('utf-8')
+
     finally:
         # Clean up the temporary file
         os.unlink(temp_filename)
 
-    if result.returncode != 0:
-        logger.error(f"Error while running TS_PiniScript.py: {result.stderr}")
-        raise Exception(f"Error in the program: {result.stderr}")
+    if process.returncode != 0:
+        logger.error(f"Error while running vulinkScript.py: {result_stderr}")
+        raise Exception(f"Error in the program: {result_stderr}")
     else:
-        logger.info(f"TS_PiniScript.py completed successfully: {result.stdout}")
-        await update_status(cfg, id, DATA_LOADED, pool)
+        logger.info("vulinkScript.py completed successfully.")
+        logger.debug(f"Stdout: {result_stdout}")
+        await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
+        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
         await unlock(cfg, id, pool)
src/utils/parsers/by_type/sisgeo_health-.py: new file, +45
@@ -0,0 +1,45 @@
+import asyncio
+import tempfile
+import os
+
+from utils.database import WorkflowFlags
+from utils.database.loader_action import update_status, unlock
+from utils.csv.data_preparation import get_data
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+async def main_loader(cfg: object, id: int, pool: object) -> None:
+
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    # Create a temporary file
+    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
+        temp_file.write(ToolData)
+        temp_filename = temp_file.name
+
+    try:
+        # Use asyncio.subprocess for truly async execution
+        process = await asyncio.create_subprocess_exec(
+            'python3', 'old_script/TS_PiniScript.py', temp_filename,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await process.communicate()
+
+        result_stdout = stdout.decode('utf-8')
+        result_stderr = stderr.decode('utf-8')
+
+    finally:
+        # Clean up the temporary file
+        os.unlink(temp_filename)
+
+    if process.returncode != 0:
+        logger.error(f"Error while running TS_PiniScript.py: {result_stderr}")
+        raise Exception(f"Error in the program: {result_stderr}")
+    else:
+        logger.info("TS_PiniScript.py completed successfully.")
+        logger.debug(f"Stdout: {result_stdout}")
+        await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
+        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+        await unlock(cfg, id, pool)
src/utils/parsers/by_type/sisgeo_readings-.py: new file, +45
@@ -0,0 +1,45 @@
+import asyncio
+import tempfile
+import os
+
+from utils.database import WorkflowFlags
+from utils.database.loader_action import update_status, unlock
+from utils.csv.data_preparation import get_data
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+async def main_loader(cfg: object, id: int, pool: object) -> None:
+
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    # Create a temporary file
+    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
+        temp_file.write(ToolData)
+        temp_filename = temp_file.name
+
+    try:
+        # Use asyncio.subprocess for truly async execution
+        process = await asyncio.create_subprocess_exec(
+            'python3', 'old_script/TS_PiniScript.py', temp_filename,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await process.communicate()
+
+        result_stdout = stdout.decode('utf-8')
+        result_stderr = stderr.decode('utf-8')
+
+    finally:
+        # Clean up the temporary file
+        os.unlink(temp_filename)
+
+    if process.returncode != 0:
+        logger.error(f"Error while running TS_PiniScript.py: {result_stderr}")
+        raise Exception(f"Error in the program: {result_stderr}")
+    else:
+        logger.info("TS_PiniScript.py completed successfully.")
+        logger.debug(f"Stdout: {result_stdout}")
+        await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
+        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+        await unlock(cfg, id, pool)
@@ -0,0 +1,45 @@
+import asyncio
+import tempfile
+import os
+
+from utils.database import WorkflowFlags
+from utils.database.loader_action import update_status, unlock
+from utils.csv.data_preparation import get_data
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+async def main_loader(cfg: object, id: int, pool: object) -> None:
+
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    # Create a temporary file
+    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
+        temp_file.write(ToolData)
+        temp_filename = temp_file.name
+
+    try:
+        # Use asyncio.subprocess for truly async execution
+        process = await asyncio.create_subprocess_exec(
+            'python3', 'old_script/TS_PiniScript.py', temp_filename,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await process.communicate()
+
+        result_stdout = stdout.decode('utf-8')
+        result_stderr = stderr.decode('utf-8')
+
+    finally:
+        # Clean up the temporary file
+        os.unlink(temp_filename)
+
+    if process.returncode != 0:
+        logger.error(f"Error while running TS_PiniScript.py: {result_stderr}")
+        raise Exception(f"Error in the program: {result_stderr}")
+    else:
+        logger.info("TS_PiniScript.py completed successfully.")
+        logger.debug(f"Stdout: {result_stdout}")
+        await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
+        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+        await unlock(cfg, id, pool)
src/utils/parsers/by_type/stazionetotale_messpunktepini_.py: new file, +45
@@ -0,0 +1,45 @@
+import asyncio
+import tempfile
+import os
+
+from utils.database import WorkflowFlags
+from utils.database.loader_action import update_status, unlock
+from utils.csv.data_preparation import get_data
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+async def main_loader(cfg: object, id: int, pool: object) -> None:
+
+    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
+    # Create a temporary file
+    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
+        temp_file.write(ToolData)
+        temp_filename = temp_file.name
+
+    try:
+        # Use asyncio.subprocess for truly async execution
+        process = await asyncio.create_subprocess_exec(
+            'python3', 'old_script/TS_PiniScript.py', temp_filename,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+        stdout, stderr = await process.communicate()
+
+        result_stdout = stdout.decode('utf-8')
+        result_stderr = stderr.decode('utf-8')
+
+    finally:
+        # Clean up the temporary file
+        os.unlink(temp_filename)
+
+    if process.returncode != 0:
+        logger.error(f"Error while running TS_PiniScript.py: {result_stderr}")
+        raise Exception(f"Error in the program: {result_stderr}")
+    else:
+        logger.info("TS_PiniScript.py completed successfully.")
+        logger.debug(f"Stdout: {result_stdout}")
+        await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
+        await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
+        await unlock(cfg, id, pool)