This commit is contained in:
2025-08-02 19:22:48 +02:00
parent 6ff97316dc
commit fdefd0a430
17 changed files with 633 additions and 173 deletions

View File

@@ -8,6 +8,7 @@ CREATE TABLE `received` (
`tool_name` varchar(30) COLLATE utf8mb4_general_ci NOT NULL, `tool_name` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
`tool_type` varchar(30) COLLATE utf8mb4_general_ci NOT NULL, `tool_type` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
`tool_data` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL, `tool_data` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`tool_info` json DEFAULT NULL,
`locked` int DEFAULT '0', `locked` int DEFAULT '0',
`status` int DEFAULT '0', `status` int DEFAULT '0',
`matlab_timestamp` timestamp NULL DEFAULT NULL, `matlab_timestamp` timestamp NULL DEFAULT NULL,

6
env/config.ini vendored Normal file
View File

@@ -0,0 +1,6 @@
[mysql]
host = 10.211.114.173
database = ase_lar
user = root
password = batt1l0

9
env/ftp.ini vendored
View File

@@ -21,12 +21,17 @@
logFilename = ../logs/ftp_csv_rec.log logFilename = ../logs/ftp_csv_rec.log
[unit] [unit]
Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG Types = G801|G201|G301|G802|D2W|GFLOW|CR1000X|TLP|GS1|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|ISI CSV LOG
Names = ID[0-9]{4}|IX[0-9]{4}|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9] Names = ID[0-9]{4}|IX[0-9]{4}|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
Alias = HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE|MESSPUNKTEPINI_:STAZIONETOTALE|CO_:SOROTECPINI
[tool] [tool]
Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|RIFKL|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK Types = MUX|MUMS|MODB|IPTM|MUSA|LOC|GD|D2W|CR1000X|G301|NESA|GS1|G201|TLP|DSAS|HORTUS|HEALTH-|READINGS-|INTEGRITY MONITOR|MESSPUNKTEPINI_|HIRPINIA|CO_[0-9]{4}_[0-9]|VULINK
Names = LOC[0-9]{4}|DT[0-9]{4}|GD[0-9]{4}|[0-9]{18}|MEASUREMENTS_|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9] Names = LOC[0-9]{4}|DT[0-9]{4}|GD[0-9]{4}|[0-9]{18}|MEASUREMENTS_|CHESA_ARCOIRIS_[0-9]*|TS_PS_PETITES_CROISETTES|CO_[0-9]{4}_[0-9]
Alias = HEALTH-:SISGEO|READINGS-:SISGEO|INTEGRITY MONITOR:STAZIONETOTALE|MESSPUNKTEPINI_:STAZIONETOTALE|CO_:SOROTECPINI
[csv] [csv]
Infos = IP|Subnet|Gateway Infos = IP|Subnet|Gateway
[ts_pini]:
path_match = [276_208_TS0003]:TS0003|[Neuchatel_CDP]:TS7|[TS0006_EP28]:=|[TS0007_ChesaArcoiris]:=|[TS0006_EP28_3]:=|[TS0006_EP28_4]:TS0006_EP28_4|[TS0006_EP28_5]:TS0006_EP28_5|[TS18800]:=|[Granges_19 100]:=|[Granges_19 200]:=|[Chesa_Arcoiris_2]:=|[TS0006_EP28_1]:=|[TS_PS_Petites_Croisettes]:=|[_Chesa_Arcoiris_1]:=|[TS_test]:=|[TS-VIME]:=

4
env/load.ini vendored
View File

@@ -1,5 +1,5 @@
[logging] [logging]:
logFilename = ../logs/load_raw_data.log logFilename = ../logs/load_raw_data.log
[threads] [threads]:
max_num = 5 max_num = 5

View File

@@ -35,7 +35,7 @@ def getDataFromCsvAndInsert(pathFile):
# print('Error:', e) # print('Error:', e)
folder_name = pathFile.split("/")[-2]#cartella folder_name = pathFile.split("/")[-2]#cartella
if "[276_208_TS0003]" in pathFile: if "[276_208_TS0003]" in pathFile:
folder_name = "TS0003" folder_name = "TS0003"
elif "[Neuchatel_CDP]" in pathFile: elif "[Neuchatel_CDP]" in pathFile:
@@ -319,7 +319,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -334,7 +334,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][63]), int(resultSoglie[0][64])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][63]), int(resultSoglie[0][64])])
conn.commit() conn.commit()
elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][46])) and abs(xPenultimo) <= abs(float(resultSoglie[0][47]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][47])) and abs(xPenultimo) <= abs(float(resultSoglie[0][48]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][48])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -366,7 +366,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "X", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or if not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -381,7 +381,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][17]), int(resultSoglie[0][18])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, x, 2, "X", int(resultSoglie[0][17]), int(resultSoglie[0][18])])
conn.commit() conn.commit()
elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or elif not ( (abs(xPenultimo) >= abs(float(resultSoglie[0][0])) and abs(xPenultimo) <= abs(float(resultSoglie[0][1]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or (abs(xPenultimo) >= abs(float(resultSoglie[0][1])) and abs(xPenultimo) <= abs(float(resultSoglie[0][2]))) or
(abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ): (abs(xPenultimo) >= abs(float(resultSoglie[0][2])) and abs(xPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -413,7 +413,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -428,7 +428,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][69]), int(resultSoglie[0][70])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][69]), int(resultSoglie[0][70])])
conn.commit() conn.commit()
elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][49])) and abs(yPenultimo) <= abs(float(resultSoglie[0][50]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][50])) and abs(yPenultimo) <= abs(float(resultSoglie[0][51]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][51])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -460,7 +460,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "Y", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or if not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -475,7 +475,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][23]), int(resultSoglie[0][24])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, y, 2, "Y", int(resultSoglie[0][23]), int(resultSoglie[0][24])])
conn.commit() conn.commit()
elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or elif not ( (abs(yPenultimo) >= abs(float(resultSoglie[0][3])) and abs(yPenultimo) <= abs(float(resultSoglie[0][4]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or (abs(yPenultimo) >= abs(float(resultSoglie[0][4])) and abs(yPenultimo) <= abs(float(resultSoglie[0][5]))) or
(abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ): (abs(yPenultimo) >= abs(float(resultSoglie[0][5])) and abs(yPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -511,7 +511,7 @@ def getDataFromCsvAndInsert(pathFile):
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
#print(abs(zPenultimo), ultimaDataDatoPenultimo) #print(abs(zPenultimo), ultimaDataDatoPenultimo)
if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ):
#print("creo") #print("creo")
@@ -527,7 +527,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][75]), int(resultSoglie[0][76])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][75]), int(resultSoglie[0][76])])
conn.commit() conn.commit()
elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][52])) and abs(zPenultimo) <= abs(float(resultSoglie[0][53]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][53])) and abs(zPenultimo) <= abs(float(resultSoglie[0][54]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][54])) and abs(zPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -560,7 +560,7 @@ def getDataFromCsvAndInsert(pathFile):
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
#print(abs(zPenultimo), ultimaDataDatoPenultimo) #print(abs(zPenultimo), ultimaDataDatoPenultimo)
if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or if not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ):
#print("creo") #print("creo")
@@ -576,7 +576,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][29]), int(resultSoglie[0][30])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, z, 2, "Z", int(resultSoglie[0][29]), int(resultSoglie[0][30])])
conn.commit() conn.commit()
elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or elif not ( (abs(zPenultimo) >= abs(float(resultSoglie[0][6])) and abs(zPenultimo) <= abs(float(resultSoglie[0][7]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or (abs(zPenultimo) >= abs(float(resultSoglie[0][7])) and abs(zPenultimo) <= abs(float(resultSoglie[0][8]))) or
(abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ): (abs(zPenultimo) >= abs(float(resultSoglie[0][8])) and abs(zPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -608,7 +608,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -623,7 +623,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][81]), int(resultSoglie[0][82])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][81]), int(resultSoglie[0][82])])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][55])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][56]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][56])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][57]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][57])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -655,7 +655,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R2D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or if not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -670,7 +670,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][35]), int(resultSoglie[0][36])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r2d, 2, "R2D", int(resultSoglie[0][35]), int(resultSoglie[0][36])])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or elif not ( (abs(r2dPenultimo) >= abs(float(resultSoglie[0][9])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][10]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or (abs(r2dPenultimo) >= abs(float(resultSoglie[0][10])) and abs(r2dPenultimo) <= abs(float(resultSoglie[0][11]))) or
(abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(resultSoglie[0][11])) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -702,7 +702,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -717,7 +717,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][87]), int(resultSoglie[0][88])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][87]), int(resultSoglie[0][88])])
conn.commit() conn.commit()
elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][58])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][59]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][59])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][60]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][60])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -749,7 +749,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato]) cursor.execute(query, ["upgeo-mira-id|"+str(mira_id), 1, "R3D", ultimaDataDato])
result = cursor.fetchall() result = cursor.fetchall()
if(len(result) <= 0): if(len(result) <= 0):
if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or if not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -764,7 +764,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][41]), int(resultSoglie[0][42])]) cursor.execute(query, [9, "upgeo-mira-id|"+str(mira_id), ultimaDataDato, r3d, 2, "R3D", int(resultSoglie[0][41]), int(resultSoglie[0][42])])
conn.commit() conn.commit()
elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or elif not ( (abs(r3dPenultimo) >= abs(float(resultSoglie[0][12])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][13]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or (abs(r3dPenultimo) >= abs(float(resultSoglie[0][13])) and abs(r3dPenultimo) <= abs(float(resultSoglie[0][14]))) or
(abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ): (abs(r3dPenultimo) >= abs(float(resultSoglie[0][14])) and abs(r3dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, description, send_email, send_sms) value(%s,%s,%s,%s,%s,%s,%s,%s)"
@@ -830,8 +830,8 @@ def getDataFromCsvAndInsert(pathFile):
fdate = drange.split(",")[0] fdate = drange.split(",")[0]
ldate = drange.split(",")[1] ldate = drange.split(",")[1]
params = [progetto_id, lavoro_id, coppiaMira[0], fdate, ldate] params = [progetto_id, lavoro_id, coppiaMira[0], fdate, ldate]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name, query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id, d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate, uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione, l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisura, l.areaAttenzioneInizioCoppieInc, l.areaInterventoInizioCoppieInc, l.areaInterventoImmediatoInizioCoppieInc, l.soglieCoppieUnitaMisura, l.areaAttenzioneInizioCoppieInc, l.areaInterventoInizioCoppieInc, l.areaInterventoImmediatoInizioCoppieInc,
@@ -856,10 +856,10 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoCoppieSpostLat, l.sms_livello_unoCoppieSpostLat,
l.sms_livello_dueCoppieSpostLat, l.sms_livello_dueCoppieSpostLat,
l.sms_livello_treCoppieSpostLat l.sms_livello_treCoppieSpostLat
from sites as s from sites as s
join upgeo_lavori as l on s.id=l.site_id join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id left join upgeo_note AS un ON un.id = d.nota_id
@@ -874,8 +874,8 @@ def getDataFromCsvAndInsert(pathFile):
arrayCoppie[lavoro_name][coppia[1]][coppiaMira[1]].append(resultDataCoppie) arrayCoppie[lavoro_name][coppia[1]][coppiaMira[1]].append(resultDataCoppie)
else: else:
params = [progetto_id, lavoro_id, coppiaMira[0]] params = [progetto_id, lavoro_id, coppiaMira[0]]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name, query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id, d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate, uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione, l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisura, l.areaAttenzioneInizioCoppieInc, l.areaInterventoInizioCoppieInc, l.areaInterventoImmediatoInizioCoppieInc, l.soglieCoppieUnitaMisura, l.areaAttenzioneInizioCoppieInc, l.areaInterventoInizioCoppieInc, l.areaInterventoImmediatoInizioCoppieInc,
@@ -900,10 +900,10 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoCoppieSpostLat, l.sms_livello_unoCoppieSpostLat,
l.sms_livello_dueCoppieSpostLat, l.sms_livello_dueCoppieSpostLat,
l.sms_livello_treCoppieSpostLat l.sms_livello_treCoppieSpostLat
from sites as s from sites as s
join upgeo_lavori as l on s.id=l.site_id join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id left join upgeo_note AS un ON un.id = d.nota_id
@@ -939,18 +939,18 @@ def getDataFromCsvAndInsert(pathFile):
fdate = drange.split(",")[0] fdate = drange.split(",")[0]
ldate = drange.split(",")[1] ldate = drange.split(",")[1]
params = [progetto_id, lavoro_id, coppiaMira[0], fdate, ldate] params = [progetto_id, lavoro_id, coppiaMira[0], fdate, ldate]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name, query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id, d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate, uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione, l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisuraMuro, l.areaAttenzioneInizioCoppieIncMuro, l.areaInterventoInizioCoppieIncMuro, l.areaInterventoImmediatoInizioCoppieIncMuro, l.soglieCoppieUnitaMisuraMuro, l.areaAttenzioneInizioCoppieIncMuro, l.areaInterventoInizioCoppieIncMuro, l.areaInterventoImmediatoInizioCoppieIncMuro,
l.areaAttenzioneInizioCoppieAssestMuro, l.areaInterventoInizioCoppieAssestMuro, l.areaInterventoImmediatoInizioCoppieAssestMuro, l.areaAttenzioneInizioCoppieAssestMuro, l.areaInterventoInizioCoppieAssestMuro, l.areaInterventoImmediatoInizioCoppieAssestMuro,
l.areaAttenzioneInizioCoppieSpostLatMuro, l.areaInterventoInizioCoppieSpostLatMuro, l.areaInterventoImmediatoInizioCoppieSpostLatMuro, l.areaAttenzioneInizioCoppieSpostLatMuro, l.areaInterventoInizioCoppieSpostLatMuro, l.areaInterventoImmediatoInizioCoppieSpostLatMuro,
l.reportVarInclinMuro, l.reportAssestMuro, l.reportSpostLatMuro, l.parametroLettureMuro l.reportVarInclinMuro, l.reportAssestMuro, l.reportSpostLatMuro, l.parametroLettureMuro
from sites as s from sites as s
join upgeo_lavori as l on s.id=l.site_id join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id left join upgeo_note AS un ON un.id = d.nota_id
@@ -965,18 +965,18 @@ def getDataFromCsvAndInsert(pathFile):
arrayCoppieMuro[lavoro_name][coppia[1]][coppiaMira[1]].append(resultDataCoppie) arrayCoppieMuro[lavoro_name][coppia[1]][coppiaMira[1]].append(resultDataCoppie)
else: else:
params = [progetto_id, lavoro_id, coppiaMira[0]] params = [progetto_id, lavoro_id, coppiaMira[0]]
query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name, query = """select d.id as fake_id, d.id as id, l.name AS lavoro_name, l.id AS lavoro_id, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id, d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.operatore_id, d.strumento_id, d.nota_id,
uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate, uo.name as operatore_name, us.description as strumento_desc, un.description as nota_desc, d.sist_coordinate,
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione, l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, s.multipleDateRange as fasi_lavorazione,
l.soglieCoppieUnitaMisuraMuro, l.areaAttenzioneInizioCoppieIncMuro, l.areaInterventoInizioCoppieIncMuro, l.areaInterventoImmediatoInizioCoppieIncMuro, l.soglieCoppieUnitaMisuraMuro, l.areaAttenzioneInizioCoppieIncMuro, l.areaInterventoInizioCoppieIncMuro, l.areaInterventoImmediatoInizioCoppieIncMuro,
l.areaAttenzioneInizioCoppieAssestMuro, l.areaInterventoInizioCoppieAssestMuro, l.areaInterventoImmediatoInizioCoppieAssestMuro, l.areaAttenzioneInizioCoppieAssestMuro, l.areaInterventoInizioCoppieAssestMuro, l.areaInterventoImmediatoInizioCoppieAssestMuro,
l.areaAttenzioneInizioCoppieSpostLatMuro, l.areaInterventoInizioCoppieSpostLatMuro, l.areaInterventoImmediatoInizioCoppieSpostLatMuro, l.areaAttenzioneInizioCoppieSpostLatMuro, l.areaInterventoInizioCoppieSpostLatMuro, l.areaInterventoImmediatoInizioCoppieSpostLatMuro,
l.reportVarInclinMuro, l.reportAssestMuro, l.reportSpostLatMuro, l.parametroLettureMuro l.reportVarInclinMuro, l.reportAssestMuro, l.reportSpostLatMuro, l.parametroLettureMuro
from sites as s from sites as s
join upgeo_lavori as l on s.id=l.site_id join upgeo_lavori as l on s.id=l.site_id
join upgeo_mire as m on m.lavoro_id=l.id join upgeo_mire as m on m.lavoro_id=l.id
join ELABDATAUPGEO as d on d.mira_id=m.id join ELABDATAUPGEO as d on d.mira_id=m.id
left join upgeo_operatori AS uo ON uo.id = d.operatore_id left join upgeo_operatori AS uo ON uo.id = d.operatore_id
left join upgeo_strumenti AS us ON us.id = d.strumento_id left join upgeo_strumenti AS us ON us.id = d.strumento_id
left join upgeo_note AS un ON un.id = d.nota_id left join upgeo_note AS un ON un.id = d.nota_id
@@ -997,15 +997,15 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(sql, (lavoro_id, mira_id, mira_id)) cursor.execute(sql, (lavoro_id, mira_id, mira_id))
result_coppie = cursor.fetchall() result_coppie = cursor.fetchall()
for coppia in result_coppie: for coppia in result_coppie:
sql = """SELECT lavoro_id, num, lista sql = """SELECT lavoro_id, num, lista
FROM upgeo_mire_tralicci FROM upgeo_mire_tralicci
WHERE lavoro_id = %s AND JSON_CONTAINS(lista, CAST(%s AS JSON), '$') WHERE lavoro_id = %s AND JSON_CONTAINS(lista, CAST(%s AS JSON), '$')
ORDER BY num ASC""" ORDER BY num ASC"""
cursor.execute(sql, (lavoro_id, coppia[0])) cursor.execute(sql, (lavoro_id, coppia[0]))
result_tralicci = cursor.fetchall() result_tralicci = cursor.fetchall()
for traliccio in result_tralicci: for traliccio in result_tralicci:
sql = """SELECT id, name, multipleDateRange sql = """SELECT id, name, multipleDateRange
FROM upgeo_mire FROM upgeo_mire
WHERE abilitato = 1 AND lavoro_id = %s AND (id = %s OR id = %s)""" WHERE abilitato = 1 AND lavoro_id = %s AND (id = %s OR id = %s)"""
cursor.execute(sql, (coppia[1], coppia[3], coppia[4])) cursor.execute(sql, (coppia[1], coppia[3], coppia[4]))
result_coppia_mire = cursor.fetchall() result_coppia_mire = cursor.fetchall()
@@ -1023,14 +1023,14 @@ def getDataFromCsvAndInsert(pathFile):
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio,
s.multipleDateRange AS fasi_lavorazione, l.soglieCoppieUnitaMisuraTraliccio, s.multipleDateRange AS fasi_lavorazione, l.soglieCoppieUnitaMisuraTraliccio,
l.areaAttenzioneInizioCoppieIncTraliccio, l.areaInterventoInizioCoppieIncTraliccio, l.areaAttenzioneInizioCoppieIncTraliccio, l.areaInterventoInizioCoppieIncTraliccio,
l.areaInterventoImmediatoInizioCoppieIncTraliccio, l.areaInterventoImmediatoInizioCoppieIncTraliccio,
l.areaAttenzioneInizioCoppieAssestTraliccio, l.areaAttenzioneInizioCoppieAssestTraliccio,
l.areaInterventoInizioCoppieAssestTraliccio, l.areaInterventoInizioCoppieAssestTraliccio,
l.areaInterventoImmediatoInizioCoppieAssestTraliccio, l.areaInterventoImmediatoInizioCoppieAssestTraliccio,
l.areaAttenzioneInizioCoppieSpostLatTraliccio, l.areaAttenzioneInizioCoppieSpostLatTraliccio,
l.areaInterventoInizioCoppieSpostLatTraliccio, l.areaInterventoInizioCoppieSpostLatTraliccio,
l.areaInterventoImmediatoInizioCoppieSpostLatTraliccio, l.areaInterventoImmediatoInizioCoppieSpostLatTraliccio,
l.reportVarInclinTraliccio, l.reportAssestTraliccio, l.reportVarInclinTraliccio, l.reportAssestTraliccio,
l.reportSpostLatTraliccio, l.parametroLettureTraliccio l.reportSpostLatTraliccio, l.parametroLettureTraliccio
FROM sites AS s FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id JOIN upgeo_lavori AS l ON s.id = l.site_id
@@ -1060,14 +1060,14 @@ def getDataFromCsvAndInsert(pathFile):
l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio, l.areaAttenzioneInizio, l.areaInterventoInizio, l.areaInterventoImmediatoInizio,
s.multipleDateRange AS fasi_lavorazione, l.soglieCoppieUnitaMisuraTraliccio, s.multipleDateRange AS fasi_lavorazione, l.soglieCoppieUnitaMisuraTraliccio,
l.areaAttenzioneInizioCoppieIncTraliccio, l.areaInterventoInizioCoppieIncTraliccio, l.areaAttenzioneInizioCoppieIncTraliccio, l.areaInterventoInizioCoppieIncTraliccio,
l.areaInterventoImmediatoInizioCoppieIncTraliccio, l.areaInterventoImmediatoInizioCoppieIncTraliccio,
l.areaAttenzioneInizioCoppieAssestTraliccio, l.areaAttenzioneInizioCoppieAssestTraliccio,
l.areaInterventoInizioCoppieAssestTraliccio, l.areaInterventoInizioCoppieAssestTraliccio,
l.areaInterventoImmediatoInizioCoppieAssestTraliccio, l.areaInterventoImmediatoInizioCoppieAssestTraliccio,
l.areaAttenzioneInizioCoppieSpostLatTraliccio, l.areaAttenzioneInizioCoppieSpostLatTraliccio,
l.areaInterventoInizioCoppieSpostLatTraliccio, l.areaInterventoInizioCoppieSpostLatTraliccio,
l.areaInterventoImmediatoInizioCoppieSpostLatTraliccio, l.areaInterventoImmediatoInizioCoppieSpostLatTraliccio,
l.reportVarInclinTraliccio, l.reportAssestTraliccio, l.reportVarInclinTraliccio, l.reportAssestTraliccio,
l.reportSpostLatTraliccio, l.parametroLettureTraliccio l.reportSpostLatTraliccio, l.parametroLettureTraliccio
FROM sites AS s FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id JOIN upgeo_lavori AS l ON s.id = l.site_id
@@ -1092,15 +1092,15 @@ def getDataFromCsvAndInsert(pathFile):
print(4, lavoro_id, mira_id) print(4, lavoro_id, mira_id)
print() print()
sql = """ sql = """
SELECT SELECT
mire.id AS mira_id, mire.id AS mira_id,
mire.name AS mira_name, mire.name AS mira_name,
mire.multipleDateRange, mire.multipleDateRange,
mire.progressiva_id, mire.progressiva_id,
progressivebinari.name AS progressiva_name, progressivebinari.name AS progressiva_name,
progressivebinari.offsetInizialeSghembo progressivebinari.offsetInizialeSghembo
FROM upgeo_mire AS mire FROM upgeo_mire AS mire
JOIN upgeo_mire_progressivebinari AS progressivebinari JOIN upgeo_mire_progressivebinari AS progressivebinari
ON mire.progressiva_id = progressivebinari.id ON mire.progressiva_id = progressivebinari.id
WHERE mire.abilitato = 1 AND mire.lavoro_id = %s AND mire.id = %s WHERE mire.abilitato = 1 AND mire.lavoro_id = %s AND mire.id = %s
ORDER BY progressivebinari.id ORDER BY progressivebinari.id
@@ -1120,23 +1120,23 @@ def getDataFromCsvAndInsert(pathFile):
fdate, ldate = range_item.split(",") fdate, ldate = range_item.split(",")
params = [progressiva_mira[5], progetto_id, lavoro_id, progressiva_mira[0], fdate, ldate] params = [progressiva_mira[5], progetto_id, lavoro_id, progressiva_mira[0], fdate, ldate]
sql = """ sql = """
SELECT SELECT
d.id AS fake_id, d.id AS id, l.name AS lavoro_name, l.id AS lavoro_id, d.id AS fake_id, d.id AS id, l.name AS lavoro_name, l.id AS lavoro_id,
s.id AS site_id, m.id AS mira_id, m.name AS mira_name, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon,
d.operatore_id, d.strumento_id, d.nota_id, uo.name AS operatore_name, d.operatore_id, d.strumento_id, d.nota_id, uo.name AS operatore_name,
us.description AS strumento_desc, un.description AS nota_desc, us.description AS strumento_desc, un.description AS nota_desc,
d.sist_coordinate, l.areaAttenzioneInizio, l.areaInterventoInizio, d.sist_coordinate, l.areaAttenzioneInizio, l.areaInterventoInizio,
l.areaInterventoImmediatoInizio, s.multipleDateRange AS fasi_lavorazione, l.areaInterventoImmediatoInizio, s.multipleDateRange AS fasi_lavorazione,
m.progressiva_pos, l.passoLong, l.passoTrasv, l.passoSghembo, m.progressiva_pos, l.passoLong, l.passoTrasv, l.passoSghembo,
l.areaAttenzioneInizioBinariTrasv, l.areaInterventoInizioBinariTrasv, l.areaAttenzioneInizioBinariTrasv, l.areaInterventoInizioBinariTrasv,
l.areaInterventoImmediatoInizioBinariTrasv, l.areaAttenzioneInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariTrasv, l.areaAttenzioneInizioBinariLongVert,
l.areaInterventoInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariLongVert, l.areaInterventoInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariLongVert,
l.areaAttenzioneInizioBinariLongOriz, l.areaInterventoInizioBinariLongOriz, l.areaAttenzioneInizioBinariLongOriz, l.areaInterventoInizioBinariLongOriz,
l.areaInterventoImmediatoInizioBinariLongOriz, l.areaAttenzioneInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariLongOriz, l.areaAttenzioneInizioBinariSghembo,
l.areaInterventoInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariSghembo, l.areaInterventoInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariSghembo,
l.reportBinariSpostTrasv, l.reportBinariSpostLongVert, l.reportBinariSpostLongOriz, l.reportBinariSpostTrasv, l.reportBinariSpostLongVert, l.reportBinariSpostLongOriz,
l.reportBinariSghembo, l.reportVarInclin, l.reportAssest, l.reportSpostLat, l.reportBinariSghembo, l.reportVarInclin, l.reportAssest, l.reportSpostLat,
%s AS offsetInizialeSghembo, l.parametroLettureBinari, %s AS offsetInizialeSghembo, l.parametroLettureBinari,
l.email_livello_unoBinariTrasv, l.email_livello_unoBinariTrasv,
l.email_livello_dueBinariTrasv, l.email_livello_dueBinariTrasv,
@@ -1162,14 +1162,14 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoBinariSghembo, l.sms_livello_unoBinariSghembo,
l.sms_livello_dueBinariSghembo, l.sms_livello_dueBinariSghembo,
l.sms_livello_treBinariSghembo l.sms_livello_treBinariSghembo
FROM sites AS s FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id JOIN upgeo_lavori AS l ON s.id = l.site_id
JOIN upgeo_mire AS m ON m.lavoro_id = l.id JOIN upgeo_mire AS m ON m.lavoro_id = l.id
JOIN ELABDATAUPGEO AS d ON d.mira_id = m.id JOIN ELABDATAUPGEO AS d ON d.mira_id = m.id
LEFT JOIN upgeo_operatori AS uo ON uo.id = d.operatore_id LEFT JOIN upgeo_operatori AS uo ON uo.id = d.operatore_id
LEFT JOIN upgeo_strumenti AS us ON us.id = d.strumento_id LEFT JOIN upgeo_strumenti AS us ON us.id = d.strumento_id
LEFT JOIN upgeo_note AS un ON un.id = d.nota_id LEFT JOIN upgeo_note AS un ON un.id = d.nota_id
WHERE s.upgeo = 1 AND s.id = %s AND l.id = %s AND m.id = %s WHERE s.upgeo = 1 AND s.id = %s AND l.id = %s AND m.id = %s
AND d.EventTimestamp BETWEEN %s AND %s""" AND d.EventTimestamp BETWEEN %s AND %s"""
if(resultSoglie[0][94] != ''): if(resultSoglie[0][94] != ''):
sql += " and d.EventTimestamp >= %s" sql += " and d.EventTimestamp >= %s"
@@ -1184,23 +1184,23 @@ def getDataFromCsvAndInsert(pathFile):
else: else:
params = [progressiva_mira[5], progetto_id, lavoro_id, progressiva_mira[0]] params = [progressiva_mira[5], progetto_id, lavoro_id, progressiva_mira[0]]
sql = """ sql = """
SELECT SELECT
d.id AS fake_id, d.id AS id, l.name AS lavoro_name, l.id AS lavoro_id, d.id AS fake_id, d.id AS id, l.name AS lavoro_name, l.id AS lavoro_id,
s.id AS site_id, m.id AS mira_id, m.name AS mira_name, s.id AS site_id, m.id AS mira_id, m.name AS mira_name,
d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon, d.EventTimestamp, d.north, d.east, d.elevation, d.lat, d.lon,
d.operatore_id, d.strumento_id, d.nota_id, uo.name AS operatore_name, d.operatore_id, d.strumento_id, d.nota_id, uo.name AS operatore_name,
us.description AS strumento_desc, un.description AS nota_desc, us.description AS strumento_desc, un.description AS nota_desc,
d.sist_coordinate, l.areaAttenzioneInizio, l.areaInterventoInizio, d.sist_coordinate, l.areaAttenzioneInizio, l.areaInterventoInizio,
l.areaInterventoImmediatoInizio, s.multipleDateRange AS fasi_lavorazione, l.areaInterventoImmediatoInizio, s.multipleDateRange AS fasi_lavorazione,
m.progressiva_pos, l.passoLong, l.passoTrasv, l.passoSghembo, m.progressiva_pos, l.passoLong, l.passoTrasv, l.passoSghembo,
l.areaAttenzioneInizioBinariTrasv, l.areaInterventoInizioBinariTrasv, l.areaAttenzioneInizioBinariTrasv, l.areaInterventoInizioBinariTrasv,
l.areaInterventoImmediatoInizioBinariTrasv, l.areaAttenzioneInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariTrasv, l.areaAttenzioneInizioBinariLongVert,
l.areaInterventoInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariLongVert, l.areaInterventoInizioBinariLongVert, l.areaInterventoImmediatoInizioBinariLongVert,
l.areaAttenzioneInizioBinariLongOriz, l.areaInterventoInizioBinariLongOriz, l.areaAttenzioneInizioBinariLongOriz, l.areaInterventoInizioBinariLongOriz,
l.areaInterventoImmediatoInizioBinariLongOriz, l.areaAttenzioneInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariLongOriz, l.areaAttenzioneInizioBinariSghembo,
l.areaInterventoInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariSghembo, l.areaInterventoInizioBinariSghembo, l.areaInterventoImmediatoInizioBinariSghembo,
l.reportBinariSpostTrasv, l.reportBinariSpostLongVert, l.reportBinariSpostLongOriz, l.reportBinariSpostTrasv, l.reportBinariSpostLongVert, l.reportBinariSpostLongOriz,
l.reportBinariSghembo, l.reportVarInclin, l.reportAssest, l.reportSpostLat, l.reportBinariSghembo, l.reportVarInclin, l.reportAssest, l.reportSpostLat,
%s AS offsetInizialeSghembo, l.parametroLettureBinari, %s AS offsetInizialeSghembo, l.parametroLettureBinari,
l.email_livello_unoBinariTrasv, l.email_livello_unoBinariTrasv,
l.email_livello_dueBinariTrasv, l.email_livello_dueBinariTrasv,
@@ -1226,10 +1226,10 @@ def getDataFromCsvAndInsert(pathFile):
l.sms_livello_unoBinariSghembo, l.sms_livello_unoBinariSghembo,
l.sms_livello_dueBinariSghembo, l.sms_livello_dueBinariSghembo,
l.sms_livello_treBinariSghembo l.sms_livello_treBinariSghembo
FROM sites AS s FROM sites AS s
JOIN upgeo_lavori AS l ON s.id = l.site_id JOIN upgeo_lavori AS l ON s.id = l.site_id
JOIN upgeo_mire AS m ON m.lavoro_id = l.id JOIN upgeo_mire AS m ON m.lavoro_id = l.id
JOIN ELABDATAUPGEO AS d ON d.mira_id = m.id JOIN ELABDATAUPGEO AS d ON d.mira_id = m.id
LEFT JOIN upgeo_operatori AS uo ON uo.id = d.operatore_id LEFT JOIN upgeo_operatori AS uo ON uo.id = d.operatore_id
LEFT JOIN upgeo_strumenti AS us ON us.id = d.strumento_id LEFT JOIN upgeo_strumenti AS us ON us.id = d.strumento_id
LEFT JOIN upgeo_note AS un ON un.id = d.nota_id LEFT JOIN upgeo_note AS un ON un.id = d.nota_id
@@ -1475,7 +1475,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), 1, dato_date, 41]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), 1, dato_date, 41])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or if not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or (abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ): (abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
@@ -1489,8 +1489,8 @@ def getDataFromCsvAndInsert(pathFile):
if(abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))): if(abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 2, sms_livello_dueBinariTrasv, email_livello_dueBinariTrasv]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 2, sms_livello_dueBinariTrasv, email_livello_dueBinariTrasv])
conn.commit() conn.commit()
elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or (abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ): (abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
@@ -1509,7 +1509,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 3, sms_livello_treBinariTrasv, email_livello_treBinariTrasv]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyTrasv), dato_date, dz, 3, sms_livello_treBinariTrasv, email_livello_treBinariTrasv])
conn.commit() conn.commit()
elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or elif not ( (abs(dz_penultimo) >= abs(float(area_attenzione_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or (abs(dz_penultimo) >= abs(float(area_intervento_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_trasv))) or
(abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ): (abs(dz_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_trasv)) and abs(dz_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,41,%s,%s)"
@@ -1607,7 +1607,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0]) print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0])
print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000) print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and
abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000): abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000):
zdx = nearestElementDx[1] zdx = nearestElementDx[1]
zdxPrev = nearestElementDxPrev[1] zdxPrev = nearestElementDxPrev[1]
@@ -1626,7 +1626,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or if not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1641,7 +1641,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 2, "R", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 2, "R", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1660,7 +1660,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 3, "R", sms_livello_treBinariLongVert, email_livello_treBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_dx, 3, "R", sms_livello_treBinariLongVert, email_livello_treBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1678,7 +1678,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0]) print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0])
print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000) print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and
abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000): abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000):
zsx = nearestElementSx[1] zsx = nearestElementSx[1]
zsxPrev = nearestElementSxPrev[1] zsxPrev = nearestElementSxPrev[1]
@@ -1697,7 +1697,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 43])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or if not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1712,7 +1712,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 2, "L", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 2, "L", sms_livello_dueBinariLongVert, email_livello_dueBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1731,7 +1731,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 3, "L", sms_livello_treBinariLongVert, email_livello_treBinariLongVert]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_vert_sx, 3, "L", sms_livello_treBinariLongVert, email_livello_treBinariLongVert])
conn.commit() conn.commit()
elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or elif not ( (abs(spost_long_vert_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_vert))) or
(abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_vert_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_vert)) and abs(spost_long_vert_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,43,%s,%s,%s)"
@@ -1825,7 +1825,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0]) print("nearestElementDxPrev[0]: ", nearestElementDxPrev[0], "nearestElementDx[0]: ", nearestElementDx[0])
print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000) print(abs(arrDxPrev[0][0] - arrDx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and
abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000): abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000):
ndx = nearestElementDx[1] ndx = nearestElementDx[1]
ndx0 = arrDx[0][1] ndx0 = arrDx[0][1]
@@ -1856,7 +1856,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or if not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1871,7 +1871,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 2, "R", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 2, "R", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1890,7 +1890,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 3, "R", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_dx, 3, "R", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_dx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_dx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1908,7 +1908,7 @@ def getDataFromCsvAndInsert(pathFile):
print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0]) print("nearestElementSxPrev[0]: ", nearestElementSxPrev[0], "nearestElementSx[0]: ", nearestElementSx[0])
print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000) print(abs(arrSxPrev[0][0] - arrSx[0][0]), parametro_letture_binari * 1000)
if ( if (
abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and
abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000): abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000):
nsx = nearestElementSx[1] nsx = nearestElementSx[1]
nsx0 = arrSx[0][1] nsx0 = arrSx[0][1]
@@ -1939,7 +1939,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 44])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or if not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1954,7 +1954,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 2, "L", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 2, "L", sms_livello_dueBinariLongOriz, email_livello_dueBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -1973,7 +1973,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 3, "L", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, spost_long_oriz_sx, 3, "L", sms_livello_treBinariLongOriz, email_livello_treBinariLongOriz])
conn.commit() conn.commit()
elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or elif not ( (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_attenzione_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(float(area_intervento_immediato_inizio_binari_long_oriz))) or
(abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ): (abs(spost_long_oriz_sx_penultimo) >= abs(float(area_intervento_immediato_inizio_binari_long_oriz)) and abs(spost_long_oriz_sx_penultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,44,%s,%s,%s)"
@@ -2066,9 +2066,9 @@ def getDataFromCsvAndInsert(pathFile):
max_millis = max(nearestElementDx[0], nearestElementSx[0], nearestElementDxPenultimo[0], nearestElementSxPenultimo[0]) max_millis = max(nearestElementDx[0], nearestElementSx[0], nearestElementDxPenultimo[0], nearestElementSxPenultimo[0])
dato_date = datetime.fromtimestamp(max_millis / 1000).strftime("%Y-%m-%d %H:%M:%S") dato_date = datetime.fromtimestamp(max_millis / 1000).strftime("%Y-%m-%d %H:%M:%S")
if ( if (
abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementDxPrev[0] - nearestElementDx[0]) <= parametro_letture_binari * 1000 and
abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000 and abs(arrDxPrev[0][0] - arrDx[0][0]) <= parametro_letture_binari * 1000 and
abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and abs(nearestElementSxPrev[0] - nearestElementSx[0]) <= parametro_letture_binari * 1000 and
abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000): abs(arrSxPrev[0][0] - arrSx[0][0]) <= parametro_letture_binari * 1000):
zdx = nearestElementDx[1] zdx = nearestElementDx[1]
zdxPrev = nearestElementDxPrev[1] zdxPrev = nearestElementDxPrev[1]
@@ -2095,7 +2095,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 42]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), 1, dato_date, 42])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or if not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or (abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ): (abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
@@ -2110,7 +2110,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 2, sms_livello_dueBinariSghembo, email_livello_dueBinariSghembo]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 2, sms_livello_dueBinariSghembo, email_livello_dueBinariSghembo])
conn.commit() conn.commit()
elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or (abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ): (abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
@@ -2129,7 +2129,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 3, sms_livello_treBinariSghembo, email_livello_treBinariSghembo]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+str(keyProgressivaPrev)+" - "+str(keyProgressiva), dato_date, sghembo, 3, sms_livello_treBinariSghembo, email_livello_treBinariSghembo])
conn.commit() conn.commit()
elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or elif not ( (abs(sghemboPenultimo) >= abs(float(area_attenzione_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or (abs(sghemboPenultimo) >= abs(float(area_intervento_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(float(area_intervento_immediato_inizio_binari_sghembo))) or
(abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ): (abs(sghemboPenultimo) >= abs(float(area_intervento_immediato_inizio_binari_sghembo)) and abs(sghemboPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, send_sms, send_email) value(%s,%s,%s,%s,%s,42,%s,%s)"
@@ -2417,7 +2417,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], 1, dato_date, 11]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], 1, dato_date, 11])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or if not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or (abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ): (abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
@@ -2432,7 +2432,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 2, soglieCoppieUnitaMisura, sms_livello_dueCoppieInc, email_livello_dueCoppieInc]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 2, soglieCoppieUnitaMisura, sms_livello_dueCoppieInc, email_livello_dueCoppieInc])
conn.commit() conn.commit()
elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or (abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ): (abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
@@ -2451,7 +2451,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 3, soglieCoppieUnitaMisura, sms_livello_treCoppieInc, email_livello_treCoppieInc]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" "+daArrayMireName[key][i], dato_date, da, 3, soglieCoppieUnitaMisura, sms_livello_treCoppieInc, email_livello_treCoppieInc])
conn.commit() conn.commit()
elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or elif not ( (abs(daPenultimo) >= abs(float(areaAttenzioneInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or (abs(daPenultimo) >= abs(float(areaInterventoInizioCoppieInc)) and abs(daPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieInc))) or
(abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ): (abs(daPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieInc)) and abs(daPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,11,%s,%s,%s)"
@@ -2470,7 +2470,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 12]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 12])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or if not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or (abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ): (abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
@@ -2485,7 +2485,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 2, "mm", sms_livello_dueCoppieAssest, email_livello_dueCoppieAssest]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 2, "mm", sms_livello_dueCoppieAssest, email_livello_dueCoppieAssest])
conn.commit() conn.commit()
elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or (abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ): (abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
@@ -2504,7 +2504,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 3, "mm", sms_livello_treCoppieAssest, email_livello_treCoppieAssest]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, dz, 3, "mm", sms_livello_treCoppieAssest, email_livello_treCoppieAssest])
conn.commit() conn.commit()
elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or elif not ( (abs(dzPenultimo) >= abs(float(areaAttenzioneInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or (abs(dzPenultimo) >= abs(float(areaInterventoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieAssest))) or
(abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ): (abs(dzPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieAssest)) and abs(dzPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,12,%s,%s,%s)"
@@ -2523,7 +2523,7 @@ def getDataFromCsvAndInsert(pathFile):
cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 13]) cursor.execute(query, ["upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, 1, dato_date, 13])
resultAlarm = cursor.fetchall() resultAlarm = cursor.fetchall()
if(len(resultAlarm) <= 0):#non c'è if(len(resultAlarm) <= 0):#non c'è
if not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or if not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or (abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
@@ -2538,7 +2538,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 2, "mm", sms_livello_dueCoppieSpostLat, email_livello_dueCoppieSpostLat]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 2, "mm", sms_livello_dueCoppieSpostLat, email_livello_dueCoppieSpostLat])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or (abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
@@ -2557,7 +2557,7 @@ def getDataFromCsvAndInsert(pathFile):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 3, "mm", sms_livello_treCoppieSpostLat, email_livello_treCoppieSpostLat]) cursor.execute(query, [9, "upgeo|"+str(lavoro_id)+"|"+serieName+" "+str(i+1)+" - "+mira_name, dato_date, r2d, 3, "mm", sms_livello_treCoppieSpostLat, email_livello_treCoppieSpostLat])
conn.commit() conn.commit()
elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or elif not ( (abs(r2dPenultimo) >= abs(float(areaAttenzioneInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or (abs(r2dPenultimo) >= abs(float(areaInterventoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(float(areaInterventoImmediatoInizioCoppieSpostLat))) or
(abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ): (abs(r2dPenultimo) >= abs(float(areaInterventoImmediatoInizioCoppieSpostLat)) and abs(r2dPenultimo) <= abs(maxValue)) ):
query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)" query = "insert ignore into alarms (type_id, tool_name, date_time, registered_value, alarm_level, tipologia, description, send_sms, send_email) value(%s,%s,%s,%s,%s,13,%s,%s,%s)"
@@ -2565,14 +2565,17 @@ def getDataFromCsvAndInsert(pathFile):
conn.commit() conn.commit()
cursor.close() cursor.close()
conn.close() conn.close()
"""
if "[276_208_TS0003]" in pathFile or "[Neuchatel_CDP]" in pathFile or "[TS0006_EP28]" in pathFile or "[TS0007_ChesaArcoiris]" in pathFile or "[TS0006_EP28_3]" in pathFile or "[TS0006_EP28_4]" in pathFile or "[TS0006_EP28_5]" in pathFile or "[TS18800]" in pathFile or "[Granges_19 100]" in pathFile or "[Granges_19 200]" in pathFile or "[Chesa_Arcoiris_2]" in pathFile or "[TS0006_EP28_1]" in pathFile or "[TS_PS_Petites_Croisettes]" in pathFile or "[_Chesa_Arcoiris_1]" in pathFile or "[TS-VIME]" in pathFile:#sposto il file nella cartella della stazione corretta if "[276_208_TS0003]" in pathFile or "[Neuchatel_CDP]" in pathFile or "[TS0006_EP28]" in pathFile or "[TS0007_ChesaArcoiris]" in pathFile or "[TS0006_EP28_3]" in pathFile or "[TS0006_EP28_4]" in pathFile or "[TS0006_EP28_5]" in pathFile or "[TS18800]" in pathFile or "[Granges_19 100]" in pathFile or "[Granges_19 200]" in pathFile or "[Chesa_Arcoiris_2]" in pathFile or "[TS0006_EP28_1]" in pathFile or "[TS_PS_Petites_Croisettes]" in pathFile or "[_Chesa_Arcoiris_1]" in pathFile or "[TS-VIME]" in pathFile:#sposto il file nella cartella della stazione corretta
orig_folder = pathFile.split("/")[-2] orig_folder = pathFile.split("/")[-2]
new_pathFile = pathFile.replace(orig_folder,"home/"+folder_name) new_pathFile = pathFile.replace(orig_folder,"home/"+folder_name)
shutil.move(pathFile, new_pathFile) shutil.move(pathFile, new_pathFile)
if not os.path.exists(pathFile): if not os.path.exists(pathFile):
print(f"File moved successfully from {pathFile} to {new_pathFile}\n") print(f"File moved successfully from {pathFile} to {new_pathFile}\n")
else: else:
print("File move operation failed.\n") print("File move operation failed.\n")
"""
#except Exception as e: #except Exception as e:
# print(f"An unexpected error occurred: {str(e)}\n") # print(f"An unexpected error occurred: {str(e)}\n")

View File

@@ -1,6 +1,6 @@
from configparser import ConfigParser from configparser import ConfigParser
def read_db_config(filename='/home/battilo/scripts/config.ini', section='mysql'): def read_db_config(filename='../env/config.ini', section='mysql'):
parser = ConfigParser() parser = ConfigParser()
parser.read(filename) parser.read(filename)

171
src/old_script/vulinkScript.py Executable file
View File

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from datetime import datetime
import json
def checkBatteryLevel(db_conn, db_cursor, unit, date_time, battery_perc):
    """Insert a low-battery alarm (type_id=2) for *unit* if needed.

    An alarm is created when the battery drops below 25% and no earlier
    battery alarm for the same unit exists within the previous 24 hours
    (debounce so a flat battery does not alarm on every reading).

    Args:
        db_conn: open MySQL connection (used to commit the insert).
        db_cursor: dictionary cursor on the same connection.
        unit: unit name as stored in the ``alarms`` table.
        date_time: reading timestamp, ``"%Y-%m-%d %H:%M"`` string.
        battery_perc: battery charge in percent.
    """
    print(date_time, battery_perc)
    if float(battery_perc) >= 25:  # battery healthy: nothing to do
        return
    # Most recent battery alarm for this unit before the current reading.
    query = "select unit_name, date_time from alarms where unit_name=%s and date_time < %s and type_id=2 order by date_time desc limit 1"
    db_cursor.execute(query, [unit, date_time])
    result = db_cursor.fetchall()
    if len(result) > 0:
        alarm_date_time = result[0]["date_time"]  # datetime object, not str
        dt1 = datetime.strptime(date_time, "%Y-%m-%d %H:%M")
        time_difference = abs(dt1 - alarm_date_time)
        if time_difference.total_seconds() <= 24 * 60 * 60:
            return  # alarm already raised in the last 24h: suppress duplicate
        print("The difference is above 24 hours. Creo allarme battery")
    else:
        print("Creo allarme battery")
    # description is stored as "75%" — NOTE(review): trigger threshold is 25%,
    # confirm whether the label is meant to be the discharge level instead.
    queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
    db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
    db_conn.commit()
def checkSogliePh(db_conn, db_cursor, unit, tool, node_num, date_time, ph_value, soglie_str):
    """Raise a pH threshold alarm (type_id=3) on a rising edge.

    Thresholds come from the tool's ``soglie`` JSON (the "PH Link" entry).
    An alarm fires only when the current reading exceeds a configured level
    while the previous reading (ELABDATADISP.XShift) was at or below it;
    only the most severe triggered level is inserted.

    Args:
        db_conn: open MySQL connection (used to commit the insert).
        db_cursor: dictionary cursor on the same connection.
        unit, tool, node_num: identify the pH node.
        date_time: reading timestamp, ``"%Y-%m-%d %H:%M"`` string.
        ph_value: current pH reading.
        soglie_str: JSON string with the threshold configuration.
    """
    soglie = json.loads(soglie_str)
    soglia = next((item for item in soglie if item.get("type") == "PH Link"), None)
    if soglia is None:  # no pH thresholds configured for this tool
        return
    data = soglia["data"]
    if data["ph"] != 1:  # pH alarming disabled in configuration
        return
    # Previous pH reading before this one (0 when this is the first sample).
    ph_value_prev = 0
    query = "select XShift, EventDate, EventTime from ELABDATADISP where UnitName=%s and ToolNameID=%s and NodeNum=%s and concat(EventDate, ' ', EventTime) < %s order by concat(EventDate, ' ', EventTime) desc limit 1"
    db_cursor.execute(query, [unit, tool, node_num, date_time])
    resultPhPrev = db_cursor.fetchall()
    if len(resultPhPrev) > 0:
        ph_value_prev = float(resultPhPrev[0]["XShift"])
    print(tool, unit, node_num, date_time, ph_value)
    # Check levels from most to least severe; first rising edge wins.
    for level, key in ((3, "tre"), (2, "due"), (1, "uno")):
        enabled = data[f"ph_{key}"]
        threshold = data[f"ph_{key}_value"]
        if (enabled == 1 and threshold != ''
                and float(ph_value) > float(threshold)
                and ph_value_prev <= float(threshold)):
            print(f"level{level}", tool, unit, node_num, date_time, ph_value)
            queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, level, "pH", data[f"ph_{key}_email"], data[f"ph_{key}_sms"]])
            db_conn.commit()
            return
def getDataFromCsv(pathFile):
    """Import a VuLink logger CSV export into the database.

    The logger serial number is taken from the file name
    (``<serial>_*.csv``) and resolved to a unit/tool pair through the
    ``vulink_tools`` table. Each data row carries readings for four node
    types (nodetype_id: 2=piezometer, 3=barometer, 94=conductivity,
    97=pH); every reading is inserted into RAWDATACOR and ELABDATADISP
    under the matching node, then pH and battery thresholds are checked.

    Args:
        pathFile: path of the CSV file to process.
    """
    try:
        _folder, file_with_extension = os.path.split(pathFile)
        file_name, _ = os.path.splitext(file_with_extension)  # tool-name part
        serial_number = file_name.split("_")[0]
        query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
        query_node_depth = "SELECT depth, t.soglie, n.num as node_num FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"
        query_nodes = "SELECT t.soglie, n.num as node_num, n.nodetype_id FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
        db_config = read_db_config()
        conn = MySQLConnection(**db_config)
        cursor = conn.cursor(dictionary=True)
        cursor.execute(query, [serial_number])
        result = cursor.fetchall()
        if not result:  # unknown serial number: nothing to import
            print(f"No vulink_tools entry for serial {serial_number}\n")
            return
        unit = result[0]["unit_name"]
        tool = result[0]["tool_name"]
        cursor.execute(query_node_depth, [unit, tool])
        resultNode = cursor.fetchall()
        cursor.execute(query_nodes, [unit, tool])
        resultAllNodes = cursor.fetchall()
        # Node numbers by type. NOTE(review): assumes all four node types are
        # configured for the tool — next(...) yields None and this raises
        # TypeError otherwise; confirm that is acceptable.
        node_num_piezo = next((item for item in resultAllNodes if item.get('nodetype_id') == 2), None)["node_num"]
        node_num_baro = next((item for item in resultAllNodes if item.get('nodetype_id') == 3), None)["node_num"]
        node_num_conductivity = next((item for item in resultAllNodes if item.get('nodetype_id') == 94), None)["node_num"]
        node_num_ph = next((item for item in resultAllNodes if item.get('nodetype_id') == 97), None)["node_num"]
        node_depth = float(resultNode[0]["depth"])  # piezometer node depth
        with open(pathFile, 'r', encoding='ISO-8859-1') as file:
            # Skip the 10 header lines of the VuLink export.
            rows = [line.rstrip() for line in file.readlines()[10:]]
        for raw_row in rows:
            fields = raw_row.split(",")
            event_dt = datetime.strptime(fields[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
            date, time = event_dt.split(" ")
            temperature_unit = float(fields[2])
            battery_perc = float(fields[3])
            pressure_baro = float(fields[4]) * 1000  # kPa -> Pa for elab pressure
            conductivity = float(fields[6])
            ph = float(fields[11])
            temperature_piezo = float(fields[14])
            pressure = float(fields[16]) * 1000  # kPa -> Pa
            # Water level: node elevation (sign-flipped) plus sensor reading.
            depth = (node_depth * -1) + float(fields[17])
            # Barometer node.
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, node_num_baro, date, time, battery_perc, temperature_unit, pressure_baro])
            cursor.execute(queryInsElab, [unit, tool, node_num_baro, date, time, pressure_baro])
            conn.commit()
            # Conductivity node (elab value stored in XShift).
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, node_num_conductivity, date, time, battery_perc, temperature_unit, conductivity])
            cursor.execute(queryInsElab, [unit, tool, node_num_conductivity, date, time, conductivity])
            conn.commit()
            # pH node (elab value stored in XShift) plus threshold alarms.
            cursor.execute(queryInsRaw, [unit, tool, node_num_ph, date, time, battery_perc, temperature_unit, ph])
            cursor.execute(queryInsElab, [unit, tool, node_num_ph, date, time, ph])
            conn.commit()
            checkSogliePh(conn, cursor, unit, tool, node_num_ph, event_dt, ph, resultNode[0]["soglie"])
            # Piezometer node: node temperature, water level and pressure.
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, node_num_piezo, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
            cursor.execute(queryInsElab, [unit, tool, node_num_piezo, date, time, temperature_piezo, depth, pressure])
            conn.commit()
            checkBatteryLevel(conn, cursor, unit, event_dt, battery_perc)
    except Error as e:
        print('Error:', e)
def main():
    """CLI entry point: import the CSV file passed as the first argument."""
    csv_path = sys.argv[1]
    getDataFromCsv(csv_path)


if __name__ == '__main__':
    main()

View File

@@ -46,11 +46,29 @@ class Config:
# unit setting # unit setting
self.units_name = [part for part in c.get("unit", "Names").split('|')] self.units_name = [part for part in c.get("unit", "Names").split('|')]
self.units_type = [part for part in c.get("unit", "Types").split('|')] self.units_type = [part for part in c.get("unit", "Types").split('|')]
self.units_alias = {
key: value
for item in c.get("unit", "Alias").split('|')
for key, value in [item.split(':', 1)]
}
#self.units_header = {key: int(value) for pair in c.get("unit", "Headers").split('|') for key, value in [pair.split(':')]} #self.units_header = {key: int(value) for pair in c.get("unit", "Headers").split('|') for key, value in [pair.split(':')]}
# tool setting # tool setting
self.tools_name = [part for part in c.get("tool", "Names").split('|')] self.tools_name = [part for part in c.get("tool", "Names").split('|')]
self.tools_type = [part for part in c.get("tool", "Types").split('|')] self.tools_type = [part for part in c.get("tool", "Types").split('|')]
self.tools_alias = {
key: value
for item in c.get("tool", "Alias").split('|')
for key, value in [item.split(':', 1)]
}
# csv info # csv info
self.csv_infos = [part for part in c.get("csv", "Infos").split('|')] self.csv_infos = [part for part in c.get("csv", "Infos").split('|')]
# TS pini path match
self.ts_pini_path_match = {
key: key[1:-1] if value == '=' else value
for item in c.get("ts_pini", "path_match").split('|')
for key, value in [item.split(':', 1)]
}

View File

@@ -23,10 +23,10 @@ async def get_data(cfg: object, id: int, pool: object) -> tuple:
""" """
async with pool.acquire() as conn: async with pool.acquire() as conn:
async with conn.cursor() as cur: async with conn.cursor() as cur:
await cur.execute(f'select unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}') await cur.execute(f'select filename, unit_name, tool_name, tool_data from {cfg.dbrectable} where id = {id}')
unit_name, tool_name, tool_data = await cur.fetchone() filename, unit_name, tool_name, tool_data = await cur.fetchone()
return unit_name, tool_name, tool_data return filename, unit_name, tool_name, tool_data
async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list: async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
""" """
@@ -39,7 +39,7 @@ async def make_pipe_sep_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
""" """
@@ -78,7 +78,7 @@ async def make_ain_din_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool) node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
@@ -112,7 +112,7 @@ async def make_channels_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool) node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
@@ -140,7 +140,7 @@ async def make_musa_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool) node_channels, node_types, node_ains, node_dins = await get_nodes_type(cfg, ToolNameID, UnitName, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
@@ -173,7 +173,7 @@ async def make_tlp_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
valori_x_nodo = 2 valori_x_nodo = 2
matrice_valori = [] matrice_valori = []
@@ -200,7 +200,7 @@ async def make_gd_matrix(cfg: object, id: int, pool: object) -> list:
Returns: Returns:
list: A list of lists, where each inner list represents a row in the matrix. list: A list of lists, where each inner list represents a row in the matrix.
""" """
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
righe = ToolData.splitlines() righe = ToolData.splitlines()
matrice_valori = [] matrice_valori = []
pattern = r';-?\d+dB$' pattern = r';-?\d+dB$'

View File

@@ -1,10 +1,10 @@
import re import re
def extract_value(patterns: list, primary_source: str, secondary_source: str, default='Not Defined') -> str: def extract_value(patterns: list, primary_source: str, secondary_source: str = None, default='Not Defined') -> str:
for source in (primary_source, secondary_source): for source in [source for source in (primary_source, secondary_source) if source is not None]:
for pattern in patterns: for pattern in patterns:
matches = re.findall(pattern, source, re.IGNORECASE) matches = re.findall(pattern, source, re.IGNORECASE)
if matches: if matches:
return matches[0] # Return the first match immediately return matches[0] # Return the first match immediately
return default # Return default if no matches are found return default # Return default if no matches are found

View File

@@ -1,6 +1,6 @@
import os import os
import logging import logging
import re
import mysql.connector import mysql.connector
from utils.database.connection import connetti_db from utils.database.connection import connetti_db
@@ -30,17 +30,38 @@ def on_file_received(self: object, file: str) -> None:
unit_type = extract_value(cfg.units_type, filename, str(lines[0:10])) unit_type = extract_value(cfg.units_type, filename, str(lines[0:10]))
tool_name = extract_value(cfg.tools_name, filename, str(lines[0:10])) tool_name = extract_value(cfg.tools_name, filename, str(lines[0:10]))
tool_type = extract_value(cfg.tools_type, filename, str(lines[0:10])) tool_type = extract_value(cfg.tools_type, filename, str(lines[0:10]))
tool_info = "{}"
unit_type = cfg.units_alias.get(unit_type.upper(), unit_type)
try: try:
conn = connetti_db(cfg) conn = connetti_db(cfg)
except mysql.connector.Error as e: except mysql.connector.Error as e:
print(f"Error: {e}")
logger.error(f'{e}') logger.error(f'{e}')
# Create a cursor # Create a cursor
cur = conn.cursor() cur = conn.cursor()
# da estrarre in un modulo
if (unit_type.upper() == "ISI CSV LOG" and tool_type.upper() == "VULINK" ):
serial_number = filename.split('_')[0]
tool_info = f'{{"serial_number": {serial_number}}}'
try:
cur.execute(f"SELECT unit_name, tool_name FROM {cfg.dbname}.vulink_tools WHERE serial_number = '{serial_number}'")
unit_name, tool_name = cur.fetchone()
except Exception as e:
logger.warning(f'{tool_type} serial number {serial_number} not found in table vulink_tools. {e}')
# da estrarre in un modulo
if (unit_type.upper() == "STAZIONETOTALE" and tool_type.upper() == "INTEGRITY MONITOR" ):
escaped_keys = [re.escape(key) for key in cfg.ts_pini_path_match.keys()]
stazione = extract_value(escaped_keys, filename)
if stazione:
tool_info = f'{{"Stazione": "{cfg.ts_pini_path_match.get(stazione)}"}}'
try: try:
cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (filename, unit_name, unit_type, tool_name, tool_type, tool_data) VALUES (%s, %s, %s, %s, %s, %s)", (filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines))) cur.execute(f"INSERT INTO {cfg.dbname}.{cfg.dbrectable} (filename, unit_name, unit_type, tool_name, tool_type, tool_data, tool_info) VALUES (%s, %s, %s, %s, %s, %s, %s)", (filename, unit_name.upper(), unit_type.upper(), tool_name.upper(), tool_type.upper(), ''.join(lines), tool_info))
conn.commit() conn.commit()
conn.close() conn.close()

View File

@@ -0,0 +1,45 @@
import asyncio
import tempfile
import os
from utils.database import WorkflowFlags
from utils.database.loader_action import update_status, unlock
from utils.csv.data_preparation import get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None:
    """Run the legacy TS_PiniScript.py over the raw payload of received row *id*.

    The tool data stored in the DB is written to a temporary CSV file (the
    legacy script only accepts a file path), the script is executed without
    blocking the event loop, and on success the row's workflow status is
    advanced (DATA_LOADED, DATA_ELABORATED) and the row is unlocked.

    Args:
        cfg: configuration object (DB/table names used by get_data & co.).
        id: primary key of the received-data row to process.
        pool: async DB connection pool.

    Raises:
        Exception: if the legacy script exits with a non-zero return code.
    """
    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)

    # Persist the payload to a temp file; delete=False because the file must
    # outlive the `with` block so the subprocess can read it.
    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
        temp_file.write(ToolData)
        temp_filename = temp_file.name

    try:
        # asyncio.subprocess for真ly asynchronous execution of the legacy script.
        process = await asyncio.create_subprocess_exec(
            'python3', 'old_script/TS_PiniScript.py', temp_filename,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        result_stdout = stdout.decode('utf-8')
        result_stderr = stderr.decode('utf-8')

        # Check the return code inside the try block: the original checked it
        # after `finally`, so a failed spawn (e.g. FileNotFoundError) left
        # `process` unbound and raised UnboundLocalError, masking the real error.
        if process.returncode != 0:
            logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result_stderr}")
            raise Exception(f"Errore nel programma: {result_stderr}")
    finally:
        # Always remove the temporary file, even when the subprocess fails.
        os.unlink(temp_filename)

    logger.info("Programma TS_PiniScript.py eseguito con successo.")
    logger.debug(f"Stdout: {result_stdout}")
    await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
    await unlock(cfg, id, pool)

View File

@@ -1,8 +1,9 @@
import subprocess import asyncio
import tempfile import tempfile
import os import os
from utils.database.loader_action import DATA_LOADED, update_status, unlock from utils.database import WorkflowFlags
from utils.database.loader_action import update_status, unlock
from utils.csv.data_preparation import get_data from utils.csv.data_preparation import get_data
import logging import logging
@@ -11,25 +12,34 @@ logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None: async def main_loader(cfg: object, id: int, pool: object) -> None:
UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool) filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)
# Creare un file temporaneo # Creare un file temporaneo
with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as temp_file: with tempfile.NamedTemporaryFile(mode='w', prefix= filename, suffix='.csv', delete=False) as temp_file:
temp_file.write(ToolData) temp_file.write(ToolData)
temp_filename = temp_file.name temp_filename = temp_file.name
try: try:
# Eseguire il programma con il file temporaneo # Usa asyncio.subprocess per vero async
result = await subprocess.run(['python3', 'old_script/TS_PiniScript.py', temp_filename], capture_output=True, text=True) process = await asyncio.create_subprocess_exec(
print(result.stdout) 'python3', 'old_script/vulinkScript.py', temp_filename,
print(result.stderr) stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
result_stdout = stdout.decode('utf-8')
result_stderr = stderr.decode('utf-8')
finally: finally:
# Pulire il file temporaneo # Pulire il file temporaneo
os.unlink(temp_filename) os.unlink(temp_filename)
if result.returncode != 0: if process.returncode != 0:
logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result.stderr}") logger.error(f"Errore nell'esecuzione del programma vulinkScript.py: {result_stderr}")
raise Exception(f"Errore nel programma: {result.stderr}") raise Exception(f"Errore nel programma: {result_stderr}")
else: else:
logger.info(f"Programma TS_PiniScript.py eseguito con successo: {result.stdout}") logger.info("Programma vulinkScript.py eseguito con successo.")
await update_status(cfg, id, DATA_LOADED, pool) logger.debug(f"Stdout: {result_stdout}")
await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
await unlock(cfg, id, pool) await unlock(cfg, id, pool)

View File

@@ -0,0 +1,45 @@
import asyncio
import tempfile
import os
from utils.database import WorkflowFlags
from utils.database.loader_action import update_status, unlock
from utils.csv.data_preparation import get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None:
    """Run the legacy TS_PiniScript.py over the raw payload of received row *id*.

    Writes the stored tool data to a temporary CSV file, executes the legacy
    script asynchronously, then on success advances the row's workflow status
    (DATA_LOADED, DATA_ELABORATED) and unlocks it.

    Args:
        cfg: configuration object (DB/table names used by get_data & co.).
        id: primary key of the received-data row to process.
        pool: async DB connection pool.

    Raises:
        Exception: if the legacy script exits with a non-zero return code.
    """
    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)

    # delete=False: the file must outlive the `with` block so the
    # subprocess can read it; it is removed in the `finally` below.
    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
        temp_file.write(ToolData)
        temp_filename = temp_file.name

    try:
        # Use asyncio.subprocess so the event loop is not blocked.
        process = await asyncio.create_subprocess_exec(
            'python3', 'old_script/TS_PiniScript.py', temp_filename,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        result_stdout = stdout.decode('utf-8')
        result_stderr = stderr.decode('utf-8')

        # Return-code check moved inside the try block: checking it after
        # `finally` (as the original did) raises UnboundLocalError and masks
        # the real error when the spawn itself fails.
        if process.returncode != 0:
            logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result_stderr}")
            raise Exception(f"Errore nel programma: {result_stderr}")
    finally:
        # Always remove the temporary file, even on failure.
        os.unlink(temp_filename)

    logger.info("Programma TS_PiniScript.py eseguito con successo.")
    logger.debug(f"Stdout: {result_stdout}")
    await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
    await unlock(cfg, id, pool)

View File

@@ -0,0 +1,45 @@
import asyncio
import tempfile
import os
from utils.database import WorkflowFlags
from utils.database.loader_action import update_status, unlock
from utils.csv.data_preparation import get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None:
    """Run the legacy TS_PiniScript.py over the raw payload of received row *id*.

    Writes the stored tool data to a temporary CSV file, executes the legacy
    script asynchronously, then on success advances the row's workflow status
    (DATA_LOADED, DATA_ELABORATED) and unlocks it.

    Args:
        cfg: configuration object (DB/table names used by get_data & co.).
        id: primary key of the received-data row to process.
        pool: async DB connection pool.

    Raises:
        Exception: if the legacy script exits with a non-zero return code.
    """
    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)

    # delete=False: the file must outlive the `with` block so the
    # subprocess can read it; it is removed in the `finally` below.
    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
        temp_file.write(ToolData)
        temp_filename = temp_file.name

    try:
        # Use asyncio.subprocess so the event loop is not blocked.
        process = await asyncio.create_subprocess_exec(
            'python3', 'old_script/TS_PiniScript.py', temp_filename,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        result_stdout = stdout.decode('utf-8')
        result_stderr = stderr.decode('utf-8')

        # Return-code check moved inside the try block: checking it after
        # `finally` (as the original did) raises UnboundLocalError and masks
        # the real error when the spawn itself fails.
        if process.returncode != 0:
            logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result_stderr}")
            raise Exception(f"Errore nel programma: {result_stderr}")
    finally:
        # Always remove the temporary file, even on failure.
        os.unlink(temp_filename)

    logger.info("Programma TS_PiniScript.py eseguito con successo.")
    logger.debug(f"Stdout: {result_stdout}")
    await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
    await unlock(cfg, id, pool)

View File

@@ -0,0 +1,45 @@
import asyncio
import tempfile
import os
from utils.database import WorkflowFlags
from utils.database.loader_action import update_status, unlock
from utils.csv.data_preparation import get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None:
    """Run the legacy TS_PiniScript.py over the raw payload of received row *id*.

    Writes the stored tool data to a temporary CSV file, executes the legacy
    script asynchronously, then on success advances the row's workflow status
    (DATA_LOADED, DATA_ELABORATED) and unlocks it.

    Args:
        cfg: configuration object (DB/table names used by get_data & co.).
        id: primary key of the received-data row to process.
        pool: async DB connection pool.

    Raises:
        Exception: if the legacy script exits with a non-zero return code.
    """
    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)

    # delete=False: the file must outlive the `with` block so the
    # subprocess can read it; it is removed in the `finally` below.
    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
        temp_file.write(ToolData)
        temp_filename = temp_file.name

    try:
        # Use asyncio.subprocess so the event loop is not blocked.
        process = await asyncio.create_subprocess_exec(
            'python3', 'old_script/TS_PiniScript.py', temp_filename,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        result_stdout = stdout.decode('utf-8')
        result_stderr = stderr.decode('utf-8')

        # Return-code check moved inside the try block: checking it after
        # `finally` (as the original did) raises UnboundLocalError and masks
        # the real error when the spawn itself fails.
        if process.returncode != 0:
            logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result_stderr}")
            raise Exception(f"Errore nel programma: {result_stderr}")
    finally:
        # Always remove the temporary file, even on failure.
        os.unlink(temp_filename)

    logger.info("Programma TS_PiniScript.py eseguito con successo.")
    logger.debug(f"Stdout: {result_stdout}")
    await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
    await unlock(cfg, id, pool)

View File

@@ -0,0 +1,45 @@
import asyncio
import tempfile
import os
from utils.database import WorkflowFlags
from utils.database.loader_action import update_status, unlock
from utils.csv.data_preparation import get_data
import logging
logger = logging.getLogger(__name__)
async def main_loader(cfg: object, id: int, pool: object) -> None:
    """Run the legacy TS_PiniScript.py over the raw payload of received row *id*.

    Writes the stored tool data to a temporary CSV file, executes the legacy
    script asynchronously, then on success advances the row's workflow status
    (DATA_LOADED, DATA_ELABORATED) and unlocks it.

    Args:
        cfg: configuration object (DB/table names used by get_data & co.).
        id: primary key of the received-data row to process.
        pool: async DB connection pool.

    Raises:
        Exception: if the legacy script exits with a non-zero return code.
    """
    filename, UnitName, ToolNameID, ToolData = await get_data(cfg, id, pool)

    # delete=False: the file must outlive the `with` block so the
    # subprocess can read it; it is removed in the `finally` below.
    with tempfile.NamedTemporaryFile(mode='w', prefix=filename, suffix='.csv', delete=False) as temp_file:
        temp_file.write(ToolData)
        temp_filename = temp_file.name

    try:
        # Use asyncio.subprocess so the event loop is not blocked.
        process = await asyncio.create_subprocess_exec(
            'python3', 'old_script/TS_PiniScript.py', temp_filename,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        result_stdout = stdout.decode('utf-8')
        result_stderr = stderr.decode('utf-8')

        # Return-code check moved inside the try block: checking it after
        # `finally` (as the original did) raises UnboundLocalError and masks
        # the real error when the spawn itself fails.
        if process.returncode != 0:
            logger.error(f"Errore nell'esecuzione del programma TS_PiniScript.py: {result_stderr}")
            raise Exception(f"Errore nel programma: {result_stderr}")
    finally:
        # Always remove the temporary file, even on failure.
        os.unlink(temp_filename)

    logger.info("Programma TS_PiniScript.py eseguito con successo.")
    logger.debug(f"Stdout: {result_stdout}")
    await update_status(cfg, id, WorkflowFlags.DATA_LOADED, pool)
    await update_status(cfg, id, WorkflowFlags.DATA_ELABORATED, pool)
    await unlock(cfg, id, pool)