refresh script
@@ -13,11 +13,11 @@ use MIME::Lite;
|
|||||||
$|++; # Autoflush
|
$|++; # Autoflush
|
||||||
|
|
||||||
my ( $filename, $sqlout );
|
my ( $filename, $sqlout );
|
||||||
my $username = 'ase_lar';
|
my $username = 'aselar';
|
||||||
my $password = 'laravel';
|
my $password = 'laravel';
|
||||||
my $db_lar = 'ase_lar';
|
my $db_lar = 'ase_lar';
|
||||||
my $db_sp = 'ase_lar';
|
my $db_sp = 'ase_lar';
|
||||||
my $server = 'localhost';
|
my $server = '85.235.153.201';
|
||||||
my $db_name = "ase_lar";
|
my $db_name = "ase_lar";
|
||||||
my $table = "RAWDATACOR";
|
my $table = "RAWDATACOR";
|
||||||
my ( @CSVData, @sql );
|
my ( @CSVData, @sql );
|
||||||
|
|||||||
17
FTPCSVRAW_v2.sh
Executable file
@@ -0,0 +1,17 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
HOST=$1
|
||||||
|
USER=$5
|
||||||
|
PASSWD=$6
|
||||||
|
FILEPATH=$3
|
||||||
|
REMOTEPATH=$2
|
||||||
|
FILENAME=$4
|
||||||
|
|
||||||
|
ftp -inv -p $HOST <<END_SCRIPT
|
||||||
|
quote USER $USER
|
||||||
|
quote PASS $PASSWD
|
||||||
|
binary
|
||||||
|
cd $REMOTEPATH
|
||||||
|
put $FILEPATH /$REMOTEPATH/$FILENAME
|
||||||
|
quit
|
||||||
|
END_SCRIPT
|
||||||
|
exit 0
|
||||||
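Note on the new upload script: FTPCSVRAW_v2.sh reads its six positional arguments as HOST ($1), REMOTEPATH ($2), FILEPATH ($3), FILENAME ($4), USER ($5) and PASSWD ($6), matching the call added to LoadCSVData.pl further down in this diff. A minimal Perl sketch of that invocation follows; the host, file and credential values are placeholders for illustration, not real configuration.

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Placeholder values for illustration only; LoadCSVData.pl fills these
    # from the units table (ftp_addrs_raw, ftp_target_raw, ...).
    my ( $host, $remotepath, $filepath, $filename, $user, $passwd ) = (
        'ftp.example.com', 'incoming',
        '/tmp/GS1_ID0001_DT0001_0.csv', 'GS1_ID0001_DT0001_0.csv',
        'ftpuser', 'secret'
    );

    # Argument order expected by FTPCSVRAW_v2.sh: HOST REMOTEPATH FILEPATH FILENAME USER PASSWD
    system( 'sh', '/home/battilo/scripts/FTPCSVRAW_v2.sh',
        $host, $remotepath, $filepath, $filename, $user, $passwd ) == 0
        or warn "FTPCSVRAW_v2.sh failed: $?\n";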
671
LoadCSVData.pl
@@ -9,15 +9,16 @@ use List::Util qw( max );
|
|||||||
use Data::Dumper qw(Dumper);
|
use Data::Dumper qw(Dumper);
|
||||||
use Cwd;
|
use Cwd;
|
||||||
use MIME::Lite;
|
use MIME::Lite;
|
||||||
|
use Time::Piece;
|
||||||
|
|
||||||
$|++; # Autoflush
|
$|++; # Autoflush
|
||||||
|
|
||||||
my ( $filename, $sqlout );
|
my ( $filename, $sqlout );
|
||||||
my $username = 'ase_lar';
|
my $username = 'aselar';
|
||||||
my $password = 'laravel';
|
my $password = 'laravel';
|
||||||
my $db_lar = 'ase_lar';
|
my $db_lar = 'ase_lar';
|
||||||
my $db_sp = 'ase_lar';
|
my $db_sp = 'ase_lar';
|
||||||
my $server = '80.211.107.201';
|
my $server = '85.235.153.201';
|
||||||
my $db_name = "ase_lar";
|
my $db_name = "ase_lar";
|
||||||
my $table = "RAWDATACOR";
|
my $table = "RAWDATACOR";
|
||||||
my ( @CSVData, @sql );
|
my ( @CSVData, @sql );
|
||||||
@@ -28,12 +29,17 @@ my (
|
|||||||
$tool, $toolbatt, $tooltemp, $tooltype, @channels,
|
$tool, $toolbatt, $tooltemp, $tooltype, @channels,
|
||||||
@NewData, @ain, @din, @nodetype, $GDEventDate
|
@NewData, @ain, @din, @nodetype, $GDEventDate
|
||||||
);
|
);
|
||||||
my $matlab_proxy_ssh = "80.211.107.201";
|
my $matlab_proxy_ssh = "localhost"; #"80.211.107.201";
|
||||||
my $matlab_proxy_ssh_port = 2222;
|
my $matlab_proxy_ssh_port = 22; #2222;
|
||||||
my $matlab_exec_user = 'aselab';
|
my $matlab_exec_user = 'asega';
|
||||||
my $matlab_cmd = "";
|
my $matlab_cmd = "";
|
||||||
my $matlab_timestamp = "";
|
my $matlab_timestamp = "";
|
||||||
my $ftp_send = 1;
|
my $ftp_send = 1;
|
||||||
|
my $api_send = 0;
|
||||||
|
my $inoltro_api = 0;
|
||||||
|
my $inoltro_api_url = "";
|
||||||
|
my $inoltro_api_bearer_token = "";
|
||||||
|
my $unit_duedate = "";
|
||||||
my $ftp_addrs;
|
my $ftp_addrs;
|
||||||
my $ftp_user;
|
my $ftp_user;
|
||||||
my $ftp_passwd;
|
my $ftp_passwd;
|
||||||
@@ -48,16 +54,18 @@ my $matlab_func_dir = "/usr/local/matlab_func/";
|
|||||||
my $matlab_error = '';
|
my $matlab_error = '';
|
||||||
my $matlab_timeout = '1800';
|
my $matlab_timeout = '1800';
|
||||||
my $email_addr =
|
my $email_addr =
|
||||||
'andrea.carri@aseltd.eu,alessandro.battilani@gmail.com,alessandro.valletta@aseltd.eu';
|
'andrea.carri@aseltd.eu,alessandro.battilani@gmail.com,alessandro.valletta@aseltd.eu,alberto.sillani@aseltd.eu,majd.saidani@aseltd.eu';
|
||||||
my $from = 'ASE Alert System<alert@aseltd.eu>';
|
my $from = 'ASE Alert System<alert@aseltd.eu>';
|
||||||
my ( $email_msg, $email_obj );
|
my ( $email_msg, $email_obj );
|
||||||
my $G201_fcheck = 0;
|
my $G201_fcheck = 0;
|
||||||
|
my $TLP_fcheck = 0;
|
||||||
|
my $GS1_fcheck = 0;
|
||||||
my $D2W_fcheck = 0;
|
my $D2W_fcheck = 0;
|
||||||
my $G301_fcheck = 0;
|
my $G301_fcheck = 0;
|
||||||
my $CR1000X_fcheck = 0;
|
my $CR1000X_fcheck = 0;
|
||||||
my $FtpToCustomerCmd = 'SendFtpElabData.pl';
|
my $FtpToCustomerCmd = 'SendFtpElabData.pl';
|
||||||
my ( $scriptname, $scriptpath );
|
my ( $scriptname, $scriptpath );
|
||||||
my $MatlabErrorPath = "/mnt/elab-error/";
|
my $MatlabErrorPath = "/tmp/";
|
||||||
my $MatlabErrorFilename = $MatlabErrorPath;
|
my $MatlabErrorFilename = $MatlabErrorPath;
|
||||||
my @matlabOutputErrorArray;
|
my @matlabOutputErrorArray;
|
||||||
my @errors;
|
my @errors;
|
||||||
@@ -97,16 +105,15 @@ sub writeOutSql {
|
|||||||
. $outfile
|
. $outfile
|
||||||
. ":$!\n" );
|
. ":$!\n" );
|
||||||
print getTimeStamp("log") . " - pid $$ >> file $outfile created\n";
|
print getTimeStamp("log") . " - pid $$ >> file $outfile created\n";
|
||||||
|
|
||||||
print SQLOUT "/* lock "
|
print SQLOUT "/* lock "
|
||||||
. $db_name
|
. $db_name
|
||||||
. ".$table table */\nLOCK TABLES "
|
. ".$table table */\nLOCK TABLES "
|
||||||
. $db_name
|
. $db_name
|
||||||
. ".$table WRITE;\n";
|
. ".$table WRITE;\n";
|
||||||
print SQLOUT "INSERT IGNORE INTO " . $db_name . ".$table\n";
|
print SQLOUT "INSERT IGNORE INTO " . $db_name . ".$table\n";
|
||||||
print SQLOUT
"(`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,\n";
print SQLOUT
"`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`)\n";
|
print SQLOUT "(`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,\n";
print SQLOUT "`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`)\n";
|
||||||
print SQLOUT "VALUES\n";
|
print SQLOUT "VALUES\n";
|
||||||
print SQLOUT @sql;
|
print SQLOUT @sql;
|
||||||
@sql = ();
|
@sql = ();
|
||||||
@@ -126,11 +133,76 @@ sub writeOutSql {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
sub matlabCalc {
|
sub modifyAndWriteOutSql {
|
||||||
|
my ($tn) = @_;
|
||||||
|
my $outfile2 = $sqlout . "_" . $outfilenum++;
|
||||||
|
open SQLOUT, ">", $outfile2
|
||||||
|
or die( getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Error: opening output file "
|
||||||
|
. $outfile2
|
||||||
|
. ":$!\n" );
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> file $outfile2 IPI created\n";
|
||||||
|
|
||||||
|
print SQLOUT "/* lock "
|
||||||
|
. $db_name
|
||||||
|
. ".$table table */\nLOCK TABLES "
|
||||||
|
. $db_name
|
||||||
|
. ".$table WRITE;\n";
|
||||||
|
print SQLOUT "INSERT IGNORE INTO " . $db_name . ".$table\n";
|
||||||
|
print SQLOUT "(`UnitName`,`ToolNameID`,`NodeNum`,`EventDate`,`EventTime`,`BatLevel`,`Temperature`,\n";
|
||||||
|
print SQLOUT "`Val0`,`Val1`,`Val2`,`Val3`,`Val4`,`Val5`,`Val6`,`Val7`,`Val8`,`Val9`,`ValA`,`ValB`,`ValC`,`ValD`,`ValE`,`ValF`)\n";
|
||||||
|
print SQLOUT "VALUES\n";
|
||||||
|
foreach my $row (@sql) {
|
||||||
|
my @row_values = split /','/, $row;
|
||||||
|
if ($row_values[0] eq "('ID0070" && $row_values[1] eq 'DT0111') {
|
||||||
|
$row_values[0] = "('ID0070";
|
||||||
|
$row_values[1] = 'DT0111 IPI';
|
||||||
|
}
|
||||||
|
elsif ($row_values[0] eq "('ID0071" && $row_values[1] eq 'DT0112') {
|
||||||
|
$row_values[0] = "('ID0071";
|
||||||
|
$row_values[1] = 'DT0112 IPI';
|
||||||
|
}
|
||||||
|
elsif ($row_values[0] eq "('ID0072" && $row_values[1] eq 'DT0113') {
|
||||||
|
$row_values[0] = "('ID0072";
|
||||||
|
$row_values[1] = 'DT0113 IPI';
|
||||||
|
}
|
||||||
|
elsif ($row_values[0] eq "('ID0073" && $row_values[1] eq 'DT0114') {
|
||||||
|
$row_values[0] = "('ID0073";
|
||||||
|
$row_values[1] = 'DT0114 IPI';
|
||||||
|
}
|
||||||
|
elsif ($row_values[0] eq "('ID0273" && $row_values[1] eq 'DT0001') {
|
||||||
|
$row_values[0] = "('ID0273";
|
||||||
|
$row_values[1] = 'DT0002';
|
||||||
|
}
|
||||||
|
elsif ($row_values[0] eq "('ID00279" && $row_values[1] eq 'DT0008') {
|
||||||
|
$row_values[0] = "('ID0279";
|
||||||
|
$row_values[1] = 'DT0008';
|
||||||
|
}
|
||||||
|
my $modified_row_string = "" . join("','", @row_values) . "\n";
|
||||||
|
print SQLOUT $modified_row_string;
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> row IPI[".$row_values[0]." ".$row_values[1]."] $modified_row_string\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
print SQLOUT "/* unlock table */\nUNLOCK TABLES;\n";
|
||||||
|
|
||||||
|
close SQLOUT
|
||||||
|
or die( getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Error in closing file "
|
||||||
|
. $outfile2
|
||||||
|
. "\n" );
|
||||||
|
|
||||||
|
my @args = ( "mysql", "--login-path=asepath", "-e source " . $outfile2 );
|
||||||
|
system(@args) == 0
|
||||||
|
or die( getTimeStamp("log") . " - pid $$ >> system @args failed: $?\n" );
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> file $outfile2 IPI loaded into DB\n";
|
||||||
|
|
||||||
|
unlink $outfile2;
|
||||||
|
return;
|
||||||
|
}
|
||||||
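The new modifyAndWriteOutSql above rewrites the UnitName/ToolNameID of selected rows before loading them a second time (the IPI variants and the ID0273/ID00279 renames). The same substitution can be read as a simple lookup table; the standalone sketch below only illustrates that mapping (helper name and example row are chosen here and are not part of the commit).

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Unit/tool pairs and their replacements, copied from the elsif chain above,
    # keyed on "<first field>|<tool>" as produced by split /','/ on a VALUES row.
    my %ipi_remap = (
        "('ID0070|DT0111"  => [ "('ID0070", 'DT0111 IPI' ],
        "('ID0071|DT0112"  => [ "('ID0071", 'DT0112 IPI' ],
        "('ID0072|DT0113"  => [ "('ID0072", 'DT0113 IPI' ],
        "('ID0073|DT0114"  => [ "('ID0073", 'DT0114 IPI' ],
        "('ID0273|DT0001"  => [ "('ID0273", 'DT0002' ],
        "('ID00279|DT0008" => [ "('ID0279", 'DT0008' ],
    );

    # Apply the remap to one row string in the loader's format.
    sub remap_row {
        my ($row) = @_;
        my @v   = split /','/, $row;
        my $key = "$v[0]|$v[1]";
        @v[ 0, 1 ] = @{ $ipi_remap{$key} } if exists $ipi_remap{$key};
        return join "','", @v;
    }

    print remap_row("('ID0070','DT0111','1','2024-01-01','12:00','3.6','21.5')"), "\n";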
|
|
||||||
|
sub matlabCalc {
|
||||||
$matlab_timestamp = getTimeStamp("db_ts");
|
$matlab_timestamp = getTimeStamp("db_ts");
|
||||||
print getTimeStamp("log")
. " - pid $$ >> $unit - $tool MatLab calc started...\n";
|
print getTimeStamp("log") . " - pid $$ >> $unit - $tool MatLab calc started...\n";
|
||||||
if ( $matlab_cmd =~ /_lnx$/ ) {
|
if ( $matlab_cmd =~ /_lnx$/ ) {
|
||||||
$matlab_cmd = './run_' . $matlab_cmd . '.sh';
|
$matlab_cmd = './run_' . $matlab_cmd . '.sh';
|
||||||
$MatlabErrorFilename .= "${unit}${tool}_output_error.txt";
|
$MatlabErrorFilename .= "${unit}${tool}_output_error.txt";
|
||||||
@@ -138,18 +210,16 @@ sub matlabCalc {
|
|||||||
#print $ENV{"HOME"} . "\n";
|
#print $ENV{"HOME"} . "\n";
|
||||||
$ENV{"HOME"} = "/" . $ENV{"HOME"};
|
$ENV{"HOME"} = "/" . $ENV{"HOME"};
|
||||||
|
|
||||||
my $cmd_elab =
"timeout $matlab_timeout $matlab_cmd $matlab_rt $unit $tool";
#my $args = "sh -c \'cd $matlab_func_dir; pwd; $cmd_elab\'";
my $args =
"ssh $matlab_exec_user\@$matlab_proxy_ssh -p $matlab_proxy_ssh_port \'cd $matlab_func_dir; $cmd_elab\'";
|
chdir($matlab_func_dir)
or die "cannot change: $!\n";
print getTimeStamp("log")
. " - pid $$ >> current dir: $matlab_func_dir\n";
my $args = "timeout $matlab_timeout $matlab_cmd $matlab_rt $unit $tool";
|
||||||
|
|
||||||
if ( system($args) != 0 ) {
|
if ( system($args) != 0 ) {
|
||||||
my $argscat =
"sh -c \'cd $MatlabErrorPath; cat _${unit}_${tool}*_\*_output_error.txt > ${unit}${tool}_output_error.txt\'";
|
my $argscat = "cd $MatlabErrorPath; cat _${unit}_${tool}*_\*_output_error.txt > ${unit}${tool}_output_error.txt";
|
||||||
system($argscat);
|
system($argscat);
|
||||||
|
|
||||||
open( my $fh, '<', $MatlabErrorFilename )
|
open( my $fh, '<', $MatlabErrorFilename )
|
||||||
or warn print getTimeStamp("log")
|
or warn print getTimeStamp("log")
|
||||||
. " - pid $$ >> Cannot open Matlab output error file: ${MatlabErrorFilename}\n";
|
. " - pid $$ >> Cannot open Matlab output error file: ${MatlabErrorFilename}\n";
|
||||||
@@ -167,8 +237,7 @@ sub matlabCalc {
|
|||||||
if ( $exit_value == 124 ) {
|
if ( $exit_value == 124 ) {
|
||||||
print getTimeStamp("log")
|
print getTimeStamp("log")
|
||||||
. " - pid $$ >> system $args excessive duration: killed after $matlab_timeout seconds\n";
|
. " - pid $$ >> system $args excessive duration: killed after $matlab_timeout seconds\n";
|
||||||
$matlab_error =
"Matlab elab excessive duration: killed after $matlab_timeout seconds";
|
$matlab_error = "Matlab elab excessive duration: killed after $matlab_timeout seconds";
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
print getTimeStamp("log")
|
print getTimeStamp("log")
|
||||||
@@ -189,8 +258,7 @@ sub matlabCalc {
|
|||||||
|
|
||||||
sub trxelab {
|
sub trxelab {
|
||||||
my $user = getpwuid($>);
|
my $user = getpwuid($>);
|
||||||
my $ftpcmd =
"$scriptpath$FtpToCustomerCmd -m \"$matlab_timestamp\" -u $unit -t $tool >> /home/$user/log/loadcsvdata.log 2>&1";
|
my $ftpcmd = "$scriptpath$FtpToCustomerCmd -m \"$matlab_timestamp\" -u $unit -t $tool >> /home/$user/log/loadcsvdata.log 2>&1";
|
||||||
print getTimeStamp("log")
|
print getTimeStamp("log")
|
||||||
. " - pid $$ >> Fork FTP command to set GID mysql: $ftpcmd.\n";
|
. " - pid $$ >> Fork FTP command to set GID mysql: $ftpcmd.\n";
|
||||||
|
|
||||||
@@ -201,14 +269,22 @@ sub trxelab {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
sub trxelabApi {
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro ELAB API: /home/battilo/scripts/inoltroViaApiElab.py $matlab_timestamp $unit $tool $inoltro_api_url $inoltro_api_bearer_token\n";
|
||||||
|
unless ( fork() ) {
|
||||||
|
exec("/home/battilo/scripts/inoltroViaApiElab.py \"$matlab_timestamp\" \"$unit\" \"$tool\" \"$inoltro_api_url\" \"$inoltro_api_bearer_token\" >> /home/asega/log/logInoltroViaApiElab.log 2>&1");
|
||||||
|
exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
sub getNodesType {
|
sub getNodesType {
|
||||||
my $dbh =
|
my $dbh =
|
||||||
DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password )
|
DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password )
|
||||||
or die getTimeStamp("log")
|
or die getTimeStamp("log")
|
||||||
. " - pid $$ >> Could not connect to database: $DBI::errstr";
|
. " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
|
||||||
my $sth = $dbh->prepare(
'select t.name as name, n.seq as seq, n.num as num, n.channels as channels, y.type as type, n.ain as ain, n.din as din
|
my $sth = $dbh->prepare('select t.name as name, n.seq as seq, n.num as num, n.channels as channels, y.type as type, n.ain as ain, n.din as din
|
||||||
from nodes as n
|
from nodes as n
|
||||||
inner join tools as t on t.id = n.tool_id
|
inner join tools as t on t.id = n.tool_id
|
||||||
inner join units as u on u.id = t.unit_id
|
inner join units as u on u.id = t.unit_id
|
||||||
@@ -261,6 +337,8 @@ sub getUdbPwd {
|
|||||||
|
|
||||||
$sth->execute();
|
$sth->execute();
|
||||||
if ( $sth->rows eq 0 ) {
|
if ( $sth->rows eq 0 ) {
|
||||||
|
#$password = "laravel";
|
||||||
|
#$username = "ase_lar";
|
||||||
die getTimeStamp("log") . " - pid $$ >> No password selected.\n";
|
die getTimeStamp("log") . " - pid $$ >> No password selected.\n";
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -283,8 +361,7 @@ sub getMatlabCmd {
|
|||||||
or die getTimeStamp("log")
|
or die getTimeStamp("log")
|
||||||
. " - pid $$ >> Could not connect to database: $DBI::errstr";
|
. " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
|
||||||
my $sth = $dbh->prepare(
'select m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools from matfuncs as m
|
my $sth = $dbh->prepare('select m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools, t.api_send, u.inoltro_api, u.inoltro_api_url, u.inoltro_api_bearer_token, IFNULL(u.duedate, "") as duedate from matfuncs as m
|
||||||
inner join tools as t on t.matfunc = m.id
|
inner join tools as t on t.matfunc = m.id
|
||||||
inner join units as u on u.id = t.unit_id
|
inner join units as u on u.id = t.unit_id
|
||||||
inner join statustools as s on t.statustool_id = s.id
|
inner join statustools as s on t.statustool_id = s.id
|
||||||
@@ -302,6 +379,11 @@ sub getMatlabCmd {
|
|||||||
$ftp_send = $results->{'ftp_send'};
|
$ftp_send = $results->{'ftp_send'};
|
||||||
$unit_id = $results->{'unit_id'};
|
$unit_id = $results->{'unit_id'};
|
||||||
$tool_status = $results->{'statustools'};
|
$tool_status = $results->{'statustools'};
|
||||||
|
$api_send = $results->{'api_send'};
|
||||||
|
$inoltro_api = $results->{'inoltro_api'};
|
||||||
|
$inoltro_api_url = $results->{'inoltro_api_url'};
|
||||||
|
$inoltro_api_bearer_token = $results->{'inoltro_api_bearer_token'};
|
||||||
|
$unit_duedate = $results->{'duedate'};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -314,8 +396,8 @@ sub getMatlabCmd {
|
|||||||
|
|
||||||
sub makeEmailMsg {
|
sub makeEmailMsg {
|
||||||
$email_msg = <<"END_MSG";
|
$email_msg = <<"END_MSG";
|
||||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||||
@@ -363,6 +445,8 @@ END_MSG
|
|||||||
|
|
||||||
sub emailSend {
|
sub emailSend {
|
||||||
$email_obj = 'Matlab error on unit ' . $unit . ' - tool ' . $tool;
|
$email_obj = 'Matlab error on unit ' . $unit . ' - tool ' . $tool;
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> $email_msg\n";
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> $email_obj\n";
|
||||||
my $msg = MIME::Lite->new(
|
my $msg = MIME::Lite->new(
|
||||||
From => $from,
|
From => $from,
|
||||||
To => $email_addr,
|
To => $email_addr,
|
||||||
@@ -370,17 +454,9 @@ sub emailSend {
|
|||||||
Data => $email_msg
|
Data => $email_msg
|
||||||
);
|
);
|
||||||
|
|
||||||
|
#AuthUser => "alert\@aseltd.eu", #AuthPass => "Ase#2013!20\@bat",
|
||||||
$msg->attr( "content-type" => "text/html" );
|
$msg->attr( "content-type" => "text/html" );
|
||||||
if (
$msg->send(
'smtp', "smtps.aruba.it",
AuthUser => "alert\@aseltd.eu",
AuthPass => "Ase#2013!20\@bat",
Port => 465,
SSL => 1,
Debug => 0
)
)
|
if ($msg->send('smtp', "smtp.aseltd.eu", AuthUser => "alert\@aseltd.eu", AuthPass => "Ase#2013!20\@bat", Port => 587, Debug => 0))
|
||||||
{
|
{
|
||||||
print getTimeStamp("log") . " - pid $$ >> $email_obj\n";
|
print getTimeStamp("log") . " - pid $$ >> $email_obj\n";
|
||||||
print getTimeStamp("log") . " - pid $$ >> Mail sent to: $email_addr.\n";
|
print getTimeStamp("log") . " - pid $$ >> Mail sent to: $email_addr.\n";
|
||||||
@@ -393,36 +469,104 @@ sub emailSend {
|
|||||||
|
|
||||||
my $starttime = getTimeStamp("log");
|
my $starttime = getTimeStamp("log");
|
||||||
print "$starttime - pid $$ >> Start execution.\n";
|
print "$starttime - pid $$ >> Start execution.\n";
|
||||||
|
|
||||||
GetOptions(
|
GetOptions(
|
||||||
"filename=s" => \$filename,
|
"filename=s" => \$filename,
|
||||||
"sqlout=s" => \$sqlout,
|
"sqlout=s" => \$sqlout,
|
||||||
"dbname=s" => \$db_name
|
"dbname=s" => \$db_name
|
||||||
) or die("Error in command line arguments\n");
|
) or die("Error in command line arguments\n");
|
||||||
|
|
||||||
|
####### EDIT FILE PER ID0013 ##########
|
||||||
open FILE, $filename
|
open FILE, $filename
|
||||||
or die( "Error: opening input file " . $filename . "\n" );
|
or die( "Error: opening input file " . $filename . "\n" );
|
||||||
|
my ( $filecsvname, $path, $suffix ) = fileparse( $filename, qr/\.[^.]*/ );
|
||||||
|
if ( $filecsvname =~ m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d.*|GD\d*)_\d*$/i ) {
|
||||||
|
my @lines = <FILE>; # Read all lines of the file into an array
|
||||||
|
close(FILE) or die "Error closing1 $filename: $!"; # Close the file after reading all lines
|
||||||
|
( $fileDate, $fileTime ) = split( /\s/, $lines[0] ); # Assuming first line is File Creation Date
|
||||||
|
( $unittype, $unit ) = split( /\s/, uc $lines[1] ); # Extract unit from second line
|
||||||
|
$unit =~ s/;+$//; # Clean up the unit string
|
||||||
|
if ($unit eq "ID0013") { # Process only for unit "ID0013"
|
||||||
|
my $section_count = 0;
|
||||||
|
my $keep = 0;
|
||||||
|
my @filtered_lines;
|
||||||
|
my @current_section; # Temporarily store lines of the current section
|
||||||
|
for my $line (@lines) {
|
||||||
|
# Identify the start of a new section
|
||||||
|
if ($line =~ /^File Creation Date:/) {
|
||||||
|
$section_count++;
|
||||||
|
# If previous section was marked to be kept, store it
|
||||||
|
push @filtered_lines, @current_section if $keep;
|
||||||
|
# Reset section data
|
||||||
|
@current_section = ();
|
||||||
|
$keep = 0;
|
||||||
|
}
|
||||||
|
# Always store the current section's lines in a buffer
|
||||||
|
push @current_section, $line;
|
||||||
|
# If the SD path contains "/modb/", mark this section to be kept
|
||||||
|
if ($line =~ /SD path: .*?\/modb\//) {
|
||||||
|
$keep = 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
# Ensure the last section is processed
|
||||||
|
push @filtered_lines, @current_section if $keep;
|
||||||
|
# Only modify the file if there are 2 or more sections
|
||||||
|
if ($section_count > 1) {
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> (EDIT PER ID0013) Detected multiple sections ($section_count). Keeping only /modb/ section.\n";
|
||||||
|
# Rewrite the file with filtered content (only the relevant section)
|
||||||
|
open FILE, '>', $filename or die "Error: (EDIT PER ID0013) opening file for writing $filename: $!\n";
|
||||||
|
print FILE @filtered_lines;
|
||||||
|
close(FILE) or die "Error closing2 $filename: $!";
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> File updated, only /modb/ section retained for ID0013.\n";
|
||||||
|
} else {
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> Only one section found, no changes made to file.\n";
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> Unit is not ID0013, continue normal processing.\n";
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
close(FILE) or die "Error closing3 $filename: $!";
|
||||||
|
}
|
||||||
|
|
||||||
|
######### NORMAL PROCESSING ###########
|
||||||
|
open FILE, $filename
|
||||||
|
or die( "Error: opening input file " . $filename . "\n" );
|
||||||
( $scriptname, $scriptpath ) = fileparse($0);
|
( $scriptname, $scriptpath ) = fileparse($0);
|
||||||
|
|
||||||
if ( $db_name ne 'asega' ) {
|
if ( $db_name ne 'asega' ) {
|
||||||
$db_name =~ s/aseu/dbn/;
|
$db_name =~ s/aseu/dbn/;
|
||||||
|
#$db_name = "ase_lar";
|
||||||
$db_sp = $db_name;
|
$db_sp = $db_name;
|
||||||
|
#$db_sp = "ase_lar";
|
||||||
getUdbPwd();
|
getUdbPwd();
|
||||||
|
#die getTimeStamp("log") . " - pid $$ >> debug dbname:".$db_name."\n";
|
||||||
$db_lar = $db_name;
|
$db_lar = $db_name;
|
||||||
|
#$db_lar = "ase_lar";
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
$db_name = 'ase_lar';
|
$db_name = 'ase_lar';
|
||||||
}
|
}
|
||||||
|
( $filecsvname, $path, $suffix ) = fileparse( $filename, qr/\.[^.]*/ );
|
||||||
my ( $filecsvname, $path, $suffix ) = fileparse( $filename, qr/\.[^.]*/ );
|
|
||||||
if ( $filecsvname =~ m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d.*|GD\d*)_\d*$/i ) {
|
if ( $filecsvname =~ m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d.*|GD\d*)_\d*$/i ) {
|
||||||
( $fileDate, $fileTime ) = split( /\s/, <FILE> );
|
( $fileDate, $fileTime ) = split( /\s/, <FILE> );
|
||||||
( $unittype, $unit ) = split( /\s/, uc <FILE> );
|
( $unittype, $unit ) = split( /\s/, uc <FILE> );
|
||||||
$unit =~ s/;+$//;
|
$unit =~ s/;+$//;
|
||||||
|
if ($unit eq "ID0013") {#non mette dati e non fa elab se dentro i dt della id0013 c'è /mums/ nel path sd
|
||||||
|
my $offset_after_second_line = tell(FILE);#salva la posizione
|
||||||
|
my @linesTmp = <FILE>;
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG ID0013 - $linesTmp[3]\n";
|
||||||
|
#my ( $unused_iptext, $unused_ip ) = split( /\s/, $linesTmp[0] );
|
||||||
|
#my ( $unused_subnettext, $unused_subnet ) = split( /\s/, $linesTmp[1] );
|
||||||
|
#my ( $unused_gatewaytext, $unused_gatewayip ) = split( /\s/, $linesTmp[2] );
|
||||||
|
my ( $unused_sdtext, $unused_pathtext, $sd_path ) = split( /\s/, $linesTmp[3] );
|
||||||
|
if($sd_path =~ /\/mums\//){
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> in path SD there is /mums/ for unit=$unit, sd_path=$sd_path EXITING the script\n";
|
||||||
|
close FILE;
|
||||||
|
exit; # Exit the script successfully
|
||||||
|
}
|
||||||
|
seek(FILE, $offset_after_second_line, 0);#torna alla posizione dopo la seconda riga letta
|
||||||
|
}
|
||||||
}
|
}
|
||||||
elsif ( $filecsvname =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {
|
elsif ( $filecsvname =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {
|
||||||
my @strings = $filecsvname =~
/(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
$unittype = $strings[0];
|
$unittype = $strings[0];
|
||||||
$unit = $strings[1];
|
$unit = $strings[1];
|
||||||
$tool = $strings[2];
|
$tool = $strings[2];
|
||||||
@@ -430,6 +574,81 @@ elsif ( $filecsvname =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {
|
|||||||
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
$tooltype = 'MUX';
|
$tooltype = 'MUX';
|
||||||
}
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d\d*_\d*$/i ) {
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,6})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'MUX';
|
||||||
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'TLP';
|
||||||
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d\d*_\d*$/i ) {
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,6})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'TLP';
|
||||||
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'GS1';
|
||||||
|
if($unit eq "ID0273" && $tool eq "DT0001"){
|
||||||
|
$tool = "DT0002";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d\d*_\d*$/i ) {
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,6})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'GS1';
|
||||||
|
if($unit eq "ID0273" && $tool eq "DT0001"){
|
||||||
|
$tool = "DT0002";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {#per id con 1 cifra in +
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,7})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'GS1';
|
||||||
|
if($unit eq "ID00279" && $tool eq "DT0008"){
|
||||||
|
$unit = "ID0279";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d\d*_\d*$/i ) {#per id con 1 cifra in +
|
||||||
|
my @strings = $filecsvname =~ /(.{1,4})_(.{1,7})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'GS1';
|
||||||
|
if($unit eq "ID00279" && $tool eq "DT0008"){
|
||||||
|
$unit = "ID0279";
|
||||||
|
}
|
||||||
|
}
|
||||||
elsif ( $filecsvname =~ m/^D2W_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
|
elsif ( $filecsvname =~ m/^D2W_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
|
||||||
my @strings = $filecsvname =~ /(.{1,3})_(.{1,6})_(.{1,6}).*/;
|
my @strings = $filecsvname =~ /(.{1,3})_(.{1,6})_(.{1,6}).*/;
|
||||||
$unittype = $strings[0];
|
$unittype = $strings[0];
|
||||||
@@ -444,6 +663,15 @@ elsif ( $filecsvname =~ m/^CR1000X_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
|
|||||||
$tool = $strings[2];
|
$tool = $strings[2];
|
||||||
$tooltype = 'CR1000X';
|
$tooltype = 'CR1000X';
|
||||||
}
|
}
|
||||||
|
elsif ( $filecsvname =~ m/^Hortus_ID\d\d\d\d_DT\d\d\d\d_\d*_\d*$/i ) {
|
||||||
|
my @strings = $filecsvname =~ /(.{1,6})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
|
||||||
|
$unittype = $strings[0];
|
||||||
|
$unit = $strings[1];
|
||||||
|
$tool = $strings[2];
|
||||||
|
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
|
||||||
|
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
|
||||||
|
$tooltype = 'CR1000X';
|
||||||
|
}
|
||||||
elsif ( $filecsvname =~ m/^(\d*_|)(G301_ID\d\d\d\d_DT\d\d\d\d_\d*)$/i ) {
|
elsif ( $filecsvname =~ m/^(\d*_|)(G301_ID\d\d\d\d_DT\d\d\d\d_\d*)$/i ) {
|
||||||
my $filecsvname_clean =
|
my $filecsvname_clean =
|
||||||
substr( $filecsvname, index( $filecsvname, "G301" ) );
|
substr( $filecsvname, index( $filecsvname, "G301" ) );
|
||||||
@@ -453,18 +681,39 @@ elsif ( $filecsvname =~ m/^(\d*_|)(G301_ID\d\d\d\d_DT\d\d\d\d_\d*)$/i ) {
|
|||||||
$tool = $strings[2];
|
$tool = $strings[2];
|
||||||
$tooltype = 'G301';
|
$tooltype = 'G301';
|
||||||
}
|
}
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG SONO QUA!!!!!\n";
|
||||||
while ( my $line = <FILE> ) {
|
while ( my $line = <FILE> ) {
|
||||||
|
|
||||||
#if ( $line =~ m/\A [[:ascii:]]* \Z/xms ) {
|
#if ( $line =~ m/\A [[:ascii:]]* \Z/xms ) {
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG $unittype , $tooltype\n";
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG !!!!!!! $line\n";
|
||||||
if ( $line !~ /\x00/ ) {
|
if ( $line !~ /\x00/ ) {
|
||||||
$line =~ tr /\,/\;/ if ( $line =~ m/^\"\d\d\d\d.*/i ); #CR1000X
|
$line =~ tr /\,/\;/ if ( $line =~ m/^\"\d\d\d\d.*/i ); #CR1000X
|
||||||
$line =~ s/\"//g;
|
$line =~ s/\"//g;
|
||||||
|
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG line: $line\n";
|
||||||
my ( $first, $NodeData ) = split( /;/, $line, 2 );
|
my ( $first, $NodeData ) = split( /;/, $line, 2 );
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG nodedata: $NodeData\n";
|
||||||
if ( defined $first and ( $first ne '' ) ) {
|
if ( defined $first and ( $first ne '' ) ) {
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG first:" .$first ."\n";
|
||||||
$first =~ s/;+$//;
|
$first =~ s/;+$//;
|
||||||
$first =~ tr /\-/\//;
|
$first =~ tr /\-/\//;
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG first: $first\n";
|
||||||
|
if ( ( $NodeData =~ m/^;+/ ) ) {#se bat e/o temp sono vuoti metto -1
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG nodedata: $NodeData\n";
|
||||||
|
my (@batTempVuotiSplit) = split( /;/, $NodeData );
|
||||||
|
if($batTempVuotiSplit[0] eq "" || $batTempVuotiSplit[1] eq ""){
|
||||||
|
if($batTempVuotiSplit[0] eq ""){#bat
|
||||||
|
$batTempVuotiSplit[0] = "-1";
|
||||||
|
}
|
||||||
|
if($batTempVuotiSplit[1] eq ""){#temp
|
||||||
|
$batTempVuotiSplit[1] = "-273";
|
||||||
|
}
|
||||||
|
$NodeData = join(';', @batTempVuotiSplit);
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG nodedata modificato: $NodeData\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
if ( !defined $NodeData or ( $NodeData =~ m/^;+/ ) ) {
|
if ( !defined $NodeData or ( $NodeData =~ m/^;+/ ) ) {
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG NodeData3:" .$NodeData."\n";
|
||||||
my @info = ( split( /[\/,\.]/, $first ) );
|
my @info = ( split( /[\/,\.]/, $first ) );
|
||||||
if ( defined $info[3] && $info[3] =~ m/^DT\d\d\d\d$/i ) { #G801
|
if ( defined $info[3] && $info[3] =~ m/^DT\d\d\d\d$/i ) { #G801
|
||||||
$tool = uc $info[3];
|
$tool = uc $info[3];
|
||||||
@@ -502,6 +751,11 @@ while ( my $line = <FILE> ) {
|
|||||||
print getTimeStamp("log")
|
print getTimeStamp("log")
|
||||||
. " - pid $$ >> Found $tooltype tool name: $tool\n";
|
. " - pid $$ >> Found $tooltype tool name: $tool\n";
|
||||||
}
|
}
|
||||||
|
elsif($unittype eq 'Hortus' and $tooltype eq 'CR1000X'){ #Hortus CR1000X
|
||||||
|
print getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Found Hortus $tooltype tool name: $tool\n";
|
||||||
|
getNodesType();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
$NodeData =~ s/\x0d{0,1}\x0a\Z//s;
|
$NodeData =~ s/\x0d{0,1}\x0a\Z//s;
|
||||||
@@ -514,6 +768,18 @@ while ( my $line = <FILE> ) {
|
|||||||
getNodesType();
|
getNodesType();
|
||||||
$G201_fcheck = 1;
|
$G201_fcheck = 1;
|
||||||
}
|
}
|
||||||
|
if ( $unittype eq 'TLP' and $TLP_fcheck eq 0 ) {
|
||||||
|
print getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Found $tooltype tool name: $tool\n";
|
||||||
|
getNodesType();
|
||||||
|
$TLP_fcheck = 1;
|
||||||
|
}
|
||||||
|
if ( $unittype eq 'GS1' and $GS1_fcheck eq 0 ) {
|
||||||
|
print getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Found $tooltype tool name: $tool\n";
|
||||||
|
getNodesType();
|
||||||
|
$GS1_fcheck = 1;
|
||||||
|
}
|
||||||
if ( $unittype eq 'D2W' and $D2W_fcheck eq 0 ) {
|
if ( $unittype eq 'D2W' and $D2W_fcheck eq 0 ) {
|
||||||
print getTimeStamp("log")
|
print getTimeStamp("log")
|
||||||
. " - pid $$ >> Found $tooltype tool name: $tool\n";
|
. " - pid $$ >> Found $tooltype tool name: $tool\n";
|
||||||
@@ -532,6 +798,11 @@ while ( my $line = <FILE> ) {
|
|||||||
getNodesType();
|
getNodesType();
|
||||||
$G301_fcheck = 1;
|
$G301_fcheck = 1;
|
||||||
}
|
}
|
||||||
|
if($unittype eq 'Hortus' and $tooltype eq 'CR1000X'){ #Hortus CR1000X
|
||||||
|
print getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Found Hortus $tooltype tool name: $tool\n";
|
||||||
|
getNodesType();
|
||||||
|
}
|
||||||
|
|
||||||
if ( $tooltype eq "MUX"
|
if ( $tooltype eq "MUX"
|
||||||
or $tooltype eq "D2W"
|
or $tooltype eq "D2W"
|
||||||
@@ -586,6 +857,8 @@ while ( my $line = <FILE> ) {
|
|||||||
my $used_din = 0;
|
my $used_din = 0;
|
||||||
my $used_ain = 0;
|
my $used_ain = 0;
|
||||||
for my $i ( 0 .. $#din ) {
|
for my $i ( 0 .. $#din ) {
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG LOC din: ". $din[$i] ." ain1:".$Ain1." ain2:".$Ain2." din1:". $Din1. " din2:". $Din2. "\n";
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG LOC ain: ". $ain[$i] ." ain1:".$Ain1." ain2:".$Ain2." din1:". $Din1. " din2:". $Din2. "\n";
|
||||||
if ( $din[$i] eq 1 ) {
|
if ( $din[$i] eq 1 ) {
|
||||||
if ( $used_din eq 0 ) {
|
if ( $used_din eq 0 ) {
|
||||||
$CSVData[ $idx++ ] = $Din1;
|
$CSVData[ $idx++ ] = $Din1;
|
||||||
@@ -605,10 +878,14 @@ while ( my $line = <FILE> ) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
elsif ( $din[$i] eq 2 ) {
|
elsif ( $din[$i] eq 2 ) {
|
||||||
$CSVData[ $idx++ ] = $Din1 . ";" . $Din2;
|
#$CSVData[ $idx++ ] = $Din1 . ";" . $Din2;
|
||||||
|
#$CSVData[ $idx++ ] = $Din1;
|
||||||
|
$CSVData[ $idx++ ] = $Din2;
|
||||||
}
|
}
|
||||||
elsif ( $ain[$i] eq 2 ) {
|
elsif ( $ain[$i] eq 2 ) {
|
||||||
$CSVData[ $idx++ ] = $Ain1 . ";" . $Ain2;
|
#$CSVData[ $idx++ ] = $Ain1 . ";" . $Ain2;
|
||||||
|
#$CSVData[ $idx++ ] = $Ain1;
|
||||||
|
$CSVData[ $idx++ ] = $Ain2;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
$CSVData[ $idx++ ] = "N/A";
|
$CSVData[ $idx++ ] = "N/A";
|
||||||
@@ -630,46 +907,126 @@ while ( my $line = <FILE> ) {
|
|||||||
. $h . ':'
|
. $h . ':'
|
||||||
. ( $min + 1 );
|
. ( $min + 1 );
|
||||||
}
|
}
|
||||||
|
elsif( $tooltype eq "TLP") {
|
||||||
|
@NewData = grep /\S/, split( /\||;/, $NodeData );
|
||||||
|
if ( scalar(@NewData) != 0 ) {
|
||||||
|
my $idx = 0;
|
||||||
|
my $batTempPressione = join( ';', splice( @NewData, 0, 3 ) );#3 valori togliere il terzo e metterlo nell'ultimo nodo che dovrebbe essere sempre vuoto
|
||||||
|
my @batTempPressioneParts = split(/;/, $batTempPressione);
|
||||||
|
|
||||||
|
$CSVData[ $idx++ ] = join( ';', @batTempPressioneParts[0,1] );
|
||||||
|
#print "inzio\n";
|
||||||
|
#print $idx++."\n";
|
||||||
|
#print join( ';', @batTempPressioneParts[0,1] )."\n";
|
||||||
|
#print "inizio for\n";
|
||||||
|
foreach my $ch (@channels) {
|
||||||
|
$CSVData[ $idx++ ] = join( ';', splice( @NewData, 0, $ch ) );
|
||||||
|
#print $idx++."\n";
|
||||||
|
#print "canale: $ch\n";
|
||||||
|
#print join( ';', splice( @NewData, 0, $ch ) )."\n";
|
||||||
|
#print "fine giro\n";
|
||||||
|
}
|
||||||
|
$CSVData[ $idx ] = $batTempPressioneParts[2];
|
||||||
|
#print $batTempPressioneParts[2];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elsif( $tooltype eq "GS1") {
|
||||||
|
@NewData = grep /\S/, split( /\||;/, $NodeData );
|
||||||
|
if ( scalar(@NewData) != 0 ) {
|
||||||
|
my $idx = 0;
|
||||||
|
my $batTempPressione = join( ';', splice( @NewData, 0, 3 ) );#3 valori togliere il terzo e metterlo nell'ultimo nodo che dovrebbe essere sempre vuoto
|
||||||
|
my @batTempPressioneParts = split(/;/, $batTempPressione);
|
||||||
|
|
||||||
|
$CSVData[ $idx++ ] = join( ';', @batTempPressioneParts[0,1] );
|
||||||
|
#print "inzio\n";
|
||||||
|
#print $idx++."\n";
|
||||||
|
#print join( ';', @batTempPressioneParts[0,1] )."\n";
|
||||||
|
#print "inizio for\n";
|
||||||
|
foreach my $ch (@channels) {
|
||||||
|
$CSVData[ $idx++ ] = join( ';', splice( @NewData, 0, $ch ) );
|
||||||
|
#print $idx++."\n";
|
||||||
|
#print "canale: $ch\n";
|
||||||
|
#print join( ';', splice( @NewData, 0, $ch ) )."\n";
|
||||||
|
#print "fine giro\n";
|
||||||
|
}
|
||||||
|
$CSVData[ $idx ] = $batTempPressioneParts[2];
|
||||||
|
#print $batTempPressioneParts[2];
|
||||||
|
}
|
||||||
|
}
|
||||||
else {
|
else {
|
||||||
@CSVData = split( /\|/, $NodeData );
|
@CSVData = split( /\|/, $NodeData );
|
||||||
}
|
}
|
||||||
|
|
||||||
my $nodenum = 0;
|
my $nodenum = 0;
|
||||||
foreach (@CSVData) {
|
foreach (@CSVData) {
|
||||||
$sql[ ( $outcount % $maxsqllines ) ] = "";
|
|
||||||
my (@data) = grep { /\S/ } split( /\;/, $_ );
|
#print getTimeStamp("log") . " - pid $$ >> CSVdata: ". $_ ."\n";
|
||||||
if ( $nodenum eq 0 ) {
|
if($_ ne ""){
|
||||||
$toolbatt = $data[0];
|
$sql[ ( $outcount % $maxsqllines ) ] = "";
|
||||||
$tooltemp = $data[1];
|
my (@data) = grep { /\S/ } split( /\;/, $_ );
|
||||||
|
if ( $nodenum eq 0 ) {
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG batt: ". $data[0] . " " . $data[1] ."\n";
|
||||||
|
$toolbatt = $data[0];
|
||||||
|
$tooltemp = $data[1];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
my ( $y, $m, $d, $t ) = split( /[\s\/]/, $first );
|
||||||
|
if ( length($y) != 4 )
|
||||||
|
{ # G201/G301 con la data invertita
|
||||||
|
my $w = $y;
|
||||||
|
$y = $d;
|
||||||
|
$m = sprintf( '%02d', $m );
|
||||||
|
$d = sprintf( '%02d', $w );
|
||||||
|
|
||||||
|
}
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG data $y-$m-$d\n";
|
||||||
|
foreach (@data) {
|
||||||
|
if ( $sql[ ( $outcount % $maxsqllines ) ] eq "" ) {
|
||||||
|
$sql[ ( $outcount % $maxsqllines ) ] = "('$unit','$tool','$nodenum','$y-$m-$d','$t','$toolbatt','$tooltemp'";
|
||||||
|
}
|
||||||
|
$sql[ ( $outcount % $maxsqllines ) ] .= ",'" . $_ . "'";
|
||||||
|
}
|
||||||
|
my $InsCompl = ",NULL" x ( 15 - $#data );
|
||||||
|
$sql[ ( $outcount % $maxsqllines ) ] .= $InsCompl . ")";
|
||||||
|
if ( ++$outcount % $maxsqllines eq 0 ) {
|
||||||
|
$sql[ ( $outcount % $maxsqllines ) - 1 ] .= ";\n";
|
||||||
|
my $tool_status2 = "";
|
||||||
|
my $dbh2 = DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password ) or die getTimeStamp("log") . " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
my $sth2 = $dbh2->prepare(
|
||||||
|
'select m.matcall, t.ftp_send , t.unit_id, s.`desc` as statustools from matfuncs as m
|
||||||
|
inner join tools as t on t.matfunc = m.id
|
||||||
|
inner join units as u on u.id = t.unit_id
|
||||||
|
inner join statustools as s on t.statustool_id = s.id
|
||||||
|
where t.name = "' . $tool . '" and u.name = "' . $unit . '";'
|
||||||
|
) or die getTimeStamp("log") . " - pid $$ >> $DBI::errstr";
|
||||||
|
$sth2->execute();
|
||||||
|
if ( $sth2->rows eq 0 ) {
|
||||||
|
die getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Check tool status later - No tool's matlab function selected.\n";
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
while ( my $results2 = $sth2->fetchrow_hashref ) {
|
||||||
|
$tool_status2 = $results2->{'statustools'};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$sth2->finish;
|
||||||
|
# Disconnect
|
||||||
|
$dbh2->disconnect;
|
||||||
|
if($tool_status2 eq "Test"){
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> tool status: $tool_status2 nothing to do.\n";
|
||||||
|
}else{
|
||||||
|
if(($unit eq "ID0070" && $tool eq "DT0111") || ($unit eq "ID0071" && $tool eq "DT0112") || ($unit eq "ID0072" && $tool eq "DT0113") || ($unit eq "ID0073" && $tool eq "DT0114") || ($unit eq "ID0273" && $tool eq "DT0001") || ($unit eq "ID0279" && $tool eq "DT0008")){
|
||||||
|
modifyAndWriteOutSql($tool);
|
||||||
|
}
|
||||||
|
writeOutSql($tool);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
$sql[ ( $outcount % $maxsqllines ) - 1 ] .= ",\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$nodenum++;
|
||||||
}
|
}
|
||||||
else {
|
|
||||||
my ( $y, $m, $d, $t ) = split( /[\s\/]/, $first );
|
|
||||||
if ( length($y) != 4 )
|
|
||||||
{ # G201/G301 con la data invertita
|
|
||||||
my $w = $y;
|
|
||||||
$y = $d;
|
|
||||||
$m = sprintf( '%02d', $m );
|
|
||||||
$d = sprintf( '%02d', $w );
|
|
||||||
}
|
|
||||||
foreach (@data) {
|
|
||||||
if ( $sql[ ( $outcount % $maxsqllines ) ] eq "" ) {
|
|
||||||
$sql[ ( $outcount % $maxsqllines ) ] =
|
|
||||||
"('$unit','$tool','$nodenum','$y-$m-$d','$t','$toolbatt','$tooltemp'";
|
|
||||||
}
|
|
||||||
$sql[ ( $outcount % $maxsqllines ) ] .=
|
|
||||||
",'" . $_ . "'";
|
|
||||||
}
|
|
||||||
my $InsCompl = ",NULL" x ( 15 - $#data );
|
|
||||||
$sql[ ( $outcount % $maxsqllines ) ] .= $InsCompl . ")";
|
|
||||||
if ( ++$outcount % $maxsqllines eq 0 ) {
|
|
||||||
$sql[ ( $outcount % $maxsqllines ) - 1 ] .= ";\n";
|
|
||||||
writeOutSql($tool);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
$sql[ ( $outcount % $maxsqllines ) - 1 ] .= ",\n";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
$nodenum++;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -677,29 +1034,151 @@ while ( my $line = <FILE> ) {
|
|||||||
}
|
}
|
||||||
close FILE;
|
close FILE;
|
||||||
$sql[ ( $outcount % $maxsqllines ) - 1 ] =~ s/,$/;/g;
|
$sql[ ( $outcount % $maxsqllines ) - 1 ] =~ s/,$/;/g;
|
||||||
writeOutSql($tool);
|
my $tool_status3 = "";
|
||||||
|
#FTP INOLTRO RAW
|
||||||
|
my $inoltro_ftp_raw = 0;
|
||||||
|
my $ftp_send_raw = "";
|
||||||
|
my $ftp_mode_raw = "";
|
||||||
|
my $ftp_addrs_raw = "";
|
||||||
|
my $ftp_user_raw = "";
|
||||||
|
my $ftp_passwd_raw = "";
|
||||||
|
my $ftp_filename_raw = "";
|
||||||
|
my $ftp_target_raw = "";
|
||||||
|
#api raw
|
||||||
|
my $inoltro_api_raw = 0;
|
||||||
|
my $inoltro_api_url_raw = "";
|
||||||
|
my $inoltro_api_bearer_token_raw = "";
|
||||||
|
my $api_send_raw = 0;
|
||||||
|
#
|
||||||
|
my $dbh3 = DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password ) or die getTimeStamp("log") . " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
my $sth3 = $dbh3->prepare(
|
||||||
|
'select m.matcall, t.ftp_send_raw, IFNULL(u.ftp_mode_raw, "") as ftp_mode_raw, IFNULL(u.ftp_addrs_raw, "") as ftp_addrs_raw, IFNULL(u.ftp_user_raw, "") as ftp_user_raw, IFNULL(u.ftp_passwd_raw, "") as ftp_passwd_raw, IFNULL(u.ftp_filename_raw, "") as ftp_filename_raw, IFNULL(u.ftp_parm_raw, "") as ftp_parm_raw, IFNULL(u.ftp_target_raw, "") as ftp_target_raw, t.unit_id, s.`desc` as statustools, u.inoltro_ftp_raw,
|
||||||
|
u.inoltro_api_raw, IFNULL(u.inoltro_api_url_raw, "") as inoltro_api_url_raw, IFNULL(u.inoltro_api_bearer_token_raw, "") as inoltro_api_bearer_token_raw, t.api_send_raw, IFNULL(u.duedate, "") as duedate from matfuncs as m
|
||||||
|
inner join tools as t on t.matfunc = m.id
|
||||||
|
inner join units as u on u.id = t.unit_id
|
||||||
|
inner join statustools as s on t.statustool_id = s.id
|
||||||
|
where t.name = "' . $tool . '" and u.name = "' . $unit . '";'
|
||||||
|
) or die getTimeStamp("log") . " - pid $$ >> $DBI::errstr";
|
||||||
|
$sth3->execute();
|
||||||
|
if ( $sth3->rows eq 0 ) {
|
||||||
|
die getTimeStamp("log")
|
||||||
|
. " - pid $$ >> Check tool status later - No tool's matlab function selected.\n";
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
while ( my $results3 = $sth3->fetchrow_hashref ) {
|
||||||
|
$tool_status3 = $results3->{'statustools'};
|
||||||
|
$ftp_send_raw = $results3->{'ftp_send_raw'};
|
||||||
|
$ftp_mode_raw = $results3->{'ftp_mode_raw'};
|
||||||
|
$ftp_addrs_raw = $results3->{'ftp_addrs_raw'};
|
||||||
|
$ftp_user_raw = $results3->{'ftp_user_raw'};
|
||||||
|
$ftp_passwd_raw = $results3->{'ftp_passwd_raw'};
|
||||||
|
$ftp_filename_raw = $results3->{'ftp_filename_raw'};
|
||||||
|
$ftp_target_raw = $results3->{'ftp_target_raw'};
|
||||||
|
$inoltro_ftp_raw = $results3->{'inoltro_ftp_raw'};
|
||||||
|
#
|
||||||
|
$inoltro_api_raw = $results3->{'inoltro_api_raw'};
|
||||||
|
$inoltro_api_url_raw = $results3->{'inoltro_api_url_raw'};
|
||||||
|
$inoltro_api_bearer_token_raw = $results3->{'inoltro_api_bearer_token_raw'};
|
||||||
|
$api_send_raw = $results3->{'api_send_raw'};
|
||||||
|
#
|
||||||
|
$unit_duedate = $results3->{'duedate'};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$sth3->finish;
|
||||||
|
# Disconnect
|
||||||
|
$dbh3->disconnect;
|
||||||
|
#INOLTRO RAW CSV
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW filename: $filename\n";
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> $ftp_send_raw $ftp_addrs_raw $ftp_user_raw $ftp_passwd_raw\n";
|
||||||
|
if($inoltro_ftp_raw eq 1 && $ftp_send_raw eq 1 && $ftp_addrs_raw ne "" && $ftp_user_raw ne "" && $ftp_passwd_raw ne ""){
|
||||||
|
if($ftp_target_raw eq ""){
|
||||||
|
$ftp_target_raw = "/";
|
||||||
|
}
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW filenameraw: $ftp_filename_raw -- $tool\n";
|
||||||
|
if($ftp_filename_raw eq ""){
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW: 1\n";
|
||||||
|
$ftp_filename_raw = $filecsvname;
|
||||||
|
}else{
|
||||||
|
#print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW: 2 $filecsvname\n";
|
||||||
|
$ftp_filename_raw =~ s/\$tool/$filecsvname/g;
|
||||||
|
}
|
||||||
|
#$ftp_filename_raw = "test";
|
||||||
|
if (!defined $unit_duedate || $unit_duedate eq '' || $unit_duedate eq '0000-00-00 00:00:00') {
|
||||||
|
# duedate is empty or "0000-00-00 00:00:00", so proceed
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW: /home/battilo/scripts/FTPCSVRAW_v2.sh $ftp_addrs_raw $ftp_target_raw $filename $ftp_filename_raw$suffix $ftp_user_raw $ftp_passwd_raw\n";
|
||||||
|
system("sh /home/battilo/scripts/FTPCSVRAW_v2.sh $ftp_addrs_raw $ftp_target_raw $filename $ftp_filename_raw$suffix $ftp_user_raw $ftp_passwd_raw");
|
||||||
|
} else {
|
||||||
|
my $duedateTmp1 = Time::Piece->strptime($unit_duedate, "%Y-%m-%d %H:%M:%S");
|
||||||
|
my $now1 = localtime;
|
||||||
|
if ($duedateTmp1 >= $now1) {
|
||||||
|
# duedate is valid and not expired, so proceed
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW: /home/battilo/scripts/FTPCSVRAW_v2.sh $ftp_addrs_raw $ftp_target_raw $filename $ftp_filename_raw$suffix $ftp_user_raw $ftp_passwd_raw\n";
|
||||||
|
system("sh /home/battilo/scripts/FTPCSVRAW_v2.sh $ftp_addrs_raw $ftp_target_raw $filename $ftp_filename_raw$suffix $ftp_user_raw $ftp_passwd_raw");
|
||||||
|
}else{
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> centralina ($filename) scaduta! no ftp inoltro raw.\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#INOLTRO RAW API
|
||||||
|
if($inoltro_api_raw eq 1 && $api_send_raw eq 1 && $inoltro_api_url_raw ne ""){
|
||||||
|
if (!defined $unit_duedate || $unit_duedate eq '' || $unit_duedate eq '0000-00-00 00:00:00') {
|
||||||
|
# duedate is empty or "0000-00-00 00:00:00", so proceed
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW API: /home/battilo/scripts/inoltroViaApiRaw.py $filename $inoltro_api_url_raw $inoltro_api_bearer_token_raw\n";
|
||||||
|
exec("/home/battilo/scripts/inoltroViaApiRaw.py \"$filename\" \"$inoltro_api_url_raw\" \"$inoltro_api_bearer_token_raw\" >> /home/asega/log/logInoltroViaApiRaw.log 2>&1");
|
||||||
|
} else {
|
||||||
|
my $duedateTmp2 = Time::Piece->strptime($unit_duedate, "%Y-%m-%d %H:%M:%S");
|
||||||
|
my $now2 = localtime;
|
||||||
|
if ($duedateTmp2 >= $now2) {
|
||||||
|
# duedate is valid and not expired, so proceed
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> DEBUG inoltro RAW API: /home/battilo/scripts/inoltroViaApiRaw.py $filename $inoltro_api_url_raw $inoltro_api_bearer_token_raw\n";
|
||||||
|
exec("/home/battilo/scripts/inoltroViaApiRaw.py \"$filename\" \"$inoltro_api_url_raw\" \"$inoltro_api_bearer_token_raw\" >> /home/asega/log/logInoltroViaApiRaw.log 2>&1");
|
||||||
|
}else{
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> centralina ($filename) scaduta! no api inoltro raw.\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#
|
||||||
|
if($tool_status3 eq "Test"){
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> tool status: $tool_status3 nothing to do.\n";
|
||||||
|
}else{#insert in RAW table
|
||||||
|
if(($unit eq "ID0070" && $tool eq "DT0111") || ($unit eq "ID0071" && $tool eq "DT0112") || ($unit eq "ID0072" && $tool eq "DT0113") || ($unit eq "ID0073" && $tool eq "DT0114") || ($unit eq "ID0273" && $tool eq "DT0001") || ($unit eq "ID0279" && $tool eq "DT0008")){
|
||||||
|
modifyAndWriteOutSql($tool);
|
||||||
|
}
|
||||||
|
writeOutSql($tool);
|
||||||
|
}
|
||||||
if ( $tooltype eq "GD" ) {
|
if ( $tooltype eq "GD" ) {
|
||||||
print getTimeStamp("log") . " - pid $$ >> tool GD: nothing to do.\n";
|
print getTimeStamp("log") . " - pid $$ >> tool GD: nothing to do.\n";
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
|
||||||
getMatlabCmd();
|
getMatlabCmd();
|
||||||
print getTimeStamp("log")
. " - pid $$ >> $unit - $tool - Status $tool_status.\n";
if ( $tool_status eq 'Monitoring Completed' ) {
print getTimeStamp("log")
. " - pid $$ >> $unit - $tool - Monitoring completed: MatLab calc by-passed.\n";
}
else {
|
print getTimeStamp("log") . " - pid $$ >> $unit - $tool - Status $tool_status.\n";
if ( $tool_status eq 'Active' || $tool_status eq 'Manual Upload' ) {
|
||||||
|
|
||||||
matlabCalc();
|
matlabCalc();
|
||||||
|
|
||||||
if ($ftp_send) {
|
if ($ftp_send) {
|
||||||
if ( $tool eq 'DT0076' ) {
|
if ( $tool eq 'DT0076' ) {
|
||||||
sleep(600);
|
sleep(600);
|
||||||
}
|
}
|
||||||
trxelab();
|
trxelab();
|
||||||
}
|
}
|
||||||
|
if($inoltro_api eq 1 && $api_send eq 1){
|
||||||
|
if (!defined $unit_duedate || $unit_duedate eq '' || $unit_duedate eq '0000-00-00 00:00:00') {
|
||||||
|
# duedate is empty or "0000-00-00 00:00:00", so proceed
|
||||||
|
trxelabApi();
|
||||||
|
} else {
|
||||||
|
my $duedateTmp = Time::Piece->strptime($unit_duedate, "%Y-%m-%d %H:%M:%S");
|
||||||
|
my $now = localtime;
|
||||||
|
if ($duedateTmp >= $now) {
|
||||||
|
# duedate is valid and not expired, so proceed
|
||||||
|
trxelabApi();
|
||||||
|
}else{
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> $unit: scaduta! no api inoltro elab.\n";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
print getTimeStamp("log") . " - pid $$ >> $unit - $tool - $tool_status: MatLab calc by-passed.\n";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
0
LoadCSVData.py
Normal file
@@ -7,10 +7,10 @@ use Net::FTP;
|
|||||||
|
|
||||||
$|++; # Autoflush
|
$|++; # Autoflush
|
||||||
|
|
||||||
my $username = 'ase_lar';
|
my $username = 'aselar';
|
||||||
my $password = 'laravel';
|
my $password = 'laravel';
|
||||||
my $db_name = 'ase_lar';
|
my $db_name = 'ase_lar';
|
||||||
my $server = 'localhost';
|
my $server = '85.235.153.201';
|
||||||
my $matlab_timestamp;
|
my $matlab_timestamp;
|
||||||
my $ftp_send = 1;
|
my $ftp_send = 1;
|
||||||
my $ftp_addrs;
|
my $ftp_addrs;
|
||||||
@@ -21,6 +21,7 @@ my $ftp_filename = '';
|
|||||||
my $ftp_target;
|
my $ftp_target;
|
||||||
my $duedate = '';
|
my $duedate = '';
|
||||||
my ( $unit, $tool );
|
my ( $unit, $tool );
|
||||||
|
my $dest = '/tmp/';
|
||||||
|
|
||||||
sub getTimeStamp
|
sub getTimeStamp
|
||||||
{ # parm [ts] => timestamp for filename; log => timestamp for log
|
{ # parm [ts] => timestamp for filename; log => timestamp for log
|
||||||
@@ -71,6 +72,7 @@ sub trxelab {
|
|||||||
# Disconnect
|
# Disconnect
|
||||||
|
|
||||||
my $fileelab;
|
my $fileelab;
|
||||||
|
my $filetosend;
|
||||||
|
|
||||||
if ( !defined $ftp_filename or $ftp_filename eq "" ) {
|
if ( !defined $ftp_filename or $ftp_filename eq "" ) {
|
||||||
$fileelab =
|
$fileelab =
|
||||||
@@ -78,6 +80,11 @@ sub trxelab {
|
|||||||
. $tool . '_'
|
. $tool . '_'
|
||||||
. getTimeStamp()
|
. getTimeStamp()
|
||||||
. '.csv'; #mettere quello che si prende da query *** aggiungere $unit
|
. '.csv'; #mettere quello che si prende da query *** aggiungere $unit
|
||||||
|
$filetosend =
|
||||||
|
'/tmp/'
|
||||||
|
. $tool . '_'
|
||||||
|
. getTimeStamp()
|
||||||
|
. '.csv'; #mettere quello che si prende da query *** aggiungere $unit
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
$ftp_filename =~ s/(\$\w+)/$1/eeg;
|
$ftp_filename =~ s/(\$\w+)/$1/eeg;
|
||||||
@@ -86,6 +93,10 @@ sub trxelab {
|
|||||||
'/var/lib/mysql-files/'
|
'/var/lib/mysql-files/'
|
||||||
. $ftp_filename . '_'
|
. $ftp_filename . '_'
|
||||||
. getTimeStamp() . '.csv';
|
. getTimeStamp() . '.csv';
|
||||||
|
$filetosend =
|
||||||
|
'/tmp/'
|
||||||
|
. $ftp_filename . '_'
|
||||||
|
. getTimeStamp() . '.csv';
|
||||||
}
|
}
|
||||||
|
|
||||||
my $sthdo = $dbh->do(
|
my $sthdo = $dbh->do(
|
||||||
@@ -108,6 +119,8 @@ sub trxelab {
|
|||||||
|
|
||||||
$dbh->disconnect;
|
$dbh->disconnect;
|
||||||
|
|
||||||
|
system("scp -i /home/battilo/scripts/key -P6229 alex\@" . $server . ":" . $fileelab . " /tmp");
|
||||||
|
|
||||||
$ftp_parm //= "";
|
$ftp_parm //= "";
|
||||||
$ftp_parm =~ s/\s//g;
|
$ftp_parm =~ s/\s//g;
|
||||||
|
|
||||||
@@ -154,7 +167,7 @@ sub trxelab {
|
|||||||
. " - pid $$ >> ftp target: "
|
. " - pid $$ >> ftp target: "
|
||||||
. $ftp->pwd() . ".\n";
|
. $ftp->pwd() . ".\n";
|
||||||
|
|
||||||
$ftp->put($fileelab)
|
$ftp->put($filetosend)
|
||||||
or die getTimeStamp("log")
|
or die getTimeStamp("log")
|
||||||
. " - pid $$ >> Put failed: "
|
. " - pid $$ >> Put failed: "
|
||||||
. $ftp->message . "\n";
|
. $ftp->message . "\n";
|
||||||
|
|||||||
2553
TS_PiniScript.py
Executable file
File diff suppressed because it is too large
2536
TS_PiniScript_old.py
Executable file
File diff suppressed because it is too large
@@ -7,10 +7,16 @@ use File::Basename qw( fileparse );
use File::Path qw( make_path );
use File::Copy qw( move );
use POSIX;
+use DBI;
$SIG{CHLD} = 'IGNORE';

$|++; # Autoflush

+my $username = 'aselar';
+my $password = 'laravel';
+my $db_lar = 'ase_lar';
+my $server = '85.235.153.201';
+
sub getTimeStamp
{ # parm [ts] => timestamp for filename; log => timestamp for log
my $format = "%04d%02d%02d%02d%02d%02d";
@@ -29,23 +35,79 @@ sub getUnitName { # parm => file received ($trfile)
my ($filename) = @_;
my ( $fileDate, $fileTime );
my ( $unittype, $unit );
+my ( $ipdescgd, $ipgd ); # for GD modb
+my ( $subnetdescgd, $subnetgd ); # for GD modb
+my ( $gatewaydescgd, $gatewaygd ); # for GD modb
+my ( $sdpathdescgd, $dfpathdescpathgd, $sdpathgd ); # for GD modb
my ( $filenamecsv, $path, $suffix ) = fileparse( $filename, qr/\.[^.]*/ );
+#print getTimeStamp("log") . " >> DEBUG -> $filenamecsv\n";
if ( $path =~ m/ID\d\d\d\d\/*$/i ) {
my @strings = $path =~ /.*\/(ID.{1,4})\/*$/;
$unit = $strings[0];
}
+elsif ( $path =~ m/TS[^\/]*\/*$/i ) { # total-station folders: for total stations the folder must start with TS followed by any character other than /
+#print getTimeStamp("log") . " >> DEBUG DENTRO!\n";
+my @strings = $path =~ /.*\/(TS[^\/]*)\/*$/;
+$unit = $strings[0];
+}
if ( $filenamecsv =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
-my @strings = $filenamecsv =~
-/(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d\d*$/i ) { # if the _ before the date is missing
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,6})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d\d*$/i ) { # if the _ before the date is missing
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,6})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d\d*$/i ) { # if the _ before the date is missing
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,6})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,7})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+$unittype = $strings[0];
+$unit = $strings[1];
+$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
+$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
+}
+elsif ( $filenamecsv =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d\d*$/i ) { # if the _ before the date is missing
+my @strings = $filenamecsv =~ /(.{1,4})_(.{1,7})_(.{1,6})(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
$unittype = $strings[0];
$unit = $strings[1];
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
$fileTime = $strings[6] . ":" . $strings[7] . ":" . $strings[8];
}
elsif ( $filenamecsv =~ m/^GFLOW_ID\d\d\d\d_DT\d\d\d\d_\d*$/i ) {
-my @strings = $filenamecsv =~
-/(.{1,5})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
+my @strings = $filenamecsv =~ /(.{1,5})_(.{1,6})_(.{1,6})_(.{1,4})(.{1,2})(.{1,2})(.{1,2})(.{1,2})(.{1,2}).*/;
$unittype = $strings[0];
$unit = $strings[1];
$fileDate = $strings[3] . "/" . $strings[4] . "/" . $strings[5];
@@ -75,21 +137,53 @@ sub getUnitName { # parm => file received ($trfile)
$unittype = $strings[0];
$unit = $strings[1];
}
+elsif ( $filenamecsv =~ m/^(\d*-|)health-\d\d\d\d_\d\d_\d\d_\d\d_\d\d_\d\d$/i ) { # sisgeo unit
+#56371-health-2022_12_01_05_31_05.csv
+#56371-readings-2022_12_01_06_00_00.csv
+$unittype = "SISGEO";
+}
+elsif ( $filenamecsv =~ m/^(\d*-|)readings-\d\d\d\d_\d\d_\d\d_\d\d_\d\d_\d\d$/i ) { # sisgeo unit
+#56371-health-2022_12_01_05_31_05.csv
+#56371-readings-2022_12_01_06_00_00.csv
+$unittype = "SISGEO";
+}
+elsif ( $filenamecsv =~ m/^MesspunktePini_/ or $filenamecsv =~ m/^Integrity Monitor / ) { # total-station targets
+$unittype = "STAZIONETOTALE";
+}
+elsif ( $filenamecsv =~ m/^CO_\d\d\d\d_\d_\d\d\d\d\d\d\d\d_\d\d\d\d\d\d$/i ) { # Sorotec Pini
+$unittype = "SOROTECPINI";
+}
+elsif ( $filenamecsv =~ m/^CO_\d\d\d\d_\d_\d\d\d\d\d\d\d\d_\d\d\d\d\d\d_\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i ) { # Sorotec Pini
+$unittype = "SOROTECPINI";
+}
+elsif ( $filenamecsv =~ m/^HIRPINIA/ ) { # Hirpinia
+$unittype = "HIRPINIA";
+}
+elsif ( $filenamecsv =~ m/^\d\d\d\d\d\d\d_\d\d\d\d-\d\d-\d\d_\d\d-\d\d-\d\d_\d.log$/i ) { # VuLink Pini
+$unittype = "VULINK";
+}
else {
open FILE, $filename
or warn getTimeStamp("log")
. " >> Error: opening input file $filename\n";
( $fileDate, $fileTime ) = split( /\s/, <FILE> );
( $unittype, $unit ) = split( /\s/, uc <FILE> );
+if ( $filenamecsv =~ m/^GD\d\d\d\d$/i ) { # for GD modb
+( $ipdescgd, $ipgd ) = split( /\s/, <FILE> );
+( $subnetdescgd, $subnetgd ) = split( /\s/, <FILE> );
+( $gatewaydescgd, $gatewaygd ) = split( /\s/, <FILE> );
+( $sdpathdescgd, $dfpathdescpathgd, $sdpathgd ) = split( /\s/, <FILE> );
+#print getTimeStamp("log"). " >> DEBUG unittype: $unittype unit: $unit SD path: $sdpathgd\n";
+}
$unit =~ s/;+$//;
close FILE;
}
-return ( $unit, $unittype );
+return ( $unit, $unittype, $sdpathgd );
}

my $readingFile;
my $recvOKstr = "OK UPLOAD";
-my @ext = ( ".csv", ".txt" );
+my @ext = ( ".csv", ".txt", ".ods" );

GetOptions( "file=s" => \$readingFile )
or die("Error in command line arguments\n");
@@ -102,32 +196,132 @@ $tail->WatchFile( -file => $readingFile, -type => "UNIX", -timeout => '10' );
|
|||||||
while ( my $line = $tail->GetLine() ) {
|
while ( my $line = $tail->GetLine() ) {
|
||||||
if ( index( $line, $recvOKstr ) != -1 ) {
|
if ( index( $line, $recvOKstr ) != -1 ) {
|
||||||
my (
|
my (
|
||||||
undef, undef, undef, $truser, undef,
|
undef, undef, undef, $truser, undef,
|
||||||
$trip, undef, $trfile, $trstat
|
$trip, undef, $trfile, $trstat
|
||||||
) = split( /[\"\[\]]/, $line );
|
) = split( /[\"\[\]]/, $line );
|
||||||
|
|
||||||
|
if(index($line, "Integrity Monitor [") != -1){#se è stazione di pini
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG line $line\n";
|
||||||
|
#my @sssa = split( /[\"\[\]]/, $line);
|
||||||
|
#foreach my $value (@sssa) {
|
||||||
|
# print "$value\n";
|
||||||
|
#}
|
||||||
|
my (
|
||||||
|
undef, undef, undef, $trusertmp, undef, undef,
|
||||||
|
$triptmp, $trfileuno, $trfiledue, $trfiletre, $trstattmp
|
||||||
|
) = split( /[\"\[\]]/, $line );
|
||||||
|
$trfile = $trfileuno."[".$trfiledue."]".$trfiletre;#per pini ts
|
||||||
|
$truser = $trusertmp;
|
||||||
|
$trip = $triptmp;
|
||||||
|
$trstat = $trstattmp;
|
||||||
|
#print("######\n");
|
||||||
|
#print($trfile."\n");
|
||||||
|
#print("---\n");
|
||||||
|
#print($truser."\n");
|
||||||
|
#print("---\n");
|
||||||
|
#print($trip."\n");
|
||||||
|
#print("---\n");
|
||||||
|
#print($trstat."\n");
|
||||||
|
#print("---\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG line $line\n";
|
||||||
my ( $login, $pass, $uid, $gid ) = getpwnam($truser)
|
my ( $login, $pass, $uid, $gid ) = getpwnam($truser)
|
||||||
or warn getTimeStamp("log") . " >> $truser not in passwd file.\n";
|
or warn getTimeStamp("log") . " >> $truser not in passwd file.\n";
|
||||||
|
|
||||||
|
my $dbh = DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password ) or die getTimeStamp("log") . " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
my $sth = $dbh->prepare("SELECT stop_elab FROM admin_panel LIMIT 1");
|
||||||
|
$sth->execute();
|
||||||
|
my $stop_elab;
|
||||||
|
if (my $row = $sth->fetchrow_hashref) {
|
||||||
|
$stop_elab = $row->{'stop_elab'};
|
||||||
|
} else {
|
||||||
|
$stop_elab = 0;
|
||||||
|
}
|
||||||
|
print getTimeStamp("log") . " >> stop_elab value -> $stop_elab.\n";
|
||||||
|
$sth->finish;
|
||||||
|
if ($stop_elab == 0) {
|
||||||
|
my $sth2 = $dbh->prepare("SELECT id, command, truser FROM elaborazioni_stoppate WHERE eseguito = 0");
|
||||||
|
$sth2->execute();
|
||||||
|
my $sth3 = $dbh->prepare("UPDATE elaborazioni_stoppate SET eseguito = 1 WHERE eseguito = 0");
|
||||||
|
$sth3->execute();
|
||||||
|
$sth3->finish;
|
||||||
|
while (my $row2 = $sth2->fetchrow_hashref) {
|
||||||
|
my $id = $row2->{'id'};
|
||||||
|
my $command = $row2->{'command'};
|
||||||
|
my $truser2 = $row2->{'truser'};
|
||||||
|
my ( $login2, $pass2, $uid2, $gid2 ) = getpwnam($truser2)
|
||||||
|
or warn getTimeStamp("log") . " >> $truser2 not in passwd file.\n";
|
||||||
|
print getTimeStamp("log") . " >> DEBUG exec command -> $command.\n";
|
||||||
|
my $pid = fork();
|
||||||
|
unless ($pid) {
|
||||||
|
setgid($gid2);
|
||||||
|
setuid($uid2);
|
||||||
|
$ENV{"HOME"} = 'home/' . $truser2;
|
||||||
|
exec($command) or die "Failed to exec command for id $id: $!";
|
||||||
|
exit(0);
|
||||||
|
}
|
||||||
|
waitpid($pid, 0);
|
||||||
|
}
|
||||||
|
$sth2->finish;
|
||||||
|
}
|
||||||
|
$dbh->disconnect;
|
||||||
|
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG $trfile \n";
|
||||||
my ( $filename, $path, $suffix ) = fileparse( $trfile, qr/\.[^.]*/ );
|
my ( $filename, $path, $suffix ) = fileparse( $trfile, qr/\.[^.]*/ );
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG $filename --- $path --- $suffix \n";
|
||||||
$path = "/home/" . $truser . "/";
|
$path = "/home/" . $truser . "/";
|
||||||
$trfile = "/home/" . $truser . $trfile;
|
$trfile = "/home/" . $truser . $trfile;
|
||||||
|
|
||||||
if (
|
if (
|
||||||
( grep( /^$suffix$/i, @ext ) )
|
( grep( /^$suffix$/i, @ext ) )
|
||||||
and ( $filename =~
|
and ( $filename =~ m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d\d\d\d|GD\d\d\d\d)$/i
|
||||||
m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d\d\d\d|GD\d\d\d\d)$/i
|
|
||||||
or $filename =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
|
or $filename =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
|
||||||
|
or $filename =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d\d*$/i
|
||||||
|
or $filename =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
|
||||||
|
or $filename =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d\d*$/i
|
||||||
|
or $filename =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
|
||||||
|
or $filename =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d\d*$/i
|
||||||
|
or $filename =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d_\d*$/i
|
||||||
|
or $filename =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d\d*$/i
|
||||||
or $filename =~ m/^D2W_ID\d\d\d\d_DT\d\d\d\d$/i
|
or $filename =~ m/^D2W_ID\d\d\d\d_DT\d\d\d\d$/i
|
||||||
or $filename =~ m/^CR1000X_ID\d\d\d\d_DT\d\d\d\d$/i
|
or $filename =~ m/^CR1000X_ID\d\d\d\d_DT\d\d\d\d$/i
|
||||||
|
or $filename =~ m/^Hortus_ID\d\d\d\d_DT\d\d\d\d_\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
|
||||||
or $filename =~ m/^(\d*_|)G301_ID\d\d\d\d_DT\d\d\d\d$/i
|
or $filename =~ m/^(\d*_|)G301_ID\d\d\d\d_DT\d\d\d\d$/i
|
||||||
or $filename =~ m/^GFLOW_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
|
or $filename =~ m/^GFLOW_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
|
||||||
or $filename =~ m/^measurements_\d\d_\d\d_\d\d\d\d__\d\d_\d\d$/i
|
or $filename =~ m/^measurements_\d\d_\d\d_\d\d\d\d__\d\d_\d\d$/i
|
||||||
or $filename =~ m/^\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i )
|
or $filename =~ m/^\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
|
||||||
|
or $filename =~ m/^(\d*-|)health-\d\d\d\d_\d\d_\d\d_\d\d_\d\d_\d\d$/i
|
||||||
|
or $filename =~ m/^(\d*-|)readings-\d\d\d\d_\d\d_\d\d_\d\d_\d\d_\d\d$/i
|
||||||
|
or $filename =~ m/^MesspunktePini_/
|
||||||
|
or $filename =~ m/^Integrity Monitor/
|
||||||
|
or $filename =~ m/^CO_\d\d\d\d_\d_\d\d\d\d\d\d\d\d_\d\d\d\d\d\d$/i
|
||||||
|
or $filename =~ m/^CO_\d\d\d\d_\d_\d\d\d\d\d\d\d\d_\d\d\d\d\d\d_\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
|
||||||
|
or $filename =~ m/^HIRPINIA/
|
||||||
|
or $filename =~ m/^\d\d\d\d\d\d\d_\d\d\d\d-\d\d-\d\d_\d\d-\d\d-\d\d_\d.log$/i )
|
||||||
)
|
)
|
||||||
|
|
||||||
{
|
{
|
||||||
my ( $unit, $unittype ) = getUnitName($trfile);
|
if($filename =~ m/Hortus_ID\d\d\d\d_DT\d\d\d\d_\d\d\d\d\d\d\d\d\d\d\d\d\d\d/){#mancano i ; nelle prime 6 righe, aggiungo ; alla fine delle prime 6 righe dell'header
|
||||||
print getTimeStamp("log") . " >> Unit $unit - Filename $trfile\n";
|
open my $hortus_file, '+<', $trfile;
|
||||||
|
my @hortus_file_lines = <$hortus_file>;
|
||||||
|
$hortus_file_lines[0] =~ tr /\r/\n/;
|
||||||
|
my @line_splitted = split(/\n/, $hortus_file_lines[0]);
|
||||||
|
for(my $i_var_for=0; $i_var_for < 6; $i_var_for++){
|
||||||
|
unless($line_splitted[$i_var_for] =~ m';'){
|
||||||
|
$line_splitted[$i_var_for] = $line_splitted[$i_var_for].';';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$hortus_file_lines[0] = join("\n", @line_splitted);
|
||||||
|
|
||||||
|
seek($hortus_file, 0, 0);
|
||||||
|
print $hortus_file @hortus_file_lines;
|
||||||
|
truncate($hortus_file, tell($hortus_file));
|
||||||
|
close $hortus_file;
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG Filename $trfile, $hortus_file_lines[0]\n";
|
||||||
|
}
|
||||||
|
my ( $unit, $unittype, $sdpathgd ) = getUnitName($trfile);
|
||||||
|
#print getTimeStamp("log") . " >> Unit $unit - Filename $trfile\n";
|
||||||
|
|
||||||
if ( !-d "$path/log" ) {
|
if ( !-d "$path/log" ) {
|
||||||
make_path "$path/log",
|
make_path "$path/log",
|
||||||
@@ -137,6 +331,9 @@ while ( my $line = $tail->GetLine() ) {
}

my $outpath = $path . $unit;
+if ($trfile =~ /home.*home/ && $trfile !~ /Integrity Monitor/) {
+$outpath = $path . "home/" . $unit;
+}
if ( !-d "$outpath/SQL" ) {
make_path "$outpath/SQL",
{ mode => 0755, owner => $truser, group => $gid }
@@ -152,25 +349,39 @@ while ( my $line = $tail->GetLine() ) {
}
else {
print getTimeStamp("log") . " >> Moved $trfile -> $dest.\n";
-chmod 0664, $dest;
+chmod 0666, $dest;
my @fname = ($dest);
chown $uid, $gid, @fname;
if (
(
-$filename =~
-m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d.*|GD\d*)$/i
+$filename =~ m/^(\d\d_\d\d\d\d_|)(DT\d\d\d\d|LOC\d.*|GD\d*)$/i
or $filename =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
+or $filename =~ m/^G201_ID\d\d\d\d_DT\d\d\d\d\d*$/i
+or $filename =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
+or $filename =~ m/^TLP_ID\d\d\d\d_DT\d\d\d\d\d*$/i
+or $filename =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
+or $filename =~ m/^GS1_ID\d\d\d\d_DT\d\d\d\d\d*$/i
+or $filename =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d_\d*$/i
+or $filename =~ m/^GS1_ID\d\d\d\d\d_DT\d\d\d\d\d*$/i
or $filename =~ m/^D2W_ID\d\d\d\d_DT\d\d\d\d$/i
or $filename =~ m/^CR1000X_ID\d\d\d\d_DT\d\d\d\d$/i
+or $filename =~ m/^Hortus_ID\d\d\d\d_DT\d\d\d\d_\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
or $filename =~ m/^(\d*_|)G301_ID\d\d\d\d_DT\d\d\d\d$/i
or $filename =~ m/^GFLOW_ID\d\d\d\d_DT\d\d\d\d_\d*$/i
-or $filename =~
-m/^measurements_\d\d_\d\d_\d\d\d\d__\d\d_\d\d$/i
-or $filename =~
-m/^\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
+or $filename =~ m/^measurements_\d\d_\d\d_\d\d\d\d__\d\d_\d\d$/i
+or $filename =~ m/^\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
+or $filename =~ m/^(\d*-|)health-\d\d\d\d_\d\d_\d\d_\d\d_\d\d_\d\d$/i
+or $filename =~ m/^(\d*-|)readings-\d\d\d\d_\d\d_\d\d_\d\d_\d\d_\d\d$/i
+or $filename =~ m/^MesspunktePini_/
+or $filename =~ m/^Integrity Monitor /
+or $filename =~ m/^CO_\d\d\d\d_\d_\d\d\d\d\d\d\d\d_\d\d\d\d\d\d$/i
+or $filename =~ m/^CO_\d\d\d\d_\d_\d\d\d\d\d\d\d\d_\d\d\d\d\d\d_\d\d\d\d\d\d\d\d\d\d\d\d\d\d$/i
+or $filename =~ m/^HIRPINIA/
+or $filename =~ m/^\d\d\d\d\d\d\d_\d\d\d\d-\d\d-\d\d_\d\d-\d\d-\d\d_\d.log$/i
)
and ( $unit ne 'ID9999' )
and ( $truser ne 'corra' )
+#and ( $unit ne 'ID0008' )
)
{
print getTimeStamp("log")
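The hunk that follows gates elaboration on an admin_panel.stop_elab flag: when the flag is set, the LoadCSVData.pl command line is queued with INSERT IGNORE into elaborazioni_stoppate (type, command, truser) instead of being exec'd, and queued rows are replayed later by selecting those with eseguito = 0 and marking them eseguito = 1. The commit does not ship a definition for that table, so the sketch below is only an assumed schema consistent with those statements, written with mysql-connector like the other loaders in this commit.

# Assumed schema for the elaborazioni_stoppate queue; only the column names come from
# the INSERT/SELECT/UPDATE statements in this commit, every type and key here is a guess.
from mysql.connector import MySQLConnection
from dbconfig import read_db_config

DDL = """
CREATE TABLE IF NOT EXISTS elaborazioni_stoppate (
    id       INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
    type     TINYINT       NOT NULL DEFAULT 0,
    command  VARCHAR(1024) NOT NULL,
    truser   VARCHAR(64)   NOT NULL,
    eseguito TINYINT       NOT NULL DEFAULT 0,  -- 0 = queued, 1 = replayed
    UNIQUE KEY uq_command_truser (command(255), truser)  -- would let INSERT IGNORE skip duplicates
)
"""

conn = MySQLConnection(**read_db_config())
cur = conn.cursor()
cur.execute(DDL)
conn.commit()
cur.close()
conn.close()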
@@ -193,15 +404,129 @@ while ( my $line = $tail->GetLine() ) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
elsif ( $unittype eq 'SISGEO' ) {
|
||||||
|
exec( $scriptpath . "sisgeoLoadScript.py \"$dest\" >> /home/$truser/log/loadsisgeo.log 2>&1");
|
||||||
|
}
|
||||||
|
elsif ($unittype eq 'STAZIONETOTALE'){
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG file STAZIONETOTALE\n";
|
||||||
|
exec( $scriptpath . "TS_PiniScript.py \"$dest\" >> /home/$truser/log/loadTS.log 2>&1");
|
||||||
|
}
|
||||||
|
elsif ( defined $sdpathgd and ($sdpathgd =~ m'/modb/GD' or $sdpathgd =~ m'/dsas/GD') ){
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG file GD e modb\n";
|
||||||
|
exec( $scriptpath."loadCSVModbGDLora.py \"$dest\" >> /home/$truser/log/loadgdmodblora.log 2>&1");
|
||||||
|
}
|
||||||
|
elsif( $unittype eq 'SOROTECPINI' ){
|
||||||
|
exec( $scriptpath . "sorotecPini.py \"$dest\" >> /home/$truser/log/loadSorotecPini.log 2>&1");
|
||||||
|
}
|
||||||
|
elsif($unittype eq 'HIRPINIA'){
|
||||||
|
exec( $scriptpath . "hirpiniaLoadScript.py \"$dest\" >> /home/$truser/log/loadHirpinia.log 2>&1");
|
||||||
|
}
|
||||||
|
elsif($unittype eq 'VULINK'){
|
||||||
|
exec( $scriptpath . "vulinkScript.py \"$dest\" >> /home/$truser/log/loadVulink.log 2>&1");
|
||||||
|
}
|
||||||
else {
|
else {
|
||||||
exec( $scriptpath
|
if (defined $stop_elab) {
|
||||||
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
if ($stop_elab == 1) {
|
||||||
. "_"
|
print getTimeStamp("log") . " >> DEBUG 1 stop_elab 1\n";
|
||||||
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
#exec( $scriptpath
|
||||||
);
|
# . "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
# . "_"
|
||||||
|
# . "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
#);
|
||||||
|
my $dbh2 = DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password ) or die getTimeStamp("log") . " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
my $insert_sth = $dbh2->prepare("INSERT IGNORE INTO elaborazioni_stoppate (type, command, truser) VALUES (0,?,?)");
|
||||||
|
my $string_to_insert = $scriptpath. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename". "_". "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1";
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 1 $string_to_insert\n";
|
||||||
|
$insert_sth->execute($string_to_insert, $truser);
|
||||||
|
$insert_sth->finish;
|
||||||
|
$dbh2->disconnect;
|
||||||
|
} elsif ($stop_elab == 0) {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 1 stop_elab 0\n";
|
||||||
|
exec( $scriptpath
|
||||||
|
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
. "_"
|
||||||
|
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 1 else\n";
|
||||||
|
exec( $scriptpath
|
||||||
|
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
. "_"
|
||||||
|
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 1 pre-else\n";
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG $dest\n";#debug
|
||||||
|
exec( $scriptpath
|
||||||
|
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
. "_"
|
||||||
|
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
exit(0);
|
exit(0);
|
||||||
}
|
}
|
||||||
|
if ( ( $truser ne 'asega' ) and ( $truser ne 'corra' ) ) {
|
||||||
|
print getTimeStamp("log")
|
||||||
|
. " >> Sender user $truser: duplicate as asega user -> load data into DB.\n";
|
||||||
|
my $realtruser = $truser;
|
||||||
|
$truser = "asega";
|
||||||
|
$outpath =~ s/$realtruser/$truser/;
|
||||||
|
my ( $login, $pass, $uid, $gid ) = getpwnam($truser)
|
||||||
|
or warn getTimeStamp("log")
|
||||||
|
. " >> $truser not in passwd file.\n";
|
||||||
|
unless ( fork() ) {
|
||||||
|
setgid($gid);
|
||||||
|
setuid($uid);
|
||||||
|
$ENV{"HOME"} = 'home/' . $truser;
|
||||||
|
if ( defined $sdpathgd and ($sdpathgd =~ m'/modb/GD' or $sdpathgd =~ m'/dsas/GD') ){
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG file GD e modb duplica\n";#debug
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (defined $stop_elab) {
|
||||||
|
if ($stop_elab == 1) {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 2 stop_elab 1\n";
|
||||||
|
#exec( $scriptpath
|
||||||
|
# . "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
# . "_"
|
||||||
|
# . "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
#);
|
||||||
|
my $dbh2 = DBI->connect( "DBI:mysql:$db_lar;host=$server", $username, $password ) or die getTimeStamp("log") . " - pid $$ >> Could not connect to database: $DBI::errstr";
|
||||||
|
my $insert_sth = $dbh2->prepare("INSERT IGNORE INTO elaborazioni_stoppate (type, command, truser) VALUES (0,?,?)");
|
||||||
|
my $string_to_insert = $scriptpath. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename". "_". "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1";
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 2 $string_to_insert\n";
|
||||||
|
$insert_sth->execute($string_to_insert, $truser);
|
||||||
|
$insert_sth->finish;
|
||||||
|
$dbh2->disconnect;
|
||||||
|
} elsif ($stop_elab == 0) {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 2 stop_elab 0\n";
|
||||||
|
exec( $scriptpath
|
||||||
|
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
. "_"
|
||||||
|
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 2 else\n";
|
||||||
|
exec( $scriptpath
|
||||||
|
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
. "_"
|
||||||
|
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
print getTimeStamp("log") . " >> DEBUG 2 pre-else\n";
|
||||||
|
#print getTimeStamp("log") . " >> DEBUG $dest\n";#debug
|
||||||
|
exec( $scriptpath
|
||||||
|
. "LoadCSVData.pl -f \"$dest\" -s \"$outpath/SQL/$filename"
|
||||||
|
. "_"
|
||||||
|
. "$timestamp.sql\" -d $truser >> /home/$truser/log/loadcsvdata.log 2>&1"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exit(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
15 dbconfig.py Executable file
@@ -0,0 +1,15 @@
from configparser import ConfigParser

def read_db_config(filename='/home/battilo/scripts/config.ini', section='mysql'):
    parser = ConfigParser()
    parser.read(filename)

    db = {}
    if parser.has_section(section):
        items = parser.items(section)
        for item in items:
            db[item[0]] = item[1]
    else:
        raise Exception('{0} not found in the {1} file'.format(section, filename))

    return db
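Every Python loader added in this commit gets its MySQL credentials through read_db_config() above, which by default reads /home/battilo/scripts/config.ini and passes the whole [mysql] section to MySQLConnection(**db_config). The key names below (host, database, user, password) are therefore assumptions: whatever keys the file carries simply have to be valid mysql-connector connection arguments, and the values shown are placeholders.

# Illustrative config.ini for read_db_config(); key names and values are placeholders.
from configparser import ConfigParser
from dbconfig import read_db_config

sample = ConfigParser()
sample['mysql'] = {
    'host': 'localhost',
    'database': 'ase_lar',
    'user': 'aselar',
    'password': 'changeme',
}
with open('/tmp/config.ini', 'w') as fh:
    sample.write(fh)

print(read_db_config(filename='/tmp/config.ini'))
# -> {'host': 'localhost', 'database': 'ase_lar', 'user': 'aselar', 'password': 'changeme'}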
55 gflowScript.py Executable file
@@ -0,0 +1,55 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config

def insertData(dati):
    if dati != "null":
        query = "INSERT INTO ELABDATAGFLOW(unit_name, tool_name, EventDateTime, xacc, yacc, zacc, cacc, battery_level, wifi_signal) " \
                "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"

        try:
            db_config = read_db_config()
            conn = MySQLConnection(**db_config)

            cursor = conn.cursor()
            cursor.executemany(query, dati)

            conn.commit()
        except Error as e:
            print('Error:', e)

        finally:
            os.system("cd /usr/local/matlab_func/; ./run_RSN_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+dati[0][0]+" "+dati[0][1]+"")
            cursor.close()
            conn.close()

def getDataFromFile(pathFile):
    with open(pathFile, 'r') as file:
        data = file.readlines()
        data = [row.rstrip() for row in data]
        header = data[0].split(",")
        unitName = header[0]
        if unitName.startswith('ID'):
            toolName = header[1]
            batteryLevel = header[2].replace("V","")
            wifiSignal = header[3].replace("dBm","")
            data.pop(0)
            dati = []
            for row in data:
                row = row.split(",")
                if row[5] != "0.00":
                    row[0] = row[0].replace("/","-",2).split("-")
                    dateTime = row[0][2] + "-" + row[0][1] + "-" + row[0][0] + " " + row[1]
                    dati.append((unitName,toolName,dateTime,row[2],row[3],row[4],row[5],batteryLevel,wifiSignal))

            return dati
        else:
            return "null"

def main():
    insertData(getDataFromFile(sys.argv[1]))

if __name__ == '__main__':
    main()
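getDataFromFile() above expects a comma-separated file whose first line carries the unit (it must start with ID), the tool, the battery level with a trailing V and the Wi-Fi signal with a trailing dBm, followed by rows of date, time and four acceleration values; rows whose sixth field is "0.00" are dropped and dates are flipped from DD/MM/YYYY to YYYY-MM-DD. A minimal, self-contained check with made-up readings (the file name and values are purely illustrative, and it assumes gflowScript.py and its imports are importable):

# Feed gflowScript.getDataFromFile a tiny made-up CSV and print the tuples it would
# hand to executemany(); nothing is written to the database here.
import tempfile
from gflowScript import getDataFromFile

sample = (
    "ID0001,GFLOW01,12.4V,-67dBm\n"
    "01/02/2024,10:30:00,0.01,0.02,0.98,1.00\n"
    "01/02/2024,10:31:00,0.01,0.02,0.98,0.00\n"  # cacc == "0.00" -> skipped
)
with tempfile.NamedTemporaryFile("w", suffix=".csv", delete=False) as fh:
    fh.write(sample)
    path = fh.name

for record in getDataFromFile(path):
    print(record)
# ('ID0001', 'GFLOW01', '2024-02-01 10:30:00', '0.01', '0.02', '0.98', '1.00', '12.4', '-67')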
63
hirpiniaLoadScript.py
Executable file
63
hirpiniaLoadScript.py
Executable file
@@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
from decimal import Decimal
|
||||||
|
from datetime import datetime
|
||||||
|
import ezodf
|
||||||
|
|
||||||
|
def getDataFromCsv(pathFile):
|
||||||
|
try:
|
||||||
|
folder_path, file_with_extension = os.path.split(pathFile)
|
||||||
|
unit_name = os.path.basename(folder_path)#unitname
|
||||||
|
tool_name, _ = os.path.splitext(file_with_extension)#toolname
|
||||||
|
tool_name = tool_name.replace("HIRPINIA_", "")
|
||||||
|
tool_name = tool_name.split("_")[0]
|
||||||
|
print(unit_name, tool_name)
|
||||||
|
datiRaw = []
|
||||||
|
doc = ezodf.opendoc(pathFile)
|
||||||
|
for sheet in doc.sheets:
|
||||||
|
node_num = sheet.name.replace("S-", "")
|
||||||
|
print(f"Sheet Name: {sheet.name}")
|
||||||
|
rows_to_skip = 2
|
||||||
|
for i, row in enumerate(sheet.rows()):
|
||||||
|
if i < rows_to_skip:
|
||||||
|
continue
|
||||||
|
row_data = [cell.value for cell in row]
|
||||||
|
date_time = datetime.strptime(row_data[0], "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S").split(" ")
|
||||||
|
date = date_time[0]
|
||||||
|
time = date_time[1]
|
||||||
|
val0 = row_data[2]
|
||||||
|
val1 = row_data[4]
|
||||||
|
val2 = row_data[6]
|
||||||
|
val3 = row_data[8]
|
||||||
|
datiRaw.append((unit_name, tool_name, node_num, date, time, -1, -273, val0, val1, val2, val3))
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor(dictionary=True)
|
||||||
|
queryRaw = "insert ignore into RAWDATACOR(UnitName,ToolNameID,NodeNum,EventDate,EventTime,BatLevel,Temperature,Val0,Val1,Val2,Val3) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.executemany(queryRaw, datiRaw)
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
finally:
|
||||||
|
queryMatlab = "select m.matcall from tools as t join units as u on u.id=t.unit_id join matfuncs as m on m.id=t.matfunc where u.name=%s and t.name=%s"
|
||||||
|
cursor.execute(queryMatlab, [unit_name, tool_name])
|
||||||
|
resultMatlab = cursor.fetchall()
|
||||||
|
if(resultMatlab):
|
||||||
|
print("Avvio "+str(resultMatlab[0]["matcall"]))
|
||||||
|
os.system("cd /usr/local/matlab_func/; ./run_"+str(resultMatlab[0]["matcall"])+".sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+str(unit_name)+" "+str(tool_name)+"")
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
except Exception as e:
|
||||||
|
print(f"An unexpected error occurred: {str(e)}\n")
|
||||||
|
|
||||||
|
def main():
|
||||||
|
print("Avviato.")
|
||||||
|
getDataFromCsv(sys.argv[1])
|
||||||
|
print("Finito.")
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
78
inoltroViaApiElab.py
Normal file
78
inoltroViaApiElab.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
from decimal import Decimal
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import requests
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
|
||||||
|
def send_elab_data_as_json(matlab_timestamp, unit, tool, api_url, bearer_token):
|
||||||
|
"""Function to fetch elaborated data from MySQL and send it as a JSON string via API"""
|
||||||
|
try:
|
||||||
|
# Connect to MySQL database
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor(dictionary=True)
|
||||||
|
# Query to select elaborated data
|
||||||
|
query = """SELECT UnitName, ToolNameID, EventTimestamp, NodeNum, NodeType, NodeDepth,
|
||||||
|
XShift, YShift, ZShift, X, Y, Z, HShift, HShiftDir, HShift_local,
|
||||||
|
speed, speed_local, acceleration, acceleration_local,
|
||||||
|
T_node, water_level, pressure, load_value, AlfaX, AlfaY, Area, calcerr
|
||||||
|
FROM elabdataview
|
||||||
|
WHERE UnitName = %s AND ToolNameID = %s AND updated_at > %s
|
||||||
|
ORDER BY ToolNameID DESC, EventTimestamp, CONVERT(NodeNum, UNSIGNED INTEGER) DESC"""
|
||||||
|
|
||||||
|
# Execute query
|
||||||
|
cursor.execute(query, (unit, tool, matlab_timestamp))
|
||||||
|
resultData = cursor.fetchall()
|
||||||
|
|
||||||
|
# Close cursor and connection
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
# Convert Decimal objects to float in resultData
|
||||||
|
for row in resultData:
|
||||||
|
for key, value in row.items():
|
||||||
|
if isinstance(value, Decimal):
|
||||||
|
row[key] = format(float(value), '.6f') # Convert Decimal to float with full precision
|
||||||
|
#print(resultData)
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
'json': json.dumps(resultData) # Convert result to JSON string
|
||||||
|
}
|
||||||
|
print(payload)
|
||||||
|
#with open("payload.json", "w") as file:
|
||||||
|
# json.dump(payload, file, indent=4)
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add Authorization header if the bearer token is provided
|
||||||
|
if bearer_token:
|
||||||
|
headers['Authorization'] = f'Bearer {bearer_token}'
|
||||||
|
else:
|
||||||
|
print("No Bearer token provided. Authorization header will be omitted.")
|
||||||
|
|
||||||
|
# Send the JSON data via POST request
|
||||||
|
response = requests.post(api_url, headers=headers, json=payload)
|
||||||
|
response.raise_for_status() # Raise an error for bad responses (4xx or 5xx)
|
||||||
|
|
||||||
|
# Log the response
|
||||||
|
print(f"Elab Data Response: {response.status_code} - {response.text}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error occurred while sending elab data as JSON: {e}")
|
||||||
|
|
||||||
|
def main():
|
||||||
|
if len(sys.argv) != 6:
|
||||||
|
print("Usage: python3 inoltroViaApiElab.py <matlab_timestamp> <unit> <tool> <api_url> <bearer_token>")
|
||||||
|
sys.exit(1)
|
||||||
|
matlab_timestamp = sys.argv[1]
|
||||||
|
unit = sys.argv[2]
|
||||||
|
tool = sys.argv[3]
|
||||||
|
api_url = sys.argv[4]
|
||||||
|
bearer_token = sys.argv[5]
|
||||||
|
send_elab_data_as_json(matlab_timestamp, unit, tool, api_url, bearer_token)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
47 inoltroViaApiRaw.py Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env python3
import sys
import json
import requests

def send_raw_csv_as_json(filename, api_url, bearer_token):
    # Function to send raw CSV data as a JSON string
    try:
        # Read the CSV file and prepare it as a JSON string
        with open(filename, 'r') as file:
            csv_data = file.read()  # Reading the CSV content as a string

        payload = {
            'json': csv_data  # The key `json` with the CSV content as a string
        }
        print(payload)

        headers = {
            'Content-Type': 'application/json'  # The API expects JSON payload
        }

        # Add the Authorization header if the bearer token is not empty
        if bearer_token:
            headers['Authorization'] = f'Bearer {bearer_token}'
        else:
            print("No Bearer token provided. Authorization header will be omitted.")

        # Send the JSON data via POST request
        response = requests.post(api_url, headers=headers, json=payload)
        response.raise_for_status()  # Raise an error for bad responses (4xx or 5xx)

        # Log the response
        print(f"Raw Data Response: {response.status_code} - {response.text}")
    except Exception as e:
        print(f"Error occurred while sending raw file as JSON: {e}")

def main():
    if len(sys.argv) != 4:
        print("Usage: python3 inoltroViaApiRaw.py <csv_filename> <api_url> <bearer_token>")
        sys.exit(1)
    filename = sys.argv[1]
    api_url = sys.argv[2]
    bearer_token = sys.argv[3]
    send_raw_csv_as_json(filename, api_url, bearer_token)

if __name__ == "__main__":
    main()
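The usage string above is the whole contract: python3 inoltroViaApiRaw.py <csv_filename> <api_url> <bearer_token>. The same sender can also be driven directly from Python, as in this sketch; the file path, endpoint and token are placeholders, not values taken from the commit.

# Calling the raw-CSV forwarder directly; all three arguments are placeholders.
from inoltroViaApiRaw import send_raw_csv_as_json

send_raw_csv_as_json(
    "/tmp/DT0001_20240201103000.csv",           # a raw CSV produced by the loaders (example name)
    "https://example.invalid/api/raw-upload",   # destination API endpoint (placeholder)
    "REPLACE_WITH_BEARER_TOKEN",                # an empty string skips the Authorization header
)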
180
loadCSVModbGDLora.py
Executable file
180
loadCSVModbGDLora.py
Executable file
@@ -0,0 +1,180 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from datetime import datetime
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
|
||||||
|
def insertData(dati):
|
||||||
|
print("DATI:")
|
||||||
|
print(dati)
|
||||||
|
if dati != "null" and dati is not None:
|
||||||
|
print(dati[0][1])
|
||||||
|
print(dati[0][0])
|
||||||
|
matlab_func = ""
|
||||||
|
conn_via_radio = 0
|
||||||
|
operation_mode = 0
|
||||||
|
queryMatFunc = "select m.matcall, t.conn_via_radio, t.operation_mode from matfuncs as m " \
|
||||||
|
"inner join tools as t on t.matfunc = m.id " \
|
||||||
|
"inner join units as u on u.id = t.unit_id " \
|
||||||
|
"inner join statustools as s on t.statustool_id = s.id " \
|
||||||
|
"where t.name = '"+dati[0][1]+"' and u.name = '"+dati[0][0]+"'"
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute(queryMatFunc)
|
||||||
|
|
||||||
|
result = cursor.fetchall()
|
||||||
|
matlab_func = result[0][0]
|
||||||
|
conn_via_radio = result[0][1]
|
||||||
|
operation_mode = result[0][2]
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
|
||||||
|
if conn_via_radio == 1:
|
||||||
|
if operation_mode == 1:#listening mode(no rssi 'al momento')
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule, Val0, RssiModule) " \
|
||||||
|
"VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.executemany(query, dati)
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> Inserito in RAWDATACOR "+dati[0][0]+" "+dati[0][1])
|
||||||
|
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
if matlab_func != "":
|
||||||
|
print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> Avvio "+matlab_func)
|
||||||
|
os.system("cd /usr/local/matlab_func/; ./run_"+matlab_func+".sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+dati[0][0]+" "+dati[0][1]+" >> /home/asega/log/loadgdmodblora.log 2>&1")
|
||||||
|
else:#standard no val0 con rssi
|
||||||
|
try:
|
||||||
|
for drow in dati:
|
||||||
|
unit_name = drow[0]
|
||||||
|
tool_name = drow[1]
|
||||||
|
date = drow[3]
|
||||||
|
time = drow[4]
|
||||||
|
batM = drow[7]
|
||||||
|
tempM = drow[8]
|
||||||
|
rssiM = drow[10].replace("dB", "")
|
||||||
|
|
||||||
|
query = "UPDATE RAWDATACOR SET BatLevelModule=%s, TemperatureModule=%s, RssiModule=%s where UnitName=%s and ToolNameID=%s and EventDate=%s and EventTime=%s"
|
||||||
|
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute(query, [batM, tempM, rssiM, unit_name, tool_name, date, time])
|
||||||
|
conn.commit()
|
||||||
|
print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> Aggiornato in RAWDATACOR "+dati[0][0]+" "+dati[0][1])
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
finally:
|
||||||
|
if matlab_func != "":
|
||||||
|
print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> Avvio "+matlab_func)
|
||||||
|
os.system("cd /usr/local/matlab_func/; ./run_"+matlab_func+".sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+dati[0][0]+" "+dati[0][1]+" >> /home/asega/log/loadgdmodblora.log 2>&1")
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
print("-------")
|
||||||
|
|
||||||
|
def getDataFromFile(pathFile):
|
||||||
|
print(pathFile)
|
||||||
|
path = pathFile.split("/")
|
||||||
|
unitname = path[len(path) - 2]
|
||||||
|
toolname = path[len(path) - 1].split("_")[0]
|
||||||
|
if(toolname.startswith("GD") and unitname):
|
||||||
|
dati = []
|
||||||
|
print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> apro "+pathFile)
|
||||||
|
with open(pathFile, 'r') as file:
|
||||||
|
data = file.readlines()
|
||||||
|
data = [row.rstrip() for row in data]
|
||||||
|
if("/modb/" in data[5] or "/dsas/" in data[5]): #gd e modb (lora)
|
||||||
|
#print("modb")
|
||||||
|
pathFile = pathFile.replace("GD", "DT")
|
||||||
|
print(pathFile)
|
||||||
|
pathFileName = pathFile.split("_")[0]
|
||||||
|
pathFileDate = int(pathFile.split("_")[1].split(".")[0])
|
||||||
|
pathFileExt = pathFile.split("_")[1].split(".")[1]
|
||||||
|
for xsec in range(1, 60):
|
||||||
|
if os.path.isfile(pathFile):
|
||||||
|
#print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> file DT trovato")
|
||||||
|
#print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> apro "+pathFile)
|
||||||
|
with open(pathFile, 'r') as fileDT:
|
||||||
|
dataDT = fileDT.readlines()
|
||||||
|
dataDT = [rowDT.rstrip() for rowDT in dataDT]
|
||||||
|
dataDT = dataDT[7:]
|
||||||
|
nodenum = 2
|
||||||
|
toolname = toolname.replace("GD", "DT")
|
||||||
|
data = data[7:]
|
||||||
|
for index, row in enumerate(data):
|
||||||
|
#print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> index " + str(index))
|
||||||
|
#print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> row " + row)
|
||||||
|
try:
|
||||||
|
lineDT = dataDT[index].split(";")
|
||||||
|
if(lineDT[1] == '' or lineDT[1] == '-.-'):
|
||||||
|
tempindex = index
|
||||||
|
for ind in range(len(data)):
|
||||||
|
tempindex-=1
|
||||||
|
if(tempindex >= 0):
|
||||||
|
lineDT[1] = dataDT[tempindex].split(";")[1]
|
||||||
|
else:
|
||||||
|
lineDT[1] = 12.0
|
||||||
|
if(lineDT[1] != '' and lineDT[1] != '-.-'):
|
||||||
|
break
|
||||||
|
print(lineDT[1])
|
||||||
|
if(lineDT[2] == '' or lineDT[2] == '-.-'):
|
||||||
|
tempindex = index
|
||||||
|
for ind in range(len(data)):
|
||||||
|
tempindex-=1
|
||||||
|
if(tempindex >= 0):
|
||||||
|
lineDT[2] = dataDT[tempindex].split(";")[2]
|
||||||
|
else:
|
||||||
|
lineDT[2] = 20.0
|
||||||
|
if(lineDT[2] != '' and lineDT[2] != '-.-'):
|
||||||
|
break
|
||||||
|
batUnit = float(lineDT[1])
|
||||||
|
tempUnit = float(lineDT[2])
|
||||||
|
line = row.split(";")
|
||||||
|
if(line[2] == '' and line[3] == ''):#se bat e temp sono vuoti
|
||||||
|
continue
|
||||||
|
dt = lineDT[0].split(" ")
|
||||||
|
if("/" in dt[0]):
|
||||||
|
try:
|
||||||
|
date = str(datetime.strptime(dt[0], "%d/%m/%Y").strftime("%Y-%m-%d"))
|
||||||
|
#print("The string is a date with format " + "%d/%m/%Y")
|
||||||
|
except ValueError:
|
||||||
|
print()
|
||||||
|
#print("The string IS NOT a date with format " + "%d/%m/%Y")#debug
|
||||||
|
try:
|
||||||
|
date = str(datetime.strptime(dt[0], "%Y/%m/%d").strftime("%Y-%m-%d"))
|
||||||
|
#print("The string is a date with format " + "%Y/%m/%d")
|
||||||
|
except ValueError:
|
||||||
|
print()
|
||||||
|
#print("The string IS NOT a date with format " + "%Y/%m/%d")#debug
|
||||||
|
else:
|
||||||
|
date = dt[0]
|
||||||
|
time = dt[1]
|
||||||
|
batlevel = float(line[2])
|
||||||
|
temp = float(line[3])
|
||||||
|
if len(line) == 6:
|
||||||
|
if line[4] == "|":
|
||||||
|
val0 = line[5]
|
||||||
|
dati.append((unitname, toolname, nodenum, date, time, batUnit, tempUnit, batlevel, temp, val0, None))
|
||||||
|
elif len(line) == 5:
|
||||||
|
rssi = line[4]
|
||||||
|
dati.append((unitname, toolname, nodenum, date, time, batUnit, tempUnit, batlevel, temp, None, rssi))
|
||||||
|
except IndexError:
|
||||||
|
print("exception: different lenght break")
|
||||||
|
break
|
||||||
|
#print("The string IS NOT a date with format " + "%d/%m/%Y")#debug
|
||||||
|
return dati
|
||||||
|
pathFileDate -= 1
|
||||||
|
pathFile = pathFileName + "_" + str(pathFileDate)+ "." + pathFileExt
|
||||||
|
print(str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+"-> CHECK "+pathFile)
|
||||||
|
|
||||||
|
def main():
|
||||||
|
insertData(getDataFromFile(sys.argv[1]))
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
83
nesaScript.py
Executable file
83
nesaScript.py
Executable file
@@ -0,0 +1,83 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
|
||||||
|
def insertData(dati):
|
||||||
|
#print(dati)
|
||||||
|
if dati != "null":
|
||||||
|
query = "INSERT INTO ELABDATANESA(UnitName, ToolNameID, NodeNum, EventTimestamp, dataJSON) " \
|
||||||
|
"VALUES(%s,%s,%s,%s,%s)"
|
||||||
|
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.executemany(query, dati)
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
os.system("cd /usr/local/matlab_func/; ./run_Tilt_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+dati[0][0]+" "+dati[0][1]+"")
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
def getDataFromCsv(pathFile):
|
||||||
|
with open(pathFile, 'r') as file:
|
||||||
|
data = file.readlines()
|
||||||
|
data = [row.rstrip() for row in data]
|
||||||
|
#data.pop(0) #rimuove header
|
||||||
|
dati = []
|
||||||
|
i = 0
|
||||||
|
unit = ""
|
||||||
|
tool = ""
|
||||||
|
for row in data:
|
||||||
|
row = row.split(",")
|
||||||
|
if i == 0:
|
||||||
|
serial_number = row[1]
|
||||||
|
if serial_number[0] == "0":
|
||||||
|
serial_number = serial_number[1:] #rimuove primo char (se 0) del id nesa
|
||||||
|
query = "SELECT unit_name, tool_name FROM nesa_tools WHERE serial_number='"+serial_number+"'"
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute(query)
|
||||||
|
|
||||||
|
result = cursor.fetchall()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
unit = result[0][0]
|
||||||
|
tool = result[0][1]
|
||||||
|
#print(result[0][0])
|
||||||
|
#print(result[0][1])
|
||||||
|
date = row[7]+"-"+row[6]+"-"+row[5]+" "+row[2]+":"+row[3]+":"+row[4]
|
||||||
|
nodeNum = 1
|
||||||
|
dataJSON = '{ "battery":"'+row[58]+'", "solarPanel":"'+row[61]+'", "tempAvg":"'+row[10]+'", "tempMin":"'+row[13]+'", "tempMax":"'+row[16]+'"}'
|
||||||
|
dati.append((unit, tool, nodeNum, date, dataJSON))
|
||||||
|
nodeNum = 2
|
||||||
|
dataJSON = '{ "battery":"'+row[58]+'", "solarPanel":"'+row[61]+'", "humAvg":"'+row[19]+'", "humMin":"'+row[22]+'", "humMax":"'+row[25]+'"}'
|
||||||
|
dati.append((unit, tool, nodeNum, date, dataJSON))
|
||||||
|
nodeNum = 3
|
||||||
|
dataJSON = '{ "battery":"'+row[58]+'", "solarPanel":"'+row[61]+'", "windDirAvg":"'+row[28]+'", "windDirMin":"'+row[31]+'", "windDirMax":"'+row[34]+'", "windSpeedAvg":"'+row[37]+'", "windSpeedMin":"'+row[40]+'", "windSpeedMax":"'+row[43]+'"}'
|
||||||
|
dati.append((unit, tool, nodeNum, date, dataJSON))
|
||||||
|
nodeNum = 4
|
||||||
|
dataJSON = '{ "battery":"'+row[58]+'", "solarPanel":"'+row[61]+'", "rain":"'+row[46]+'"}'
|
||||||
|
dati.append((unit, tool, nodeNum, date, dataJSON))
|
||||||
|
nodeNum = 5
|
||||||
|
dataJSON = '{ "battery":"'+row[58]+'", "solarPanel":"'+row[61]+'", "pressureAvg":"'+row[49]+'", "pressureMin":"'+row[52]+'", "pressureMax":"'+row[55]+'"}'
|
||||||
|
dati.append((unit, tool, nodeNum, date, dataJSON))
|
||||||
|
i+=1
|
||||||
|
|
||||||
|
return dati
|
||||||
|
|
||||||
|
def main():
|
||||||
|
insertData(getDataFromCsv(sys.argv[1]))
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
213
nesaScript_new.py
Executable file
213
nesaScript_new.py
Executable file
@@ -0,0 +1,213 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
def insertData(dati):
|
||||||
|
print("dati: ",dati)
|
||||||
|
if dati != "null" and dati is not None:
|
||||||
|
query = "INSERT IGNORE INTO ELABDATANESA(UnitName, ToolNameID, NodeNum, EventTimestamp, dataJSON) " \
|
||||||
|
"VALUES(%s,%s,%s,%s,%s)"
|
||||||
|
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.executemany(query, dati)
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
os.system("cd /usr/local/matlab_func/; ./run_Tilt_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+dati[0][0]+" "+dati[0][1]+"")
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
def getDataFromCsv(pathFile):
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor(dictionary=True)
|
||||||
|
with open(pathFile, 'r') as file:
|
||||||
|
data = file.readlines()
|
||||||
|
data = [row.rstrip() for row in data]
|
||||||
|
#data.pop(0) #rimuove header
|
||||||
|
dati = []
|
||||||
|
i = 0
|
||||||
|
unit = ""
|
||||||
|
tool = ""
|
||||||
|
#query = "SELECT tool_id, nodetype_id, nt.type as nodetype_name, num FROM nodes as n join nodetypes as nt on nt.id=n.nodetype_id join tools as t on t.id=n.tool_id join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
|
||||||
|
#cursor.execute(query, [unit, tool])
|
||||||
|
#resultNodi = cursor.fetchall()
|
||||||
|
#resultNodi_dict = {item['nodetype_id']: item for item in resultNodi}
|
||||||
|
#print(resultNodi_dict)
|
||||||
|
for row in data:
|
||||||
|
row = row.split(",")
|
||||||
|
#print(row)
|
||||||
|
#print("-------")
|
||||||
|
if i == 0:
|
||||||
|
serial_number = row[1]
|
||||||
|
if serial_number[0] == "0":
|
||||||
|
serial_number = serial_number[1:] #rimuove primo char (se 0) del id nesa
|
||||||
|
query = "SELECT unit_name, tool_name FROM nesa_tools WHERE serial_number='"+serial_number+"'"
|
||||||
|
cursor.execute(query)
|
||||||
|
result = cursor.fetchall()
|
||||||
|
if(len(result) > 0):
|
||||||
|
unit = result[0]["unit_name"]
|
||||||
|
tool = result[0]["tool_name"]
|
||||||
|
query = "SELECT tool_id, nodetype_id, nt.type as nodetype_name, num FROM nodes as n join nodetypes as nt on nt.id=n.nodetype_id join tools as t on t.id=n.tool_id join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
|
||||||
|
cursor.execute(query, [unit, tool])
|
||||||
|
resultNodi = cursor.fetchall()
|
||||||
|
resultNodi_dict = {item['nodetype_id']: item for item in resultNodi}
|
||||||
|
#print(result[0][0])
|
||||||
|
#print(result[0][1])
|
||||||
|
if(unit != "" and tool != ""):
|
||||||
|
date = row[7]+"-"+row[6]+"-"+row[5]+" "+row[2]+":"+row[3]+":"+row[4]
|
||||||
|
row = row[:-1]
|
||||||
|
chunks = [row[i:i+3] for i in range(8, len(row), 3)]# Start from index 8 and split into chunks of 3
|
||||||
|
battery = -1
|
||||||
|
solarPanel = -1
|
||||||
|
grouped = defaultdict(list)
|
||||||
|
for chunk in chunks:
|
||||||
|
key = chunk[0] # First value as the key
|
||||||
|
grouped[key].append(chunk[1:]) # Store the rest of the values
|
||||||
|
#print(grouped)
|
||||||
|
if("108" in grouped):
|
||||||
|
battery = grouped["108"][0][1]
|
||||||
|
if("158" in grouped):
|
||||||
|
solarPanel = grouped["158"][0][1]
|
||||||
|
if("1" in grouped):
|
||||||
|
nodetype_id_to_find = 4
|
||||||
|
res = resultNodi_dict.get(nodetype_id_to_find) # Returns None if not found
|
||||||
|
if(res is not None):
|
||||||
|
node_num = res["num"]
|
||||||
|
tempAvg = 0
|
||||||
|
tempMin = 0
|
||||||
|
tempMax = 0
|
||||||
|
for item in grouped["1"]:
|
||||||
|
key, value = item
|
||||||
|
if key == '2':
|
||||||
|
tempAvg = value
|
||||||
|
elif key == '3':
|
||||||
|
tempMin = value
|
||||||
|
elif key == '4':
|
||||||
|
tempMax = value
|
||||||
|
#print("therm: ", tempAvg, tempMin, tempMax, " nodenum: ", node_num)
|
||||||
|
dataJSON = '{ "battery":"'+battery+'", "solarPanel":"'+solarPanel+'", "tempAvg":"'+tempAvg+'", "tempMin":"'+tempMin+'", "tempMax":"'+tempMax+'"}'
|
||||||
|
dati.append((unit, tool, node_num, date, dataJSON))
|
||||||
|
if("2" in grouped):
|
||||||
|
nodetype_id_to_find = 35
|
||||||
|
res = resultNodi_dict.get(nodetype_id_to_find) # Returns None if not found
|
||||||
|
if(res is not None):
|
||||||
|
node_num = res["num"]
|
||||||
|
humidityAvg = 0
|
||||||
|
humidityMin = 0
|
||||||
|
humidityMax = 0
|
||||||
|
for item in grouped["2"]:
|
||||||
|
key, value = item
|
||||||
|
if key == '2':
|
||||||
|
humidityAvg = value
|
||||||
|
elif key == '3':
|
||||||
|
humidityMin = value
|
||||||
|
elif key == '4':
|
||||||
|
humidityMax = value
|
||||||
|
#print("humidity: ", humidityAvg, humidityMin, humidityMax, " nodenum: ", node_num)
|
||||||
|
dataJSON = '{ "battery":"'+battery+'", "solarPanel":"'+solarPanel+'", "humAvg":"'+humidityAvg+'", "humMin":"'+humidityMin+'", "humMax":"'+humidityMax+'"}'
|
||||||
|
dati.append((unit, tool, node_num, date, dataJSON))
|
||||||
|
if("3" in grouped):
|
||||||
|
nodetype_id_to_find = 69
|
||||||
|
res = resultNodi_dict.get(nodetype_id_to_find) # Returns None if not found
|
||||||
|
if(res is not None):
|
||||||
|
node_num = res["num"]
|
||||||
|
pyraAvg = 0
|
||||||
|
pyraMin = 0
|
||||||
|
pyraMax = 0
|
||||||
|
for item in grouped["3"]:
|
||||||
|
key, value = item
|
||||||
|
if key == '2':
|
||||||
|
pyraAvg = value
|
||||||
|
elif key == '3':
|
||||||
|
pyraMin = value
|
||||||
|
elif key == '4':
|
||||||
|
pyraMax = value
|
||||||
|
#print("pyra: ", pyroAvg, pyroMin, pyroMax, " nodenum: ", node_num)
|
||||||
|
dataJSON = '{ "battery":"'+battery+'", "solarPanel":"'+solarPanel+'", "pyraAvg":"'+pyraAvg+'", "pyraMin":"'+pyraMin+'", "pyraMax":"'+pyraMax+'"}'
|
||||||
|
dati.append((unit, tool, node_num, date, dataJSON))
|
||||||
|
if("4" in grouped):
|
||||||
|
nodetype_id_to_find = 55
|
||||||
|
res = resultNodi_dict.get(nodetype_id_to_find) # Returns None if not found
|
||||||
|
if(res is not None):
|
||||||
|
node_num = res["num"]
|
||||||
|
windDirAvg = 0
|
||||||
|
windDirMin = 0
|
||||||
|
windDirMax = 0
|
||||||
|
windSpeedAvg = 0
|
||||||
|
windSpeedMin = 0
|
||||||
|
windSpeedMax = 0
|
||||||
|
for item in grouped["4"]:
|
||||||
|
key, value = item
|
||||||
|
if key == '2':
|
||||||
|
windDirAvg = value
|
||||||
|
elif key == '9':
|
||||||
|
windDirMin = value
|
||||||
|
elif key == '8':
|
||||||
|
windDirMax = value
|
||||||
|
for item in grouped["9"]:
|
||||||
|
key, value = item
|
||||||
|
if key == '2':
|
||||||
|
windSpeedAvg = value
|
||||||
|
elif key == '3':
|
||||||
|
windSpeedMin = value
|
||||||
|
elif key == '4':
|
||||||
|
windSpeedMax = value
|
||||||
|
#print("windDir: ", windDirAvg, windDirMin, windDirMax, "-windSpeed: ", windSpeedAvg, windSpeedMin, windSpeedMax, " nodenum: ", node_num)
|
||||||
|
dataJSON = '{ "battery":"'+battery+'", "solarPanel":"'+solarPanel+'", "windDirAvg":"'+windDirAvg+'", "windDirMin":"'+windDirMin+'", "windDirMax":"'+windDirMax+'", "windSpeedAvg":"'+windSpeedAvg+'", "windSpeedMin":"'+windSpeedMin+'", "windSpeedMax":"'+windSpeedMax+'"}'
|
||||||
|
dati.append((unit, tool, node_num, date, dataJSON))
|
||||||
|
if("10" in grouped):
|
||||||
|
nodetype_id_to_find = 27
|
||||||
|
res = resultNodi_dict.get(nodetype_id_to_find) # Returns None if not found
|
||||||
|
if(res is not None):
|
||||||
|
node_num = res["num"]
|
||||||
|
rain = 0
|
||||||
|
for item in grouped["10"]:
|
||||||
|
key, value = item
|
||||||
|
rain = value
|
||||||
|
#print("rain: ", rain, " nodenum: ", node_num)
|
||||||
|
dataJSON = '{ "battery":"'+battery+'", "solarPanel":"'+solarPanel+'", "rain":"'+rain+'"}'
|
||||||
|
dati.append((unit, tool, node_num, date, dataJSON))
|
||||||
|
if("2013" in grouped):
|
||||||
|
nodetype_id_to_find = 3
|
||||||
|
res = resultNodi_dict.get(nodetype_id_to_find) # Returns None if not found
|
||||||
|
if(res is not None):
|
||||||
|
node_num = res["num"]
|
||||||
|
pressureAvg = 0
|
||||||
|
pressureMin = 0
|
||||||
|
pressureMax = 0
|
||||||
|
for item in grouped["2013"]:
|
||||||
|
key, value = item
|
||||||
|
if key == '2':
|
||||||
|
pressureAvg = value
|
||||||
|
elif key == '3':
|
||||||
|
pressureMin = value
|
||||||
|
elif key == '4':
|
||||||
|
pressureMax = value
|
||||||
|
#print("pressure: ", pressureAvg, pressureMin, pressureMax, " nodenum: ", node_num)
|
||||||
|
dataJSON = '{ "battery":"'+battery+'", "solarPanel":"'+solarPanel+'", "pressureAvg":"'+pressureAvg+'", "pressureMin":"'+pressureMin+'", "pressureMax":"'+pressureMax+'"}'
|
||||||
|
dati.append((unit, tool, node_num, date, dataJSON))
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
i+=1
|
||||||
|
print(dati)
|
||||||
|
return dati
|
||||||
|
except Error as e:
|
||||||
|
print("Error: ", e)
|
||||||
|
|
||||||
|
def main():
|
||||||
|
insertData(getDataFromCsv(sys.argv[1]))
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
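Editor's note on the dataJSON strings assembled above: they are built by string concatenation, which only stays valid while every value is already a string and contains no quote characters. A minimal sketch of the same payload built with json.dumps is shown below; the field names are taken from the code above, while the helper name build_payload is illustrative.

    import json

    def build_payload(battery, solarPanel, **readings):
        # readings carries the per-group fields, e.g. tempAvg/tempMin/tempMax or rain
        payload = {"battery": battery, "solarPanel": solarPanel}
        payload.update(readings)
        # json.dumps handles quoting and escaping, so non-string values are also safe
        return json.dumps(payload)

    # dataJSON = build_payload(battery, solarPanel, tempAvg=tempAvg, tempMin=tempMin, tempMax=tempMax)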
37
run_ATD_lnx.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# script for execution of deployed applications
|
||||||
|
#
|
||||||
|
# Sets up the MATLAB Runtime environment for the current $ARCH and executes
|
||||||
|
# the specified command.
|
||||||
|
#
|
||||||
|
exe_name=$0
|
||||||
|
exe_dir=`dirname "$0"`
|
||||||
|
#echo "------------------------------------------"
|
||||||
|
if [ "x$1" = "x" ]; then
|
||||||
|
echo Usage:
|
||||||
|
echo $0 \<deployedMCRroot\> args
|
||||||
|
else
|
||||||
|
# echo Setting up environment variables
|
||||||
|
MCRROOT="$1"
|
||||||
|
# echo ---
|
||||||
|
LD_LIBRARY_PATH=.:${MCRROOT}/runtime/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/bin/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/os/glnxa64;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/opengl/lib/glnxa64;
|
||||||
|
export LD_LIBRARY_PATH;
|
||||||
|
# echo LD_LIBRARY_PATH is ${LD_LIBRARY_PATH};
|
||||||
|
shift 1
|
||||||
|
args=
|
||||||
|
out=
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
token=$1
|
||||||
|
args="${args} \"${token}\""
|
||||||
|
out="${out}${token}"
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
out="${out}_output_error.txt"
|
||||||
|
#eval "\"${exe_dir}/ATD_lnx\"" $args > /dev/null 2>&1
|
||||||
|
eval "\"${exe_dir}/ATD_lnx\"" $args > /tmp/${out} 2>&1
|
||||||
|
fi
|
||||||
|
exit
|
||||||
|
|
||||||
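Editor's note: the four run_*_lnx.sh wrappers added below are identical apart from the deployed binary name; each sets up LD_LIBRARY_PATH for the MATLAB Runtime and forwards its arguments. A minimal sketch of invoking such a wrapper from Python (paths follow the commented-out os.system call in sisgeoLoadScript.py further down; the function name is illustrative):

    import subprocess

    def run_matlab_func(script, unit, tool,
                        mcr_root="/usr/local/MATLAB/MATLAB_Runtime/v93/",
                        func_dir="/usr/local/matlab_func/"):
        # e.g. script = "run_ATD_lnx.sh"; the wrapper expects the MCR root first,
        # then its own arguments (here the unit and tool names)
        return subprocess.run([func_dir + script, mcr_root, unit, tool],
                              cwd=func_dir, capture_output=True, text=True)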
37
run_Musa_lnx.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# script for execution of deployed applications
|
||||||
|
#
|
||||||
|
# Sets up the MATLAB Runtime environment for the current $ARCH and executes
|
||||||
|
# the specified command.
|
||||||
|
#
|
||||||
|
exe_name=$0
|
||||||
|
exe_dir=`dirname "$0"`
|
||||||
|
#echo "------------------------------------------"
|
||||||
|
if [ "x$1" = "x" ]; then
|
||||||
|
echo Usage:
|
||||||
|
echo $0 \<deployedMCRroot\> args
|
||||||
|
else
|
||||||
|
# echo Setting up environment variables
|
||||||
|
MCRROOT="$1"
|
||||||
|
# echo ---
|
||||||
|
LD_LIBRARY_PATH=.:${MCRROOT}/runtime/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/bin/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/os/glnxa64;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/opengl/lib/glnxa64;
|
||||||
|
export LD_LIBRARY_PATH;
|
||||||
|
# echo LD_LIBRARY_PATH is ${LD_LIBRARY_PATH};
|
||||||
|
shift 1
|
||||||
|
args=
|
||||||
|
out=
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
token=$1
|
||||||
|
args="${args} \"${token}\""
|
||||||
|
out="${out}${token}"
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
out="${out}_output_error.txt"
|
||||||
|
#eval "\"${exe_dir}/MUSA_lnx\"" $args > /dev/null 2>&1
|
||||||
|
eval "\"${exe_dir}/MUSA_lnx\"" $args > /tmp/${out} 2>&1
|
||||||
|
fi
|
||||||
|
exit
|
||||||
|
|
||||||
37
run_RSN_lnx.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# script for execution of deployed applications
|
||||||
|
#
|
||||||
|
# Sets up the MATLAB Runtime environment for the current $ARCH and executes
|
||||||
|
# the specified command.
|
||||||
|
#
|
||||||
|
exe_name=$0
|
||||||
|
exe_dir=`dirname "$0"`
|
||||||
|
#echo "------------------------------------------"
|
||||||
|
if [ "x$1" = "x" ]; then
|
||||||
|
echo Usage:
|
||||||
|
echo $0 \<deployedMCRroot\> args
|
||||||
|
else
|
||||||
|
# echo Setting up environment variables
|
||||||
|
MCRROOT="$1"
|
||||||
|
# echo ---
|
||||||
|
LD_LIBRARY_PATH=.:${MCRROOT}/runtime/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/bin/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/os/glnxa64;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/opengl/lib/glnxa64;
|
||||||
|
export LD_LIBRARY_PATH;
|
||||||
|
# echo LD_LIBRARY_PATH is ${LD_LIBRARY_PATH};
|
||||||
|
shift 1
|
||||||
|
args=
|
||||||
|
out=
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
token=$1
|
||||||
|
args="${args} \"${token}\""
|
||||||
|
out="${out}${token}"
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
out="${out}_output_error.txt"
|
||||||
|
#eval "\"${exe_dir}/RSN_lnx\"" $args > /dev/null 2>&1
|
||||||
|
eval "\"${exe_dir}/RSN_lnx\"" $args > /tmp/${out} 2>&1
|
||||||
|
fi
|
||||||
|
exit
|
||||||
|
|
||||||
37
run_Tilt_lnx.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# script for execution of deployed applications
|
||||||
|
#
|
||||||
|
# Sets up the MATLAB Runtime environment for the current $ARCH and executes
|
||||||
|
# the specified command.
|
||||||
|
#
|
||||||
|
exe_name=$0
|
||||||
|
exe_dir=`dirname "$0"`
|
||||||
|
#echo "------------------------------------------"
|
||||||
|
if [ "x$1" = "x" ]; then
|
||||||
|
echo Usage:
|
||||||
|
echo $0 \<deployedMCRroot\> args
|
||||||
|
else
|
||||||
|
# echo Setting up environment variables
|
||||||
|
MCRROOT="$1"
|
||||||
|
# echo ---
|
||||||
|
LD_LIBRARY_PATH=.:${MCRROOT}/runtime/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/bin/glnxa64 ;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/os/glnxa64;
|
||||||
|
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${MCRROOT}/sys/opengl/lib/glnxa64;
|
||||||
|
export LD_LIBRARY_PATH;
|
||||||
|
# echo LD_LIBRARY_PATH is ${LD_LIBRARY_PATH};
|
||||||
|
shift 1
|
||||||
|
args=
|
||||||
|
out=
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
token=$1
|
||||||
|
args="${args} \"${token}\""
|
||||||
|
out="${out}${token}"
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
out="${out}_output_error.txt"
|
||||||
|
#eval "\"${exe_dir}/Tilt_lnx\"" $args > /dev/null 2>&1
|
||||||
|
eval "\"${exe_dir}/Tilt_lnx\"" $args > /tmp/${out} 2>&1
|
||||||
|
fi
|
||||||
|
exit
|
||||||
|
|
||||||
6
scp_aseweb_aruba.sh
Executable file
@@ -0,0 +1,6 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
echo 'Started at:' `date`
|
||||||
|
cd /var/www/www2.aseltd.eu
|
||||||
|
find sites/ase/ -type f \( ! -iname defines.php -and ! -iname UserManager.php \) -mtime -1 -exec ls -la {} \;
|
||||||
|
find sites/ase/ -type f \( ! -iname defines.php -and ! -iname UserManager.php \) -mtime -1 -exec scp {} alex@80.211.60.65:/var/www/aseltd.eu/{} \;
|
||||||
|
echo 'Ended at: ' `date`
|
||||||
305
sisgeoLoadScript.py
Executable file
@@ -0,0 +1,305 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
from decimal import Decimal
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
def insertData(dati):
|
||||||
|
#print(dati)
|
||||||
|
#print(len(dati))
|
||||||
|
if(len(dati) > 0):
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
if(len(dati) == 2):
|
||||||
|
u = ""
|
||||||
|
t = ""
|
||||||
|
rawdata = dati[0]
|
||||||
|
elabdata = dati[1]
|
||||||
|
if(len(rawdata) > 0):
|
||||||
|
for r in rawdata:
|
||||||
|
#print(r)
|
||||||
|
#print(len(r))
|
||||||
|
if(len(r) == 6): # node 1
|
||||||
|
unitname = r[0]
|
||||||
|
toolname = r[1]
|
||||||
|
nodenum = r[2]
|
||||||
|
pressure = Decimal(r[3])*100
|
||||||
|
date = r[4]
|
||||||
|
time = r[5]
|
||||||
|
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum])
|
||||||
|
result = cursor.fetchall()
|
||||||
|
if(result):
|
||||||
|
if(result[0][8] is None):
|
||||||
|
datetimeOld = datetime.strptime(str(result[0][4]) + " " + str(result[0][5]), "%Y-%m-%d %H:%M:%S")
|
||||||
|
datetimeNew = datetime.strptime(str(date) + " " + str(time), "%Y-%m-%d %H:%M:%S")
|
||||||
|
dateDiff = datetimeNew - datetimeOld
|
||||||
|
if(dateDiff.total_seconds() / 3600 >= 5):
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else:
|
||||||
|
query = "UPDATE RAWDATACOR SET val0=%s, EventDate=%s, EventTime=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND val0 is NULL ORDER BY EventDate desc,EventTime desc limit 1"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [pressure, date, time, unitname, toolname, nodenum])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
elif(result[0][8] is not None):
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else:
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, pressure, -1, -273])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else: # other nodes 2->5
|
||||||
|
unitname = r[0]
|
||||||
|
toolname = r[1]
|
||||||
|
nodenum = r[2]
|
||||||
|
freqinhz = r[3]
|
||||||
|
therminohms = r[4]
|
||||||
|
freqindigit = r[5]
|
||||||
|
date = r[6]
|
||||||
|
time = r[7]
|
||||||
|
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum])
|
||||||
|
result = cursor.fetchall()
|
||||||
|
if(result):
|
||||||
|
if(result[0][8] is None):
|
||||||
|
query = "UPDATE RAWDATACOR SET val0=%s, val1=%s, val2=%s, EventDate=%s, EventTime=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND val0 is NULL ORDER BY EventDate desc,EventTime desc limit 1"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [freqinhz, therminohms, freqindigit, date, time, unitname, toolname, nodenum])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
elif(result[0][8] is not None):
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, val1, val2, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, freqinhz, therminohms, freqindigit, -1, -273])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else:
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, val0, val1, val2, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, freqinhz, therminohms, freqindigit, -1, -273])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
|
||||||
|
if(len(elabdata) > 0):
|
||||||
|
for e in elabdata:
|
||||||
|
#print(e)
|
||||||
|
#print(len(e))
|
||||||
|
if(len(e) == 6): # node 1
|
||||||
|
unitname = e[0]
|
||||||
|
toolname = e[1]
|
||||||
|
nodenum = e[2]
|
||||||
|
pressure = Decimal(e[3])*100
|
||||||
|
date = e[4]
|
||||||
|
time = e[5]
|
||||||
|
try:
|
||||||
|
query = "INSERT INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, pressure])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else: # other nodes 2->5
|
||||||
|
unitname = e[0]
|
||||||
|
toolname = e[1]
|
||||||
|
u = unitname
|
||||||
|
t = toolname
|
||||||
|
nodenum = e[2]
|
||||||
|
pch = e[3]
|
||||||
|
tch = e[4]
|
||||||
|
date = e[5]
|
||||||
|
time = e[6]
|
||||||
|
try:
|
||||||
|
query = "INSERT INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift, T_node) VALUES(%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, pch, tch])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
#os.system("cd /usr/local/matlab_func/; ./run_ATD_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+u+" "+t+"")
|
||||||
|
else:
|
||||||
|
for r in dati:
|
||||||
|
#print(r)
|
||||||
|
unitname = r[0]
|
||||||
|
toolname = r[1]
|
||||||
|
nodenum = r[2]
|
||||||
|
date = r[3]
|
||||||
|
time = r[4]
|
||||||
|
battery = r[5]
|
||||||
|
temperature = r[6]
|
||||||
|
query = "SELECT * from RAWDATACOR WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s ORDER BY EventDate desc,EventTime desc limit 1"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum])
|
||||||
|
result = cursor.fetchall()
|
||||||
|
if(result):
|
||||||
|
if(result[0][25] is None or result[0][25] == -1.00):
|
||||||
|
datetimeOld = datetime.strptime(str(result[0][4]) + " " + str(result[0][5]), "%Y-%m-%d %H:%M:%S")
|
||||||
|
datetimeNew = datetime.strptime(str(date) + " " + str(time), "%Y-%m-%d %H:%M:%S")
|
||||||
|
dateDiff = datetimeNew - datetimeOld
|
||||||
|
#print(dateDiff.total_seconds() / 3600)
|
||||||
|
if(dateDiff.total_seconds() / 3600 >= 5):
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else:
|
||||||
|
query = "UPDATE RAWDATACOR SET BatLevelModule=%s, TemperatureModule=%s WHERE UnitName=%s AND ToolNameID=%s AND NodeNum=%s AND (BatLevelModule is NULL or BatLevelModule = -1.00) ORDER BY EventDate desc,EventTime desc limit 1"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [battery, temperature, unitname, toolname, nodenum])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
elif(result[0][25] is not None and result[0][25] != -1.00):
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
else:
|
||||||
|
query = "INSERT INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, BatLevelModule, TemperatureModule) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
try:
|
||||||
|
cursor.execute(query, [unitname, toolname, nodenum, date, time, -1, -273, battery, temperature])
|
||||||
|
conn.commit()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
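Editor's note: insertData above merges the two partial sisgeo feeds into a single RAWDATACOR row by updating the most recent row when its val0 (column index 8) is still NULL and the new reading is less than 5 hours newer, and inserting a new row otherwise. A compact sketch of that decision, with the column indices and 5-hour window taken from the code above (the helper name is illustrative):

    from datetime import datetime

    def should_update_last_row(last_row, new_date, new_time, val_index=8, max_gap_hours=5):
        if not last_row or last_row[val_index] is not None:
            return False  # nothing pending to complete -> insert a new row instead
        old_dt = datetime.strptime(f"{last_row[4]} {last_row[5]}", "%Y-%m-%d %H:%M:%S")
        new_dt = datetime.strptime(f"{new_date} {new_time}", "%Y-%m-%d %H:%M:%S")
        gap_hours = (new_dt - old_dt).total_seconds() / 3600
        return gap_hours < max_gap_hours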
|
def getDataFromCsv(pathFile):
|
||||||
|
with open(pathFile, 'r') as file:
|
||||||
|
data = file.readlines()
|
||||||
|
data = [row.rstrip() for row in data]
|
||||||
|
serial_number = data[0].split(",")[1]
|
||||||
|
data = data[10:] # drop the 10 header rows
|
||||||
|
dati = []
|
||||||
|
rawDatiReadings = []#tmp
|
||||||
|
elabDatiReadings = []#tmp
|
||||||
|
datiReadings = []
|
||||||
|
i = 0
|
||||||
|
unit = ""
|
||||||
|
tool = ""
|
||||||
|
#row = data[0] # before the loop was added, only a single row was handled
|
||||||
|
for row in data: # handle multiple rows
|
||||||
|
row = row.split(",")
|
||||||
|
if i == 0:
|
||||||
|
query = "SELECT unit_name, tool_name FROM sisgeo_tools WHERE serial_number='"+serial_number+"'"
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute(query, [serial_number])
|
||||||
|
result = cursor.fetchall()
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
unit = result[0][0]
|
||||||
|
tool = result[0][1]
|
||||||
|
#print(result[0][0])
|
||||||
|
#print(result[0][1])
|
||||||
|
if("health" in pathFile):
|
||||||
|
datetime = str(row[0]).replace("\"", "").split(" ")
|
||||||
|
date = datetime[0]
|
||||||
|
time = datetime[1]
|
||||||
|
battery = row[1]
|
||||||
|
temperature = row[2]
|
||||||
|
dati.append((unit, tool, 1, date, time, battery, temperature))
|
||||||
|
dati.append((unit, tool, 2, date, time, battery, temperature))
|
||||||
|
dati.append((unit, tool, 3, date, time, battery, temperature))
|
||||||
|
dati.append((unit, tool, 4, date, time, battery, temperature))
|
||||||
|
dati.append((unit, tool, 5, date, time, battery, temperature))
|
||||||
|
else:
|
||||||
|
datetime = str(row[0]).replace("\"", "").split(" ")
|
||||||
|
date = datetime[0]
|
||||||
|
time = datetime[1]
|
||||||
|
atmpressure = row[1]#nodo1
|
||||||
|
#raw
|
||||||
|
freqinhzch1 = row[2]#nodo2
|
||||||
|
freqindigitch1 = row[3]#nodo2
|
||||||
|
thermResInOhmsch1 = row[4]#nodo2
|
||||||
|
freqinhzch2 = row[5]#nodo3
|
||||||
|
freqindigitch2 = row[6]#nodo3
|
||||||
|
thermResInOhmsch2 = row[7]#nodo3
|
||||||
|
freqinhzch3 = row[8]#nodo4
|
||||||
|
freqindigitch3 = row[9]#nodo4
|
||||||
|
thermResInOhmsch3 = row[10]#nodo4
|
||||||
|
freqinhzch4 = row[11]#nodo5
|
||||||
|
freqindigitch4 = row[12]#nodo5
|
||||||
|
thermResInOhmsch4 = row[13]#nodo5
|
||||||
|
#elab
|
||||||
|
pch1 = row[18]#nodo2
|
||||||
|
tch1 = row[19]#nodo2
|
||||||
|
pch2 = row[20]#nodo3
|
||||||
|
tch2 = row[21]#nodo3
|
||||||
|
pch3 = row[22]#nodo4
|
||||||
|
tch3 = row[23]#nodo4
|
||||||
|
pch4 = row[24]#nodo5
|
||||||
|
tch4 = row[25]#nodo5
|
||||||
|
|
||||||
|
rawDatiReadings.append((unit, tool, 1, atmpressure, date, time))
|
||||||
|
rawDatiReadings.append((unit, tool, 2, freqinhzch1, thermResInOhmsch1, freqindigitch1, date, time))
|
||||||
|
rawDatiReadings.append((unit, tool, 3, freqinhzch2, thermResInOhmsch2, freqindigitch2, date, time))
|
||||||
|
rawDatiReadings.append((unit, tool, 4, freqinhzch3, thermResInOhmsch3, freqindigitch3, date, time))
|
||||||
|
rawDatiReadings.append((unit, tool, 5, freqinhzch4, thermResInOhmsch4, freqindigitch4, date, time))
|
||||||
|
|
||||||
|
elabDatiReadings.append((unit, tool, 1, atmpressure, date, time))
|
||||||
|
elabDatiReadings.append((unit, tool, 2, pch1, tch1, date, time))
|
||||||
|
elabDatiReadings.append((unit, tool, 3, pch2, tch2, date, time))
|
||||||
|
elabDatiReadings.append((unit, tool, 4, pch3, tch3, date, time))
|
||||||
|
elabDatiReadings.append((unit, tool, 5, pch4, tch4, date, time))
|
||||||
|
|
||||||
|
#[raw],[elab] # old single-row layout, kept for reference
|
||||||
|
#dati = [
|
||||||
|
# [
|
||||||
|
# (unit, tool, 1, atmpressure, date, time),
|
||||||
|
# (unit, tool, 2, freqinhzch1, thermResInOhmsch1, freqindigitch1, date, time),
|
||||||
|
# (unit, tool, 3, freqinhzch2, thermResInOhmsch2, freqindigitch2, date, time),
|
||||||
|
# (unit, tool, 4, freqinhzch3, thermResInOhmsch3, freqindigitch3, date, time),
|
||||||
|
# (unit, tool, 5, freqinhzch4, thermResInOhmsch4, freqindigitch4, date, time),
|
||||||
|
# ], [
|
||||||
|
# (unit, tool, 1, atmpressure, date, time),
|
||||||
|
# (unit, tool, 2, pch1, tch1, date, time),
|
||||||
|
# (unit, tool, 3, pch2, tch2, date, time),
|
||||||
|
# (unit, tool, 4, pch3, tch3, date, time),
|
||||||
|
# (unit, tool, 5, pch4, tch4, date, time),
|
||||||
|
# ]
|
||||||
|
# ]
|
||||||
|
i+=1
|
||||||
|
#print(dati)
|
||||||
|
if(len(rawDatiReadings) > 0 or len(elabDatiReadings) > 0):
|
||||||
|
datiReadings = [rawDatiReadings, elabDatiReadings]
|
||||||
|
if(len(datiReadings) > 0):
|
||||||
|
return datiReadings
|
||||||
|
return dati
|
||||||
|
|
||||||
|
def main():
|
||||||
|
insertData(getDataFromCsv(sys.argv[1]))
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
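Editor's note: getDataFromCsv above maps the atmospheric pressure column to node 1 and each of the four sensor channels to nodes 2-5, with raw columns at indices 2-13 and elaborated columns at indices 18-25. A shorter sketch of the same tuple building using a loop (column positions and node numbering taken from the code above; the function name is illustrative):

    def build_readings(unit, tool, row, date, time):
        raw = [(unit, tool, 1, row[1], date, time)]   # node 1: atmospheric pressure
        elab = [(unit, tool, 1, row[1], date, time)]
        for ch in range(4):                            # channels 1..4 -> nodes 2..5
            node = ch + 2
            freq_hz, freq_digit, therm_ohms = row[2 + 3 * ch : 5 + 3 * ch]
            raw.append((unit, tool, node, freq_hz, therm_ohms, freq_digit, date, time))
            p, t = row[18 + 2 * ch : 20 + 2 * ch]
            elab.append((unit, tool, node, p, t, date, time))
        return raw, elab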
306
sorotecPini.py
Executable file
@@ -0,0 +1,306 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
from datetime import datetime
|
||||||
|
import math
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
def removeDuplicates(lst):
|
||||||
|
return list(set(lst))
|
||||||
|
|
||||||
|
def getDataFromCsvAndInsert(pathFile):
|
||||||
|
try:
|
||||||
|
print(pathFile)
|
||||||
|
folder_name = pathFile.split("/")[-2]#cartella
|
||||||
|
with open(pathFile, 'r') as file:
|
||||||
|
data = file.readlines()
|
||||||
|
data = [row.rstrip() for row in data]
|
||||||
|
if(data is not None and len(data) > 0):
|
||||||
|
if(folder_name == "ID0247"):
|
||||||
|
unit_name = "ID0247"
|
||||||
|
tool_name = "DT0001"
|
||||||
|
del data[:4] # drop the 4 header lines
|
||||||
|
data = [element for element in data if element != ""]
|
||||||
|
try:
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
queryElab = "insert ignore into ELABDATADISP(UnitName,ToolNameID,NodeNum,EventDate,EventTime,load_value) values (%s,%s,%s,%s,%s,%s)"
|
||||||
|
queryRaw = "insert ignore into RAWDATACOR(UnitName,ToolNameID,NodeNum,EventDate,EventTime,BatLevel,Temperature,Val0) values (%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
if("_1_" in pathFile):
|
||||||
|
print("File tipo 1.\n")
|
||||||
|
#print(unit_name, tool_name)
|
||||||
|
dataToInsertElab = []
|
||||||
|
dataToInsertRaw = []
|
||||||
|
for row in data:
|
||||||
|
rowSplitted = row.replace("\"","").split(";")
|
||||||
|
eventTimestamp = rowSplitted[0].split(" ")
|
||||||
|
date = eventTimestamp[0].split("-")
|
||||||
|
date = date[2]+"-"+date[1]+"-"+date[0]
|
||||||
|
time = eventTimestamp[1]
|
||||||
|
an3 = rowSplitted[1]
|
||||||
|
an4 = rowSplitted[2]#V unit battery
|
||||||
|
OUTREG2 = rowSplitted[3]
|
||||||
|
E8_181_CH2 = rowSplitted[4]#2
|
||||||
|
E8_181_CH3 = rowSplitted[5]#3
|
||||||
|
E8_181_CH4 = rowSplitted[6]#4
|
||||||
|
E8_181_CH5 = rowSplitted[7]#5
|
||||||
|
E8_181_CH6 = rowSplitted[8]#6
|
||||||
|
E8_181_CH7 = rowSplitted[9]#7
|
||||||
|
E8_181_CH8 = rowSplitted[10]#8
|
||||||
|
E8_182_CH1 = rowSplitted[11]#9
|
||||||
|
E8_182_CH2 = rowSplitted[12]#10
|
||||||
|
E8_182_CH3 = rowSplitted[13]#11
|
||||||
|
E8_182_CH4 = rowSplitted[14]#12
|
||||||
|
E8_182_CH5 = rowSplitted[15]#13
|
||||||
|
E8_182_CH6 = rowSplitted[16]#14
|
||||||
|
E8_182_CH7 = rowSplitted[17]#15
|
||||||
|
E8_182_CH8 = rowSplitted[18]#16
|
||||||
|
E8_183_CH1 = rowSplitted[19]#17
|
||||||
|
E8_183_CH2 = rowSplitted[20]#18
|
||||||
|
E8_183_CH3 = rowSplitted[21]#19
|
||||||
|
E8_183_CH4 = rowSplitted[22]#20
|
||||||
|
E8_183_CH5 = rowSplitted[23]#21
|
||||||
|
E8_183_CH6 = rowSplitted[24]#22
|
||||||
|
E8_183_CH7 = rowSplitted[25]#23
|
||||||
|
E8_183_CH8 = rowSplitted[26]#24
|
||||||
|
E8_184_CH1 = rowSplitted[27]#25
|
||||||
|
E8_184_CH2 = rowSplitted[28]#26
|
||||||
|
E8_184_CH3 = rowSplitted[29]#27 mv/V
|
||||||
|
E8_184_CH4 = rowSplitted[30]#28 mv/V
|
||||||
|
E8_184_CH5 = rowSplitted[31]#29 mv/V
|
||||||
|
E8_184_CH6 = rowSplitted[32]#30 mv/V
|
||||||
|
E8_184_CH7 = rowSplitted[33]#31 mv/V
|
||||||
|
E8_184_CH8 = rowSplitted[34]#32 mv/V
|
||||||
|
E8_181_CH1 = rowSplitted[35]#1
|
||||||
|
an1 = rowSplitted[36]
|
||||||
|
an2 = rowSplitted[37]
|
||||||
|
#print(unit_name, tool_name, 1, E8_181_CH1)
|
||||||
|
#print(unit_name, tool_name, 2, E8_181_CH2)
|
||||||
|
#print(unit_name, tool_name, 3, E8_181_CH3)
|
||||||
|
#print(unit_name, tool_name, 4, E8_181_CH4)
|
||||||
|
#print(unit_name, tool_name, 5, E8_181_CH5)
|
||||||
|
#print(unit_name, tool_name, 6, E8_181_CH6)
|
||||||
|
#print(unit_name, tool_name, 7, E8_181_CH7)
|
||||||
|
#print(unit_name, tool_name, 8, E8_181_CH8)
|
||||||
|
#print(unit_name, tool_name, 9, E8_182_CH1)
|
||||||
|
#print(unit_name, tool_name, 10, E8_182_CH2)
|
||||||
|
#print(unit_name, tool_name, 11, E8_182_CH3)
|
||||||
|
#print(unit_name, tool_name, 12, E8_182_CH4)
|
||||||
|
#print(unit_name, tool_name, 13, E8_182_CH5)
|
||||||
|
#print(unit_name, tool_name, 14, E8_182_CH6)
|
||||||
|
#print(unit_name, tool_name, 15, E8_182_CH7)
|
||||||
|
#print(unit_name, tool_name, 16, E8_182_CH8)
|
||||||
|
#print(unit_name, tool_name, 17, E8_183_CH1)
|
||||||
|
#print(unit_name, tool_name, 18, E8_183_CH2)
|
||||||
|
#print(unit_name, tool_name, 19, E8_183_CH3)
|
||||||
|
#print(unit_name, tool_name, 20, E8_183_CH4)
|
||||||
|
#print(unit_name, tool_name, 21, E8_183_CH5)
|
||||||
|
#print(unit_name, tool_name, 22, E8_183_CH6)
|
||||||
|
#print(unit_name, tool_name, 23, E8_183_CH7)
|
||||||
|
#print(unit_name, tool_name, 24, E8_183_CH8)
|
||||||
|
#print(unit_name, tool_name, 25, E8_184_CH1)
|
||||||
|
#print(unit_name, tool_name, 26, E8_184_CH2)
|
||||||
|
#print(unit_name, tool_name, 27, E8_184_CH3)
|
||||||
|
#print(unit_name, tool_name, 28, E8_184_CH4)
|
||||||
|
#print(unit_name, tool_name, 29, E8_184_CH5)
|
||||||
|
#print(unit_name, tool_name, 30, E8_184_CH6)
|
||||||
|
#print(unit_name, tool_name, 31, E8_184_CH7)
|
||||||
|
#print(unit_name, tool_name, 32, E8_184_CH8)
|
||||||
|
#---------------------------------------------------------------------------------------
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 1, date, time, an4, -273, E8_181_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 2, date, time, an4, -273, E8_181_CH2))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 3, date, time, an4, -273, E8_181_CH3))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 4, date, time, an4, -273, E8_181_CH4))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 5, date, time, an4, -273, E8_181_CH5))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 6, date, time, an4, -273, E8_181_CH6))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 7, date, time, an4, -273, E8_181_CH7))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 8, date, time, an4, -273, E8_181_CH8))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 9, date, time, an4, -273, E8_182_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 10, date, time, an4, -273, E8_182_CH2))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 11, date, time, an4, -273, E8_182_CH3))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 12, date, time, an4, -273, E8_182_CH4))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 13, date, time, an4, -273, E8_182_CH5))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 14, date, time, an4, -273, E8_182_CH6))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 15, date, time, an4, -273, E8_182_CH7))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 16, date, time, an4, -273, E8_182_CH8))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 17, date, time, an4, -273, E8_183_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 18, date, time, an4, -273, E8_183_CH2))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 19, date, time, an4, -273, E8_183_CH3))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 20, date, time, an4, -273, E8_183_CH4))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 21, date, time, an4, -273, E8_183_CH5))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 22, date, time, an4, -273, E8_183_CH6))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 23, date, time, an4, -273, E8_183_CH7))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 24, date, time, an4, -273, E8_183_CH8))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 25, date, time, an4, -273, E8_184_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 26, date, time, an4, -273, E8_184_CH2))
|
||||||
|
#---------------------------------------------------------------------------------------
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 1, date, time, E8_181_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 2, date, time, E8_181_CH2))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 3, date, time, E8_181_CH3))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 4, date, time, E8_181_CH4))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 5, date, time, E8_181_CH5))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 6, date, time, E8_181_CH6))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 7, date, time, E8_181_CH7))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 8, date, time, E8_181_CH8))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 9, date, time, E8_182_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 10, date, time, E8_182_CH2))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 11, date, time, E8_182_CH3))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 12, date, time, E8_182_CH4))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 13, date, time, E8_182_CH5))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 14, date, time, E8_182_CH6))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 15, date, time, E8_182_CH7))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 16, date, time, E8_182_CH8))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 17, date, time, E8_183_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 18, date, time, E8_183_CH2))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 19, date, time, E8_183_CH3))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 20, date, time, E8_183_CH4))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 21, date, time, E8_183_CH5))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 22, date, time, E8_183_CH6))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 23, date, time, E8_183_CH7))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 24, date, time, E8_183_CH8))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 25, date, time, E8_184_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 26, date, time, E8_184_CH2))
|
||||||
|
#---------------------------------------------------------------------------------------
|
||||||
|
cursor.executemany(queryElab, dataToInsertElab)
|
||||||
|
cursor.executemany(queryRaw, dataToInsertRaw)
|
||||||
|
conn.commit()
|
||||||
|
#print(dataToInsertElab)
|
||||||
|
#print(dataToInsertRaw)
|
||||||
|
elif("_2_" in pathFile):
|
||||||
|
print("File tipo 2.\n")
|
||||||
|
#print(unit_name, tool_name)
|
||||||
|
dataToInsertElab = []
|
||||||
|
dataToInsertRaw = []
|
||||||
|
for row in data:
|
||||||
|
rowSplitted = row.replace("\"","").split(";")
|
||||||
|
eventTimestamp = rowSplitted[0].split(" ")
|
||||||
|
date = eventTimestamp[0].split("-")
|
||||||
|
date = date[2]+"-"+date[1]+"-"+date[0]
|
||||||
|
time = eventTimestamp[1]
|
||||||
|
an2 = rowSplitted[1]
|
||||||
|
an3 = rowSplitted[2]
|
||||||
|
an1 = rowSplitted[3]
|
||||||
|
OUTREG2 = rowSplitted[4]
|
||||||
|
E8_181_CH1 = rowSplitted[5]#33 mv/V
|
||||||
|
E8_181_CH2 = rowSplitted[6]#34 mv/V
|
||||||
|
E8_181_CH3 = rowSplitted[7]#35 mv/V
|
||||||
|
E8_181_CH4 = rowSplitted[8]#36 mv/V
|
||||||
|
E8_181_CH5 = rowSplitted[9]#37 mv/V
|
||||||
|
E8_181_CH6 = rowSplitted[10]#38 mv/V
|
||||||
|
E8_181_CH7 = rowSplitted[11]#39 mv/V
|
||||||
|
E8_181_CH8 = rowSplitted[12]#40 mv/V
|
||||||
|
E8_182_CH1 = rowSplitted[13]#41
|
||||||
|
E8_182_CH2 = rowSplitted[14]#42
|
||||||
|
E8_182_CH3 = rowSplitted[15]#43
|
||||||
|
E8_182_CH4 = rowSplitted[16]#44
|
||||||
|
E8_182_CH5 = rowSplitted[17]#45 mv/V
|
||||||
|
E8_182_CH6 = rowSplitted[18]#46 mv/V
|
||||||
|
E8_182_CH7 = rowSplitted[19]#47 mv/V
|
||||||
|
E8_182_CH8 = rowSplitted[20]#48 mv/V
|
||||||
|
E8_183_CH1 = rowSplitted[21]#49
|
||||||
|
E8_183_CH2 = rowSplitted[22]#50
|
||||||
|
E8_183_CH3 = rowSplitted[23]#51
|
||||||
|
E8_183_CH4 = rowSplitted[24]#52
|
||||||
|
E8_183_CH5 = rowSplitted[25]#53 mv/V
|
||||||
|
E8_183_CH6 = rowSplitted[26]#54 mv/V
|
||||||
|
E8_183_CH7 = rowSplitted[27]#55 mv/V
|
||||||
|
E8_183_CH8 = rowSplitted[28]#56
|
||||||
|
E8_184_CH1 = rowSplitted[29]#57
|
||||||
|
E8_184_CH2 = rowSplitted[30]#58
|
||||||
|
E8_184_CH3 = rowSplitted[31]#59
|
||||||
|
E8_184_CH4 = rowSplitted[32]#60
|
||||||
|
E8_184_CH5 = rowSplitted[33]#61
|
||||||
|
E8_184_CH6 = rowSplitted[34]#62
|
||||||
|
E8_184_CH7 = rowSplitted[35]#63 mv/V
|
||||||
|
E8_184_CH8 = rowSplitted[36]#64 mv/V
|
||||||
|
an4 = rowSplitted[37]#V unit battery
|
||||||
|
#print(unit_name, tool_name, 33, E8_181_CH1)
|
||||||
|
#print(unit_name, tool_name, 34, E8_181_CH2)
|
||||||
|
#print(unit_name, tool_name, 35, E8_181_CH3)
|
||||||
|
#print(unit_name, tool_name, 36, E8_181_CH4)
|
||||||
|
#print(unit_name, tool_name, 37, E8_181_CH5)
|
||||||
|
#print(unit_name, tool_name, 38, E8_181_CH6)
|
||||||
|
#print(unit_name, tool_name, 39, E8_181_CH7)
|
||||||
|
#print(unit_name, tool_name, 40, E8_181_CH8)
|
||||||
|
#print(unit_name, tool_name, 41, E8_182_CH1)
|
||||||
|
#print(unit_name, tool_name, 42, E8_182_CH2)
|
||||||
|
#print(unit_name, tool_name, 43, E8_182_CH3)
|
||||||
|
#print(unit_name, tool_name, 44, E8_182_CH4)
|
||||||
|
#print(unit_name, tool_name, 45, E8_182_CH5)
|
||||||
|
#print(unit_name, tool_name, 46, E8_182_CH6)
|
||||||
|
#print(unit_name, tool_name, 47, E8_182_CH7)
|
||||||
|
#print(unit_name, tool_name, 48, E8_182_CH8)
|
||||||
|
#print(unit_name, tool_name, 49, E8_183_CH1)
|
||||||
|
#print(unit_name, tool_name, 50, E8_183_CH2)
|
||||||
|
#print(unit_name, tool_name, 51, E8_183_CH3)
|
||||||
|
#print(unit_name, tool_name, 52, E8_183_CH4)
|
||||||
|
#print(unit_name, tool_name, 53, E8_183_CH5)
|
||||||
|
#print(unit_name, tool_name, 54, E8_183_CH6)
|
||||||
|
#print(unit_name, tool_name, 55, E8_183_CH7)
|
||||||
|
#print(unit_name, tool_name, 56, E8_183_CH8)
|
||||||
|
#print(unit_name, tool_name, 57, E8_184_CH1)
|
||||||
|
#print(unit_name, tool_name, 58, E8_184_CH2)
|
||||||
|
#print(unit_name, tool_name, 59, E8_184_CH3)
|
||||||
|
#print(unit_name, tool_name, 60, E8_184_CH4)
|
||||||
|
#print(unit_name, tool_name, 61, E8_184_CH5)
|
||||||
|
#print(unit_name, tool_name, 62, E8_184_CH6)
|
||||||
|
#print(unit_name, tool_name, 63, E8_184_CH7)
|
||||||
|
#print(unit_name, tool_name, 64, E8_184_CH8)
|
||||||
|
#print(rowSplitted)
|
||||||
|
#---------------------------------------------------------------------------------------
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 41, date, time, an4, -273, E8_182_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 42, date, time, an4, -273, E8_182_CH2))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 43, date, time, an4, -273, E8_182_CH3))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 44, date, time, an4, -273, E8_182_CH4))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 49, date, time, an4, -273, E8_183_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 50, date, time, an4, -273, E8_183_CH2))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 51, date, time, an4, -273, E8_183_CH3))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 52, date, time, an4, -273, E8_183_CH4))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 56, date, time, an4, -273, E8_183_CH8))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 57, date, time, an4, -273, E8_184_CH1))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 58, date, time, an4, -273, E8_184_CH2))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 59, date, time, an4, -273, E8_184_CH3))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 60, date, time, an4, -273, E8_184_CH4))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 61, date, time, an4, -273, E8_184_CH5))
|
||||||
|
dataToInsertRaw.append((unit_name, tool_name, 62, date, time, an4, -273, E8_184_CH6))
|
||||||
|
#---------------------------------------------------------------------------------------
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 41, date, time, E8_182_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 42, date, time, E8_182_CH2))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 43, date, time, E8_182_CH3))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 44, date, time, E8_182_CH4))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 49, date, time, E8_183_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 50, date, time, E8_183_CH2))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 51, date, time, E8_183_CH3))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 52, date, time, E8_183_CH4))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 56, date, time, E8_183_CH8))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 57, date, time, E8_184_CH1))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 58, date, time, E8_184_CH2))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 59, date, time, E8_184_CH3))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 60, date, time, E8_184_CH4))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 61, date, time, E8_184_CH5))
|
||||||
|
dataToInsertElab.append((unit_name, tool_name, 62, date, time, E8_184_CH6))
|
||||||
|
#---------------------------------------------------------------------------------------
|
||||||
|
cursor.executemany(queryElab, dataToInsertElab)
|
||||||
|
cursor.executemany(queryRaw, dataToInsertRaw)
|
||||||
|
conn.commit()
|
||||||
|
#print(dataToInsertElab)
|
||||||
|
#print(dataToInsertRaw)
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
finally:
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
except Exception as e:
|
||||||
|
print(f"An unexpected error occurred: {str(e)}\n")
|
||||||
|
|
||||||
|
def main():
|
||||||
|
getDataFromCsvAndInsert(sys.argv[1])
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
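Editor's note: in the "_1_" branch of sorotecPini.py above, node 1 comes from CSV column 35 and nodes 2-26 from columns 4-28 in order, each inserted with the same battery (an4) and a -273 temperature placeholder. A sketch of the same mapping driven by a node-to-column dict instead of the long append blocks (indices and node numbers taken from the code above; names are illustrative):

    # node 1 lives in column 35; nodes 2..26 follow columns 4..28 in order
    NODE_TO_COL_TYPE1 = {1: 35, **{n: n + 2 for n in range(2, 27)}}

    def build_rows_type1(unit, tool, row, date, time, battery):
        raw, elab = [], []
        for node, col in sorted(NODE_TO_COL_TYPE1.items()):
            value = row[col]
            raw.append((unit, tool, node, date, time, battery, -273, value))
            elab.append((unit, tool, node, date, time, value))
        return raw, elab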
172
vulinkScript.py
Executable file
@@ -0,0 +1,172 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
from datetime import datetime
|
||||||
|
import json
|
||||||
|
import random
|
||||||
|
|
||||||
|
def checkBatteryLevel(db_conn, db_cursor, unit, date_time, battery_perc):
|
||||||
|
print(date_time, battery_perc)
|
||||||
|
if(float(battery_perc) < 25): # below 25%
|
||||||
|
query = "select unit_name, date_time from alarms where unit_name=%s and date_time < %s and type_id=2 order by date_time desc limit 1"
|
||||||
|
db_cursor.execute(query, [unit, date_time])
|
||||||
|
result = db_cursor.fetchall()
|
||||||
|
if(len(result) > 0):
|
||||||
|
alarm_date_time = result[0]["date_time"]#datetime not str
|
||||||
|
format1 = "%Y-%m-%d %H:%M"
|
||||||
|
dt1 = datetime.strptime(date_time, format1)
|
||||||
|
time_difference = abs(dt1 - alarm_date_time)
|
||||||
|
if time_difference.total_seconds() > 24 * 60 * 60:
|
||||||
|
print("The difference is above 24 hours. Creo allarme battery")
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
|
||||||
|
db_conn.commit()
|
||||||
|
else:
|
||||||
|
print("Creo allarme battery")
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, unit_name, date_time, battery_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [2, unit, date_time, battery_perc, "75%", 1, 0])
|
||||||
|
db_conn.commit()
|
||||||
|
|
||||||
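Editor's note: checkBatteryLevel above raises a low-battery alarm only when the level drops below 25% and either no previous battery alarm exists for the unit or the last one is more than 24 hours away from the new reading. A compact sketch of that debounce rule (threshold and window taken from the code above; the function name is illustrative):

    from datetime import datetime

    def battery_alarm_needed(battery_perc, reading_dt_str, last_alarm_dt, threshold=25.0):
        if float(battery_perc) >= threshold:
            return False
        if last_alarm_dt is None:
            return True  # no previous battery alarm for this unit
        reading_dt = datetime.strptime(reading_dt_str, "%Y-%m-%d %H:%M")
        return abs(reading_dt - last_alarm_dt).total_seconds() > 24 * 60 * 60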
|
def checkSogliePh(db_conn, db_cursor, unit, tool, node_num, date_time, ph_value, soglie_str):
|
||||||
|
soglie = json.loads(soglie_str)
|
||||||
|
soglia = next((item for item in soglie if item.get("type") == "PH Link"), None)
|
||||||
|
ph = soglia["data"]["ph"]
|
||||||
|
ph_uno = soglia["data"]["ph_uno"]
|
||||||
|
ph_due = soglia["data"]["ph_due"]
|
||||||
|
ph_tre = soglia["data"]["ph_tre"]
|
||||||
|
ph_uno_value = soglia["data"]["ph_uno_value"]
|
||||||
|
ph_due_value = soglia["data"]["ph_due_value"]
|
||||||
|
ph_tre_value = soglia["data"]["ph_tre_value"]
|
||||||
|
ph_uno_sms = soglia["data"]["ph_uno_sms"]
|
||||||
|
ph_due_sms = soglia["data"]["ph_due_sms"]
|
||||||
|
ph_tre_sms = soglia["data"]["ph_tre_sms"]
|
||||||
|
ph_uno_email = soglia["data"]["ph_uno_email"]
|
||||||
|
ph_due_email = soglia["data"]["ph_due_email"]
|
||||||
|
ph_tre_email = soglia["data"]["ph_tre_email"]
|
||||||
|
alert_uno = 0
|
||||||
|
alert_due = 0
|
||||||
|
alert_tre = 0
|
||||||
|
ph_value_prev = 0
|
||||||
|
#print(unit, tool, node_num, date_time)
|
||||||
|
query = "select XShift, EventDate, EventTime from ELABDATADISP where UnitName=%s and ToolNameID=%s and NodeNum=%s and concat(EventDate, ' ', EventTime) < %s order by concat(EventDate, ' ', EventTime) desc limit 1"
|
||||||
|
db_cursor.execute(query, [unit, tool, node_num, date_time])
|
||||||
|
resultPhPrev = db_cursor.fetchall()
|
||||||
|
if(len(resultPhPrev) > 0):
|
||||||
|
ph_value_prev = float(resultPhPrev[0]["XShift"])
|
||||||
|
#ph_value = random.uniform(7, 10)
|
||||||
|
print(tool, unit, node_num, date_time, ph_value)
|
||||||
|
#print(ph_value_prev, ph_value)
|
||||||
|
if(ph == 1):
|
||||||
|
if(ph_tre == 1 and ph_tre_value != '' and float(ph_value) > float(ph_tre_value)):
|
||||||
|
if(ph_value_prev <= float(ph_tre_value)):
|
||||||
|
alert_tre = 1
|
||||||
|
if(ph_due == 1 and ph_due_value != '' and float(ph_value) > float(ph_due_value)):
|
||||||
|
if(ph_value_prev <= float(ph_due_value)):
|
||||||
|
alert_due = 1
|
||||||
|
if(ph_uno == 1 and ph_uno_value != '' and float(ph_value) > float(ph_uno_value)):
|
||||||
|
if(ph_value_prev <= float(ph_uno_value)):
|
||||||
|
alert_uno = 1
|
||||||
|
#print(ph_value, ph, " livelli:", ph_uno, ph_due, ph_tre, " value:", ph_uno_value, ph_due_value, ph_tre_value, " sms:", ph_uno_sms, ph_due_sms, ph_tre_sms, " email:", ph_uno_email, ph_due_email, ph_tre_email)
|
||||||
|
if(alert_tre == 1):
|
||||||
|
print("level3",tool, unit, node_num, date_time, ph_value)
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 3, "pH", ph_tre_email, ph_tre_sms])
|
||||||
|
db_conn.commit()
|
||||||
|
elif(alert_due == 1):
|
||||||
|
print("level2",tool, unit, node_num, date_time, ph_value)
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 2, "pH", ph_due_email, ph_due_sms])
|
||||||
|
db_conn.commit()
|
||||||
|
elif(alert_uno == 1):
|
||||||
|
print("level1",tool, unit, node_num, date_time, ph_value)
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 1, "pH", ph_uno_email, ph_uno_sms])
|
||||||
|
db_conn.commit()
|
||||||
|
|
||||||
|
def getDataFromCsv(pathFile):
|
||||||
|
try:
|
||||||
|
folder_path, file_with_extension = os.path.split(pathFile)
|
||||||
|
file_name, _ = os.path.splitext(file_with_extension)#toolname
|
||||||
|
serial_number = file_name.split("_")[0]
|
||||||
|
query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
|
||||||
|
query_node_depth = "SELECT depth, t.soglie, n.num as node_num FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"
|
||||||
|
query_nodes = "SELECT t.soglie, n.num as node_num, n.nodetype_id FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s"
|
||||||
|
db_config = read_db_config()
|
||||||
|
conn = MySQLConnection(**db_config)
|
||||||
|
cursor = conn.cursor(dictionary=True)
|
||||||
|
cursor.execute(query, [serial_number])
|
||||||
|
result = cursor.fetchall()
|
||||||
|
unit = result[0]["unit_name"]
|
||||||
|
tool = result[0]["tool_name"]
|
||||||
|
cursor.execute(query_node_depth, [unit, tool])
|
||||||
|
resultNode = cursor.fetchall()
|
||||||
|
cursor.execute(query_nodes, [unit, tool])
|
||||||
|
resultAllNodes = cursor.fetchall()
|
||||||
|
#print(resultAllNodes)
|
||||||
|
node_num_piezo = next((item for item in resultAllNodes if item.get('nodetype_id') == 2), None)["node_num"]
|
||||||
|
node_num_baro = next((item for item in resultAllNodes if item.get('nodetype_id') == 3), None)["node_num"]
|
||||||
|
node_num_conductivity = next((item for item in resultAllNodes if item.get('nodetype_id') == 94), None)["node_num"]
|
||||||
|
node_num_ph = next((item for item in resultAllNodes if item.get('nodetype_id') == 97), None)["node_num"]
|
||||||
|
#print(node_num_piezo, node_num_baro, node_num_conductivity, node_num_ph)
|
||||||
|
# 2 piezo
|
||||||
|
# 3 baro
|
||||||
|
# 94 conductivity
|
||||||
|
# 97 ph
|
||||||
|
node_depth = float(resultNode[0]["depth"]) #node piezo depth
|
||||||
|
with open(pathFile, 'r', encoding='ISO-8859-1') as file:
|
||||||
|
data = file.readlines()
|
||||||
|
data = [row.rstrip() for row in data]
|
||||||
|
del data[:10] # drop the 10 header lines
|
||||||
|
for row in data:
|
||||||
|
row = row.split(",")
|
||||||
|
date_time = datetime.strptime(row[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
|
||||||
|
date_time = date_time.split(" ")
|
||||||
|
date = date_time[0]
|
||||||
|
time = date_time[1]
|
||||||
|
temperature_unit = float(row[2])
|
||||||
|
battery_perc = float(row[3])
|
||||||
|
pressure_baro = float(row[4])*1000 # kPa -> Pa (stored as pressure in ELABDATADISP)
|
||||||
|
conductivity = float(row[6])
|
||||||
|
ph = float(row[11])
|
||||||
|
temperature_piezo = float(row[14])
|
||||||
|
pressure = float(row[16])*1000
|
||||||
|
depth = (node_depth * -1) + float(row[17]) # add the reading to the node elevation (node depth with inverted sign)
|
||||||
|
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.execute(queryInsRaw, [unit, tool, node_num_baro, date, time, battery_perc, temperature_unit, pressure_baro])
|
||||||
|
cursor.execute(queryInsElab, [unit, tool, node_num_baro, date, time, pressure_baro])
|
||||||
|
conn.commit()
|
||||||
|
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.execute(queryInsRaw, [unit, tool, node_num_conductivity, date, time, battery_perc, temperature_unit, conductivity])
|
||||||
|
cursor.execute(queryInsElab, [unit, tool, node_num_conductivity, date, time, conductivity])
|
||||||
|
conn.commit()
|
||||||
|
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.execute(queryInsRaw, [unit, tool, node_num_ph, date, time, battery_perc, temperature_unit, ph])
|
||||||
|
cursor.execute(queryInsElab, [unit, tool, node_num_ph, date, time, ph])
|
||||||
|
conn.commit()
|
||||||
|
checkSogliePh(conn, cursor, unit, tool, node_num_ph, date_time[0]+" "+date_time[1], ph, resultNode[0]["soglie"])
|
||||||
|
queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
cursor.execute(queryInsRaw, [unit, tool, node_num_piezo, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
|
||||||
|
cursor.execute(queryInsElab, [unit, tool, node_num_piezo, date, time, temperature_piezo, depth, pressure])
|
||||||
|
conn.commit()
|
||||||
|
checkBatteryLevel(conn, cursor, unit, date_time[0]+" "+date_time[1], battery_perc)
|
||||||
|
except Error as e:
|
||||||
|
print('Error:', e)
|
||||||
|
def main():
|
||||||
|
getDataFromCsv(sys.argv[1])
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
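Editor's note: checkSogliePh above fires an alarm level only on an upward crossing, i.e. the new pH value is above the threshold while the previous stored value was not, and only the highest crossed level is reported. A sketch of that rule as a standalone helper (the enabled-flag checks are omitted here; names are illustrative):

    def crossed_level(value, previous, thresholds):
        # thresholds: {level: threshold_value}, e.g. {1: ph_uno_value, 2: ph_due_value, 3: ph_tre_value}
        for level in sorted(thresholds, reverse=True):   # report the highest level first
            t = float(thresholds[level])
            if float(value) > t and float(previous) <= t:
                return level
        return None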
125
vulinkScript_new_old.py
Executable file
@@ -0,0 +1,125 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
from mysql.connector import MySQLConnection, Error
|
||||||
|
from dbconfig import read_db_config
|
||||||
|
from datetime import datetime
|
||||||
|
import json
|
||||||
|
import random
|
||||||
|
|
||||||
|
def checkSogliePh(db_conn, db_cursor, unit, tool, node_num, date_time, ph_value, soglie_str):
|
||||||
|
soglie = json.loads(soglie_str)
|
||||||
|
soglia = next((item for item in soglie if item.get("type") == "PH Link"), None)
|
||||||
|
ph = soglia["data"]["ph"]
|
||||||
|
ph_uno = soglia["data"]["ph_uno"]
|
||||||
|
ph_due = soglia["data"]["ph_due"]
|
||||||
|
ph_tre = soglia["data"]["ph_tre"]
|
||||||
|
ph_uno_value = soglia["data"]["ph_uno_value"]
|
||||||
|
ph_due_value = soglia["data"]["ph_due_value"]
|
||||||
|
ph_tre_value = soglia["data"]["ph_tre_value"]
|
||||||
|
ph_uno_sms = soglia["data"]["ph_uno_sms"]
|
||||||
|
ph_due_sms = soglia["data"]["ph_due_sms"]
|
||||||
|
ph_tre_sms = soglia["data"]["ph_tre_sms"]
|
||||||
|
ph_uno_email = soglia["data"]["ph_uno_email"]
|
||||||
|
ph_due_email = soglia["data"]["ph_due_email"]
|
||||||
|
ph_tre_email = soglia["data"]["ph_tre_email"]
|
||||||
|
alert_uno = 0
|
||||||
|
alert_due = 0
|
||||||
|
alert_tre = 0
|
||||||
|
#ph_value = random.uniform(8, 10)
|
||||||
|
#print(ph_value)
|
||||||
|
if(ph == 1):
|
||||||
|
if(ph_tre == 1 and ph_tre_value != '' and float(ph_value) > float(ph_tre_value)):
|
||||||
|
alert_tre = 1
|
||||||
|
if(ph_due == 1 and ph_due_value != '' and float(ph_value) > float(ph_due_value)):
|
||||||
|
alert_due = 1
|
||||||
|
if(ph_uno == 1 and ph_uno_value != '' and float(ph_value) > float(ph_uno_value)):
|
||||||
|
alert_uno = 1
|
||||||
|
#print(ph_value, ph, " livelli:", ph_uno, ph_due, ph_tre, " value:", ph_uno_value, ph_due_value, ph_tre_value, " sms:", ph_uno_sms, ph_due_sms, ph_tre_sms, " email:", ph_uno_email, ph_due_email, ph_tre_email)
|
||||||
|
if(alert_tre == 1):
|
||||||
|
print("level3",tool, unit, node_num, date_time)
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 3, "pH", ph_tre_email, ph_tre_sms])
|
||||||
|
db_conn.commit()
|
||||||
|
elif(alert_due == 1):
|
||||||
|
print("level2",tool, unit, node_num, date_time)
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 2, "pH", ph_due_email, ph_due_sms])
|
||||||
|
db_conn.commit()
|
||||||
|
elif(alert_uno == 1):
|
||||||
|
print("level1",tool, unit, node_num, date_time)
|
||||||
|
queryInsAlarm = "INSERT IGNORE INTO alarms(type_id, tool_name, unit_name, date_time, registered_value, node_num, alarm_level, description, send_email, send_sms) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
|
||||||
|
db_cursor.execute(queryInsAlarm, [3, tool, unit, date_time, ph_value, node_num, 1, "pH", ph_uno_email, ph_uno_sms])
|
||||||
|
db_conn.commit()
|
||||||
|
|
||||||
|
def getDataFromCsv(pathFile):
    try:
        folder_path, file_with_extension = os.path.split(pathFile)
        file_name, _ = os.path.splitext(file_with_extension)  # tool name
        serial_number = file_name.split("_")[0]

        query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
        query_node_depth = "SELECT depth, t.soglie, n.num as node_num FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"

        db_config = read_db_config()
        conn = MySQLConnection(**db_config)
        cursor = conn.cursor(dictionary=True)

        cursor.execute(query, [serial_number])
        result = cursor.fetchall()
        unit = result[0]["unit_name"]
        tool = result[0]["tool_name"]

        cursor.execute(query_node_depth, [unit, tool])
        resultNode = cursor.fetchall()
        node_depth = float(resultNode[0]["depth"])  # piezometer node depth

        with open(pathFile, 'r', encoding='ISO-8859-1') as file:
            data = file.readlines()

        data = [row.rstrip() for row in data]
        del data[:10]  # drop the 10 header lines

        for row in data:
            row = row.split(",")
            date_time = datetime.strptime(row[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
            date_time = date_time.split(" ")
            date = date_time[0]
            time = date_time[1]
            temperature_unit = float(row[2])
            battery_perc = float(row[3])
            pressure_baro = float(row[4]) * 1000  # kPa -> Pa for elab->pressure
            conductivity = float(row[6])
            ph = float(row[11])
            temperature_piezo = float(row[14])
            pressure = float(row[16]) * 1000  # kPa -> Pa
            depth = (node_depth * -1) + float(row[17])  # added to the node elevation (node depth taken as negative)

            # Node 1: barometric pressure
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 1, date, time, battery_perc, temperature_unit, pressure_baro])
            cursor.execute(queryInsElab, [unit, tool, 1, date, time, pressure_baro])
            conn.commit()

            # Node 2: conductivity
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 2, date, time, battery_perc, temperature_unit, conductivity])
            cursor.execute(queryInsElab, [unit, tool, 2, date, time, conductivity])
            conn.commit()

            # Node 3: pH, with threshold check
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 3, date, time, battery_perc, temperature_unit, ph])
            cursor.execute(queryInsElab, [unit, tool, 3, date, time, ph])
            conn.commit()
            checkSogliePh(conn, cursor, unit, tool, resultNode[0]["node_num"], date_time[0]+" "+date_time[1], ph, resultNode[0]["soglie"])

            # Node 4: piezometer temperature, water level and pressure
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 4, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
            cursor.execute(queryInsElab, [unit, tool, 4, date, time, temperature_piezo, depth, pressure])
            conn.commit()

    except Error as e:
        print('Error:', e)

def main():
    getDataFromCsv(sys.argv[1])

if __name__ == '__main__':
    main()
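
For reference, a hedged sketch of the VuLink CSV record layout that getDataFromCsv() consumes: the column indices and the kPa-to-Pa conversions are taken from the script itself, while the helper name parse_vulink_row and the synthetic sample row are assumptions made only for illustration.

# Illustrative only: mirrors the field mapping in getDataFromCsv() above.
def parse_vulink_row(row, node_depth):
    fields = row.split(",")
    return {
        "date_time":         fields[1],                    # 'YYYY/MM/DD HH:MM'
        "temperature_unit":  float(fields[2]),             # logger temperature
        "battery_perc":      float(fields[3]),             # battery %
        "pressure_baro":     float(fields[4]) * 1000,      # kPa -> Pa
        "conductivity":      float(fields[6]),
        "ph":                float(fields[11]),
        "temperature_piezo": float(fields[14]),            # piezometer temperature
        "pressure":          float(fields[16]) * 1000,     # kPa -> Pa
        "depth":             (node_depth * -1) + float(fields[17]),  # water level vs. node depth
    }

# Example with a made-up record:
sample = "0,2024/01/01 12:00,21.5,87.0,101.3,0,450.0,0,0,0,0,7.8,0,0,12.4,0,98.7,3.25"
print(parse_vulink_row(sample, 10.0))

The script is invoked with the CSV path as its only argument (sys.argv[1]); the part of the file name before the first underscore is the serial number used to look up the unit/tool pair in vulink_tools.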
96
vulinkScript_old.py
Executable file
@@ -0,0 +1,96 @@
#!/usr/bin/env python3
import sys
import os
from mysql.connector import MySQLConnection, Error
from dbconfig import read_db_config
from datetime import datetime


def insertData(dati):
    #print(dati)
    if dati != "null":
        query = "INSERT INTO ELABDATANESA(UnitName, ToolNameID, NodeNum, EventTimestamp, dataJSON) " \
                "VALUES(%s,%s,%s,%s,%s)"

        try:
            db_config = read_db_config()
            conn = MySQLConnection(**db_config)

            cursor = conn.cursor()
            cursor.executemany(query, dati)

            conn.commit()
        except Error as e:
            print('Error:', e)

        finally:
            # Run the compiled MATLAB post-processing for this unit/tool pair
            os.system("cd /usr/local/matlab_func/; ./run_Tilt_lnx.sh /usr/local/MATLAB/MATLAB_Runtime/v93/ "+dati[0][0]+" "+dati[0][1]+"")
            cursor.close()
            conn.close()

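# Illustrative only (not part of this commit): insertData() expects `dati` as a
# list of 5-item rows matching the ELABDATANESA columns, with dati[0][0] and
# dati[0][1] reused as the unit/tool arguments of the MATLAB call. The sample
# values below are made up.
# sample_dati = [
#     ("UNIT01", "TOOL01", 1, "2024-01-01 12:00:00", '{"tilt_x": 0.12}'),
#     ("UNIT01", "TOOL01", 2, "2024-01-01 12:00:00", '{"tilt_x": 0.07}'),
# ]
# insertData(sample_dati)
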
def getDataFromCsv(pathFile):
    try:
        folder_path, file_with_extension = os.path.split(pathFile)
        file_name, _ = os.path.splitext(file_with_extension)  # tool name
        serial_number = file_name.split("_")[0]

        query = "SELECT unit_name, tool_name FROM vulink_tools WHERE serial_number=%s"
        query_node_depth = "SELECT depth FROM ase_lar.nodes as n left join tools as t on n.tool_id=t.id left join units as u on u.id=t.unit_id where u.name=%s and t.name=%s and n.nodetype_id=2"

        db_config = read_db_config()
        conn = MySQLConnection(**db_config)
        cursor = conn.cursor()

        cursor.execute(query, [serial_number])
        result = cursor.fetchall()
        unit = result[0][0]
        tool = result[0][1]

        cursor.execute(query_node_depth, [unit, tool])
        resultNode = cursor.fetchall()
        node_depth = float(resultNode[0][0])  # piezometer node depth

        with open(pathFile, 'r', encoding='ISO-8859-1') as file:
            data = file.readlines()

        data = [row.rstrip() for row in data]
        del data[:10]  # drop the 10 header lines

        for row in data:
            row = row.split(",")
            date_time = datetime.strptime(row[1], '%Y/%m/%d %H:%M').strftime('%Y-%m-%d %H:%M')
            date_time = date_time.split(" ")
            date = date_time[0]
            time = date_time[1]
            temperature_unit = float(row[2])
            battery_perc = float(row[3])
            pressure_baro = float(row[4]) * 1000  # kPa -> Pa for elab->pressure
            conductivity = float(row[6])
            ph = float(row[11])
            temperature_piezo = float(row[14])
            pressure = float(row[16]) * 1000  # kPa -> Pa
            depth = (node_depth * -1) + float(row[17])  # added to the node elevation (node depth taken as negative)

            # Node 1: barometric pressure
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, pressure) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 1, date, time, battery_perc, temperature_unit, pressure_baro])
            cursor.execute(queryInsElab, [unit, tool, 1, date, time, pressure_baro])

            # Node 2: conductivity
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 2, date, time, battery_perc, temperature_unit, conductivity])
            cursor.execute(queryInsElab, [unit, tool, 2, date, time, conductivity])

            # Node 3: pH
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, XShift) VALUES(%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 3, date, time, battery_perc, temperature_unit, ph])
            cursor.execute(queryInsElab, [unit, tool, 3, date, time, ph])

            # Node 4: piezometer temperature, water level and pressure
            queryInsRaw = "INSERT IGNORE INTO RAWDATACOR(UnitName, ToolNameID, NodeNum, EventDate, EventTime, BatLevel, Temperature, Val0, Val1, Val2) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            queryInsElab = "INSERT IGNORE INTO ELABDATADISP(UnitName, ToolNameID, NodeNum, EventDate, EventTime, T_node, water_level, pressure) VALUES(%s,%s,%s,%s,%s,%s,%s,%s)"
            cursor.execute(queryInsRaw, [unit, tool, 4, date, time, battery_perc, temperature_unit, temperature_piezo, depth, pressure])
            cursor.execute(queryInsElab, [unit, tool, 4, date, time, temperature_piezo, depth, pressure])
            conn.commit()

    except Error as e:
        print('Error:', e)

def main():
    getDataFromCsv(sys.argv[1])

if __name__ == '__main__':
    main()
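
Both scripts rely on read_db_config() from dbconfig, which is not included in this commit. A minimal sketch of the kind of helper they assume is shown below; the config.ini filename, the [mysql] section and its keys are guesses, not something defined by this repository.

# Assumed helper, not part of this commit: a configparser-based read_db_config()
# returning the keyword arguments that MySQLConnection(**db_config) expects.
from configparser import ConfigParser

def read_db_config(filename='config.ini', section='mysql'):
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section {0} not found in {1}'.format(section, filename))
    return dict(parser.items(section))  # e.g. host, database, user, password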