readjust path

2022-01-23 16:37:30 +01:00
parent dc58a0efbb
commit 8a2d4d3ee6
3 changed files with 115 additions and 147 deletions

View File

@@ -3,7 +3,7 @@
 import sys
 import os
 import shutil
-import ssl
+# import ssl
 import pika
 import re
 import logging
@@ -42,13 +42,11 @@ def send_mail(sev, msg, cfg):
         conn.sendmail(cfg.sender, cfg.receivers, msg.as_string())
     except SMTPAuthenticationError:
         logging.error(
-            "PID {:>5} >> Mail failed: {}.".format(
-                os.getpid(), "SMTP authentication error"
-            )
+            "Mail failed: {}.".format("SMTP authentication error")
         )
     except:
         logging.info(
-            "PID {:>5} >> Mail failed: {}.".format(os.getpid(), "CUSTOM_ERROR")
+            "Mail failed: {}.".format("CUSTOM_ERROR")
         )
     finally:
         conn.quit()
@@ -82,14 +80,10 @@ class mq:
                 ),
             )
             logging.info(
-                "PID {:>5} >> write message {} in queue".format(
-                    os.getpid(), msg)
-            )
+                "Write message {} in queue".format(msg))
         except:
             logging.error(
-                "PID {:>5} >> error write message {} in queue".format(
-                    os.getpid(), msg)
-            )
+                "Error write message {} in queue".format(msg))

     def close(self):
         self.channel.close()
@@ -150,142 +144,120 @@ class ASEHandler(FTPHandler):
         if not os.stat(file).st_size:
             os.remove(file)
             logging.info(
-                "PID {:>5} >> file {} was empty: removed.".format(
-                    os.getpid(), file)
-            )
+                "File {} was empty: removed.".format(file))
         else:
             cfg = self.cfg
             path, filenameExt = os.path.split(file)
             filename, fileExtension = os.path.splitext(filenameExt)
-            if m := re.match(
-                r"^(G\d\d\d)_(ID\d\d\d\d)_(DT\d\d\d\d)_(\d\d)(\d\d)(\d\d\d\d)(\d\d)(\d\d)(\d\d)$",
-                filename,
-                re.I,
-            ):
-                unitType = m.group(1).upper()
-                unitName = m.group(2).upper()
-                toolName = m.group(3).upper()
-                toolType = "N/A"
-                fileDate = m.group(6) + "/" + m.group(5) + "/" + m.group(4)
-                fileTime = m.group(7) + ":" + m.group(8) + ":" + m.group(9)
-            elif re.match(
-                r"^(\d\d_\d\d\d\d|)(DT\d\d\d\d|LOC\d\d\d\d|GD\d\d\d\d)$", filename, re.I
-            ):
-                with open(file, "r") as fileCsv:
-                    try:
-                        for i, line in enumerate(fileCsv.readlines(4096), 1):
-                            if m1 := re.match(
-                                r"^(File Creation Date:\s)?(\d*\/\d*\/\d*)\s(\d*:\d*:\d*)\;*\n?$",
-                                line,
-                                re.I,
-                            ):
-                                fileDate = m1.group(2)
-                                fileTime = m1.group(3)
-                            elif m2 := re.match(
-                                r"^(\w+\d+)\s(\w+\d+)\;*\n?$",
-                                line,
-                                re.I,
-                            ):
-                                unitType = m2.group(1).upper()
-                                unitName = m2.group(2).upper()
-                            elif m3 := re.match(
-                                r"^SD path: a:\/\w+\/(\w+)(?:\.\w+)?\/*(\w*)(?:\.\w+)?\;*\n?$",
-                                line,
-                                re.I,
-                            ):
-                                if m3.group(2):
-                                    toolType = m3.group(1).upper()
-                                    toolName = m3.group(2).upper()
-                                else:
-                                    toolType = "".join(
-                                        re.findall("^[a-zA-Z]+", m3.group(1))
-                                    ).upper()
-                                    toolName = m3.group(1).upper()
-                                break
-                    except:
-                        logging.error(
-                            "PID {:>5} >> Error: {}.".format(
-                                os.getpid(), sys.exc_info()[1]
-                            )
-                        )
-                    fileCsv.close
-            logging.info(
-                "PID {:>5} >> {} - {} - {} - {} - {} {}.".format(
-                    os.getpid(),
-                    unitType,
-                    unitName,
-                    toolName,
-                    toolType,
-                    df.dateFmt(fileDate),
-                    fileTime,
-                )
-            )
-            newPath = cfg.csvfs + self.username + "/received/" + unitName.upper() + "/"
-            newFilename = (
-                newPath + filename + "_" +
-                str(ts.timestamp("tms") + fileExtension)
-            )
-            fileRenamed = file + "_" + str(ts.timestamp("tms"))
-            os.rename(file, fileRenamed)
-            try:
-                os.makedirs(newPath)
-                logging.info(
-                    "PID {:>5} >> path {} created.".format(
-                        os.getpid(), newPath)
-                )
-            except FileExistsError:
-                logging.info(
-                    "PID {:>5} >> path {} already exists.".format(
-                        os.getpid(), newPath)
-                )
-            try:
-                shutil.move(fileRenamed, newFilename)
-                logging.info(
-                    "PID {:>5} >> {} moved into {}.".format(
-                        os.getpid(), filenameExt, newFilename
-                    )
-                )
-            except OSError:
-                logging.error(
-                    "PID {:>5} >> Error to move {} into {}.".format(
-                        os.getpid(), filenameExt, newFilename
-                    )
-                )
-                send_mail(
-                    "Error", "OS error move " + filenameExt + " to " + newFilename, cfg
-                )
-            mq_message = "{};{};{};{};{};{};{}".format(
-                unitType,
-                unitName,
-                toolName,
-                toolType,
-                df.dateFmt(fileDate),
-                fileTime,
-                newFilename,
-            )
-            try:
-                queue = mq(cfg)
-                queue.write(mq_message, cfg)
-                logging.info(
-                    "PID {:>5} >> queue message: {}.".format(
-                        os.getpid(), mq_message)
-                )
-            except:
-                logging.error(
-                    "PID {:>5} >> Error to put message in queue: {}.".format(
-                        os.getpid(), mq_message
-                    )
-                )
-                send_mail(
-                    "Error", "Error to put message " + mq_message + " in queue.", cfg
-                )
-            finally:
-                queue.close()
+            if (fileExtension.upper() in (cfg.fileext)):
+                if m := re.match(
+                    r"^(G\d\d\d)_(ID\d\d\d\d)_(DT\d\d\d\d)_(\d\d)(\d\d)(\d\d\d\d)(\d\d)(\d\d)(\d\d)$",
+                    filename,
+                    re.I,
+                ):
+                    unitType = m.group(1).upper()
+                    unitName = m.group(2).upper()
+                    toolName = m.group(3).upper()
+                    toolType = "N/A"
+                    fileDate = m.group(6) + "/" + m.group(5) + "/" + m.group(4)
+                    fileTime = m.group(7) + ":" + m.group(8) + ":" + m.group(9)
+                elif re.match(
+                    r"^(\d\d_\d\d\d\d|)(DT\d\d\d\d|LOC\d\d\d\d|GD\d\d\d\d)$", filename, re.I
+                ):
+                    with open(file, "r") as fileCsv:
+                        try:
+                            for i, line in enumerate(fileCsv.readlines(4096), 1):
+                                if m1 := re.match(
+                                    r"^(File Creation Date:\s)?(\d*\/\d*\/\d*)\s(\d*:\d*:\d*)\;*\n?$",
+                                    line,
+                                    re.I,
+                                ):
+                                    fileDate = m1.group(2)
+                                    fileTime = m1.group(3)
+                                elif m2 := re.match(
+                                    r"^(\w+\d+)\s(\w+\d+)\;*\n?$",
+                                    line,
+                                    re.I,
+                                ):
+                                    unitType = m2.group(1).upper()
+                                    unitName = m2.group(2).upper()
+                                elif m3 := re.match(
+                                    r"^SD path: a:\/\w+\/(\w+)(?:\.\w+)?\/*(\w*)(?:\.\w+)?\;*\n?$",
+                                    line,
+                                    re.I,
+                                ):
+                                    if m3.group(2):
+                                        toolType = m3.group(1).upper()
+                                        toolName = m3.group(2).upper()
+                                    else:
+                                        toolType = "".join(
+                                            re.findall(
+                                                "^[a-zA-Z]+", m3.group(1))
+                                        ).upper()
+                                        toolName = m3.group(1).upper()
+                                    break
+                        except:
+                            logging.error(
+                                "Error: {}.".format(sys.exc_info()[1]))
+                        fileCsv.close
+                logging.info(
+                    "{} - {} - {} - {} - {} {}.".format(
+                        unitType,
+                        unitName,
+                        toolName,
+                        toolType,
+                        df.dateFmt(fileDate),
+                        fileTime,
+                    )
+                )
+                newPath = cfg.csvfs + "/" + self.username + "/received/" + \
+                    unitName.upper() + "/"
+                newFilename = (
+                    newPath + filename + "_" +
+                    str(ts.timestamp("tms") + fileExtension)
+                )
+                fileRenamed = file + "_" + str(ts.timestamp("tms"))
+                os.rename(file, fileRenamed)
+                try:
+                    os.makedirs(newPath)
+                    logging.info("Path {} created.".format(newPath))
+                except FileExistsError:
+                    logging.info("Path {} already exists.".format(newPath))
+                try:
+                    shutil.move(fileRenamed, newFilename)
+                    logging.info("{} moved into {}.".format(
+                        filenameExt, newFilename))
+                except OSError:
+                    logging.error("Error to move {} into {}.".format(
+                        filenameExt, newFilename))
+                    send_mail(
+                        "Error", "OS error move " + filenameExt + " to " + newFilename, cfg
+                    )
+                mq_message = "{};{};{};{};{};{};{}".format(
+                    unitType,
+                    unitName,
+                    toolName,
+                    toolType,
+                    df.dateFmt(fileDate),
+                    fileTime,
+                    newFilename,
+                )
+                try:
+                    queue = mq(cfg)
+                    queue.write(mq_message, cfg)
+                    logging.info("Queue message: {}.".format(mq_message))
+                except:
+                    logging.error(
+                        "Error to put message in queue: {}.".format(mq_message))
+                    send_mail(
+                        "Error", "Error to put message " + mq_message + " in queue.", cfg
+                    )
+                finally:
+                    queue.close()

     def on_incomplete_file_received(self, file):
         # remove partially uploaded files
@@ -316,11 +288,7 @@ class ASEHandler(FTPHandler):
                 (user, hash, cfg.virtpath + user, 'elmw'))
             con.commit()
             con.close()
-            logging.info(
-                "PID {:>5} >> User {} created.".format(
-                    os.getpid(), user
-                )
-            )
+            logging.info("User {} created.".format(user))
             self.respond('200 SITE ADDU successful.')
         except:
             self.respond('501 SITE ADDU failed.')
@@ -339,11 +307,8 @@ class ASEHandler(FTPHandler):
             cur.execute("DELETE FROM virtusers WHERE user = ?", (user,))
             con.commit()
             con.close()
-            logging.info(
-                "PID {:>5} >> User {} deleted.".format(
-                    os.getpid(), user
-                )
-            )
+            logging.info("User {} deleted.".format(user))
+            # self.push(' The user path has not been removed!\r\n')
             self.respond('200 SITE DELU successful.')
         except:
@@ -361,7 +326,7 @@ class ASEHandler(FTPHandler):
             self.push("214-The following virtual users are defined:\r\n")
             for row in cur.execute("SELECT * FROM virtusers").fetchall():
                 users_list.append(
-                    " Username: " + row[0] + " - Perms: " + row[3] + "\r\n")
+                    " Username: " + row[0] + "\tPerms: " + row[3] + "\r\n")
             con.close()
             self.push(''.join(users_list))
             self.respond("214 LSTU SITE command successful.")
@@ -393,8 +358,7 @@ def main():
         server.serve_forever()
     except KeyboardInterrupt:
         logging.info(
-            "PID {:>5} >> Info: {}.".format(
-                os.getpid(), "Shutdown requested...exiting")
+            "Info: {}.".format("Shutdown requested...exiting")
         )
     except Exception:
         print(
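
The timestamped-filename branch shown above pulls every field it needs straight out of the upload's name. Below is a minimal standalone sketch of that regex in Python 3.8+ (walrus operator, as in the handler); the sample filename and the assumption that the trailing digits are ddmmyyyyHHMMSS are illustrative only, not taken from the commit.

import re

# Hypothetical sample name: <unitType>_<unitName>_<toolName>_<ddmmyyyyHHMMSS>
sample = "G001_ID0002_DT0003_24012022153000"

if m := re.match(
    r"^(G\d\d\d)_(ID\d\d\d\d)_(DT\d\d\d\d)_(\d\d)(\d\d)(\d\d\d\d)(\d\d)(\d\d)(\d\d)$",
    sample,
    re.I,
):
    unitType = m.group(1).upper()                                 # "G001"
    unitName = m.group(2).upper()                                 # "ID0002"
    toolName = m.group(3).upper()                                 # "DT0003"
    fileDate = m.group(6) + "/" + m.group(5) + "/" + m.group(4)   # "2022/01/24"
    fileTime = m.group(7) + ":" + m.group(8) + ":" + m.group(9)   # "15:30:00"
    print(unitType, unitName, toolName, fileDate, fileTime)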

View File

@@ -1,13 +1,16 @@
+# to generete adminuser password hash:
+# python3 -c 'from hashlib import md5;print(md5("????admin-password???".encode("UTF-8")).hexdigest())'
 [ftpserver]
 firstPort = 40000
 logFilename = ./ftppylog.log
 proxyAddr = 0.0.0.0
-portRangeWidth = 50
+portRangeWidth = 500
 virtusersdb = /home/alex/aseftp/virtusers.db
 virtpath = /home/alex/aseftp/
 adminuser = admin|c8cf955bd8b8a78419013b831e627eb2|/home/alex/aseftp/|elradfmwMT
 servertype = FTPHandler
 certfile = /home/alex/aseftp/keycert.pem
+fileext = .CSV|.txt
 #servertype = FTPHandler/TLS_FTPHandler
@@ -22,7 +25,7 @@
 bbbbb
 ccccc
 subject = ciao a domani
-debug = 1
+debug = 0

 [mqserver]
 hostname = 192.168.1.241
@@ -33,7 +36,7 @@
 elabQueue = elab_queue

 [csvfs]
-path = /home/
+path = /home/alex/aseftp/csvfs/

 [csvelab]
 logFilename = csvElab.log

View File

@@ -18,6 +18,7 @@ class config:
         self.adminuser = c.get("ftpserver", "adminuser").split("|")
         self.servertype = c.get("ftpserver", "servertype")
         self.certfile = c.get("ftpserver", "certfile")
+        self.fileext = c.get("ftpserver", "fileext").upper().split("|")

         # MAIL setting
         self.smtphost = c.get("mailserver", "hostname")
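
For reference, a minimal sketch of how the new fileext setting ties the three files together: the .ini file declares the allowed extensions, config.py upper-cases and splits them on "|", and the FTP handler only processes uploads whose extension lands in that list. The raw value below mirrors the committed default; the sample filename is made up.

import os

raw = ".CSV|.txt"                          # as added to the [ftpserver] section
fileext = raw.upper().split("|")           # as in config.py -> ['.CSV', '.TXT']

filename, fileExtension = os.path.splitext("ID0002_report.csv")  # hypothetical upload
if fileExtension.upper() in fileext:       # the handler's new extension guard
    print("process", filename + fileExtension)
else:
    print("skip", filename + fileExtension)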