python/FtpCsvReceiver.py
#!/usr/bin/python3.8
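"""FTP receiver for CSV uploads.

Runs a pyftpdlib FTP server against Unix system accounts and, for each uploaded
file: discards empty or partial uploads, extracts unit/tool metadata from the
filename or from the first lines of the CSV, archives the file under a
per-user/per-unit directory, and publishes a summary message to an AMQP
(RabbitMQ) queue via pika. Failures are reported by e-mail over SMTP_SSL.
"""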
import sys
import os
import shutil
import ssl
import pika
import re
import logging
from smtplib import SMTP_SSL as SMTP, SMTPException, SMTPAuthenticationError
from email.mime.text import MIMEText
from asebat.timefmt import timestamp_fmt as ts
from asebat.timefmt import date_refmt as df
from asebat.config import set_config as setting
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.authorizers import UnixAuthorizer
from pyftpdlib.filesystems import UnixFilesystem
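# send_mail: builds a MIMEText notification from the configured template and
# delivers it over SMTP_SSL; authentication and delivery errors are logged.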
def send_mail(sev, msg, cfg):
    msg = MIMEText(cfg.message + "\n" + msg)
    msg["Subject"] = cfg.subject + " " + sev
    msg["From"] = cfg.sender
    msg["To"] = cfg.receivers
    conn = SMTP(host=cfg.smtphost,
                port=cfg.smtpport,
                local_hostname=None,
                timeout=5,
                source_address=None)
    conn.set_debuglevel(cfg.debuglevel)
    try:
        conn.login(cfg.sender, cfg.password)
        conn.sendmail(cfg.sender, cfg.receivers, msg.as_string())
    except SMTPAuthenticationError:
        logging.error("PID {:>5} >> Mail failed: {}.".format(
            os.getpid(), "SMTP authentication error"))
    except Exception:
        logging.error("PID {:>5} >> Mail failed: {}.".format(
            os.getpid(), sys.exc_info()[1]))
    finally:
        conn.quit()
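# mq: thin wrapper around a blocking pika (AMQP) connection that publishes
# persistent messages to the configured CSV queue.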
class mq():
    def __init__(self, cfg):
        parameters = pika.URLParameters('amqp://' + cfg.mquser + ':' +
                                        cfg.mqpass + '@' + cfg.mqhost + ':' +
                                        cfg.mqport + '/%2F')
        connection = pika.BlockingConnection(parameters)
        self.channel = connection.channel()
        self.channel.queue_declare(queue=cfg.csv_queue, durable=True)

    def write(self, msg, cfg):
        try:
            self.channel.basic_publish(
                exchange='',
                routing_key=cfg.csv_queue,
                body=msg,
                properties=pika.BasicProperties(
                    delivery_mode=2,  # make message persistent
                ))
            logging.info("PID {:>5} >> wrote message {} to queue".format(
                os.getpid(), msg))
        except Exception:
            logging.error(
                "PID {:>5} >> error writing message {} to queue".format(
                    os.getpid(), msg))

    def close(self):
        self.channel.close()
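# ASEHandler extends pyftpdlib's FTPHandler: on_file_received parses the
# uploaded CSV's metadata, archives the file and enqueues a summary message;
# on_incomplete_file_received discards partial uploads.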
class ASEHandler(FTPHandler):
    def on_file_received(self, file):
        unitType = ''
        unitName = ''
        toolName = ''
        toolType = ''
        fileDate = ''
        fileTime = ''
        queue = None
        if not os.stat(file).st_size:
            os.remove(file)
            logging.info("PID {:>5} >> file {} was empty: removed.".format(
                os.getpid(), file))
        else:
            cfg = self.cfg
            path, filenameExt = os.path.split(file)
            filename, fileExtension = os.path.splitext(filenameExt)
            # Case 1: all metadata is encoded in the filename
            # (Gnnn_IDnnnn_DTnnnn_ddmmyyyyHHMMSS).
            if (m := re.match(
                    r"^(G\d\d\d)_(ID\d\d\d\d)_(DT\d\d\d\d)_(\d\d)(\d\d)(\d\d\d\d)(\d\d)(\d\d)(\d\d)$",
                    filename,
                    re.I,
            )):
                unitType = m.group(1).upper()
                unitName = m.group(2).upper()
                toolName = m.group(3).upper()
                toolType = "N/A"
                fileDate = m.group(6) + "/" + m.group(5) + "/" + m.group(4)
                fileTime = m.group(7) + ":" + m.group(8) + ":" + m.group(9)
            # Case 2: metadata must be read from the first lines of the CSV.
            elif re.match(
                    r"^(\d\d_\d\d\d\d|)(DT\d\d\d\d|LOC\d\d\d\d|GD\d\d\d\d)$",
                    filename, re.I):
                with open(file, "r") as fileCsv:
                    try:
                        for i, line in enumerate(fileCsv.readlines(4096), 1):
                            if (m1 := re.match(
                                    r"^(File Creation Date:\s)?(\d*\/\d*\/\d*)\s(\d*:\d*:\d*)\;*\n?$",
                                    line,
                                    re.I,
                            )):
                                fileDate = m1.group(2)
                                fileTime = m1.group(3)
                            elif (m2 := re.match(
                                    r"^(\w+\d+)\s(\w+\d+)\;*\n?$",
                                    line,
                                    re.I,
                            )):
                                unitType = m2.group(1).upper()
                                unitName = m2.group(2).upper()
                            elif (m3 := re.match(
                                    r"^SD path: a:\/\w+\/(\w+)(?:\.\w+)?\/*(\w*)(?:\.\w+)?\;*\n?$",
                                    line, re.I)):
                                if m3.group(2):
                                    toolType = m3.group(1).upper()
                                    toolName = m3.group(2).upper()
                                else:
                                    toolType = "".join(
                                        re.findall("^[a-zA-Z]+",
                                                   m3.group(1))).upper()
                                    toolName = m3.group(1).upper()
                                break
                    except Exception:
                        logging.error("PID {:>5} >> Error: {}.".format(
                            os.getpid(),
                            sys.exc_info()[1]))
            logging.info("PID {:>5} >> {} - {} - {} - {} - {} {}.".format(
                os.getpid(),
                unitType,
                unitName,
                toolName,
                toolType,
                df.dateFmt(fileDate),
                fileTime,
            ))
            # Archive the upload under <csvfs>/<ftp user>/received/<unit>/,
            # appending a timestamp to keep filenames unique.
            newPath = (cfg.csvfs + self.username + "/received/" +
                       unitName.upper() + "/")
            newFilename = (newPath + filename + "_" +
                           str(ts.timestamp("tms")) + fileExtension)
            fileRenamed = file + "_" + str(ts.timestamp("tms"))
            os.rename(file, fileRenamed)
            try:
                os.makedirs(newPath)
                logging.info("PID {:>5} >> path {} created.".format(
                    os.getpid(), newPath))
            except FileExistsError:
                logging.info("PID {:>5} >> path {} already exists.".format(
                    os.getpid(), newPath))
            try:
                shutil.move(fileRenamed, newFilename)
                logging.info("PID {:>5} >> {} moved into {}.".format(
                    os.getpid(), filenameExt, newFilename))
            except OSError:
                logging.error("PID {:>5} >> failed to move {} into {}.".format(
                    os.getpid(), filenameExt, newFilename))
                send_mail(
                    "Error",
                    "OS error moving " + filenameExt + " to " + newFilename,
                    cfg)
            mq_message = "{};{};{};{};{};{};{}".format(
                unitType,
                unitName,
                toolName,
                toolType,
                df.dateFmt(fileDate),
                fileTime,
                newFilename,
            )
            try:
                queue = mq(cfg)
                queue.write(mq_message, cfg)
                logging.info("PID {:>5} >> queue message: {}.".format(
                    os.getpid(), mq_message))
            except Exception:
                logging.error(
                    "PID {:>5} >> failed to put message in queue: {}.".format(
                        os.getpid(), mq_message))
                send_mail("Error",
                          "Failed to put message " + mq_message + " in queue.",
                          cfg)
            finally:
                if queue is not None:
                    queue.close()

    def on_incomplete_file_received(self, file):
        # remove partially uploaded files
        os.remove(file)
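# main: configures authorization, passive port range, masquerade address and
# logging from the asebat config, then serves FTP on port 21 until interrupted.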
def main():
    cfg = setting.config()
    try:
        authorizer = UnixAuthorizer(rejected_users=["root"],
                                    require_valid_shell=True)
        handler = ASEHandler
        handler.cfg = cfg
        handler.authorizer = authorizer
        handler.abstracted_fs = UnixFilesystem
        handler.masquerade_address = cfg.proxyaddr
        _range = list(range(cfg.firstport, cfg.firstport + 20))
        handler.passive_ports = _range
        logging.basicConfig(
            format="%(asctime)s %(message)s",
            filename="/var/log/" + cfg.logfilename,
            level=logging.INFO,
        )
        server = FTPServer(("0.0.0.0", 21), handler)
        server.serve_forever()
    except KeyboardInterrupt:
        logging.info("PID {:>5} >> Info: {}.".format(
            os.getpid(), "Shutdown requested...exiting"))
    except Exception:
        print("{} - PID {:>5} >> Error: {}.".format(ts.timestamp("log"),
                                                    os.getpid(),
                                                    sys.exc_info()[1]))


if __name__ == "__main__":
    main()