ftp idempotente e istanziabile più volte + logging su stdout per promtail

This commit is contained in:
2025-11-01 15:58:02 +01:00
parent 1d7d33df0b
commit 76094f7641
8 changed files with 132766 additions and 71 deletions

114540
logs/non_sysgeo.txt Normal file

File diff suppressed because it is too large Load Diff

17641
logs/sysgeo.txt Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -9,54 +9,20 @@ import os
from hashlib import sha256 from hashlib import sha256
from pathlib import Path from pathlib import Path
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from pyftpdlib.handlers import FTPHandler from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer from pyftpdlib.servers import FTPServer
from utils.authorizers.database_authorizer import DatabaseAuthorizer
from utils.config import loader_ftp_csv as setting from utils.config import loader_ftp_csv as setting
from utils.connect import file_management, user_admin from utils.connect import file_management, user_admin
from utils.database.connection import connetti_db
# Configure logging (moved inside main function) # Configure logging (moved inside main function)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class DummySha256Authorizer(DummyAuthorizer): # Legacy authorizer kept for reference (not used anymore)
"""Custom authorizer that uses SHA256 for password hashing and manages users from a database.""" # The DatabaseAuthorizer is now used for real-time database synchronization
def __init__(self: object, cfg: dict) -> None:
"""Initializes the authorizer, adds the admin user, and loads users from the database.
Args:
cfg: The configuration object.
"""
super().__init__()
self.add_user(cfg.adminuser[0], cfg.adminuser[1], cfg.adminuser[2], perm=cfg.adminuser[3])
# Define the database connection
conn = connetti_db(cfg)
# Create a cursor
cur = conn.cursor()
cur.execute(f"SELECT ftpuser, hash, virtpath, perm FROM {cfg.dbname}.{cfg.dbusertable} WHERE disabled_at IS NULL")
for ftpuser, user_hash, virtpath, perm in cur.fetchall():
# Create the user's directory if it does not exist.
try:
Path(cfg.virtpath + ftpuser).mkdir(parents=True, exist_ok=True)
self.add_user(ftpuser, user_hash, virtpath, perm)
except Exception as e: # pylint: disable=broad-except
self.responde(f"551 Error in create virtual user path: {e}")
def validate_authentication(self: object, username: str, password: str, handler: object) -> None:
# Validate the user's password against the stored user_hash
user_hash = sha256(password.encode("UTF-8")).hexdigest()
try:
if self.user_table[username]["pwd"] != user_hash:
raise KeyError
except KeyError:
raise AuthenticationFailed # noqa: B904
class ASEHandler(FTPHandler): class ASEHandler(FTPHandler):
@@ -143,23 +109,29 @@ def main():
cfg = setting.Config() cfg = setting.Config()
try: try:
# Initialize the authorizer and handler # Configure logging first
authorizer = DummySha256Authorizer(cfg) logging.basicConfig(
format="%(asctime)s - PID: %(process)d.%(name)s.%(levelname)s: %(message)s ",
filename=cfg.logfilename,
level=logging.INFO,
)
# Initialize the authorizer with database support
# This authorizer checks the database on every login, ensuring
# all FTP server instances stay synchronized without restarts
authorizer = DatabaseAuthorizer(cfg)
# Initialize handler
handler = ASEHandler handler = ASEHandler
handler.cfg = cfg handler.cfg = cfg
handler.authorizer = authorizer handler.authorizer = authorizer
handler.masquerade_address = cfg.proxyaddr handler.masquerade_address = cfg.proxyaddr
# Set the range of passive ports for the FTP server # Set the range of passive ports for the FTP server
_range = list(range(cfg.firstport, cfg.firstport + cfg.portrangewidth)) _range = list(range(cfg.firstport, cfg.firstport + cfg.portrangewidth))
handler.passive_ports = _range handler.passive_ports = _range
# Configure logging logger.info(f"Starting FTP server on port {cfg.service_port} with DatabaseAuthorizer")
logging.basicConfig(
format="%(asctime)s - PID: %(process)d.%(name)s.%(levelname)s: %(message)s ",
# Use cfg.logfilename directly without checking its existence
filename=cfg.logfilename,
level=logging.INFO,
)
# Create and start the FTP server # Create and start the FTP server
server = FTPServer(("0.0.0.0", cfg.service_port), handler) server = FTPServer(("0.0.0.0", cfg.service_port), handler)

View File

@@ -68,6 +68,38 @@ def fetch_data_from_db(connection: mysql.connector.MySQLConnection) -> list[tupl
cursor.close() cursor.close()
def fetch_existing_users(connection: mysql.connector.MySQLConnection) -> dict[str, tuple]:
"""
Fetches existing FTP users from virtusers table.
Args:
connection (mysql.connector.MySQLConnection): The database connection object.
Returns:
dict: Dictionary mapping username to (is_enabled, has_matching_password).
is_enabled is True if disabled_at is NULL.
"""
try:
cursor = connection.cursor()
query = """
SELECT ftpuser, disabled_at
FROM ase_lar.virtusers
"""
cursor.execute(query)
results = cursor.fetchall()
# Create dictionary: username -> is_enabled
users_dict = {username: (disabled_at is None) for username, disabled_at in results}
logger.info("Trovati %s utenti esistenti in virtusers", len(users_dict))
return users_dict
except mysql.connector.Error as e:
logger.error("Errore query database virtusers: %s", e)
return {}
finally:
cursor.close()
def send_site_command(ftp: FTP, command: str) -> bool: def send_site_command(ftp: FTP, command: str) -> bool:
""" """
Sends a SITE command to the FTP server. Sends a SITE command to the FTP server.
@@ -90,9 +122,13 @@ def send_site_command(ftp: FTP, command: str) -> bool:
def main(): def main():
""" """
Main function to connect to the database, fetch FTP user data, and send SITE ADDU commands to the FTP server. Main function to connect to the database, fetch FTP user data, and synchronize users to FTP server.
This function is idempotent - it can be run multiple times safely:
- If user exists and is enabled: skips
- If user exists but is disabled: enables it (SITE ENAU)
- If user doesn't exist: creates it (SITE ADDU)
""" """
logger.info("Avvio script caricamento utenti FTP") logger.info("Avvio script caricamento utenti FTP (idempotente)")
cfg = setting.Config() cfg = setting.Config()
# Connessioni # Connessioni
@@ -100,32 +136,58 @@ def main():
ftp_connection = connect_ftp() ftp_connection = connect_ftp()
try: try:
# Preleva dati dal database # Preleva utenti da sincronizzare
data = fetch_data_from_db(db_connection) users_to_sync = fetch_data_from_db(db_connection)
if not data: if not users_to_sync:
logger.warning("Nessun dato trovato nel database") logger.warning("Nessun utente da sincronizzare nel database ftp_accounts")
return return
success_count = 0 # Preleva utenti già esistenti
existing_users = fetch_existing_users(db_connection)
added_count = 0
enabled_count = 0
skipped_count = 0
error_count = 0 error_count = 0
# Processa ogni riga # Processa ogni utente
for row in data: for row in users_to_sync:
username, password = row username, password = row
# Costruisci il comando SITE completo if username in existing_users:
is_enabled = existing_users[username]
if is_enabled:
# Utente già esiste ed è abilitato - skip
logger.info("Utente %s già esiste ed è abilitato - skip", username)
skipped_count += 1
else:
# Utente esiste ma è disabilitato - riabilita
logger.info("Utente %s esiste ma è disabilitato - riabilito con SITE ENAU", username)
ftp_site_command = f"enau {username}"
if send_site_command(ftp_connection, ftp_site_command):
enabled_count += 1
else:
error_count += 1
else:
# Utente non esiste - crea
logger.info("Utente %s non esiste - creazione con SITE ADDU", username)
ftp_site_command = f"addu {username} {password}" ftp_site_command = f"addu {username} {password}"
logger.info("Sending ftp command: %s", ftp_site_command)
# Invia comando SITE
if send_site_command(ftp_connection, ftp_site_command): if send_site_command(ftp_connection, ftp_site_command):
success_count += 1 added_count += 1
else: else:
error_count += 1 error_count += 1
logger.info("Elaborazione completata. Successi: %s, Errori: %s", success_count, error_count) logger.info(
"Elaborazione completata. Aggiunti: %s, Riabilitati: %s, Saltati: %s, Errori: %s",
added_count,
enabled_count,
skipped_count,
error_count
)
except Exception as e: # pylint: disable=broad-except except Exception as e: # pylint: disable=broad-except
logger.error("Errore generale: %s", e) logger.error("Errore generale: %s", e)

View File

@@ -121,7 +121,7 @@ async def load_csv(record: tuple, cfg: object, pool: object) -> bool:
# Cache hit! Use cached module # Cache hit! Use cached module
modulo = _module_cache[module_name] modulo = _module_cache[module_name]
cache_key = module_name cache_key = module_name
logger.debug("Modulo caricato dalla cache: %s", module_name) logger.info("Modulo caricato dalla cache: %s", module_name)
break break
# If not in cache, import dynamically # If not in cache, import dynamically
@@ -133,7 +133,7 @@ async def load_csv(record: tuple, cfg: object, pool: object) -> bool:
# Store in cache for future use # Store in cache for future use
_module_cache[module_name] = modulo _module_cache[module_name] = modulo
cache_key = module_name cache_key = module_name
logger.info("Funzione 'main_loader' caricata dal modulo %s (cached)", module_name) logger.info("Modulo caricato per la prima volta: %s", module_name)
break break
except (ImportError, AttributeError) as e: except (ImportError, AttributeError) as e:
logger.debug( logger.debug(

View File

@@ -0,0 +1,162 @@
"""
Database-backed authorizer for FTP server that checks authentication against database in real-time.
This ensures multiple FTP server instances stay synchronized without needing restarts.
"""
import logging
from hashlib import sha256
from pathlib import Path
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from utils.database.connection import connetti_db
logger = logging.getLogger(__name__)
class DatabaseAuthorizer(DummyAuthorizer):
    """
    Custom authorizer that validates users against the database on every login.

    This approach ensures that:
    - Multiple FTP server instances stay synchronized
    - User changes (add/remove/disable) are reflected immediately
    - No server restart is needed when users are modified
    """

    def __init__(self, cfg: dict) -> None:
        """
        Initializes the authorizer with the admin user only.

        Regular users are validated against the database at login time.

        Args:
            cfg: The configuration object.
        """
        super().__init__()
        self.cfg = cfg
        # Admin credentials live only in memory so the admin can always log
        # in, even when the database is unreachable.
        self.add_user(
            cfg.adminuser[0],  # username
            cfg.adminuser[1],  # password hash
            cfg.adminuser[2],  # home directory
            perm=cfg.adminuser[3],  # permissions
        )
        logger.info("DatabaseAuthorizer initialized with admin user")

    def validate_authentication(self, username: str, password: str, handler: object) -> None:
        """
        Validates user authentication against the database.

        Called by pyftpdlib on every login attempt. Checks, in order:
        1. If user is admin, use in-memory credentials.
        2. Otherwise, query the database for the user's record.
        3. Reject disabled accounts.
        4. Verify the SHA256 password hash matches.

        Args:
            username: The username attempting to login.
            password: The plain-text password provided.
            handler: The FTP handler object (required by the pyftpdlib API).

        Raises:
            AuthenticationFailed: If authentication fails for any reason.
        """
        # Hash the provided password for comparison with the stored hash.
        password_hash = sha256(password.encode("UTF-8")).hexdigest()
        # Admin is validated against the in-memory table only.
        if username == self.cfg.adminuser[0]:
            if self.user_table[username]["pwd"] != password_hash:
                logger.warning("Failed admin login attempt for user: %s", username)
                raise AuthenticationFailed("Invalid credentials")
            return
        # Regular users: look the account up in the database on every login.
        try:
            conn = connetti_db(self.cfg)
            try:
                cur = conn.cursor()
                try:
                    # Table name comes from trusted config; the username is
                    # bound as a parameter to avoid SQL injection.
                    cur.execute(
                        f"SELECT ftpuser, hash, virtpath, perm, disabled_at "
                        f"FROM {self.cfg.dbname}.{self.cfg.dbusertable} WHERE ftpuser = %s",
                        (username,),
                    )
                    result = cur.fetchone()
                finally:
                    cur.close()
            finally:
                # Always release the connection, even when the query fails.
                conn.close()
            if not result:
                logger.warning("Login attempt for non-existent user: %s", username)
                raise AuthenticationFailed("Invalid credentials")
            ftpuser, stored_hash, virtpath, perm, disabled_at = result
            # Reject accounts that have been disabled in the database.
            if disabled_at is not None:
                logger.warning("Login attempt for disabled user: %s", username)
                raise AuthenticationFailed("User account is disabled")
            # Verify the password hash.
            if stored_hash != password_hash:
                logger.warning("Invalid password for user: %s", username)
                raise AuthenticationFailed("Invalid credentials")
            # Authentication successful - ensure the user's home directory exists.
            try:
                Path(virtpath).mkdir(parents=True, exist_ok=True)
            except Exception as e:
                logger.error("Failed to create directory for user %s: %s", username, e)
                raise AuthenticationFailed("System error") from e
            # Refresh the in-memory entry so pyftpdlib uses the CURRENT
            # credentials, home and permissions for this session, even when
            # they changed in the database since the previous login.
            if username in self.user_table:
                self.remove_user(username)
            self.add_user(ftpuser, stored_hash, virtpath, perm)
            logger.info("Successful login for user: %s", username)
        except AuthenticationFailed:
            raise
        except Exception as e:
            logger.error("Database error during authentication for user %s: %s", username, e, exc_info=True)
            raise AuthenticationFailed("System error") from e

    def has_user(self, username: str) -> bool:
        """
        Check if a user exists in the database or in the in-memory table.

        Called by pyftpdlib for various checks; overridden to consult the
        database in addition to the in-memory table.

        Args:
            username: The username to check.

        Returns:
            True if the user exists and is enabled, False otherwise.
        """
        # Check in-memory first (admin and already-authenticated sessions).
        if username in self.user_table:
            return True
        # Check the database for regular users.
        try:
            conn = connetti_db(self.cfg)
            try:
                cur = conn.cursor()
                try:
                    cur.execute(
                        f"SELECT COUNT(*) FROM {self.cfg.dbname}.{self.cfg.dbusertable} "
                        f"WHERE ftpuser = %s AND disabled_at IS NULL",
                        (username,),
                    )
                    count = cur.fetchone()[0]
                finally:
                    cur.close()
            finally:
                conn.close()
            return count > 0
        except Exception as e:
            logger.error("Database error checking user existence for %s: %s", username, e)
            return False

View File

@@ -4,6 +4,7 @@ import logging
import os import os
import signal import signal
from collections.abc import Callable, Coroutine from collections.abc import Callable, Coroutine
from logging.handlers import RotatingFileHandler
from typing import Any from typing import Any
import aiomysql import aiomysql
@@ -33,24 +34,37 @@ class WorkerFormatter(logging.Formatter):
def setup_logging(log_filename: str, log_level_str: str): def setup_logging(log_filename: str, log_level_str: str):
"""Configura il logging globale. """Configura il logging globale con rotation automatica.
Args: Args:
log_filename (str): Percorso del file di log. log_filename (str): Percorso del file di log.
log_level_str (str): Livello di log (es. "INFO", "DEBUG"). log_level_str (str): Livello di log (es. "INFO", "DEBUG").
""" """
logger = logging.getLogger() logger = logging.getLogger()
handler = logging.FileHandler(log_filename)
formatter = WorkerFormatter("%(asctime)s - PID: %(process)d.Worker-%(worker_id)s.%(name)s.%(funcName)s.%(levelname)s: %(message)s") formatter = WorkerFormatter("%(asctime)s - PID: %(process)d.Worker-%(worker_id)s.%(name)s.%(funcName)s.%(levelname)s: %(message)s")
handler.setFormatter(formatter)
# Rimuovi eventuali handler esistenti e aggiungi il nostro # Rimuovi eventuali handler esistenti
if logger.hasHandlers(): if logger.hasHandlers():
logger.handlers.clear() logger.handlers.clear()
logger.addHandler(handler)
# Handler per file con rotation (max 10MB per file, mantiene 5 backup)
file_handler = RotatingFileHandler(
log_filename,
maxBytes=10 * 1024 * 1024, # 10 MB
backupCount=5, # Mantiene 5 file di backup
encoding="utf-8"
)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
# Handler per console (utile per Docker)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
log_level = getattr(logging, log_level_str.upper(), logging.INFO) log_level = getattr(logging, log_level_str.upper(), logging.INFO)
logger.setLevel(log_level) logger.setLevel(log_level)
logger.info("Logging configurato correttamente") logger.info("Logging configurato correttamente con rotation (10MB, 5 backup)")
def setup_signal_handlers(logger: logging.Logger): def setup_signal_handlers(logger: logging.Logger):

304
test_ftp_client.py Executable file
View File

@@ -0,0 +1,304 @@
#!/home/alex/devel/ASE/.venv/bin/python
"""
Script di test per inviare file CSV via FTP al server ftp_csv_receiver.py
Legge gli utenti dalla tabella ftp_accounts e carica i file dalla directory corrispondente.
"""
import logging
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed
from ftplib import FTP
from pathlib import Path
from threading import Lock
import mysql.connector
# Add src directory to Python path
src_path = Path(__file__).parent / "src"
sys.path.insert(0, str(src_path))
from utils.config import users_loader as setting
from utils.database.connection import connetti_db
# Configurazione logging (verrà completata nel main dopo aver creato la directory logs)
logger = logging.getLogger(__name__)
# Configurazione server FTP e path base
FTP_CONFIG = {"host": "localhost", "port": 2121}
BASE_CSV_PATH = Path("/home/alex/Scrivania/archivio_csv")
# Numero di worker paralleli per testare il throughput
MAX_WORKERS = 10 # Modifica questo valore per aumentare/diminuire il parallelismo
# Lock per logging thread-safe
log_lock = Lock()
def fetch_ftp_users(connection: mysql.connector.MySQLConnection) -> list[tuple]:
    """
    Fetches username and password pairs from the ftp_accounts table.

    Args:
        connection: MySQL connection.

    Returns:
        List of (username, password) tuples; empty list on query error.
    """
    # Initialize first so the finally clause is safe even if cursor() raises.
    cursor = None
    try:
        cursor = connection.cursor()
        query = """
            SELECT username, password
            FROM ase_lar.ftp_accounts
            WHERE username IS NOT NULL AND password IS NOT NULL
        """
        cursor.execute(query)
        results = cursor.fetchall()
        logger.info("Prelevati %s utenti dal database", len(results))
        return results
    except mysql.connector.Error as e:
        logger.error("Errore query database: %s", e)
        return []
    finally:
        if cursor is not None:
            cursor.close()
def create_remote_dir(ftp: FTP, remote_dir: str) -> None:
    """
    Recursively creates every directory level of a path on the FTP server.

    Args:
        ftp: Active FTP connection.
        remote_dir: Directory path to create (e.g. "home/ID0354/subdir").
    """
    # Nothing to do for an empty path or the current directory.
    if not remote_dir or remote_dir == ".":
        return
    # Accumulate path segments and create each intermediate level in turn.
    segments: list[str] = []
    for segment in remote_dir.split("/"):
        # Ignore empty segments produced by duplicate or trailing slashes.
        if not segment:
            continue
        segments.append(segment)
        try:
            ftp.mkd("/".join(segments))
        except Exception:  # pylint: disable=broad-except
            # Directory already exists (or another error); keep going.
            pass
def upload_files_for_user(username: str, password: str) -> tuple[str, str, bool, int, int]:
    """
    Uploads every CSV file from the user's local directory via FTP.

    Searches recursively in all subdirectories, handles both .csv and .CSV
    extensions, and recreates the directory structure on the server.

    Args:
        username: FTP username (also the name of the local source directory).
        password: FTP password.

    Returns:
        Tuple of (username, status_message, success, files_uploaded, total_files).
        status_message is one of: 'OK', 'NO_UPLOAD', 'NO_DIR', 'NO_FILES', 'ERROR'.
    """
    user_csv_path = BASE_CSV_PATH / username
    with log_lock:
        logger.info("[%s] Inizio elaborazione", username)
    # The user's local directory must exist.
    if not user_csv_path.exists():
        with log_lock:
            logger.warning("[%s] Directory non trovata: %s", username, user_csv_path)
        return (username, "NO_DIR", False, 0, 0)
    # Collect all CSV files recursively (both lowercase and uppercase extension).
    csv_files = []
    csv_files.extend(user_csv_path.glob("**/*.csv"))
    csv_files.extend(user_csv_path.glob("**/*.CSV"))
    if not csv_files:
        with log_lock:
            logger.warning("[%s] Nessun file CSV trovato in %s", username, user_csv_path)
        return (username, "NO_FILES", False, 0, 0)
    total_files = len(csv_files)
    with log_lock:
        logger.info("[%s] Trovati %s file CSV", username, total_files)
    # FTP connection: the finally clause guarantees the socket is released
    # even when connect/login or an upload raises.
    ftp = FTP()
    try:
        ftp.connect(FTP_CONFIG["host"], FTP_CONFIG["port"])
        ftp.login(username, password)
        with log_lock:
            logger.info("[%s] Connesso al server FTP", username)
        # Upload each CSV file, preserving the local directory structure.
        uploaded = 0
        for csv_file in csv_files:
            try:
                # Path of the file relative to the user's base directory.
                relative_path = csv_file.relative_to(user_csv_path)
                if relative_path.parent != Path("."):
                    # File lives in a subdirectory: recreate it on the server.
                    remote_dir = str(relative_path.parent).replace("\\", "/")
                    create_remote_dir(ftp, remote_dir)
                    remote_file = str(relative_path).replace("\\", "/")
                else:
                    remote_file = csv_file.name
                # Upload the file (spaces in names are handled by ftplib).
                with log_lock:
                    logger.debug("[%s] Caricamento file: '%s'", username, remote_file)
                with open(csv_file, "rb") as f:
                    ftp.storbinary(f"STOR {remote_file}", f)
                with log_lock:
                    logger.info("[%s] File caricato: %s", username, remote_file)
                uploaded += 1
            except Exception as e:  # pylint: disable=broad-except
                # Per-file failures are logged and the remaining files still go out.
                with log_lock:
                    logger.error("[%s] Errore caricamento file %s: %s", username, csv_file.name, e)
        ftp.quit()
        with log_lock:
            logger.info("[%s] Upload completato: %s/%s file caricati", username, uploaded, total_files)
        return (username, "OK" if uploaded > 0 else "NO_UPLOAD", uploaded > 0, uploaded, total_files)
    except Exception as e:  # pylint: disable=broad-except
        with log_lock:
            logger.error("[%s] Errore FTP: %s", username, e)
        # total_files is always bound here: it is computed before the try block.
        return (username, "ERROR", False, 0, total_files)
    finally:
        # Safe even after a successful quit(); close() is then a no-op.
        ftp.close()
def main():
    """
    Entry point: tests the FTP upload pipeline with parallel per-user uploads.

    Reads FTP accounts from the database, uploads each user's CSV directory
    through a thread pool, then prints a categorized summary report.
    """
    # Configure logging with a file inside the local "logs" directory.
    log_dir = Path(__file__).parent / "logs"
    log_dir.mkdir(exist_ok=True)
    log_file = log_dir / "test_ftp_client.log"
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(levelname)s - %(message)s",
        handlers=[
            logging.FileHandler(log_file),
            logging.StreamHandler(),  # Also keep console output
        ],
    )
    logger.info("=== Avvio test client FTP (modalità parallela) ===")
    logger.info("Log file: %s", log_file)
    logger.info("Path base CSV: %s", BASE_CSV_PATH)
    logger.info("Server FTP: %s:%s", FTP_CONFIG["host"], FTP_CONFIG["port"])
    logger.info("Worker paralleli: %s", MAX_WORKERS)
    # Connect to the database.
    cfg = setting.Config()
    db_connection = connetti_db(cfg)
    try:
        # Fetch the FTP users from the database.
        users = fetch_ftp_users(db_connection)
        if not users:
            logger.warning("Nessun utente trovato nel database")
            return
        logger.info("Avvio upload parallelo per %s utenti...", len(users))
        logger.info("")
        # Use a ThreadPoolExecutor for parallel uploads.
        results = []
        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
            # Submit one upload task per user.
            futures = {executor.submit(upload_files_for_user, username, password): username for username, password in users}
            # Collect results as they complete.
            for future in as_completed(futures):
                username = futures[future]
                try:
                    result = future.result()
                    results.append(result)
                except Exception as e:  # pylint: disable=broad-except
                    logger.error("[%s] Eccezione durante l'upload: %s", username, e)
                    results.append((username, "ERROR", False, 0, 0))
        # Analyze the results.
        logger.info("")
        logger.info("=== Test completato ===")
        success_count = sum(1 for _, _, success, _, _ in results if success)
        error_count = len(results) - success_count
        total_uploaded = sum(uploaded for _, _, _, uploaded, _ in results)
        total_files = sum(total for _, _, _, _, total in results)
        # Bucket users by their status code.
        users_no_dir = [username for username, status, _, _, _ in results if status == "NO_DIR"]
        users_no_files = [username for username, status, _, _, _ in results if status == "NO_FILES"]
        users_error = [username for username, status, _, _, _ in results if status == "ERROR"]
        users_ok = [username for username, status, _, _, _ in results if status == "OK"]
        logger.info("Utenti con successo: %s/%s", success_count, len(users))
        logger.info("Utenti con errori: %s/%s", error_count, len(users))
        logger.info("File caricati totali: %s/%s", total_uploaded, total_files)
        # Report: users with no local directory.
        if users_no_dir:
            logger.info("")
            logger.info("=== Utenti senza directory CSV (%s) ===", len(users_no_dir))
            for username in sorted(users_no_dir):
                logger.info("  - %s (directory attesa: %s)", username, BASE_CSV_PATH / username)
        # Report: users whose directory contained no CSV files.
        if users_no_files:
            logger.info("")
            logger.info("=== Utenti con directory vuota (%s) ===", len(users_no_files))
            for username in sorted(users_no_files):
                logger.info("  - %s", username)
        # Report: users that hit FTP errors.
        if users_error:
            logger.info("")
            logger.info("=== Utenti con errori FTP (%s) ===", len(users_error))
            for username in sorted(users_error):
                logger.info("  - %s", username)
        # Per-user detail for successful uploads.
        if users_ok:
            logger.info("")
            logger.info("=== Dettaglio utenti con successo (%s) ===", len(users_ok))
            for username, status, _, uploaded, total in sorted(results):
                if status == "OK":
                    logger.info("[%s] %s/%s file caricati", username, uploaded, total)
    except Exception as e:  # pylint: disable=broad-except
        logger.error("Errore generale: %s", e)
        sys.exit(1)
    finally:
        try:
            db_connection.close()
            logger.info("")
            logger.info("Connessione MySQL chiusa")
        except Exception as e:  # pylint: disable=broad-except
            logger.error("Errore chiusura connessione MySQL: %s", e)
# Script entry point: run the parallel FTP upload test when executed directly.
if __name__ == "__main__":
    main()