bulk insert

2024-11-10 16:45:43 +01:00
parent afd028b794
commit 63d26b35c3
3 changed files with 36 additions and 15 deletions

.gitignore vendored Normal file (+14)

@@ -0,0 +1,14 @@
.python-version
DT0028_20241106044856.csv
DT0029_20241106044856.csv
DT0030_20241106104859.csv
input_file.txt
README.md
uv.lock
csv_ase.egg-info/dependency_links.txt
csv_ase.egg-info/PKG-INFO
csv_ase.egg-info/requires.txt
csv_ase.egg-info/SOURCES.txt
csv_ase.egg-info/top_level.txt
dist/csv_ase-0.1.0-py3-none-any.whl
dist/csv_ase-0.1.0.tar.gz

.vscode/settings.json vendored Normal file (+3)

@@ -0,0 +1,3 @@
{
"CodeGPT.apiKey": "Ollama"
}


@@ -20,6 +20,7 @@ if len(lines) >= 7:
 else:
     raise ValueError("Il file non contiene abbastanza righe per estrarre i dati richiesti.")
+records = []
 # Process the data rows from line 8 onward
 for line in lines[7:]:
     # Strip any whitespace or newline characters
@@ -41,7 +42,6 @@ for line in lines[7:]:
     nodes = parts[2:]
     node_list = []
-    records = []
     for i, node_data in enumerate(nodes, start=1):
         # Split each node into values separated by ";"
         node_values = node_data.split(';')
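
Hunks 1 and 2 are two halves of the same fix: `records = []` moves out of the per-line loop, so rows accumulate across the whole file instead of being reset on every line. A minimal sketch of the resulting control flow (parsing elided):

records = []                  # created once, before the loop (hunk 1)
for line in lines[7:]:
    # ... parse the line into a `record` dict as in the full script ...
    records.append(record)    # rows now accumulate across every input line
# the single bulk upsert in the next hunk then covers all of `records`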
@@ -71,22 +71,26 @@ for line in lines[7:]:
         "temperature": float(measurements.split(';')[1]),
         "nodes_jsonb": json.dumps(node_list)  # Convert the list of dicts to a JSON string
     }
     records.append(record)
-    # Run the upsert as a SQL statement
-    with engine.connect() as conn:
-        with conn.execution_options(isolation_level='AUTOCOMMIT'):
-            result = conn.execute(text("""
-                INSERT INTO dataraw (tipo_centralina, unit, ip_centralina, path, ip_gateway, event_timestamp, battery_level, temperature, nodes_jsonb)
-                VALUES (:tipo_centralina, :unit, :ip_centralina, :path, :ip_gateway, :event_timestamp, :battery_level, :temperature, :nodes_jsonb)
-                ON CONFLICT ON CONSTRAINT dataraw_unique
-                DO UPDATE SET
-                    path = EXCLUDED.path,
-                    ip_gateway = EXCLUDED.ip_gateway,
-                    battery_level = EXCLUDED.battery_level,
-                    temperature = EXCLUDED.temperature,
-                    nodes_jsonb = EXCLUDED.nodes_jsonb;
-            """), records)
-            print(result)
+with engine.connect() as conn:
+    conn.execute(text("""
+        INSERT INTO dataraw (tipo_centralina, unit, ip_centralina, path, ip_gateway, event_timestamp, battery_level, temperature, nodes_jsonb)
+        VALUES
+        """ + ",".join([
+            f"(:{i}_tipo_centralina, :{i}_unit, :{i}_ip_centralina, :{i}_path, :{i}_ip_gateway, :{i}_event_timestamp, :{i}_battery_level, :{i}_temperature, :{i}_nodes_jsonb)"
+            for i in range(len(records))
+        ]) + """
+        ON CONFLICT ON CONSTRAINT dataraw_unique
+        DO UPDATE SET
+            path = EXCLUDED.path,
+            ip_gateway = EXCLUDED.ip_gateway,
+            battery_level = EXCLUDED.battery_level,
+            temperature = EXCLUDED.temperature,
+            nodes_jsonb = EXCLUDED.nodes_jsonb;
+    """), {f"{i}_{key}": value for i, record in enumerate(records) for key, value in record.items()})
+    conn.commit()
 print("Tutte le righe del file sono state caricate con successo nella tabella PostgreSQL!")