#!/usr/bin/env python3
"""Debug what the consolidation groups generator is actually returning."""
import datetime
import sys

sys.path.insert(0, '/home/alex/devel/mysql2postgres')

from src.connectors.mysql_connector import MySQLConnector
from src.utils.logger import setup_logger, get_logger

setup_logger(__name__)
logger = get_logger(__name__)

print("\n" + "=" * 80)
print("Testing consolidation groups generator for d1")
print("=" * 80 + "\n")

with MySQLConnector() as mysql_conn:
    partition = "d1"
    group_num = 0

    # Use datetime objects to match what the generator uses.
    target_key = (
        "ID0003",
        "DT0002",
        datetime.date(2014, 8, 31),
        datetime.timedelta(hours=11, minutes=59, seconds=10),
    )

    print("First 20 groups from generator:\n")
    print("DEBUG: First row columns:", flush=True)

    for group_rows in mysql_conn.fetch_consolidation_groups_from_partition(
        "ELABDATADISP", partition, limit=100
    ):
        group_num += 1
        if group_rows:
            first_row = group_rows[0]

            # Debug: print all columns from the first group.
            if group_num == 1:
                print(f"  Available columns: {first_row.keys()}\n")
                print(f"  First row data: {dict(first_row)}\n")

            # Compare raw values (not str()) so the types match the
            # datetime-based target_key above.
            key = (
                first_row.get("UnitName"),
                first_row.get("ToolNameID"),
                first_row.get("EventDate"),
                first_row.get("EventTime"),
            )

            nodes = sorted(r.get("NodeNum") for r in group_rows)

            # Show the first 20 groups, plus the target key wherever it appears.
            if group_num <= 20 or key == target_key:
                print(f"Group {group_num}: key={key}")
                print(f"  Nodes ({len(nodes)}): {nodes}")
                print(f"  Rows count: {len(group_rows)}\n")

                if key == target_key:
                    print("^^^ THIS IS THE TARGET KEY! ^^^\n")
                    break

        if group_num >= 100:
            print(f"\nStopped at group {group_num}")
            break

    print(f"\nTotal groups processed: {group_num}")

print("Done!\n")