#!/usr/bin/env python3
"""
Comprehensive database schema migration script.

Detects and adds all columns missing from the SQLite database, based on
the application models, creates missing auxiliary tables, and applies
index/ANALYZE optimizations.  A backup is taken (VACUUM INTO) before any
schema change.
"""
import os
import sys
import sqlite3
from datetime import datetime
import logging

# Make the application package importable when run directly as a script.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from config.settings import DATABASE_PATH
from utils.logging_config import get_logger

logger = get_logger("schema_migration")


def get_table_columns(cursor, table_name):
    """Return ``{column_name: column_type}`` for every column of *table_name*."""
    cursor.execute(f"PRAGMA table_info({table_name})")
    # PRAGMA table_info rows are (cid, name, type, notnull, dflt_value, pk).
    return {row[1]: row[2] for row in cursor.fetchall()}


def get_table_exists(cursor, table_name):
    """Return True if a table named *table_name* exists."""
    cursor.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
        (table_name,),
    )
    return cursor.fetchone() is not None


def _add_column(cursor, table_name, column_name, column_def):
    """Add a single column, working around SQLite ADD COLUMN limits.

    SQLite rejects ``ALTER TABLE ... ADD COLUMN`` with a non-constant
    default such as CURRENT_TIMESTAMP ("Cannot add a column with
    non-constant default").  In that case the column is added without the
    default and existing rows are backfilled instead.

    Raises:
        sqlite3.Error: on any other failure (e.g. PRIMARY KEY / UNIQUE /
        NOT NULL-without-default definitions, which SQLite cannot add to
        an existing table at all; callers log and skip those).
    """
    if 'DEFAULT CURRENT_TIMESTAMP' in column_def:
        stripped_def = column_def.replace(' DEFAULT CURRENT_TIMESTAMP', '')
        cursor.execute(
            f"ALTER TABLE {table_name} ADD COLUMN {column_name} {stripped_def}"
        )
        # Backfill so existing rows are not left NULL.
        cursor.execute(
            f"UPDATE {table_name} SET {column_name} = CURRENT_TIMESTAMP"
        )
    else:
        cursor.execute(
            f"ALTER TABLE {table_name} ADD COLUMN {column_name} {column_def}"
        )


def migrate_users_table(cursor):
    """Add missing columns to the ``users`` table.

    Returns:
        bool: True if at least one column was added.
    """
    logger.info("Migriere users Tabelle...")

    if not get_table_exists(cursor, 'users'):
        logger.warning("users Tabelle existiert nicht - wird bei init_db erstellt")
        return False

    existing_columns = get_table_columns(cursor, 'users')

    # Expected columns.  NOTE: constraint-bearing definitions (PRIMARY
    # KEY, UNIQUE, NOT NULL without default) cannot be added via ALTER
    # TABLE on an existing table; such attempts fail and are logged.
    # String defaults use single quotes — double quotes are identifier
    # quoting in SQL.
    required_columns = {
        'id': 'INTEGER PRIMARY KEY',
        'email': 'VARCHAR(120) UNIQUE NOT NULL',
        'username': 'VARCHAR(100) UNIQUE NOT NULL',
        'password_hash': 'VARCHAR(128) NOT NULL',
        'name': 'VARCHAR(100) NOT NULL',
        'role': "VARCHAR(20) DEFAULT 'user'",
        'active': 'BOOLEAN DEFAULT 1',
        'created_at': 'DATETIME DEFAULT CURRENT_TIMESTAMP',
        'last_login': 'DATETIME',
        'updated_at': 'DATETIME DEFAULT CURRENT_TIMESTAMP',
        'settings': 'TEXT',
        'department': 'VARCHAR(100)',
        'position': 'VARCHAR(100)',
        'phone': 'VARCHAR(50)',
        'bio': 'TEXT',
    }

    migrations_performed = []

    for column_name, column_def in required_columns.items():
        if column_name in existing_columns:
            continue
        try:
            if column_name == 'updated_at':
                # updated_at also gets a trigger for automatic refresh.
                _add_column(cursor, 'users', column_name, column_def)
                cursor.execute("""
                    CREATE TRIGGER IF NOT EXISTS update_users_updated_at
                    AFTER UPDATE ON users
                    BEGIN
                        UPDATE users SET updated_at = CURRENT_TIMESTAMP
                        WHERE id = NEW.id;
                    END
                """)
                logger.info(f"Spalte '{column_name}' hinzugefügt mit Auto-Update-Trigger")
            else:
                _add_column(cursor, 'users', column_name, column_def)
                logger.info(f"Spalte '{column_name}' hinzugefügt")
            migrations_performed.append(column_name)
        except Exception as e:
            # Best-effort: skip columns SQLite refuses to add.
            logger.error(f"Fehler beim Hinzufügen der Spalte '{column_name}': {str(e)}")

    return len(migrations_performed) > 0


def migrate_printers_table(cursor):
    """Add missing columns to the ``printers`` table.

    Returns:
        bool: True if at least one column was added.
    """
    logger.info("Migriere printers Tabelle...")

    if not get_table_exists(cursor, 'printers'):
        logger.warning("printers Tabelle existiert nicht - wird bei init_db erstellt")
        return False

    existing_columns = get_table_columns(cursor, 'printers')

    required_columns = {
        'id': 'INTEGER PRIMARY KEY',
        'name': 'VARCHAR(100) NOT NULL',
        'model': 'VARCHAR(100)',
        'location': 'VARCHAR(100)',
        'ip_address': 'VARCHAR(50)',
        'mac_address': 'VARCHAR(50) NOT NULL UNIQUE',
        'plug_ip': 'VARCHAR(50) NOT NULL',
        'plug_username': 'VARCHAR(100) NOT NULL',
        'plug_password': 'VARCHAR(100) NOT NULL',
        'status': "VARCHAR(20) DEFAULT 'offline'",
        'active': 'BOOLEAN DEFAULT 1',
        'created_at': 'DATETIME DEFAULT CURRENT_TIMESTAMP',
        'last_checked': 'DATETIME',
    }

    migrations_performed = []

    for column_name, column_def in required_columns.items():
        if column_name in existing_columns:
            continue
        try:
            _add_column(cursor, 'printers', column_name, column_def)
            logger.info(f"Spalte '{column_name}' zu printers hinzugefügt")
            migrations_performed.append(column_name)
        except Exception as e:
            logger.error(f"Fehler beim Hinzufügen der Spalte '{column_name}' zu printers: {str(e)}")

    return len(migrations_performed) > 0


def migrate_jobs_table(cursor):
    """Add missing columns to the ``jobs`` table.

    Returns:
        bool: True if at least one column was added.
    """
    logger.info("Migriere jobs Tabelle...")

    if not get_table_exists(cursor, 'jobs'):
        logger.warning("jobs Tabelle existiert nicht - wird bei init_db erstellt")
        return False

    existing_columns = get_table_columns(cursor, 'jobs')

    required_columns = {
        'id': 'INTEGER PRIMARY KEY',
        'name': 'VARCHAR(200) NOT NULL',
        'description': 'VARCHAR(500)',
        'user_id': 'INTEGER NOT NULL',
        'printer_id': 'INTEGER NOT NULL',
        'start_at': 'DATETIME',
        'end_at': 'DATETIME',
        'actual_end_time': 'DATETIME',
        'status': "VARCHAR(20) DEFAULT 'scheduled'",
        'created_at': 'DATETIME DEFAULT CURRENT_TIMESTAMP',
        'notes': 'VARCHAR(500)',
        'material_used': 'FLOAT',
        'file_path': 'VARCHAR(500)',
        'owner_id': 'INTEGER',
        'duration_minutes': 'INTEGER NOT NULL',
    }

    migrations_performed = []

    for column_name, column_def in required_columns.items():
        if column_name in existing_columns:
            continue
        try:
            _add_column(cursor, 'jobs', column_name, column_def)
            logger.info(f"Spalte '{column_name}' zu jobs hinzugefügt")
            migrations_performed.append(column_name)
        except Exception as e:
            logger.error(f"Fehler beim Hinzufügen der Spalte '{column_name}' zu jobs: {str(e)}")

    return len(migrations_performed) > 0


def migrate_guest_requests_table(cursor):
    """Add missing columns to the ``guest_requests`` table.

    Returns:
        bool: True if at least one column was added.
    """
    logger.info("Migriere guest_requests Tabelle...")

    if not get_table_exists(cursor, 'guest_requests'):
        logger.warning("guest_requests Tabelle existiert nicht - wird bei init_db erstellt")
        return False

    existing_columns = get_table_columns(cursor, 'guest_requests')

    # Only approval-workflow columns are migrated here; the base table is
    # created by init_db.
    required_columns = {
        'processed_by': 'INTEGER',
        'processed_at': 'DATETIME',
        'approval_notes': 'TEXT',
        'rejection_reason': 'TEXT',
        'otp_used_at': 'DATETIME',
    }

    migrations_performed = []

    for column_name, column_def in required_columns.items():
        if column_name in existing_columns:
            continue
        try:
            _add_column(cursor, 'guest_requests', column_name, column_def)
            logger.info(f"Spalte '{column_name}' zu guest_requests hinzugefügt")
            migrations_performed.append(column_name)
        except Exception as e:
            logger.error(f"Fehler beim Hinzufügen der Spalte '{column_name}' zu guest_requests: {str(e)}")

    return len(migrations_performed) > 0


def create_missing_tables(cursor):
    """Create auxiliary tables that do not exist yet."""
    logger.info("Prüfe auf fehlende Tabellen...")

    # user_permissions table
    if not get_table_exists(cursor, 'user_permissions'):
        cursor.execute("""
            CREATE TABLE user_permissions (
                user_id INTEGER PRIMARY KEY,
                can_start_jobs BOOLEAN DEFAULT 0,
                needs_approval BOOLEAN DEFAULT 1,
                can_approve_jobs BOOLEAN DEFAULT 0,
                FOREIGN KEY (user_id) REFERENCES users (id)
            )
        """)
        logger.info("Tabelle 'user_permissions' erstellt")

    # notifications table
    if not get_table_exists(cursor, 'notifications'):
        cursor.execute("""
            CREATE TABLE notifications (
                id INTEGER PRIMARY KEY,
                user_id INTEGER NOT NULL,
                type VARCHAR(50) NOT NULL,
                payload TEXT,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                read BOOLEAN DEFAULT 0,
                FOREIGN KEY (user_id) REFERENCES users (id)
            )
        """)
        logger.info("Tabelle 'notifications' erstellt")

    # stats table
    if not get_table_exists(cursor, 'stats'):
        cursor.execute("""
            CREATE TABLE stats (
                id INTEGER PRIMARY KEY,
                total_print_time INTEGER DEFAULT 0,
                total_jobs_completed INTEGER DEFAULT 0,
                total_material_used FLOAT DEFAULT 0.0,
                last_updated DATETIME DEFAULT CURRENT_TIMESTAMP
            )
        """)
        logger.info("Tabelle 'stats' erstellt")

    # system_logs table
    if not get_table_exists(cursor, 'system_logs'):
        cursor.execute("""
            CREATE TABLE system_logs (
                id INTEGER PRIMARY KEY,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
                level VARCHAR(20) NOT NULL,
                message VARCHAR(1000) NOT NULL,
                module VARCHAR(100),
                user_id INTEGER,
                ip_address VARCHAR(50),
                user_agent VARCHAR(500),
                FOREIGN KEY (user_id) REFERENCES users (id)
            )
        """)
        logger.info("Tabelle 'system_logs' erstellt")


def optimize_database(cursor):
    """Create indexes and refresh query-planner statistics (best effort)."""
    logger.info("Führe Datenbankoptimierungen durch...")

    try:
        # Indexes for better query performance.
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_users_username ON users(username)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_jobs_user_id ON jobs(user_id)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_jobs_printer_id ON jobs(printer_id)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_notifications_user_id ON notifications(user_id)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_system_logs_timestamp ON system_logs(timestamp)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_guest_requests_status ON guest_requests(status)")

        # Refresh planner statistics.
        cursor.execute("ANALYZE")

        logger.info("Datenbankoptimierungen abgeschlossen")
    except Exception as e:
        # Optimizations are non-critical; log and continue.
        logger.error(f"Fehler bei Datenbankoptimierungen: {str(e)}")


def main():
    """Run the complete schema migration.

    Exits with status 1 on failure (after rollback); performs first-time
    initialization instead when the database file does not exist yet.
    """
    conn = None
    try:
        logger.info("Starte umfassende Datenbank-Schema-Migration...")

        if not os.path.exists(DATABASE_PATH):
            logger.error(f"Datenbankdatei nicht gefunden: {DATABASE_PATH}")
            # First-time initialization via the models package.
            from models import init_database
            logger.info("Führe Erstinitialisierung durch...")
            init_database()
            logger.info("Erstinitialisierung abgeschlossen")
            return

        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()

        # Enable WAL mode for better concurrent performance.
        cursor.execute("PRAGMA journal_mode=WAL")
        cursor.execute("PRAGMA foreign_keys=ON")

        logger.info(f"Verbunden mit Datenbank: {DATABASE_PATH}")

        # Take a backup before touching the schema (VACUUM INTO requires
        # SQLite >= 3.27 — TODO confirm the deployed SQLite version).
        backup_path = f"{DATABASE_PATH}.backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        cursor.execute(f"VACUUM INTO '{backup_path}'")
        logger.info(f"Backup erstellt: {backup_path}")

        migrations_performed = []

        # Create missing tables first, then per-table column migrations.
        create_missing_tables(cursor)
        migrations_performed.append("missing_tables")

        if migrate_users_table(cursor):
            migrations_performed.append("users")
        if migrate_printers_table(cursor):
            migrations_performed.append("printers")
        if migrate_jobs_table(cursor):
            migrations_performed.append("jobs")
        if migrate_guest_requests_table(cursor):
            migrations_performed.append("guest_requests")

        optimize_database(cursor)
        migrations_performed.append("optimizations")

        conn.commit()
        conn.close()
        conn = None  # prevent double-close in the error path

        logger.info(f"Schema-Migration erfolgreich abgeschlossen. Migrierte Bereiche: {', '.join(migrations_performed)}")

        # Verify the migration by loading the models.
        test_migration()

    except Exception as e:
        logger.error(f"Fehler bei der Schema-Migration: {str(e)}")
        if conn is not None:
            conn.rollback()
            conn.close()
        sys.exit(1)


def test_migration():
    """Smoke-test the migration by querying each model through the ORM.

    Raises:
        Exception: re-raises whatever the model layer raises, after logging.
    """
    try:
        logger.info("Teste Migration durch Laden der Models...")

        from models import get_cached_session, User, Printer, Job

        with get_cached_session() as session:
            # User query (exercises the updated_at column).
            users = session.query(User).limit(1).all()
            logger.info(f"User-Abfrage erfolgreich - {len(users)} Benutzer gefunden")

            # Printer query
            printers = session.query(Printer).limit(1).all()
            logger.info(f"Printer-Abfrage erfolgreich - {len(printers)} Drucker gefunden")

            # Job query
            jobs = session.query(Job).limit(1).all()
            logger.info(f"Job-Abfrage erfolgreich - {len(jobs)} Jobs gefunden")

        logger.info("Migrations-Test erfolgreich abgeschlossen")

    except Exception as e:
        logger.error(f"Fehler beim Migrations-Test: {str(e)}")
        raise


if __name__ == "__main__":
    main()