From e122fe0cc938ebafc82ac4d5300af9eb2ba71fc8 Mon Sep 17 00:00:00 2001
From: Till Tomczak
Date: Mon, 26 May 2025 12:47:52 +0200
Subject: [PATCH] feat: add admin system management routes

---
 backend/app/app.py | 311 ++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 310 insertions(+), 1 deletion(-)

diff --git a/backend/app/app.py b/backend/app/app.py
index 1d627eb9c..e3887ba29 100644
--- a/backend/app/app.py
+++ b/backend/app/app.py
@@ -1923,4 +1923,313 @@ def validate_ssl_certificate():
     except Exception as e:
         app_logger.error(f"Fehler bei der SSL-Validierung: {e}")
-        return jsonify({"error": f"Fehler bei der Validierung: {str(e)}"}), 500
\ No newline at end of file
+        return jsonify({"error": f"Fehler bei der Validierung: {str(e)}"}), 500
+
+# Neue Admin-System-Management-Routen
+@app.route("/api/admin/cache/clear", methods=["POST"])
+@login_required
+def clear_cache():
+    """Leert den System-Cache."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        import shutil
+        import tempfile
+
+        # Flask-Cache leeren (falls vorhanden)
+        cache_dir = os.path.join(tempfile.gettempdir(), 'flask_cache')
+        if os.path.exists(cache_dir):
+            shutil.rmtree(cache_dir)
+
+        # Python __pycache__ leeren
+        for root, dirs, files in os.walk('.'):
+            if '__pycache__' in dirs:
+                pycache_path = os.path.join(root, '__pycache__')
+                shutil.rmtree(pycache_path)
+                dirs.remove('__pycache__')  # nicht in das soeben gelöschte Verzeichnis absteigen
+
+        app_logger.info(f"Cache wurde von Admin {current_user.username} geleert")
+        return jsonify({"success": True, "message": "Cache erfolgreich geleert"})
+
+    except Exception as e:
+        app_logger.error(f"Fehler beim Leeren des Cache: {str(e)}")
+        return jsonify({"error": f"Fehler beim Leeren des Cache: {str(e)}"}), 500
+
+@app.route("/api/admin/database/optimize", methods=["POST"])
+@login_required
+def optimize_database():
+    """Optimiert die Datenbank."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        db_session = get_db_session()
+
+        # VACUUM und ANALYZE für SQLite (VACUUM setzt voraus, dass keine Transaktion offen ist)
+        db_session.execute(sqlalchemy.text("VACUUM"))
+        db_session.execute(sqlalchemy.text("ANALYZE"))
+        db_session.commit()
+
+        # Alte abgeschlossene Jobs löschen (älter als 30 Tage)
+        thirty_days_ago = datetime.now() - timedelta(days=30)
+        old_jobs = db_session.query(Job).filter(
+            Job.status.in_(["completed", "failed", "cancelled"]),
+            Job.created_at < thirty_days_ago
+        ).count()
+
+        db_session.query(Job).filter(
+            Job.status.in_(["completed", "failed", "cancelled"]),
+            Job.created_at < thirty_days_ago
+        ).delete(synchronize_session=False)
+
+        db_session.commit()
+        db_session.close()
+
+        app_logger.info(f"Datenbank wurde von Admin {current_user.username} optimiert. {old_jobs} alte Jobs entfernt.")
+        return jsonify({
+            "success": True,
+            "message": f"Datenbank optimiert. {old_jobs} alte Jobs entfernt."
+        })
+
+    except Exception as e:
+        app_logger.error(f"Fehler bei der Datenbankoptimierung: {str(e)}")
+        return jsonify({"error": f"Fehler bei der Datenbankoptimierung: {str(e)}"}), 500
+
+@app.route("/api/admin/backup/create", methods=["POST"])
+@login_required
+def create_backup():
+    """Erstellt ein System-Backup."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        import shutil
+        from datetime import datetime
+
+        backup_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'backups')
+        os.makedirs(backup_dir, exist_ok=True)
+
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        backup_name = f"backup_{timestamp}"
+        backup_path = os.path.join(backup_dir, backup_name)
+        os.makedirs(backup_path, exist_ok=True)  # Zielverzeichnis anlegen, bevor Dateien hineinkopiert werden
+
+        # Datenbank-Backup
+        db_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'instance', 'database.db')
+        if os.path.exists(db_path):
+            shutil.copy2(db_path, os.path.join(backup_path, 'database.db'))
+
+        # Konfigurationsdateien
+        config_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')
+        if os.path.exists(config_dir):
+            shutil.copytree(config_dir, os.path.join(backup_path, 'config'))
+
+        # Uploads-Verzeichnis
+        uploads_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'uploads')
+        if os.path.exists(uploads_dir):
+            shutil.copytree(uploads_dir, os.path.join(backup_path, 'uploads'))
+
+        # Backup komprimieren
+        shutil.make_archive(backup_path, 'zip', backup_path)
+        shutil.rmtree(backup_path)  # Temporäres Verzeichnis löschen
+
+        app_logger.info(f"Backup wurde von Admin {current_user.username} erstellt: {backup_name}.zip")
+        return jsonify({
+            "success": True,
+            "message": f"Backup erfolgreich erstellt: {backup_name}.zip"
+        })
+
+    except Exception as e:
+        app_logger.error(f"Fehler beim Erstellen des Backups: {str(e)}")
+        return jsonify({"error": f"Fehler beim Erstellen des Backups: {str(e)}"}), 500
+
+@app.route("/api/admin/printers/update", methods=["POST"])
+@login_required
+def update_printers():
+    """Aktualisiert alle Drucker-Verbindungen."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        db_session = get_db_session()
+        printers = db_session.query(Printer).all()
+
+        updated_count = 0
+        error_count = 0
+
+        for printer in printers:
+            try:
+                # Drucker-Status prüfen
+                import socket
+
+                # Erreichbarkeitstest per TCP-Verbindung auf Port 80
+                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                sock.settimeout(5)
+                result = sock.connect_ex((printer.ip_address, 80))
+                sock.close()
+
+                if result == 0:
+                    printer.status = "online"
+                    printer.last_seen = datetime.now()
+                    updated_count += 1
+                else:
+                    printer.status = "offline"
+                    error_count += 1
+
+            except Exception as e:
+                printer.status = "error"
+                error_count += 1
+                printers_logger.error(f"Fehler beim Aktualisieren von Drucker {printer.name}: {str(e)}")
+
+        db_session.commit()
+        db_session.close()
+
+        app_logger.info(f"Drucker wurden von Admin {current_user.username} aktualisiert: {updated_count} online, {error_count} Fehler.")
+        return jsonify({
+            "success": True,
+            "message": f"Drucker aktualisiert: {updated_count} online, {error_count} offline/Fehler"
+        })
+
+    except Exception as e:
+        app_logger.error(f"Fehler beim Aktualisieren der Drucker: {str(e)}")
+        return jsonify({"error": f"Fehler beim Aktualisieren der Drucker: {str(e)}"}), 500
+
+@app.route("/api/admin/system/restart", methods=["POST"])
+@login_required
+def restart_system():
+    """Startet das System neu."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        app_logger.warning(f"System-Neustart wurde von Admin {current_user.username} initiiert")
+
+        # Graceful Shutdown: Der Prozess beendet sich selbst, der eigentliche Neustart erfolgt über den übergeordneten Prozess-Manager
+        def shutdown_server():
+            import time
+            time.sleep(2)  # Kurz warten, damit die Response gesendet wird
+            os._exit(0)
+
+        # Shutdown in separatem Thread
+        import threading
+        shutdown_thread = threading.Thread(target=shutdown_server)
+        shutdown_thread.start()
+
+        return jsonify({
+            "success": True,
+            "message": "System wird neu gestartet..."
+        })
+
+    except Exception as e:
+        app_logger.error(f"Fehler beim Neustart des Systems: {str(e)}")
+        return jsonify({"error": f"Fehler beim Neustart des Systems: {str(e)}"}), 500
+
+@app.route("/api/admin/system/status", methods=["GET"])
+@login_required
+def get_system_status():
+    """Gibt den aktuellen Systemstatus zurück."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        import psutil
+        import sqlite3
+
+        # CPU und Memory
+        cpu_percent = psutil.cpu_percent(interval=1)
+        memory = psutil.virtual_memory()
+        disk = psutil.disk_usage('/')
+
+        # Uptime
+        boot_time = psutil.boot_time()
+        uptime_seconds = time.time() - boot_time
+        uptime_days = int(uptime_seconds // 86400)
+        uptime_hours = int((uptime_seconds % 86400) // 3600)
+        uptime_minutes = int((uptime_seconds % 3600) // 60)
+
+        # Datenbank-Status
+        db_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'instance', 'database.db')
+        db_size = 0
+        db_connections = 0  # Platzhalter: aktive Verbindungen werden derzeit nicht ermittelt
+
+        if os.path.exists(db_path):
+            db_size = os.path.getsize(db_path) / (1024 * 1024)  # MB
+
+        # Scheduler-Status
+        scheduler_running = False
+        try:
+            from utils.job_scheduler import scheduler
+            scheduler_running = scheduler.running
+        except Exception:
+            pass
+
+        # Nächster Job
+        db_session = get_db_session()
+        next_job = db_session.query(Job).filter(
+            Job.status == "scheduled"
+        ).order_by(Job.created_at.asc()).first()
+
+        next_job_time = "Keine geplanten Jobs"
+        if next_job:
+            next_job_time = next_job.created_at.strftime("%d.%m.%Y %H:%M")
+
+        db_session.close()
+
+        return jsonify({
+            "cpu_usage": round(cpu_percent, 1),
+            "memory_usage": round(memory.percent, 1),
+            "disk_usage": round((disk.used / disk.total) * 100, 1),
+            "uptime": f"{uptime_days}d {uptime_hours}h {uptime_minutes}m",
+            "db_size": f"{db_size:.1f} MB",
+            "db_connections": db_connections,
+            "scheduler_running": scheduler_running,
+            "next_job": next_job_time
+        })
+
+    except Exception as e:
+        app_logger.error(f"Fehler beim Abrufen des Systemstatus: {str(e)}")
+        return jsonify({"error": f"Fehler beim Abrufen des Systemstatus: {str(e)}"}), 500
+
+@app.route("/api/admin/database/status", methods=["GET"])
+@login_required
+def get_database_status():
+    """Gibt den Datenbankstatus zurück."""
+    if not current_user.is_admin:
+        return jsonify({"error": "Keine Berechtigung"}), 403
+
+    try:
+        db_session = get_db_session()
+
+        # Verbindungstest
+        db_session.execute(sqlalchemy.text("SELECT 1"))
+
+        # Datenbankgröße
+        db_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'instance', 'database.db')
+        db_size = 0
+        if os.path.exists(db_path):
+            db_size = os.path.getsize(db_path) / (1024 * 1024)  # MB
+
+        # Tabellenstatistiken
+        user_count = db_session.query(User).count()
+        printer_count = db_session.query(Printer).count()
+        job_count = db_session.query(Job).count()
+
+        db_session.close()
+
+        return jsonify({
+            "connected": True,
+            "size": f"{db_size:.1f} MB",
+            "tables": {
+                "users": user_count,
+                "printers": printer_count,
+                "jobs": job_count
+            }
+        })
+
+    except Exception as e:
+        app_logger.error(f"Fehler beim Abrufen des Datenbankstatus: {str(e)}")
+        return jsonify({
+            "connected": False,
+            "error": str(e)
+        }), 500
\ No newline at end of file
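
Note: all of the new endpoints are behind @login_required plus an is_admin check, so a quick way to exercise the whole set is a small client-side smoke test. The sketch below is not part of the patch and rests on several assumptions: the app is running locally on http://localhost:5000, login is a form POST to /auth/login with username/password fields, and an admin account with the credentials shown exists. Adjust those details to the actual deployment before using it.

# Hypothetical smoke test for the admin routes added in this patch.
# Assumptions (not from the patch): base URL, the /auth/login form endpoint,
# and the admin credentials below.
import requests

BASE_URL = "http://localhost:5000"
ADMIN_CREDENTIALS = {"username": "admin", "password": "admin"}

def main():
    session = requests.Session()
    # Assumed Flask-Login form login; the session cookie carries the auth state.
    session.post(f"{BASE_URL}/auth/login", data=ADMIN_CREDENTIALS, timeout=10)

    # Read-only status endpoints
    for path in ("/api/admin/system/status", "/api/admin/database/status"):
        response = session.get(f"{BASE_URL}{path}", timeout=30)
        print(path, response.status_code, response.text[:200])

    # Maintenance endpoints (the restart route is deliberately skipped,
    # because it terminates the server process)
    for path in ("/api/admin/cache/clear",
                 "/api/admin/database/optimize",
                 "/api/admin/backup/create",
                 "/api/admin/printers/update"):
        response = session.post(f"{BASE_URL}{path}", timeout=120)
        print(path, response.status_code, response.text[:200])

if __name__ == "__main__":
    main()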