"""
|
||
Admin-API Blueprint für erweiterte Verwaltungsfunktionen
|
||
|
||
Dieses Blueprint stellt zusätzliche Admin-API-Endpunkte bereit für:
|
||
- System-Backups
|
||
- Datenbank-Optimierung
|
||
- Cache-Verwaltung
|
||
|
||
Autor: MYP Team
|
||
Datum: 2025-06-01
|
||
"""
|
||
|
||
import os
import zipfile
import sqlite3
from datetime import datetime

from flask import Blueprint, jsonify
from flask_login import login_required, current_user
from functools import wraps

from utils.logging_config import get_logger

# Create the blueprint
admin_api_blueprint = Blueprint('admin_api', __name__, url_prefix='/api/admin')

# Initialize the logger
admin_logger = get_logger("admin_api")

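# A minimal registration sketch (the import path below is an assumption;
# the real application wiring may differ):
#
#     from flask import Flask
#     from blueprints.admin_api import admin_api_blueprint  # hypothetical module path
#
#     app = Flask(__name__)
#     app.register_blueprint(admin_api_blueprint)
#
# All endpoints are then reachable under the /api/admin prefix.
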
def admin_required(f):
    """Decorator that restricts an endpoint to authenticated admin users."""
    @wraps(f)
    @login_required
    def decorated_function(*args, **kwargs):
        if not current_user.is_authenticated or current_user.role != 'admin':
            admin_logger.warning(f"Unauthorized admin access attempt by user {getattr(current_user, 'id', 'anonymous')}")
            return jsonify({'error': 'Admin privileges required'}), 403
        return f(*args, **kwargs)
    return decorated_function

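# Usage sketch (hypothetical endpoint, not part of this module):
#
#     @admin_api_blueprint.route('/example', methods=['GET'])
#     @admin_required
#     def example_endpoint():
#         return jsonify({'status': 'ok'})
#
# Because @login_required wraps the inner function, Flask-Login handles
# anonymous requests before the role check runs.
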
@admin_api_blueprint.route('/backup/create', methods=['POST'])
@admin_required
def create_backup():
    """
    Creates a manual system backup.

    Produces an archive of all important system data, including the
    database, configuration files and user uploads.

    Returns:
        JSON: success status and backup information
    """
    try:
        admin_logger.info(f"Backup creation requested by admin {current_user.username}")

        # Ensure the backup directory exists
        backup_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'database', 'backups')
        os.makedirs(backup_dir, exist_ok=True)

        # Build a unique backup name
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        backup_name = f"system_backup_{timestamp}.zip"
        backup_path = os.path.join(backup_dir, backup_name)

        created_files = []
        backup_size = 0

        with zipfile.ZipFile(backup_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            # 1. Add the database file
            try:
                from config.settings import DATABASE_PATH
                if os.path.exists(DATABASE_PATH):
                    zipf.write(DATABASE_PATH, 'database/main.db')
                    created_files.append('database/main.db')
                    admin_logger.debug("✅ Main database added to the backup")

                    # Include WAL and SHM files if present
                    wal_path = DATABASE_PATH + '-wal'
                    shm_path = DATABASE_PATH + '-shm'

                    if os.path.exists(wal_path):
                        zipf.write(wal_path, 'database/main.db-wal')
                        created_files.append('database/main.db-wal')

                    if os.path.exists(shm_path):
                        zipf.write(shm_path, 'database/main.db-shm')
                        created_files.append('database/main.db-shm')

            except Exception as db_error:
                admin_logger.warning(f"Failed to add the database: {str(db_error)}")

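            # Note: archiving the raw .db/.wal/.shm files of a live database can
            # yield an inconsistent snapshot. A safer variant (a sketch, not what
            # this endpoint currently does) would use SQLite's online backup API:
            #
            #     snapshot_path = os.path.join(backup_dir, 'main_snapshot.db')  # hypothetical path
            #     src = sqlite3.connect(DATABASE_PATH)
            #     dst = sqlite3.connect(snapshot_path)
            #     with dst:
            #         src.backup(dst)
            #     src.close()
            #     dst.close()
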
            # 2. Configuration files
            try:
                config_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'config')
                if os.path.exists(config_dir):
                    for root, dirs, files in os.walk(config_dir):
                        for file in files:
                            if file.endswith(('.py', '.json', '.yaml', '.yml', '.toml')):
                                file_path = os.path.join(root, file)
                                arc_path = os.path.relpath(file_path, os.path.dirname(os.path.dirname(__file__)))
                                zipf.write(file_path, arc_path)
                                created_files.append(arc_path)
                    admin_logger.debug("✅ Configuration files added to the backup")
            except Exception as config_error:
                admin_logger.warning(f"Failed to add the configuration: {str(config_error)}")

            # 3. Important user uploads (capped at 1000 files for performance)
            try:
                uploads_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'uploads')
                if os.path.exists(uploads_dir):
                    file_count = 0
                    max_files = 1000  # performance cap

                    for root, dirs, files in os.walk(uploads_dir):
                        if file_count >= max_files:
                            break
                        for file in files:
                            if file_count >= max_files:
                                break

                            file_path = os.path.join(root, file)
                            file_size = os.path.getsize(file_path)

                            # Only include files smaller than 50 MB
                            if file_size < 50 * 1024 * 1024:
                                arc_path = os.path.relpath(file_path, os.path.dirname(os.path.dirname(__file__)))
                                zipf.write(file_path, arc_path)
                                created_files.append(arc_path)
                                file_count += 1

                    admin_logger.debug(f"✅ {file_count} upload files added to the backup")
            except Exception as uploads_error:
                admin_logger.warning(f"Failed to add uploads: {str(uploads_error)}")

            # 4. System logs (only the 100 most recent log files)
            try:
                logs_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'logs')
                if os.path.exists(logs_dir):
                    log_files = []
                    for root, dirs, files in os.walk(logs_dir):
                        for file in files:
                            if file.endswith(('.log', '.txt')):
                                file_path = os.path.join(root, file)
                                log_files.append((file_path, os.path.getmtime(file_path)))

                    # Sort by modification time (newest first) and keep only the latest 100
                    log_files.sort(key=lambda x: x[1], reverse=True)
                    recent_logs = log_files[:100]
                    for file_path, _ in recent_logs:
                        arc_path = os.path.relpath(file_path, os.path.dirname(os.path.dirname(__file__)))
                        zipf.write(file_path, arc_path)
                        created_files.append(arc_path)

                    admin_logger.debug(f"✅ {len(recent_logs)} log files added to the backup")
            except Exception as logs_error:
                admin_logger.warning(f"Failed to add logs: {str(logs_error)}")

        # Determine the backup size
        if os.path.exists(backup_path):
            backup_size = os.path.getsize(backup_path)

        admin_logger.info(f"✅ System backup created successfully: {backup_name} ({backup_size / 1024 / 1024:.2f} MB)")

        return jsonify({
            'success': True,
            'message': f'Backup created successfully: {backup_name}',
            'backup_info': {
                'filename': backup_name,
                'size_bytes': backup_size,
                'size_mb': round(backup_size / 1024 / 1024, 2),
                'files_count': len(created_files),
                'created_at': datetime.now().isoformat(),
                'path': backup_path
            }
        })

    except Exception as e:
        admin_logger.error(f"❌ Failed to create backup: {str(e)}")
        return jsonify({
            'success': False,
            'message': f'Failed to create backup: {str(e)}'
        }), 500

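# Example invocation (a sketch; the port and the admin session cookie are
# placeholders, assuming the Flask development defaults):
#
#     curl -X POST http://localhost:5000/api/admin/backup/create \
#          -H "Cookie: session=<admin-session>"
#
# On success the response carries the backup filename, size and file count
# in the 'backup_info' object shown above.
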
@admin_api_blueprint.route('/database/optimize', methods=['POST'])
@admin_required
def optimize_database():
    """
    Runs database optimization.

    Optimizes the SQLite database via VACUUM, ANALYZE and further
    maintenance operations for better performance.

    Returns:
        JSON: success status and optimization statistics
    """
    try:
        admin_logger.info(f"Database optimization requested by admin {current_user.username}")

        from config.settings import DATABASE_PATH

        optimization_results = {
            'vacuum_completed': False,
            'analyze_completed': False,
            'integrity_check': False,
            'wal_checkpoint': False,
            'size_before': 0,
            'size_after': 0,
            'space_saved': 0
        }

        # Database size before optimization
        if os.path.exists(DATABASE_PATH):
            optimization_results['size_before'] = os.path.getsize(DATABASE_PATH)

        # Connect to the database
        conn = sqlite3.connect(DATABASE_PATH, timeout=30.0)
        cursor = conn.cursor()

        try:
            # 1. Integrity check
            admin_logger.debug("🔍 Running integrity check...")
            cursor.execute("PRAGMA integrity_check")
            integrity_result = cursor.fetchone()
            optimization_results['integrity_check'] = integrity_result[0] == 'ok'

            if not optimization_results['integrity_check']:
                admin_logger.warning(f"⚠️ Integrity check returned: {integrity_result[0]}")
            else:
                admin_logger.debug("✅ Integrity check passed")

            # 2. WAL checkpoint (if WAL mode is active)
            try:
                admin_logger.debug("🔄 Running WAL checkpoint...")
                cursor.execute("PRAGMA wal_checkpoint(TRUNCATE)")
                optimization_results['wal_checkpoint'] = True
                admin_logger.debug("✅ WAL checkpoint completed")
            except Exception as wal_error:
                admin_logger.debug(f"ℹ️ WAL checkpoint not possible: {str(wal_error)}")

            # 3. ANALYZE - refresh query planner statistics
            admin_logger.debug("📊 Updating database statistics...")
            cursor.execute("ANALYZE")
            optimization_results['analyze_completed'] = True
            admin_logger.debug("✅ ANALYZE completed")

            # 4. VACUUM - compact and reorganize the database
            admin_logger.debug("🗜️ Compacting and reorganizing the database...")
            cursor.execute("VACUUM")
            optimization_results['vacuum_completed'] = True
            admin_logger.debug("✅ VACUUM completed")

            # 5. Performance tuning
            try:
                # Cache of 10000 pages (~40 MB at a 4 KiB page size;
                # note the PRAGMA unit is pages, not bytes)
                cursor.execute("PRAGMA cache_size = 10000")

                # Switch the journal mode to WAL for better concurrent
                # performance (persists across connections)
                cursor.execute("PRAGMA journal_mode = WAL")

                # NORMAL synchronous mode balances performance and safety;
                # this setting only applies to the current connection
                cursor.execute("PRAGMA synchronous = NORMAL")

                # Request a 4 KiB page size (only takes effect when the
                # database file is rebuilt, e.g. by a subsequent VACUUM)
                cursor.execute("PRAGMA page_size = 4096")

                admin_logger.debug("✅ Performance tuning applied")
            except Exception as perf_error:
                admin_logger.warning(f"⚠️ Performance tuning partially failed: {str(perf_error)}")

        finally:
            cursor.close()
            conn.close()

        # Database size after optimization
        if os.path.exists(DATABASE_PATH):
            optimization_results['size_after'] = os.path.getsize(DATABASE_PATH)
            optimization_results['space_saved'] = optimization_results['size_before'] - optimization_results['size_after']

        # Log the results
        space_saved_mb = optimization_results['space_saved'] / 1024 / 1024
        admin_logger.info(f"✅ Database optimization finished - {space_saved_mb:.2f} MB of space reclaimed")

        return jsonify({
            'success': True,
            'message': 'Database optimized successfully',
            'results': {
                'vacuum_completed': optimization_results['vacuum_completed'],
                'analyze_completed': optimization_results['analyze_completed'],
                'integrity_check_passed': optimization_results['integrity_check'],
                'wal_checkpoint_completed': optimization_results['wal_checkpoint'],
                'size_before_mb': round(optimization_results['size_before'] / 1024 / 1024, 2),
                'size_after_mb': round(optimization_results['size_after'] / 1024 / 1024, 2),
                'space_saved_mb': round(space_saved_mb, 2),
                'optimization_timestamp': datetime.now().isoformat()
            }
        })

    except Exception as e:
        admin_logger.error(f"❌ Database optimization failed: {str(e)}")
        return jsonify({
            'success': False,
            'message': f'Database optimization failed: {str(e)}'
        }), 500

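# Example invocation from Python (a sketch; URL, port and session cookie are
# placeholders, and the requests dependency is an assumption):
#
#     import requests
#     resp = requests.post('http://localhost:5000/api/admin/database/optimize',
#                          cookies={'session': '<admin-session>'})
#     print(resp.json()['results']['space_saved_mb'])
#
# Since SQLite 3.18, "PRAGMA optimize" offers a lighter-weight alternative to
# a full ANALYZE and could be run on every connection close instead.
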
@admin_api_blueprint.route('/cache/clear', methods=['POST'])
@admin_required
def clear_cache():
    """
    Clears the system cache.

    Removes temporary files, cache directories and Python bytecode to
    free up disk space and improve performance.

    Returns:
        JSON: success status and deletion statistics
    """
    try:
        admin_logger.info(f"Cache clearing requested by admin {current_user.username}")

        cleared_stats = {
            'files_deleted': 0,
            'dirs_deleted': 0,
            'space_freed': 0,
            'categories': {}
        }

        app_root = os.path.dirname(os.path.dirname(__file__))

        # 1. Clear the Python bytecode cache (__pycache__)
        try:
            pycache_count = 0
            pycache_size = 0

            for root, dirs, files in os.walk(app_root):
                if os.path.basename(root) == '__pycache__':
                    for file in files:
                        file_path = os.path.join(root, file)
                        try:
                            pycache_size += os.path.getsize(file_path)
                            os.remove(file_path)
                            pycache_count += 1
                        except Exception:
                            pass

                    # Try to remove the now-empty __pycache__ directory
                    try:
                        os.rmdir(root)
                        cleared_stats['dirs_deleted'] += 1
                    except Exception:
                        pass

            cleared_stats['categories']['python_bytecode'] = {
                'files': pycache_count,
                'size_mb': round(pycache_size / 1024 / 1024, 2)
            }
            cleared_stats['files_deleted'] += pycache_count
            cleared_stats['space_freed'] += pycache_size

            admin_logger.debug(f"✅ Python bytecode cache: {pycache_count} files, {pycache_size / 1024 / 1024:.2f} MB")

        except Exception as pycache_error:
            admin_logger.warning(f"⚠️ Failed to clear the Python cache: {str(pycache_error)}")

        # 2. Temporary files in the uploads/temp directory
        try:
            temp_count = 0
            temp_size = 0
            temp_dir = os.path.join(app_root, 'uploads', 'temp')

            if os.path.exists(temp_dir):
                for root, dirs, files in os.walk(temp_dir):
                    for file in files:
                        file_path = os.path.join(root, file)
                        try:
                            temp_size += os.path.getsize(file_path)
                            os.remove(file_path)
                            temp_count += 1
                        except Exception:
                            pass

            cleared_stats['categories']['temp_uploads'] = {
                'files': temp_count,
                'size_mb': round(temp_size / 1024 / 1024, 2)
            }
            cleared_stats['files_deleted'] += temp_count
            cleared_stats['space_freed'] += temp_size

            admin_logger.debug(f"✅ Temporary upload files: {temp_count} files, {temp_size / 1024 / 1024:.2f} MB")

        except Exception as temp_error:
            admin_logger.warning(f"⚠️ Failed to clear the temp directory: {str(temp_error)}")

        # 3. System cache directories (if present)
        try:
            cache_count = 0
            cache_size = 0

            cache_dirs = [
                os.path.join(app_root, 'static', 'cache'),
                os.path.join(app_root, 'cache'),
                os.path.join(app_root, '.cache')
            ]

            for cache_dir in cache_dirs:
                if os.path.exists(cache_dir):
                    for root, dirs, files in os.walk(cache_dir):
                        for file in files:
                            file_path = os.path.join(root, file)
                            try:
                                cache_size += os.path.getsize(file_path)
                                os.remove(file_path)
                                cache_count += 1
                            except Exception:
                                pass

            cleared_stats['categories']['system_cache'] = {
                'files': cache_count,
                'size_mb': round(cache_size / 1024 / 1024, 2)
            }
            cleared_stats['files_deleted'] += cache_count
            cleared_stats['space_freed'] += cache_size

            admin_logger.debug(f"✅ System cache: {cache_count} files, {cache_size / 1024 / 1024:.2f} MB")

        except Exception as cache_error:
            admin_logger.warning(f"⚠️ Failed to clear the system cache: {str(cache_error)}")

        # 4. Old log files (older than 30 days)
        try:
            logs_count = 0
            logs_size = 0
            logs_dir = os.path.join(app_root, 'logs')
            cutoff_date = datetime.now().timestamp() - (30 * 24 * 60 * 60)  # 30 days

            if os.path.exists(logs_dir):
                for root, dirs, files in os.walk(logs_dir):
                    for file in files:
                        if file.endswith(('.log', '.log.1', '.log.2', '.log.3')):
                            file_path = os.path.join(root, file)
                            try:
                                if os.path.getmtime(file_path) < cutoff_date:
                                    logs_size += os.path.getsize(file_path)
                                    os.remove(file_path)
                                    logs_count += 1
                            except Exception:
                                pass

            cleared_stats['categories']['old_logs'] = {
                'files': logs_count,
                'size_mb': round(logs_size / 1024 / 1024, 2)
            }
            cleared_stats['files_deleted'] += logs_count
            cleared_stats['space_freed'] += logs_size

            admin_logger.debug(f"✅ Old log files: {logs_count} files, {logs_size / 1024 / 1024:.2f} MB")

        except Exception as logs_error:
            admin_logger.warning(f"⚠️ Failed to clear old log files: {str(logs_error)}")

        # 5. Clear the application-level cache (if a model cache exists)
        try:
            from models import clear_model_cache
            clear_model_cache()
            admin_logger.debug("✅ Application-level cache cleared")
        except (ImportError, AttributeError):
            admin_logger.debug("ℹ️ No application-level cache available")

        # Summarize the results
        total_space_mb = cleared_stats['space_freed'] / 1024 / 1024
        admin_logger.info(f"✅ Cache clearing finished: {cleared_stats['files_deleted']} files, {total_space_mb:.2f} MB freed")

        return jsonify({
            'success': True,
            'message': f'Cache cleared successfully - {total_space_mb:.2f} MB freed',
            'statistics': {
                'total_files_deleted': cleared_stats['files_deleted'],
                'total_dirs_deleted': cleared_stats['dirs_deleted'],
                'total_space_freed_mb': round(total_space_mb, 2),
                'categories': cleared_stats['categories'],
                'cleanup_timestamp': datetime.now().isoformat()
            }
        })

    except Exception as e:
        admin_logger.error(f"❌ Failed to clear the cache: {str(e)}")
        return jsonify({
            'success': False,
            'message': f'Failed to clear the cache: {str(e)}'
        }), 500
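
# Example invocation (same placeholder assumptions as the examples above):
#
#     curl -X POST http://localhost:5000/api/admin/cache/clear \
#          -H "Cookie: session=<admin-session>"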