"""
|
|
Wartungs-API-Endpunkte für das MYP-System
|
|
Stellt API-Routen für Cache-Löschung, Datenbank-Optimierung und Backup-Erstellung bereit
|
|
"""

import os
import tempfile
import zipfile
import gc
from datetime import datetime

from flask import jsonify
from flask_login import login_required, current_user
from sqlalchemy import text

# Import the required modules from the main application
try:
    from app import app, app_logger, admin_required, get_db_session
except ImportError:
    # Fallback in case the imports are not available
    app = None
    app_logger = None
    admin_required = None
    get_db_session = None


def register_maintenance_routes(app_instance, logger, admin_decorator, db_session_func):
    """
    Registers the maintenance API routes with the Flask app

    Args:
        app_instance: Flask app instance
        logger: logger instance
        admin_decorator: admin-required decorator
        db_session_func: function that returns a database session
    """

    @app_instance.route('/api/admin/maintenance/clear-cache', methods=['POST'])
    @login_required
    @admin_decorator
    def api_clear_cache():
        """Clears the system cache"""
        try:
            logger.info(f"🧹 Cache clearing started by user {current_user.username}")

            # Clear the Flask cache (if one is configured)
            if hasattr(app_instance, 'cache'):
                app_instance.cache.clear()

            # Remove temporary files created by the application
            temp_dir = tempfile.gettempdir()
            myp_temp_files = []

            try:
                for root, dirs, files in os.walk(temp_dir):
                    for file in files:
                        if 'myp_' in file.lower() or 'tba_' in file.lower():
                            file_path = os.path.join(root, file)
                            try:
                                os.remove(file_path)
                                myp_temp_files.append(file)
                            except OSError:
                                pass
            except Exception as e:
                logger.warning(f"Error while removing temporary files: {str(e)}")

            # Run a garbage-collection pass to release unreferenced objects
            gc.collect()

            logger.info(f"✅ Cache cleared successfully. {len(myp_temp_files)} temporary files removed")

            return jsonify({
                'success': True,
                'message': f'Cache cleared successfully. {len(myp_temp_files)} temporary files removed.',
                'details': {
                    'temp_files_removed': len(myp_temp_files),
                    'timestamp': datetime.now().isoformat()
                }
            })

        except Exception as e:
            logger.error(f"❌ Error while clearing the cache: {str(e)}")
            return jsonify({
                'success': False,
                'message': f'Error while clearing the cache: {str(e)}'
            }), 500
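
    # For reference, a successful call to the endpoint above returns JSON of
    # roughly this shape (derived from the jsonify call; the file count and the
    # timestamp are illustrative values only):
    #
    #   {
    #       "success": true,
    #       "message": "Cache cleared successfully. 3 temporary files removed.",
    #       "details": {"temp_files_removed": 3, "timestamp": "2024-01-01T12:00:00"}
    #   }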

    @app_instance.route('/api/admin/maintenance/optimize-database', methods=['POST'])
    @login_required
    @admin_decorator
    def api_optimize_database():
        """Optimizes the database"""
        db_session = db_session_func()

        try:
            logger.info(f"🔧 Database optimization started by user {current_user.username}")

            optimization_results = {
                'tables_analyzed': 0,
                'indexes_rebuilt': 0,
                'space_freed_mb': 0,
                'errors': []
            }

            # SQLite-specific optimizations
            try:
                # VACUUM - compacts the database file
                db_session.execute(text("VACUUM;"))
                optimization_results['space_freed_mb'] += 1  # estimated

                # ANALYZE - refreshes the query-planner statistics
                db_session.execute(text("ANALYZE;"))
                optimization_results['tables_analyzed'] += 1

                # REINDEX - rebuilds the indexes
                db_session.execute(text("REINDEX;"))
                optimization_results['indexes_rebuilt'] += 1

                db_session.commit()

            except Exception as e:
                optimization_results['errors'].append(f"SQLite optimization: {str(e)}")
                logger.warning(f"Error during SQLite optimization: {str(e)}")

            # Clean up orphaned files
            try:
                uploads_dir = os.path.join(app_instance.root_path, 'uploads')
                if os.path.exists(uploads_dir):
                    orphaned_files = 0
                    for root, dirs, files in os.walk(uploads_dir):
                        for file in files:
                            file_path = os.path.join(root, file)
                            # Remove files older than 7 days (no reference check is done here)
                            file_age = datetime.now() - datetime.fromtimestamp(os.path.getctime(file_path))
                            if file_age.days > 7:
                                try:
                                    os.remove(file_path)
                                    orphaned_files += 1
                                except OSError:
                                    pass

                    optimization_results['orphaned_files_removed'] = orphaned_files

            except Exception as e:
                optimization_results['errors'].append(f"File cleanup: {str(e)}")

            logger.info(f"✅ Database optimization finished: {optimization_results}")

            return jsonify({
                'success': True,
                'message': 'Database optimized successfully',
                'details': optimization_results
            })

        except Exception as e:
            db_session.rollback()
            logger.error(f"❌ Error during database optimization: {str(e)}")
            return jsonify({
                'success': False,
                'message': f'Error during database optimization: {str(e)}'
            }), 500
        finally:
            db_session.close()
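
    # A minimal standalone sketch of the same maintenance statements, e.g. for
    # running them outside the Flask app. This assumes the SQLite file lives at
    # instance/database.db (the same path the backup route below uses); note
    # that VACUUM cannot run inside an open transaction, hence autocommit mode:
    #
    #   import sqlite3
    #   conn = sqlite3.connect("instance/database.db", isolation_level=None)
    #   conn.execute("VACUUM")
    #   conn.execute("ANALYZE")
    #   conn.execute("REINDEX")
    #   conn.close()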

    @app_instance.route('/api/admin/maintenance/create-backup', methods=['POST'])
    @login_required
    @admin_decorator
    def api_create_backup():
        """Creates a system backup"""
        try:
            logger.info(f"💾 Backup creation started by user {current_user.username}")

            # Create the backup directory if it does not exist yet
            backup_dir = os.path.join(app_instance.root_path, 'database', 'backups')
            os.makedirs(backup_dir, exist_ok=True)

            # Backup file name with timestamp
            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            backup_filename = f'myp_backup_{timestamp}.zip'
            backup_path = os.path.join(backup_dir, backup_filename)

            backup_info = {
                'filename': backup_filename,
                'created_at': datetime.now().isoformat(),
                'created_by': current_user.username,
                'size_mb': 0,
                'files_included': []
            }

            # Create the ZIP backup
            with zipfile.ZipFile(backup_path, 'w', zipfile.ZIP_DEFLATED) as zipf:

                # Add the database file
                db_path = os.path.join(app_instance.root_path, 'instance', 'database.db')
                if os.path.exists(db_path):
                    zipf.write(db_path, 'database.db')
                    backup_info['files_included'].append('database.db')

                # Add configuration files
                config_files = ['config.py', 'requirements.txt', '.env']
                for config_file in config_files:
                    config_path = os.path.join(app_instance.root_path, config_file)
                    if os.path.exists(config_path):
                        zipf.write(config_path, config_file)
                        backup_info['files_included'].append(config_file)

                # Add the uploads directory (small files only)
                uploads_dir = os.path.join(app_instance.root_path, 'uploads')
                if os.path.exists(uploads_dir):
                    for root, dirs, files in os.walk(uploads_dir):
                        for file in files:
                            file_path = os.path.join(root, file)
                            file_size = os.path.getsize(file_path)

                            # Only include files smaller than 10 MB
                            if file_size < 10 * 1024 * 1024:
                                rel_path = os.path.relpath(file_path, app_instance.root_path)
                                zipf.write(file_path, rel_path)
                                backup_info['files_included'].append(rel_path)

            # Determine the final backup size
            backup_size = os.path.getsize(backup_path)
            backup_info['size_mb'] = round(backup_size / (1024 * 1024), 2)

            # Prune old backups (keep only the 10 most recent)
            try:
                backup_files = []
                for file in os.listdir(backup_dir):
                    if file.startswith('myp_backup_') and file.endswith('.zip'):
                        file_path = os.path.join(backup_dir, file)
                        backup_files.append((file_path, os.path.getctime(file_path)))

                # Sort by creation time, newest first
                backup_files.sort(key=lambda x: x[1], reverse=True)

                # Delete everything beyond the 10 most recent backups
                for old_backup, _ in backup_files[10:]:
                    try:
                        os.remove(old_backup)
                        logger.info(f"Old backup deleted: {os.path.basename(old_backup)}")
                    except OSError:
                        pass

            except Exception as e:
                logger.warning(f"Error while pruning old backups: {str(e)}")

            logger.info(f"✅ Backup created successfully: {backup_filename} ({backup_info['size_mb']} MB)")

            return jsonify({
                'success': True,
                'message': f'Backup created successfully: {backup_filename}',
                'details': backup_info
            })

        except Exception as e:
            logger.error(f"❌ Error during backup creation: {str(e)}")
            return jsonify({
                'success': False,
                'message': f'Error during backup creation: {str(e)}'
            }), 500
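
    # A small sketch (assumption: run from the application root; the timestamped
    # file name is only an example) for checking what a generated backup archive
    # contains, using the standard library:
    #
    #   import zipfile
    #   with zipfile.ZipFile("database/backups/myp_backup_20240101_120000.zip") as zf:
    #       for name in zf.namelist():
    #           print(name)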

    logger.info("✅ Maintenance API endpoints registered successfully")


# Automatic registration if the required objects are available
if app and app_logger and admin_required and get_db_session:
    register_maintenance_routes(app, app_logger, admin_required, get_db_session)
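
# Explicit registration sketch for setups where the fallback import path above
# leaves `app` as None (e.g. an application-factory layout). The module name
# `maintenance_api` and the `create_app` factory are hypothetical; adjust them
# to the actual project layout:
#
#   from maintenance_api import register_maintenance_routes
#   flask_app = create_app()
#   register_maintenance_routes(flask_app, app_logger, admin_required, get_db_session)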