📚 Improved logging structure & added backup file 🔜
backend/app.py: 229 additions
@@ -5612,6 +5612,235 @@ def get_tables_css():
        return "/* Advanced Tables CSS could not be loaded */", 500

# ===== MAINTENANCE SYSTEM API =====

@app.route('/api/admin/maintenance/clear-cache', methods=['POST'])
@login_required
@admin_required
def api_clear_cache():
    """Clears the system cache."""
    try:
        app_logger.info(f"🧹 Cache clearing started by user {current_user.username}")

        # Clear the Flask cache (if present)
        if hasattr(app, 'cache'):
            app.cache.clear()

        # Remove temporary files
        import tempfile
        temp_dir = tempfile.gettempdir()
        myp_temp_files = []

        try:
            for root, dirs, files in os.walk(temp_dir):
                for file in files:
                    if 'myp_' in file.lower() or 'tba_' in file.lower():
                        file_path = os.path.join(root, file)
                        try:
                            os.remove(file_path)
                            myp_temp_files.append(file)
                        except OSError:
                            pass
        except Exception as e:
            app_logger.warning(f"Error while deleting temporary files: {str(e)}")

        # Trigger a garbage-collection pass (frees unreferenced Python objects)
        import gc
        gc.collect()

        app_logger.info(f"✅ Cache cleared successfully. {len(myp_temp_files)} temporary files removed")

        return jsonify({
            'success': True,
            'message': f'Cache cleared successfully. {len(myp_temp_files)} temporary files removed.',
            'details': {
                'temp_files_removed': len(myp_temp_files),
                'timestamp': datetime.now().isoformat()
            }
        })

    except Exception as e:
        app_logger.error(f"❌ Error while clearing the cache: {str(e)}")
        return jsonify({
            'success': False,
            'message': f'Error while clearing the cache: {str(e)}'
        }), 500
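For reference, a minimal client-side sketch of how an admin might trigger this endpoint. It assumes the `requests` library, a hypothetical base URL, and an already authenticated admin session; the login flow itself is omitted and nothing below is part of the commit.

# Illustrative only - assumes `requests` and an authenticated admin session cookie.
import requests

BASE_URL = "http://localhost:5000"  # hypothetical deployment URL
session = requests.Session()
# ... authenticate `session` as an admin user here (login flow omitted) ...

resp = session.post(f"{BASE_URL}/api/admin/maintenance/clear-cache")
if resp.ok:
    details = resp.json()['details']
    print(f"Removed {details['temp_files_removed']} temp files at {details['timestamp']}")
else:
    print(f"Cache clearing failed with HTTP {resp.status_code}")
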
@app.route('/api/admin/maintenance/optimize-database', methods=['POST'])
@login_required
@admin_required
def api_optimize_database():
    """Optimizes the database."""
    db_session = get_db_session()

    try:
        app_logger.info(f"🔧 Database optimization started by user {current_user.username}")

        optimization_results = {
            'tables_analyzed': 0,
            'indexes_rebuilt': 0,
            'space_freed_mb': 0,
            'errors': []
        }

        # SQLite-specific optimizations
        try:
            # VACUUM - compacts the database file
            db_session.execute(text("VACUUM;"))
            optimization_results['space_freed_mb'] += 1  # rough estimate, not measured

            # ANALYZE - refreshes the query-planner statistics
            db_session.execute(text("ANALYZE;"))
            optimization_results['tables_analyzed'] += 1

            # REINDEX - rebuilds the indexes
            db_session.execute(text("REINDEX;"))
            optimization_results['indexes_rebuilt'] += 1

            db_session.commit()

        except Exception as e:
            optimization_results['errors'].append(f"SQLite optimization: {str(e)}")
            app_logger.warning(f"Error during SQLite optimization: {str(e)}")

        # Clean up orphaned files
        try:
            uploads_dir = os.path.join(app.root_path, 'uploads')
            if os.path.exists(uploads_dir):
                orphaned_files = 0
                for root, dirs, files in os.walk(uploads_dir):
                    for file in files:
                        file_path = os.path.join(root, file)
                        # Delete files older than 7 days (no reference check is performed here)
                        file_age = datetime.now() - datetime.fromtimestamp(os.path.getctime(file_path))
                        if file_age.days > 7:
                            try:
                                os.remove(file_path)
                                orphaned_files += 1
                            except OSError:
                                pass

                optimization_results['orphaned_files_removed'] = orphaned_files

        except Exception as e:
            optimization_results['errors'].append(f"File cleanup: {str(e)}")

        app_logger.info(f"✅ Database optimization completed: {optimization_results}")

        return jsonify({
            'success': True,
            'message': 'Database optimized successfully',
            'details': optimization_results
        })

    except Exception as e:
        db_session.rollback()
        app_logger.error(f"❌ Error during database optimization: {str(e)}")
        return jsonify({
            'success': False,
            'message': f'Error during database optimization: {str(e)}'
        }), 500
    finally:
        db_session.close()
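For context, the three maintenance statements can also be run directly against a SQLite file with the standard-library sqlite3 module. The sketch below is illustrative only; the database path is an assumption that mirrors the backup route further down.

# Standalone sketch of the SQLite maintenance steps used above (stdlib only).
import sqlite3

DB_PATH = "instance/database.db"  # assumed path, mirroring the backup route below

# isolation_level=None keeps the connection in autocommit mode, so VACUUM is not
# blocked by an implicitly opened transaction.
conn = sqlite3.connect(DB_PATH, isolation_level=None)
try:
    conn.execute("VACUUM;")   # rewrites the file and reclaims free pages
    conn.execute("ANALYZE;")  # refreshes statistics used by the query planner
    conn.execute("REINDEX;")  # rebuilds all indexes from their table data
finally:
    conn.close()
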
@app.route('/api/admin/maintenance/create-backup', methods=['POST'])
@login_required
@admin_required
def api_create_backup():
    """Creates a system backup."""
    try:
        app_logger.info(f"💾 Backup creation started by user {current_user.username}")

        import zipfile

        # Create the backup directory
        backup_dir = os.path.join(app.root_path, 'database', 'backups')
        os.makedirs(backup_dir, exist_ok=True)

        # Backup file name with timestamp
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        backup_filename = f'myp_backup_{timestamp}.zip'
        backup_path = os.path.join(backup_dir, backup_filename)

        backup_info = {
            'filename': backup_filename,
            'created_at': datetime.now().isoformat(),
            'created_by': current_user.username,
            'size_mb': 0,
            'files_included': []
        }

        # Create the ZIP backup
        with zipfile.ZipFile(backup_path, 'w', zipfile.ZIP_DEFLATED) as zipf:

            # Add the database file
            db_path = os.path.join(app.root_path, 'instance', 'database.db')
            if os.path.exists(db_path):
                zipf.write(db_path, 'database.db')
                backup_info['files_included'].append('database.db')

            # Add configuration files
            config_files = ['config.py', 'requirements.txt', '.env']
            for config_file in config_files:
                config_path = os.path.join(app.root_path, config_file)
                if os.path.exists(config_path):
                    zipf.write(config_path, config_file)
                    backup_info['files_included'].append(config_file)

            # Add important upload directories (small files only)
            uploads_dir = os.path.join(app.root_path, 'uploads')
            if os.path.exists(uploads_dir):
                for root, dirs, files in os.walk(uploads_dir):
                    for file in files:
                        file_path = os.path.join(root, file)
                        file_size = os.path.getsize(file_path)

                        # Only include files smaller than 10 MB
                        if file_size < 10 * 1024 * 1024:
                            rel_path = os.path.relpath(file_path, app.root_path)
                            zipf.write(file_path, rel_path)
                            backup_info['files_included'].append(rel_path)

        # Calculate the backup size
        backup_size = os.path.getsize(backup_path)
        backup_info['size_mb'] = round(backup_size / (1024 * 1024), 2)

        # Prune old backups (keep only the 10 most recent)
        try:
            backup_files = []
            for file in os.listdir(backup_dir):
                if file.startswith('myp_backup_') and file.endswith('.zip'):
                    file_path = os.path.join(backup_dir, file)
                    backup_files.append((file_path, os.path.getctime(file_path)))

            # Sort by creation time, newest first
            backup_files.sort(key=lambda x: x[1], reverse=True)

            # Delete older backups (anything beyond the 10 newest)
            for old_backup, _ in backup_files[10:]:
                try:
                    os.remove(old_backup)
                    app_logger.info(f"Old backup deleted: {os.path.basename(old_backup)}")
                except OSError:
                    pass

        except Exception as e:
            app_logger.warning(f"Error while pruning old backups: {str(e)}")

        app_logger.info(f"✅ Backup created successfully: {backup_filename} ({backup_info['size_mb']} MB)")

        return jsonify({
            'success': True,
            'message': f'Backup created successfully: {backup_filename}',
            'details': backup_info
        })

    except Exception as e:
        app_logger.error(f"❌ Error during backup creation: {str(e)}")
        return jsonify({
            'success': False,
            'message': f'Error during backup creation: {str(e)}'
        }), 500
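To sanity-check an archive produced by this route, a small zipfile-based sketch can list and extract its contents; the concrete file name below is a made-up example of the myp_backup_<timestamp>.zip pattern and not a file created by the commit.

# Sketch: inspecting a backup archive created by the route above (stdlib only).
import zipfile

# Hypothetical example file name following the myp_backup_<timestamp>.zip pattern.
archive = "database/backups/myp_backup_20240101_120000.zip"

with zipfile.ZipFile(archive) as zf:
    for info in zf.infolist():
        print(f"{info.filename}  ({info.file_size} bytes)")
    # Extract everything into a scratch directory for a manual restore check.
    zf.extractall("restore_check")
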
@app.route('/api/maintenance/tasks', methods=['GET', 'POST'])
@login_required
def maintenance_tasks():