🚀📝 Improved documentation on log functionality with new file 'LOGS_FUNCTIONALITY_FIX.md'
This commit is contained in:
216
backend/app.py
216
backend/app.py
@@ -6637,6 +6637,220 @@ def api_logs():
|
||||
'error': f'Fehler beim Abrufen der Log-Daten: {str(e)}'
|
||||
}), 500
|
||||
|
||||
def _parse_admin_log_line(line, file_component, source_file):
    """Parse one raw log line into a normalized entry dict, or return None.

    Supports three formats (checked in order, mirroring the files this app
    writes):
      1. ``2025-06-01 00:34:08 - logger_name - [LEVEL] MESSAGE``
      2. ``[TIMESTAMP] LEVEL: MESSAGE`` (several timestamp layouts)
      3. anything else -> treated as an INFO entry stamped with "now"

    Args:
        line: Stripped, non-empty log line.
        file_component: Component name derived from the file's subdirectory.
        source_file: Basename of the originating log file.

    Returns:
        dict with keys timestamp/level/component/logger/message/source_file,
        or None when the line matches format 1 or 2 but cannot be parsed.
    """
    from datetime import datetime

    # Format 1: "<ts> - <logger> - [LEVEL] <message>"
    if ' - ' in line and '[' in line and ']' in line:
        try:
            parts = line.split(' - ', 3)
            if len(parts) < 4:
                return None
            timestamp_str, logger_name, level_part, message = parts
            if '[' in level_part and ']' in level_part:
                log_level = level_part.split('[')[1].split(']')[0]
            else:
                log_level = 'INFO'
            log_timestamp = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
            return {
                'timestamp': log_timestamp.isoformat(),
                'level': log_level.upper(),
                'component': file_component,
                'logger': logger_name,
                'message': message.strip(),
                'source_file': source_file,
            }
        except (ValueError, IndexError):
            # Looked like format 1 but did not parse -> drop the line.
            return None

    # Format 2: "[<timestamp>] <LEVEL>: <message>"
    if line.startswith('[') and ']' in line and ':' in line:
        try:
            bracket_end = line.find(']')
            timestamp_str = line[1:bracket_end]
            rest = line[bracket_end + 1:].strip()
            if ':' not in rest:
                return None
            level_part, message = rest.split(':', 1)
            log_level = level_part.strip()
            message = message.strip()

            # Try the known timestamp layouts until one fits.
            log_timestamp = None
            for fmt in ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f', '%d.%m.%Y %H:%M:%S'):
                try:
                    log_timestamp = datetime.strptime(timestamp_str, fmt)
                    break
                except ValueError:
                    continue
            if log_timestamp is None:
                return None
            return {
                'timestamp': log_timestamp.isoformat(),
                'level': log_level.upper(),
                'component': file_component,
                'logger': file_component,
                'message': message,
                'source_file': source_file,
            }
        except (ValueError, IndexError):
            return None

    # Format 3: unstructured line -> keep it as INFO with the current time.
    return {
        'timestamp': datetime.now().isoformat(),
        'level': 'INFO',
        'component': file_component,
        'logger': file_component,
        'message': line,
        'source_file': source_file,
    }


@app.route('/api/admin/logs', methods=['GET'])
@login_required
@admin_required
def api_admin_logs():
    """Admin-specific API endpoint for retrieving log data.

    Extended version of /api/logs with additional admin features
    (component filter, per-level/per-component statistics).

    Query parameters:
        level: Log level to filter on ('ALL' or empty = no filter).
        limit: Max entries to return (default 100, capped at 1000).
        offset: Pagination offset (default 0).
        search: Case-insensitive substring match on the message.
        component: Only entries from this component (log subdirectory).

    Returns:
        JSON with 'logs', 'pagination', 'filters' and 'statistics';
        on failure a 500 with 'success': False and an 'error' message.
    """
    try:
        # --- Extract parameters from the query string --------------------
        level = request.args.get('level', '').upper()
        if level == 'ALL':
            level = ''  # 'ALL' means: no level filter
        # Malformed numeric params fall back to defaults instead of 500ing.
        try:
            limit = min(int(request.args.get('limit', 100)), 1000)
        except ValueError:
            limit = 100
        try:
            offset = int(request.args.get('offset', 0))
        except ValueError:
            offset = 0
        search = request.args.get('search', '').strip()
        component = request.args.get('component', '')

        import os
        import glob

        logs_dir = os.path.join(os.path.dirname(__file__), 'logs')
        log_entries = []

        if os.path.exists(logs_dir):
            # Find all .log files up to two subdirectory levels deep.
            log_patterns = [
                os.path.join(logs_dir, '*.log'),
                os.path.join(logs_dir, '*', '*.log'),
                os.path.join(logs_dir, '*', '*', '*.log'),
            ]
            all_log_files = []
            for pattern in log_patterns:
                all_log_files.extend(glob.glob(pattern))

            # Sort by modification time (newest first).
            all_log_files.sort(key=os.path.getmtime, reverse=True)

            # Process at most 10 files for performance.
            for log_file in all_log_files[:10]:
                try:
                    # Derive the component from the file's subdirectory.
                    # NOTE: os.path.dirname() yields '' (not '.') for files
                    # directly inside logs_dir, so treat both as 'system'.
                    rel_dir = os.path.dirname(os.path.relpath(log_file, logs_dir))
                    file_component = rel_dir if rel_dir not in ('', '.') else 'system'

                    # Apply the component filter.
                    if component and component.lower() != file_component.lower():
                        continue

                    with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
                        lines = f.readlines()[-500:]  # only the last 500 lines per file

                    source_name = os.path.basename(log_file)
                    entries_from_file = 0  # per-file cap counter (O(1) instead of rescanning)

                    # Walk the lines newest-first.
                    for raw_line in reversed(lines):
                        stripped = raw_line.strip()
                        if not stripped or stripped.startswith('#'):
                            continue

                        log_entry = _parse_admin_log_line(stripped, file_component, source_name)
                        if log_entry is None:
                            continue

                        # Apply level / search filters.
                        if level and log_entry['level'] != level:
                            continue
                        if search and search.lower() not in log_entry['message'].lower():
                            continue

                        log_entries.append(log_entry)
                        entries_from_file += 1
                        if entries_from_file >= 50:  # per-file limit (performance)
                            break

                except Exception as file_error:
                    app_logger.warning(f"Fehler beim Verarbeiten der Log-Datei {log_file}: {str(file_error)}")
                    continue

        # --- De-duplicate (timestamp + first 100 chars of the message) ---
        unique_entries = []
        seen_messages = set()
        for entry in log_entries:
            key = f"{entry['timestamp']}_{entry['message'][:100]}"
            if key not in seen_messages:
                seen_messages.add(key)
                unique_entries.append(entry)

        # Sort by timestamp (newest first).
        unique_entries.sort(key=lambda x: x['timestamp'], reverse=True)

        # --- Pagination ---------------------------------------------------
        total_count = len(unique_entries)
        paginated_entries = unique_entries[offset:offset + limit]

        # --- Statistics over the full (unpaginated) result ----------------
        level_stats = {}
        component_stats = {}
        for entry in unique_entries:
            level_stats[entry['level']] = level_stats.get(entry['level'], 0) + 1
            component_stats[entry['component']] = component_stats.get(entry['component'], 0) + 1

        app_logger.debug(f"📋 Log-API: {total_count} Einträge gefunden, {len(paginated_entries)} zurückgegeben")

        return jsonify({
            'success': True,
            'logs': paginated_entries,
            'pagination': {
                'total': total_count,
                'limit': limit,
                'offset': offset,
                'has_more': offset + limit < total_count,
            },
            'filters': {
                'level': level or None,
                'search': search or None,
                'component': component or None,
            },
            'statistics': {
                'total_entries': total_count,
                'level_distribution': level_stats,
                'component_distribution': component_stats,
            },
        })

    except Exception as e:
        app_logger.error(f"Fehler beim Abrufen der Admin-Log-Daten: {str(e)}")
        return jsonify({
            'success': False,
            'error': f'Fehler beim Abrufen der Log-Daten: {str(e)}',
            'logs': []
        }), 500
|
||||
|
||||
@app.route('/api/admin/logs/export', methods=['GET'])
|
||||
@login_required
|
||||
@admin_required
|
||||
@@ -7766,7 +7980,7 @@ if __name__ == "__main__":
|
||||
port=80,
|
||||
debug=False,
|
||||
threaded=True
|
||||
)
|
||||
)
|
||||
except KeyboardInterrupt:
|
||||
app_logger.info("🔄 Tastatur-Unterbrechung empfangen - beende Anwendung...")
|
||||
if shutdown_manager:
|
||||
|
Reference in New Issue
Block a user