"""
Performance Tracker Utility

Measures the execution time of functions for performance monitoring.
"""

import time
import functools
import logging
from typing import Callable, Any, Optional

from utils.logging_config import get_logger

# Default logger for performance tracking
performance_logger = get_logger("performance")

def measure_execution_time(logger: Optional[Any] = None, task_name: str = "Task",
                           log_level: str = "INFO", threshold_ms: float = 100.0) -> Callable:
    """
    Decorator that measures the execution time of a function.

    Args:
        logger: Logger instance (optional, defaults to performance_logger)
        task_name: Name of the task used in log messages
        log_level: Log level (DEBUG, INFO, WARNING, ERROR)
        threshold_ms: Threshold in milliseconds above which a call is logged

    Returns:
        Decorator function
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs) -> Any:
            # Resolve the logger
            log = logger if logger else performance_logger

            # Record the start time
            start_time = time.perf_counter()

            try:
                # Run the wrapped function
                result = func(*args, **kwargs)

                # Record the end time
                end_time = time.perf_counter()
                execution_time_ms = (end_time - start_time) * 1000

                # Only log calls that exceed the threshold
                if execution_time_ms >= threshold_ms:
                    log_message = f"⏱️ {task_name} - execution time: {execution_time_ms:.2f}ms"

                    if log_level.upper() == "DEBUG":
                        log.debug(log_message)
                    elif log_level.upper() == "INFO":
                        log.info(log_message)
                    elif log_level.upper() == "WARNING":
                        log.warning(log_message)
                    elif log_level.upper() == "ERROR":
                        log.error(log_message)
                    else:
                        log.info(log_message)

                return result

            except Exception as e:
                # Measure the elapsed time even when the call fails
                end_time = time.perf_counter()
                execution_time_ms = (end_time - start_time) * 1000

                error_message = f"❌ {task_name} - failed after {execution_time_ms:.2f}ms: {str(e)}"
                log.error(error_message)

                # Re-raise the exception
                raise

        return wrapper
    return decorator
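
# Usage sketch (assumption: `logger` may be any logging.Logger-like object that
# exposes debug/info/warning/error). With these arguments only calls slower than
# 250 ms would be logged, at WARNING level; `run_query` and the 250 ms threshold
# are illustrative only, not part of this module:
#
#   @measure_execution_time(task_name="DB query", log_level="WARNING", threshold_ms=250.0)
#   def run_query():
#       ...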


def measure_time_sync(func: Callable, task_name: str = "Function",
                      logger: Optional[Any] = None) -> tuple[Any, float]:
    """
    Synchronous timing of a single function call.

    Args:
        func: Function to execute (called without arguments)
        task_name: Name used in log messages
        logger: Logger instance (optional)

    Returns:
        Tuple of (result, execution_time_in_ms)
    """
    log = logger if logger else performance_logger

    start_time = time.perf_counter()

    try:
        result = func()
        end_time = time.perf_counter()
        execution_time_ms = (end_time - start_time) * 1000

        log.info(f"⏱️ {task_name} - execution time: {execution_time_ms:.2f}ms")

        return result, execution_time_ms

    except Exception as e:
        end_time = time.perf_counter()
        execution_time_ms = (end_time - start_time) * 1000

        log.error(f"❌ {task_name} - failed after {execution_time_ms:.2f}ms: {str(e)}")
        raise
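
# Usage sketch: measure_time_sync invokes `func` without arguments, so wrap calls
# that need arguments in a lambda or functools.partial. `parse_file` and "data.csv"
# below are purely illustrative names, not part of this project:
#
#   result, elapsed_ms = measure_time_sync(lambda: parse_file("data.csv"), "Parse CSV")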


class PerformanceTracker:
    """
    Helper class for more detailed performance tracking.
    """

    def __init__(self, name: str, logger: Optional[Any] = None):
        self.name = name
        self.logger = logger if logger else performance_logger
        self.start_time = None
        self.end_time = None
        self.checkpoints = []

    def start(self):
        """Starts the timing."""
        self.start_time = time.perf_counter()
        self.checkpoints = []
        self.logger.debug(f"📊 Performance tracking started for: {self.name}")

    def checkpoint(self, name: str):
        """Adds a checkpoint."""
        if self.start_time is None:
            self.logger.warning(f"⚠️ Checkpoint '{name}' recorded without a started measurement")
            return

        current_time = time.perf_counter()
        elapsed_ms = (current_time - self.start_time) * 1000

        self.checkpoints.append({
            'name': name,
            'time': current_time,
            'elapsed_ms': elapsed_ms
        })

        self.logger.debug(f"📍 Checkpoint '{name}': {elapsed_ms:.2f}ms")

    def stop(self) -> float:
        """Stops the timing and returns the total time in milliseconds."""
        if self.start_time is None:
            self.logger.warning(f"⚠️ Performance tracking was never started for: {self.name}")
            return 0.0

        self.end_time = time.perf_counter()
        total_time_ms = (self.end_time - self.start_time) * 1000

        # Log a summary
        summary = f"🏁 {self.name} - total time: {total_time_ms:.2f}ms"
        if self.checkpoints:
            summary += f" ({len(self.checkpoints)} checkpoints)"

        self.logger.info(summary)

        # Detailed checkpoint info at DEBUG level
        if self.checkpoints and self.logger.isEnabledFor(logging.DEBUG):
            for i, checkpoint in enumerate(self.checkpoints):
                if i == 0:
                    duration = checkpoint['elapsed_ms']
                else:
                    duration = checkpoint['elapsed_ms'] - self.checkpoints[i - 1]['elapsed_ms']
                self.logger.debug(f"  📍 {checkpoint['name']}: +{duration:.2f}ms (total: {checkpoint['elapsed_ms']:.2f}ms)")

        return total_time_ms

    def __enter__(self):
        """Context manager - start."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager - stop."""
        self.stop()


# Example usage:
if __name__ == "__main__":
    # Decorator usage
    @measure_execution_time(task_name="Test function", threshold_ms=0.1)
    def test_function():
        time.sleep(0.1)
        return "Done"

    # Context manager usage
    with PerformanceTracker("Test performance") as tracker:
        time.sleep(0.05)
        tracker.checkpoint("Middle")
        time.sleep(0.05)
        tracker.checkpoint("End")

    # Synchronous measurement
    result, exec_time = measure_time_sync(test_function, "Direct measurement")
    print(f"Result: {result}, time: {exec_time:.2f}ms")
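
    # Additional sketch: manual start/checkpoint/stop without the context manager,
    # for code paths that do not map cleanly onto a single `with` block.
    # The tracker name and step names below are illustrative only.
    manual_tracker = PerformanceTracker("Manual tracking")
    manual_tracker.start()
    time.sleep(0.02)
    manual_tracker.checkpoint("Step 1")
    time.sleep(0.02)
    total_ms = manual_tracker.stop()
    print(f"Manual tracking total: {total_ms:.2f}ms")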