📝 "Refactor authentication flow

Till Tomczak 2025-06-02 14:05:37 +02:00
parent b5fca69a0f
commit 7460ce3e12
3 changed files with 1539 additions and 694 deletions

@@ -36,7 +36,7 @@ else:
    get_windows_thread_manager = None
# Local imports
-from models import init_database, create_initial_admin, User, Printer, Job, Stats, SystemLog, get_db_session, GuestRequest, UserPermission, Notification, JobOrder, Base, get_engine
+from models import init_database, create_initial_admin, User, Printer, Job, Stats, SystemLog, get_db_session, GuestRequest, UserPermission, Notification, JobOrder, Base, get_engine, PlugStatusLog
from utils.logging_config import setup_logging, get_logger, measure_execution_time, log_startup_info, debug_request, debug_response
from utils.job_scheduler import JobScheduler, get_job_scheduler
from utils.queue_manager import start_queue_manager, stop_queue_manager, get_queue_manager

@@ -43,7 +43,7 @@ _cache_lock = threading.Lock()
_cache_ttl = {}  # Time-to-live for cache entries
# All exported models
-__all__ = ['User', 'Printer', 'Job', 'Stats', 'SystemLog', 'Base', 'GuestRequest', 'UserPermission', 'Notification', 'JobOrder', 'SystemTimer', 'init_db', 'init_database', 'create_initial_admin', 'get_db_session', 'get_cached_session', 'clear_cache', 'engine']
+__all__ = ['User', 'Printer', 'Job', 'Stats', 'SystemLog', 'Base', 'GuestRequest', 'UserPermission', 'Notification', 'JobOrder', 'SystemTimer', 'PlugStatusLog', 'init_db', 'init_database', 'create_initial_admin', 'get_db_session', 'get_cached_session', 'clear_cache', 'engine']
# ===== DATABASE CONFIGURATION WITH WAL AND OPTIMIZATIONS =====
@@ -1595,6 +1595,313 @@ class SystemTimer(Base):
        return None

class PlugStatusLog(Base):
    """
    Logging system for smart plug status monitoring.
    Records every state change of the smart plugs (TAPO).
    """
    __tablename__ = "plug_status_logs"

    id = Column(Integer, primary_key=True)
    printer_id = Column(Integer, ForeignKey("printers.id"), nullable=False)
    status = Column(String(20), nullable=False)  # 'connected', 'disconnected', 'on', 'off'
    timestamp = Column(DateTime, default=datetime.now, nullable=False)

    # Additional monitoring data
    ip_address = Column(String(50), nullable=True)  # IP of the plug/printer
    power_consumption = Column(Float, nullable=True)  # Power draw in watts (if available)
    voltage = Column(Float, nullable=True)  # Voltage in volts (if available)
    current = Column(Float, nullable=True)  # Current in amperes (if available)

    # Monitoring context
    source = Column(String(50), default="system")  # 'system', 'manual', 'api', 'scheduler'
    user_id = Column(Integer, ForeignKey("users.id"), nullable=True)  # Set on manual changes
    notes = Column(Text, nullable=True)  # Additional notes or error details

    # Technical details
    response_time_ms = Column(Integer, nullable=True)  # Plug response time in ms
    error_message = Column(Text, nullable=True)  # Error message on connection problems
    firmware_version = Column(String(50), nullable=True)  # Plug firmware version

    # Relationships
    printer = relationship("Printer", foreign_keys=[printer_id])
    user = relationship("User", foreign_keys=[user_id])
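
    # Rough sketch of the table this model maps to (an assumption: SQLite
    # dialect, as suggested by the WAL configuration section referenced above;
    # not DDL shipped by this commit):
    #   CREATE TABLE plug_status_logs (
    #       id INTEGER PRIMARY KEY,
    #       printer_id INTEGER NOT NULL REFERENCES printers (id),
    #       status VARCHAR(20) NOT NULL,
    #       timestamp DATETIME NOT NULL,
    #       ip_address VARCHAR(50), power_consumption FLOAT, voltage FLOAT,
    #       current FLOAT, source VARCHAR(50),
    #       user_id INTEGER REFERENCES users (id), notes TEXT,
    #       response_time_ms INTEGER, error_message TEXT,
    #       firmware_version VARCHAR(50)
    #   );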
    def to_dict(self) -> dict:
        """
        Converts the PlugStatusLog object into a dictionary.
        """
        cache_key = get_cache_key("PlugStatusLog", self.id, "dict")
        cached_result = get_cache(cache_key)
        if cached_result is not None:
            return cached_result

        result = {
            "id": self.id,
            "printer_id": self.printer_id,
            "printer_name": self.printer.name if self.printer else None,
            "status": self.status,
            "timestamp": self.timestamp.isoformat() if self.timestamp else None,
            "ip_address": self.ip_address,
            "power_consumption": self.power_consumption,
            "voltage": self.voltage,
            "current": self.current,
            "source": self.source,
            "user_id": self.user_id,
            "user_name": self.user.name if self.user else None,
            "notes": self.notes,
            "response_time_ms": self.response_time_ms,
            "error_message": self.error_message,
            "firmware_version": self.firmware_version
        }

        # Cache the result (5 minutes)
        set_cache(cache_key, result, 300)
        return result
    @classmethod
    def log_status_change(cls, printer_id: int, status: str, source: str = "system",
                          user_id: int = None, ip_address: str = None,
                          power_consumption: float = None, voltage: float = None,
                          current: float = None, notes: str = None,
                          response_time_ms: int = None, error_message: str = None,
                          firmware_version: str = None) -> 'PlugStatusLog':
        """
        Creates a new status log entry for a smart plug.

        Args:
            printer_id: ID of the associated printer
            status: Plug status ('connected', 'disconnected', 'on', 'off')
            source: Origin of the status change ('system', 'manual', 'api', 'scheduler')
            user_id: ID of the user (for manual changes)
            ip_address: IP address of the plug
            power_consumption: Power draw in watts
            voltage: Voltage in volts
            current: Current in amperes
            notes: Additional notes
            response_time_ms: Response time in milliseconds
            error_message: Error message in case of problems
            firmware_version: Plug firmware version

        Returns:
            The created PlugStatusLog object
        """
        try:
            with get_cached_session() as session:
                log_entry = cls(
                    printer_id=printer_id,
                    status=status,
                    ip_address=ip_address,
                    power_consumption=power_consumption,
                    voltage=voltage,
                    current=current,
                    source=source,
                    user_id=user_id,
                    notes=notes,
                    response_time_ms=response_time_ms,
                    error_message=error_message,
                    firmware_version=firmware_version
                )
                session.add(log_entry)
                session.commit()

                # Invalidate cache
                invalidate_model_cache("PlugStatusLog")

                logger.info(f"Plug status logged: printer {printer_id}, status: {status}, source: {source}")
                return log_entry
        except Exception as e:
            logger.error(f"Failed to log plug status: {str(e)}")
            raise
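
    # Usage sketch (hypothetical printer id and measurements, not values from
    # this commit):
    #   PlugStatusLog.log_status_change(printer_id=1, status="on", source="api",
    #                                   ip_address="192.168.0.50",
    #                                   power_consumption=87.5,
    #                                   response_time_ms=120)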
    @classmethod
    def get_printer_history(cls, printer_id: int, hours: int = 24) -> List['PlugStatusLog']:
        """
        Fetches the plug status history for a given printer.

        Args:
            printer_id: ID of the printer
            hours: Number of hours to look back (default: 24)

        Returns:
            List of PlugStatusLog entries
        """
        cache_key = get_cache_key("PlugStatusLog", printer_id, f"history_{hours}h")
        cached_result = get_cache(cache_key)
        if cached_result is not None:
            return cached_result

        try:
            with get_cached_session() as session:
                cutoff_time = datetime.now() - timedelta(hours=hours)

                logs = session.query(cls)\
                    .filter(cls.printer_id == printer_id)\
                    .filter(cls.timestamp >= cutoff_time)\
                    .order_by(cls.timestamp.desc())\
                    .all()

                # Cache the result (10 minutes)
                set_cache(cache_key, logs, 600)
                return logs
        except Exception as e:
            logger.error(f"Failed to fetch plug status history: {str(e)}")
            return []
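
    # Example (sketch, hypothetical printer id): fetch the last 48 hours and
    # pick out disconnect events:
    #   history = PlugStatusLog.get_printer_history(printer_id=1, hours=48)
    #   offline = [log for log in history if log.status == "disconnected"]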
    @classmethod
    def get_all_recent_logs(cls, hours: int = 24, limit: int = 1000) -> List['PlugStatusLog']:
        """
        Fetches all recent plug status logs for the administrator overview.

        Args:
            hours: Number of hours to look back (default: 24)
            limit: Maximum number of entries (default: 1000)

        Returns:
            List of PlugStatusLog entries
        """
        cache_key = get_cache_key("PlugStatusLog", "all", f"recent_{hours}h_{limit}")
        cached_result = get_cache(cache_key)
        if cached_result is not None:
            return cached_result

        try:
            with get_cached_session() as session:
                cutoff_time = datetime.now() - timedelta(hours=hours)

                logs = session.query(cls)\
                    .filter(cls.timestamp >= cutoff_time)\
                    .order_by(cls.timestamp.desc())\
                    .limit(limit)\
                    .all()

                # Cache the result (5 minutes for the admin overview)
                set_cache(cache_key, logs, 300)
                return logs
        except Exception as e:
            logger.error(f"Failed to fetch recent plug status logs: {str(e)}")
            return []
    @classmethod
    def get_status_statistics(cls, hours: int = 24) -> Dict[str, Any]:
        """
        Builds statistics about plug status over a given timeframe.

        Args:
            hours: Number of hours to look back (default: 24)

        Returns:
            Dictionary with statistics
        """
        cache_key = get_cache_key("PlugStatusLog", "stats", f"{hours}h")
        cached_result = get_cache(cache_key)
        if cached_result is not None:
            return cached_result

        try:
            with get_cached_session() as session:
                cutoff_time = datetime.now() - timedelta(hours=hours)

                # Total number of logs
                total_logs = session.query(cls)\
                    .filter(cls.timestamp >= cutoff_time)\
                    .count()

                # Status distribution
                status_counts = session.query(cls.status, func.count(cls.id))\
                    .filter(cls.timestamp >= cutoff_time)\
                    .group_by(cls.status)\
                    .all()

                # Printers with the most status changes
                printer_counts = session.query(cls.printer_id, func.count(cls.id))\
                    .filter(cls.timestamp >= cutoff_time)\
                    .group_by(cls.printer_id)\
                    .order_by(func.count(cls.id).desc())\
                    .limit(10)\
                    .all()

                # Average response time
                avg_response_time = session.query(func.avg(cls.response_time_ms))\
                    .filter(cls.timestamp >= cutoff_time)\
                    .filter(cls.response_time_ms.isnot(None))\
                    .scalar()

                # Error rate
                error_count = session.query(cls)\
                    .filter(cls.timestamp >= cutoff_time)\
                    .filter(cls.error_message.isnot(None))\
                    .count()

                stats = {
                    "total_logs": total_logs,
                    "status_distribution": dict(status_counts),
                    "top_printers": dict(printer_counts),
                    "average_response_time_ms": float(avg_response_time) if avg_response_time else None,
                    "error_count": error_count,
                    "error_rate": (error_count / total_logs * 100) if total_logs > 0 else 0,
                    "timeframe_hours": hours,
                    "generated_at": datetime.now().isoformat()
                }

                # Cache the result (10 minutes)
                set_cache(cache_key, stats, 600)
                return stats
        except Exception as e:
            logger.error(f"Failed to build plug status statistics: {str(e)}")
            return {
                "total_logs": 0,
                "status_distribution": {},
                "top_printers": {},
                "average_response_time_ms": None,
                "error_count": 0,
                "error_rate": 0,
                "timeframe_hours": hours,
                "generated_at": datetime.now().isoformat(),
                "error": str(e)
            }
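
    # Shape of the returned dictionary (illustrative values only):
    #   {
    #       "total_logs": 42,
    #       "status_distribution": {"on": 20, "off": 18, "disconnected": 4},
    #       "top_printers": {3: 15, 7: 9},
    #       "average_response_time_ms": 131.0,
    #       "error_count": 4,
    #       "error_rate": 9.52,
    #       "timeframe_hours": 24,
    #       "generated_at": "2025-06-02T14:05:37"
    #   }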
    @classmethod
    def cleanup_old_logs(cls, days: int = 30) -> int:
        """
        Purges old plug status logs (older than the given number of days).

        Args:
            days: Retention period in days (default: 30)

        Returns:
            Number of deleted entries
        """
        try:
            with get_cached_session() as session:
                cutoff_date = datetime.now() - timedelta(days=days)

                deleted_count = session.query(cls)\
                    .filter(cls.timestamp < cutoff_date)\
                    .delete()
                session.commit()

                # Invalidate cache
                invalidate_model_cache("PlugStatusLog")

                logger.info(f"Plug status logs cleaned up: {deleted_count} entries deleted (older than {days} days)")
                return deleted_count
        except Exception as e:
            logger.error(f"Failed to clean up plug status logs: {str(e)}")
            return 0
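
Taken together, the new model gives the monitoring code a single entry point for writing and reading plug telemetry. A minimal end-to-end sketch, assuming the `models` import path shown in the first hunk and a hypothetical printer with id 1 (values are illustrative, not from this commit):

from models import PlugStatusLog

# Record a state change reported by the TAPO poller
PlugStatusLog.log_status_change(printer_id=1, status="off",
                                source="scheduler", response_time_ms=95)

# Admin overview: last 24 hours across all plugs, plus aggregate statistics
recent = PlugStatusLog.get_all_recent_logs(hours=24, limit=100)
stats = PlugStatusLog.get_status_statistics(hours=24)
print(stats["error_rate"], [log.to_dict()["status"] for log in recent])

# Housekeeping, e.g. from a daily scheduler job
PlugStatusLog.cleanup_old_logs(days=30)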

# ===== DATABASE INITIALIZATION WITH OPTIMIZATIONS =====
def init_db() -> None:

File diff suppressed because it is too large.