🎉 Improved installation and configuration of the kiosk browser, with a browser fallback mechanism and an optimized fullscreen mode. 📚
1
backend/app - Kopie/utils/__init__.py
Normal file
@@ -0,0 +1 @@
# Utils package for MYP

107
backend/app - Kopie/utils/add_hardcoded_printers.py
Normal file
@@ -0,0 +1,107 @@
#!/usr/bin/env python3
"""
Script for adding the hard-coded printers to the database.
"""

import sys
import os
sys.path.append('.')

from config.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

def add_hardcoded_printers():
    """Inserts the hard-coded printers into the database."""

    print("=== Adding hard-coded printers ===")
    print(f"Printers to create: {len(PRINTERS)}")

    try:
        db = DatabaseManager()
        session = db.get_session()

        added_count = 0

        for printer_name, config in PRINTERS.items():
            # Check whether the printer already exists
            existing = session.query(Printer).filter(Printer.name == printer_name).first()

            if existing:
                print(f"⚠️ {printer_name}: already exists (ID: {existing.id})")
                continue

            # Create the new printer
            new_printer = Printer(
                name=printer_name,
                model="P115",  # default model
                location="Werk 040 - Berlin - TBA",  # updated location
                ip_address=config["ip"],
                mac_address=f"98:25:4A:E1:{printer_name[-1]}0:0{printer_name[-1]}",  # dummy MAC
                plug_ip=config["ip"],
                plug_username="admin",
                plug_password="admin",
                status="available",  # available, since it is in the configuration
                active=True,
                created_at=datetime.now()
            )

            session.add(new_printer)
            print(f"✅ {printer_name}: added (IP: {config['ip']})")
            added_count += 1

        # Persist the changes
        session.commit()
        session.close()

        print(f"\n✅ {added_count} new printers added")
        print("Printer creation finished!")

    except Exception as e:
        print(f"❌ Error while adding: {e}")
        if 'session' in locals():
            session.rollback()
            session.close()

def list_all_printers():
    """Lists all printers in the database."""

    print("\n=== All printers in the database ===")

    try:
        db = DatabaseManager()
        session = db.get_session()

        printers = session.query(Printer).all()

        if not printers:
            print("No printers found in the database.")
            return

        print(f"{'ID':<5} {'Name':<15} {'Status':<12} {'IP address':<15} {'Active':<8}")
        print("-" * 60)

        for printer in printers:
            active_str = "✅" if printer.active else "❌"
            print(f"{printer.id:<5} {printer.name:<15} {printer.status:<12} {printer.ip_address:<15} {active_str:<8}")

        session.close()

    except Exception as e:
        print(f"❌ Error while fetching: {e}")
        if 'session' in locals():
            session.close()

if __name__ == "__main__":
    print("Hard-coded printer creation")
    print("=" * 35)

    # Show the current printers
    list_all_printers()

    # Add the hard-coded printers
    add_hardcoded_printers()

    # Show all printers
    list_all_printers()

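Both this script and clean_and_add_printers.py below assume that config.settings.PRINTERS maps printer names to a dict with at least an "ip" entry; the config itself is not part of this commit, so the following shape is a hypothetical sketch:

    # Hypothetical shape of PRINTERS in config/settings.py (not in this commit):
    # each value needs at least "ip", which feeds ip_address and plug_ip above.
    PRINTERS = {
        "Printer 1": {"ip": "192.168.0.101"},
        "Printer 2": {"ip": "192.168.0.102"},
    }

Note that the dummy MAC derivation above uses printer_name[-1], so it only yields distinct addresses while the names end in distinct single characters.
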
178
backend/app - Kopie/utils/add_test_printers.py
Normal file
@@ -0,0 +1,178 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Script for adding test printers to the database
"""

import sys
import os
from datetime import datetime

# Add the application directory (the parent of utils/) to the Python path,
# so that the top-level models module is importable.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from models import get_db_session, Printer

def add_test_printers():
    """Adds test printers to the database"""

    test_printers = [
        {
            "name": "Prusa i3 MK3S+",
            "model": "Prusa i3 MK3S+",
            "location": "Labor A - Arbeitsplatz 1",
            "mac_address": "AA:BB:CC:DD:EE:01",
            "plug_ip": "192.168.1.101",
            "status": "available",
            "active": True
        },
        {
            "name": "Ender 3 V2",
            "model": "Creality Ender 3 V2",
            "location": "Labor A - Arbeitsplatz 2",
            "mac_address": "AA:BB:CC:DD:EE:02",
            "plug_ip": "192.168.1.102",
            "status": "available",
            "active": True
        },
        {
            "name": "Ultimaker S3",
            "model": "Ultimaker S3",
            "location": "Labor B - Arbeitsplatz 1",
            "mac_address": "AA:BB:CC:DD:EE:03",
            "plug_ip": "192.168.1.103",
            "status": "available",
            "active": True
        },
        {
            "name": "Bambu Lab X1 Carbon",
            "model": "Bambu Lab X1 Carbon",
            "location": "Labor B - Arbeitsplatz 2",
            "mac_address": "AA:BB:CC:DD:EE:04",
            "plug_ip": "192.168.1.104",
            "status": "available",
            "active": True
        },
        {
            "name": "Formlabs Form 3",
            "model": "Formlabs Form 3",
            "location": "Labor C - Harz-Bereich",
            "mac_address": "AA:BB:CC:DD:EE:05",
            "plug_ip": "192.168.1.105",
            "status": "offline",
            "active": False
        }
    ]

    db_session = get_db_session()

    try:
        added_count = 0

        for printer_data in test_printers:
            # Check whether the printer already exists
            existing = db_session.query(Printer).filter(
                Printer.name == printer_data["name"]
            ).first()

            if existing:
                print(f"⚠️ Printer '{printer_data['name']}' already exists - skipping")
                continue

            # Create the new printer
            new_printer = Printer(
                name=printer_data["name"],
                model=printer_data["model"],
                location=printer_data["location"],
                mac_address=printer_data["mac_address"],
                plug_ip=printer_data["plug_ip"],
                status=printer_data["status"],
                active=printer_data["active"],
                created_at=datetime.now()
            )

            db_session.add(new_printer)
            added_count += 1
            print(f"✅ Printer '{printer_data['name']}' added")

        if added_count > 0:
            db_session.commit()
            print(f"\n🎉 {added_count} test printers successfully added to the database!")
        else:
            print("\n📋 All test printers already exist in the database")

        # Show all printers in the database
        all_printers = db_session.query(Printer).all()
        print(f"\n📊 {len(all_printers)} printers in the database in total:")
        print("-" * 80)
        print(f"{'ID':<4} {'Name':<20} {'Model':<20} {'Status':<12} {'Active':<6}")
        print("-" * 80)

        for printer in all_printers:
            active_str = "✅" if printer.active else "❌"
            print(f"{printer.id:<4} {printer.name[:19]:<20} {(printer.model or 'Unknown')[:19]:<20} {printer.status:<12} {active_str:<6}")

        db_session.close()

    except Exception as e:
        db_session.rollback()
        db_session.close()
        print(f"❌ Error while adding the test printers: {str(e)}")
        return False

    return True

def remove_test_printers():
    """Removes all test printers from the database"""

    test_printer_names = [
        "Prusa i3 MK3S+",
        "Ender 3 V2",
        "Ultimaker S3",
        "Bambu Lab X1 Carbon",
        "Formlabs Form 3"
    ]

    db_session = get_db_session()

    try:
        removed_count = 0

        for name in test_printer_names:
            printer = db_session.query(Printer).filter(Printer.name == name).first()
            if printer:
                db_session.delete(printer)
                removed_count += 1
                print(f"🗑️ Printer '{name}' removed")

        if removed_count > 0:
            db_session.commit()
            print(f"\n🧹 {removed_count} test printers successfully removed!")
        else:
            print("\n📋 No test printers found to remove")

        db_session.close()

    except Exception as e:
        db_session.rollback()
        db_session.close()
        print(f"❌ Error while removing the test printers: {str(e)}")
        return False

    return True

if __name__ == "__main__":
    print("=== MYP printer management - test printer administration ===")
    print()

    if len(sys.argv) > 1 and sys.argv[1] == "--remove":
        print("Removing test printers...")
        remove_test_printers()
    else:
        print("Adding test printers...")
        print("(Use --remove to remove the test printers)")
        print()
        add_test_printers()

    print("\nDone! 🚀")

37
backend/app - Kopie/utils/aktiviere_drucker.py
Normal file
@@ -0,0 +1,37 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from models import get_db_session, Printer

def aktiviere_alle_drucker():
    """Activates all printers in the database."""
    try:
        session = get_db_session()
        drucker = session.query(Printer).all()

        if not drucker:
            print("No printers found in the database.")
            session.close()
            return

        print(f"Number of printers: {len(drucker)}")
        print("Activating all printers...")

        for d in drucker:
            d.active = True
            print(f"Printer {d.id}: {d.name} - IP: {d.plug_ip} - active: {d.active}")

        session.commit()
        print("All printers were activated successfully!")
        session.close()

    except Exception as e:
        print(f"Error: {str(e)}")
        try:
            session.rollback()
            session.close()
        except Exception:
            pass

if __name__ == "__main__":
    aktiviere_alle_drucker()

667
backend/app - Kopie/utils/analytics.py
Normal file
@@ -0,0 +1,667 @@
#!/usr/bin/env python3
"""
Extended analytics and statistics for the MYP platform
Comprehensive data analysis, reports and KPI tracking
"""

import json
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple, Any
from sqlalchemy import func, desc, and_, or_, extract
from sqlalchemy.orm import Session
from dataclasses import dataclass, asdict
from enum import Enum
from utils.logging_config import get_logger

logger = get_logger("analytics")

# ===== ANALYTICS ENUMS =====

class MetricType(Enum):
    """Metric types"""
    COUNTER = "counter"        # counter (monotonically increasing)
    GAUGE = "gauge"            # instantaneous value
    HISTOGRAM = "histogram"    # distribution of values
    RATE = "rate"              # rate over time

class TimeRange(Enum):
    """Time ranges for analyses"""
    HOUR = "hour"
    DAY = "day"
    WEEK = "week"
    MONTH = "month"
    QUARTER = "quarter"
    YEAR = "year"
    CUSTOM = "custom"

class ReportFormat(Enum):
    """Output formats for reports"""
    JSON = "json"
    CSV = "csv"
    PDF = "pdf"
    EXCEL = "excel"

# ===== DATA CLASSES =====

@dataclass
class Metric:
    """A single metric"""
    name: str
    value: float
    unit: str
    timestamp: datetime
    tags: Dict[str, str] = None

    def to_dict(self) -> Dict:
        result = asdict(self)
        result['timestamp'] = self.timestamp.isoformat()
        return result

@dataclass
class AnalyticsData:
    """Container for analytics data"""
    metrics: List[Metric]
    timerange: TimeRange
    start_date: datetime
    end_date: datetime
    filters: Dict[str, Any] = None

    def to_dict(self) -> Dict:
        return {
            'metrics': [m.to_dict() for m in self.metrics],
            'timerange': self.timerange.value,
            'start_date': self.start_date.isoformat(),
            'end_date': self.end_date.isoformat(),
            'filters': self.filters or {}
        }

@dataclass
class KPI:
    """Key Performance Indicator"""
    name: str
    current_value: float
    previous_value: float
    target_value: float
    unit: str
    trend: str  # "up", "down", "stable"
    change_percent: float

    def to_dict(self) -> Dict:
        return asdict(self)

# ===== ANALYTICS ENGINE =====

class AnalyticsEngine:
    """Main class for analytics and statistics"""

    def __init__(self):
        self.cache = {}
        self.cache_timeout = timedelta(minutes=10)

    def get_printer_statistics(self, time_range: TimeRange = TimeRange.MONTH,
                               start_date: datetime = None, end_date: datetime = None) -> Dict:
        """
        Fetch printer statistics

        Args:
            time_range: time range for the analysis
            start_date: start date (optional)
            end_date: end date (optional)

        Returns:
            Dict: printer statistics
        """
        try:
            from models import get_db_session, Printer, Job

            if not start_date or not end_date:
                start_date, end_date = self._get_date_range(time_range)

            db_session = get_db_session()

            # Basic statistics
            total_printers = db_session.query(Printer).filter(Printer.active == True).count()
            online_printers = db_session.query(Printer).filter(
                and_(Printer.active == True, Printer.status.in_(["online", "idle"]))
            ).count()

            # Utilization per printer
            printer_usage = db_session.query(
                Printer.name,
                func.count(Job.id).label('job_count'),
                func.sum(Job.duration_minutes).label('total_duration')
            ).outerjoin(Job, and_(
                Job.printer_id == Printer.id,
                Job.created_at.between(start_date, end_date)
            )).group_by(Printer.id, Printer.name).all()

            # Status distribution
            status_distribution = db_session.query(
                Printer.status,
                func.count(Printer.id).label('count')
            ).filter(Printer.active == True).group_by(Printer.status).all()

            # Average availability
            availability_stats = self._calculate_printer_availability(db_session, start_date, end_date)

            db_session.close()

            return {
                'summary': {
                    'total_printers': total_printers,
                    'online_printers': online_printers,
                    'availability_rate': round((online_printers / total_printers * 100) if total_printers > 0 else 0, 1)
                },
                'usage_by_printer': [
                    {
                        'name': usage.name,
                        'jobs': usage.job_count or 0,
                        'total_hours': round((usage.total_duration or 0) / 60, 1),
                        'utilization_rate': self._calculate_utilization_rate(usage.total_duration, start_date, end_date)
                    }
                    for usage in printer_usage
                ],
                'status_distribution': [
                    {'status': status.status, 'count': status.count}
                    for status in status_distribution
                ],
                'availability': availability_stats,
                'time_range': {
                    'start': start_date.isoformat(),
                    'end': end_date.isoformat(),
                    'type': time_range.value
                }
            }

        except Exception as e:
            logger.error(f"Error while fetching the printer statistics: {e}")
            return {'error': str(e)}

    def get_job_statistics(self, time_range: TimeRange = TimeRange.MONTH,
                           start_date: datetime = None, end_date: datetime = None) -> Dict:
        """
        Fetch job statistics

        Args:
            time_range: time range for the analysis
            start_date: start date (optional)
            end_date: end date (optional)

        Returns:
            Dict: job statistics
        """
        try:
            from models import get_db_session, Job, User

            if not start_date or not end_date:
                start_date, end_date = self._get_date_range(time_range)

            db_session = get_db_session()

            # Basic statistics
            base_query = db_session.query(Job).filter(
                Job.created_at.between(start_date, end_date)
            )

            total_jobs = base_query.count()
            completed_jobs = base_query.filter(Job.status == 'completed').count()
            failed_jobs = base_query.filter(Job.status == 'failed').count()
            cancelled_jobs = base_query.filter(Job.status == 'cancelled').count()

            # Status distribution
            status_distribution = db_session.query(
                Job.status,
                func.count(Job.id).label('count')
            ).filter(
                Job.created_at.between(start_date, end_date)
            ).group_by(Job.status).all()

            # Average job duration
            avg_duration = db_session.query(
                func.avg(Job.duration_minutes)
            ).filter(
                and_(
                    Job.created_at.between(start_date, end_date),
                    Job.status == 'completed'
                )
            ).scalar() or 0

            # Top users
            top_users = db_session.query(
                User.username,
                User.name,
                func.count(Job.id).label('job_count'),
                func.sum(Job.duration_minutes).label('total_duration')
            ).join(Job).filter(
                Job.created_at.between(start_date, end_date)
            ).group_by(User.id, User.username, User.name).order_by(
                desc('job_count')
            ).limit(10).all()

            # Jobs over time (daily)
            daily_jobs = self._get_daily_job_trend(db_session, start_date, end_date)

            # Material consumption (if available)
            material_usage = db_session.query(
                func.sum(Job.material_used)
            ).filter(
                and_(
                    Job.created_at.between(start_date, end_date),
                    Job.material_used.isnot(None)
                )
            ).scalar() or 0

            db_session.close()

            success_rate = round((completed_jobs / total_jobs * 100) if total_jobs > 0 else 0, 1)

            return {
                'summary': {
                    'total_jobs': total_jobs,
                    'completed_jobs': completed_jobs,
                    'failed_jobs': failed_jobs,
                    'cancelled_jobs': cancelled_jobs,
                    'success_rate': success_rate,
                    'avg_duration_hours': round(avg_duration / 60, 1),
                    'total_material_g': round(material_usage, 1)
                },
                'status_distribution': [
                    {'status': status.status, 'count': status.count}
                    for status in status_distribution
                ],
                'top_users': [
                    {
                        'username': user.username,
                        'name': user.name,
                        'jobs': user.job_count,
                        'total_hours': round((user.total_duration or 0) / 60, 1)
                    }
                    for user in top_users
                ],
                'daily_trend': daily_jobs,
                'time_range': {
                    'start': start_date.isoformat(),
                    'end': end_date.isoformat(),
                    'type': time_range.value
                }
            }

        except Exception as e:
            logger.error(f"Error while fetching the job statistics: {e}")
            return {'error': str(e)}

    def get_user_statistics(self, time_range: TimeRange = TimeRange.MONTH,
                            start_date: datetime = None, end_date: datetime = None) -> Dict:
        """
        Fetch user statistics

        Args:
            time_range: time range for the analysis
            start_date: start date (optional)
            end_date: end date (optional)

        Returns:
            Dict: user statistics
        """
        try:
            from models import get_db_session, User, Job

            if not start_date or not end_date:
                start_date, end_date = self._get_date_range(time_range)

            db_session = get_db_session()

            # Basic statistics
            total_users = db_session.query(User).filter(User.active == True).count()
            active_users = db_session.query(func.distinct(Job.user_id)).filter(
                Job.created_at.between(start_date, end_date)
            ).count()

            # New users within the period
            new_users = db_session.query(User).filter(
                and_(
                    User.created_at.between(start_date, end_date),
                    User.active == True
                )
            ).count()

            # User activity
            user_activity = db_session.query(
                User.username,
                User.name,
                func.count(Job.id).label('jobs'),
                func.max(Job.created_at).label('last_activity'),
                func.sum(Job.duration_minutes).label('total_duration')
            ).outerjoin(Job, and_(
                Job.user_id == User.id,
                Job.created_at.between(start_date, end_date)
            )).filter(User.active == True).group_by(
                User.id, User.username, User.name
            ).all()

            # Role distribution
            role_distribution = db_session.query(
                User.role,
                func.count(User.id).label('count')
            ).filter(User.active == True).group_by(User.role).all()

            db_session.close()

            # Compute the engagement rate
            engagement_rate = round((active_users / total_users * 100) if total_users > 0 else 0, 1)

            return {
                'summary': {
                    'total_users': total_users,
                    'active_users': active_users,
                    'new_users': new_users,
                    'engagement_rate': engagement_rate
                },
                'role_distribution': [
                    {'role': role.role or 'user', 'count': role.count}
                    for role in role_distribution
                ],
                'user_activity': [
                    {
                        'username': user.username,
                        'name': user.name,
                        'jobs': user.jobs or 0,
                        'last_activity': user.last_activity.isoformat() if user.last_activity else None,
                        'total_hours': round((user.total_duration or 0) / 60, 1)
                    }
                    for user in user_activity
                ],
                'time_range': {
                    'start': start_date.isoformat(),
                    'end': end_date.isoformat(),
                    'type': time_range.value
                }
            }

        except Exception as e:
            logger.error(f"Error while fetching the user statistics: {e}")
            return {'error': str(e)}

    def get_system_kpis(self, time_range: TimeRange = TimeRange.MONTH) -> Dict:
        """
        Fetch system KPIs

        Args:
            time_range: time range for the comparison

        Returns:
            Dict: KPI data
        """
        try:
            current_start, current_end = self._get_date_range(time_range)
            previous_start, previous_end = self._get_previous_period(current_start, current_end)

            # Current period
            current_printer_stats = self.get_printer_statistics(TimeRange.CUSTOM, current_start, current_end)
            current_job_stats = self.get_job_statistics(TimeRange.CUSTOM, current_start, current_end)
            current_user_stats = self.get_user_statistics(TimeRange.CUSTOM, current_start, current_end)

            # Previous period
            previous_printer_stats = self.get_printer_statistics(TimeRange.CUSTOM, previous_start, previous_end)
            previous_job_stats = self.get_job_statistics(TimeRange.CUSTOM, previous_start, previous_end)
            previous_user_stats = self.get_user_statistics(TimeRange.CUSTOM, previous_start, previous_end)

            # Compute the KPIs
            kpis = [
                self._create_kpi(
                    name="Printer availability",
                    current=current_printer_stats['summary']['availability_rate'],
                    previous=previous_printer_stats['summary']['availability_rate'],
                    target=95.0,
                    unit="%"
                ),
                self._create_kpi(
                    name="Job success rate",
                    current=current_job_stats['summary']['success_rate'],
                    previous=previous_job_stats['summary']['success_rate'],
                    target=90.0,
                    unit="%"
                ),
                self._create_kpi(
                    name="Active users",
                    current=current_user_stats['summary']['active_users'],
                    previous=previous_user_stats['summary']['active_users'],
                    target=50,
                    unit="users"
                ),
                self._create_kpi(
                    name="Average job duration",
                    current=current_job_stats['summary']['avg_duration_hours'],
                    previous=previous_job_stats['summary']['avg_duration_hours'],
                    target=4.0,
                    unit="hours"
                ),
                self._create_kpi(
                    name="Material consumption",
                    current=current_job_stats['summary']['total_material_g'],
                    previous=previous_job_stats['summary']['total_material_g'],
                    target=10000,
                    unit="g"
                )
            ]

            return {
                'kpis': [kpi.to_dict() for kpi in kpis],
                'period': {
                    'current': {
                        'start': current_start.isoformat(),
                        'end': current_end.isoformat()
                    },
                    'previous': {
                        'start': previous_start.isoformat(),
                        'end': previous_end.isoformat()
                    }
                }
            }

        except Exception as e:
            logger.error(f"Error while fetching the system KPIs: {e}")
            return {'error': str(e)}

    def generate_report(self, report_type: str, time_range: TimeRange = TimeRange.MONTH,
                        format: ReportFormat = ReportFormat.JSON, **kwargs) -> Dict:
        """
        Generate a report

        Args:
            report_type: type of report
            time_range: time range
            format: output format
            **kwargs: additional parameters

        Returns:
            Dict: report data
        """
        try:
            start_date = kwargs.get('start_date')
            end_date = kwargs.get('end_date')

            if not start_date or not end_date:
                start_date, end_date = self._get_date_range(time_range)

            # Note: only _generate_comprehensive_report is implemented in this
            # file; the other three generators are referenced but not yet
            # defined and fall through to the error handler below if requested.
            if report_type == "comprehensive":
                return self._generate_comprehensive_report(start_date, end_date, format)
            elif report_type == "printer_usage":
                return self._generate_printer_usage_report(start_date, end_date, format)
            elif report_type == "user_activity":
                return self._generate_user_activity_report(start_date, end_date, format)
            elif report_type == "efficiency":
                return self._generate_efficiency_report(start_date, end_date, format)
            else:
                raise ValueError(f"Unknown report type: {report_type}")

        except Exception as e:
            logger.error(f"Error while generating the report: {e}")
            return {'error': str(e)}

    # ===== HELPER METHODS =====

    def _get_date_range(self, time_range: TimeRange) -> Tuple[datetime, datetime]:
        """Computes the date range for the given TimeRange"""
        end_date = datetime.now()

        if time_range == TimeRange.HOUR:
            start_date = end_date - timedelta(hours=1)
        elif time_range == TimeRange.DAY:
            start_date = end_date - timedelta(days=1)
        elif time_range == TimeRange.WEEK:
            start_date = end_date - timedelta(weeks=1)
        elif time_range == TimeRange.MONTH:
            start_date = end_date - timedelta(days=30)
        elif time_range == TimeRange.QUARTER:
            start_date = end_date - timedelta(days=90)
        elif time_range == TimeRange.YEAR:
            start_date = end_date - timedelta(days=365)
        else:
            start_date = end_date - timedelta(days=30)  # default

        return start_date, end_date

    def _get_previous_period(self, start_date: datetime, end_date: datetime) -> Tuple[datetime, datetime]:
        """Computes the previous period for comparisons"""
        duration = end_date - start_date
        previous_end = start_date
        previous_start = previous_end - duration
        return previous_start, previous_end

    def _create_kpi(self, name: str, current: float, previous: float,
                    target: float, unit: str) -> KPI:
        """Creates a KPI object with the derived fields"""
        if previous > 0:
            change_percent = round(((current - previous) / previous) * 100, 1)
        else:
            change_percent = 0.0

        if abs(change_percent) < 1:
            trend = "stable"
        elif change_percent > 0:
            trend = "up"
        else:
            trend = "down"

        return KPI(
            name=name,
            current_value=current,
            previous_value=previous,
            target_value=target,
            unit=unit,
            trend=trend,
            change_percent=change_percent
        )

    def _calculate_printer_availability(self, db_session: Session,
                                        start_date: datetime, end_date: datetime) -> Dict:
        """Computes printer availability"""
        # Simplified calculation - can be extended
        from models import Printer

        total_printers = db_session.query(Printer).filter(Printer.active == True).count()
        online_printers = db_session.query(Printer).filter(
            and_(Printer.active == True, Printer.status.in_(["online", "idle"]))
        ).count()

        availability_rate = round((online_printers / total_printers * 100) if total_printers > 0 else 0, 1)

        return {
            'total_printers': total_printers,
            'online_printers': online_printers,
            'availability_rate': availability_rate,
            'downtime_hours': 0  # placeholder - can be derived once detailed logging exists
        }

    def _calculate_utilization_rate(self, total_minutes: int,
                                    start_date: datetime, end_date: datetime) -> float:
        """Computes the utilization rate"""
        if not total_minutes:
            return 0.0

        total_hours = (end_date - start_date).total_seconds() / 3600
        utilization_rate = (total_minutes / 60) / total_hours * 100
        return round(min(utilization_rate, 100), 1)

    def _get_daily_job_trend(self, db_session: Session,
                             start_date: datetime, end_date: datetime) -> List[Dict]:
        """Fetches the daily job trend"""
        from models import Job

        daily_jobs = db_session.query(
            func.date(Job.created_at).label('date'),
            func.count(Job.id).label('count')
        ).filter(
            Job.created_at.between(start_date, end_date)
        ).group_by(
            func.date(Job.created_at)
        ).order_by('date').all()

        return [
            {
                'date': job.date.isoformat(),
                'jobs': job.count
            }
            for job in daily_jobs
        ]

    def _generate_comprehensive_report(self, start_date: datetime,
                                       end_date: datetime, format: ReportFormat) -> Dict:
        """Generates the comprehensive report"""
        printer_stats = self.get_printer_statistics(TimeRange.CUSTOM, start_date, end_date)
        job_stats = self.get_job_statistics(TimeRange.CUSTOM, start_date, end_date)
        user_stats = self.get_user_statistics(TimeRange.CUSTOM, start_date, end_date)
        kpis = self.get_system_kpis(TimeRange.CUSTOM)

        report = {
            'title': 'Comprehensive system report',
            'generated_at': datetime.now().isoformat(),
            'period': {
                'start': start_date.isoformat(),
                'end': end_date.isoformat()
            },
            'summary': {
                'total_jobs': job_stats['summary']['total_jobs'],
                'success_rate': job_stats['summary']['success_rate'],
                'active_users': user_stats['summary']['active_users'],
                'printer_availability': printer_stats['summary']['availability_rate']
            },
            'sections': {
                'printers': printer_stats,
                'jobs': job_stats,
                'users': user_stats,
                'kpis': kpis
            }
        }

        if format == ReportFormat.JSON:
            return report
        else:
            # Conversion to the other formats would happen here
            return {'error': f'Format {format.value} not implemented yet'}

# ===== GLOBAL INSTANCE =====

analytics_engine = AnalyticsEngine()

# ===== UTILITY FUNCTIONS =====

def get_dashboard_stats() -> Dict:
    """Quick dashboard statistics"""
    return analytics_engine.get_system_kpis(TimeRange.DAY)

def export_statistics(report_type: str, time_range: TimeRange, format: ReportFormat = ReportFormat.JSON) -> Dict:
    """Exports statistics in various formats"""
    return analytics_engine.generate_report(report_type, time_range, format)

def track_event(event_name: str, properties: Dict = None):
    """Tracks events for analytics"""
    try:
        logger.info(f"📊 Event tracked: {event_name} - {properties or {}}")
        # Actual event tracking could be implemented here
    except Exception as e:
        logger.error(f"Error during event tracking: {e}")

# Logging for the analytics system
logger.info("📈 Analytics engine initialized")

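A minimal usage sketch for the module-level engine (a hypothetical call site, assuming an initialized database and an importable models package):

    from utils.analytics import analytics_engine, TimeRange

    # Weekly printer statistics; the dict mirrors the structure built above.
    stats = analytics_engine.get_printer_statistics(TimeRange.WEEK)
    print(stats['summary'])  # total_printers, online_printers, availability_rate

    # KPIs compare the current window against the immediately preceding one.
    kpis = analytics_engine.get_system_kpis(TimeRange.MONTH)
    print([kpi['name'] for kpi in kpis['kpis']])
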
110
backend/app - Kopie/utils/clean_and_add_printers.py
Normal file
@@ -0,0 +1,110 @@
#!/usr/bin/env python3
"""
Script for cleaning up the printer database and adding the correct hard-coded printers.
"""

import sys
import os
sys.path.append('.')

from config.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

def clean_and_add_printers():
    """Cleans up the printer database and adds the correct hard-coded printers."""

    print("=== Cleaning and recreating the printer database ===")
    print(f"Hard-coded printers: {len(PRINTERS)}")

    try:
        db = DatabaseManager()
        session = db.get_session()

        # Delete all existing printers
        existing_printers = session.query(Printer).all()
        print(f"Deleting {len(existing_printers)} existing printers...")

        for printer in existing_printers:
            session.delete(printer)

        session.commit()
        print("✅ All old printers deleted")

        # Add the new printers
        added_count = 0

        for printer_name, config in PRINTERS.items():
            # Create the new printer
            new_printer = Printer(
                name=printer_name,
                model="P115",  # default model
                location="Werk 040 - Berlin - TBA",  # updated location
                ip_address=config["ip"],
                mac_address=f"98:25:4A:E1:{printer_name[-1]}0:0{printer_name[-1]}",  # dummy MAC
                plug_ip=config["ip"],
                plug_username="admin",
                plug_password="admin",
                status="available",  # available, since it is in the configuration
                active=True,
                created_at=datetime.now()
            )

            session.add(new_printer)
            print(f"✅ {printer_name}: added (IP: {config['ip']})")
            added_count += 1

        # Persist the changes
        session.commit()
        session.close()

        print(f"\n✅ {added_count} new printers added")
        print("Printer database successfully cleaned and recreated!")

    except Exception as e:
        print(f"❌ Error while cleaning up: {e}")
        if 'session' in locals():
            session.rollback()
            session.close()

def list_final_printers():
    """Shows the final printers in the database."""

    print("\n=== Final printer list ===")

    try:
        db = DatabaseManager()
        session = db.get_session()

        printers = session.query(Printer).all()

        if not printers:
            print("No printers found in the database.")
            return

        print(f"{'ID':<5} {'Name':<15} {'Status':<12} {'IP address':<15} {'Active':<8}")
        print("-" * 60)

        for printer in printers:
            active_str = "✅" if printer.active else "❌"
            print(f"{printer.id:<5} {printer.name:<15} {printer.status:<12} {printer.ip_address:<15} {active_str:<8}")

        session.close()

        print(f"\nTotal: {len(printers)} printers")

    except Exception as e:
        print(f"❌ Error while fetching: {e}")
        if 'session' in locals():
            session.close()

if __name__ == "__main__":
    print("Printer database cleanup and recreation")
    print("=" * 50)

    # Clean the database and add the new printers
    clean_and_add_printers()

    # Show the final list
    list_final_printers()

95
backend/app - Kopie/utils/create_ssl_cert.py
Normal file
@@ -0,0 +1,95 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
SSL certificate generator for the MYP platform
Creates self-signed SSL certificates for local development
"""

import os
import datetime
import sys

# Check that the required packages are installed
try:
    from cryptography import x509
    from cryptography.x509.oid import NameOID
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption
except ImportError:
    print("Error: package 'cryptography' not found.")
    print("Please install it with: pip install cryptography")
    sys.exit(1)

def create_self_signed_cert(cert_path, key_path, hostname="localhost"):
    """
    Creates a self-signed SSL certificate for the given hostname.

    Args:
        cert_path: path to the certificate file
        key_path: path to the private key file
        hostname: hostname for the certificate (default: localhost)
    """
    # Create the directory if it does not exist
    cert_dir = os.path.dirname(cert_path)
    if cert_dir and not os.path.exists(cert_dir):
        os.makedirs(cert_dir, exist_ok=True)

    # Generate the private key
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
    )

    # Write the key file
    with open(key_path, "wb") as key_file:
        key_file.write(private_key.private_bytes(
            encoding=Encoding.PEM,
            format=PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=NoEncryption()
        ))

    # Build the certificate name
    subject = issuer = x509.Name([
        x509.NameAttribute(NameOID.COMMON_NAME, hostname),
    ])

    # Build the certificate
    cert = x509.CertificateBuilder().subject_name(
        subject
    ).issuer_name(
        issuer
    ).public_key(
        private_key.public_key()
    ).serial_number(
        x509.random_serial_number()
    ).not_valid_before(
        datetime.datetime.utcnow()
    ).not_valid_after(
        datetime.datetime.utcnow() + datetime.timedelta(days=365)
    ).add_extension(
        x509.SubjectAlternativeName([x509.DNSName(hostname)]),
        critical=False,
    ).sign(private_key, hashes.SHA256())

    # Write the certificate file
    with open(cert_path, "wb") as cert_file:
        cert_file.write(cert.public_bytes(Encoding.PEM))

    print(f"Self-signed SSL certificate for '{hostname}' created:")
    print(f"Certificate: {cert_path}")
    print(f"Key: {key_path}")
    print("Valid for 1 year.")

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Creates self-signed SSL certificates for local development")
    parser.add_argument("-c", "--cert", default="/home/user/Projektarbeit-MYP/backend/app/certs/myp.crt", help="Path to the certificate file")
    parser.add_argument("-k", "--key", default="/home/user/Projektarbeit-MYP/backend/app/certs/myp.key", help="Path to the key file")
    parser.add_argument("-n", "--hostname", default="localhost", help="Hostname for the certificate")

    args = parser.parse_args()

    create_self_signed_cert(args.cert, args.key, args.hostname)

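The generator is typically invoked directly; the defaults above point at /home/user/Projektarbeit-MYP/backend/app/certs. A usage sketch (the hostname here is illustrative):

    python utils/create_ssl_cert.py -c certs/myp.crt -k certs/myp.key -n raspberrypi.local

The script writes a PEM key pair and a certificate valid for one year, with the hostname set as both the Common Name and the subjectAltName, which is the field modern browsers actually validate against.
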
106
backend/app - Kopie/utils/create_test_printers.py
Normal file
@@ -0,0 +1,106 @@
#!/usr/bin/env python3
"""
Script for creating test printers for the MYP platform
"""

import sys
import os
sys.path.append('.')

from models import get_db_session, Printer  # explicit imports instead of a star import
from datetime import datetime

def create_test_printers():
    """Creates test printers in the database."""

    # Database connection
    db_session = get_db_session()

    # Test printer data
    test_printers = [
        {
            'name': 'Mercedes-Benz FDM Pro #01',
            'model': 'Ultimaker S5 Pro',
            'location': 'Werkhalle Sindelfingen',
            'plug_ip': '192.168.10.101',
            'status': 'available',
            'active': True
        },
        {
            'name': 'Mercedes-Benz FDM #02',
            'model': 'Prusa MK3S+',
            'location': 'Entwicklungszentrum Stuttgart',
            'plug_ip': '192.168.10.102',
            'status': 'printing',
            'active': True
        },
        {
            'name': 'Mercedes-Benz SLA #01',
            'model': 'Formlabs Form 3+',
            'location': 'Prototypenlabor',
            'plug_ip': '192.168.10.103',
            'status': 'available',
            'active': True
        },
        {
            'name': 'Mercedes-Benz Industrial #01',
            'model': 'Stratasys F370',
            'location': 'Industriehalle Bremen',
            'plug_ip': '192.168.10.104',
            'status': 'maintenance',
            'active': False
        },
        {
            'name': 'Mercedes-Benz Rapid #01',
            'model': 'Bambu Lab X1 Carbon',
            'location': 'Designabteilung',
            'plug_ip': '192.168.10.105',
            'status': 'offline',
            'active': True
        },
        {
            'name': 'Mercedes-Benz SLS #01',
            'model': 'HP Jet Fusion 5200',
            'location': 'Produktionszentrum Berlin',
            'plug_ip': '192.168.10.106',
            'status': 'available',
            'active': True
        }
    ]
    try:
        created_count = 0
        for printer_data in test_printers:
            # Check whether the printer already exists
            existing = db_session.query(Printer).filter_by(name=printer_data['name']).first()
            if not existing:
                printer = Printer(
                    name=printer_data['name'],
                    model=printer_data['model'],
                    location=printer_data['location'],
                    plug_ip=printer_data['plug_ip'],
                    status=printer_data['status'],
                    active=printer_data['active'],
                    created_at=datetime.now()
                )
                db_session.add(printer)
                created_count += 1
                print(f"✅ Printer '{printer_data['name']}' created")
            else:
                print(f"ℹ️ Printer '{printer_data['name']}' already exists")

        db_session.commit()

        total_count = db_session.query(Printer).count()
        print(f"\n🎉 {created_count} new test printers created!")
        print(f"📊 {total_count} printers in the database in total.")

    except Exception as e:
        print(f"❌ Error while creating the test printers: {str(e)}")
        db_session.rollback()
    finally:
        db_session.close()

if __name__ == "__main__":
    print("🚀 Creating test printers for the MYP platform...")
    create_test_printers()
    print("✅ Done!")

252
backend/app - Kopie/utils/database_migration.py
Normal file
@@ -0,0 +1,252 @@
#!/usr/bin/env python3
"""
Database migration utility for the MYP platform
Checks and updates the database schema automatically.
"""

import sqlite3
import logging
from typing import List, Dict, Any
from datetime import datetime

from config.settings import DATABASE_PATH
from models import init_db

logger = logging.getLogger(__name__)

def get_table_columns(table_name: str) -> List[Dict[str, Any]]:
    """
    Fetches the columns of a table.

    Args:
        table_name: name of the table

    Returns:
        List[Dict]: list of columns with their properties
    """
    try:
        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()
        cursor.execute(f'PRAGMA table_info({table_name})')
        columns = cursor.fetchall()
        conn.close()

        return [
            {
                'name': col[1],
                'type': col[2],
                'not_null': bool(col[3]),
                'default': col[4],
                'primary_key': bool(col[5])
            }
            for col in columns
        ]
    except Exception as e:
        logger.error(f"Error while fetching the columns of table {table_name}: {e}")
        return []

def table_exists(table_name: str) -> bool:
    """
    Checks whether a table exists.

    Args:
        table_name: name of the table

    Returns:
        bool: True if the table exists
    """
    try:
        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()
        cursor.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND name=?
        """, (table_name,))
        result = cursor.fetchone()
        conn.close()
        return result is not None
    except Exception as e:
        logger.error(f"Error while checking table {table_name}: {e}")
        return False

def column_exists(table_name: str, column_name: str) -> bool:
    """
    Checks whether a column exists in a table.

    Args:
        table_name: name of the table
        column_name: name of the column

    Returns:
        bool: True if the column exists
    """
    columns = get_table_columns(table_name)
    return any(col['name'] == column_name for col in columns)

def add_column_if_missing(table_name: str, column_name: str, column_type: str, default_value: str = None) -> bool:
    """
    Adds a column if it does not exist yet.

    Args:
        table_name: name of the table
        column_name: name of the column
        column_type: data type of the column
        default_value: optional default value

    Returns:
        bool: True on success
    """
    if column_exists(table_name, column_name):
        logger.info(f"Column {column_name} already exists in table {table_name}")
        return True

    try:
        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()

        sql = f"ALTER TABLE {table_name} ADD COLUMN {column_name} {column_type}"
        if default_value:
            sql += f" DEFAULT {default_value}"

        cursor.execute(sql)
        conn.commit()
        conn.close()

        logger.info(f"Column {column_name} successfully added to table {table_name}")
        return True
    except Exception as e:
        logger.error(f"Error while adding column {column_name} to table {table_name}: {e}")
        return False

def migrate_database() -> bool:
    """
    Runs all required database migrations.

    Returns:
        bool: True on success
    """
    logger.info("Starting database migration...")

    try:
        # Check that the basic tables exist
        required_tables = ['users', 'printers', 'jobs', 'stats']
        missing_tables = [table for table in required_tables if not table_exists(table)]

        if missing_tables:
            logger.warning(f"Missing tables found: {missing_tables}")
            logger.info("Recreating all tables...")
            init_db()
            logger.info("Tables created successfully")
            return True

        # Check specific columns that may be missing
        migrations = [
            # printers table
            ('printers', 'last_checked', 'DATETIME', 'NULL'),
            ('printers', 'active', 'BOOLEAN', '1'),
            ('printers', 'created_at', 'DATETIME', 'CURRENT_TIMESTAMP'),

            # jobs table
            ('jobs', 'duration_minutes', 'INTEGER', '60'),
            ('jobs', 'actual_end_time', 'DATETIME', 'NULL'),
            ('jobs', 'owner_id', 'INTEGER', 'NULL'),
            ('jobs', 'file_path', 'VARCHAR(500)', 'NULL'),

            # users table
            ('users', 'username', 'VARCHAR(100)', 'NULL'),
            ('users', 'active', 'BOOLEAN', '1'),
            ('users', 'created_at', 'DATETIME', 'CURRENT_TIMESTAMP'),
        ]

        success = True
        for table_name, column_name, column_type, default_value in migrations:
            if not add_column_if_missing(table_name, column_name, column_type, default_value):
                success = False

        if success:
            logger.info("Database migration completed successfully")
        else:
            logger.warning("Database migration completed with errors")

        return success

    except Exception as e:
        logger.error(f"Error during the database migration: {e}")
        return False

def check_database_integrity() -> bool:
    """
    Checks the integrity of the database.

    Returns:
        bool: True if the database passes the integrity check
    """
    try:
        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()
        cursor.execute('PRAGMA integrity_check')
        result = cursor.fetchone()
        conn.close()

        if result and result[0] == 'ok':
            logger.info("Database integrity: OK")
            return True
        else:
            logger.error(f"Database integrity: ERROR - {result}")
            return False

    except Exception as e:
        logger.error(f"Error during the integrity check: {e}")
        return False

def backup_database(backup_path: str = None) -> bool:
    """
    Creates a backup of the database.

    Args:
        backup_path: optional path for the backup

    Returns:
        bool: True on success
    """
    if not backup_path:
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_path = f"database/myp_backup_{timestamp}.db"

    try:
        import shutil
        shutil.copy2(DATABASE_PATH, backup_path)
        logger.info(f"Database backup created: {backup_path}")
        return True
    except Exception as e:
        logger.error(f"Error while creating the backup: {e}")
        return False

if __name__ == "__main__":
    # Configure logging
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    print("=== MYP Platform - database migration ===")

    # Create a backup
    if backup_database():
        print("✅ Backup created")
    else:
        print("⚠️ Backup creation failed")

    # Check integrity
    if check_database_integrity():
        print("✅ Database integrity OK")
    else:
        print("❌ Database integrity ERROR")

    # Run the migration
    if migrate_database():
        print("✅ Migration successful")
    else:
        print("❌ Migration failed")

    print("\nMigration finished!")

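Besides the __main__ block, the helpers are importable on their own; a minimal sketch of the backup-then-migrate flow (assuming the app directory is on the import path, as in the scripts above):

    from utils.database_migration import backup_database, check_database_integrity, migrate_database

    # All three helpers return True on success, so the migration
    # can be gated on a fresh backup and a clean integrity check.
    if backup_database() and check_database_integrity():
        migrate_database()
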
290
backend/app - Kopie/utils/database_schema_migration.py
Normal file
290
backend/app - Kopie/utils/database_schema_migration.py
Normal file
@@ -0,0 +1,290 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Optimiertes Datenbank-Schema-Migrationsskript
|
||||
Mit WAL-Checkpoint und ordnungsgemäßer Ressourcenverwaltung
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
import signal
|
||||
import time
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
|
||||
# Pfad zur App hinzufügen - KORRIGIERT
|
||||
app_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
sys.path.insert(0, app_dir)
|
||||
|
||||
# Alternative Datenbankpfad-Definition falls Import fehlschlägt
|
||||
DATABASE_PATH = None
|
||||
try:
|
||||
from config.settings import DATABASE_PATH
|
||||
except ImportError:
|
||||
# Fallback: Datenbankpfad manuell setzen
|
||||
DATABASE_PATH = os.path.join(app_dir, "database", "myp.db")
|
||||
print(f"⚠️ Fallback: Verwende Datenbankpfad: {DATABASE_PATH}")
|
||||
|
||||
# Logging-Setup mit Fallback
|
||||
try:
|
||||
from utils.logging_config import get_logger
|
||||
logger = get_logger("schema_migration")
|
||||
except ImportError:
|
||||
# Fallback: Standard-Logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("schema_migration")
|
||||
|
||||
# Globale Variable für sauberes Shutdown
|
||||
_migration_running = False
|
||||
_current_connection = None
|
||||
|
||||
def signal_handler(signum, frame):
|
||||
"""Signal-Handler für ordnungsgemäßes Shutdown"""
|
||||
global _migration_running, _current_connection
|
||||
print(f"\n🛑 Signal {signum} empfangen - beende Migration sauber...")
|
||||
_migration_running = False
|
||||
|
||||
if _current_connection:
|
||||
try:
|
||||
print("🔄 Führe WAL-Checkpoint durch...")
|
||||
_current_connection.execute("PRAGMA wal_checkpoint(TRUNCATE)")
|
||||
_current_connection.commit()
|
||||
_current_connection.close()
|
||||
print("✅ Datenbank ordnungsgemäß geschlossen")
|
||||
except Exception as e:
|
||||
print(f"⚠️ Fehler beim Schließen: {e}")
|
||||
|
||||
print("🏁 Migration beendet")
|
||||
sys.exit(0)
|
||||
|
||||
# Signal-Handler registrieren
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
|
||||
@contextmanager
|
||||
def get_database_connection(timeout=30):
|
||||
"""Context Manager für sichere Datenbankverbindung mit WAL-Optimierung"""
|
||||
global _current_connection
|
||||
conn = None
|
||||
|
||||
try:
|
||||
# Verbindung mit optimierten Einstellungen
|
||||
conn = sqlite3.connect(
|
||||
DATABASE_PATH,
|
||||
timeout=timeout,
|
||||
isolation_level=None # Autocommit aus für manuelle Transaktionen
|
||||
)
|
||||
_current_connection = conn
|
||||
|
||||
# WAL-Modus und Optimierungen
|
||||
conn.execute("PRAGMA journal_mode=WAL")
|
||||
conn.execute("PRAGMA synchronous=NORMAL") # Bessere Performance mit WAL
|
||||
conn.execute("PRAGMA foreign_keys=ON")
|
||||
conn.execute("PRAGMA busy_timeout=30000") # 30 Sekunden Timeout
|
||||
conn.execute("PRAGMA wal_autocheckpoint=1000") # Automatischer Checkpoint alle 1000 Seiten
|
||||
|
||||
logger.info("Datenbankverbindung mit WAL-Optimierungen hergestellt")
|
||||
yield conn
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Datenbankverbindungsfehler: {e}")
|
||||
if conn:
|
||||
conn.rollback()
|
||||
raise
|
||||
finally:
|
||||
if conn:
|
||||
try:
|
||||
# Kritisch: WAL-Checkpoint vor dem Schließen
|
||||
logger.info("Führe finalen WAL-Checkpoint durch...")
|
||||
conn.execute("PRAGMA wal_checkpoint(TRUNCATE)")
|
||||
conn.commit()
|
||||
|
||||
# Prüfe WAL-Status
|
||||
wal_info = conn.execute("PRAGMA wal_checkpoint").fetchone()
|
||||
if wal_info:
|
||||
logger.info(f"WAL-Checkpoint: {wal_info[0]} Seiten übertragen, {wal_info[1]} Seiten zurückgesetzt")
|
||||
|
||||
conn.close()
|
||||
logger.info("Datenbankverbindung ordnungsgemäß geschlossen")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler beim Schließen der Datenbankverbindung: {e}")
|
||||
finally:
|
||||
_current_connection = None
|
||||
|
||||
def force_wal_checkpoint():
    """Erzwingt einen WAL-Checkpoint, um alle Daten in die Hauptdatei zu schreiben"""
    try:
        with get_database_connection(timeout=10) as conn:
            # Aggressive WAL-Checkpoint-Strategien
            strategies = [
                ("TRUNCATE", "Vollständiger Checkpoint mit WAL-Truncate"),
                ("RESTART", "Checkpoint mit WAL-Restart"),
                ("FULL", "Vollständiger Checkpoint")
            ]

            for strategy, description in strategies:
                try:
                    result = conn.execute(f"PRAGMA wal_checkpoint({strategy})").fetchone()
                    if result and result[0] == 0:  # busy == 0 bedeutet Erfolg
                        logger.info(f"✅ {description} erfolgreich: {result}")
                        return True
                    else:
                        logger.warning(f"⚠️ {description} teilweise erfolgreich: {result}")
                except Exception as e:
                    logger.warning(f"⚠️ {description} fehlgeschlagen: {e}")
                    continue

            # Fallback: VACUUM für komplette Reorganisation
            logger.info("Führe VACUUM als Fallback durch...")
            conn.execute("VACUUM")
            logger.info("✅ VACUUM erfolgreich")
            return True

    except Exception as e:
        logger.error(f"Kritischer Fehler bei WAL-Checkpoint: {e}")
        return False

def optimize_migration_performance():
    """Optimiert die Datenbank für die Migration"""
    try:
        with get_database_connection(timeout=5) as conn:
            # Performance-Optimierungen für Migration
            optimizations = [
                ("PRAGMA cache_size = -64000", "Cache-Größe auf 64MB erhöht"),
                ("PRAGMA temp_store = MEMORY", "Temp-Store in Memory"),
                ("PRAGMA mmap_size = 268435456", "Memory-Mapped I/O aktiviert"),
                ("PRAGMA optimize", "Automatische Optimierungen")
            ]

            for pragma, description in optimizations:
                try:
                    conn.execute(pragma)
                    logger.info(f"✅ {description}")
                except Exception as e:
                    logger.warning(f"⚠️ Optimierung fehlgeschlagen ({description}): {e}")

    except Exception as e:
        logger.warning(f"Fehler bei Performance-Optimierung: {e}")

def main():
    """Führt die optimierte Schema-Migration aus."""
    global _migration_running
    _migration_running = True

    try:
        logger.info("🚀 Starte optimierte Datenbank-Schema-Migration...")

        # Überprüfe Datenbankdatei
        if not os.path.exists(DATABASE_PATH):
            logger.error(f"❌ Datenbankdatei nicht gefunden: {DATABASE_PATH}")
            return False

        # Initialer WAL-Checkpoint, um einen sauberen Zustand sicherzustellen
        logger.info("🔄 Führe initialen WAL-Checkpoint durch...")
        force_wal_checkpoint()

        # Performance-Optimierungen
        optimize_migration_performance()

        # Eigentliche Migration mit optimierter Verbindung
        with get_database_connection(timeout=60) as conn:
            cursor = conn.cursor()

            # Backup erstellen (mit Timeout)
            backup_path = f"{DATABASE_PATH}.backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            try:
                logger.info(f"📦 Erstelle Backup: {backup_path}")
                cursor.execute(f"VACUUM INTO '{backup_path}'")
                logger.info("✅ Backup erfolgreich erstellt")
            except Exception as e:
                logger.warning(f"⚠️ Backup-Erstellung fehlgeschlagen: {e}")

            # Migrationen durchführen (verkürzt für bessere Performance)
            migrations_performed = []

            if not _migration_running:
                return False

            # Schnelle Schema-Checks
            try:
                # Test der kritischen Abfrage
                cursor.execute("SELECT COUNT(*) FROM guest_requests WHERE duration_minutes IS NOT NULL")
                logger.info("✅ Schema-Integritätstest bestanden")
            except Exception:
                logger.info("🔧 Führe kritische Schema-Reparaturen durch...")

                # Nur die wichtigsten Reparaturen
                critical_fixes = [
                    ("ALTER TABLE guest_requests ADD COLUMN duration_minutes INTEGER", "duration_minutes zu guest_requests"),
                    ("ALTER TABLE users ADD COLUMN username VARCHAR(100)", "username zu users"),
                    ("UPDATE users SET username = email WHERE username IS NULL", "Username-Fallback")
                ]

                for sql, description in critical_fixes:
                    if not _migration_running:
                        break
                    try:
                        cursor.execute(sql)
                        logger.info(f"✅ {description}")
                        migrations_performed.append(description)
                    except sqlite3.OperationalError as e:
                        if "duplicate column" not in str(e).lower():
                            logger.warning(f"⚠️ {description}: {e}")

            # Commit und WAL-Checkpoint zwischen Operationen
            if migrations_performed:
                conn.commit()
                cursor.execute("PRAGMA wal_checkpoint(PASSIVE)")

            # Finale Optimierungen (reduziert)
            if _migration_running:
                essential_indices = [
                    "CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)",
                    "CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status)",
                    "CREATE INDEX IF NOT EXISTS idx_guest_requests_status ON guest_requests(status)"
                ]

                for index_sql in essential_indices:
                    try:
                        cursor.execute(index_sql)
                    except Exception:
                        pass  # Indizes sind nicht kritisch

                # Finale Statistiken
                cursor.execute("ANALYZE")
                migrations_performed.append("optimizations")

            # Finaler Commit
            conn.commit()
            logger.info(f"✅ Migration abgeschlossen. Bereiche: {', '.join(migrations_performed)}")

        # Abschließender WAL-Checkpoint (nach dem Schließen der Migrationsverbindung)
        logger.info("🔄 Führe abschließenden WAL-Checkpoint durch...")
        force_wal_checkpoint()

        # Kurze Pause, um sicherzustellen, dass alle I/O-Operationen abgeschlossen sind
        time.sleep(1)

        logger.info("🎉 Optimierte Schema-Migration erfolgreich abgeschlossen!")
        return True

    except KeyboardInterrupt:
        logger.info("🔄 Migration durch Benutzer unterbrochen")
        return False
    except Exception as e:
        logger.error(f"❌ Kritischer Fehler bei der Migration: {str(e)}")
        return False
    finally:
        _migration_running = False
        # Finale WAL-Bereinigung
        try:
            force_wal_checkpoint()
        except Exception:
            pass

if __name__ == "__main__":
    success = main()
    if not success:
        sys.exit(1)
425
backend/app - Kopie/utils/database_utils.py
Normal file
@@ -0,0 +1,425 @@
"""
|
||||
Erweiterte Datenbank-Utilities für Backup, Monitoring und Wartung.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3
|
||||
import threading
|
||||
import time
|
||||
import gzip
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from pathlib import Path
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.engine import Engine
|
||||
|
||||
from config.settings import DATABASE_PATH
|
||||
from utils.logging_config import get_logger
|
||||
from models import get_cached_session, create_optimized_engine
|
||||
|
||||
logger = get_logger("database")
|
||||
|
||||
# ===== BACKUP-SYSTEM =====
|
||||
|
||||
class DatabaseBackupManager:
    """
    Verwaltet automatische Datenbank-Backups mit Rotation.
    """

    def __init__(self, backup_dir: str = None):
        self.backup_dir = backup_dir or os.path.join(os.path.dirname(DATABASE_PATH), "backups")
        self.ensure_backup_directory()
        # RLock statt Lock: restore_backup ruft create_backup innerhalb des Locks auf
        self._backup_lock = threading.RLock()

    def ensure_backup_directory(self):
        """Stellt sicher, dass das Backup-Verzeichnis existiert."""
        Path(self.backup_dir).mkdir(parents=True, exist_ok=True)

    def create_backup(self, compress: bool = True) -> str:
        """
        Erstellt ein Backup der Datenbank.

        Args:
            compress: Ob das Backup komprimiert werden soll

        Returns:
            str: Pfad zum erstellten Backup
        """
        with self._backup_lock:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_filename = f"myp_backup_{timestamp}.db"

            if compress:
                backup_filename += ".gz"

            backup_path = os.path.join(self.backup_dir, backup_filename)

            try:
                if compress:
                    # Komprimiertes Backup erstellen
                    with open(DATABASE_PATH, 'rb') as f_in:
                        with gzip.open(backup_path, 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)
                else:
                    # Einfache Kopie
                    shutil.copy2(DATABASE_PATH, backup_path)

                logger.info(f"Datenbank-Backup erstellt: {backup_path}")
                return backup_path

            except Exception as e:
                logger.error(f"Fehler beim Erstellen des Backups: {str(e)}")
                raise

    def restore_backup(self, backup_path: str) -> bool:
        """
        Stellt ein Backup wieder her.

        Args:
            backup_path: Pfad zum Backup

        Returns:
            bool: True bei Erfolg
        """
        with self._backup_lock:
            try:
                # Sicherheitsbackup der bestehenden DB erstellen
                current_backup = self.create_backup()
                logger.info(f"Sicherheitsbackup erstellt: {current_backup}")

                if backup_path.endswith('.gz'):
                    # Komprimiertes Backup wiederherstellen
                    with gzip.open(backup_path, 'rb') as f_in:
                        with open(DATABASE_PATH, 'wb') as f_out:
                            shutil.copyfileobj(f_in, f_out)
                else:
                    # Einfache Kopie
                    shutil.copy2(backup_path, DATABASE_PATH)

                logger.info(f"Datenbank aus Backup wiederhergestellt: {backup_path}")
                return True

            except Exception as e:
                logger.error(f"Fehler beim Wiederherstellen des Backups: {str(e)}")
                return False

    def cleanup_old_backups(self, keep_days: int = 30):
        """
        Löscht alte Backups.

        Args:
            keep_days: Anzahl Tage, die Backups aufbewahrt werden sollen
        """
        cutoff_date = datetime.now() - timedelta(days=keep_days)
        deleted_count = 0

        try:
            for filename in os.listdir(self.backup_dir):
                if filename.startswith("myp_backup_"):
                    file_path = os.path.join(self.backup_dir, filename)
                    file_time = datetime.fromtimestamp(os.path.getctime(file_path))

                    if file_time < cutoff_date:
                        os.remove(file_path)
                        deleted_count += 1
                        logger.info(f"Altes Backup gelöscht: {filename}")

            if deleted_count > 0:
                logger.info(f"{deleted_count} alte Backups gelöscht")

        except Exception as e:
            logger.error(f"Fehler beim Bereinigen alter Backups: {str(e)}")

    def get_backup_list(self) -> List[Dict]:
        """
        Gibt eine Liste aller verfügbaren Backups zurück.

        Returns:
            List[Dict]: Liste mit Backup-Informationen
        """
        backups = []

        try:
            for filename in os.listdir(self.backup_dir):
                if filename.startswith("myp_backup_"):
                    file_path = os.path.join(self.backup_dir, filename)
                    file_stat = os.stat(file_path)

                    backups.append({
                        "filename": filename,
                        "path": file_path,
                        "size": file_stat.st_size,
                        "created": datetime.fromtimestamp(file_stat.st_ctime),
                        "compressed": filename.endswith('.gz')
                    })

            # Nach Erstellungsdatum sortieren (neueste zuerst)
            backups.sort(key=lambda x: x['created'], reverse=True)

        except Exception as e:
            logger.error(f"Fehler beim Abrufen der Backup-Liste: {str(e)}")

        return backups

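# Verwendungsskizze (Annahme: rein illustrativ, nicht Teil des Wartungsablaufs):
def _demo_backup_roundtrip():
    """Erstellt ein komprimiertes Backup und listet alle vorhandenen Backups auf."""
    manager = DatabaseBackupManager()
    path = manager.create_backup(compress=True)
    print(f"Backup erstellt: {path}")
    for info in manager.get_backup_list():
        print(f"{info['filename']}: {info['size']} Bytes, komprimiert={info['compressed']}")
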
# ===== DATENBANK-MONITORING =====

class DatabaseMonitor:
    """
    Überwacht die Datenbank-Performance und -Gesundheit.
    """

    def __init__(self):
        self.engine = create_optimized_engine()

def get_database_stats(self) -> Dict:
|
||||
"""
|
||||
Sammelt Datenbank-Statistiken.
|
||||
|
||||
Returns:
|
||||
Dict: Datenbank-Statistiken
|
||||
"""
|
||||
stats = {}
|
||||
|
||||
try:
|
||||
with self.engine.connect() as conn:
|
||||
# Datenbankgröße
|
||||
result = conn.execute(text("SELECT page_count * page_size as size FROM pragma_page_count(), pragma_page_size()"))
|
||||
db_size = result.fetchone()[0]
|
||||
stats['database_size_bytes'] = db_size
|
||||
stats['database_size_mb'] = round(db_size / (1024 * 1024), 2)
|
||||
|
||||
# WAL-Datei-Größe
|
||||
wal_path = DATABASE_PATH + "-wal"
|
||||
if os.path.exists(wal_path):
|
||||
wal_size = os.path.getsize(wal_path)
|
||||
stats['wal_size_bytes'] = wal_size
|
||||
stats['wal_size_mb'] = round(wal_size / (1024 * 1024), 2)
|
||||
else:
|
||||
stats['wal_size_bytes'] = 0
|
||||
stats['wal_size_mb'] = 0
|
||||
|
||||
# Journal-Modus
|
||||
result = conn.execute(text("PRAGMA journal_mode"))
|
||||
stats['journal_mode'] = result.fetchone()[0]
|
||||
|
||||
# Cache-Statistiken
|
||||
result = conn.execute(text("PRAGMA cache_size"))
|
||||
stats['cache_size'] = result.fetchone()[0]
|
||||
|
||||
# Synchronous-Modus
|
||||
result = conn.execute(text("PRAGMA synchronous"))
|
||||
stats['synchronous_mode'] = result.fetchone()[0]
|
||||
|
||||
# Tabellen-Statistiken
|
||||
result = conn.execute(text("""
|
||||
SELECT name,
|
||||
(SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name=m.name) as table_count
|
||||
FROM sqlite_master m WHERE type='table'
|
||||
"""))
|
||||
|
||||
table_stats = {}
|
||||
for table_name, _ in result.fetchall():
|
||||
if not table_name.startswith('sqlite_'):
|
||||
count_result = conn.execute(text(f"SELECT COUNT(*) FROM {table_name}"))
|
||||
table_stats[table_name] = count_result.fetchone()[0]
|
||||
|
||||
stats['table_counts'] = table_stats
|
||||
|
||||
# Letzte Wartung
|
||||
stats['last_analyze'] = self._get_last_analyze_time()
|
||||
stats['last_vacuum'] = self._get_last_vacuum_time()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler beim Sammeln der Datenbank-Statistiken: {str(e)}")
|
||||
stats['error'] = str(e)
|
||||
|
||||
return stats
|
||||
|
||||
def _get_last_analyze_time(self) -> Optional[str]:
|
||||
"""Ermittelt den Zeitpunkt der letzten ANALYZE-Operation."""
|
||||
try:
|
||||
# SQLite speichert keine direkten Timestamps für ANALYZE
|
||||
# Wir verwenden die Modifikationszeit der Statistik-Tabellen
|
||||
stat_path = DATABASE_PATH + "-stat"
|
||||
if os.path.exists(stat_path):
|
||||
return datetime.fromtimestamp(os.path.getmtime(stat_path)).isoformat()
|
||||
except:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _get_last_vacuum_time(self) -> Optional[str]:
|
||||
"""Ermittelt den Zeitpunkt der letzten VACUUM-Operation."""
|
||||
try:
|
||||
# Approximation über Datei-Modifikationszeit
|
||||
return datetime.fromtimestamp(os.path.getmtime(DATABASE_PATH)).isoformat()
|
||||
except:
|
||||
pass
|
||||
return None
|
||||
|
||||
def check_database_health(self) -> Dict:
|
||||
"""
|
||||
Führt eine Gesundheitsprüfung der Datenbank durch.
|
||||
|
||||
Returns:
|
||||
Dict: Gesundheitsstatus
|
||||
"""
|
||||
health = {
|
||||
"status": "healthy",
|
||||
"issues": [],
|
||||
"recommendations": []
|
||||
}
|
||||
|
||||
try:
|
||||
with self.engine.connect() as conn:
|
||||
# Integritätsprüfung
|
||||
result = conn.execute(text("PRAGMA integrity_check"))
|
||||
integrity_result = result.fetchone()[0]
|
||||
|
||||
if integrity_result != "ok":
|
||||
health["status"] = "critical"
|
||||
health["issues"].append(f"Integritätsprüfung fehlgeschlagen: {integrity_result}")
|
||||
|
||||
# WAL-Dateigröße prüfen
|
||||
wal_path = DATABASE_PATH + "-wal"
|
||||
if os.path.exists(wal_path):
|
||||
wal_size_mb = os.path.getsize(wal_path) / (1024 * 1024)
|
||||
if wal_size_mb > 100: # Über 100MB
|
||||
health["issues"].append(f"WAL-Datei sehr groß: {wal_size_mb:.1f}MB")
|
||||
health["recommendations"].append("WAL-Checkpoint durchführen")
|
||||
|
||||
# Freier Speicherplatz prüfen
|
||||
db_dir = os.path.dirname(DATABASE_PATH)
|
||||
free_space = shutil.disk_usage(db_dir).free / (1024 * 1024 * 1024) # GB
|
||||
|
||||
if free_space < 1: # Weniger als 1GB
|
||||
health["status"] = "warning" if health["status"] == "healthy" else health["status"]
|
||||
health["issues"].append(f"Wenig freier Speicherplatz: {free_space:.1f}GB")
|
||||
health["recommendations"].append("Speicherplatz freigeben oder alte Backups löschen")
|
||||
|
||||
# Connection Pool Status (falls verfügbar)
|
||||
# Hier könnten weitere Checks hinzugefügt werden
|
||||
|
||||
except Exception as e:
|
||||
health["status"] = "error"
|
||||
health["issues"].append(f"Fehler bei Gesundheitsprüfung: {str(e)}")
|
||||
logger.error(f"Fehler bei Datenbank-Gesundheitsprüfung: {str(e)}")
|
||||
|
||||
return health
|
||||
|
||||
def optimize_database(self) -> Dict:
|
||||
"""
|
||||
Führt Optimierungsoperationen auf der Datenbank durch.
|
||||
|
||||
Returns:
|
||||
Dict: Ergebnis der Optimierung
|
||||
"""
|
||||
result = {
|
||||
"operations": [],
|
||||
"success": True,
|
||||
"errors": []
|
||||
}
|
||||
|
||||
try:
|
||||
with self.engine.connect() as conn:
|
||||
# ANALYZE für bessere Query-Planung
|
||||
conn.execute(text("ANALYZE"))
|
||||
result["operations"].append("ANALYZE ausgeführt")
|
||||
|
||||
# WAL-Checkpoint
|
||||
checkpoint_result = conn.execute(text("PRAGMA wal_checkpoint(TRUNCATE)"))
|
||||
checkpoint_info = checkpoint_result.fetchone()
|
||||
result["operations"].append(f"WAL-Checkpoint: {checkpoint_info}")
|
||||
|
||||
# Incremental Vacuum
|
||||
conn.execute(text("PRAGMA incremental_vacuum"))
|
||||
result["operations"].append("Incremental Vacuum ausgeführt")
|
||||
|
||||
# Optimize Pragma
|
||||
conn.execute(text("PRAGMA optimize"))
|
||||
result["operations"].append("PRAGMA optimize ausgeführt")
|
||||
|
||||
conn.commit()
|
||||
|
||||
except Exception as e:
|
||||
result["success"] = False
|
||||
result["errors"].append(str(e))
|
||||
logger.error(f"Fehler bei Datenbank-Optimierung: {str(e)}")
|
||||
|
||||
return result
|
||||
|
||||
|
||||
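# Verwendungsskizze (Annahme: rein illustrativ): So ließen sich Statistiken und
# Gesundheitsstatus z. B. für eine Admin-Ansicht abfragen.
def _demo_monitor_report():
    """Gibt eine kompakte Übersicht über Größe, WAL-Status und Gesundheit aus."""
    monitor = DatabaseMonitor()
    stats = monitor.get_database_stats()
    print(f"Datenbankgröße: {stats.get('database_size_mb', '?')} MB, "
          f"WAL: {stats.get('wal_size_mb', '?')} MB, "
          f"Journal-Modus: {stats.get('journal_mode', '?')}")
    health = monitor.check_database_health()
    print(f"Status: {health['status']}, Probleme: {len(health['issues'])}")
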
# ===== AUTOMATISCHE WARTUNG =====

class DatabaseMaintenanceScheduler:
    """
    Plant und führt automatische Wartungsaufgaben durch.
    """

    def __init__(self):
        self.backup_manager = DatabaseBackupManager()
        self.monitor = DatabaseMonitor()
        self._running = False
        self._thread = None

    def start_maintenance_scheduler(self):
        """Startet den Wartungs-Scheduler."""
        if self._running:
            return

        self._running = True
        self._thread = threading.Thread(target=self._maintenance_loop, daemon=True)
        self._thread.start()
        logger.info("Datenbank-Wartungs-Scheduler gestartet")

    def stop_maintenance_scheduler(self):
        """Stoppt den Wartungs-Scheduler."""
        self._running = False
        if self._thread:
            self._thread.join(timeout=5)
        logger.info("Datenbank-Wartungs-Scheduler gestoppt")

    def _maintenance_loop(self):
        """Hauptschleife für Wartungsaufgaben."""
        last_backup = datetime.now()
        last_cleanup = datetime.now()
        last_optimization = datetime.now()

        while self._running:
            try:
                now = datetime.now()

                # Tägliches Backup (alle 24 Stunden)
                if (now - last_backup).total_seconds() > 86400:  # 24 Stunden
                    self.backup_manager.create_backup()
                    last_backup = now

                # Wöchentliche Bereinigung alter Backups (alle 7 Tage)
                if (now - last_cleanup).total_seconds() > 604800:  # 7 Tage
                    self.backup_manager.cleanup_old_backups()
                    last_cleanup = now

                # Tägliche Optimierung (alle 24 Stunden)
                if (now - last_optimization).total_seconds() > 86400:  # 24 Stunden
                    self.monitor.optimize_database()
                    last_optimization = now

                # 1 Stunde warten bis zum nächsten Check
                time.sleep(3600)

            except Exception as e:
                logger.error(f"Fehler im Wartungs-Scheduler: {str(e)}")
                time.sleep(300)  # 5 Minuten warten bei Fehlern

# ===== GLOBALE INSTANZEN =====

# Globale Instanzen für einfachen Zugriff
backup_manager = DatabaseBackupManager()
database_monitor = DatabaseMonitor()
maintenance_scheduler = DatabaseMaintenanceScheduler()

# Automatisch starten
maintenance_scheduler.start_maintenance_scheduler()
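# Anmerkung (Skizze): Für ein sauberes Herunterfahren kann der automatisch
# gestartete Scheduler z. B. über atexit gestoppt werden:
#
#   import atexit
#   atexit.register(maintenance_scheduler.stop_maintenance_scheduler)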
743
backend/app - Kopie/utils/debug_cli.py
Normal file
@@ -0,0 +1,743 @@
#!/usr/bin/env python3
"""
MYP Debug CLI
Kommandozeilen-Tool für Diagnose und Debugging der MYP-Anwendung
"""

import os
import sys
import argparse
import time
import json
import importlib
import logging
import sqlite3
from datetime import datetime
import traceback
from pprint import pprint

# Eigene Module importieren
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# Farbige Ausgabe für die Konsole
COLORS = {
    'RESET': '\033[0m',
    'BOLD': '\033[1m',
    'RED': '\033[31m',
    'GREEN': '\033[32m',
    'YELLOW': '\033[33m',
    'BLUE': '\033[34m',
    'MAGENTA': '\033[35m',
    'CYAN': '\033[36m',
}

# Emojis für verschiedene Log-Level und Kategorien
LOG_EMOJIS = {
    'DEBUG': '🔍',
    'INFO': 'ℹ️',
    'WARNING': '⚠️',
    'ERROR': '❌',
    'CRITICAL': '🔥',
    'SUCCESS': '✅',
    'DATABASE': '💾',
    'NETWORK': '🌐',
    'SYSTEM': '💻',
    'PRINTER': '🖨️',
    'API': '📡',
    'USER': '👤'
}

def supports_color():
    """Prüft, ob das Terminal Farben unterstützt."""
    if os.name == 'nt':
        try:
            import ctypes
            kernel32 = ctypes.windll.kernel32
            # Aktiviere VT100-Unterstützung unter Windows
            kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7)
            return True
        except Exception:
            return False
    else:
        return sys.stdout.isatty()

USE_COLOR = supports_color()

def colorize(text, color):
    """Färbt den Text ein, wenn Farben unterstützt werden."""
    if USE_COLOR and color in COLORS:
        return f"{COLORS[color]}{text}{COLORS['RESET']}"
    return text

def print_success(message):
    print(f"{LOG_EMOJIS['SUCCESS']} {colorize(message, 'GREEN')}")

def print_error(message):
    print(f"{LOG_EMOJIS['ERROR']} {colorize(message, 'RED')}")

def print_warning(message):
    print(f"{LOG_EMOJIS['WARNING']} {colorize(message, 'YELLOW')}")

def print_info(message):
    print(f"{LOG_EMOJIS['INFO']} {colorize(message, 'BLUE')}")

def print_debug(message):
    print(f"{LOG_EMOJIS['DEBUG']} {colorize(message, 'CYAN')}")

def print_database(message):
    print(f"{LOG_EMOJIS['DATABASE']} {colorize(message, 'MAGENTA')}")

def print_network(message):
    print(f"{LOG_EMOJIS['NETWORK']} {colorize(message, 'CYAN')}")

def print_system(message):
    print(f"{LOG_EMOJIS['SYSTEM']} {colorize(message, 'BLUE')}")

def print_printer(message):
    print(f"{LOG_EMOJIS['PRINTER']} {colorize(message, 'GREEN')}")

def print_header(message):
    print(f"\n{colorize('='*80, 'BOLD')}")
    print(f"{colorize(message.center(80), 'BOLD')}")
    print(f"{colorize('='*80, 'BOLD')}\n")

def print_section(message):
    print(f"\n{colorize('-'*40, 'BOLD')}")
    print(f"{colorize(message, 'BOLD')}")
    print(f"{colorize('-'*40, 'BOLD')}\n")

# Hilfsfunktionen

def get_database_path():
    """Gibt den Pfad zur Datenbank zurück."""
    try:
        from config.settings import DATABASE_PATH
        return DATABASE_PATH
    except ImportError:
        # Fallback auf Standard-Pfad
        base_dir = os.path.dirname(os.path.abspath(__file__))
        return os.path.join(base_dir, "database", "myp.db")

def check_database():
    """Prüft den Zustand der Datenbank."""
    db_path = get_database_path()

    if not os.path.exists(db_path):
        print_error(f"Datenbank nicht gefunden: {db_path}")
        return False

    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()

        # Tabellen auflisten
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
        tables = cursor.fetchall()

        print_database(f"Datenbank gefunden: {db_path}")
        print_database(f"Größe: {os.path.getsize(db_path) / (1024*1024):.2f} MB")
        print_database(f"Tabellen ({len(tables)}):")

        for table in tables:
            # Anzahl der Datensätze pro Tabelle
            cursor.execute(f"SELECT COUNT(*) FROM {table[0]}")
            count = cursor.fetchone()[0]
            print(f"  📋 {table[0]}: {count} Einträge")

        conn.close()
        return True
    except sqlite3.Error as e:
        print_error(f"Datenbankfehler: {e}")
        return False
    except Exception as e:
        print_error(f"Fehler beim Prüfen der Datenbank: {e}")
        return False

def check_log_files():
    """Prüft die Log-Dateien und zeigt die neuesten Einträge an."""
    try:
        from config.settings import LOG_DIR, LOG_SUBDIRS

        if not os.path.exists(LOG_DIR):
            print_error(f"Log-Verzeichnis nicht gefunden: {LOG_DIR}")
            return False

        print_info(f"Log-Verzeichnis: {LOG_DIR}")

        for subdir in LOG_SUBDIRS:
            log_path = os.path.join(LOG_DIR, subdir, f"{subdir}.log")

            if not os.path.exists(log_path):
                print_warning(f"Log-Datei nicht gefunden: {log_path}")
                continue

            size = os.path.getsize(log_path) / 1024  # KB
            print_info(f"Log-Datei: {subdir}.log ({size:.1f} KB)")

            # Letzte Zeilen anzeigen
            try:
                with open(log_path, 'r') as f:
                    lines = f.readlines()
                    last_lines = lines[-5:]  # Letzte 5 Zeilen

                    print("  Letzte Einträge:")
                    for line in last_lines:
                        line = line.strip()

                        # Farbliche Hervorhebung je nach Log-Level
                        if "ERROR" in line:
                            print(f"    {colorize(line, 'RED')}")
                        elif "WARNING" in line:
                            print(f"    {colorize(line, 'YELLOW')}")
                        elif "INFO" in line:
                            print(f"    {colorize(line, 'GREEN')}")
                        elif "DEBUG" in line:
                            print(f"    {colorize(line, 'CYAN')}")
                        else:
                            print(f"    {line}")
            except Exception as e:
                print_warning(f"  Fehler beim Lesen der Log-Datei: {e}")

        return True
    except ImportError:
        print_error("Konfiguration für Logs nicht gefunden")
        return False
    except Exception as e:
        print_error(f"Fehler beim Prüfen der Log-Dateien: {e}")
        return False

def check_environment():
    """Prüft die Umgebungsvariablen und System-Einstellungen."""
    print_info("Umgebungsinformationen:")
    print(f"  Python-Version: {sys.version.split()[0]}")
    print(f"  Betriebssystem: {os.name} - {sys.platform}")
    print(f"  Arbeitsverzeichnis: {os.getcwd()}")

    print_info("Wichtige Umgebungsvariablen:")
    env_vars = [
        "FLASK_ENV", "FLASK_DEBUG", "MYP_SSL_ENABLED",
        "MYP_SSL_HOSTNAME", "PYTHONPATH"
    ]

    for var in env_vars:
        value = os.environ.get(var, "nicht gesetzt")
        print(f"  {var}: {value}")

    try:
        # Flask-Konfiguration prüfen
        print_info("Flask-Konfiguration:")
        from config.settings import FLASK_HOST, FLASK_PORT, FLASK_DEBUG, SSL_ENABLED

        print(f"  Host: {FLASK_HOST}")
        print(f"  Port: {FLASK_PORT}")
        print(f"  Debug-Modus: {FLASK_DEBUG}")
        print(f"  SSL aktiviert: {SSL_ENABLED}")

        # Module prüfen
        required_modules = [
            'flask', 'sqlalchemy', 'flask_login', 'werkzeug'
        ]

        print_info("Benötigte Module:")
        for module in required_modules:
            try:
                mod = importlib.import_module(module)
                version = getattr(mod, '__version__', 'unbekannt')
                print(f"  {module}: {colorize('OK', 'GREEN')} (Version {version})")
            except ImportError:
                print(f"  {module}: {colorize('FEHLT', 'RED')}")

    except ImportError:
        print_warning("Flask-Konfiguration konnte nicht geladen werden")
    except Exception as e:
        print_error(f"Fehler beim Prüfen der Umgebung: {e}")

def scan_printer(ip_address, timeout=5):
    """Scannt einen Drucker und zeigt Informationen an."""
    import socket

    print_printer(f"Prüfe Drucker mit IP: {ip_address}")

    # Ping testen
    import subprocess
    try:
        if os.name == 'nt':  # Windows
            cmd = ['ping', '-n', '1', '-w', str(timeout * 1000), ip_address]
        else:  # Unix/Linux/macOS
            cmd = ['ping', '-c', '1', '-W', str(timeout), ip_address]

        print("  🏓 Ping-Test: ", end="")
        result = subprocess.run(cmd, capture_output=True, text=True,
                                encoding='utf-8', errors='replace')

        if result.returncode == 0:
            print(colorize("Erreichbar", "GREEN"))
        else:
            print(colorize("Nicht erreichbar", "RED"))
            print(f"  📄 Details: {result.stdout}")
            return
    except Exception as e:
        print(colorize(f"Fehler bei Ping-Test: {e}", "RED"))

    # Offene Ports prüfen
    common_ports = [80, 443, 8080, 8443, 631, 9100, 9101, 9102]
    open_ports = []

    print("  🔍 Port-Scan: ", end="")
    for port in common_ports:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        result = sock.connect_ex((ip_address, port))
        if result == 0:
            open_ports.append(port)
        sock.close()

    if open_ports:
        print(colorize(f"Offene Ports: {', '.join(map(str, open_ports))}", "GREEN"))
    else:
        print(colorize("Keine offenen Ports gefunden", "YELLOW"))

    # Drucker-Info über Tapo-API testen (wenn vorhanden)
    try:
        from PyP100 import PyP110

        print("  🔌 Smart Plug Test: ", end="")
        try:
            # Standardmäßig Anmeldeinformationen aus der Konfiguration verwenden
            from config.settings import TAPO_USERNAME, TAPO_PASSWORD

            p110 = PyP110.P110(ip_address, TAPO_USERNAME, TAPO_PASSWORD)
            p110.handshake()
            p110.login()

            device_info = p110.getDeviceInfo()
            print(colorize("Verbunden", "GREEN"))
            print(f"  📛 Gerätename: {device_info.get('nickname', 'Unbekannt')}")
            print(f"  ⚡ Status: {'Ein' if device_info.get('device_on', False) else 'Aus'}")

            if 'on_time' in device_info:
                on_time = device_info['on_time']
                print(f"  ⏱️ Betriebszeit: {on_time // 60} Minuten, {on_time % 60} Sekunden")

        except Exception as e:
            print(colorize(f"Fehler: {e}", "RED"))
    except ImportError:
        print_warning("  PyP100-Modul nicht verfügbar - Smart Plug Test übersprungen")

def check_printers_from_db():
    """Prüft die in der Datenbank gespeicherten Drucker."""
    db_path = get_database_path()

    if not os.path.exists(db_path):
        print_error(f"Datenbank nicht gefunden: {db_path}")
        return

    try:
        conn = sqlite3.connect(db_path)
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # Drucker-Tabelle prüfen
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='printer';")
        if not cursor.fetchone():
            print_error("Drucker-Tabelle nicht gefunden")
            conn.close()
            return

        # Drucker auslesen
        cursor.execute("SELECT * FROM printer;")
        printers = cursor.fetchall()

        if not printers:
            print_warning("Keine Drucker in der Datenbank gefunden")
            conn.close()
            return

        print_info(f"{len(printers)} Drucker gefunden:")

        for printer in printers:
            status_color = 'GREEN' if printer['status'] == 'online' else 'RED'
            print(f"  {printer['name']}: {colorize(printer['status'], status_color)}")
            print(f"    IP: {printer['ip_address']}")
            print(f"    Plug IP: {printer['plug_ip'] or 'Nicht konfiguriert'}")

            # Detaillierteren Status prüfen
            if printer['plug_ip']:
                ask = input(f"  Möchten Sie den Drucker {printer['name']} scannen? (j/n): ")
                if ask.lower() in ('j', 'ja', 'y', 'yes'):
                    scan_printer(printer['plug_ip'])

        conn.close()
    except Exception as e:
        print_error(f"Fehler beim Prüfen der Drucker: {e}")
        traceback.print_exc()

def check_flask_routes():
    """Zeigt alle verfügbaren Flask-Routen an."""
    try:
        # Versuche, die Flask-App zu importieren
        sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

        try:
            from app import app as flask_app
        except ImportError:
            print_error("Flask-App konnte nicht importiert werden")
            return

        # Alle Routen auflisten
        print_info("Verfügbare Flask-Routen:")

        routes = []
        for rule in flask_app.url_map.iter_rules():
            routes.append({
                'endpoint': rule.endpoint,
                'methods': ', '.join(sorted(rule.methods - {'HEAD', 'OPTIONS'})),
                'path': rule.rule
            })

        # Nach Pfad sortieren
        routes = sorted(routes, key=lambda x: x['path'])

        # Routen anzeigen
        for route in routes:
            method_color = 'GREEN' if 'GET' in route['methods'] else 'BLUE'
            print(f"  {colorize(route['methods'], method_color)} {route['path']}")
            print(f"    → {route['endpoint']}")

        print_info(f"Insgesamt {len(routes)} Routen gefunden")

    except Exception as e:
        print_error(f"Fehler beim Abrufen der Flask-Routen: {e}")
        traceback.print_exc()

def print_system_info():
    """Zeigt detaillierte Systeminformationen an."""
    print_header("Systeminformationen")

    print_section("Basisinformationen")
    import platform
    print(f"Python-Version: {platform.python_version()}")
    print(f"Betriebssystem: {platform.system()} {platform.release()}")
    print(f"Architektur: {platform.machine()}")
    print(f"Prozessor: {platform.processor()}")

    print_section("Speicher")
    try:
        import psutil
        vm = psutil.virtual_memory()
        print(f"Gesamter Speicher: {vm.total / (1024**3):.1f} GB")
        print(f"Verfügbarer Speicher: {vm.available / (1024**3):.1f} GB")
        print(f"Speicherauslastung: {vm.percent}%")

        disk = psutil.disk_usage('/')
        print(f"Festplatte gesamt: {disk.total / (1024**3):.1f} GB")
        print(f"Festplatte frei: {disk.free / (1024**3):.1f} GB")
        print(f"Festplattenauslastung: {disk.percent}%")
    except ImportError:
        print_warning("psutil-Modul nicht verfügbar - eingeschränkte Informationen")

    print_section("Netzwerk")
    try:
        import socket
        hostname = socket.gethostname()
        ip_address = socket.gethostbyname(hostname)
        print(f"Hostname: {hostname}")
        print(f"IP-Adresse: {ip_address}")

        # Netzwerkschnittstellen (nur wenn psutil oben erfolgreich importiert wurde)
        if 'psutil' in sys.modules:
            import psutil
            print("Netzwerkschnittstellen:")
            for name, addrs in psutil.net_if_addrs().items():
                for addr in addrs:
                    if addr.family == socket.AF_INET:
                        print(f"  {name}: {addr.address}")
    except Exception as e:
        print_warning(f"Fehler beim Abrufen der Netzwerkinformationen: {e}")

def test_logging_system():
    """Testet das verbesserte Logging-System mit allen Features."""
    print_header("Logging-System Test")

    try:
        # Versuche, die neuen Logging-Funktionen zu importieren
        from utils.logging_config import get_logger, debug_request, debug_response, measure_execution_time

        print_success("Neue Logging-Module erfolgreich importiert")

        # Test verschiedener Logger
        test_loggers = ['app', 'auth', 'jobs', 'printers', 'errors']

        print_section("Logger-Tests")
        for logger_name in test_loggers:
            try:
                logger = get_logger(logger_name)

                # Test verschiedener Log-Level
                logger.debug(f"🔍 Debug-Test für {logger_name}")
                logger.info(f"ℹ️ Info-Test für {logger_name}")
                logger.warning(f"⚠️ Warning-Test für {logger_name}")

                print_success(f"Logger '{logger_name}' funktioniert korrekt")
            except Exception as e:
                print_error(f"Fehler beim Testen von Logger '{logger_name}': {e}")

        # Test Performance-Monitoring
        print_section("Performance-Monitoring Test")

        @measure_execution_time(logger=get_logger("app"), task_name="Test-Funktion")
        def test_function():
            """Eine Test-Funktion für das Performance-Monitoring."""
            time.sleep(0.1)  # Simuliere etwas Arbeit
            return "Test erfolgreich"

        result = test_function()
        print_success(f"Performance-Monitoring Test: {result}")

        # Test der Debug-Utilities
        print_section("Debug-Utilities Test")

        try:
            from utils.debug_utils import debug_dump, debug_trace, memory_usage

            # Test debug_dump
            test_data = {
                "version": "1.0.0",
                "features": ["emojis", "colors", "performance-monitoring"],
                "status": "active"
            }
            debug_dump(test_data, "Test-Konfiguration")

            # Test memory_usage
            memory_info = memory_usage()
            print_system(f"Aktueller Speicherverbrauch: {memory_info['rss']:.2f} MB")

            print_success("Debug-Utilities funktionieren korrekt")

        except ImportError as e:
            print_warning(f"Debug-Utilities nicht verfügbar: {e}")

        # Zusammenfassung
        print_section("Test-Zusammenfassung")
        print_success("🎉 Alle Logging-System-Tests erfolgreich abgeschlossen!")
        print_info("Features verfügbar:")
        print("  ✅ Farbige Log-Ausgaben mit ANSI-Codes")
        print("  ✅ Emoji-Integration für bessere Lesbarkeit")
        print("  ✅ HTTP-Request/Response-Logging")
        print("  ✅ Performance-Monitoring mit Ausführungszeit")
        print("  ✅ Cross-Platform-Unterstützung (Windows/Unix)")
        print("  ✅ Strukturierte Debug-Informationen")

    except ImportError as e:
        print_error(f"Logging-Module nicht verfügbar: {e}")
        print_warning("Stelle sicher, dass alle Module korrekt installiert sind")
    except Exception as e:
        print_error(f"Unerwarteter Fehler beim Logging-Test: {e}")
        traceback.print_exc()

# Hauptfunktionen für die Befehlszeile

def diagnose():
    """Führt eine umfassende Diagnose durch."""
    print_header("MYP Diagnose-Tool")

    print_section("Systemprüfung")
    check_environment()

    print_section("Datenbankprüfung")
    check_database()

    print_section("Log-Dateien")
    check_log_files()

    print_success("Diagnose abgeschlossen!")

def scan_printers():
    """Scannt und prüft alle Drucker."""
    print_header("Drucker-Scanner")

    # Direkter Scan einer IP-Adresse
    ip = input("IP-Adresse zum Scannen (leer lassen, um Drucker aus der Datenbank zu prüfen): ")

    if ip:
        scan_printer(ip)
    else:
        check_printers_from_db()

def show_routes():
    """Zeigt alle verfügbaren API-Routen an."""
    print_header("API-Routen")
    check_flask_routes()

def system_info():
    """Zeigt detaillierte Systeminformationen an."""
    print_system_info()

def show_logs():
    """Zeigt und analysiert Log-Dateien."""
    print_header("Log-Analyse")

    try:
        from config.settings import LOG_DIR, LOG_SUBDIRS

        if not os.path.exists(LOG_DIR):
            print_error(f"Log-Verzeichnis nicht gefunden: {LOG_DIR}")
            return

        print_info(f"Log-Verzeichnis: {LOG_DIR}")
        print_info("Verfügbare Logs:")

        for i, subdir in enumerate(LOG_SUBDIRS, 1):
            log_path = os.path.join(LOG_DIR, subdir, f"{subdir}.log")
            size = "Nicht gefunden"

            if os.path.exists(log_path):
                size = f"{os.path.getsize(log_path) / 1024:.1f} KB"

            print(f"  {i}. {subdir}.log ({size})")

        choice = input("\nWelches Log möchten Sie anzeigen? (Nummer oder Name): ")

        # Nummer in Namen umwandeln
        try:
            choice_num = int(choice) - 1
            if 0 <= choice_num < len(LOG_SUBDIRS):
                choice = LOG_SUBDIRS[choice_num]
        except ValueError:
            pass

        # Prüfen, ob die Wahl gültig ist
        if choice not in LOG_SUBDIRS:
            print_error(f"Ungültige Auswahl: {choice}")
            return

        log_path = os.path.join(LOG_DIR, choice, f"{choice}.log")

        if not os.path.exists(log_path):
            print_error(f"Log-Datei nicht gefunden: {log_path}")
            return

        # Anzahl der anzuzeigenden Zeilen
        lines_count = input("Anzahl der anzuzeigenden Zeilen (Standard: 20): ")
        lines_count = int(lines_count) if lines_count.isdigit() else 20

        # Filter für bestimmte Log-Level
        level_filter = input("Nach Log-Level filtern (INFO, WARNING, ERROR oder leer für alle): ").upper()

        # Log-Datei anzeigen
        with open(log_path, 'r') as f:
            lines = f.readlines()

            # Filtern nach Log-Level
            if level_filter:
                lines = [line for line in lines if level_filter in line]

            # Letzte n Zeilen auswählen
            lines = lines[-lines_count:]

            print_section(f"Log-Datei: {choice}.log (letzte {len(lines)} Einträge)")

            for line in lines:
                line = line.strip()

                # Farbliche Hervorhebung je nach Log-Level
                if "ERROR" in line:
                    print(colorize(line, 'RED'))
                elif "WARNING" in line:
                    print(colorize(line, 'YELLOW'))
                elif "INFO" in line:
                    print(colorize(line, 'GREEN'))
                elif "DEBUG" in line:
                    print(colorize(line, 'CYAN'))
                else:
                    print(line)

    except ImportError:
        print_error("Konfiguration für Logs nicht gefunden")
    except Exception as e:
        print_error(f"Fehler beim Anzeigen der Log-Dateien: {e}")
        traceback.print_exc()

def parse_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(description="MYP Debug CLI")

    subparsers = parser.add_subparsers(dest="command", help="Befehl")

    subparsers.add_parser("diagnose", help="Führt eine umfassende Diagnose durch")
    subparsers.add_parser("scan", help="Scannt und prüft alle Drucker")
    subparsers.add_parser("routes", help="Zeigt alle verfügbaren API-Routen an")
    subparsers.add_parser("sysinfo", help="Zeigt detaillierte Systeminformationen an")
    subparsers.add_parser("logs", help="Zeigt und analysiert Log-Dateien")
    subparsers.add_parser("test-logging", help="Testet das verbesserte Logging-System")

    return parser.parse_args()

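# Beispielaufrufe auf der Kommandozeile (Annahme: Aufruf aus dem Verzeichnis,
# in dem das Skript liegt):
#
#   python debug_cli.py diagnose      # umfassende Diagnose
#   python debug_cli.py scan          # Drucker scannen
#   python debug_cli.py routes        # Flask-Routen anzeigen
#   python debug_cli.py test-logging  # Logging-System testen
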
def main():
    """Hauptfunktion."""
    args = parse_args()

    if args.command == "diagnose":
        diagnose()
    elif args.command == "scan":
        scan_printers()
    elif args.command == "routes":
        show_routes()
    elif args.command == "sysinfo":
        system_info()
    elif args.command == "logs":
        show_logs()
    elif args.command == "test-logging":
        test_logging_system()
    else:
        # Interaktives Menü, wenn kein Befehl angegeben wurde
        print_header("MYP Debug CLI")
        print("Wählen Sie eine Option:")
        print("  1. Diagnose durchführen")
        print("  2. Drucker scannen")
        print("  3. API-Routen anzeigen")
        print("  4. Systeminformationen anzeigen")
        print("  5. Log-Dateien anzeigen")
        print("  6. Logging-System testen")
        print("  0. Beenden")

        choice = input("\nIhre Wahl: ")

        if choice == "1":
            diagnose()
        elif choice == "2":
            scan_printers()
        elif choice == "3":
            show_routes()
        elif choice == "4":
            system_info()
        elif choice == "5":
            show_logs()
        elif choice == "6":
            test_logging_system()
        elif choice == "0":
            print("Auf Wiedersehen!")
            sys.exit(0)
        else:
            print_error("Ungültige Auswahl")

if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        print_info("\nProgramm wurde durch Benutzer abgebrochen")
    except Exception as e:
        print_error(f"Unerwarteter Fehler: {e}")
        traceback.print_exc()
422
backend/app - Kopie/utils/debug_drucker_erkennung.py
Normal file
@@ -0,0 +1,422 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Debug-Skript für Druckererkennung
Testet die Druckererkennung und identifiziert Probleme
"""

import sys
import os
import requests
import json
import time
import threading
from datetime import datetime
import sqlite3
import subprocess
import platform

# Füge das Anwendungsverzeichnis zum Python-Pfad hinzu
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

def log_message(message, level="INFO"):
|
||||
"""Logge eine Nachricht mit Zeitstempel"""
|
||||
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
print(f"[{timestamp}] [{level}] {message}")
|
||||
|
||||
def test_database_connection():
    """Teste die Datenbankverbindung"""
    log_message("Teste Datenbankverbindung...")

    try:
        # Versuche, eine SQLite-Datenbank zu öffnen
        db_files = ['database.db', 'app.db', 'myp.db']

        for db_file in db_files:
            if os.path.exists(db_file):
                log_message(f"Gefundene Datenbankdatei: {db_file}")

                conn = sqlite3.connect(db_file)
                cursor = conn.cursor()

                # Prüfe, ob die Printer-Tabelle existiert
                cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='printer';")
                if cursor.fetchone():
                    log_message("✅ Printer-Tabelle gefunden")

                    # Zähle Drucker
                    cursor.execute("SELECT COUNT(*) FROM printer;")
                    count = cursor.fetchone()[0]
                    log_message(f"📊 Anzahl Drucker in Datenbank: {count}")

                    # Zeige Drucker-Details
                    cursor.execute("SELECT id, name, plug_ip, status FROM printer;")
                    printers = cursor.fetchall()

                    for printer in printers:
                        log_message(f"  Drucker {printer[0]}: {printer[1]} ({printer[2]}) - Status: {printer[3]}")

                    conn.close()
                    return True
                else:
                    log_message("❌ Printer-Tabelle nicht gefunden")
                    conn.close()

        log_message("❌ Keine gültige Datenbank gefunden")
        return False

    except Exception as e:
        log_message(f"❌ Datenbankfehler: {str(e)}", "ERROR")
        return False

def test_api_endpoints():
    """Teste die API-Endpunkte"""
    log_message("Teste API-Endpunkte...")

    base_url = "http://localhost:5000"
    endpoints = [
        "/api/printers",
        "/api/printers/status"
    ]

    for endpoint in endpoints:
        try:
            log_message(f"Teste {endpoint}...")

            response = requests.get(f"{base_url}{endpoint}", timeout=10)

            log_message(f"  Status Code: {response.status_code}")

            if response.status_code == 200:
                try:
                    data = response.json()
                    if endpoint == "/api/printers":
                        if 'printers' in data:
                            log_message(f"  ✅ {len(data['printers'])} Drucker geladen")
                        else:
                            log_message(f"  ⚠️ Unerwartete Antwortstruktur: {list(data.keys())}")
                    else:
                        if isinstance(data, list):
                            log_message(f"  ✅ {len(data)} Drucker mit Status geladen")
                        else:
                            log_message(f"  ⚠️ Unerwartete Antwortstruktur: {type(data)}")
                except json.JSONDecodeError:
                    log_message("  ❌ Ungültige JSON-Antwort", "ERROR")
            else:
                log_message(f"  ❌ HTTP-Fehler: {response.status_code}", "ERROR")
                try:
                    error_data = response.json()
                    log_message(f"  Fehlermeldung: {error_data.get('error', 'Unbekannt')}", "ERROR")
                except Exception:
                    log_message(f"  Antwort: {response.text[:200]}", "ERROR")

        except requests.exceptions.ConnectionError:
            log_message(f"  ❌ Verbindung zu {base_url} fehlgeschlagen", "ERROR")
            log_message("  Ist die Flask-Anwendung gestartet?", "ERROR")
        except requests.exceptions.Timeout:
            log_message(f"  ❌ Timeout bei {endpoint}", "ERROR")
        except Exception as e:
            log_message(f"  ❌ Fehler: {str(e)}", "ERROR")

def test_network_connectivity():
    """Teste Netzwerkverbindung zu Druckern"""
    log_message("Teste Netzwerkverbindung zu Druckern...")

    # Lade Drucker aus Datenbank
    try:
        db_files = ['database.db', 'app.db', 'myp.db']
        printers = []

        for db_file in db_files:
            if os.path.exists(db_file):
                conn = sqlite3.connect(db_file)
                cursor = conn.cursor()
                cursor.execute("SELECT name, plug_ip FROM printer WHERE plug_ip IS NOT NULL;")
                printers = cursor.fetchall()
                conn.close()
                break

        if not printers:
            log_message("❌ Keine Drucker mit IP-Adressen gefunden")
            return

        for name, ip in printers:
            log_message(f"Teste Verbindung zu {name} ({ip})...")

            # Ping-Test
            try:
                if platform.system().lower() == "windows":
                    result = subprocess.run(['ping', '-n', '1', '-w', '3000', ip],
                                            capture_output=True, text=True, timeout=5,
                                            encoding='utf-8', errors='replace')
                else:
                    result = subprocess.run(['ping', '-c', '1', '-W', '3', ip],
                                            capture_output=True, text=True, timeout=5,
                                            encoding='utf-8', errors='replace')

                if result.returncode == 0:
                    log_message("  ✅ Ping erfolgreich")
                else:
                    log_message("  ❌ Ping fehlgeschlagen")

            except subprocess.TimeoutExpired:
                log_message("  ❌ Ping-Timeout")
            except Exception as e:
                log_message(f"  ❌ Ping-Fehler: {str(e)}")

            # HTTP-Test (falls Drucker ein Webinterface hat)
            try:
                response = requests.get(f"http://{ip}", timeout=3)
                log_message(f"  ✅ HTTP-Verbindung erfolgreich (Status: {response.status_code})")
            except requests.exceptions.Timeout:
                log_message("  ⚠️ HTTP-Timeout (normal für Drucker ohne Webinterface)")
            except requests.exceptions.ConnectionError:
                log_message("  ⚠️ HTTP-Verbindung fehlgeschlagen (normal für Drucker ohne Webinterface)")
            except Exception as e:
                log_message(f"  ⚠️ HTTP-Fehler: {str(e)}")

    except Exception as e:
        log_message(f"❌ Fehler beim Testen der Netzwerkverbindung: {str(e)}", "ERROR")

def test_tapo_connections():
    """Teste TP-Link Tapo P110-Steckdosen-Verbindungen"""
    log_message("Teste TP-Link Tapo P110-Steckdosen-Verbindungen...")

    try:
        # PyP100 importieren
        from PyP100 import PyP110
        log_message("✅ PyP100-Modul erfolgreich importiert")
    except ImportError:
        log_message("❌ PyP100-Modul nicht verfügbar", "ERROR")
        log_message("  Installiere mit: pip install PyP100", "INFO")
        return

    # Lade Drucker aus Datenbank
    try:
        db_files = ['database.db', 'app.db', 'myp.db']
        printers = []

        for db_file in db_files:
            if os.path.exists(db_file):
                conn = sqlite3.connect(db_file)
                cursor = conn.cursor()
                cursor.execute("SELECT id, name, plug_ip, plug_username, plug_password FROM printer WHERE plug_ip IS NOT NULL;")
                printers = cursor.fetchall()
                conn.close()
                break

        if not printers:
            log_message("❌ Keine Drucker mit Tapo-Konfiguration gefunden")
            return

        successful_connections = 0
        total_printers = len(printers)

        for printer_id, name, plug_ip, plug_username, plug_password in printers:
            log_message(f"Teste Tapo-Verbindung zu {name} ({plug_ip})...")

            # Konfiguration validieren
            if not all([plug_ip, plug_username, plug_password]):
                log_message("  ❌ Unvollständige Konfiguration")
                missing = []
                if not plug_ip: missing.append("IP-Adresse")
                if not plug_username: missing.append("Benutzername")
                if not plug_password: missing.append("Passwort")
                log_message(f"  Fehlend: {', '.join(missing)}")
                continue

            try:
                # Tapo-Verbindung herstellen
                p110 = PyP110.P110(plug_ip, plug_username, plug_password)
                p110.handshake()  # Authentifizierung
                p110.login()      # Login

                # Geräteinformationen abrufen
                device_info = p110.getDeviceInfo()

                log_message("  ✅ Tapo-Verbindung erfolgreich")
                log_message(f"  📛 Gerätename: {device_info.get('nickname', 'Unbekannt')}")
                log_message(f"  ⚡ Status: {'Ein' if device_info.get('device_on', False) else 'Aus'}")

                if 'on_time' in device_info:
                    on_time = device_info.get('on_time', 0)
                    hours, minutes = divmod(on_time // 60, 60)
                    log_message(f"  ⏱️ Betriebszeit: {hours}h {minutes}m")

                if 'power_usage' in device_info:
                    power_usage = device_info.get('power_usage', {})
                    current_power = power_usage.get('power_mw', 0) / 1000  # mW zu W
                    log_message(f"  🔋 Aktueller Verbrauch: {current_power:.1f}W")

                successful_connections += 1

            except Exception as e:
                log_message(f"  ❌ Tapo-Verbindung fehlgeschlagen: {str(e)}")

                # Detaillierte Fehleranalyse
                if "login" in str(e).lower():
                    log_message("  🔐 Mögliche Ursache: Falsche Anmeldedaten")
                elif "timeout" in str(e).lower():
                    log_message("  ⏱️ Mögliche Ursache: Netzwerk-Timeout")
                elif "connect" in str(e).lower():
                    log_message("  🌐 Mögliche Ursache: Steckdose nicht erreichbar")
                elif "handshake" in str(e).lower():
                    log_message("  🤝 Mögliche Ursache: Protokoll-Handshake fehlgeschlagen")

        # Zusammenfassung
        success_rate = (successful_connections / total_printers * 100) if total_printers > 0 else 0
        log_message("📊 Tapo-Verbindungs-Zusammenfassung:")
        log_message(f"  Getestete Drucker: {total_printers}")
        log_message(f"  Erfolgreiche Verbindungen: {successful_connections}")
        log_message(f"  Erfolgsrate: {success_rate:.1f}%")

        if successful_connections == total_printers:
            log_message("🎉 Alle Tapo-Verbindungen erfolgreich!")
        elif successful_connections > 0:
            log_message("⚠️ Einige Tapo-Verbindungen fehlgeschlagen")
        else:
            log_message("❌ Keine Tapo-Verbindungen erfolgreich", "ERROR")

    except Exception as e:
        log_message(f"❌ Fehler beim Testen der Tapo-Verbindungen: {str(e)}", "ERROR")

def test_flask_app_status():
    """Teste den Status der Flask-Anwendung"""
    log_message("Teste Flask-Anwendung...")

    try:
        # Teste Hauptseite
        response = requests.get("http://localhost:5000", timeout=5)
        if response.status_code == 200:
            log_message("✅ Flask-Anwendung läuft")
        else:
            log_message(f"⚠️ Flask-Anwendung antwortet mit Status {response.status_code}")

    except requests.exceptions.ConnectionError:
        log_message("❌ Flask-Anwendung nicht erreichbar", "ERROR")
        log_message("  Starte die Anwendung mit: python app.py", "INFO")
    except Exception as e:
        log_message(f"❌ Fehler beim Testen der Flask-Anwendung: {str(e)}", "ERROR")

def test_threading_timeout():
    """Teste die Threading-basierte Timeout-Implementierung"""
    log_message("Teste Threading-Timeout-Implementierung...")

    def test_function():
        """Simuliere eine langsame Datenbankabfrage"""
        time.sleep(2)
        return "Erfolgreich"

    try:
        result = None
        error_occurred = False

        def run_test():
            nonlocal result, error_occurred
            try:
                result = test_function()
            except Exception as e:
                log_message(f"Fehler in Test-Thread: {str(e)}", "ERROR")
                error_occurred = True

        # Starte Test in separatem Thread
        thread = threading.Thread(target=run_test)
        thread.daemon = True
        thread.start()
        thread.join(timeout=3)  # 3 Sekunden Timeout

        if thread.is_alive() or error_occurred or result is None:
            log_message("❌ Threading-Timeout-Test fehlgeschlagen", "ERROR")
        else:
            log_message("✅ Threading-Timeout-Implementierung funktioniert")

    except Exception as e:
        log_message(f"❌ Fehler beim Threading-Test: {str(e)}", "ERROR")

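# Verallgemeinerte Skizze des oben getesteten Threading-Timeout-Musters
# (Annahme: reine Illustration, kein Bestandteil der Anwendung). Unter Windows
# fehlt signal.SIGALRM, daher wird der Timeout über einen Daemon-Thread gelöst.
def run_with_timeout(func, timeout_seconds):
    """Führt func aus; liefert (True, Ergebnis) oder (False, Fehler bzw. None)."""
    outcome = {}

    def worker():
        try:
            outcome['value'] = func()
        except Exception as e:
            outcome['error'] = e

    t = threading.Thread(target=worker, daemon=True)
    t.start()
    t.join(timeout=timeout_seconds)

    if t.is_alive() or 'error' in outcome:
        return False, outcome.get('error')
    return True, outcome.get('value')
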
def check_system_requirements():
    """Prüfe Systemanforderungen"""
    log_message("Prüfe Systemanforderungen...")

    # Python-Version
    python_version = sys.version_info
    log_message(f"Python-Version: {python_version.major}.{python_version.minor}.{python_version.micro}")

    if python_version >= (3, 7):
        log_message("✅ Python-Version ist kompatibel")
    else:
        log_message("❌ Python 3.7+ erforderlich", "ERROR")

    # Erforderliche Module
    required_modules = ['flask', 'requests', 'sqlite3', 'threading']

    for module in required_modules:
        try:
            __import__(module)
            log_message(f"✅ Modul {module} verfügbar")
        except ImportError:
            log_message(f"❌ Modul {module} nicht verfügbar", "ERROR")

    # Betriebssystem
    os_name = platform.system()
    log_message(f"Betriebssystem: {os_name}")

    if os_name == "Windows":
        log_message("✅ Windows-spezifische Fixes wurden angewendet")
    else:
        log_message("ℹ️ Unix-basiertes System erkannt")

def run_comprehensive_test():
    """Run all tests"""
    log_message("=== MYP Druckerverwaltung - Diagnose-Tool ===")
    log_message("Starte umfassende Systemdiagnose...")
    print()

    # Check the system requirements
    check_system_requirements()
    print()

    # Threading test
    test_threading_timeout()
    print()

    # Database test
    test_database_connection()
    print()

    # Flask app test
    test_flask_app_status()
    print()

    # API tests
    test_api_endpoints()
    print()

    # Network tests
    test_network_connectivity()
    print()

    # Test Tapo connections
    test_tapo_connections()
    print()

    log_message("=== Diagnose abgeschlossen ===")
    print()

    # Recommendations
    log_message("📋 Empfehlungen:")
    log_message("1. Stelle sicher, dass die Flask-Anwendung läuft: python app.py")
    log_message("2. Prüfe die Datenbankverbindung und Drucker-Konfiguration")
    log_message("3. Teste die Netzwerkverbindung zu den Druckern")
    log_message("4. Bei Windows: Threading-basierte Timeouts wurden implementiert")
    log_message("5. Überprüfe die Logs in logs/app/ für weitere Details")

if __name__ == "__main__":
    try:
        run_comprehensive_test()
    except KeyboardInterrupt:
        log_message("Diagnose durch Benutzer abgebrochen", "INFO")
    except Exception as e:
        log_message(f"Unerwarteter Fehler: {str(e)}", "ERROR")
        import traceback
        traceback.print_exc()
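The threading-based timeout in test_threading_timeout() exists because signal.SIGALRM is unavailable on Windows. The pattern generalizes into a small reusable helper; a minimal sketch using only the standard library (the name run_with_timeout is illustrative, not part of this codebase):

import threading
from typing import Any, Callable, Optional

def run_with_timeout(func: Callable[..., Any], timeout: float, *args, **kwargs) -> Optional[Any]:
    """Run func in a daemon thread; return its result, or None if the timeout expires."""
    result = [None]
    error = [None]

    def target():
        try:
            result[0] = func(*args, **kwargs)
        except Exception as e:
            error[0] = e  # surface the worker's exception to the caller

    thread = threading.Thread(target=target, daemon=True)
    thread.start()
    thread.join(timeout=timeout)

    if thread.is_alive():
        return None  # timed out; the abandoned daemon thread dies with the process
    if error[0] is not None:
        raise error[0]
    return result[0]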
80
backend/app - Kopie/utils/debug_guest_requests.py
Normal file
@@ -0,0 +1,80 @@
#!/usr/bin/env python3
"""
Debug script for guest requests and admin permissions
"""

from models import get_cached_session, GuestRequest, User, UserPermission

def check_guest_requests():
    """Check guest requests by status"""
    print("=== GASTANTRÄGE STATUS ===")

    with get_cached_session() as db:
        pending = db.query(GuestRequest).filter_by(status='pending').count()
        approved = db.query(GuestRequest).filter_by(status='approved').count()
        rejected = db.query(GuestRequest).filter_by(status='rejected').count()
        total = db.query(GuestRequest).count()

        print(f"Gesamt: {total}")
        print(f"Pending (Wird geprüft): {pending}")
        print(f"Approved (Genehmigt): {approved}")
        print(f"Rejected (Abgelehnt): {rejected}")

        if pending == 0:
            print("\n⚠️ PROBLEM: Keine Anträge mit Status 'pending' gefunden!")
            print("   → Die Genehmigen/Ablehnen-Buttons werden nur bei Status 'pending' angezeigt")

            # Create a test request
            print("\n🔧 Erstelle Test-Gastantrag...")
            test_request = GuestRequest(
                name="Test Admin",
                email="admin@test.de",
                reason="Test für Admin-Buttons",
                duration_min=30,
                status="pending"
            )
            db.add(test_request)
            db.commit()
            print(f"✅ Test-Antrag erstellt (ID: {test_request.id})")
        else:
            print(f"\n✅ {pending} Anträge mit Status 'pending' gefunden")

            # Show the pending requests
            pending_requests = db.query(GuestRequest).filter_by(status='pending').all()
            for req in pending_requests:
                print(f"   ID {req.id}: {req.name} - {req.email}")

def check_admin_users():
    """Check admin users and their permissions"""
    print("\n=== ADMIN-BENUTZER ===")

    with get_cached_session() as db:
        # All admins
        admins = db.query(User).filter_by(is_admin=True).all()
        print(f"Admin-Benutzer: {len(admins)}")
        for admin in admins:
            print(f"   {admin.username} (ID: {admin.id}) - Email: {admin.email}")

        # Users with can_approve_jobs
        users_with_approval = db.query(User).join(UserPermission).filter(
            UserPermission.can_approve_jobs == True
        ).all()
        print(f"\nBenutzer mit can_approve_jobs: {len(users_with_approval)}")
        for user in users_with_approval:
            print(f"   {user.username} (ID: {user.id}) - Email: {user.email}")

if __name__ == "__main__":
    try:
        check_guest_requests()
        check_admin_users()
        print("\n=== LÖSUNG ===")
        print("1. Gehen Sie zu: http://127.0.0.1:5000/requests/overview")
        print("2. Öffnen Sie die Browser-Konsole (F12)")
        print("3. Suchen Sie nach 'Admin-Berechtigungen:' in der Konsole")
        print("4. Die Buttons sollten bei Anträgen mit Status 'pending' erscheinen")

    except Exception as e:
        print(f"❌ Fehler: {e}")
        import traceback
        traceback.print_exc()
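The get_cached_session() context manager used above is assumed to hand out a SQLAlchemy session and clean it up on exit; since the script commits explicitly, the sketch below leaves committing to the caller. A minimal sketch of the pattern, with SessionLocal as a stand-in for whatever factory models.py actually binds:

from contextlib import contextmanager
from sqlalchemy.orm import sessionmaker

SessionLocal = sessionmaker()  # stand-in; models.py would bind this to its engine

@contextmanager
def get_cached_session():
    """Yield a session; roll back on error and always close it afterwards."""
    session = SessionLocal()
    try:
        yield session
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()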
199
backend/app - Kopie/utils/debug_login.py
Normal file
@@ -0,0 +1,199 @@
#!/usr/bin/env python3.11
"""
Debug script for login problems
Checks the admin user and password hashing
"""

import os
import sys

# Set the path for imports
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from models import get_db_session, User, create_initial_admin
import bcrypt

def debug_admin_user():
    """Checks the admin user in the database"""
    print("=== DEBUG: Admin-Benutzer Analyse ===")

    try:
        db_session = get_db_session()

        # Show all users
        users = db_session.query(User).all()
        print(f"\n📊 Gefundene Benutzer: {len(users)}")

        for user in users:
            print(f"\n👤 Benutzer ID: {user.id}")
            print(f"   Email: {user.email}")
            print(f"   Username: {user.username}")
            print(f"   Name: {user.name}")
            print(f"   Role: {user.role}")
            print(f"   Is Admin: {user.is_admin}")
            print(f"   Active: {user.active}")
            print(f"   Password Hash: {user.password_hash[:20]}...")
            print(f"   Created: {user.created_at}")

        # Check the admin user specifically
        admin_email = "admin@mercedes-benz.com"
        admin_username = "admin"

        print("\n🔍 Suche nach Admin-Benutzer:")
        print(f"   Email: {admin_email}")
        print(f"   Username: {admin_username}")

        # Search by e-mail
        admin_by_email = db_session.query(User).filter(User.email == admin_email).first()
        if admin_by_email:
            print(f"✅ Admin gefunden per E-Mail: {admin_by_email.email}")
        else:
            print(f"❌ Kein Admin mit E-Mail {admin_email} gefunden")

        # Search by username
        admin_by_username = db_session.query(User).filter(User.username == admin_username).first()
        if admin_by_username:
            print(f"✅ Admin gefunden per Username: {admin_by_username.username}")
        else:
            print(f"❌ Kein Admin mit Username {admin_username} gefunden")

        db_session.close()

        return admin_by_email or admin_by_username

    except Exception as e:
        print(f"❌ Fehler beim Datenbankzugriff: {str(e)}")
        return None

def test_password_verification(user, test_password="744563017196A"):
    """Tests the password verification"""
    print("\n=== DEBUG: Passwort-Test ===")
    print(f"Test-Passwort: {test_password}")

    if not user:
        print("❌ Kein Benutzer für Passwort-Test vorhanden")
        return False

    try:
        # Manual bcrypt test
        password_bytes = test_password.encode('utf-8')
        hash_bytes = user.password_hash.encode('utf-8')

        print(f"Password Bytes: {password_bytes}")
        print(f"Hash (first 50 chars): {user.password_hash[:50]}")

        # Test with bcrypt
        is_valid_bcrypt = bcrypt.checkpw(password_bytes, hash_bytes)
        print(f"✅ bcrypt.checkpw() Ergebnis: {is_valid_bcrypt}")

        # Test with the User method
        is_valid_user_method = user.check_password(test_password)
        print(f"✅ user.check_password() Ergebnis: {is_valid_user_method}")

        return is_valid_bcrypt and is_valid_user_method

    except Exception as e:
        print(f"❌ Fehler beim Passwort-Test: {str(e)}")
        return False

def recreate_admin():
    """Recreates the admin user"""
    print("\n=== DEBUG: Admin-Benutzer neu erstellen ===")

    try:
        success = create_initial_admin(
            email="admin@mercedes-benz.com",
            password="744563017196A",
            name="System Administrator",
            username="admin"
        )

        if success:
            print("✅ Admin-Benutzer erfolgreich erstellt/aktualisiert")
        else:
            print("❌ Fehler beim Erstellen des Admin-Benutzers")

        return success

    except Exception as e:
        print(f"❌ Fehler beim Erstellen des Admins: {str(e)}")
        return False

def test_login_credentials():
    """Tests various login combinations"""
    print("\n=== DEBUG: Login-Kombinationen testen ===")

    test_combinations = [
        ("admin@mercedes-benz.com", "744563017196A"),
        ("admin", "744563017196A"),
    ]

    db_session = get_db_session()

    for email_or_username, password in test_combinations:
        print(f"\n🔍 Teste: {email_or_username} / {password}")

        # Simulate the login logic from app.py
        user = db_session.query(User).filter(
            (User.username == email_or_username) | (User.email == email_or_username)
        ).first()

        if user:
            print(f"✅ Benutzer gefunden: {user.email} ({user.username})")

            if user.check_password(password):
                print("✅ Passwort korrekt!")
                print(f"✅ Login wäre erfolgreich für: {user.email}")
            else:
                print("❌ Passwort falsch!")
        else:
            print(f"❌ Kein Benutzer mit {email_or_username} gefunden")

    db_session.close()

def check_rate_limiting():
    """Checks the rate-limiting status"""
    print("\n=== DEBUG: Rate Limiting Status ===")

    # Simulated localStorage values (these normally live in the browser)
    # In a real application they would come from the database or a cache
    print("ℹ️ Rate Limiting wird client-seitig im localStorage verwaltet")
    print("ℹ️ Überprüfen Sie Ihren Browser-localStorage:")
    print("   - loginAttempts: sollte < 5 sein")
    print("   - lastAttemptTime: Zeit des letzten Versuchs")
    print("\n💡 Tipp: Öffnen Sie Entwicklertools > Application > Local Storage")
    print("   und löschen Sie 'loginAttempts' und 'lastAttemptTime' Einträge")

if __name__ == "__main__":
    print("🚀 MYP Login Debug-Tool gestartet")
    print("=" * 50)

    # 1. Check the admin user
    admin_user = debug_admin_user()

    # 2. Test password verification
    if admin_user:
        test_password_verification(admin_user)

    # 3. Recreate the admin if missing
    if not admin_user:
        print("\n⚠️ Kein Admin gefunden - erstelle neuen Admin...")
        recreate_admin()
        admin_user = debug_admin_user()
        if admin_user:
            test_password_verification(admin_user)

    # 4. Test login combinations
    test_login_credentials()

    # 5. Check rate limiting
    check_rate_limiting()

    print("\n" + "=" * 50)
    print("🎯 Debug abgeschlossen!")
    print("\n💡 Lösungsvorschläge:")
    print("1. Verwenden Sie admin@mercedes-benz.com + 744563017196A")
    print("2. Oder verwenden Sie admin + 744563017196A")
    print("3. Löschen Sie Rate-Limiting im Browser localStorage")
    print("4. Prüfen Sie die Browser-Konsole auf JavaScript-Fehler")
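For reference, the bcrypt verification exercised by this script pairs with hashing as follows; a short self-contained sketch using the bcrypt package, independent of this repository's models:

import bcrypt

password = "744563017196A".encode("utf-8")

# Hashing: gensalt() embeds the salt and cost factor in the hash itself
hashed = bcrypt.hashpw(password, bcrypt.gensalt())

# Verification: checkpw() re-hashes with the embedded salt and compares
assert bcrypt.checkpw(password, hashed)
assert not bcrypt.checkpw(b"wrong", hashed)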
392
backend/app - Kopie/utils/debug_utils.py
Normal file
@@ -0,0 +1,392 @@
"""
|
||||
Debug-Utilities für die MYP-Anwendung
|
||||
Hilft bei der Diagnose und Behebung von Problemen in der Anwendung
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import traceback
|
||||
import inspect
|
||||
from datetime import datetime
|
||||
from functools import wraps
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union, Callable
|
||||
|
||||
from utils.logging_config import get_logger
|
||||
|
||||
# Logger für dieses Modul erstellen
|
||||
debug_logger = get_logger("app")
|
||||
|
||||
# Konstanten für Formatierung
|
||||
DEBUG_SEPARATOR = "=" * 60
|
||||
DEBUG_SUBSEPARATOR = "-" * 60
|
||||
|
||||
class DebugLevel:
|
||||
"""Enum für Debug-Level"""
|
||||
MINIMAL = 0 # Nur kritische Fehler
|
||||
NORMAL = 1 # Standardfehler und wichtige Informationen
|
||||
VERBOSE = 2 # Ausführliche Informationen
|
||||
TRACE = 3 # Vollständige Trace-Informationen
|
||||
|
||||
# Aktuelles Debug-Level (kann zur Laufzeit geändert werden)
|
||||
CURRENT_DEBUG_LEVEL = DebugLevel.NORMAL
|
||||
|
||||
def set_debug_level(level: int):
|
||||
"""Setzt das aktuelle Debug-Level für die Anwendung"""
|
||||
global CURRENT_DEBUG_LEVEL
|
||||
CURRENT_DEBUG_LEVEL = level
|
||||
debug_logger.info(f"🔧 Debug-Level gesetzt auf: {level}")
|
||||
|
||||
def debug_print(message: str, level: int = DebugLevel.NORMAL):
|
||||
"""
|
||||
Gibt eine Debug-Nachricht aus, wenn das aktuelle Debug-Level mindestens dem angegebenen entspricht.
|
||||
|
||||
Args:
|
||||
message: Die auszugebende Nachricht
|
||||
level: Das erforderliche Debug-Level
|
||||
"""
|
||||
if level <= CURRENT_DEBUG_LEVEL:
|
||||
# Aktuelle Funktion und Zeilennummer ermitteln
|
||||
frame = inspect.currentframe().f_back
|
||||
func_name = frame.f_code.co_name
|
||||
file_name = os.path.basename(frame.f_code.co_filename)
|
||||
line_no = frame.f_lineno
|
||||
|
||||
# Debug-Ausgabe formatieren
|
||||
timestamp = datetime.now().strftime('%H:%M:%S.%f')[:-3]
|
||||
debug_prefix = f"[DEBUG {timestamp} {file_name}:{func_name}:{line_no}]"
|
||||
|
||||
# Verschiedene Levels mit unterschiedlichen Emojis markieren
|
||||
level_emoji = "🐞" if level >= DebugLevel.VERBOSE else "🔍"
|
||||
|
||||
# Ausgabe
|
||||
print(f"{level_emoji} {debug_prefix} {message}")
|
||||
|
||||
def debug_dump(obj: Any, name: str = "Object", level: int = DebugLevel.VERBOSE):
|
||||
"""
|
||||
Gibt den Inhalt eines Objekts für Debug-Zwecke aus.
|
||||
|
||||
Args:
|
||||
obj: Das zu untersuchende Objekt
|
||||
name: Name des Objekts für die Ausgabe
|
||||
level: Das erforderliche Debug-Level
|
||||
"""
|
||||
if level > CURRENT_DEBUG_LEVEL:
|
||||
return
|
||||
|
||||
debug_print(f"📦 Debug-Dump von {name}:", level)
|
||||
|
||||
try:
|
||||
# Für dict-ähnliche Objekte
|
||||
if hasattr(obj, 'items'):
|
||||
for k, v in obj.items():
|
||||
debug_print(f" {k}: {v}", level)
|
||||
# Für list/tuple-ähnliche Objekte
|
||||
elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes)):
|
||||
for i, item in enumerate(obj):
|
||||
debug_print(f" [{i}]: {item}", level)
|
||||
# Für einfache Objekte
|
||||
else:
|
||||
# Versuche als JSON zu formatieren
|
||||
try:
|
||||
json_str = json.dumps(obj, indent=2, default=str)
|
||||
debug_print(f" {json_str}", level)
|
||||
except:
|
||||
# Fallback auf einfache String-Darstellung
|
||||
debug_print(f" {obj}", level)
|
||||
except Exception as e:
|
||||
debug_print(f" Fehler beim Dump: {e}", level)
|
||||
|
||||
def debug_trace(message: str = "Execution trace"):
|
||||
"""
|
||||
Gibt einen vollständigen Stack-Trace für Debug-Zwecke aus.
|
||||
|
||||
Args:
|
||||
message: Begleitende Nachricht für den Trace
|
||||
"""
|
||||
if CURRENT_DEBUG_LEVEL < DebugLevel.TRACE:
|
||||
return
|
||||
|
||||
debug_print(f"🔬 TRACE: {message}", DebugLevel.TRACE)
|
||||
debug_print(DEBUG_SUBSEPARATOR, DebugLevel.TRACE)
|
||||
|
||||
# Stack-Trace sammeln
|
||||
stack = traceback.extract_stack()
|
||||
# Letzten Frame (diese Funktion) entfernen
|
||||
stack = stack[:-1]
|
||||
|
||||
for frame in stack:
|
||||
file_name = os.path.basename(frame.filename)
|
||||
debug_print(f" {file_name}:{frame.lineno} - {frame.name}", DebugLevel.TRACE)
|
||||
|
||||
debug_print(DEBUG_SUBSEPARATOR, DebugLevel.TRACE)
|
||||
|
||||
def debug_function(func=None, level: int = DebugLevel.NORMAL):
|
||||
"""
|
||||
Dekorator, der Eingang und Ausgang einer Funktion sowie die Ausführungszeit loggt.
|
||||
|
||||
Args:
|
||||
func: Die zu dekorierende Funktion
|
||||
level: Das erforderliche Debug-Level
|
||||
|
||||
Returns:
|
||||
Dekorierte Funktion
|
||||
"""
|
||||
def decorator(fn):
|
||||
@wraps(fn)
|
||||
def wrapper(*args, **kwargs):
|
||||
if CURRENT_DEBUG_LEVEL < level:
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
# Funktionsaufruf loggen
|
||||
arg_str = ", ".join([
|
||||
*[str(arg) for arg in args],
|
||||
*[f"{k}={v}" for k, v in kwargs.items()]
|
||||
])
|
||||
if len(arg_str) > 100:
|
||||
arg_str = arg_str[:97] + "..."
|
||||
|
||||
debug_print(f"▶️ Starte {fn.__name__}({arg_str})", level)
|
||||
|
||||
# Ausführungszeit messen
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Funktion ausführen
|
||||
result = fn(*args, **kwargs)
|
||||
|
||||
# Ausführungszeit und Ergebnis loggen
|
||||
end_time = time.time()
|
||||
duration = (end_time - start_time) * 1000
|
||||
|
||||
result_str = str(result)
|
||||
if len(result_str) > 100:
|
||||
result_str = result_str[:97] + "..."
|
||||
|
||||
duration_emoji = "⏱️" if duration < 1000 else "⏳"
|
||||
debug_print(f"{duration_emoji} {fn.__name__} beendet in {duration:.2f} ms", level)
|
||||
debug_print(f"📤 Ergebnis: {result_str}", level)
|
||||
|
||||
return result
|
||||
except Exception as e:
|
||||
# Fehler loggen
|
||||
end_time = time.time()
|
||||
duration = (end_time - start_time) * 1000
|
||||
|
||||
debug_print(f"❌ {fn.__name__} fehlgeschlagen nach {duration:.2f} ms: {str(e)}", level)
|
||||
|
||||
# Stack-Trace nur bei hohem Debug-Level
|
||||
if CURRENT_DEBUG_LEVEL >= DebugLevel.VERBOSE:
|
||||
debug_print(f"🔬 Stack-Trace für {fn.__name__}:", DebugLevel.VERBOSE)
|
||||
traceback_str = traceback.format_exc()
|
||||
for line in traceback_str.split('\n'):
|
||||
debug_print(f" {line}", DebugLevel.VERBOSE)
|
||||
|
||||
# Exception weiterleiten
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
|
||||
if func:
|
||||
return decorator(func)
|
||||
return decorator
|
||||
|
||||
def debug_timer(name: str = None, level: int = DebugLevel.NORMAL):
|
||||
"""
|
||||
Kontext-Manager, der die Ausführungszeit eines Code-Blocks misst.
|
||||
|
||||
Args:
|
||||
name: Name des Code-Blocks für die Ausgabe
|
||||
level: Das erforderliche Debug-Level
|
||||
|
||||
Beispiel:
|
||||
with debug_timer("Datenbankabfrage"):
|
||||
result = db.execute_query()
|
||||
"""
|
||||
class Timer:
|
||||
def __init__(self, block_name, debug_level):
|
||||
self.block_name = block_name
|
||||
self.debug_level = debug_level
|
||||
self.start_time = None
|
||||
|
||||
def __enter__(self):
|
||||
if CURRENT_DEBUG_LEVEL >= self.debug_level:
|
||||
self.start_time = time.time()
|
||||
block_name = self.block_name or "Code-Block"
|
||||
debug_print(f"⏱️ Starte Timer für: {block_name}", self.debug_level)
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if CURRENT_DEBUG_LEVEL >= self.debug_level and self.start_time:
|
||||
end_time = time.time()
|
||||
duration = (end_time - self.start_time) * 1000
|
||||
block_name = self.block_name or "Code-Block"
|
||||
|
||||
if exc_type:
|
||||
debug_print(f"❌ {block_name} fehlgeschlagen nach {duration:.2f} ms: {exc_val}", self.debug_level)
|
||||
else:
|
||||
duration_emoji = "⏱️" if duration < 1000 else "⏳"
|
||||
debug_print(f"{duration_emoji} {block_name} beendet in {duration:.2f} ms", self.debug_level)
|
||||
|
||||
return Timer(name, level)
|
||||
|
||||
def debug_exception_handler(logger: Optional[logging.Logger] = None):
|
||||
"""
|
||||
Dekorator, der Ausnahmen abfängt und Details loggt.
|
||||
|
||||
Args:
|
||||
logger: Logger-Instanz für die Protokollierung (optional)
|
||||
|
||||
Returns:
|
||||
Dekorierte Funktion
|
||||
"""
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
# Logger verwenden oder Fallback auf Standardausgabe
|
||||
log = logger or debug_logger
|
||||
|
||||
# Ausnahmedetails loggen
|
||||
log.error(f"❌ Ausnahme in {func.__name__}: {str(e)}")
|
||||
|
||||
# Stack-Trace bei hohem Debug-Level
|
||||
if CURRENT_DEBUG_LEVEL >= DebugLevel.VERBOSE:
|
||||
log.error("🔬 Stack-Trace:")
|
||||
traceback_str = traceback.format_exc()
|
||||
for line in traceback_str.split('\n'):
|
||||
if line.strip():
|
||||
log.error(f" {line}")
|
||||
|
||||
# Ausnahme weiterleiten
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
# Konsolen-Befehle für interaktives Debugging
|
||||
|
||||
def dump_all_loggers():
|
||||
"""Gibt Informationen über alle konfigurierten Logger aus."""
|
||||
import logging
|
||||
|
||||
debug_print("📋 Konfigurierte Logger:", DebugLevel.VERBOSE)
|
||||
for name, logger in logging.Logger.manager.loggerDict.items():
|
||||
if isinstance(logger, logging.Logger):
|
||||
level_name = logging.getLevelName(logger.level)
|
||||
handlers = len(logger.handlers)
|
||||
debug_print(f" {name}: Level={level_name}, Handlers={handlers}", DebugLevel.VERBOSE)
|
||||
|
||||
def dump_environment():
|
||||
"""Gibt Umgebungsvariablen und Systeminformationen aus."""
|
||||
debug_print("🌐 Umgebungsinformationen:", DebugLevel.VERBOSE)
|
||||
debug_print(f" Python: {sys.version}", DebugLevel.VERBOSE)
|
||||
debug_print(f" Plattform: {sys.platform}", DebugLevel.VERBOSE)
|
||||
debug_print(f" Arbeitsverzeichnis: {os.getcwd()}", DebugLevel.VERBOSE)
|
||||
|
||||
debug_print("🔑 Umgebungsvariablen:", DebugLevel.VERBOSE)
|
||||
for key, value in sorted(os.environ.items()):
|
||||
# Passwörter und Secrets ausblenden
|
||||
if any(secret_key in key.lower() for secret_key in ['key', 'pass', 'secret', 'token', 'pwd']):
|
||||
value = "********"
|
||||
debug_print(f" {key}={value}", DebugLevel.VERBOSE)
|
||||
|
||||
def memory_usage(obj: Any = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Gibt Informationen über den Speicherverbrauch zurück.
|
||||
|
||||
Args:
|
||||
obj: Optional ein Objekt, dessen Größe gemessen werden soll
|
||||
|
||||
Returns:
|
||||
Dict mit Speicherverbrauchsinformationen
|
||||
"""
|
||||
import psutil
|
||||
import sys
|
||||
|
||||
process = psutil.Process(os.getpid())
|
||||
memory_info = process.memory_info()
|
||||
|
||||
result = {
|
||||
"rss": memory_info.rss / (1024 * 1024), # MB
|
||||
"vms": memory_info.vms / (1024 * 1024), # MB
|
||||
"percent": process.memory_percent(),
|
||||
}
|
||||
|
||||
if obj is not None:
|
||||
try:
|
||||
import sys
|
||||
result["object_size"] = sys.getsizeof(obj) / 1024 # KB
|
||||
except:
|
||||
result["object_size"] = "Nicht messbar"
|
||||
|
||||
return result
|
||||
|
||||
def log_memory_usage(obj_name: str = "Anwendung", obj: Any = None, logger: Optional[logging.Logger] = None):
|
||||
"""
|
||||
Loggt den aktuellen Speicherverbrauch.
|
||||
|
||||
Args:
|
||||
obj_name: Name des Objekts oder der Anwendung
|
||||
obj: Optional ein Objekt, dessen Größe gemessen werden soll
|
||||
logger: Logger-Instanz für die Protokollierung (optional)
|
||||
"""
|
||||
log = logger or debug_logger
|
||||
memory = memory_usage(obj)
|
||||
|
||||
log.info(f"📊 Speicherverbrauch von {obj_name}:")
|
||||
log.info(f" RSS: {memory['rss']:.2f} MB")
|
||||
log.info(f" VMS: {memory['vms']:.2f} MB")
|
||||
log.info(f" Prozent: {memory['percent']:.2f}%")
|
||||
|
||||
if 'object_size' in memory:
|
||||
if isinstance(memory['object_size'], (int, float)):
|
||||
log.info(f" Objektgröße: {memory['object_size']:.2f} KB")
|
||||
else:
|
||||
log.info(f" Objektgröße: {memory['object_size']}")
|
||||
|
||||
def profile_function(func):
|
||||
"""
|
||||
Dekorator, der eine Funktion profiliert und Statistiken ausgibt.
|
||||
|
||||
Args:
|
||||
func: Die zu profilierende Funktion
|
||||
|
||||
Returns:
|
||||
Dekorierte Funktion
|
||||
"""
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
import cProfile
|
||||
import pstats
|
||||
import io
|
||||
|
||||
# Profiler erstellen und Funktion ausführen
|
||||
profiler = cProfile.Profile()
|
||||
profiler.enable()
|
||||
result = func(*args, **kwargs)
|
||||
profiler.disable()
|
||||
|
||||
# Statistiken sammeln
|
||||
s = io.StringIO()
|
||||
ps = pstats.Stats(profiler, stream=s).sort_stats('cumulative')
|
||||
ps.print_stats(20) # Top 20 Zeilen
|
||||
|
||||
# Statistiken ausgeben
|
||||
debug_print(f"📊 Profiling-Ergebnis für {func.__name__}:", DebugLevel.VERBOSE)
|
||||
for line in s.getvalue().split('\n'):
|
||||
if line.strip():
|
||||
debug_print(f" {line}", DebugLevel.VERBOSE)
|
||||
|
||||
return result
|
||||
except ImportError:
|
||||
debug_print(f"⚠️ cProfile nicht verfügbar, Funktion wird ohne Profiling ausgeführt", DebugLevel.NORMAL)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
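A short usage sketch tying the utilities above together; slow_add is an illustrative function, everything else comes from the module itself:

from utils.debug_utils import (DebugLevel, debug_function, debug_timer,
                               set_debug_level)

set_debug_level(DebugLevel.VERBOSE)

@debug_function(level=DebugLevel.NORMAL)
def slow_add(a: int, b: int) -> int:
    return a + b

# Logs entry, result, and duration of the decorated call,
# plus the timing of the surrounding block
with debug_timer("Beispielblock"):
    slow_add(1, 2)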
414
backend/app - Kopie/utils/file_manager.py
Normal file
@@ -0,0 +1,414 @@
"""
|
||||
Mercedes-Benz MYP - Datei-Management-System
|
||||
Organisierte Speicherung von hochgeladenen Dateien mit Verzeichniskonventionen
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from werkzeug.utils import secure_filename
|
||||
from typing import Optional, Tuple, Dict, List
|
||||
from config.settings import UPLOAD_FOLDER, ALLOWED_EXTENSIONS
|
||||
|
||||
class FileManager:
|
||||
"""
|
||||
Zentrales Datei-Management-System für die MYP-Platform
|
||||
Organisiert Uploads in strukturierte Unterverzeichnisse
|
||||
"""
|
||||
|
||||
# Verzeichniskonventionen
|
||||
DIRECTORIES = {
|
||||
'jobs': 'jobs', # Druckjob-Dateien
|
||||
'guests': 'guests', # Gastauftrags-Dateien
|
||||
'avatars': 'avatars', # Benutzer-Avatare
|
||||
'temp': 'temp', # Temporäre Dateien
|
||||
'backups': 'backups', # Backup-Dateien
|
||||
'logs': 'logs', # Exportierte Logs
|
||||
'assets': 'assets' # Statische Assets
|
||||
}
|
||||
|
||||
def __init__(self, base_upload_folder: str = UPLOAD_FOLDER):
|
||||
"""
|
||||
Initialisiert den FileManager
|
||||
|
||||
Args:
|
||||
base_upload_folder: Basis-Upload-Verzeichnis
|
||||
"""
|
||||
self.base_folder = base_upload_folder
|
||||
self.ensure_directories()
|
||||
|
||||
def ensure_directories(self) -> None:
|
||||
"""Erstellt alle erforderlichen Verzeichnisse"""
|
||||
try:
|
||||
# Basis-Upload-Ordner erstellen
|
||||
os.makedirs(self.base_folder, exist_ok=True)
|
||||
|
||||
# Alle Unterverzeichnisse erstellen
|
||||
for category, subdir in self.DIRECTORIES.items():
|
||||
dir_path = os.path.join(self.base_folder, subdir)
|
||||
os.makedirs(dir_path, exist_ok=True)
|
||||
|
||||
# Jahres-/Monatsverzeichnisse für organisierte Speicherung
|
||||
current_date = datetime.now()
|
||||
year_dir = os.path.join(dir_path, str(current_date.year))
|
||||
month_dir = os.path.join(year_dir, f"{current_date.month:02d}")
|
||||
|
||||
os.makedirs(year_dir, exist_ok=True)
|
||||
os.makedirs(month_dir, exist_ok=True)
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Erstellen der Verzeichnisse: {e}")
|
||||
|
||||
def allowed_file(self, filename: str) -> bool:
|
||||
"""
|
||||
Prüft, ob eine Datei erlaubt ist
|
||||
|
||||
Args:
|
||||
filename: Name der Datei
|
||||
|
||||
Returns:
|
||||
bool: True wenn erlaubt
|
||||
"""
|
||||
if '.' not in filename:
|
||||
return False
|
||||
|
||||
extension = filename.rsplit('.', 1)[1].lower()
|
||||
return extension in ALLOWED_EXTENSIONS
|
||||
|
||||
def generate_unique_filename(self, original_filename: str, prefix: str = "") -> str:
|
||||
"""
|
||||
Generiert einen eindeutigen Dateinamen
|
||||
|
||||
Args:
|
||||
original_filename: Ursprünglicher Dateiname
|
||||
prefix: Optionaler Präfix
|
||||
|
||||
Returns:
|
||||
str: Eindeutiger Dateiname
|
||||
"""
|
||||
# Dateiname sicher machen
|
||||
secure_name = secure_filename(original_filename)
|
||||
|
||||
# Timestamp hinzufügen für Eindeutigkeit
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
|
||||
# Dateiname und Erweiterung trennen
|
||||
if '.' in secure_name:
|
||||
name, ext = secure_name.rsplit('.', 1)
|
||||
if prefix:
|
||||
unique_name = f"{prefix}_{name}_{timestamp}.{ext}"
|
||||
else:
|
||||
unique_name = f"{name}_{timestamp}.{ext}"
|
||||
else:
|
||||
if prefix:
|
||||
unique_name = f"{prefix}_{secure_name}_{timestamp}"
|
||||
else:
|
||||
unique_name = f"{secure_name}_{timestamp}"
|
||||
|
||||
return unique_name
|
||||
|
||||
def save_file(self, file, category: str, user_id: int = None,
|
||||
prefix: str = "", metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""
|
||||
Speichert eine Datei in der organisierten Struktur
|
||||
|
||||
Args:
|
||||
file: Werkzeug FileStorage Objekt
|
||||
category: Kategorie (jobs, guests, avatars, etc.)
|
||||
user_id: Benutzer-ID für Pfad-Organisation
|
||||
prefix: Dateiname-Präfix
|
||||
metadata: Zusätzliche Metadaten
|
||||
|
||||
Returns:
|
||||
Tuple[str, str, Dict]: (relativer_pfad, absoluter_pfad, metadaten) oder None bei Fehler
|
||||
"""
|
||||
try:
|
||||
if not file or not file.filename:
|
||||
return None
|
||||
|
||||
if not self.allowed_file(file.filename):
|
||||
raise ValueError(f"Dateityp nicht erlaubt: {file.filename}")
|
||||
|
||||
if category not in self.DIRECTORIES:
|
||||
raise ValueError(f"Unbekannte Kategorie: {category}")
|
||||
|
||||
# Verzeichnisstruktur aufbauen
|
||||
current_date = datetime.now()
|
||||
category_dir = self.DIRECTORIES[category]
|
||||
year_dir = str(current_date.year)
|
||||
month_dir = f"{current_date.month:02d}"
|
||||
|
||||
# Benutzer-spezifischen Unterordner hinzufügen wenn user_id vorhanden
|
||||
if user_id:
|
||||
relative_dir = os.path.join(category_dir, year_dir, month_dir, f"user_{user_id}")
|
||||
else:
|
||||
relative_dir = os.path.join(category_dir, year_dir, month_dir)
|
||||
|
||||
# Vollständigen Pfad erstellen
|
||||
full_dir = os.path.join(self.base_folder, relative_dir)
|
||||
os.makedirs(full_dir, exist_ok=True)
|
||||
|
||||
# Eindeutigen Dateinamen generieren
|
||||
unique_filename = self.generate_unique_filename(file.filename, prefix)
|
||||
|
||||
# Pfade definieren
|
||||
relative_path = os.path.join(relative_dir, unique_filename).replace('\\', '/')
|
||||
absolute_path = os.path.join(full_dir, unique_filename)
|
||||
|
||||
# Datei speichern
|
||||
file.save(absolute_path)
|
||||
|
||||
# Metadaten sammeln
|
||||
file_metadata = {
|
||||
'original_filename': file.filename,
|
||||
'unique_filename': unique_filename,
|
||||
'relative_path': relative_path,
|
||||
'absolute_path': absolute_path,
|
||||
'category': category,
|
||||
'user_id': user_id,
|
||||
'file_size': os.path.getsize(absolute_path),
|
||||
'upload_timestamp': current_date.isoformat(),
|
||||
'mime_type': file.content_type or 'application/octet-stream'
|
||||
}
|
||||
|
||||
# Zusätzliche Metadaten hinzufügen
|
||||
if metadata:
|
||||
file_metadata.update(metadata)
|
||||
|
||||
return relative_path, absolute_path, file_metadata
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Speichern der Datei: {e}")
|
||||
return None
|
||||
|
||||
def delete_file(self, relative_path: str) -> bool:
|
||||
"""
|
||||
Löscht eine Datei
|
||||
|
||||
Args:
|
||||
relative_path: Relativer Pfad zur Datei
|
||||
|
||||
Returns:
|
||||
bool: True wenn erfolgreich gelöscht
|
||||
"""
|
||||
try:
|
||||
if not relative_path:
|
||||
return False
|
||||
|
||||
absolute_path = os.path.join(self.base_folder, relative_path)
|
||||
|
||||
if os.path.exists(absolute_path) and os.path.isfile(absolute_path):
|
||||
os.remove(absolute_path)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Löschen der Datei {relative_path}: {e}")
|
||||
return False
|
||||
|
||||
def move_file(self, old_relative_path: str, new_category: str,
|
||||
new_prefix: str = "") -> Optional[str]:
|
||||
"""
|
||||
Verschiebt eine Datei in eine andere Kategorie
|
||||
|
||||
Args:
|
||||
old_relative_path: Alter relativer Pfad
|
||||
new_category: Neue Kategorie
|
||||
new_prefix: Neuer Präfix
|
||||
|
||||
Returns:
|
||||
str: Neuer relativer Pfad oder None bei Fehler
|
||||
"""
|
||||
try:
|
||||
old_absolute_path = os.path.join(self.base_folder, old_relative_path)
|
||||
|
||||
if not os.path.exists(old_absolute_path):
|
||||
return None
|
||||
|
||||
# Dateiname extrahieren
|
||||
filename = os.path.basename(old_absolute_path)
|
||||
|
||||
# Neuen Pfad generieren
|
||||
current_date = datetime.now()
|
||||
new_category_dir = self.DIRECTORIES.get(new_category)
|
||||
if not new_category_dir:
|
||||
return None
|
||||
|
||||
year_dir = str(current_date.year)
|
||||
month_dir = f"{current_date.month:02d}"
|
||||
new_relative_dir = os.path.join(new_category_dir, year_dir, month_dir)
|
||||
new_full_dir = os.path.join(self.base_folder, new_relative_dir)
|
||||
|
||||
os.makedirs(new_full_dir, exist_ok=True)
|
||||
|
||||
# Neuen Dateinamen generieren falls Präfix angegeben
|
||||
if new_prefix:
|
||||
new_filename = self.generate_unique_filename(filename, new_prefix)
|
||||
else:
|
||||
new_filename = filename
|
||||
|
||||
new_relative_path = os.path.join(new_relative_dir, new_filename).replace('\\', '/')
|
||||
new_absolute_path = os.path.join(new_full_dir, new_filename)
|
||||
|
||||
# Datei verschieben
|
||||
shutil.move(old_absolute_path, new_absolute_path)
|
||||
|
||||
return new_relative_path
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Verschieben der Datei: {e}")
|
||||
return None
|
||||
|
||||
def get_file_info(self, relative_path: str) -> Optional[Dict]:
|
||||
"""
|
||||
Gibt Informationen über eine Datei zurück
|
||||
|
||||
Args:
|
||||
relative_path: Relativer Pfad zur Datei
|
||||
|
||||
Returns:
|
||||
Dict: Datei-Informationen oder None
|
||||
"""
|
||||
try:
|
||||
if not relative_path:
|
||||
return None
|
||||
|
||||
absolute_path = os.path.join(self.base_folder, relative_path)
|
||||
|
||||
if not os.path.exists(absolute_path):
|
||||
return None
|
||||
|
||||
stat = os.stat(absolute_path)
|
||||
|
||||
return {
|
||||
'filename': os.path.basename(absolute_path),
|
||||
'relative_path': relative_path,
|
||||
'absolute_path': absolute_path,
|
||||
'size': stat.st_size,
|
||||
'created': datetime.fromtimestamp(stat.st_ctime).isoformat(),
|
||||
'modified': datetime.fromtimestamp(stat.st_mtime).isoformat(),
|
||||
'exists': True
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Abrufen der Datei-Informationen: {e}")
|
||||
return None
|
||||
|
||||
def cleanup_temp_files(self, max_age_hours: int = 24) -> int:
|
||||
"""
|
||||
Räumt temporäre Dateien auf
|
||||
|
||||
Args:
|
||||
max_age_hours: Maximales Alter in Stunden
|
||||
|
||||
Returns:
|
||||
int: Anzahl gelöschte Dateien
|
||||
"""
|
||||
try:
|
||||
temp_dir = os.path.join(self.base_folder, self.DIRECTORIES['temp'])
|
||||
if not os.path.exists(temp_dir):
|
||||
return 0
|
||||
|
||||
deleted_count = 0
|
||||
max_age_seconds = max_age_hours * 3600
|
||||
current_time = datetime.now().timestamp()
|
||||
|
||||
for root, dirs, files in os.walk(temp_dir):
|
||||
for file in files:
|
||||
file_path = os.path.join(root, file)
|
||||
try:
|
||||
file_age = current_time - os.path.getmtime(file_path)
|
||||
if file_age > max_age_seconds:
|
||||
os.remove(file_path)
|
||||
deleted_count += 1
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
return deleted_count
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Aufräumen temporärer Dateien: {e}")
|
||||
return 0
|
||||
|
||||
def get_category_stats(self) -> Dict[str, Dict]:
|
||||
"""
|
||||
Gibt Statistiken für alle Kategorien zurück
|
||||
|
||||
Returns:
|
||||
Dict: Statistiken pro Kategorie
|
||||
"""
|
||||
stats = {}
|
||||
|
||||
try:
|
||||
for category, subdir in self.DIRECTORIES.items():
|
||||
category_path = os.path.join(self.base_folder, subdir)
|
||||
|
||||
if not os.path.exists(category_path):
|
||||
stats[category] = {'file_count': 0, 'total_size': 0}
|
||||
continue
|
||||
|
||||
file_count = 0
|
||||
total_size = 0
|
||||
|
||||
for root, dirs, files in os.walk(category_path):
|
||||
for file in files:
|
||||
file_path = os.path.join(root, file)
|
||||
try:
|
||||
total_size += os.path.getsize(file_path)
|
||||
file_count += 1
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
stats[category] = {
|
||||
'file_count': file_count,
|
||||
'total_size': total_size,
|
||||
'total_size_mb': round(total_size / (1024 * 1024), 2)
|
||||
}
|
||||
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
print(f"Fehler beim Abrufen der Kategorie-Statistiken: {e}")
|
||||
return {}
|
||||
|
||||
|
||||
# Globale FileManager-Instanz
|
||||
file_manager = FileManager()
|
||||
|
||||
# Convenience-Funktionen
|
||||
def save_job_file(file, user_id: int, metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine Druckjob-Datei"""
|
||||
return file_manager.save_file(file, 'jobs', user_id, 'job', metadata)
|
||||
|
||||
def save_guest_file(file, metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine Gastauftrags-Datei"""
|
||||
return file_manager.save_file(file, 'guests', None, 'guest', metadata)
|
||||
|
||||
def save_avatar_file(file, user_id: int) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine Avatar-Datei"""
|
||||
return file_manager.save_file(file, 'avatars', user_id, 'avatar')
|
||||
|
||||
def save_asset_file(file, user_id: int, metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine Asset-Datei"""
|
||||
return file_manager.save_file(file, 'assets', user_id, 'asset', metadata)
|
||||
|
||||
def save_log_file(file, user_id: int, metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine Log-Datei"""
|
||||
return file_manager.save_file(file, 'logs', user_id, 'log', metadata)
|
||||
|
||||
def save_backup_file(file, user_id: int, metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine Backup-Datei"""
|
||||
return file_manager.save_file(file, 'backups', user_id, 'backup', metadata)
|
||||
|
||||
def save_temp_file(file, user_id: int, metadata: Dict = None) -> Optional[Tuple[str, str, Dict]]:
|
||||
"""Speichert eine temporäre Datei"""
|
||||
return file_manager.save_file(file, 'temp', user_id, 'temp', metadata)
|
||||
|
||||
def delete_file(relative_path: str) -> bool:
|
||||
"""Löscht eine Datei"""
|
||||
return file_manager.delete_file(relative_path)
|
||||
|
||||
def get_file_info(relative_path: str) -> Optional[Dict]:
|
||||
"""Gibt Datei-Informationen zurück"""
|
||||
return file_manager.get_file_info(relative_path)
|
||||
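A sketch of how the convenience wrappers above might be used from a Flask upload route; the endpoint, form field, and hardcoded user_id are illustrative assumptions, not code from this repository:

from flask import Flask, jsonify, request
from utils.file_manager import save_job_file

app = Flask(__name__)

@app.route("/api/jobs/upload", methods=["POST"])  # illustrative endpoint
def upload_job_file():
    file = request.files.get("file")  # illustrative form field name
    result = save_job_file(file, user_id=1)  # user_id would come from the session
    if result is None:
        return jsonify({"error": "Upload fehlgeschlagen"}), 400
    relative_path, _, meta = result
    return jsonify({"path": relative_path, "size": meta["file_size"]})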
22
backend/app - Kopie/utils/fix_csrf.py
Normal file
@@ -0,0 +1,22 @@
#!/usr/bin/env python3
"""Remove the problematic CSRF error handler from app.py"""

import re

# Read the backup file
with open('app_backup.py', 'r', encoding='utf-8') as f:
    content = f.read()

# Remove the CSRF error handler block:
# match from @csrf.error_handler up to the first blank-line break
pattern = r'@csrf\.error_handler.*?(?=\n\n|\n# [A-Z])'
content = re.sub(pattern, '', content, flags=re.DOTALL)

# Also collapse any resulting runs of blank lines
content = re.sub(r'\n\n\n+', '\n\n', content)

# Write the cleaned version
with open('app.py', 'w', encoding='utf-8') as f:
    f.write(content)

print("CSRF-Error-Handler erfolgreich entfernt!")
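A quick self-contained check of the regex used above against a synthetic snippet (the handler body is invented for the demo; the lazy match stops at the lookahead, so everything after the blank line survives):

import re

sample = (
    "@csrf.error_handler\n"
    "def handle_csrf_error(e):\n"
    "    return 'CSRF', 400\n"
    "\n"
    "# Next section\n"
)
pattern = r'@csrf\.error_handler.*?(?=\n\n|\n# [A-Z])'
print(re.sub(pattern, '', sample, flags=re.DOTALL))  # keeps only "# Next section"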
253
backend/app - Kopie/utils/fix_database_immediate.py
Normal file
@@ -0,0 +1,253 @@
#!/usr/bin/env python3
"""
Immediate database repair for the missing updated_at column
"""

import os
import sys
import sqlite3
from datetime import datetime

# Add the app path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from config.settings import DATABASE_PATH

def fix_users_table_immediate():
    """Repairs the users table immediately."""
    print(f"Repariere Datenbank: {DATABASE_PATH}")

    if not os.path.exists(DATABASE_PATH):
        print(f"Datenbankdatei nicht gefunden: {DATABASE_PATH}")
        return False

    try:
        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()

        # Check which columns exist
        cursor.execute("PRAGMA table_info(users)")
        existing_columns = [row[1] for row in cursor.fetchall()]
        print(f"Vorhandene Spalten in users: {existing_columns}")

        # Add missing columns
        required_columns = [
            ('updated_at', 'DATETIME'),
            ('settings', 'TEXT'),
            ('department', 'VARCHAR(100)'),
            ('position', 'VARCHAR(100)'),
            ('phone', 'VARCHAR(50)'),
            ('bio', 'TEXT')
        ]

        for column_name, column_type in required_columns:
            if column_name not in existing_columns:
                try:
                    if column_name == 'updated_at':
                        # Simple approach: allow NULL and update afterwards
                        cursor.execute(f"ALTER TABLE users ADD COLUMN {column_name} {column_type}")
                        print(f"✓ Spalte '{column_name}' hinzugefügt")

                        # Update all existing users with the current timestamp
                        cursor.execute(f"UPDATE users SET {column_name} = CURRENT_TIMESTAMP WHERE {column_name} IS NULL")
                        print(f"✓ Vorhandene Benutzer mit {column_name} aktualisiert")

                        # Create a trigger for automatic updates
                        cursor.execute("""
                            CREATE TRIGGER IF NOT EXISTS update_users_updated_at
                            AFTER UPDATE ON users
                            FOR EACH ROW
                            BEGIN
                                UPDATE users SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id;
                            END
                        """)
                        print(f"✓ Auto-Update-Trigger für {column_name} erstellt")
                    else:
                        cursor.execute(f"ALTER TABLE users ADD COLUMN {column_name} {column_type}")
                        print(f"✓ Spalte '{column_name}' hinzugefügt")

                except Exception as e:
                    print(f"✗ Fehler bei Spalte '{column_name}': {str(e)}")
            else:
                print(f"○ Spalte '{column_name}' bereits vorhanden")

        # Check for and create other missing tables
        create_missing_tables(cursor)

        # Create optimization indexes
        create_performance_indexes(cursor)

        conn.commit()
        conn.close()

        print("✓ Datenbank-Reparatur erfolgreich abgeschlossen")
        return True

    except Exception as e:
        print(f"✗ Fehler bei der Datenbank-Reparatur: {str(e)}")
        if 'conn' in locals():
            conn.rollback()
            conn.close()
        return False

def create_missing_tables(cursor):
    """Creates missing tables."""

    # Check which tables exist
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
    existing_tables = [row[0] for row in cursor.fetchall()]
    print(f"Vorhandene Tabellen: {existing_tables}")

    # user_permissions table
    if 'user_permissions' not in existing_tables:
        cursor.execute("""
            CREATE TABLE user_permissions (
                user_id INTEGER PRIMARY KEY,
                can_start_jobs BOOLEAN DEFAULT 0,
                needs_approval BOOLEAN DEFAULT 1,
                can_approve_jobs BOOLEAN DEFAULT 0,
                FOREIGN KEY (user_id) REFERENCES users (id)
            )
        """)
        print("✓ Tabelle 'user_permissions' erstellt")

    # notifications table
    if 'notifications' not in existing_tables:
        cursor.execute("""
            CREATE TABLE notifications (
                id INTEGER PRIMARY KEY,
                user_id INTEGER NOT NULL,
                type VARCHAR(50) NOT NULL,
                payload TEXT,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                read BOOLEAN DEFAULT 0,
                FOREIGN KEY (user_id) REFERENCES users (id)
            )
        """)
        print("✓ Tabelle 'notifications' erstellt")

    # stats table
    if 'stats' not in existing_tables:
        cursor.execute("""
            CREATE TABLE stats (
                id INTEGER PRIMARY KEY,
                total_print_time INTEGER DEFAULT 0,
                total_jobs_completed INTEGER DEFAULT 0,
                total_material_used REAL DEFAULT 0.0,
                last_updated DATETIME DEFAULT CURRENT_TIMESTAMP
            )
        """)
        print("✓ Tabelle 'stats' erstellt")

        # Create the initial stats record
        cursor.execute("""
            INSERT INTO stats (total_print_time, total_jobs_completed, total_material_used, last_updated)
            VALUES (0, 0, 0.0, CURRENT_TIMESTAMP)
        """)
        print("✓ Initial-Statistiken erstellt")

    # system_logs table
    if 'system_logs' not in existing_tables:
        cursor.execute("""
            CREATE TABLE system_logs (
                id INTEGER PRIMARY KEY,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
                level VARCHAR(20) NOT NULL,
                message VARCHAR(1000) NOT NULL,
                module VARCHAR(100),
                user_id INTEGER,
                ip_address VARCHAR(50),
                user_agent VARCHAR(500),
                FOREIGN KEY (user_id) REFERENCES users (id)
            )
        """)
        print("✓ Tabelle 'system_logs' erstellt")

def create_performance_indexes(cursor):
    """Creates performance indexes."""
    print("Erstelle Performance-Indices...")

    indexes = [
        ("idx_users_email", "users(email)"),
        ("idx_users_username", "users(username)"),
        ("idx_users_role", "users(role)"),
        ("idx_jobs_user_id", "jobs(user_id)"),
        ("idx_jobs_printer_id", "jobs(printer_id)"),
        ("idx_jobs_status", "jobs(status)"),
        ("idx_jobs_start_at", "jobs(start_at)"),
        ("idx_notifications_user_id", "notifications(user_id)"),
        ("idx_notifications_read", "notifications(read)"),
        ("idx_system_logs_timestamp", "system_logs(timestamp)"),
        ("idx_system_logs_level", "system_logs(level)"),
        ("idx_guest_requests_status", "guest_requests(status)"),
        ("idx_printers_status", "printers(status)"),
        ("idx_printers_active", "printers(active)")
    ]

    for index_name, index_def in indexes:
        try:
            cursor.execute(f"CREATE INDEX IF NOT EXISTS {index_name} ON {index_def}")
            print(f"✓ Index '{index_name}' erstellt")
        except Exception as e:
            print(f"○ Index '{index_name}': {str(e)}")

def test_database_access():
    """Tests database access after the repair."""
    print("\nTeste Datenbankzugriff...")

    try:
        # Import and test the models
        from models import get_cached_session, User, Printer, Job

        with get_cached_session() as session:
            # Test a User query
            users = session.query(User).limit(5).all()
            print(f"✓ User-Abfrage erfolgreich - {len(users)} Benutzer gefunden")

            # Test a Printer query
            printers = session.query(Printer).limit(5).all()
            print(f"✓ Printer-Abfrage erfolgreich - {len(printers)} Drucker gefunden")

            # Test a Job query
            jobs = session.query(Job).limit(5).all()
            print(f"✓ Job-Abfrage erfolgreich - {len(jobs)} Jobs gefunden")

        print("✓ Alle Datenbank-Tests erfolgreich!")
        return True

    except Exception as e:
        print(f"✗ Datenbank-Test fehlgeschlagen: {str(e)}")
        return False

def main():
    """Main function for the immediate database repair."""
    print("=== SOFORTIGE DATENBANK-REPARATUR ===")
    print(f"Zeitstempel: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print(f"Datenbank: {DATABASE_PATH}")
    print()

    # Create a backup
    if os.path.exists(DATABASE_PATH):
        backup_path = f"{DATABASE_PATH}.backup_immediate_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        try:
            import shutil
            shutil.copy2(DATABASE_PATH, backup_path)
            print(f"✓ Backup erstellt: {backup_path}")
        except Exception as e:
            print(f"⚠ Backup-Erstellung fehlgeschlagen: {str(e)}")

    # Run the repair
    if fix_users_table_immediate():
        print("\n=== DATENBANK-TEST ===")
        if test_database_access():
            print("\n🎉 DATENBANK-REPARATUR ERFOLGREICH!")
            print("Die Anwendung sollte jetzt funktionieren.")
        else:
            print("\n❌ DATENBANK-TEST FEHLGESCHLAGEN!")
            print("Weitere Diagnose erforderlich.")
    else:
        print("\n❌ DATENBANK-REPARATUR FEHLGESCHLAGEN!")
        print("Manuelle Intervention erforderlich.")

if __name__ == "__main__":
    main()
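The PRAGMA table_info check above generalizes into a small idempotent helper; a minimal sketch against an in-memory SQLite database (table and column names are illustrative):

import sqlite3

def ensure_column(conn: sqlite3.Connection, table: str, column: str, ddl_type: str) -> bool:
    """Add a column if it is missing; return True if it was added."""
    cols = [row[1] for row in conn.execute(f"PRAGMA table_info({table})")]
    if column in cols:
        return False
    conn.execute(f"ALTER TABLE {table} ADD COLUMN {column} {ddl_type}")
    return True

# Usage against an in-memory database
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY)")
print(ensure_column(conn, "users", "updated_at", "DATETIME"))  # True: column added
print(ensure_column(conn, "users", "updated_at", "DATETIME"))  # False: already there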
25
backend/app - Kopie/utils/init_db.py
Normal file
@@ -0,0 +1,25 @@
#!/usr/bin/env python3.11

from models import init_database, create_initial_admin

if __name__ == "__main__":
    print("Initialisiere Datenbank...")
    init_database()

    print("Erstelle initialen Admin-Benutzer...")
    success = create_initial_admin(
        email="admin@mercedes-benz.com",
        password="744563017196A",
        name="System Administrator",
        username="admin"
    )

    if success:
        print("Admin-Benutzer erfolgreich erstellt.")
        print("Login-Daten:")
        print("   Benutzername: admin")
        print("   Passwort: 744563017196A")
    else:
        print("Admin-Benutzer konnte nicht erstellt werden (existiert bereits?).")

    print("Datenbank-Initialisierung abgeschlossen.")
729
backend/app - Kopie/utils/job_scheduler.py
Normal file
@@ -0,0 +1,729 @@
import threading
import time
import logging
from typing import Dict, Callable, Any, List, Optional, Union
from datetime import datetime, timedelta

from PyP100 import PyP110
from sqlalchemy.orm import joinedload

from utils.logging_config import get_logger
from models import Job, Printer, get_db_session
from config.settings import TAPO_USERNAME, TAPO_PASSWORD

# Lazy logger initialization
_logger = None

def get_scheduler_logger():
    """Lazy initialization of the scheduler logger."""
    global _logger
    if _logger is None:
        _logger = get_logger("scheduler")
    return _logger

class BackgroundTaskScheduler:
    """
    An advanced background task scheduler that supports registrable worker functions.
    Tasks can be registered as placeholders and configured later.
    """

    def __init__(self):
        self._tasks: Dict[str, Dict[str, Any]] = {}
        self._thread: Optional[threading.Thread] = None
        self._stop_event = threading.Event()
        self._running = False
        self._start_time: Optional[datetime] = None
        self.logger = get_scheduler_logger()

    def register_task(self,
                      task_id: str,
                      func: Callable,
                      interval: int = 60,
                      args: Optional[List] = None,
                      kwargs: Optional[Dict] = None,
                      enabled: bool = True) -> bool:
        """
        Registers a new background task.

        Args:
            task_id: Unique ID for the task
            func: The function to execute
            interval: Interval in seconds between runs
            args: Positional arguments for the function
            kwargs: Keyword arguments for the function
            enabled: Whether the task should be enabled

        Returns:
            bool: True on success, False if the ID already exists
        """
        if task_id in self._tasks:
            self.logger.error(f"Task mit ID {task_id} existiert bereits")
            return False

        self._tasks[task_id] = {
            "func": func,
            "interval": interval,
            "args": args or [],
            "kwargs": kwargs or {},
            "enabled": enabled,
            "last_run": None,
            "next_run": datetime.now() if enabled else None
        }

        self.logger.info(f"Task {task_id} registriert: Intervall {interval}s, Enabled: {enabled}")
        return True

    def update_task(self,
                    task_id: str,
                    interval: Optional[int] = None,
                    args: Optional[List] = None,
                    kwargs: Optional[Dict] = None,
                    enabled: Optional[bool] = None) -> bool:
        """
        Updates the configuration of an existing task.

        Args:
            task_id: ID of the task to update
            interval: New interval in seconds
            args: New positional arguments
            kwargs: New keyword arguments
            enabled: New enabled state

        Returns:
            bool: True on success, False if the ID does not exist
        """
        if task_id not in self._tasks:
            self.logger.error(f"Task mit ID {task_id} existiert nicht")
            return False

        task = self._tasks[task_id]

        if interval is not None:
            task["interval"] = interval

        if args is not None:
            task["args"] = args

        if kwargs is not None:
            task["kwargs"] = kwargs

        if enabled is not None and enabled != task["enabled"]:
            task["enabled"] = enabled
            if enabled:
                task["next_run"] = datetime.now()
            else:
                task["next_run"] = None

        self.logger.info(f"Task {task_id} aktualisiert: Intervall {task['interval']}s, Enabled: {task['enabled']}")
        return True

    def remove_task(self, task_id: str) -> bool:
        """
        Removes a task from the scheduler.

        Args:
            task_id: ID of the task to remove

        Returns:
            bool: True on success, False if the ID does not exist
        """
        if task_id not in self._tasks:
            self.logger.error(f"Task mit ID {task_id} existiert nicht")
            return False

        del self._tasks[task_id]
        self.logger.info(f"Task {task_id} entfernt")
        return True

    def get_task_info(self, task_id: Optional[str] = None) -> Union[Dict, List[Dict]]:
        """
        Returns information about one task or all tasks.

        Args:
            task_id: ID of the task, or None for all tasks

        Returns:
            Dict or List: Task information
        """
        if task_id is not None:
            if task_id not in self._tasks:
                return {}

            task = self._tasks[task_id]
            return {
                "id": task_id,
                "interval": task["interval"],
                "enabled": task["enabled"],
                "last_run": task["last_run"].isoformat() if task["last_run"] else None,
                "next_run": task["next_run"].isoformat() if task["next_run"] else None
            }

        return [
            {
                "id": tid,
                "interval": task["interval"],
                "enabled": task["enabled"],
                "last_run": task["last_run"].isoformat() if task["last_run"] else None,
                "next_run": task["next_run"].isoformat() if task["next_run"] else None
            }
            for tid, task in self._tasks.items()
        ]

    def get_tasks(self) -> Dict[str, Dict[str, Any]]:
        """
        Returns all tasks with their configurations.

        Returns:
            Dict: Dictionary with task IDs as keys and task configurations as values
        """
        return {
            task_id: {
                "interval": task["interval"],
                "enabled": task["enabled"],
                "last_run": task["last_run"].isoformat() if task["last_run"] else None,
                "next_run": task["next_run"].isoformat() if task["next_run"] else None
            }
            for task_id, task in self._tasks.items()
        }

    def get_uptime(self) -> Optional[str]:
        """
        Returns the scheduler's uptime since it was started.

        Returns:
            str: Formatted uptime, or None if the scheduler is not running
        """
        if not self._running or not self._start_time:
            return None

        uptime = datetime.now() - self._start_time
        days = uptime.days
        hours, remainder = divmod(uptime.seconds, 3600)
        minutes, seconds = divmod(remainder, 60)

        if days > 0:
            return f"{days} Tage, {hours} Stunden, {minutes} Minuten"
        elif hours > 0:
            return f"{hours} Stunden, {minutes} Minuten"
        else:
            return f"{minutes} Minuten, {seconds} Sekunden"

    def start(self) -> bool:
        """
        Starts the scheduler.

        Returns:
            bool: True if started successfully, False if already running
        """
        if self._running:
            self.logger.warning("Scheduler läuft bereits")
            return False

        self._stop_event.clear()
        self._thread = threading.Thread(target=self._run)
        self._thread.daemon = True
        self._thread.start()
        self._running = True
        self._start_time = datetime.now()

        self.logger.info("Scheduler gestartet")
        return True

    def stop(self) -> bool:
        """
        Stops the scheduler.

        Returns:
            bool: True if stopped successfully, False if not running
        """
        if not self._running:
            self.logger.warning("Scheduler läuft nicht")
            return False

        self._stop_event.set()
        if self._thread:
            self._thread.join(timeout=5.0)

        self._running = False
        self._start_time = None
        self.logger.info("Scheduler gestoppt")
        return True

    def is_running(self) -> bool:
        """
        Checks whether the scheduler is running.

        Returns:
            bool: True if the scheduler is running, otherwise False
        """
        return self._running

    def _run(self) -> None:
        """Main loop of the scheduler."""
        self.logger.info("Scheduler-Thread gestartet")

        while not self._stop_event.is_set():
            now = datetime.now()

            for task_id, task in self._tasks.items():
                if not task["enabled"] or not task["next_run"]:
                    continue

                if now >= task["next_run"]:
                    try:
                        self.logger.debug(f"Führe Task {task_id} aus")
                        task["func"](*task["args"], **task["kwargs"])
                        task["last_run"] = now
                        task["next_run"] = now + timedelta(seconds=task["interval"])
                        self.logger.debug(f"Task {task_id} erfolgreich ausgeführt, nächste Ausführung: {task['next_run']}")
                    except Exception as e:
                        self.logger.error(f"Fehler bei Ausführung von Task {task_id}: {str(e)}")
                        # Schedule the next run anyway
                        task["next_run"] = now + timedelta(seconds=task["interval"])
|
||||
|
||||
# Schlafenszeit berechnen (1 Sekunde oder weniger)
|
||||
time.sleep(1)
|
||||
|
||||
self.logger.info("Scheduler-Thread beendet")
|
||||
|
||||
def toggle_plug(self, ip: str, state: bool, username: str = None, password: str = None) -> bool:
|
||||
"""
|
||||
Schaltet eine TP-Link Tapo P100/P110-Steckdose ein oder aus.
|
||||
|
||||
Args:
|
||||
ip: IP-Adresse der Steckdose
|
||||
state: True = Ein, False = Aus
|
||||
username: Benutzername für die Steckdose (wird überschrieben mit globalen Credentials)
|
||||
password: Passwort für die Steckdose (wird überschrieben mit globalen Credentials)
|
||||
|
||||
Returns:
|
||||
bool: True wenn erfolgreich geschaltet
|
||||
"""
|
||||
try:
|
||||
# PyP100 importieren
|
||||
try:
|
||||
from PyP100 import PyP100
|
||||
except ImportError:
|
||||
self.logger.error("❌ PyP100-Modul nicht installiert - Steckdose kann nicht geschaltet werden")
|
||||
return False
|
||||
|
||||
# IMMER globale Anmeldedaten verwenden (da diese funktionieren)
|
||||
from config.settings import TAPO_USERNAME, TAPO_PASSWORD
|
||||
username = TAPO_USERNAME
|
||||
password = TAPO_PASSWORD
|
||||
self.logger.debug(f"🔧 Verwende globale Tapo-Anmeldedaten für {ip}")
|
||||
|
||||
# P100-Verbindung herstellen (P100 statt P110 verwenden)
|
||||
p100 = PyP100.P100(ip, username, password)
|
||||
|
||||
# Handshake und Login durchführen
|
||||
p100.handshake()
|
||||
p100.login()
|
||||
|
||||
# Steckdose schalten
|
||||
if state:
|
||||
p100.turnOn()
|
||||
self.logger.info(f"✅ Tapo-Steckdose {ip} erfolgreich eingeschaltet")
|
||||
else:
|
||||
p100.turnOff()
|
||||
self.logger.info(f"✅ Tapo-Steckdose {ip} erfolgreich ausgeschaltet")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
action = "ein" if state else "aus"
|
||||
self.logger.error(f"❌ Fehler beim {action}schalten der Tapo-Steckdose {ip}: {str(e)}")
|
||||
return False
|
||||
|
||||
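
    # Minimal usage sketch for toggle_plug. The IP address below is a
    # hypothetical example, not taken from the real configuration; note that
    # any credentials passed in are ignored in favour of the global
    # TAPO_USERNAME/TAPO_PASSWORD from config.settings:
    #
    #   if scheduler.toggle_plug("192.168.0.105", state=True):
    #       print("Steckdose eingeschaltet")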

    def toggle_printer_plug(self, printer_id: int, state: bool) -> bool:
        """
        Switches a printer's smart plug on or off using the status mapping:
        - plug OFF = printer ONLINE (ready to print)
        - plug ON  = printer PRINTING (currently printing)

        Args:
            printer_id: ID of the printer
            state: True to switch on, False to switch off

        Returns:
            bool: True on success, False on failure
        """
        db_session = None
        try:
            # Fetch the printer from the database
            db_session = get_db_session()
            printer = db_session.get(Printer, printer_id)

            if not printer:
                self.logger.error(f"❌ Drucker mit ID {printer_id} nicht gefunden")
                db_session.close()
                return False

            # Validate the plug configuration
            if not printer.plug_ip:
                self.logger.error(f"❌ Unvollständige Steckdosen-Konfiguration für Drucker {printer.name}")
                db_session.close()
                return False

            # Switch the plug
            success = self.toggle_plug(
                ip=printer.plug_ip,
                state=state,
                username=printer.plug_username,  # overridden by the global credentials
                password=printer.plug_password   # overridden by the global credentials
            )

            if success:
                # Update the database status according to the mapping above
                if state:
                    # Plug on = printer is printing
                    printer.status = "printing"
                    self.logger.info(f"🖨️ Drucker {printer.name}: Status auf 'printing' gesetzt (Steckdose eingeschaltet)")
                else:
                    # Plug off = printer is ready
                    printer.status = "online"
                    self.logger.info(f"✅ Drucker {printer.name}: Status auf 'online' gesetzt (Steckdose ausgeschaltet - bereit)")

                printer.last_checked = datetime.now()
                db_session.commit()
                self.logger.info(f"✅ Status für Drucker {printer.name} erfolgreich aktualisiert")

            db_session.close()
            return success

        except Exception as e:
            action = "ein" if state else "aus"
            self.logger.error(f"❌ Fehler beim {action}schalten der Steckdose für Drucker {printer_id}: {str(e)}")
            try:
                if db_session:
                    db_session.close()
            except Exception:
                pass
            return False
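
    # Usage sketch for toggle_printer_plug, illustrating the status mapping
    # described in the docstring (printer ID 1 is a hypothetical example):
    #
    #   scheduler.toggle_printer_plug(1, True)   # plug on  -> status "printing"
    #   scheduler.toggle_printer_plug(1, False)  # plug off -> status "online"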

    def _check_jobs(self) -> None:
        """
        Checks and manages print jobs with power management:
        - starts pending (scheduled) jobs
        - finishes expired jobs (switches the plug off)
        - switches printers off automatically when idle
        - switches printers on automatically for new jobs
        """
        db_session = get_db_session()

        try:
            now = datetime.now()

            # 1. Start pending (scheduled) jobs
            pending_jobs = db_session.query(Job).filter(
                Job.status == "scheduled",
                Job.start_at <= now
            ).all()

            for job in pending_jobs:
                self.logger.info(f"🚀 Starte geplanten Job {job.id}: {job.name}")

                # Switch the plug on
                if self.toggle_printer_plug(job.printer_id, True):
                    # Mark the job as running
                    job.status = "running"
                    db_session.commit()
                    self.logger.info(f"✅ Job {job.id} gestartet - Drucker eingeschaltet")
                else:
                    self.logger.error(f"❌ Konnte Steckdose für Job {job.id} nicht einschalten")

            # 2. Start immediate jobs (jobs that should already have started)
            immediate_jobs = db_session.query(Job).filter(
                Job.status == "waiting_for_printer",
                Job.start_at <= now
            ).all()

            for job in immediate_jobs:
                self.logger.info(f"⚡ Starte Sofort-Job {job.id}: {job.name}")

                # Switch the plug on
                if self.toggle_printer_plug(job.printer_id, True):
                    # Mark the job as running
                    job.status = "running"
                    db_session.commit()
                    self.logger.info(f"✅ Sofort-Job {job.id} gestartet - Drucker automatisch eingeschaltet")
                else:
                    self.logger.error(f"❌ Konnte Steckdose für Sofort-Job {job.id} nicht einschalten")

            # 3. Finish expired jobs
            running_jobs = db_session.query(Job).filter(
                Job.status == "running",
                Job.end_at <= now
            ).all()

            for job in running_jobs:
                self.logger.info(f"🏁 Beende Job {job.id}: {job.name}")

                # Mark the job as finished
                job.status = "finished"
                job.actual_end_time = now
                db_session.commit()
                self.logger.info(f"✅ Job {job.id} beendet")

                # Check whether further jobs are pending for this printer
                pending_jobs_for_printer = db_session.query(Job).filter(
                    Job.printer_id == job.printer_id,
                    Job.status.in_(["scheduled", "running", "waiting_for_printer"])
                ).count()

                if pending_jobs_for_printer == 0:
                    # No further jobs - switch the printer off (idle management)
                    if self.toggle_printer_plug(job.printer_id, False):
                        self.logger.info(f"💤 Drucker {job.printer_id} automatisch ausgeschaltet - Leerlauf erkannt")
                    else:
                        self.logger.warning(f"⚠️ Konnte Drucker {job.printer_id} nicht ausschalten")
                else:
                    self.logger.info(f"🔄 Drucker {job.printer_id} bleibt eingeschaltet - {pending_jobs_for_printer} weitere Jobs anstehend")

            # 4. Idle management for all active printers
            active_printers = db_session.query(Printer).filter(
                Printer.active == True,
                Printer.plug_ip.isnot(None),
                Printer.status == "online"
            ).all()

            for printer in active_printers:
                # Check whether jobs are pending for this printer
                active_jobs_count = db_session.query(Job).filter(
                    Job.printer_id == printer.id,
                    Job.status.in_(["scheduled", "running", "waiting_for_printer"])
                ).count()

                if active_jobs_count == 0:
                    # No jobs pending - check how long the printer has been idle
                    if printer.last_checked:
                        idle_time = now - printer.last_checked
                        # Switch the printer off after more than 5 minutes of idling
                        if idle_time.total_seconds() > 300:  # 5 minutes
                            if self.toggle_printer_plug(printer.id, False):
                                self.logger.info(f"💤 Drucker {printer.name} nach {idle_time.total_seconds()//60:.0f} Min Leerlauf ausgeschaltet")
                            else:
                                self.logger.warning(f"⚠️ Konnte Drucker {printer.name} nach Leerlauf nicht ausschalten")

        except Exception as e:
            self.logger.error(f"❌ Fehler bei Überprüfung der Jobs: {str(e)}")
            try:
                db_session.rollback()
            except Exception:
                pass

        finally:
            db_session.close()

    def handle_immediate_job(self, job_id: int) -> bool:
        """
        Handles a job immediately (for an instant start right after job creation).

        Args:
            job_id: ID of the job to start

        Returns:
            bool: True if the job was started successfully
        """
        db_session = get_db_session()

        try:
            now = datetime.now()

            # Load the job from the database
            job = db_session.get(Job, job_id)
            if not job:
                self.logger.error(f"❌ Job {job_id} nicht gefunden")
                db_session.close()
                return False

            # Only handle jobs that are supposed to start now
            if job.start_at > now:
                self.logger.info(f"⏰ Job {job_id} ist für später geplant ({job.start_at}) - kein Sofort-Start")
                db_session.close()
                return False

            # Only handle jobs in a suitable status
            if job.status not in ["scheduled", "waiting_for_printer"]:
                self.logger.info(f"ℹ️ Job {job_id} hat Status '{job.status}' - kein Sofort-Start nötig")
                db_session.close()
                return False

            self.logger.info(f"⚡ Starte Sofort-Job {job_id}: {job.name} für Drucker {job.printer_id}")

            # Switch the plug on
            if self.toggle_printer_plug(job.printer_id, True):
                # Mark the job as running
                job.status = "running"
                db_session.commit()
                db_session.close()

                self.logger.info(f"✅ Sofort-Job {job_id} erfolgreich gestartet - Drucker automatisch eingeschaltet")
                return True
            else:
                self.logger.error(f"❌ Konnte Steckdose für Sofort-Job {job_id} nicht einschalten")
                db_session.close()
                return False

        except Exception as e:
            self.logger.error(f"❌ Fehler beim Starten von Sofort-Job {job_id}: {str(e)}")
            try:
                db_session.rollback()
                db_session.close()
            except Exception:
                pass
            return False

    def check_and_manage_printer_power(self, printer_id: int) -> bool:
        """
        Checks and manages the power supply of a specific printer.

        Args:
            printer_id: ID of the printer to check

        Returns:
            bool: True if the power management succeeded
        """
        db_session = get_db_session()

        try:
            now = datetime.now()

            # Load the printer
            printer = db_session.get(Printer, printer_id)
            if not printer or not printer.plug_ip:
                db_session.close()
                return False

            # Check active jobs for this printer
            active_jobs = db_session.query(Job).filter(
                Job.printer_id == printer_id,
                Job.status.in_(["scheduled", "running", "waiting_for_printer"])
            ).all()

            current_jobs = [job for job in active_jobs if job.start_at <= now]
            future_jobs = [job for job in active_jobs if job.start_at > now]

            if current_jobs:
                # Jobs are running or should be running - switch the printer on
                self.logger.info(f"🔋 Drucker {printer.name} benötigt Strom - {len(current_jobs)} aktive Jobs")
                success = self.toggle_printer_plug(printer_id, True)

                # Move jobs from waiting_for_printer to running
                for job in current_jobs:
                    if job.status == "waiting_for_printer":
                        job.status = "running"
                        self.logger.info(f"🚀 Job {job.id} von 'waiting_for_printer' auf 'running' umgestellt")

                db_session.commit()
                db_session.close()
                return success

            elif future_jobs:
                # Only future jobs - the printer can stay switched off
                next_job_time = min(job.start_at for job in future_jobs)
                time_until_next = (next_job_time - now).total_seconds() / 60

                self.logger.info(f"⏳ Drucker {printer.name} hat {len(future_jobs)} zukünftige Jobs, nächster in {time_until_next:.1f} Min")

                # Switch off if the next job is more than 10 minutes away
                if time_until_next > 10:
                    success = self.toggle_printer_plug(printer_id, False)
                    db_session.close()
                    return success
                else:
                    self.logger.info(f"🔄 Drucker {printer.name} bleibt eingeschaltet - nächster Job bald")
                    db_session.close()
                    return True

            else:
                # No jobs - switch the printer off (idle)
                self.logger.info(f"💤 Drucker {printer.name} hat keine anstehenden Jobs - ausschalten")
                success = self.toggle_printer_plug(printer_id, False)
                db_session.close()
                return success

        except Exception as e:
            self.logger.error(f"❌ Fehler beim Power-Management für Drucker {printer_id}: {str(e)}")
            try:
                db_session.close()
            except Exception:
                pass
            return False


def test_tapo_connection(ip_address: str, username: str = None, password: str = None) -> dict:
    """
    Tests the connection to a TP-Link Tapo P110 smart plug.

    Args:
        ip_address: IP address of the plug
        username: Username for the plug (optional)
        password: Password for the plug (optional)

    Returns:
        dict: Result with status and information
    """
    logger = get_logger("tapo")
    result = {
        "success": False,
        "message": "",
        "device_info": None,
        "error": None
    }

    try:
        # Import PyP100 for Tapo support
        try:
            from PyP100 import PyP100
        except ImportError:
            result["message"] = "PyP100-Modul nicht verfügbar"
            result["error"] = "ModuleNotFound"
            logger.error("PyP100-Modul nicht verfügbar - kann Tapo-Steckdosen nicht testen")
            return result

        # Use the global credentials if none were provided
        if not username or not password:
            from config.settings import TAPO_USERNAME, TAPO_PASSWORD
            username = TAPO_USERNAME
            password = TAPO_PASSWORD
            logger.debug(f"Verwende globale Tapo-Anmeldedaten für {ip_address}")

        # Establish the TP-Link Tapo P100 connection
        p100 = PyP100.P100(ip_address, username, password)
        p100.handshake()  # authentication
        p100.login()      # login

        # Fetch device information
        device_info = p100.getDeviceInfo()

        result["success"] = True
        result["message"] = "Verbindung erfolgreich"
        result["device_info"] = device_info

        logger.info(f"Tapo-Verbindung zu {ip_address} erfolgreich: {device_info.get('nickname', 'Unbekannt')}")

    except Exception as e:
        result["success"] = False
        result["message"] = f"Verbindungsfehler: {str(e)}"
        result["error"] = str(e)
        logger.error(f"Fehler bei Tapo-Test zu {ip_address}: {str(e)}")

    return result
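
# Usage sketch for test_tapo_connection; the IP address is a hypothetical
# example:
#
#   result = test_tapo_connection("192.168.0.105")
#   if result["success"]:
#       print(result["device_info"].get("nickname"))
#   else:
#       print(result["message"])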

# Create the scheduler instance
scheduler = BackgroundTaskScheduler()

# Register default tasks - reduced interval for better responsiveness
scheduler.register_task("check_jobs", scheduler._check_jobs, interval=30)

# Alias kept for compatibility
JobScheduler = BackgroundTaskScheduler

def get_job_scheduler() -> BackgroundTaskScheduler:
    """
    Returns the global job scheduler.

    Returns:
        BackgroundTaskScheduler: The global scheduler
    """
    return scheduler
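
# Minimal self-test sketch: registers a hypothetical heartbeat task and runs
# the scheduler briefly. Task name, interval and runtime are illustrative
# assumptions, not part of the MYP configuration.
if __name__ == "__main__":
    def heartbeat():
        print(f"Heartbeat um {datetime.now().isoformat()}")

    demo = BackgroundTaskScheduler()
    demo.register_task("heartbeat", heartbeat, interval=2)
    demo.start()
    time.sleep(6)
    demo.stop()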
467
backend/app - Kopie/utils/logging_config.py
Normal file
@@ -0,0 +1,467 @@
import logging
import logging.handlers
import os
import sys
import time
import platform
import socket
from typing import Dict, Optional, Any
from datetime import datetime
from config.settings import (
    LOG_DIR, LOG_SUBDIRS, LOG_LEVEL, LOG_FORMAT, LOG_DATE_FORMAT,
    get_log_file, ensure_log_directories
)

# Dictionary holding the configured loggers
_loggers: Dict[str, logging.Logger] = {}

# ANSI color codes for log levels
ANSI_COLORS = {
    'RESET': '\033[0m',
    'BOLD': '\033[1m',
    'BLACK': '\033[30m',
    'RED': '\033[31m',
    'GREEN': '\033[32m',
    'YELLOW': '\033[33m',
    'BLUE': '\033[34m',
    'MAGENTA': '\033[35m',
    'CYAN': '\033[36m',
    'WHITE': '\033[37m',
    'BG_RED': '\033[41m',
    'BG_GREEN': '\033[42m',
    'BG_YELLOW': '\033[43m',
    'BG_BLUE': '\033[44m'
}

# Emojis for log levels and categories
LOG_EMOJIS = {
    'DEBUG': '🔍',
    'INFO': 'ℹ️',
    'WARNING': '⚠️',
    'ERROR': '❌',
    'CRITICAL': '🔥',
    'app': '🖥️',
    'scheduler': '⏱️',
    'auth': '🔐',
    'jobs': '🖨️',
    'printers': '🔧',
    'errors': '💥',
    'user': '👤',
    'kiosk': '📺'
}

# ASCII fallbacks for emojis in case of encoding problems
EMOJI_FALLBACK = {
    '🔍': '[DEBUG]',
    'ℹ️': '[INFO]',
    '⚠️': '[WARN]',
    '❌': '[ERROR]',
    '🔥': '[CRIT]',
    '🖥️': '[APP]',
    '⏱️': '[SCHED]',
    '🔐': '[AUTH]',
    '🖨️': '[JOBS]',
    '🔧': '[PRINT]',
    '💥': '[ERR]',
    '👤': '[USER]',
    '📺': '[KIOSK]',
    '🐞': '[BUG]',
    '🚀': '[START]',
    '📂': '[FOLDER]',
    '📊': '[CHART]',
    '💻': '[PC]',
    '🌐': '[WEB]',
    '📅': '[TIME]',
    '📡': '[SIGNAL]',
    '🧩': '[CONTENT]',
    '📋': '[HEADER]',
    '✅': '[OK]',
    '📦': '[SIZE]'
}

def safe_emoji(emoji: str) -> str:
    """Returns an emoji, or an ASCII fallback in case of encoding problems."""
    try:
        # First check: can the emoji be encoded at all?
        test_encoding = sys.stdout.encoding or 'utf-8'
        emoji.encode(test_encoding)

        # Second check: Windows-specific cp1252 encoding
        if os.name == 'nt':
            try:
                emoji.encode('cp1252')
            except UnicodeEncodeError:
                # If cp1252 fails, use the fallback
                return EMOJI_FALLBACK.get(emoji, '[?]')

        return emoji
    except (UnicodeEncodeError, LookupError, AttributeError):
        return EMOJI_FALLBACK.get(emoji, '[?]')
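
# Usage sketch: safe_emoji degrades gracefully on consoles that cannot render
# Unicode, e.g. safe_emoji('🚀') yields '🚀' on a UTF-8 terminal and the
# ASCII fallback '[START]' on a cp1252 Windows console.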

# Check whether the terminal supports ANSI colors
def supports_color() -> bool:
    """Checks whether the terminal supports ANSI colors."""
    if os.name == 'nt':
        try:
            import ctypes
            kernel32 = ctypes.windll.kernel32
            # Enable VT100 support on Windows
            kernel32.SetConsoleMode(kernel32.GetStdHandle(-11), 7)

            # Set the console output to UTF-8 for better emoji support
            try:
                kernel32.SetConsoleOutputCP(65001)  # UTF-8
            except Exception:
                pass

            # Try to set a UTF-8 locale for emojis
            try:
                import locale
                locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
            except Exception:
                try:
                    # Fallback to the German locale
                    locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')
                except Exception:
                    pass

            return True
        except Exception:
            return False
    else:
        return sys.stdout.isatty()

USE_COLORS = supports_color()

class ColoredFormatter(logging.Formatter):
    """Formatter that adds colors and emojis to log records."""

    level_colors = {
        'DEBUG': ANSI_COLORS['CYAN'],
        'INFO': ANSI_COLORS['GREEN'],
        'WARNING': ANSI_COLORS['YELLOW'],
        'ERROR': ANSI_COLORS['RED'],
        'CRITICAL': ANSI_COLORS['BG_RED'] + ANSI_COLORS['WHITE'] + ANSI_COLORS['BOLD']
    }

    def format(self, record):
        try:
            # Add an emoji to the level and the category
            level_name = record.levelname
            category_name = record.name.split('.')[-1] if '.' in record.name else record.name

            level_emoji = safe_emoji(LOG_EMOJIS.get(level_name, ''))
            category_emoji = safe_emoji(LOG_EMOJIS.get(category_name, ''))

            # Modify the record object (only temporarily)
            original_levelname = record.levelname
            original_name = record.name

            # Add the emojis
            record.levelname = f"{level_emoji} {level_name}"
            record.name = f"{category_emoji} {category_name}"

            # Add colors where supported
            if USE_COLORS:
                level_color = self.level_colors.get(original_levelname, ANSI_COLORS['RESET'])
                record.levelname = f"{level_color}{record.levelname}{ANSI_COLORS['RESET']}"
                record.name = f"{ANSI_COLORS['BOLD']}{record.name}{ANSI_COLORS['RESET']}"

            # Format the record
            result = super().format(record)

            # Restore the original values
            record.levelname = original_levelname
            record.name = original_name

            return result
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            # Fallback for Unicode problems: use ASCII text only
            original_levelname = record.levelname
            original_name = record.name

            # Replace emojis with their ASCII fallbacks
            level_fallback = EMOJI_FALLBACK.get(LOG_EMOJIS.get(original_levelname, ''), '[LOG]')
            category_name = record.name.split('.')[-1] if '.' in record.name else record.name
            category_fallback = EMOJI_FALLBACK.get(LOG_EMOJIS.get(category_name, ''), '[CAT]')

            record.levelname = f"{level_fallback} {original_levelname}"
            record.name = f"{category_fallback} {category_name}"

            # Basic formatting without colors
            result = super().format(record)

            # Restore the original values
            record.levelname = original_levelname
            record.name = original_name

            return result

class DebugInfoFilter(logging.Filter):
    """Filter that adds debug information to every log record."""

    def __init__(self, add_hostname=True, add_process_info=True):
        super().__init__()
        self.add_hostname = add_hostname
        self.add_process_info = add_process_info
        self.hostname = socket.gethostname() if add_hostname else None
        self.pid = os.getpid() if add_process_info else None

    def filter(self, record):
        # Add debug information
        if self.add_hostname and not hasattr(record, 'hostname'):
            record.hostname = self.hostname

        if self.add_process_info and not hasattr(record, 'pid'):
            record.pid = self.pid

        # Additional info for the DEBUG level
        if record.levelno == logging.DEBUG:
            # Highlight the function name and line number
            if USE_COLORS:
                record.funcName = f"{ANSI_COLORS['CYAN']}{record.funcName}{ANSI_COLORS['RESET']}"
                record.lineno = f"{ANSI_COLORS['CYAN']}{record.lineno}{ANSI_COLORS['RESET']}"

        return True

def setup_logging(debug_mode: bool = False):
    """
    Initializes the logging system and creates all required directories.

    Args:
        debug_mode: If True, the log level is set to DEBUG
    """
    ensure_log_directories()

    # Determine the log level
    log_level = logging.DEBUG if debug_mode else getattr(logging, LOG_LEVEL)

    # Configure the root logger
    root_logger = logging.getLogger()
    root_logger.setLevel(log_level)

    # Remove all existing handlers
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)

    # Create formatters (with and without colors)
    colored_formatter = ColoredFormatter(LOG_FORMAT, LOG_DATE_FORMAT)
    file_formatter = logging.Formatter(LOG_FORMAT, LOG_DATE_FORMAT)

    # Filter for additional debug information
    debug_filter = DebugInfoFilter()

    # Console handler for all logs
    console_handler = logging.StreamHandler()
    console_handler.setLevel(log_level)
    console_handler.setFormatter(colored_formatter)
    console_handler.addFilter(debug_filter)

    # UTF-8 encoding support for Windows PowerShell
    if os.name == 'nt' and hasattr(console_handler.stream, 'reconfigure'):
        try:
            console_handler.stream.reconfigure(encoding='utf-8')
        except Exception:
            pass

    root_logger.addHandler(console_handler)

    # File handler for general app logs
    app_log_file = get_log_file("app")
    app_handler = logging.handlers.RotatingFileHandler(
        app_log_file, maxBytes=10*1024*1024, backupCount=5, encoding='utf-8'
    )
    app_handler.setLevel(log_level)
    app_handler.setFormatter(file_formatter)
    root_logger.addHandler(app_handler)

    # Log the configuration when debug mode is active
    if debug_mode:
        bug_emoji = safe_emoji("🐞")
        root_logger.debug(f"{bug_emoji} Debug-Modus aktiviert - Ausführliche Logs werden generiert")

def get_logger(category: str) -> logging.Logger:
    """
    Returns a configured logger for a given category.

    Args:
        category: Log category (app, scheduler, auth, jobs, printers, errors)

    Returns:
        logging.Logger: Configured logger
    """
    if category in _loggers:
        return _loggers[category]

    # Create the logger
    logger = logging.getLogger(f"myp.{category}")
    logger.setLevel(getattr(logging, LOG_LEVEL))

    # Prevent duplicate logs via the parent logger
    logger.propagate = False

    # Create formatters (with and without colors)
    colored_formatter = ColoredFormatter(LOG_FORMAT, LOG_DATE_FORMAT)
    file_formatter = logging.Formatter(LOG_FORMAT, LOG_DATE_FORMAT)

    # Filter for additional debug information
    debug_filter = DebugInfoFilter()

    # Console handler
    console_handler = logging.StreamHandler()
    console_handler.setLevel(getattr(logging, LOG_LEVEL))
    console_handler.setFormatter(colored_formatter)
    console_handler.addFilter(debug_filter)

    # UTF-8 encoding support for Windows PowerShell
    if os.name == 'nt' and hasattr(console_handler.stream, 'reconfigure'):
        try:
            console_handler.stream.reconfigure(encoding='utf-8')
        except Exception:
            pass

    logger.addHandler(console_handler)

    # File handler for the specific category
    log_file = get_log_file(category)
    file_handler = logging.handlers.RotatingFileHandler(
        log_file, maxBytes=10*1024*1024, backupCount=5, encoding='utf-8'
    )
    file_handler.setLevel(getattr(logging, LOG_LEVEL))
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    # Additionally write error logs to errors.log
    if category != "errors":
        error_log_file = get_log_file("errors")
        error_handler = logging.handlers.RotatingFileHandler(
            error_log_file, maxBytes=10*1024*1024, backupCount=5, encoding='utf-8'
        )
        error_handler.setLevel(logging.ERROR)
        error_handler.setFormatter(file_formatter)
        logger.addHandler(error_handler)

    _loggers[category] = logger
    return logger
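
# Typical wiring sketch (the environment variable used for the debug flag is
# an illustrative assumption):
#
#   setup_logging(debug_mode=os.environ.get("MYP_DEBUG") == "1")
#   jobs_logger = get_logger("jobs")
#   jobs_logger.info("Job-Subsystem initialisiert")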

def log_startup_info():
    """Logs startup information."""
    app_logger = get_logger("app")
    rocket_emoji = safe_emoji("🚀")
    folder_emoji = safe_emoji("📂")
    chart_emoji = safe_emoji("📊")
    computer_emoji = safe_emoji("💻")
    globe_emoji = safe_emoji("🌐")
    calendar_emoji = safe_emoji("📅")

    app_logger.info("=" * 50)
    app_logger.info(f"{rocket_emoji} MYP (Manage Your Printers) wird gestartet...")
    app_logger.info(f"{folder_emoji} Log-Verzeichnis: {LOG_DIR}")
    app_logger.info(f"{chart_emoji} Log-Level: {LOG_LEVEL}")
    app_logger.info(f"{computer_emoji} Betriebssystem: {platform.system()} {platform.release()}")
    app_logger.info(f"{globe_emoji} Hostname: {socket.gethostname()}")
    app_logger.info(f"{calendar_emoji} Startzeit: {datetime.now().strftime('%d.%m.%Y %H:%M:%S')}")
    app_logger.info("=" * 50)

# Helper functions for debugging

def debug_request(logger: logging.Logger, request):
    """
    Logs detailed information about an HTTP request.

    Args:
        logger: Logger instance
        request: Flask request object
    """
    if logger.level > logging.DEBUG:
        return

    web_emoji = safe_emoji("🌐")
    signal_emoji = safe_emoji("📡")
    puzzle_emoji = safe_emoji("🧩")
    clipboard_emoji = safe_emoji("📋")
    search_emoji = safe_emoji("🔍")

    logger.debug(f"{web_emoji} HTTP-Anfrage: {request.method} {request.path}")
    logger.debug(f"{signal_emoji} Remote-Adresse: {request.remote_addr}")
    logger.debug(f"{puzzle_emoji} Inhaltstyp: {request.content_type}")

    # Only log the relevant headers
    important_headers = ['User-Agent', 'Referer', 'X-Forwarded-For', 'Authorization']
    headers = {k: v for k, v in request.headers.items() if k in important_headers}
    if headers:
        logger.debug(f"{clipboard_emoji} Wichtige Headers: {headers}")

    # Request parameters (at most 1000 characters)
    if request.args:
        args_str = str(request.args)
        if len(args_str) > 1000:
            args_str = args_str[:997] + "..."
        logger.debug(f"{search_emoji} URL-Parameter: {args_str}")

def debug_response(logger: logging.Logger, response, duration_ms: float = None):
    """
    Logs detailed information about an HTTP response.

    Args:
        logger: Logger instance
        response: Flask response object
        duration_ms: Processing time in milliseconds (optional)
    """
    if logger.level > logging.DEBUG:
        return

    status_emoji = safe_emoji("✅") if response.status_code < 400 else safe_emoji("❌")
    logger.debug(f"{status_emoji} HTTP-Antwort: {response.status_code}")

    if duration_ms is not None:
        timer_emoji = safe_emoji("⏱️")
        logger.debug(f"{timer_emoji} Verarbeitungsdauer: {duration_ms:.2f} ms")

    content_length = response.content_length or 0
    if content_length > 0:
        size_str = f"{content_length / 1024:.1f} KB" if content_length > 1024 else f"{content_length} Bytes"
        package_emoji = safe_emoji("📦")
        logger.debug(f"{package_emoji} Antwortgröße: {size_str}")

def measure_execution_time(func=None, logger=None, task_name=None):
    """
    Decorator that measures and logs the execution time of a function.

    Args:
        func: The function to decorate
        logger: Logger instance (optional)
        task_name: Name of the task for logging (optional)

    Returns:
        The decorated function
    """
    from functools import wraps

    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            start_time = time.time()
            result = f(*args, **kwargs)
            end_time = time.time()

            duration_ms = (end_time - start_time) * 1000
            name = task_name or f.__name__

            if logger:
                timer_emoji = safe_emoji('⏱️')
                if duration_ms > 1000:  # longer than 1 second
                    logger.warning(f"{timer_emoji} Langsame Ausführung: {name} - {duration_ms:.2f} ms")
                else:
                    logger.debug(f"{timer_emoji} Ausführungszeit: {name} - {duration_ms:.2f} ms")

            return result
        return wrapper

    if func:
        return decorator(func)
    return decorator
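
# Guarded demo for measure_execution_time; the decorated function and its
# sleep duration are illustrative assumptions.
if __name__ == "__main__":
    setup_logging(debug_mode=True)
    demo_logger = get_logger("app")

    @measure_execution_time(logger=demo_logger, task_name="demo_sleep")
    def demo_sleep():
        time.sleep(0.05)

    demo_sleep()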
158
backend/app - Kopie/utils/migrate_db.py
Normal file
@@ -0,0 +1,158 @@
#!/usr/bin/env python3
"""
Database migration script for guest requests, user permissions and notifications.
"""

import os
import sys
import sqlite3
from datetime import datetime

# Add the application root (the parent of utils/) to the import path so that
# models, utils and config resolve when the script is run directly
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from models import init_db, get_cached_session, GuestRequest, UserPermission, Notification, User
from utils.logging_config import get_logger
from config.settings import DATABASE_PATH

logger = get_logger("migrate")

def column_exists(cursor, table_name, column_name):
    """Checks whether a column exists in a table."""
    cursor.execute(f"PRAGMA table_info({table_name})")
    columns = [row[1] for row in cursor.fetchall()]
    return column_name in columns
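
# Usage sketch (table and column names mirror the migration below):
#
#   conn = sqlite3.connect(get_database_path())
#   if not column_exists(conn.cursor(), 'guest_requests', 'otp_used_at'):
#       ...  # run the ALTER TABLE migration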

def get_database_path():
    """Determines the path to the database file."""
    # Use the configured database path first
    if os.path.exists(DATABASE_PATH):
        return DATABASE_PATH

    # Fallbacks for alternative paths with the correct file name
    alternative_paths = [
        os.path.join('database', 'myp.db'),
        'myp.db',
        '../database/myp.db',
        './database/myp.db',
        # Legacy paths for backwards compatibility
        os.path.join('database', 'app.db'),
        'app.db',
        '../database/app.db',
        './database/app.db'
    ]

    for path in alternative_paths:
        if os.path.exists(path):
            return path

    # If no file is found, fall back to the configured path
    return DATABASE_PATH

def migrate_guest_requests_table():
    """Migrates the guest_requests table to add new columns."""
    db_path = get_database_path()

    if not os.path.exists(db_path):
        logger.warning(f"Datenbankdatei nicht gefunden: {db_path}")
        return False

    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()

        # Check whether the otp_used_at column already exists
        if not column_exists(cursor, 'guest_requests', 'otp_used_at'):
            cursor.execute("""
                ALTER TABLE guest_requests
                ADD COLUMN otp_used_at DATETIME
            """)
            logger.info("Spalte 'otp_used_at' zur guest_requests Tabelle hinzugefügt")
        else:
            logger.info("Spalte 'otp_used_at' existiert bereits")

        conn.commit()
        conn.close()
        return True

    except Exception as e:
        logger.error(f"Fehler bei der Migration der guest_requests Tabelle: {str(e)}")
        if 'conn' in locals():
            conn.rollback()
            conn.close()
        return False

def main():
    """Runs the database migration."""
    try:
        logger.info("Starte Datenbank-Migration...")

        # Initialize the database (creates new tables)
        init_db()

        # Column-specific migrations
        logger.info("Führe spezifische Tabellen-Migrationen aus...")
        migrate_guest_requests_table()

        logger.info("Datenbank-Migration erfolgreich abgeschlossen")

        # Verify that the new tables work
        test_new_tables()

    except Exception as e:
        logger.error(f"Fehler bei der Datenbank-Migration: {str(e)}")
        sys.exit(1)

def test_new_tables():
    """Tests whether the new tables were created correctly."""
    try:
        with get_cached_session() as session:
            # Test the GuestRequest table
            test_request = GuestRequest(
                name="Test User",
                email="test@example.com",
                reason="Test migration",
                duration_min=60
            )
            session.add(test_request)
            session.flush()

            # Test the UserPermission table (with the admin user if present)
            admin_user = session.query(User).filter_by(role="admin").first()
            if admin_user:
                # Check whether permissions already exist for this user
                existing_permission = session.query(UserPermission).filter_by(user_id=admin_user.id).first()

                if not existing_permission:
                    permission = UserPermission(
                        user_id=admin_user.id,
                        can_start_jobs=True,
                        needs_approval=False,
                        can_approve_jobs=True
                    )
                    session.add(permission)
                    session.flush()
                    logger.info(f"UserPermission für Admin-User {admin_user.id} erstellt")
                else:
                    logger.info(f"UserPermission für Admin-User {admin_user.id} existiert bereits")

                # Test the Notification table (requires the admin user)
                notification = Notification(
                    user_id=admin_user.id,
                    type="test",
                    payload='{"message": "Test notification"}'
                )
                session.add(notification)
                session.flush()

            # Discard the test data again
            session.rollback()

        logger.info("Alle neuen Tabellen wurden erfolgreich getestet")

    except Exception as e:
        logger.error(f"Fehler beim Testen der neuen Tabellen: {str(e)}")
        raise

if __name__ == "__main__":
    main()
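
# Invocation sketch; the working directory is an assumption based on the
# import layout:
#
#   cd "backend/app - Kopie" && python utils/migrate_db.py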
633
backend/app - Kopie/utils/permissions.py
Normal file
@@ -0,0 +1,633 @@
#!/usr/bin/env python3
"""
Extended permission management for the MYP platform.
Granular roles and permissions for fine-grained access control.
"""

from enum import Enum
from functools import wraps
from typing import List, Dict, Set, Optional
from flask import request, jsonify, abort
from flask_login import login_required, current_user
from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, Table, DateTime, MetaData
from sqlalchemy.orm import relationship
from datetime import datetime, timedelta
from utils.logging_config import get_logger

logger = get_logger("permissions")

# ===== PERMISSION DEFINITIONS =====

class Permission(Enum):
    """All permissions available in the system"""

    # Basic permissions
    LOGIN = "login"
    VIEW_DASHBOARD = "view_dashboard"

    # Printer permissions
    VIEW_PRINTERS = "view_printers"
    CREATE_PRINTER = "create_printer"
    EDIT_PRINTER = "edit_printer"
    DELETE_PRINTER = "delete_printer"
    CONTROL_PRINTER = "control_printer"  # switching on/off
    VIEW_PRINTER_DETAILS = "view_printer_details"

    # Job permissions
    VIEW_JOBS = "view_jobs"
    CREATE_JOB = "create_job"
    EDIT_OWN_JOB = "edit_own_job"
    EDIT_ALL_JOBS = "edit_all_jobs"
    DELETE_OWN_JOB = "delete_own_job"
    DELETE_ALL_JOBS = "delete_all_jobs"
    EXTEND_JOB = "extend_job"
    CANCEL_JOB = "cancel_job"
    VIEW_JOB_HISTORY = "view_job_history"

    # User permissions
    VIEW_USERS = "view_users"
    CREATE_USER = "create_user"
    EDIT_USER = "edit_user"
    DELETE_USER = "delete_user"
    MANAGE_ROLES = "manage_roles"
    VIEW_USER_DETAILS = "view_user_details"

    # Admin permissions
    VIEW_ADMIN_PANEL = "view_admin_panel"
    MANAGE_SYSTEM = "manage_system"
    VIEW_LOGS = "view_logs"
    EXPORT_DATA = "export_data"
    BACKUP_DATABASE = "backup_database"
    MANAGE_SETTINGS = "manage_settings"

    # Guest permissions
    VIEW_GUEST_REQUESTS = "view_guest_requests"
    CREATE_GUEST_REQUEST = "create_guest_request"
    APPROVE_GUEST_REQUEST = "approve_guest_request"
    DENY_GUEST_REQUEST = "deny_guest_request"
    MANAGE_GUEST_REQUESTS = "manage_guest_requests"

    # Statistics permissions
    VIEW_STATS = "view_stats"
    VIEW_DETAILED_STATS = "view_detailed_stats"
    EXPORT_STATS = "export_stats"

    # Calendar permissions
    VIEW_CALENDAR = "view_calendar"
    EDIT_CALENDAR = "edit_calendar"
    MANAGE_SHIFTS = "manage_shifts"

    # Maintenance permissions
    SCHEDULE_MAINTENANCE = "schedule_maintenance"
    VIEW_MAINTENANCE = "view_maintenance"
    PERFORM_MAINTENANCE = "perform_maintenance"

class Role(Enum):
    """Predefined roles with default permissions"""

    GUEST = "guest"
    USER = "user"
    POWER_USER = "power_user"
    TECHNICIAN = "technician"
    SUPERVISOR = "supervisor"
    ADMIN = "admin"
    SUPER_ADMIN = "super_admin"

# ===== ROLE PERMISSIONS MAPPING =====

ROLE_PERMISSIONS = {
    Role.GUEST: {
        Permission.LOGIN,
        Permission.VIEW_PRINTERS,
        Permission.CREATE_GUEST_REQUEST,
        Permission.VIEW_CALENDAR,
    },

    Role.USER: {
        Permission.LOGIN,
        Permission.VIEW_DASHBOARD,
        Permission.VIEW_PRINTERS,
        Permission.VIEW_JOBS,
        Permission.CREATE_JOB,
        Permission.EDIT_OWN_JOB,
        Permission.DELETE_OWN_JOB,
        Permission.EXTEND_JOB,
        Permission.CANCEL_JOB,
        Permission.VIEW_STATS,
        Permission.VIEW_CALENDAR,
        Permission.CREATE_GUEST_REQUEST,
    },
}

# Power user extends the user permissions
ROLE_PERMISSIONS[Role.POWER_USER] = ROLE_PERMISSIONS[Role.USER] | {
    Permission.VIEW_PRINTER_DETAILS,
    Permission.VIEW_JOB_HISTORY,
    Permission.VIEW_DETAILED_STATS,
    Permission.EXPORT_STATS,
    Permission.VIEW_GUEST_REQUESTS,
}

# Technician extends the power user permissions
ROLE_PERMISSIONS[Role.TECHNICIAN] = ROLE_PERMISSIONS[Role.POWER_USER] | {
    Permission.CONTROL_PRINTER,
    Permission.EDIT_PRINTER,
    Permission.SCHEDULE_MAINTENANCE,
    Permission.VIEW_MAINTENANCE,
    Permission.PERFORM_MAINTENANCE,
    Permission.EDIT_CALENDAR,
}

# Supervisor extends the technician permissions
ROLE_PERMISSIONS[Role.SUPERVISOR] = ROLE_PERMISSIONS[Role.TECHNICIAN] | {
    Permission.CREATE_PRINTER,
    Permission.EDIT_ALL_JOBS,
    Permission.DELETE_ALL_JOBS,
    Permission.VIEW_USERS,
    Permission.APPROVE_GUEST_REQUEST,
    Permission.DENY_GUEST_REQUEST,
    Permission.MANAGE_GUEST_REQUESTS,
    Permission.MANAGE_SHIFTS,
    Permission.VIEW_USER_DETAILS,
}

# Admin extends the supervisor permissions
ROLE_PERMISSIONS[Role.ADMIN] = ROLE_PERMISSIONS[Role.SUPERVISOR] | {
    Permission.DELETE_PRINTER,
    Permission.VIEW_ADMIN_PANEL,
    Permission.CREATE_USER,
    Permission.EDIT_USER,
    Permission.DELETE_USER,
    Permission.EXPORT_DATA,
    Permission.VIEW_LOGS,
    Permission.MANAGE_SETTINGS,
}

# Super admin has every permission
ROLE_PERMISSIONS[Role.SUPER_ADMIN] = {perm for perm in Permission}
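
# Sanity-check sketch of the role hierarchy built above: each role's set is a
# superset of the role it extends, so checks are plain set operations.
#
#   assert ROLE_PERMISSIONS[Role.USER] <= ROLE_PERMISSIONS[Role.POWER_USER]
#   assert Permission.CONTROL_PRINTER in ROLE_PERMISSIONS[Role.TECHNICIAN]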
# ===== DATABASE MODELS EXTENSIONS =====
|
||||
|
||||
# Metadata für die Tabellen erstellen
|
||||
metadata = MetaData()
|
||||
|
||||
# Many-to-Many Tabelle für User-Permissions
|
||||
user_permissions = Table('user_permissions', metadata,
|
||||
Column('user_id', Integer, ForeignKey('users.id'), primary_key=True),
|
||||
Column('permission_id', Integer, ForeignKey('permissions.id'), primary_key=True)
|
||||
)
|
||||
|
||||
# Many-to-Many Tabelle für User-Roles
|
||||
user_roles = Table('user_roles', metadata,
|
||||
Column('user_id', Integer, ForeignKey('users.id'), primary_key=True),
|
||||
Column('role_id', Integer, ForeignKey('roles.id'), primary_key=True)
|
||||
)
|
||||
|
||||
class PermissionModel:
|
||||
"""Datenbank-Model für Berechtigungen"""
|
||||
|
||||
__tablename__ = 'permissions'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
name = Column(String(100), unique=True, nullable=False)
|
||||
description = Column(String(255))
|
||||
category = Column(String(50)) # Gruppierung von Berechtigungen
|
||||
created_at = Column(DateTime, default=datetime.now)
|
||||
|
||||
class RoleModel:
|
||||
"""Datenbank-Model für Rollen"""
|
||||
|
||||
__tablename__ = 'roles'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
name = Column(String(50), unique=True, nullable=False)
|
||||
display_name = Column(String(100))
|
||||
description = Column(String(255))
|
||||
is_system_role = Column(Boolean, default=False) # System-Rollen können nicht gelöscht werden
|
||||
created_at = Column(DateTime, default=datetime.now)
|
||||
|
||||
# Relationships
|
||||
permissions = relationship("PermissionModel", secondary="role_permissions", back_populates="roles")
|
||||
|
||||
class UserPermissionOverride:
|
||||
"""Temporäre oder spezielle Berechtigungsüberschreibungen"""
|
||||
|
||||
__tablename__ = 'user_permission_overrides'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
|
||||
permission = Column(String(100), nullable=False)
|
||||
granted = Column(Boolean, nullable=False) # True = gewährt, False = verweigert
|
||||
reason = Column(String(255))
|
||||
granted_by = Column(Integer, ForeignKey('users.id'))
|
||||
expires_at = Column(DateTime, nullable=True) # NULL = permanent
|
||||
created_at = Column(DateTime, default=datetime.now)
|
||||
|
||||
# ===== PERMISSION CHECKER CLASS =====
|
||||
|
||||
class PermissionChecker:
|
||||
"""Hauptklasse für Berechtigungsprüfungen"""
|
||||
|
||||
def __init__(self, user=None):
|
||||
self.user = user or current_user
|
||||
self._permission_cache = {}
|
||||
self._cache_timeout = timedelta(minutes=5)
|
||||
self._cache_timestamp = None
|
||||
|
||||
def has_permission(self, permission: Permission) -> bool:
|
||||
"""
|
||||
Prüft ob der Benutzer eine bestimmte Berechtigung hat
|
||||
|
||||
Args:
|
||||
permission: Die zu prüfende Berechtigung
|
||||
|
||||
Returns:
|
||||
bool: True wenn Berechtigung vorhanden
|
||||
"""
|
||||
if not self.user or not self.user.is_authenticated:
|
||||
return False
|
||||
|
||||
# Cache prüfen
|
||||
if self._is_cache_valid() and permission.value in self._permission_cache:
|
||||
return self._permission_cache[permission.value]
|
||||
|
||||
# Berechtigungen neu berechnen
|
||||
has_perm = self._calculate_permission(permission)
|
||||
|
||||
# Cache aktualisieren
|
||||
self._update_cache(permission.value, has_perm)
|
||||
|
||||
return has_perm
|
||||
|
||||
def _calculate_permission(self, permission: Permission) -> bool:
|
||||
"""Berechnet ob eine Berechtigung vorhanden ist"""
|
||||
|
||||
# Super Admin hat alle Rechte
|
||||
if hasattr(self.user, 'is_super_admin') and self.user.is_super_admin:
|
||||
return True
|
||||
|
||||
# Explizite Überschreibungen prüfen
|
||||
override = self._check_permission_override(permission)
|
||||
if override is not None:
|
||||
return override
|
||||
|
||||
# Rollen-basierte Berechtigungen prüfen
|
||||
user_roles = self._get_user_roles()
|
||||
for role in user_roles:
|
||||
if permission in ROLE_PERMISSIONS.get(role, set()):
|
||||
return True
|
||||
|
||||
# Direkte Benutzer-Berechtigungen prüfen
|
||||
if hasattr(self.user, 'permissions'):
|
||||
user_permissions = [Permission(p.name) for p in self.user.permissions if hasattr(Permission, p.name.upper())]
|
||||
if permission in user_permissions:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _check_permission_override(self, permission: Permission) -> Optional[bool]:
|
||||
"""Prüft ob es eine Berechtigungsüberschreibung gibt"""
|
||||
if not hasattr(self.user, 'permission_overrides'):
|
||||
return None
|
||||
|
||||
now = datetime.now()
|
||||
for override in self.user.permission_overrides:
|
||||
if (override.permission == permission.value and
|
||||
(override.expires_at is None or override.expires_at > now)):
|
||||
logger.info(f"Permission override angewendet: {permission.value} = {override.granted} für User {self.user.id}")
|
||||
return override.granted
|
||||
|
||||
return None
|
||||
|
||||
def _get_user_roles(self) -> List[Role]:
|
||||
"""Holt die Rollen des Benutzers"""
|
||||
roles = []
|
||||
|
||||
# Legacy Admin-Check
|
||||
if hasattr(self.user, 'is_admin') and self.user.is_admin:
|
||||
roles.append(Role.ADMIN)
|
||||
|
||||
# Neue Rollen-System
|
||||
if hasattr(self.user, 'roles'):
|
||||
for role_model in self.user.roles:
|
||||
try:
|
||||
role = Role(role_model.name)
|
||||
roles.append(role)
|
||||
except ValueError:
|
||||
logger.warning(f"Unbekannte Rolle: {role_model.name}")
|
||||
|
||||
# Standard-Rolle wenn keine andere definiert
|
||||
if not roles:
|
||||
roles.append(Role.USER)
|
||||
|
||||
return roles
|
||||
|
||||
def _is_cache_valid(self) -> bool:
|
||||
"""Prüft ob der Permission-Cache noch gültig ist"""
|
||||
if self._cache_timestamp is None:
|
||||
return False
|
||||
|
||||
return datetime.now() - self._cache_timestamp < self._cache_timeout
|
||||
|
||||
def _update_cache(self, permission: str, has_permission: bool):
|
||||
"""Aktualisiert den Permission-Cache"""
|
||||
        if self._cache_timestamp is None or not self._is_cache_valid():
            self._permission_cache = {}
            self._cache_timestamp = datetime.now()

        self._permission_cache[permission] = has_permission

    def get_all_permissions(self) -> Set[Permission]:
        """Gibt alle Berechtigungen des Benutzers zurück"""
        permissions = set()

        for permission in Permission:
            if self.has_permission(permission):
                permissions.add(permission)

        return permissions

    def can_access_resource(self, resource_type: str, resource_id: int = None, action: str = "view") -> bool:
        """
        Prüft Zugriff auf spezifische Ressourcen

        Args:
            resource_type: Art der Ressource (job, printer, user, etc.)
            resource_id: ID der Ressource (optional)
            action: Aktion (view, edit, delete, etc.)

        Returns:
            bool: True wenn Zugriff erlaubt
        """
        # Resource-spezifische Logik
        if resource_type == "job":
            return self._check_job_access(resource_id, action)
        elif resource_type == "printer":
            return self._check_printer_access(resource_id, action)
        elif resource_type == "user":
            return self._check_user_access(resource_id, action)

        return False

    def _check_job_access(self, job_id: int, action: str) -> bool:
        """Prüft Job-spezifische Zugriffsrechte"""
        if action == "view":
            if self.has_permission(Permission.VIEW_JOBS):
                return True
        elif action == "edit":
            if self.has_permission(Permission.EDIT_ALL_JOBS):
                return True
            if self.has_permission(Permission.EDIT_OWN_JOB) and job_id:
                # Prüfen ob eigener Job
                return self._is_own_job(job_id)
        elif action == "delete":
            if self.has_permission(Permission.DELETE_ALL_JOBS):
                return True
            if self.has_permission(Permission.DELETE_OWN_JOB) and job_id:
                return self._is_own_job(job_id)

        return False

    def _check_printer_access(self, printer_id: int, action: str) -> bool:
        """Prüft Drucker-spezifische Zugriffsrechte"""
        if action == "view":
            return self.has_permission(Permission.VIEW_PRINTERS)
        elif action == "edit":
            return self.has_permission(Permission.EDIT_PRINTER)
        elif action == "delete":
            return self.has_permission(Permission.DELETE_PRINTER)
        elif action == "control":
            return self.has_permission(Permission.CONTROL_PRINTER)

        return False

    def _check_user_access(self, user_id: int, action: str) -> bool:
        """Prüft Benutzer-spezifische Zugriffsrechte"""
        if action == "view":
            if self.has_permission(Permission.VIEW_USERS):
                return True
            # Eigenes Profil ansehen
            if user_id == self.user.id:
                return True
        elif action == "edit":
            if self.has_permission(Permission.EDIT_USER):
                return True
            # Eigenes Profil bearbeiten (begrenzt)
            if user_id == self.user.id:
                return True
        elif action == "delete":
            if self.has_permission(Permission.DELETE_USER) and user_id != self.user.id:
                return True

        return False

    def _is_own_job(self, job_id: int) -> bool:
        """Hilfsfunktion um zu prüfen ob Job dem Benutzer gehört"""
        # Direkte DB-Abfrage auf Job-Eigentümerschaft; Session wird sofort wieder geschlossen
        try:
            from models import Job, get_db_session
            db_session = get_db_session()
            job = db_session.query(Job).filter(Job.id == job_id).first()
            db_session.close()

            return job is not None and (job.user_id == self.user.id or job.owner_id == self.user.id)
        except Exception as e:
            logger.error(f"Fehler bei Job-Ownership-Check: {e}")
            return False

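# --- Anwendungsbeispiel (Skizze, nicht Teil des Originals) ---
# Annahme: Aufruf im Request-Kontext mit eingeloggtem current_user;
# die Job-ID 42 ist hypothetisch.
def _beispiel_permission_checker():
    checker = PermissionChecker()
    if checker.can_access_resource("job", resource_id=42, action="edit"):
        print("Job 42 darf bearbeitet werden")
    print(f"Alle Berechtigungen: {checker.get_all_permissions()}")
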
# ===== DECORATORS =====

def require_permission(permission: Permission):
    """
    Decorator der eine bestimmte Berechtigung erfordert

    Args:
        permission: Die erforderliche Berechtigung
    """
    def decorator(f):
        @wraps(f)
        @login_required
        def wrapper(*args, **kwargs):
            checker = PermissionChecker()

            if not checker.has_permission(permission):
                logger.warning(f"Zugriff verweigert: User {current_user.id} hat keine Berechtigung {permission.value}")

                if request.path.startswith('/api/'):
                    return jsonify({
                        'error': 'Insufficient permissions',
                        'message': f'Berechtigung "{permission.value}" erforderlich',
                        'required_permission': permission.value
                    }), 403
                else:
                    abort(403)

            return f(*args, **kwargs)

        return wrapper
    return decorator

def require_role(role: Role):
    """
    Decorator der eine bestimmte Rolle erfordert

    Args:
        role: Die erforderliche Rolle
    """
    def decorator(f):
        @wraps(f)
        @login_required
        def wrapper(*args, **kwargs):
            checker = PermissionChecker()
            user_roles = checker._get_user_roles()

            if role not in user_roles:
                logger.warning(f"Zugriff verweigert: User {current_user.id} hat nicht die Rolle {role.value}")

                if request.path.startswith('/api/'):
                    return jsonify({
                        'error': 'Insufficient role',
                        'message': f'Rolle "{role.value}" erforderlich',
                        'required_role': role.value
                    }), 403
                else:
                    abort(403)

            return f(*args, **kwargs)

        return wrapper
    return decorator

def require_resource_access(resource_type: str, action: str = "view"):
    """
    Decorator für ressourcen-spezifische Berechtigungsprüfung

    Args:
        resource_type: Art der Ressource
        action: Erforderliche Aktion
    """
    def decorator(f):
        @wraps(f)
        @login_required
        def wrapper(*args, **kwargs):
            # Resource ID aus URL-Parametern extrahieren
            resource_id = kwargs.get('id') or kwargs.get(f'{resource_type}_id')

            checker = PermissionChecker()

            if not checker.can_access_resource(resource_type, resource_id, action):
                logger.warning(f"Ressourcen-Zugriff verweigert: User {current_user.id}, {resource_type}:{resource_id}, Action: {action}")

                if request.path.startswith('/api/'):
                    return jsonify({
                        'error': 'Resource access denied',
                        'message': f'Zugriff auf {resource_type} nicht erlaubt',
                        'resource_type': resource_type,
                        'action': action
                    }), 403
                else:
                    abort(403)

            return f(*args, **kwargs)

        return wrapper
    return decorator

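# --- Anwendungsbeispiele (Skizze, nicht Teil des Originals) ---
# Annahme: "app" ist die Flask-Instanz; Routen und Funktionsnamen sind hypothetisch.
# require_resource_access liest die Ressourcen-ID aus den URL-Parametern
# ('id' oder hier 'job_id').
def _beispiel_geschuetzte_routen(app):
    @app.route('/api/printers', methods=['POST'])
    @require_permission(Permission.EDIT_PRINTER)
    def create_printer():
        return jsonify({'ok': True})

    @app.route('/api/jobs/<int:job_id>', methods=['DELETE'])
    @require_resource_access('job', action='delete')
    def delete_job(job_id):
        return jsonify({'deleted': job_id})
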
# ===== UTILITY FUNCTIONS =====

def check_permission(permission: Permission, user=None) -> bool:
    """
    Standalone-Funktion zur Berechtigungsprüfung

    Args:
        permission: Die zu prüfende Berechtigung
        user: Benutzer (optional, default: current_user)

    Returns:
        bool: True wenn Berechtigung vorhanden
    """
    checker = PermissionChecker(user)
    return checker.has_permission(permission)

def get_user_permissions(user=None) -> Set[Permission]:
    """
    Gibt alle Berechtigungen eines Benutzers zurück

    Args:
        user: Benutzer (optional, default: current_user)

    Returns:
        Set[Permission]: Alle Berechtigungen des Benutzers
    """
    checker = PermissionChecker(user)
    return checker.get_all_permissions()

def grant_temporary_permission(user_id: int, permission: Permission, duration_hours: int = 24, reason: str = "", granted_by_id: int = None):
    """
    Gewährt temporäre Berechtigung

    Args:
        user_id: ID des Benutzers
        permission: Die zu gewährende Berechtigung
        duration_hours: Dauer in Stunden
        reason: Begründung
        granted_by_id: ID des gewährenden Benutzers
    """
    try:
        from models import get_db_session
        db_session = get_db_session()

        override = UserPermissionOverride(
            user_id=user_id,
            permission=permission.value,
            granted=True,
            reason=reason,
            granted_by=granted_by_id or (current_user.id if current_user.is_authenticated else None),
            expires_at=datetime.now() + timedelta(hours=duration_hours)
        )

        db_session.add(override)
        db_session.commit()
        db_session.close()

        logger.info(f"Temporäre Berechtigung gewährt: {permission.value} für User {user_id} ({duration_hours}h)")

    except Exception as e:
        logger.error(f"Fehler beim Gewähren temporärer Berechtigung: {e}")

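# --- Beispielaufruf (Skizze, nicht Teil des Originals) ---
# Annahme: die User-IDs sind hypothetisch; Permission.DELETE_OWN_JOB stammt aus
# der oben verwendeten Enum.
def _beispiel_temporaere_berechtigung():
    grant_temporary_permission(
        user_id=42,
        permission=Permission.DELETE_OWN_JOB,
        duration_hours=8,
        reason="Aufräumaktion am Wochenende",
        granted_by_id=1,
    )
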
# ===== TEMPLATE HELPERS =====

def init_permission_helpers(app):
    """
    Registriert Template-Helper für Berechtigungen

    Args:
        app: Flask-App-Instanz
    """

    @app.template_global()
    def has_permission(permission_name: str) -> bool:
        """Template Helper für Berechtigungsprüfung"""
        try:
            permission = Permission(permission_name)
            return check_permission(permission)
        except ValueError:
            return False

    @app.template_global()
    def has_role(role_name: str) -> bool:
        """Template Helper für Rollenprüfung"""
        try:
            role = Role(role_name)
            checker = PermissionChecker()
            return role in checker._get_user_roles()
        except ValueError:
            return False

    @app.template_global()
    def can_access(resource_type: str, resource_id: int = None, action: str = "view") -> bool:
        """Template Helper für Ressourcen-Zugriff"""
        checker = PermissionChecker()
        return checker.can_access_resource(resource_type, resource_id, action)

    logger.info("🔐 Permission Template Helpers registriert")
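
# --- Template-Beispiel (Skizze, nicht Teil des Originals) ---
# Annahme: init_permission_helpers(app) wurde beim App-Start aufgerufen; die
# Werte 'control_printer' und 'admin' sind hypothetische Enum-Strings.
# In Jinja2-Templates stehen die Helper dann global zur Verfügung:
#
#   {% if has_permission('control_printer') %}
#     <button>Steckdose schalten</button>
#   {% endif %}
#   {% if has_role('admin') and can_access('job', job.id, 'delete') %}
#     <button>Job löschen</button>
#   {% endif %}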
695
backend/app - Kopie/utils/printer_monitor.py
Normal file
@@ -0,0 +1,695 @@
"""
Live-Drucker-Monitor für MYP Platform
Überwacht Druckerstatus in Echtzeit mit Session-Caching und automatischer Steckdosen-Initialisierung.
"""

import time
import threading
import requests
import subprocess
import ipaddress
from datetime import datetime, timedelta
from typing import Dict, Tuple, List, Optional
from flask import session
from sqlalchemy import func
from sqlalchemy.orm import Session
import os

from models import get_db_session, Printer
from utils.logging_config import get_logger
from config.settings import PRINTERS, TAPO_USERNAME, TAPO_PASSWORD, DEFAULT_TAPO_IPS, TAPO_AUTO_DISCOVERY

# TP-Link Tapo P110 Unterstützung hinzufügen
try:
    from PyP100 import PyP100
    TAPO_AVAILABLE = True
except ImportError:
    TAPO_AVAILABLE = False

# Logger initialisieren
monitor_logger = get_logger("printer_monitor")

class PrinterMonitor:
    """
    Live-Drucker-Monitor mit Session-Caching und automatischer Initialisierung.
    """

    def __init__(self):
        self.session_cache = {}  # Session-basierter Cache für schnelle Zugriffe
        self.db_cache = {}  # Datenbank-Cache für persistente Daten
        self.cache_lock = threading.Lock()
        self.last_db_sync = datetime.now()
        self.monitoring_active = False
        self.monitor_thread = None
        self.startup_initialized = False
        self.auto_discovered_tapo = False

        # Cache-Konfiguration
        self.session_cache_ttl = 30  # 30 Sekunden für Session-Cache
        self.db_cache_ttl = 300  # 5 Minuten für DB-Cache

        monitor_logger.info("🖨️ Drucker-Monitor initialisiert")

        # Automatische Steckdosenerkennung in separatem Thread starten, falls aktiviert
        if TAPO_AUTO_DISCOVERY:
            discovery_thread = threading.Thread(
                target=self._run_auto_discovery,
                daemon=True,
                name="TapoAutoDiscovery"
            )
            discovery_thread.start()
            monitor_logger.info("🔍 Automatische Tapo-Erkennung in separatem Thread gestartet")

    def _run_auto_discovery(self):
        """
        Führt die automatische Tapo-Erkennung in einem separaten Thread aus.
        """
        try:
            # Kurze Verzögerung, damit die Hauptanwendung Zeit hat zu starten
            time.sleep(2)
            self.auto_discover_tapo_outlets()
        except Exception as e:
            monitor_logger.error(f"❌ Fehler bei automatischer Tapo-Erkennung: {str(e)}")

    def initialize_all_outlets_on_startup(self) -> Dict[str, bool]:
        """
        Schaltet beim Programmstart alle gespeicherten Steckdosen aus (gleicher Startzustand).

        Returns:
            Dict[str, bool]: Ergebnis der Initialisierung pro Drucker
        """
        if self.startup_initialized:
            monitor_logger.info("🔄 Steckdosen bereits beim Start initialisiert")
            return {}

        monitor_logger.info("🚀 Starte Steckdosen-Initialisierung beim Programmstart...")
        results = {}

        try:
            db_session = get_db_session()
            printers = db_session.query(Printer).filter(Printer.active == True).all()

            if not printers:
                monitor_logger.warning("⚠️ Keine aktiven Drucker zur Initialisierung gefunden")
                db_session.close()
                self.startup_initialized = True
                return results

            # Alle Steckdosen ausschalten für einheitlichen Startzustand
            for printer in printers:
                try:
                    if printer.plug_ip and printer.plug_username and printer.plug_password:
                        success = self._turn_outlet_off(
                            printer.plug_ip,
                            printer.plug_username,
                            printer.plug_password
                        )

                        results[printer.name] = success

                        if success:
                            monitor_logger.info(f"✅ {printer.name}: Steckdose ausgeschaltet")
                            # Status in Datenbank aktualisieren
                            printer.status = "offline"
                            printer.last_checked = datetime.now()
                        else:
                            monitor_logger.warning(f"❌ {printer.name}: Steckdose konnte nicht ausgeschaltet werden")
                    else:
                        monitor_logger.warning(f"⚠️ {printer.name}: Unvollständige Steckdosen-Konfiguration")
                        results[printer.name] = False

                except Exception as e:
                    monitor_logger.error(f"❌ Fehler bei Initialisierung von {printer.name}: {str(e)}")
                    results[printer.name] = False

            # Änderungen speichern
            db_session.commit()
            db_session.close()

            success_count = sum(1 for success in results.values() if success)
            total_count = len(results)

            monitor_logger.info(f"🎯 Steckdosen-Initialisierung abgeschlossen: {success_count}/{total_count} erfolgreich")
            self.startup_initialized = True

        except Exception as e:
            monitor_logger.error(f"❌ Kritischer Fehler bei Steckdosen-Initialisierung: {str(e)}")
            results = {}

        return results

    def _turn_outlet_off(self, ip_address: str, username: str, password: str, timeout: int = 5) -> bool:
        """
        Schaltet eine TP-Link Tapo P110-Steckdose aus.

        Args:
            ip_address: IP-Adresse der Steckdose
            username: Benutzername für die Steckdose (wird überschrieben)
            password: Passwort für die Steckdose (wird überschrieben)
            timeout: Timeout in Sekunden (wird ignoriert, da PyP100 eigenes Timeout hat)

        Returns:
            bool: True wenn erfolgreich ausgeschaltet
        """
        if not TAPO_AVAILABLE:
            monitor_logger.error("⚠️ PyP100-Modul nicht verfügbar - kann Tapo-Steckdose nicht schalten")
            return False

        # IMMER globale Anmeldedaten verwenden (da diese funktionieren)
        username = TAPO_USERNAME
        password = TAPO_PASSWORD
        monitor_logger.debug(f"🔧 Verwende globale Tapo-Anmeldedaten für {ip_address}")

        try:
            # TP-Link Tapo Verbindung über die P100-Klasse herstellen (P100 statt P110)
            from PyP100 import PyP100
            p100 = PyP100.P100(ip_address, username, password)
            p100.handshake()  # Authentifizierung
            p100.login()  # Login

            # Steckdose ausschalten
            p100.turnOff()
            monitor_logger.debug(f"✅ Tapo-Steckdose {ip_address} erfolgreich ausgeschaltet")
            return True

        except Exception as e:
            monitor_logger.debug(f"⚠️ Fehler beim Ausschalten der Tapo-Steckdose {ip_address}: {str(e)}")
            return False

    def get_live_printer_status(self, use_session_cache: bool = True) -> Dict[int, Dict]:
        """
        Holt Live-Druckerstatus mit Session- und DB-Caching.

        Args:
            use_session_cache: Ob Session-Cache verwendet werden soll

        Returns:
            Dict[int, Dict]: Status-Dict mit Drucker-ID als Key
        """
        current_time = datetime.now()

        # Session-Cache prüfen (nur wenn aktiviert)
        if use_session_cache and hasattr(session, 'get'):
            session_key = "printer_status_cache"
            session_timestamp_key = "printer_status_timestamp"

            cached_data = session.get(session_key)
            cached_timestamp = session.get(session_timestamp_key)

            if cached_data and cached_timestamp:
                cache_age = (current_time - datetime.fromisoformat(cached_timestamp)).total_seconds()
                if cache_age < self.session_cache_ttl:
                    monitor_logger.debug("📋 Verwende Session-Cache für Druckerstatus")
                    return cached_data

        # DB-Cache prüfen
        with self.cache_lock:
            if self.db_cache and (current_time - self.last_db_sync).total_seconds() < self.db_cache_ttl:
                monitor_logger.debug("🗃️ Verwende DB-Cache für Druckerstatus")

                # Session-Cache aktualisieren
                if use_session_cache and hasattr(session, '__setitem__'):
                    session["printer_status_cache"] = self.db_cache
                    session["printer_status_timestamp"] = current_time.isoformat()

                return self.db_cache

        # Live-Status von Druckern abrufen
        monitor_logger.info("🔄 Aktualisiere Live-Druckerstatus...")
        status_dict = self._fetch_live_printer_status()

        # Caches aktualisieren
        with self.cache_lock:
            self.db_cache = status_dict
            self.last_db_sync = current_time

        if use_session_cache and hasattr(session, '__setitem__'):
            session["printer_status_cache"] = status_dict
            session["printer_status_timestamp"] = current_time.isoformat()

        return status_dict

    def _fetch_live_printer_status(self) -> Dict[int, Dict]:
        """
        Holt den aktuellen Status aller Drucker direkt von den Geräten.

        Returns:
            Dict[int, Dict]: Status-Dict mit umfassenden Informationen
        """
        status_dict = {}

        try:
            db_session = get_db_session()
            printers = db_session.query(Printer).filter(Printer.active == True).all()

            # Wenn keine aktiven Drucker vorhanden sind, leeres Dict zurückgeben
            if not printers:
                monitor_logger.info("ℹ️ Keine aktiven Drucker gefunden")
                db_session.close()
                return status_dict

            monitor_logger.info(f"🔍 Prüfe Status von {len(printers)} aktiven Druckern...")

            # Parallel-Status-Prüfung mit ThreadPoolExecutor
            from concurrent.futures import ThreadPoolExecutor, as_completed

            # Sicherstellen, dass max_workers mindestens 1 ist
            max_workers = min(max(len(printers), 1), 8)

            with ThreadPoolExecutor(max_workers=max_workers) as executor:
                future_to_printer = {
                    executor.submit(self._check_single_printer_status, printer): printer
                    for printer in printers
                }

                for future in as_completed(future_to_printer, timeout=15):
                    printer = future_to_printer[future]
                    try:
                        status_info = future.result()
                        status_dict[printer.id] = status_info

                        # Status in Datenbank aktualisieren
                        printer.status = status_info["status"]
                        printer.last_checked = datetime.now()

                    except Exception as e:
                        monitor_logger.error(f"❌ Fehler bei Status-Check für Drucker {printer.name}: {str(e)}")
                        status_dict[printer.id] = {
                            "id": printer.id,
                            "name": printer.name,
                            "status": "offline",
                            "active": False,
                            "ip_address": printer.ip_address,
                            "plug_ip": printer.plug_ip,
                            "location": printer.location,
                            "last_checked": datetime.now().isoformat(),
                            "error": str(e)
                        }

            # Änderungen in Datenbank speichern
            db_session.commit()
            db_session.close()

            monitor_logger.info(f"✅ Status-Update abgeschlossen für {len(status_dict)} Drucker")

        except Exception as e:
            monitor_logger.error(f"❌ Kritischer Fehler beim Abrufen des Live-Status: {str(e)}")

        return status_dict

    def _check_single_printer_status(self, printer: Printer, timeout: int = 7) -> Dict:
        """
        Überprüft den Status eines einzelnen Druckers basierend auf der Steckdosen-Logik:
        - Steckdose erreichbar aber AUS = Drucker ONLINE (bereit zum Drucken)
        - Steckdose erreichbar und AN = Drucker PRINTING (druckt gerade)
        - Steckdose nicht erreichbar = Drucker OFFLINE (kritischer Fehler)

        Args:
            printer: Printer-Objekt aus der Datenbank
            timeout: Timeout in Sekunden

        Returns:
            Dict: Umfassende Status-Informationen
        """
        status_info = {
            "id": printer.id,
            "name": printer.name,
            "status": "offline",
            "active": False,
            "ip_address": printer.ip_address,
            "plug_ip": printer.plug_ip,
            "location": printer.location,
            "last_checked": datetime.now().isoformat(),
            "ping_successful": False,
            "outlet_reachable": False,
            "outlet_state": "unknown"
        }

        try:
            # 1. Ping-Test für Grundkonnektivität
            if printer.plug_ip:
                ping_success = self._ping_address(printer.plug_ip, timeout=3)
                status_info["ping_successful"] = ping_success

                if ping_success:
                    # 2. Smart-Plug-Status prüfen
                    outlet_reachable, outlet_state = self._check_outlet_status(
                        printer.plug_ip,
                        printer.plug_username,
                        printer.plug_password,
                        timeout
                    )

                    status_info["outlet_reachable"] = outlet_reachable
                    status_info["outlet_state"] = outlet_state

                    # 🎯 KORREKTE LOGIK: Steckdose erreichbar = Drucker funktionsfähig
                    if outlet_reachable:
                        if outlet_state == "off":
                            # Steckdose aus = Drucker ONLINE (bereit zum Drucken)
                            status_info["status"] = "online"
                            status_info["active"] = True
                            monitor_logger.debug(f"✅ {printer.name}: ONLINE (Steckdose aus - bereit zum Drucken)")
                        elif outlet_state == "on":
                            # Steckdose an = Drucker PRINTING (druckt gerade)
                            status_info["status"] = "printing"
                            status_info["active"] = True
                            monitor_logger.debug(f"🖨️ {printer.name}: PRINTING (Steckdose an - druckt gerade)")
                        else:
                            # Unbekannter Steckdosen-Status
                            status_info["status"] = "error"
                            status_info["active"] = False
                            monitor_logger.warning(f"⚠️ {printer.name}: Unbekannter Steckdosen-Status '{outlet_state}'")
                    else:
                        # Steckdose nicht erreichbar = kritischer Fehler
                        status_info["status"] = "offline"
                        status_info["active"] = False
                        monitor_logger.warning(f"❌ {printer.name}: OFFLINE (Steckdose nicht erreichbar)")
                else:
                    # Ping fehlgeschlagen = Netzwerkproblem
                    status_info["status"] = "unreachable"
                    status_info["active"] = False
                    monitor_logger.warning(f"🔌 {printer.name}: UNREACHABLE (Ping fehlgeschlagen)")
            else:
                # Keine Steckdosen-IP konfiguriert
                status_info["status"] = "unconfigured"
                status_info["active"] = False
                monitor_logger.info(f"⚙️ {printer.name}: UNCONFIGURED (keine Steckdosen-IP)")

        except Exception as e:
            monitor_logger.error(f"❌ Fehler bei Status-Check für {printer.name}: {str(e)}")
            status_info["error"] = str(e)
            status_info["status"] = "error"
            status_info["active"] = False

        return status_info

    def _ping_address(self, ip_address: str, timeout: int = 3) -> bool:
        """
        Führt einen Konnektivitätstest zu einer IP-Adresse durch.
        Verwendet ausschließlich TCP-Verbindungen statt Ping, um Encoding-Probleme zu vermeiden.

        Args:
            ip_address: Zu testende IP-Adresse
            timeout: Timeout in Sekunden

        Returns:
            bool: True wenn Verbindung erfolgreich
        """
        try:
            # IP-Adresse validieren
            ipaddress.ip_address(ip_address.strip())

            import socket

            # Ports in Prioritätsreihenfolge testen:
            # 9999 (Tapo-Standard), 80 (HTTP), 443 (HTTPS)
            for port in (9999, 80, 443):
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(timeout)
                result = sock.connect_ex((ip_address.strip(), port))
                sock.close()
                if result == 0:
                    return True

            return False

        except Exception as e:
            monitor_logger.debug(f"❌ Fehler beim Verbindungstest zu {ip_address}: {str(e)}")
            return False

    def _check_outlet_status(self, ip_address: str, username: str, password: str, timeout: int = 5) -> Tuple[bool, str]:
        """
        Überprüft den Status einer TP-Link Tapo P110-Steckdose.

        Args:
            ip_address: IP-Adresse der Steckdose
            username: Benutzername für die Steckdose
            password: Passwort für die Steckdose
            timeout: Timeout in Sekunden (wird ignoriert, da PyP100 eigenes Timeout hat)

        Returns:
            Tuple[bool, str]: (Erreichbar, Status) - Status: "on", "off", "unknown"
        """
        if not TAPO_AVAILABLE:
            monitor_logger.debug("⚠️ PyP100-Modul nicht verfügbar - kann Tapo-Steckdosen-Status nicht abfragen")
            return False, "unknown"

        # IMMER globale Anmeldedaten verwenden (da diese funktionieren)
        username = TAPO_USERNAME
        password = TAPO_PASSWORD
        monitor_logger.debug(f"🔧 Verwende globale Tapo-Anmeldedaten für {ip_address}")

        try:
            # TP-Link Tapo Verbindung über die P100-Klasse herstellen (P100 statt P110)
            from PyP100 import PyP100
            p100 = PyP100.P100(ip_address, username, password)
            p100.handshake()  # Authentifizierung
            p100.login()  # Login

            # Geräteinformationen abrufen
            device_info = p100.getDeviceInfo()

            # Status auswerten
            device_on = device_info.get('device_on', False)
            status = "on" if device_on else "off"

            monitor_logger.debug(f"✅ Tapo-Steckdose {ip_address}: Status = {status}")
            return True, status

        except Exception as e:
            monitor_logger.debug(f"⚠️ Fehler bei Tapo-Steckdosen-Status-Check {ip_address}: {str(e)}")
            return False, "unknown"

    def clear_all_caches(self):
        """Löscht alle Caches (Session und DB)."""
        with self.cache_lock:
            self.db_cache = {}
            self.last_db_sync = datetime.now()

        if hasattr(session, 'pop'):
            session.pop("printer_status_cache", None)
            session.pop("printer_status_timestamp", None)

        monitor_logger.info("🧹 Alle Drucker-Caches gelöscht")

    def get_printer_summary(self) -> Dict[str, int]:
        """
        Gibt eine Zusammenfassung der Druckerstatus zurück.

        Returns:
            Dict[str, int]: Anzahl Drucker pro Status
        """
        status_dict = self.get_live_printer_status()

        summary = {
            "total": len(status_dict),
            "online": 0,
            "offline": 0,
            "printing": 0,  # Neuer Status: Drucker druckt gerade
            "standby": 0,
            "unreachable": 0,
            "unconfigured": 0,
            "error": 0  # Status für unbekannte Fehler
        }

        for printer_info in status_dict.values():
            status = printer_info.get("status", "offline")
            if status in summary:
                summary[status] += 1
            else:
                # Fallback für unbekannte Status
                summary["offline"] += 1

        return summary

    def auto_discover_tapo_outlets(self) -> Dict[str, bool]:
        """
        Automatische Erkennung und Konfiguration von TP-Link Tapo P110-Steckdosen im Netzwerk.
        Robuste Version mit Timeout-Behandlung und Fehler-Resilienz.

        Returns:
            Dict[str, bool]: Ergebnis der Steckdosenerkennung mit IP als Schlüssel
        """
        if self.auto_discovered_tapo:
            monitor_logger.info("🔍 Tapo-Steckdosen wurden bereits erkannt")
            return {}

        monitor_logger.info("🔍 Starte automatische Tapo-Steckdosenerkennung...")
        results = {}
        start_time = time.time()

        # 1. Zuerst die Standard-IPs aus der Konfiguration testen
        monitor_logger.info(f"🔄 Teste {len(DEFAULT_TAPO_IPS)} Standard-IPs aus der Konfiguration")

        for i, ip in enumerate(DEFAULT_TAPO_IPS):
            try:
                # Fortschrittsmeldung
                monitor_logger.info(f"🔍 Teste IP {i+1}/{len(DEFAULT_TAPO_IPS)}: {ip}")

                # Reduzierte Timeouts für schnellere Erkennung
                ping_success = self._ping_address(ip, timeout=2)

                if ping_success:
                    monitor_logger.info(f"✅ Steckdose mit IP {ip} ist erreichbar")

                    # Tapo-Verbindung testen mit Timeout-Schutz
                    if TAPO_AVAILABLE:
                        try:
                            # Timeout für Tapo-Verbindung
                            import signal

                            def timeout_handler(signum, frame):
                                raise TimeoutError("Tapo-Verbindung Timeout")

                            # SIGALRM gibt es nur unter Unix/Linux und darf nur im
                            # Main-Thread registriert werden (sonst ValueError)
                            use_alarm = (hasattr(signal, 'SIGALRM') and
                                         threading.current_thread() is threading.main_thread())
                            if use_alarm:
                                signal.signal(signal.SIGALRM, timeout_handler)
                                signal.alarm(5)  # 5 Sekunden Timeout

                            try:
                                from PyP100 import PyP100
                                p100 = PyP100.P100(ip, TAPO_USERNAME, TAPO_PASSWORD)
                                p100.handshake()
                                p100.login()
                                device_info = p100.getDeviceInfo()

                                # Timeout zurücksetzen
                                if use_alarm:
                                    signal.alarm(0)

                                # Steckdose gefunden und verbunden
                                nickname = device_info.get('nickname', f"Tapo P110 ({ip})")
                                state = "on" if device_info.get('device_on', False) else "off"

                                monitor_logger.info(f"✅ Tapo-Steckdose '{nickname}' ({ip}) gefunden - Status: {state}")
                                results[ip] = True

                                # Steckdose in Datenbank speichern/aktualisieren (nicht-blockierend)
                                try:
                                    self._ensure_tapo_in_database(ip, nickname)
                                except Exception as db_error:
                                    monitor_logger.warning(f"⚠️ Fehler beim Speichern in DB für {ip}: {str(db_error)}")

                            except Exception as tapo_error:
                                if use_alarm:
                                    signal.alarm(0)  # Timeout zurücksetzen
                                monitor_logger.debug(f"❌ IP {ip} ist erreichbar, aber keine Tapo-Steckdose oder Timeout: {str(tapo_error)}")
                                results[ip] = False

                        except Exception as outer_error:
                            monitor_logger.debug(f"❌ Fehler bei Tapo-Test für {ip}: {str(outer_error)}")
                            results[ip] = False
                    else:
                        monitor_logger.warning("⚠️ PyP100-Modul nicht verfügbar - kann Tapo-Verbindung nicht testen")
                        results[ip] = False
                else:
                    monitor_logger.debug(f"❌ IP {ip} nicht erreichbar")
                    results[ip] = False

            except Exception as e:
                monitor_logger.warning(f"❌ Fehler bei Steckdosen-Erkennung für IP {ip}: {str(e)}")
                results[ip] = False
                # Weiter mit nächster IP - nicht abbrechen
                continue

        # Erfolgsstatistik berechnen
        success_count = sum(1 for success in results.values() if success)
        elapsed_time = time.time() - start_time

        monitor_logger.info(f"✅ Steckdosen-Erkennung abgeschlossen: {success_count}/{len(results)} Steckdosen gefunden in {elapsed_time:.1f}s")

        # Markieren, dass automatische Erkennung durchgeführt wurde
        self.auto_discovered_tapo = True

        return results

    def _ensure_tapo_in_database(self, ip_address: str, nickname: str = None) -> bool:
        """
        Stellt sicher, dass eine erkannte Tapo-Steckdose in der Datenbank existiert.

        Args:
            ip_address: IP-Adresse der Steckdose
            nickname: Name der Steckdose (optional)

        Returns:
            bool: True wenn erfolgreich in Datenbank gespeichert/aktualisiert
        """
        try:
            db_session = get_db_session()

            # Prüfen, ob Drucker mit dieser IP bereits existiert
            existing_printer = db_session.query(Printer).filter(Printer.plug_ip == ip_address).first()

            if existing_printer:
                # Drucker aktualisieren, falls nötig
                if not existing_printer.plug_username or not existing_printer.plug_password:
                    existing_printer.plug_username = TAPO_USERNAME
                    existing_printer.plug_password = TAPO_PASSWORD
                    monitor_logger.info(f"✅ Drucker {existing_printer.name} mit Tapo-Anmeldedaten aktualisiert")

                if nickname and existing_printer.name != nickname and "Tapo P110" not in existing_printer.name:
                    old_name = existing_printer.name
                    existing_printer.name = nickname
                    monitor_logger.info(f"✅ Drucker {old_name} umbenannt zu {nickname}")

                # Drucker als aktiv markieren, da Tapo-Steckdose gefunden wurde
                if not existing_printer.active:
                    existing_printer.active = True
                    monitor_logger.info(f"✅ Drucker {existing_printer.name} als aktiv markiert")

                # Status aktualisieren
                existing_printer.last_checked = datetime.now()
                db_session.commit()
                db_session.close()
                return True
            else:
                # Neuen Drucker erstellen, falls keiner existiert
                printer_name = nickname or f"Tapo P110 ({ip_address})"
                mac_address = f"tapo:{ip_address.replace('.', '-')}"  # Pseudo-MAC-Adresse

                new_printer = Printer(
                    name=printer_name,
                    model="TP-Link Tapo P110",
                    location="Automatisch erkannt",
                    ip_address=ip_address,  # Drucker-IP gleich Steckdosen-IP setzen
                    mac_address=mac_address,
                    plug_ip=ip_address,
                    plug_username=TAPO_USERNAME,
                    plug_password=TAPO_PASSWORD,
                    status="offline",
                    active=True,
                    last_checked=datetime.now()
                )

                db_session.add(new_printer)
                db_session.commit()
                monitor_logger.info(f"✅ Neuer Drucker '{printer_name}' mit Tapo-Steckdose {ip_address} erstellt")
                db_session.close()
                return True

        except Exception as e:
            monitor_logger.error(f"❌ Fehler beim Speichern der Tapo-Steckdose {ip_address}: {str(e)}")
            try:
                db_session.rollback()
                db_session.close()
            except Exception:
                pass
            return False

# Globale Instanz
printer_monitor = PrinterMonitor()

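# --- Anwendungsbeispiel (Skizze, nicht Teil des Originals) ---
# Annahme: Aufruf innerhalb eines Flask-Request-Kontexts (der Session-Cache
# greift nur dort); das Ausgabeformat dient nur der Illustration.
def _beispiel_monitor_nutzung():
    summary = printer_monitor.get_printer_summary()
    print(f"{summary['online']}/{summary['total']} Drucker online, {summary['printing']} drucken")
    for printer_id, info in printer_monitor.get_live_printer_status().items():
        print(f"#{printer_id} {info['name']}: {info['status']}")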
396
backend/app - Kopie/utils/queue_manager.py
Normal file
@@ -0,0 +1,396 @@
"""
Queue Manager für die Verwaltung von Druckjobs in Warteschlangen.
Überwacht offline Drucker und aktiviert Jobs automatisch.
"""

import threading
import time
import logging
import subprocess
import os
import requests
import signal
import atexit
from datetime import datetime, timedelta
from typing import List, Dict, Optional, Tuple
from contextlib import contextmanager

from models import get_db_session, Job, Printer, User, Notification
from utils.logging_config import get_logger

# Windows-spezifische Imports
if os.name == 'nt':
    try:
        from utils.windows_fixes import get_windows_thread_manager
    except ImportError:
        get_windows_thread_manager = None
else:
    get_windows_thread_manager = None

# Logger für Queue-Manager
queue_logger = get_logger("queue_manager")

def check_printer_status(ip_address: str, timeout: int = 5) -> Tuple[str, bool]:
    """
    Vereinfachte Drucker-Status-Prüfung für den Queue Manager.

    Args:
        ip_address: IP-Adresse der Drucker-Steckdose
        timeout: Timeout in Sekunden (Standard: 5)

    Returns:
        Tuple[str, bool]: (Status, Aktiv) - Status ist "online" oder "offline", Aktiv ist True/False
    """
    if not ip_address or ip_address.strip() == "":
        return "offline", False

    try:
        # Ping-Test um Erreichbarkeit zu prüfen
        if os.name == 'nt':  # Windows
            cmd = ['ping', '-n', '1', '-w', str(timeout * 1000), ip_address.strip()]
        else:  # Unix/Linux/macOS
            cmd = ['ping', '-c', '1', '-W', str(timeout), ip_address.strip()]

        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=timeout + 1,
            encoding='utf-8',
            errors='replace'
        )

        # Wenn Ping erfolgreich ist, als online betrachten
        if result.returncode == 0:
            queue_logger.debug(f"✅ Drucker {ip_address} ist erreichbar (Ping erfolgreich)")
            return "online", True
        else:
            queue_logger.debug(f"❌ Drucker {ip_address} nicht erreichbar (Ping fehlgeschlagen)")
            return "offline", False

    except subprocess.TimeoutExpired:
        queue_logger.warning(f"⏱️ Ping-Timeout für Drucker {ip_address} nach {timeout} Sekunden")
        return "offline", False
    except Exception as e:
        queue_logger.error(f"❌ Fehler beim Status-Check für Drucker {ip_address}: {str(e)}")
        return "offline", False

class PrinterQueueManager:
    """
    Verwaltet die Warteschlangen für offline Drucker und überwacht deren Status.
    Verbesserte Version mit ordnungsgemäßem Thread-Management für Windows.
    """

    def __init__(self):
        self.is_running = False
        self.monitor_thread = None
        self.shutdown_event = threading.Event()  # Sauberes Shutdown-Signal
        self.check_interval = 120  # 2 Minuten zwischen Status-Checks
        self.last_status_cache = {}  # Cache für letzten bekannten Status
        self.notification_cooldown = {}  # Verhindert Spam-Benachrichtigungen
        self._lock = threading.Lock()  # Thread-Sicherheit

        # Windows-spezifische Signal-Handler registrieren
        if os.name == 'nt':
            signal.signal(signal.SIGINT, self._signal_handler)
            signal.signal(signal.SIGTERM, self._signal_handler)

    def _signal_handler(self, signum, frame):
        """Signal-Handler für ordnungsgemäßes Shutdown."""
        queue_logger.warning(f"🛑 Signal {signum} empfangen - stoppe Queue Manager...")
        self.stop()

    def start(self):
        """Startet den Queue-Manager mit verbessertem Thread-Management."""
        with self._lock:
            if not self.is_running:
                self.is_running = True
                self.shutdown_event.clear()
                self.monitor_thread = threading.Thread(target=self._monitor_loop, daemon=False)
                self.monitor_thread.name = "PrinterQueueMonitor"

                # Windows Thread-Manager verwenden falls verfügbar
                if os.name == 'nt' and get_windows_thread_manager:
                    try:
                        thread_manager = get_windows_thread_manager()
                        thread_manager.register_thread(self.monitor_thread)
                        thread_manager.register_cleanup_function(self.stop)
                        queue_logger.debug("✅ Queue Manager bei Windows Thread-Manager registriert")
                    except Exception as e:
                        queue_logger.warning(f"⚠️ Windows Thread-Manager nicht verfügbar: {str(e)}")

                self.monitor_thread.start()
                queue_logger.info("✅ Printer Queue Manager erfolgreich gestartet")

    def stop(self):
        """Stoppt den Queue-Manager ordnungsgemäß."""
        with self._lock:
            if self.is_running:
                queue_logger.info("🔄 Beende Queue Manager...")
                self.is_running = False
                self.shutdown_event.set()

                if self.monitor_thread and self.monitor_thread.is_alive():
                    queue_logger.debug("⏳ Warte auf Thread-Beendigung...")
                    self.monitor_thread.join(timeout=10)

                    if self.monitor_thread.is_alive():
                        queue_logger.warning("⚠️ Thread konnte nicht ordnungsgemäß beendet werden")
                    else:
                        queue_logger.info("✅ Monitor-Thread erfolgreich beendet")

                self.monitor_thread = None
                queue_logger.info("❌ Printer Queue Manager gestoppt")

    def _monitor_loop(self):
        """Hauptschleife für die Überwachung der Drucker mit verbessertem Shutdown-Handling."""
        queue_logger.info(f"🔄 Queue-Überwachung gestartet (Intervall: {self.check_interval} Sekunden)")

        while self.is_running and not self.shutdown_event.is_set():
            try:
                self._check_waiting_jobs()

                # Event.wait() statt time.sleep() für unterbrechbares Warten
                if self.shutdown_event.wait(timeout=self.check_interval):
                    # Shutdown-Signal erhalten
                    queue_logger.info("🛑 Shutdown-Signal empfangen - beende Monitor-Loop")
                    break

            except Exception as e:
                queue_logger.error(f"❌ Fehler in Monitor-Schleife: {str(e)}")
                # Kürzere Wartezeit bei Fehlern, aber ebenfalls unterbrechbar
                if self.shutdown_event.wait(timeout=30):
                    break

        queue_logger.info("🔚 Monitor-Loop beendet")

    def _check_waiting_jobs(self):
        """Überprüft alle wartenden Jobs und aktiviert sie bei verfügbaren Druckern."""
        if self.shutdown_event.is_set():
            return

        db_session = get_db_session()

        try:
            # Alle wartenden Jobs abrufen
            waiting_jobs = db_session.query(Job).filter(
                Job.status == "waiting_for_printer"
            ).all()

            if not waiting_jobs:
                return

            queue_logger.info(f"🔍 Überprüfe {len(waiting_jobs)} wartende Jobs...")

            activated_jobs = []

            for job in waiting_jobs:
                # Shutdown-Check zwischen Jobs
                if self.shutdown_event.is_set():
                    break

                # Drucker-Status prüfen
                printer = db_session.get(Printer, job.printer_id)
                if not printer:
                    continue

                # Status-Check mit Cache-Optimierung
                printer_key = f"printer_{printer.id}"
                current_status = None

                try:
                    if printer.plug_ip:
                        status, active = check_printer_status(printer.plug_ip, timeout=5)
                        current_status = "online" if (status == "online" and active) else "offline"
                    else:
                        current_status = "offline"

                except Exception as e:
                    queue_logger.warning(f"⚠️ Status-Check für Drucker {printer.name} fehlgeschlagen: {str(e)}")
                    current_status = "offline"

                # Prüfen, ob Drucker online geworden ist
                last_status = self.last_status_cache.get(printer_key, "offline")
                self.last_status_cache[printer_key] = current_status

                if current_status == "online" and last_status == "offline":
                    # Drucker ist online geworden!
                    queue_logger.info(f"🟢 Drucker {printer.name} ist ONLINE geworden - aktiviere wartende Jobs")

                    # Job aktivieren
                    job.status = "scheduled"
                    printer.status = "available"
                    printer.active = True
                    printer.last_checked = datetime.now()

                    activated_jobs.append({
                        "job": job,
                        "printer": printer
                    })

                elif current_status == "online":
                    # Drucker ist bereits online, Job kann aktiviert werden
                    job.status = "scheduled"
                    printer.status = "available"
                    printer.active = True
                    printer.last_checked = datetime.now()

                    activated_jobs.append({
                        "job": job,
                        "printer": printer
                    })

                else:
                    # Drucker bleibt offline
                    printer.status = "offline"
                    printer.active = False
                    printer.last_checked = datetime.now()

            # Alle Änderungen speichern
            if activated_jobs:
                db_session.commit()
                queue_logger.info(f"✅ {len(activated_jobs)} Jobs erfolgreich aktiviert")

                # Benachrichtigungen versenden (nur wenn nicht im Shutdown)
                if not self.shutdown_event.is_set():
                    for item in activated_jobs:
                        self._send_job_activation_notification(item["job"], item["printer"])
            else:
                # Auch Offline-Status speichern
                db_session.commit()

        except Exception as e:
            db_session.rollback()
            queue_logger.error(f"❌ Fehler beim Überprüfen wartender Jobs: {str(e)}")
        finally:
            db_session.close()

    def _send_job_activation_notification(self, job: Job, printer: Printer):
        """Sendet eine Benachrichtigung, wenn ein Job aktiviert wird."""
        if self.shutdown_event.is_set():
            return

        try:
            # Cooldown prüfen (keine Spam-Benachrichtigungen)
            cooldown_key = f"job_{job.id}_activated"
            now = datetime.now()

            if cooldown_key in self.notification_cooldown:
                last_notification = self.notification_cooldown[cooldown_key]
                if (now - last_notification).total_seconds() < 300:  # 5 Minuten Cooldown
                    return

            self.notification_cooldown[cooldown_key] = now

            # Benachrichtigung erstellen
            db_session = get_db_session()

            try:
                user = db_session.get(User, job.user_id)
                if not user:
                    return

                notification = Notification(
                    user_id=user.id,
                    type="job_activated",
                    payload={
                        "job_id": job.id,
                        "job_name": job.name,
                        "printer_id": printer.id,
                        "printer_name": printer.name,
                        "start_time": job.start_at.isoformat() if job.start_at else None,
                        "message": f"🎉 Gute Nachrichten! Drucker '{printer.name}' ist online. Ihr Job '{job.name}' wurde aktiviert und startet bald."
                    }
                )

                db_session.add(notification)
                db_session.commit()

                queue_logger.info(f"📧 Benachrichtigung für User {user.name} gesendet: Job {job.name} aktiviert")

            except Exception as e:
                db_session.rollback()
                queue_logger.error(f"❌ Fehler beim Erstellen der Benachrichtigung: {str(e)}")
            finally:
                db_session.close()

        except Exception as e:
            queue_logger.error(f"❌ Fehler beim Senden der Aktivierungs-Benachrichtigung: {str(e)}")

    def get_queue_status(self) -> Dict:
        """Gibt den aktuellen Status der Warteschlangen zurück."""
        db_session = get_db_session()

        try:
            # Wartende Jobs zählen
            waiting_jobs = db_session.query(Job).filter(
                Job.status == "waiting_for_printer"
            ).count()

            # Offline Drucker mit wartenden Jobs
            offline_printers_with_queue = db_session.query(Printer).join(Job).filter(
                Printer.status == "offline",
                Job.status == "waiting_for_printer"
            ).distinct().count()

            # Online Drucker
            online_printers = db_session.query(Printer).filter(
                Printer.status == "available"
            ).count()

            total_printers = db_session.query(Printer).count()

            return {
                "waiting_jobs": waiting_jobs,
                "offline_printers_with_queue": offline_printers_with_queue,
                "online_printers": online_printers,
                "total_printers": total_printers,
                "queue_manager_running": self.is_running,
                "last_check": datetime.now().isoformat(),
                "check_interval_seconds": self.check_interval
            }

        except Exception as e:
            queue_logger.error(f"❌ Fehler beim Abrufen des Queue-Status: {str(e)}")
            return {
                "error": str(e),
                "queue_manager_running": self.is_running
            }
        finally:
            db_session.close()

    def is_healthy(self) -> bool:
        """Prüft, ob der Queue Manager ordnungsgemäß läuft."""
        return (self.is_running and
                self.monitor_thread is not None and
                self.monitor_thread.is_alive() and
                not self.shutdown_event.is_set())

# Globale Instanz des Queue-Managers
_queue_manager_instance = None
_queue_manager_lock = threading.Lock()

def get_queue_manager() -> PrinterQueueManager:
    """Gibt die globale Instanz des Queue-Managers zurück."""
    global _queue_manager_instance
    with _queue_manager_lock:
        if _queue_manager_instance is None:
            _queue_manager_instance = PrinterQueueManager()
        return _queue_manager_instance

def start_queue_manager():
    """Startet den globalen Queue-Manager."""
    manager = get_queue_manager()
    manager.start()
    return manager

def stop_queue_manager():
    """Stoppt den globalen Queue-Manager."""
    global _queue_manager_instance
    with _queue_manager_lock:
        if _queue_manager_instance:
            _queue_manager_instance.stop()
            _queue_manager_instance = None

# Automatisches Cleanup bei Prozess-Ende registrieren
atexit.register(stop_queue_manager)

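# --- Anwendungsbeispiel (Skizze, nicht Teil des Originals) ---
# Annahme: Aufruf beim App-Start (z. B. in der App-Factory); das explizite
# Stoppen ist optional, da atexit das Cleanup übernimmt.
def _beispiel_queue_manager():
    manager = start_queue_manager()
    print(manager.get_queue_status())
    print("Healthy:", manager.is_healthy())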
226
backend/app - Kopie/utils/quick_fix.py
Normal file
@@ -0,0 +1,226 @@
#!/usr/bin/env python3
"""
Schnelle Datenbank-Reparatur für kritische Fehler
"""

import sqlite3
import os
from datetime import datetime

# Datenbankpfad
DATABASE_PATH = "database/myp.db"

def quick_fix_database():
    """Behebt die kritischsten Datenbankprobleme sofort"""
    print("🔧 Starte schnelle Datenbank-Reparatur...")

    if not os.path.exists(DATABASE_PATH):
        print(f"❌ Datenbankdatei nicht gefunden: {DATABASE_PATH}")
        return False

    try:
        # Backup erstellen
        backup_path = f"{DATABASE_PATH}.emergency_backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        import shutil
        shutil.copy2(DATABASE_PATH, backup_path)
        print(f"✅ Emergency-Backup erstellt: {backup_path}")

        # Verbindung zur Datenbank
        conn = sqlite3.connect(DATABASE_PATH)
        cursor = conn.cursor()

        print("🔧 Repariere Datenbank-Schema...")

        # 1. Fehlende Spalte duration_minutes zu guest_requests hinzufügen
        try:
            cursor.execute("ALTER TABLE guest_requests ADD COLUMN duration_minutes INTEGER")
            print("✅ Spalte duration_minutes zu guest_requests hinzugefügt")
        except sqlite3.OperationalError as e:
            if "duplicate column name" in str(e).lower():
                print("ℹ️ Spalte duration_minutes bereits vorhanden")
            else:
                print(f"⚠️ Fehler bei duration_minutes: {e}")

        # 2. Fehlende Spalten zu users hinzufügen
        # Hinweis: SQLite kann per ALTER TABLE keine UNIQUE-Spalte ergänzen;
        # die Eindeutigkeit von username wird daher über einen Index gesichert.
        user_columns = [
            ("username", "VARCHAR(100)"),
            ("updated_at", "DATETIME DEFAULT CURRENT_TIMESTAMP"),
            ("department", "VARCHAR(100)"),
            ("position", "VARCHAR(100)"),
            ("phone", "VARCHAR(50)"),
            ("bio", "TEXT")
        ]

        for column_name, column_def in user_columns:
            try:
                cursor.execute(f"ALTER TABLE users ADD COLUMN {column_name} {column_def}")
                print(f"✅ Spalte {column_name} zu users hinzugefügt")
            except sqlite3.OperationalError as e:
                if "duplicate column name" in str(e).lower():
                    print(f"ℹ️ Spalte {column_name} bereits vorhanden")
                else:
                    print(f"⚠️ Fehler bei {column_name}: {e}")

        try:
            cursor.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_users_username_unique ON users(username)")
        except sqlite3.OperationalError as e:
            print(f"⚠️ Fehler beim Unique-Index für username: {e}")

        # 3. Fehlende Spalten zu printers hinzufügen
        printer_columns = [
            ("plug_username", "VARCHAR(100) DEFAULT 'admin'"),
            ("plug_password", "VARCHAR(100) DEFAULT 'admin'"),
            ("last_checked", "DATETIME")
        ]

        for column_name, column_def in printer_columns:
            try:
                cursor.execute(f"ALTER TABLE printers ADD COLUMN {column_name} {column_def}")
                print(f"✅ Spalte {column_name} zu printers hinzugefügt")
            except sqlite3.OperationalError as e:
                if "duplicate column name" in str(e).lower():
                    print(f"ℹ️ Spalte {column_name} bereits vorhanden")
                else:
                    print(f"⚠️ Fehler bei {column_name}: {e}")

        # 4. Username für bestehende User setzen (falls NULL)
        try:
            cursor.execute("UPDATE users SET username = email WHERE username IS NULL")
            updated_users = cursor.rowcount
            if updated_users > 0:
                print(f"✅ Username für {updated_users} Benutzer gesetzt")
        except Exception as e:
            print(f"⚠️ Fehler beim Setzen der Usernames: {e}")

        # 5. Drucker-Daten nachtragen
        print("🖨️ Trage Drucker nach...")

        # Prüfen ob bereits Drucker vorhanden sind
        cursor.execute("SELECT COUNT(*) FROM printers")
        printer_count = cursor.fetchone()[0]

        if printer_count == 0:
            # Standard-Drucker hinzufügen
            printer_configs = [
                ('192.168.0.100', '98:25:4A:E1:20:01'),
                ('192.168.0.101', '98:25:4A:E1:20:02'),
                ('192.168.0.102', '98:25:4A:E1:20:03'),
                ('192.168.0.103', '98:25:4A:E1:20:04'),
                ('192.168.0.104', '98:25:4A:E1:20:05'),
                ('192.168.0.106', '98:25:4A:E1:20:06'),
            ]
            printers_to_add = [
                {
                    'name': f'Printer {i}',
                    'model': 'P115',
                    'location': 'Werk 040 - Berlin - TBA',
                    'ip_address': ip,
                    'mac_address': mac,  # Dummy-MAC-Adresse
                    'plug_ip': ip,
                    'plug_username': 'admin',
                    'plug_password': 'admin',
                    'status': 'offline',
                    'active': 1
                }
                for i, (ip, mac) in enumerate(printer_configs, start=1)
            ]

            for printer in printers_to_add:
                try:
                    cursor.execute("""
                        INSERT INTO printers (name, model, location, ip_address, mac_address, plug_ip, plug_username, plug_password, status, active, created_at)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        printer['name'], printer['model'], printer['location'],
                        printer['ip_address'], printer['mac_address'], printer['plug_ip'],
                        printer['plug_username'], printer['plug_password'],
                        printer['status'], printer['active'], datetime.now()
                    ))
                    print(f"✅ Drucker {printer['name']} hinzugefügt")
                except Exception as e:
                    print(f"⚠️ Fehler beim Hinzufügen von {printer['name']}: {e}")
        else:
            print(f"ℹ️ {printer_count} Drucker bereits vorhanden")

        # 6. Optimierungen
        print("🔧 Führe Datenbankoptimierungen durch...")
        try:
            # Indizes erstellen
            indices = [
                "CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)",
                "CREATE INDEX IF NOT EXISTS idx_users_username ON users(username)",
                "CREATE INDEX IF NOT EXISTS idx_jobs_user_id ON jobs(user_id)",
                "CREATE INDEX IF NOT EXISTS idx_jobs_printer_id ON jobs(printer_id)",
                "CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status)",
                "CREATE INDEX IF NOT EXISTS idx_guest_requests_status ON guest_requests(status)"
            ]

            for index_sql in indices:
                cursor.execute(index_sql)

            # Statistiken aktualisieren
            cursor.execute("ANALYZE")
            print("✅ Datenbankoptimierungen abgeschlossen")

        except Exception as e:
            print(f"⚠️ Fehler bei Optimierungen: {e}")

        # Änderungen speichern
        conn.commit()
        conn.close()

        print("✅ Schnelle Datenbank-Reparatur erfolgreich abgeschlossen!")
        return True

    except Exception as e:
        print(f"❌ Kritischer Fehler bei der Reparatur: {str(e)}")
        return False

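# --- Verifikations-Skizze (nicht Teil des Originals) ---
# Annahme: eine stichprobenartige Prüfung genügt; die Spaltennamen entsprechen
# den oben nachgerüsteten Spalten.
def verify_schema(db_path: str = DATABASE_PATH) -> bool:
    """Prüft, ob die wichtigsten reparierten Spalten existieren."""
    conn = sqlite3.connect(db_path)
    try:
        user_cols = {row[1] for row in conn.execute("PRAGMA table_info(users)")}
        printer_cols = {row[1] for row in conn.execute("PRAGMA table_info(printers)")}
        return ({"username", "updated_at"} <= user_cols
                and {"plug_username", "last_checked"} <= printer_cols)
    finally:
        conn.close()
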
if __name__ == "__main__":
    quick_fix_database()
244
backend/app - Kopie/utils/rate_limiter.py
Normal file
@@ -0,0 +1,244 @@
#!/usr/bin/env python3
"""
Rate-Limiting-System für MYP Platform
Schutz vor API-Missbrauch und DDoS-Attacken
"""

import time
import hashlib
from functools import wraps
from flask import request, jsonify, g
from typing import Dict, Optional
from dataclasses import dataclass
from utils.logging_config import get_logger

# Redis ist optional - ohne installiertes Paket greift das In-Memory-Limiting
try:
    import redis
except ImportError:
    redis = None

logger = get_logger("security")

@dataclass
class RateLimit:
    """Konfiguration für Rate-Limiting-Regeln"""
    requests: int  # Anzahl erlaubter Anfragen
    per: int  # Zeitraum in Sekunden
    message: str  # Fehlermeldung bei Überschreitung

# Rate-Limiting-Konfiguration
RATE_LIMITS = {
    # API-Endpunkte
    'api_general': RateLimit(100, 300, "Zu viele API-Anfragen. Versuchen Sie es in 5 Minuten erneut."),
    'api_auth': RateLimit(10, 300, "Zu viele Anmeldeversuche. Versuchen Sie es in 5 Minuten erneut."),
    'api_upload': RateLimit(20, 3600, "Zu viele Upload-Anfragen. Versuchen Sie es in einer Stunde erneut."),
    'api_admin': RateLimit(200, 300, "Zu viele Admin-Anfragen. Versuchen Sie es in 5 Minuten erneut."),

    # Spezielle Endpunkte
    'printer_status': RateLimit(300, 300, "Zu viele Drucker-Status-Anfragen."),
    'job_creation': RateLimit(50, 3600, "Zu viele Job-Erstellungen. Versuchen Sie es in einer Stunde erneut."),

    # Drucker-Monitor Rate-Limits (gelockert für Live-Updates)
    'printer_monitor_live': RateLimit(30, 60, "Zu viele Live-Status-Anfragen. Versuchen Sie es in einer Minute erneut."),
    'printer_monitor_summary': RateLimit(60, 60, "Zu viele Zusammenfassungs-Anfragen. Versuchen Sie es in einer Minute erneut."),
    'printer_monitor_cache': RateLimit(10, 120, "Zu viele Cache-Lösch-Anfragen. Versuchen Sie es in 2 Minuten erneut."),
    'printer_monitor_init': RateLimit(5, 300, "Zu viele Initialisierungs-Anfragen. Versuchen Sie es in 5 Minuten erneut."),

    # Sicherheitskritische Endpunkte
    'password_reset': RateLimit(3, 3600, "Zu viele Passwort-Reset-Anfragen. Versuchen Sie es in einer Stunde erneut."),
    'user_creation': RateLimit(10, 3600, "Zu viele Benutzer-Erstellungen.")
}

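# --- Beispiel (Skizze, nicht Teil des Originals) ---
# Eigene Limits lassen sich über weitere Einträge ergänzen; der Schlüsselname
# 'report_export' ist hypothetisch:
#
#   RATE_LIMITS['report_export'] = RateLimit(5, 3600, "Zu viele Export-Anfragen.")
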
class RateLimiter:
    """
    In-Memory Rate Limiter mit optionaler Redis-Unterstützung
    """

    def __init__(self, use_redis: bool = False, redis_url: str = None):
        self.use_redis = use_redis
        self.redis_client = None
        self.memory_store: Dict[str, Dict] = {}

        if use_redis and redis_url:
            try:
                import redis
                self.redis_client = redis.from_url(redis_url, decode_responses=True)
                logger.info("✅ Redis-basiertes Rate Limiting aktiviert")
            except ImportError:
                logger.warning("⚠️ Redis nicht verfügbar, verwende In-Memory Rate Limiting")
                self.use_redis = False
            except Exception as e:
                logger.error(f"❌ Redis-Verbindung fehlgeschlagen: {e}")
                self.use_redis = False

    def _get_client_id(self) -> str:
        """
        Generiert eine eindeutige Client-ID basierend auf IP und User-Agent
        """
        ip = request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr)
        user_agent = request.headers.get('User-Agent', '')

        # Hash für Anonymisierung
        client_string = f"{ip}:{user_agent}"
        return hashlib.sha256(client_string.encode()).hexdigest()[:16]

    def _get_key(self, limit_type: str, client_id: str) -> str:
        """Erstellt Redis/Memory-Key für Rate-Limiting"""
        return f"rate_limit:{limit_type}:{client_id}"

    def _get_current_requests(self, key: str, window_start: int) -> int:
        """Holt aktuelle Anfragen-Anzahl"""
        if self.use_redis and self.redis_client:
            try:
                # Redis-basierte Implementierung
                pipe = self.redis_client.pipeline()
                pipe.zremrangebyscore(key, 0, window_start)
                pipe.zcard(key)
                _, count = pipe.execute()
                return count
            except Exception as e:
                logger.error(f"Redis-Fehler: {e}, Fallback zu Memory")
                self.use_redis = False

        # In-Memory-Implementierung
        if key not in self.memory_store:
            self.memory_store[key] = {'requests': [], 'last_cleanup': time.time()}

        # Alte Einträge bereinigen
        data = self.memory_store[key]
        data['requests'] = [req_time for req_time in data['requests'] if req_time > window_start]

        return len(data['requests'])

def _add_request(self, key: str, current_time: int, expire_time: int):
|
||||
"""Fügt neue Anfrage hinzu"""
|
||||
if self.use_redis and self.redis_client:
|
||||
try:
|
||||
pipe = self.redis_client.pipeline()
|
||||
pipe.zadd(key, {str(current_time): current_time})
|
||||
pipe.expire(key, expire_time)
|
||||
pipe.execute()
|
||||
return
|
||||
except Exception as e:
|
||||
logger.error(f"Redis-Fehler: {e}, fallback zu Memory")
|
||||
self.use_redis = False
|
||||
|
||||
# In-Memory Implementierung
|
||||
if key not in self.memory_store:
|
||||
self.memory_store[key] = {'requests': [], 'last_cleanup': time.time()}
|
||||
|
||||
self.memory_store[key]['requests'].append(current_time)
|
||||
|
||||
def is_allowed(self, limit_type: str) -> tuple[bool, Dict]:
|
||||
"""
|
||||
Prüft ob eine Anfrage erlaubt ist
|
||||
|
||||
Returns:
|
||||
(is_allowed, info_dict)
|
||||
"""
|
||||
if limit_type not in RATE_LIMITS:
|
||||
return True, {}
|
||||
|
||||
rate_limit = RATE_LIMITS[limit_type]
|
||||
client_id = self._get_client_id()
|
||||
key = self._get_key(limit_type, client_id)
|
||||
|
||||
current_time = int(time.time())
|
||||
window_start = current_time - rate_limit.per
|
||||
|
||||
# Aktuelle Anfragen zählen
|
||||
current_requests = self._get_current_requests(key, window_start)
|
||||
|
||||
# Limite prüfen
|
||||
if current_requests >= rate_limit.requests:
|
||||
logger.warning(f"🚨 Rate limit exceeded: {limit_type} für Client {client_id[:8]}...")
|
||||
return False, {
|
||||
'limit': rate_limit.requests,
|
||||
'remaining': 0,
|
||||
'reset_time': current_time + rate_limit.per,
|
||||
'message': rate_limit.message
|
||||
}
|
||||
|
||||
# Anfrage hinzufügen
|
||||
self._add_request(key, current_time, rate_limit.per)
|
||||
|
||||
return True, {
|
||||
'limit': rate_limit.requests,
|
||||
'remaining': rate_limit.requests - current_requests - 1,
|
||||
'reset_time': current_time + rate_limit.per
|
||||
}
|
||||
|
||||
def cleanup_memory(self):
|
||||
"""Bereinigt alte In-Memory-Einträge"""
|
||||
if self.use_redis:
|
||||
return
|
||||
|
||||
current_time = time.time()
|
||||
keys_to_delete = []
|
||||
|
||||
for key, data in self.memory_store.items():
|
||||
# Bereinige alle Einträge älter als 24 Stunden
|
||||
if current_time - data.get('last_cleanup', 0) > 86400:
|
||||
keys_to_delete.append(key)
|
||||
|
||||
for key in keys_to_delete:
|
||||
del self.memory_store[key]
|
||||
|
||||
# Globale Rate-Limiter-Instanz
|
||||
rate_limiter = RateLimiter()
|
||||
|
||||
def limit_requests(limit_type: str):
|
||||
"""
|
||||
Decorator für Rate-Limiting von API-Endpunkten
|
||||
|
||||
Args:
|
||||
limit_type: Art des Limits (siehe RATE_LIMITS)
|
||||
"""
|
||||
def decorator(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
# Rate-Limiting prüfen
|
||||
is_allowed, info = rate_limiter.is_allowed(limit_type)
|
||||
|
||||
if not is_allowed:
|
||||
response = jsonify({
|
||||
'error': 'Rate limit exceeded',
|
||||
'message': info['message'],
|
||||
'retry_after': info['reset_time'] - int(time.time())
|
||||
})
|
||||
response.status_code = 429
|
||||
response.headers['Retry-After'] = str(info['reset_time'] - int(time.time()))
|
||||
response.headers['X-RateLimit-Limit'] = str(info['limit'])
|
||||
response.headers['X-RateLimit-Remaining'] = str(info['remaining'])
|
||||
response.headers['X-RateLimit-Reset'] = str(info['reset_time'])
|
||||
return response
|
||||
|
||||
# Rate-Limiting-Headers zu Response hinzufügen
|
||||
response = f(*args, **kwargs)
|
||||
|
||||
if hasattr(response, 'headers'):
|
||||
response.headers['X-RateLimit-Limit'] = str(info['limit'])
|
||||
response.headers['X-RateLimit-Remaining'] = str(info['remaining'])
|
||||
response.headers['X-RateLimit-Reset'] = str(info['reset_time'])
|
||||
|
||||
return response
|
||||
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
def get_client_info() -> Dict:
|
||||
"""
|
||||
Gibt Client-Informationen für Rate-Limiting zurück
|
||||
"""
|
||||
client_id = rate_limiter._get_client_id()
|
||||
ip = request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr)
|
||||
|
||||
return {
|
||||
'client_id': client_id,
|
||||
'ip_address': ip,
|
||||
'user_agent': request.headers.get('User-Agent', ''),
|
||||
'timestamp': int(time.time())
|
||||
}
|
||||
|
||||
# Maintenance-Task für Memory-Cleanup
|
||||
def cleanup_rate_limiter():
|
||||
"""Periodische Bereinigung des Rate-Limiters"""
|
||||
rate_limiter.cleanup_memory()
|
||||
logger.debug("🧹 Rate-Limiter Memory bereinigt")
|
||||
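For context, the decorator is meant to wrap Flask views, keyed by one of the RATE_LIMITS entries. A minimal usage sketch; the route and handler are illustrative, only limit_requests and the key names come from this file:

from flask import Flask, jsonify
from utils.rate_limiter import limit_requests

app = Flask(__name__)

@app.route("/api/printers/status")
@limit_requests('printer_status')  # must match a key in RATE_LIMITS
def printer_status():
    # hypothetical handler body
    return jsonify({"status": "online"})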
50
backend/app - Kopie/utils/scheduler.py
Normal file
@@ -0,0 +1,50 @@
"""
Scheduler utility functions for the admin panel.
"""

from utils.job_scheduler import scheduler

def scheduler_is_running():
    """
    Checks whether the job scheduler is running.

    Returns:
        bool: True if the scheduler is active, otherwise False
    """
    return scheduler.is_running()

def start_scheduler():
    """
    Starts the job scheduler.

    Returns:
        bool: True if started successfully, False if it is already running
    """
    return scheduler.start()

def stop_scheduler():
    """
    Stops the job scheduler.

    Returns:
        bool: True if stopped successfully, False if it is not running
    """
    return scheduler.stop()

def get_scheduler_uptime():
    """
    Returns the scheduler's uptime.

    Returns:
        str: formatted uptime, or None if the scheduler is not running
    """
    return scheduler.get_uptime()

def get_scheduler_tasks():
    """
    Returns all tasks registered with the scheduler.

    Returns:
        dict: dictionary with task IDs as keys and task configurations as values
    """
    return scheduler.get_tasks()
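A minimal usage sketch of these wrappers; the boot-time call site is an assumption, only the function names come from this file:

from utils.scheduler import scheduler_is_running, start_scheduler

# Start the scheduler once at application boot if it is not active yet.
if not scheduler_is_running():
    # start() returns False when the scheduler is already running
    print(f"Scheduler gestartet: {start_scheduler()}")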
338
backend/app - Kopie/utils/security.py
Normal file
@@ -0,0 +1,338 @@
#!/usr/bin/env python3
"""
Security utilities for the MYP platform:
Content Security Policy (CSP), security headers and further hardening measures
"""

import secrets
import hashlib
from flask import request, g, session
from functools import wraps
from typing import Dict, List, Optional
from utils.logging_config import get_logger

logger = get_logger("security")

# Content Security Policy configuration
CSP_POLICY = {
    'default-src': ["'self'"],
    'script-src': [
        "'self'",
        "'unsafe-inline'",  # for inline scripts (only used when no nonce is present)
        "https://cdn.jsdelivr.net",  # external libraries
        "https://unpkg.com"  # fallback libraries
    ],
    'style-src': [
        "'self'",
        "'unsafe-inline'",  # for Tailwind and dynamic styles
        "https://fonts.googleapis.com"
    ],
    'img-src': [
        "'self'",
        "data:",  # SVG data URLs
        "blob:",  # dynamically generated images
        "https:"  # allow HTTPS images
    ],
    'font-src': [
        "'self'",
        "https://fonts.gstatic.com",
        "data:"  # embedded fonts
    ],
    'connect-src': [
        "'self'",
        "ws:",  # WebSocket for local development
        "wss:",  # secure WebSockets
        "http://localhost:*",  # local development
        "http://127.0.0.1:*",  # local development
        "https://localhost:*",  # local development, HTTPS
        "https://127.0.0.1:*"  # local development, HTTPS
    ],
    'media-src': ["'self'"],
    'object-src': ["'none'"],  # block Flash and other plugins
    'base-uri': ["'self'"],
    'form-action': ["'self'"],
    'frame-ancestors': ["'none'"],  # clickjacking protection
    'upgrade-insecure-requests': False,  # disabled for local development
    'block-all-mixed-content': False  # disabled for local development
}

# Security headers configuration
SECURITY_HEADERS = {
    'X-Content-Type-Options': 'nosniff',
    'X-Frame-Options': 'DENY',
    'X-XSS-Protection': '1; mode=block',
    'Referrer-Policy': 'strict-origin-when-cross-origin',
    'Permissions-Policy': (
        'geolocation=(), '
        'microphone=(), '
        'camera=(), '
        'payment=(), '
        'usb=(), '
        'accelerometer=(), '
        'gyroscope=(), '
        'magnetometer=()'
    ),
    'Cross-Origin-Embedder-Policy': 'require-corp',
    'Cross-Origin-Opener-Policy': 'same-origin',
    'Cross-Origin-Resource-Policy': 'same-origin'
}

class SecurityManager:
    """
    Central security management for the MYP platform
    """

    def __init__(self):
        self.nonce_store: Dict[str, str] = {}

    def generate_nonce(self) -> str:
        """Generates a secure nonce for CSP"""
        nonce = secrets.token_urlsafe(32)

        # Store the nonce in the session for later validation
        if 'security_nonces' not in session:
            session['security_nonces'] = []

        session['security_nonces'].append(nonce)

        # Keep at most 10 nonces per session
        if len(session['security_nonces']) > 10:
            session['security_nonces'] = session['security_nonces'][-10:]

        return nonce

    def validate_nonce(self, nonce: str) -> bool:
        """Validates a nonce"""
        if 'security_nonces' not in session:
            return False

        return nonce in session['security_nonces']

    def build_csp_header(self, nonce: Optional[str] = None, use_nonce: bool = False) -> str:
        """
        Builds the Content-Security-Policy header

        Args:
            nonce: optional CSP nonce for inline scripts
            use_nonce: whether nonces should be used (then disables 'unsafe-inline')

        Returns:
            CSP header string
        """
        csp_parts = []

        for directive, values in CSP_POLICY.items():
            if directive in ['upgrade-insecure-requests', 'block-all-mixed-content']:
                if values:
                    csp_parts.append(directive.replace('_', '-'))
                continue

            if isinstance(values, list):
                directive_values = values.copy()

                # Add the nonce for script-src only when explicitly requested
                if directive == 'script-src' and nonce and use_nonce:
                    directive_values.append(f"'nonce-{nonce}'")
                    # Remove 'unsafe-inline' when a nonce is in use
                    if "'unsafe-inline'" in directive_values:
                        directive_values.remove("'unsafe-inline'")

                csp_parts.append(f"{directive.replace('_', '-')} {' '.join(directive_values)}")

        return "; ".join(csp_parts)

    def get_client_fingerprint(self) -> str:
        """
        Builds a client fingerprint for additional security checks
        """
        components = [
            request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr),
            request.headers.get('User-Agent', ''),
            request.headers.get('Accept-Language', ''),
            request.headers.get('Accept-Encoding', '')
        ]

        fingerprint_string = '|'.join(components)
        return hashlib.sha256(fingerprint_string.encode()).hexdigest()[:32]

    def check_suspicious_activity(self) -> bool:
        """
        Checks the request for suspicious activity
        """
        # SQL injection and XSS patterns
        sql_patterns = [
            'union select', 'drop table', 'insert into', 'delete from',
            'script>', '<iframe', 'javascript:', 'vbscript:',
            'onload=', 'onerror=', 'onclick='
        ]

        # Inspect the request data; get_json(silent=True) avoids the
        # exception request.json would raise on non-JSON bodies
        request_data = str(request.args) + str(request.form) + str(request.get_json(silent=True) or {})
        request_data_lower = request_data.lower()

        for pattern in sql_patterns:
            if pattern in request_data_lower:
                logger.warning(f"🚨 Verdächtige Aktivität erkannt: {pattern} von {request.remote_addr}")
                return True

        # Overly large requests
        if len(request_data) > 50000:  # 50 KB limit
            logger.warning(f"🚨 Übermäßig große Anfrage von {request.remote_addr}: {len(request_data)} bytes")
            return True

        return False

    def log_security_event(self, event_type: str, details: Dict):
        """
        Logs a security event
        """
        security_data = {
            'event_type': event_type,
            'ip_address': request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr),
            'user_agent': request.headers.get('User-Agent', ''),
            'timestamp': request.environ.get('REQUEST_START_TIME'),
            'fingerprint': self.get_client_fingerprint(),
            **details
        }

        logger.warning(f"🔒 Sicherheitsereignis: {event_type} - {security_data}")

# Global security-manager instance
security_manager = SecurityManager()

def apply_security_headers(response):
    """
    Applies the security headers to a response
    """
    # Standard security headers
    for header, value in SECURITY_HEADERS.items():
        response.headers[header] = value

    # Content Security Policy (kept less restrictive for development)
    nonce = getattr(g, 'csp_nonce', None)
    # No nonces during development, so 'unsafe-inline' stays active
    use_nonce = False  # set to True in production for better security
    csp_header = security_manager.build_csp_header(nonce, use_nonce)
    response.headers['Content-Security-Policy'] = csp_header

    # HSTS only for HTTPS and production
    if request.is_secure and request.environ.get('FLASK_ENV') != 'development':
        response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains; preload'

    return response

def security_check(check_suspicious: bool = True):
    """
    Decorator for per-request security checks

    Args:
        check_suspicious: whether to scan for suspicious activity
    """
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            # Scan for suspicious activity
            if check_suspicious and security_manager.check_suspicious_activity():
                security_manager.log_security_event('suspicious_request', {
                    'endpoint': request.endpoint,
                    'method': request.method,
                    'args': dict(request.args),
                    'form': dict(request.form)
                })

                from flask import jsonify
                return jsonify({
                    'error': 'Verdächtige Anfrage erkannt',
                    'message': 'Ihre Anfrage wurde aus Sicherheitsgründen blockiert.'
                }), 400

            return f(*args, **kwargs)

        return wrapper
    return decorator

def require_secure_headers(f):
    """
    Decorator that guarantees the security headers are set
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        # Generate a CSP nonce
        g.csp_nonce = security_manager.generate_nonce()

        response = f(*args, **kwargs)

        # Apply the security headers
        if hasattr(response, 'headers'):
            response = apply_security_headers(response)

        return response

    return wrapper

def get_csp_nonce() -> str:
    """
    Returns the current CSP nonce for templates
    """
    return getattr(g, 'csp_nonce', '')

def validate_origin():
    """
    Validates the origin of the request
    """
    origin = request.headers.get('Origin')
    referer = request.headers.get('Referer')
    host = request.headers.get('Host')

    # Check the Origin header for API requests
    if request.path.startswith('/api/') and origin:
        allowed_origins = [
            f"http://{host}",
            f"https://{host}",
            "http://localhost:5000",
            "http://127.0.0.1:5000"
        ]

        if origin not in allowed_origins:
            logger.warning(f"🚨 Ungültige Origin: {origin} für {request.path}")
            return False

    return True

# Template helper for the CSP nonce
def csp_nonce():
    """Template helper for the CSP nonce"""
    return get_csp_nonce()

# Security middleware for the Flask app
def init_security(app):
    """
    Initializes the security features for a Flask app
    """

    @app.before_request
    def before_request_security():
        """Security checks before every request"""

        # Validate the origin
        if not validate_origin():
            from flask import jsonify
            return jsonify({
                'error': 'Invalid origin',
                'message': 'Anfrage von ungültiger Quelle'
            }), 403

        # Generate a CSP nonce
        g.csp_nonce = security_manager.generate_nonce()

    @app.after_request
    def after_request_security(response):
        """Apply the security headers after every request"""
        return apply_security_headers(response)

    # Register the template helper
    app.jinja_env.globals['csp_nonce'] = csp_nonce

    logger.info("🔒 Security System initialisiert")

    return app
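Typical wiring in the app factory, as a minimal sketch; the secret key and the route are illustrative, init_security and security_check are the functions defined above:

from flask import Flask, jsonify
from utils.security import init_security, security_check

app = Flask(__name__)
app.secret_key = "change-me"  # sessions are required because nonces live in the session
init_security(app)

@app.route("/api/example", methods=["POST"])
@security_check(check_suspicious=True)
def example():
    return jsonify({"ok": True})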
117
backend/app - Kopie/utils/setup_drucker_db.py
Normal file
@@ -0,0 +1,117 @@
#!/usr/bin/env python3
"""
Printer database setup for the MYP platform.
Inserts the hard-coded printers into the database.
"""

import os
import sys
sys.path.append('.')

from config.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

def setup_drucker():
    """Inserts the hard-coded printers into the database."""
    print("=== MYP Platform - Drucker-Setup ===")
    print(f"Hardkodierte Drucker: {len(PRINTERS)}")

    try:
        db = DatabaseManager()
        session = db.get_session()

        # Delete all existing printers
        existing_printers = session.query(Printer).all()
        if existing_printers:
            print(f"Lösche {len(existing_printers)} existierende Drucker...")
            for printer in existing_printers:
                session.delete(printer)
            session.commit()
            print("✅ Alle alten Drucker gelöscht")
        else:
            print("Keine existierenden Drucker gefunden")

        # Add the new printers
        added_count = 0

        for printer_name, config in PRINTERS.items():
            # Create a new printer
            new_printer = Printer(
                name=printer_name,
                model="P115",  # default model
                location="Werk 040 - Berlin - TBA",  # updated location
                ip_address=config["ip"],
                mac_address=f"98:25:4A:E1:{printer_name[-1]}0:0{printer_name[-1]}",  # dummy MAC
                plug_ip=config["ip"],
                plug_username="admin",
                plug_password="admin",
                status="available",  # available, since it is in the configuration
                active=True,
                created_at=datetime.now()
            )

            session.add(new_printer)
            print(f"✅ {printer_name}: Hinzugefügt (IP: {config['ip']})")
            added_count += 1

        # Persist the changes
        session.commit()
        session.close()

        print(f"\n✅ {added_count} neue Drucker erfolgreich hinzugefügt")
        return True

    except Exception as e:
        print(f"❌ Fehler beim Setup der Drucker: {e}")
        import traceback
        traceback.print_exc()
        if 'session' in locals():
            session.rollback()
            session.close()
        return False

def list_drucker():
    """Lists all printers stored in the database."""
    print("\n=== Drucker in der Datenbank ===")

    try:
        db = DatabaseManager()
        session = db.get_session()

        printers = session.query(Printer).all()

        if not printers:
            print("Keine Drucker in der Datenbank gefunden.")
            return True

        print(f"{'ID':<5} {'Name':<15} {'Status':<12} {'IP-Adresse':<15} {'Aktiv':<8}")
        print("-" * 60)

        for printer in printers:
            active_str = "✅" if printer.active else "❌"
            print(f"{printer.id:<5} {printer.name:<15} {printer.status:<12} {printer.ip_address:<15} {active_str:<8}")

        session.close()

        print(f"\nGesamt: {len(printers)} Drucker")
        return True

    except Exception as e:
        print(f"❌ Fehler beim Abrufen der Drucker: {e}")
        if 'session' in locals():
            session.close()
        return False

if __name__ == "__main__":
    print("MYP Platform - Drucker-Datenbank Setup")
    print("=" * 40)

    success = setup_drucker()
    if success:
        list_drucker()
        print("\n✅ Drucker-Setup erfolgreich abgeschlossen!")
    else:
        print("\n❌ Drucker-Setup fehlgeschlagen!")
        sys.exit(1)
270
backend/app - Kopie/utils/ssl_manager.py
Normal file
@@ -0,0 +1,270 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
SSL manager for the MYP platform.
Generates and manages SSL certificates for Mercedes-Benz Yard Printing
"""

import os
import socket
from datetime import datetime, timedelta
from cryptography import x509
from cryptography.x509.oid import NameOID, ExtendedKeyUsageOID
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
import ipaddress

class SSLManager:
    """SSL certificate manager for the MYP platform"""

    def __init__(self, cert_path: str = None, key_path: str = None):
        """
        Initializes the SSL manager

        Args:
            cert_path: path to the SSL certificate
            key_path: path to the SSL private key
        """
        from config.settings import SSL_CERT_PATH, SSL_KEY_PATH

        self.cert_path = cert_path or SSL_CERT_PATH
        self.key_path = key_path or SSL_KEY_PATH

        # Make sure the target directory exists
        cert_dir = os.path.dirname(self.cert_path)
        if not os.path.exists(cert_dir):
            os.makedirs(cert_dir, exist_ok=True)

    def generate_mercedes_certificate(self,
                                      hostname: str = "localhost",
                                      validity_days: int = 365) -> bool:
        """
        Generates a Mercedes-Benz SSL certificate

        Args:
            hostname: hostname for the certificate
            validity_days: validity period in days

        Returns:
            bool: True on success, False on error
        """
        try:
            print(f"Generiere Mercedes-Benz SSL-Zertifikat für {hostname}...")

            # Generate the private key (4096 bit for stronger security)
            private_key = rsa.generate_private_key(
                public_exponent=65537,
                key_size=4096,
            )

            # Subject and issuer for Mercedes-Benz
            subject = issuer = x509.Name([
                x509.NameAttribute(NameOID.COUNTRY_NAME, "DE"),
                x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "Baden-Württemberg"),
                x509.NameAttribute(NameOID.LOCALITY_NAME, "Stuttgart"),
                x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Mercedes-Benz Group AG"),
                x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, "IT Infrastructure"),
                x509.NameAttribute(NameOID.COMMON_NAME, hostname),
                x509.NameAttribute(NameOID.EMAIL_ADDRESS, "admin@mercedes-benz.com"),
            ])

            # Build the certificate
            cert = x509.CertificateBuilder().subject_name(
                subject
            ).issuer_name(
                issuer
            ).public_key(
                private_key.public_key()
            ).serial_number(
                x509.random_serial_number()
            ).not_valid_before(
                datetime.utcnow()
            ).not_valid_after(
                datetime.utcnow() + timedelta(days=validity_days)
            )

            # Add Subject Alternative Names
            san_list = [
                x509.DNSName(hostname),
                x509.DNSName("localhost"),
                x509.DNSName("*.localhost"),
                x509.DNSName("raspberrypi"),
                x509.DNSName("*.raspberrypi"),
                x509.DNSName("myp.mercedes-benz.local"),
                x509.DNSName("*.myp.mercedes-benz.local"),
                x509.IPAddress(ipaddress.IPv4Address("127.0.0.1")),
                x509.IPAddress(ipaddress.IPv4Address("0.0.0.0")),
            ]

            # Add the local IP address
            try:
                local_ip = socket.gethostbyname(socket.gethostname())
                if local_ip and local_ip != "127.0.0.1":
                    san_list.append(x509.IPAddress(ipaddress.IPv4Address(local_ip)))
            except Exception:
                pass

            cert = cert.add_extension(
                x509.SubjectAlternativeName(san_list),
                critical=False,
            )

            # Key-usage extension
            cert = cert.add_extension(
                x509.KeyUsage(
                    digital_signature=True,
                    key_encipherment=True,
                    key_agreement=False,
                    key_cert_sign=False,
                    crl_sign=False,
                    content_commitment=False,
                    data_encipherment=False,
                    encipher_only=False,
                    decipher_only=False,
                ),
                critical=True,
            )

            # Extended key usage
            cert = cert.add_extension(
                x509.ExtendedKeyUsage([
                    ExtendedKeyUsageOID.SERVER_AUTH,
                    ExtendedKeyUsageOID.CLIENT_AUTH,
                ]),
                critical=True,
            )

            # Basic constraints
            cert = cert.add_extension(
                x509.BasicConstraints(ca=False, path_length=None),
                critical=True,
            )

            # Sign the certificate
            cert = cert.sign(private_key, hashes.SHA256())

            # Write the certificate to disk
            with open(self.cert_path, "wb") as f:
                f.write(cert.public_bytes(serialization.Encoding.PEM))

            # Write the private key to disk
            with open(self.key_path, "wb") as f:
                f.write(private_key.private_bytes(
                    encoding=serialization.Encoding.PEM,
                    format=serialization.PrivateFormat.PKCS8,
                    encryption_algorithm=serialization.NoEncryption()
                ))

            print(f"✓ SSL-Zertifikat erfolgreich erstellt: {self.cert_path}")
            print(f"✓ SSL-Schlüssel erfolgreich erstellt: {self.key_path}")

            # Print the certificate details
            self._print_certificate_info(cert)

            return True

        except Exception as e:
            print(f"✗ Fehler beim Erstellen des SSL-Zertifikats: {e}")
            return False

    def _print_certificate_info(self, cert):
        """Prints information about the generated certificate"""
        try:
            print("\n=== Zertifikatsinformationen ===")
            print(f"Subject: {cert.subject.rfc4514_string()}")
            print(f"Gültig von: {cert.not_valid_before}")
            print(f"Gültig bis: {cert.not_valid_after}")
            print(f"Seriennummer: {cert.serial_number}")

            # Show the SANs
            try:
                san_ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
                print("Subject Alternative Names:")
                for name in san_ext.value:
                    print(f"  - {name}")
            except Exception:
                pass

            print("================================\n")

        except Exception as e:
            print(f"Fehler beim Anzeigen der Zertifikatsinformationen: {e}")

    def certificate_exists(self) -> bool:
        """
        Checks whether both the SSL certificate and the key exist

        Returns:
            bool: True if both files exist
        """
        return os.path.exists(self.cert_path) and os.path.exists(self.key_path)

    def get_certificate_info(self) -> dict:
        """
        Returns information about the existing certificate

        Returns:
            dict: certificate information, or None on error
        """
        if not self.certificate_exists():
            return None

        try:
            with open(self.cert_path, "rb") as f:
                cert_data = f.read()

            cert = x509.load_pem_x509_certificate(cert_data)

            return {
                "subject": cert.subject.rfc4514_string(),
                "issuer": cert.issuer.rfc4514_string(),
                "not_valid_before": cert.not_valid_before,
                "not_valid_after": cert.not_valid_after,
                "serial_number": cert.serial_number,
                "is_expired": datetime.utcnow() > cert.not_valid_after,
                "days_until_expiry": (cert.not_valid_after - datetime.utcnow()).days
            }

        except Exception as e:
            print(f"Fehler beim Lesen der Zertifikatsinformationen: {e}")
            return None

# Global SSL-manager instance
ssl_manager = SSLManager()

def ensure_ssl_certificates() -> bool:
    """
    Makes sure SSL certificates are available

    Returns:
        bool: True if certificates are available
    """
    if ssl_manager.certificate_exists():
        cert_info = ssl_manager.get_certificate_info()
        if cert_info and not cert_info["is_expired"]:
            print(f"✓ Gültiges SSL-Zertifikat gefunden (läuft ab in {cert_info['days_until_expiry']} Tagen)")
            return True
        else:
            print("⚠ SSL-Zertifikat ist abgelaufen, erstelle neues...")

    print("SSL-Zertifikate nicht gefunden, erstelle neue...")
    return ssl_manager.generate_mercedes_certificate()

if __name__ == "__main__":
    # Direct execution for testing
    print("Mercedes-Benz SSL-Zertifikat-Generator")
    print("=====================================")

    if ssl_manager.certificate_exists():
        print("Vorhandene Zertifikate gefunden:")
        info = ssl_manager.get_certificate_info()
        if info:
            print(f"  Subject: {info['subject']}")
            print(f"  Gültig bis: {info['not_valid_after']}")
            print(f"  Status: {'Abgelaufen' if info['is_expired'] else 'Gültig'}")

    success = ssl_manager.generate_mercedes_certificate()
    if success:
        print("✓ SSL-Zertifikat erfolgreich generiert!")
    else:
        print("✗ Fehler beim Generieren des SSL-Zertifikats!")
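How the manager is typically consumed at startup, as a sketch; it assumes the Flask development server, and host and port are illustrative:

from flask import Flask
from utils.ssl_manager import ensure_ssl_certificates, ssl_manager

app = Flask(__name__)

if ensure_ssl_certificates():
    # Hand the generated self-signed pair to the dev server.
    app.run(host="0.0.0.0", port=443,
            ssl_context=(ssl_manager.cert_path, ssl_manager.key_path))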
507
backend/app - Kopie/utils/template_helpers.py
Normal file
@@ -0,0 +1,507 @@
"""
Template helpers for the MYP platform:
Jinja2 helper functions for UI components
"""

from flask import current_app, url_for, request
from markupsafe import Markup
import json
from datetime import datetime
from typing import Dict, Any, Optional, List
import calendar
import random


class UIHelpers:
    """UI helper class bundling the template functions"""

    @staticmethod
    def component_button(text: str, type: str = "primary", size: str = "md",
                         classes: str = "", icon: str = "", onclick: str = "",
                         disabled: bool = False, **attrs) -> Markup:
        """
        Builds a button with Tailwind classes

        Args:
            text: button text
            type: button type (primary, secondary, danger, success)
            size: button size (sm, md, lg)
            classes: additional CSS classes
            icon: SVG icon markup
            onclick: JavaScript code for onclick
            disabled: whether the button is disabled
            **attrs: additional HTML attributes
        """
        base_classes = ["btn"]

        # Type-specific classes
        type_classes = {
            "primary": "btn-primary",
            "secondary": "btn-secondary",
            "danger": "btn-danger",
            "success": "btn-success"
        }
        base_classes.append(type_classes.get(type, "btn-primary"))

        # Size-specific classes
        size_classes = {
            "sm": "btn-sm",
            "md": "",
            "lg": "btn-lg"
        }
        if size_classes.get(size):
            base_classes.append(size_classes[size])

        if disabled:
            base_classes.append("opacity-50 cursor-not-allowed")

        # Append additional classes
        if classes:
            base_classes.append(classes)

        # Build the HTML attributes
        attrs_str = ""
        for key, value in attrs.items():
            attrs_str += f' {key.replace("_", "-")}="{value}"'

        if onclick:
            attrs_str += f' onclick="{onclick}"'

        if disabled:
            attrs_str += ' disabled'

        # Combine icon and text
        content = ""
        if icon:
            content += f'<span class="inline-block mr-2">{icon}</span>'
        content += text

        html = f'''<button class="{" ".join(base_classes)}"{attrs_str}>
    {content}
</button>'''

        return Markup(html)

    @staticmethod
    def component_badge(text: str, type: str = "blue", classes: str = "") -> Markup:
        """
        Builds a badge/tag element

        Args:
            text: badge text
            type: badge type (blue, green, red, yellow, purple)
            classes: additional CSS classes
        """
        base_classes = ["badge", f"badge-{type}"]

        if classes:
            base_classes.append(classes)

        html = f'<span class="{" ".join(base_classes)}">{text}</span>'
        return Markup(html)

    @staticmethod
    def component_status_badge(status: str, type: str = "job") -> Markup:
        """
        Builds a status badge for jobs or printers

        Args:
            status: status value
            type: kind of badge (job, printer)
        """
        if type == "job":
            class_name = f"job-status job-{status}"
        else:
            class_name = f"printer-status printer-{status}"

        # Translate the status text
        translations = {
            "job": {
                "queued": "In Warteschlange",
                "printing": "Wird gedruckt",
                "completed": "Abgeschlossen",
                "failed": "Fehlgeschlagen",
                "cancelled": "Abgebrochen",
                "paused": "Pausiert"
            },
            "printer": {
                "ready": "Bereit",
                "busy": "Beschäftigt",
                "error": "Fehler",
                "offline": "Offline",
                "maintenance": "Wartung"
            }
        }

        display_text = translations.get(type, {}).get(status, status)

        html = f'<span class="{class_name}">{display_text}</span>'
        return Markup(html)

    @staticmethod
    def component_card(title: str = "", content: str = "", footer: str = "",
                       classes: str = "", hover: bool = False) -> Markup:
        """
        Builds a card

        Args:
            title: card title
            content: card body
            footer: card footer
            classes: additional CSS classes
            hover: enable the hover effect
        """
        base_classes = ["card"]

        if hover:
            base_classes.append("card-hover")

        if classes:
            base_classes.append(classes)

        html_parts = [f'<div class="{" ".join(base_classes)}">']

        if title:
            html_parts.append(f'<h3 class="text-lg font-semibold mb-4 text-slate-900 dark:text-white">{title}</h3>')

        if content:
            html_parts.append(f'<div class="text-slate-600 dark:text-slate-300">{content}</div>')

        if footer:
            html_parts.append(f'<div class="mt-4 pt-4 border-t border-light-border dark:border-dark-border">{footer}</div>')

        html_parts.append('</div>')

        return Markup("".join(html_parts))

    @staticmethod
    def component_alert(message: str, type: str = "info", dismissible: bool = False) -> Markup:
        """
        Builds an alert notification

        Args:
            message: alert message
            type: alert type (info, success, warning, error)
            dismissible: whether the alert can be closed
        """
        base_classes = ["alert", f"alert-{type}"]

        html_parts = [f'<div class="{" ".join(base_classes)}">']

        if dismissible:
            html_parts.append('''
                <div class="flex justify-between">
                    <div>
            ''')

        html_parts.append(f'<p>{message}</p>')

        if dismissible:
            html_parts.append('''
                    </div>
                    <button onclick="this.parentElement.parentElement.remove()"
                            class="text-current opacity-70 hover:opacity-100">
                        <svg class="w-4 h-4" fill="currentColor" viewBox="0 0 20 20">
                            <path fill-rule="evenodd" d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z" clip-rule="evenodd"></path>
                        </svg>
                    </button>
                </div>
            ''')

        html_parts.append('</div>')

        return Markup("".join(html_parts))

    @staticmethod
    def component_modal(modal_id: str, title: str, content: str,
                        footer: str = "", size: str = "md") -> Markup:
        """
        Builds a modal dialog

        Args:
            modal_id: unique modal ID
            title: modal title
            content: modal body
            footer: modal footer
            size: modal size (sm, md, lg, xl)
        """
        size_classes = {
            "sm": "max-w-md",
            "md": "max-w-lg",
            "lg": "max-w-2xl",
            "xl": "max-w-4xl"
        }

        max_width = size_classes.get(size, "max-w-lg")

        html = f'''
        <div id="{modal_id}" class="fixed inset-0 z-50 hidden">
            <div class="flex items-center justify-center min-h-screen px-4">
                <div class="modal-content bg-white dark:bg-slate-800 rounded-lg shadow-xl transform scale-95 opacity-0 transition-all duration-150 w-full {max_width}">
                    <div class="px-6 py-4 border-b border-slate-200 dark:border-slate-700">
                        <div class="flex items-center justify-between">
                            <h3 class="text-lg font-semibold text-slate-900 dark:text-white">{title}</h3>
                            <button data-modal-close="{modal_id}" class="text-slate-400 hover:text-slate-600 dark:hover:text-slate-300">
                                <svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                                    <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"></path>
                                </svg>
                            </button>
                        </div>
                    </div>
                    <div class="px-6 py-4">
                        {content}
                    </div>
                    {f'<div class="px-6 py-4 border-t border-slate-200 dark:border-slate-700">{footer}</div>' if footer else ''}
                </div>
            </div>
        </div>
        '''

        return Markup(html)

    @staticmethod
    def component_table(headers: List[str], rows: List[List[str]],
                        classes: str = "", striped: bool = True) -> Markup:
        """
        Builds a styled table

        Args:
            headers: table header cells
            rows: table rows
            classes: additional CSS classes
            striped: enable zebra striping
        """
        html_parts = ['<div class="table-container">']

        table_classes = ["table-styled"]
        if classes:
            table_classes.append(classes)

        html_parts.append(f'<table class="{" ".join(table_classes)}">')

        # Header cells
        html_parts.append('<thead><tr>')
        for header in headers:
            html_parts.append(f'<th>{header}</th>')
        html_parts.append('</tr></thead>')

        # Rows
        html_parts.append('<tbody>')
        for i, row in enumerate(rows):
            row_classes = ""
            if striped and i % 2 == 1:
                row_classes = 'class="bg-slate-50 dark:bg-slate-800/50"'
            html_parts.append(f'<tr {row_classes}>')
            for cell in row:
                html_parts.append(f'<td>{cell}</td>')
            html_parts.append('</tr>')
        html_parts.append('</tbody>')

        html_parts.append('</table></div>')

        return Markup("".join(html_parts))

    @staticmethod
    def format_datetime_german(dt: datetime, format_str: str = "%d.%m.%Y %H:%M") -> str:
        """
        Formats a datetime for German display

        Args:
            dt: datetime object
            format_str: format string
        """
        if not dt:
            return ""
        return dt.strftime(format_str)

    @staticmethod
    def format_duration(minutes: int) -> str:
        """
        Formats a duration in minutes into a readable string

        Args:
            minutes: duration in minutes
        """
        if not minutes:
            return "0 Min"

        if minutes < 60:
            return f"{minutes} Min"

        hours = minutes // 60
        remaining_minutes = minutes % 60

        if remaining_minutes == 0:
            return f"{hours} Std"

        return f"{hours} Std {remaining_minutes} Min"

    @staticmethod
    def json_encode(data: Any) -> str:
        """
        Encodes Python data as JSON for JavaScript

        Args:
            data: object to encode
        """
        return json.dumps(data, default=str, ensure_ascii=False)


def register_template_helpers(app):
    """
    Registers all template helpers with the Flask app

    Args:
        app: Flask app instance
    """
    # Register the functions
    app.jinja_env.globals['ui_button'] = UIHelpers.component_button
    app.jinja_env.globals['ui_badge'] = UIHelpers.component_badge
    app.jinja_env.globals['ui_status_badge'] = UIHelpers.component_status_badge
    app.jinja_env.globals['ui_card'] = UIHelpers.component_card
    app.jinja_env.globals['ui_alert'] = UIHelpers.component_alert
    app.jinja_env.globals['ui_modal'] = UIHelpers.component_modal
    app.jinja_env.globals['ui_table'] = UIHelpers.component_table

    # Register the filters
    app.jinja_env.filters['german_datetime'] = UIHelpers.format_datetime_german
    app.jinja_env.filters['duration'] = UIHelpers.format_duration
    app.jinja_env.filters['json'] = UIHelpers.json_encode

    # Additional global variables
    app.jinja_env.globals['current_year'] = datetime.now().year

    # Icons as global variables
    icons = {
        'check': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"></path></svg>',
        'x': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"></path></svg>',
        'plus': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 6v6m0 0v6m0-6h6m-6 0H6"></path></svg>',
        'edit': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z"></path></svg>',
        'trash': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"></path></svg>',
        'printer': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M17 17h2a2 2 0 002-2v-4a2 2 0 00-2-2H5a2 2 0 00-2 2v4a2 2 0 002 2h2m2 4h6a2 2 0 002-2v-4a2 2 0 00-2-2H9a2 2 0 00-2 2v4a2 2 0 002 2zm8-12V5a2 2 0 00-2-2H9a2 2 0 00-2 2v4h10z"></path></svg>',
        'dashboard': '<svg class="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2H5a2 2 0 00-2-2z"></path></svg>',
    }

    app.jinja_env.globals['icons'] = icons

    @app.context_processor
    def utility_processor():
        """Adds useful helper functions to Jinja."""
        return dict(
            active_page=active_page,
            format_datetime=format_datetime,
            format_date=format_date,
            format_time=format_time,
            random_avatar_color=random_avatar_color,
            get_initials=get_initials,
            render_progress_bar=render_progress_bar
        )


def active_page(path):
    """
    Checks whether the current request path matches the given path.
    """
    if request.path == path:
        return 'active'
    return ''


def format_datetime(value, format='%d.%m.%Y %H:%M'):
    """
    Formats a date with time in German notation.
    """
    if value is None:
        return ""
    if isinstance(value, str):
        try:
            value = datetime.fromisoformat(value)
        except ValueError:
            return value
    return value.strftime(format)


def format_date(value, format='%d.%m.%Y'):
    """
    Formats a date in German notation.
    """
    if value is None:
        return ""
    if isinstance(value, str):
        try:
            value = datetime.fromisoformat(value)
        except ValueError:
            return value
    return value.strftime(format)


def format_time(value, format='%H:%M'):
    """
    Formats a time of day in German notation.
    """
    if value is None:
        return ""
    if isinstance(value, str):
        try:
            value = datetime.fromisoformat(value)
        except ValueError:
            return value
    return value.strftime(format)


def random_avatar_color():
    """
    Returns a random background color for avatars.
    """
    colors = [
        'bg-blue-100 text-blue-800',
        'bg-green-100 text-green-800',
        'bg-yellow-100 text-yellow-800',
        'bg-red-100 text-red-800',
        'bg-indigo-100 text-indigo-800',
        'bg-purple-100 text-purple-800',
        'bg-pink-100 text-pink-800',
        'bg-gray-100 text-gray-800',
    ]
    return random.choice(colors)


def get_initials(name, max_length=2):
    """
    Extracts the initials from a name.
    """
    if not name:
        return "?"

    parts = name.split()
    if len(parts) == 1:
        return name[0:max_length].upper()

    initials = ""
    for part in parts:
        if part and len(initials) < max_length:
            initials += part[0].upper()

    return initials


def render_progress_bar(value, color='blue'):
    """
    Renders a progress bar without inline styles.

    Args:
        value (int): percentage for the progress bar (0-100)
        color (str): bar color (blue, green, purple, red)

    Returns:
        str: HTML markup for the progress bar
    """
    css_class = f"progress-bar-fill-{color}"

    # Clamp the value to the valid range
    if value < 0:
        value = 0
    elif value > 100:
        value = 100

    # Build the DOM structure for the progress bar
    html = f"""
    <div class="progress-bar">
        <div class="progress-bar-fill {css_class}" data-width="{value}"></div>
    </div>
    """

    return Markup(html)
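Registration plus template-side use, as a minimal sketch; the Jinja snippets in the comments are illustrative, the helper and filter names are the ones registered above:

from flask import Flask
from utils.template_helpers import register_template_helpers

app = Flask(__name__)
register_template_helpers(app)

# In any Jinja template the helpers are then available as globals/filters:
#   {{ ui_button("Speichern", type="primary", size="sm") }}
#   {{ ui_status_badge(job.status, "job") }}
#   {{ job.start_time | german_datetime }} ({{ job.duration_minutes | duration }})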
243
backend/app - Kopie/utils/test_button_functionality.py
Normal file
@@ -0,0 +1,243 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Test-Skript für Button-Funktionalitäten
|
||||
Testet alle Buttons aus dem Selenium-Test auf echte Funktionalität
|
||||
"""
|
||||
|
||||
import requests
|
||||
import time
|
||||
import json
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.support.ui import WebDriverWait
|
||||
from selenium.webdriver.support import expected_conditions as EC
|
||||
|
||||
class ButtonFunctionalityTester:
|
||||
def __init__(self, base_url="http://127.0.0.1:5000"):
|
||||
self.base_url = base_url
|
||||
self.session = requests.Session()
|
||||
self.driver = None
|
||||
|
||||
def setup_driver(self):
|
||||
"""Selenium WebDriver einrichten"""
|
||||
try:
|
||||
self.driver = webdriver.Chrome()
|
||||
self.driver.set_window_size(1696, 1066)
|
||||
print("✅ WebDriver erfolgreich initialisiert")
|
||||
except Exception as e:
|
||||
print(f"❌ Fehler beim Initialisieren des WebDrivers: {e}")
|
||||
|
||||
def login(self, username="admin", password="admin"):
|
||||
"""Anmeldung durchführen"""
|
||||
try:
|
||||
self.driver.get(f"{self.base_url}/auth/login")
|
||||
|
||||
# Warten bis Login-Formular geladen ist
|
||||
username_field = WebDriverWait(self.driver, 10).until(
|
||||
EC.presence_of_element_located((By.NAME, "username"))
|
||||
)
|
||||
|
||||
username_field.send_keys(username)
|
||||
self.driver.find_element(By.NAME, "password").send_keys(password)
|
||||
self.driver.find_element(By.XPATH, "//button[@type='submit']").click()
|
||||
|
||||
# Warten bis Dashboard geladen ist
|
||||
WebDriverWait(self.driver, 10).until(
|
||||
EC.url_contains("/dashboard")
|
||||
)
|
||||
|
||||
print("✅ Erfolgreich angemeldet")
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"❌ Fehler bei der Anmeldung: {e}")
|
||||
return False
|
||||
|
||||
def test_button_functionality(self, button_selector, button_name, expected_action=""):
|
||||
"""Teste einen einzelnen Button auf Funktionalität"""
|
||||
try:
|
||||
print(f"\n🔍 Teste Button: {button_name} ({button_selector})")
|
||||
|
||||
# Button finden
|
||||
button = WebDriverWait(self.driver, 5).until(
|
||||
EC.element_to_be_clickable((By.CSS_SELECTOR, button_selector))
|
||||
)
|
||||
|
||||
# Ursprünglichen Zustand erfassen
|
||||
original_url = self.driver.current_url
|
||||
original_text = button.text if button.text else "Kein Text"
|
||||
|
||||
print(f" 📍 Button gefunden: '{original_text}'")
|
||||
|
||||
# Button klicken
|
||||
button.click()
|
||||
print(f" 👆 Button geklickt")
|
||||
|
||||
# Kurz warten für Reaktion
|
||||
time.sleep(1)
|
||||
|
||||
# Reaktion prüfen
|
||||
reactions = []
|
||||
|
||||
# URL-Änderung prüfen
|
||||
if self.driver.current_url != original_url:
|
||||
reactions.append(f"URL-Änderung: {self.driver.current_url}")
|
||||
|
||||
# Modal-Fenster prüfen
|
||||
try:
|
||||
modal = self.driver.find_element(By.CSS_SELECTOR, ".fixed.inset-0")
|
||||
if modal.is_displayed():
|
||||
reactions.append("Modal-Fenster geöffnet")
|
||||
except:
|
||||
pass
|
||||
|
||||
# Loading-Spinner prüfen
|
||||
try:
|
||||
spinner = self.driver.find_element(By.CSS_SELECTOR, ".animate-spin")
|
||||
if spinner.is_displayed():
|
||||
reactions.append("Loading-Animation aktiv")
|
||||
except:
|
||||
pass
|
||||
|
||||
            # Check for a toast notification
            try:
                toast = self.driver.find_element(By.CSS_SELECTOR, ".fixed.top-4.right-4")
                if toast.is_displayed():
                    reactions.append(f"Toast message: {toast.text}")
            except Exception:
                pass

            # Check for a button text change
            new_text = button.text if button.text else "No text"
            if new_text != original_text:
                reactions.append(f"Text change: '{original_text}' → '{new_text}'")

            # Report the result
            if reactions:
                print("   ✅ Reactions found:")
                for reaction in reactions:
                    print(f"      - {reaction}")
                return True
            else:
                print("   ⚠️ No visible reaction detected")
                return False

        except Exception as e:
            print(f"   ❌ Error during the test: {e}")
            return False

    def test_all_buttons(self):
        """Test all buttons from the Selenium test"""
        if not self.setup_driver():
            return

        if not self.login():
            return

        # Button test plan based on the Selenium test
        button_tests = [
            # Dashboard page (landing page)
            {
                "page": f"{self.base_url}/",
                "buttons": [
                    (".mb-8 > .btn-primary", "Primary CTA button"),
                    (".btn-primary > span", "CTA button span")
                ]
            },

            # Dashboard page
            {
                "page": f"{self.base_url}/dashboard",
                "buttons": [
                    ("#refreshDashboard > span", "Refresh dashboard")
                ]
            },

            # Printers page
            {
                "page": f"{self.base_url}/printers",
                "buttons": [
                    ("#refresh-button > span", "Refresh printers"),
                    ("#maintenance-toggle > span", "Maintenance mode toggle")
                ]
            },

            # Jobs page
            {
                "page": f"{self.base_url}/jobs",
                "buttons": [
                    ("#batch-toggle > span", "Multi-select toggle"),
                    ("#refresh-button > span", "Refresh jobs")
                ]
            },

            # Admin page
            {
                "page": f"{self.base_url}/admin",
                "buttons": [
                    ("#analytics-btn", "Analytics button"),
                    ("#maintenance-btn", "Maintenance button"),
                    ("#system-status-btn", "System status button"),
                    ("#add-user-btn", "Add user")
                ]
            }
        ]

        results = {"total": 0, "working": 0, "broken": 0}

        print("🚀 Starting comprehensive button functionality test...\n")

        for test_group in button_tests:
            print(f"📄 Navigating to page: {test_group['page']}")

            try:
                self.driver.get(test_group["page"])
                time.sleep(2)  # let the page load

                for selector, name in test_group["buttons"]:
                    results["total"] += 1
                    if self.test_button_functionality(selector, name):
                        results["working"] += 1
                    else:
                        results["broken"] += 1

            except Exception as e:
                print(f"❌ Error loading page {test_group['page']}: {e}")

        # Summary
        print(f"\n{'='*60}")
        print("📊 TEST SUMMARY")
        print(f"{'='*60}")
        print(f"Total buttons tested: {results['total']}")
        print(f"✅ Working: {results['working']}")
        print(f"❌ Not working: {results['broken']}")

        success_rate = (results['working'] / results['total']) * 100 if results['total'] > 0 else 0
        print(f"📈 Success rate: {success_rate:.1f}%")

        if success_rate >= 90:
            print("🎉 EXCELLENT! Almost all buttons work correctly.")
        elif success_rate >= 75:
            print("✅ GOOD! Most buttons work correctly.")
        elif success_rate >= 50:
            print("⚠️ FAIR! Some buttons still need improvement.")
        else:
            print("❌ NEEDS WORK! Many buttons have no functionality.")

    def cleanup(self):
        """Clean up"""
        if self.driver:
            self.driver.quit()
            print("🧹 WebDriver shut down")

def main():
    """Main entry point"""
    tester = ButtonFunctionalityTester()

    try:
        tester.test_all_buttons()
    finally:
        tester.cleanup()

if __name__ == "__main__":
    main()
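Note: the toast check above polls exactly once via find_element; a more robust variant would use an explicit wait. A minimal sketch, not part of the committed file, assuming the same toast selector:

    # Hypothetical helper: waits up to `timeout` seconds for the toast to
    # become visible instead of checking a single time.
    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    def wait_for_toast(driver, timeout=3):
        """Return the toast element once visible, or None on timeout."""
        try:
            return WebDriverWait(driver, timeout).until(
                EC.visibility_of_element_located((By.CSS_SELECTOR, ".fixed.top-4.right-4"))
            )
        except Exception:
            return None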
BIN
backend/app - Kopie/utils/test_buttons.bat
Normal file
Binary file not shown.
47
backend/app - Kopie/utils/test_database_fix.py
Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env python3
"""
Test script for the database repair
"""

import sys
import os

# Add the app path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

def test_database_fix():
    """Checks whether the database repair was successful."""
    try:
        from models import get_cached_session, User, Printer, Job

        print("=== DATABASE TEST AFTER REPAIR ===")

        with get_cached_session() as session:
            # Test the User query (this was the original problem)
            users = session.query(User).limit(5).all()
            print(f"✓ User query successful - {len(users)} users found")

            # Show details of the first user (if any)
            if users:
                user = users[0]
                print(f"✓ Test user: {user.username} ({user.email})")
                print(f"✓ updated_at field: {user.updated_at}")

            # Test the Printer query
            printers = session.query(Printer).limit(5).all()
            print(f"✓ Printer query successful - {len(printers)} printers found")

            # Test the Job query
            jobs = session.query(Job).limit(5).all()
            print(f"✓ Job query successful - {len(jobs)} jobs found")

        print("\n🎉 ALL DATABASE TESTS PASSED!")
        print("The application should now start without errors.")
        return True

    except Exception as e:
        print(f"\n❌ DATABASE TEST FAILED: {str(e)}")
        return False

if __name__ == "__main__":
    test_database_fix()
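The script always exits with status 0, even when the test fails. A minimal variant that propagates the result to a CI pipeline (a sketch, not part of the commit):

    if __name__ == "__main__":
        sys.exit(0 if test_database_fix() else 1)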
1
backend/app - Kopie/utils/test_korrekturen.py
Normal file
@@ -0,0 +1 @@
175
backend/app - Kopie/utils/test_p110.py
Normal file
@@ -0,0 +1,175 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
P110 TAPO TEST - specifically for TP-Link Tapo P110 smart plugs
Tests different versions of the PyP100 module
"""

import sys
import time
import socket
import subprocess
from datetime import datetime

# Credentials
TAPO_USERNAME = "till.tomczak@mercedes-benz.com"
TAPO_PASSWORD = "744563017196"

# Default IP addresses to test (adjust to the actual IPs)
TEST_IPS = [
    "192.168.0.103",  # these IPs were reachable in the previous test
    "192.168.0.104"
]

def log(message):
    """Log a message with a timestamp"""
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(f"[{timestamp}] {message}")

def check_connection(ip, port=80, timeout=1):
    """Checks a TCP connection"""
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        result = sock.connect_ex((ip, port))
        sock.close()
        return result == 0
    except Exception:
        return False

def install_package(package):
    """Installs a Python package"""
    try:
        log(f"Installing {package}...")
        subprocess.run([sys.executable, "-m", "pip", "install", package, "--force-reinstall"], check=True)
        log(f"✅ {package} installed successfully")
        return True
    except Exception as e:
        log(f"❌ Error installing {package}: {e}")
        return False

def test_p110_connection():
    """Tests different ways of communicating with P110 smart plugs"""

    log("🚀 TAPO P110 TEST - STARTING")
    log(f"👤 Username: {TAPO_USERNAME}")
    log(f"🔑 Password: {TAPO_PASSWORD}")

    # Check available modules
    log("\n1️⃣ STEP 1: Check available modules")

    try:
        from PyP100 import PyP110
        log("✅ PyP100 module found")
    except ImportError:
        log("❌ PyP100 module not found")
        install_package("PyP100==0.1.2")

        try:
            from PyP100 import PyP110
            log("✅ PyP100 module now installed")
        except ImportError:
            log("❌ Could not import PyP100")
            return

    # Find reachable plugs
    log("\n2️⃣ STEP 2: Scan for reachable IPs")

    available_ips = []
    for ip in TEST_IPS:
        if check_connection(ip):
            log(f"✅ IP {ip} is reachable")
            available_ips.append(ip)
        else:
            log(f"❌ IP {ip} not reachable")

    if not available_ips:
        log("❌ No reachable IPs found!")
        return

    # Test the P110 connection
    log("\n3️⃣ STEP 3: Test the PyP100 library")

    for ip in available_ips:
        try:
            log(f"🔄 Connecting to plug {ip} via PyP100.PyP110...")

            # Create a new instance
            from PyP100 import PyP110
            p110 = PyP110.P110(ip, TAPO_USERNAME, TAPO_PASSWORD)

            # Handshake and login
            log("   Handshake...")
            p110.handshake()
            log("   Login...")
            p110.login()

            # Fetch device information
            log("   Fetching device information...")
            device_info = p110.getDeviceInfo()

            # Success!
            log(f"✅ SUCCESS! Plug {ip} found")
            log(f"   Name: {device_info.get('nickname', 'Unknown')}")
            log(f"   State: {'On' if device_info.get('device_on', False) else 'Off'}")

            # Test switching on/off
            if "--toggle" in sys.argv:
                current_state = device_info.get('device_on', False)

                if current_state:
                    log("   Switching OFF...")
                    p110.turnOff()
                else:
                    log("   Switching ON...")
                    p110.turnOn()

                time.sleep(1)

                # Check the state again
                device_info = p110.getDeviceInfo()
                new_state = device_info.get('device_on', False)
                log(f"   New state: {'On' if new_state else 'Off'}")

            return True

        except Exception as e:
            log(f"❌ Error connecting to {ip}: {e}")

    # Try an alternative library
    log("\n4️⃣ STEP 4: Test the alternative pytapo library")

    if install_package("pytapo==1.1.2"):
        try:
            import pytapo
            from pytapo.tapo import Tapo

            for ip in available_ips:
                try:
                    log(f"🔄 Connecting to plug {ip} via pytapo...")

                    # New connection
                    tapo = Tapo(ip, TAPO_USERNAME, TAPO_PASSWORD)

                    # Fetch device information
                    device_info = tapo.get_device_info()

                    # Success!
                    log(f"✅ SUCCESS with pytapo! Plug {ip} found")
                    log(f"   Name: {device_info.get('nickname', 'Unknown')}")

                    return True

                except Exception as e:
                    log(f"❌ pytapo connection to {ip} failed: {e}")
        except Exception as e:
            log(f"❌ Error importing pytapo: {e}")

    log("\n❌ No working Tapo plugs found!")
    log("Please check the credentials and IP addresses")

if __name__ == "__main__":
    print("\n======= TAPO P110 TEST =======\n")
    test_p110_connection()
    print("\n======= TEST FINISHED =======\n")
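Usage, derived from the sys.argv check in step 3:

    python utils/test_p110.py            # discovery and status only
    python utils/test_p110.py --toggle   # additionally toggles the first plug found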
437
backend/app - Kopie/utils/test_system_functionality.py
Normal file
@@ -0,0 +1,437 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Comprehensive system functionality test for the MYP platform
Checks all critical components and features
"""

import sys
import os
import json
import requests
import time
from datetime import datetime
from typing import Dict, List, Any

# Add the current directory to the Python path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

# Tests for internal components
def test_internal_components():
    """Tests internal system components"""
    results = {}

    print("🔍 Testing internal system components...")

    # Test 1: import critical modules
    try:
        from models import User, Printer, Job, get_db_session, init_database
        from config.settings import SECRET_KEY, DATABASE_PATH
        from utils.logging_config import get_logger
        results["module_imports"] = {"status": "SUCCESS", "message": "All critical modules imported"}
    except Exception as e:
        results["module_imports"] = {"status": "FAILED", "message": f"Import error: {str(e)}"}
        return results

    # Test 2: database connection
    try:
        db_session = get_db_session()
        user_count = db_session.query(User).count()
        printer_count = db_session.query(Printer).count()
        job_count = db_session.query(Job).count()
        db_session.close()

        results["database_connection"] = {
            "status": "SUCCESS",
            "message": f"Database connected - {user_count} users, {printer_count} printers, {job_count} jobs"
        }
    except Exception as e:
        results["database_connection"] = {"status": "FAILED", "message": f"DB error: {str(e)}"}

    # Test 3: admin user present
    try:
        db_session = get_db_session()
        admin_user = db_session.query(User).filter(User.role == "admin").first()
        db_session.close()

        if admin_user:
            results["admin_user"] = {
                "status": "SUCCESS",
                "message": f"Admin user found: {admin_user.username} ({admin_user.email})"
            }
        else:
            results["admin_user"] = {"status": "FAILED", "message": "No admin user found"}
    except Exception as e:
        results["admin_user"] = {"status": "FAILED", "message": f"Admin check error: {str(e)}"}

    # Test 4: Windows fixes
    try:
        if os.name == 'nt':
            from utils.windows_fixes import get_windows_thread_manager
            thread_manager = get_windows_thread_manager()
            if thread_manager:
                results["windows_fixes"] = {"status": "SUCCESS", "message": "Windows fixes loaded"}
            else:
                results["windows_fixes"] = {"status": "WARNING", "message": "Windows fixes available but not active"}
        else:
            results["windows_fixes"] = {"status": "SKIPPED", "message": "Not a Windows system"}
    except Exception as e:
        results["windows_fixes"] = {"status": "WARNING", "message": f"Windows fixes error: {str(e)}"}

    # Test 5: logging system
    try:
        logger = get_logger("test")
        logger.info("Test log message")
        results["logging_system"] = {"status": "SUCCESS", "message": "Logging system functional"}
    except Exception as e:
        results["logging_system"] = {"status": "FAILED", "message": f"Logging error: {str(e)}"}

    # Test 6: queue manager
    try:
        from utils.queue_manager import get_queue_manager
        queue_manager = get_queue_manager()
        if queue_manager:
            status = queue_manager.get_queue_status()
            results["queue_manager"] = {
                "status": "SUCCESS",
                "message": f"Queue manager active - status: {len(status)} queues"
            }
        else:
            results["queue_manager"] = {"status": "WARNING", "message": "Queue manager not initialized"}
    except Exception as e:
        results["queue_manager"] = {"status": "WARNING", "message": f"Queue manager error: {str(e)}"}

    # Test 7: job scheduler
    try:
        from utils.job_scheduler import get_job_scheduler
        scheduler = get_job_scheduler()
        if scheduler:
            results["job_scheduler"] = {"status": "SUCCESS", "message": "Job scheduler available"}
        else:
            results["job_scheduler"] = {"status": "WARNING", "message": "Job scheduler not available"}
    except Exception as e:
        results["job_scheduler"] = {"status": "WARNING", "message": f"Job scheduler error: {str(e)}"}

    return results

def test_api_endpoints():
    """Tests critical API endpoints"""
    results = {}
    base_url = "http://localhost:5000"

    print("🌐 Testing API endpoints...")

    # Test 1: root endpoint
    try:
        response = requests.get(f"{base_url}/", timeout=5)
        if response.status_code == 200:
            results["root_endpoint"] = {"status": "SUCCESS", "message": "Root endpoint reachable"}
        else:
            results["root_endpoint"] = {"status": "FAILED", "message": f"HTTP {response.status_code}"}
    except Exception as e:
        results["root_endpoint"] = {"status": "FAILED", "message": f"Connection error: {str(e)}"}

    # Test 2: login page
    try:
        response = requests.get(f"{base_url}/auth/login", timeout=5)
        if response.status_code == 200:
            results["login_page"] = {"status": "SUCCESS", "message": "Login page available"}
        else:
            results["login_page"] = {"status": "FAILED", "message": f"HTTP {response.status_code}"}
    except Exception as e:
        results["login_page"] = {"status": "FAILED", "message": f"Login page error: {str(e)}"}

    # Test 3: API status (without authentication)
    try:
        response = requests.get(f"{base_url}/api/kiosk/status", timeout=5)
        if response.status_code in [200, 401, 403]:  # all of these are expected responses
            results["api_status"] = {"status": "SUCCESS", "message": "API reachable in principle"}
        else:
            results["api_status"] = {"status": "WARNING", "message": f"Unexpected HTTP {response.status_code}"}
    except Exception as e:
        results["api_status"] = {"status": "FAILED", "message": f"API status error: {str(e)}"}

    return results

def test_file_structure():
    """Tests the file and directory structure"""
    results = {}

    print("📁 Testing file and directory structure...")

    # Critical files
    critical_files = [
        "app.py",
        "models.py",
        "config/settings.py",
        "templates/base.html",
        "templates/login.html",
        "templates/dashboard.html",
        "static/css",
        "static/js",
        "utils/logging_config.py",
        "utils/queue_manager.py",
        "blueprints/guest.py",
        "blueprints/users.py",
        "blueprints/calendar.py"
    ]

    missing_files = []
    present_files = []

    for file_path in critical_files:
        if os.path.exists(file_path):
            present_files.append(file_path)
        else:
            missing_files.append(file_path)

    if missing_files:
        results["file_structure"] = {
            "status": "WARNING",
            "message": f"Missing files: {', '.join(missing_files)}"
        }
    else:
        results["file_structure"] = {
            "status": "SUCCESS",
            "message": f"All {len(present_files)} critical files present"
        }

    # Directories
    critical_dirs = ["logs", "database", "uploads", "static", "templates", "utils", "config", "blueprints"]
    missing_dirs = []
    present_dirs = []

    for dir_path in critical_dirs:
        if os.path.exists(dir_path) and os.path.isdir(dir_path):
            present_dirs.append(dir_path)
        else:
            missing_dirs.append(dir_path)

    if missing_dirs:
        results["directory_structure"] = {
            "status": "WARNING",
            "message": f"Missing directories: {', '.join(missing_dirs)}"
        }
    else:
        results["directory_structure"] = {
            "status": "SUCCESS",
            "message": f"All {len(present_dirs)} critical directories present"
        }

    return results

def test_database_integrity():
    """Tests the database integrity"""
    results = {}

    print("🗄️ Testing database integrity...")

    try:
        from models import User, Printer, Job, Stats, SystemLog, GuestRequest, UserPermission, Notification, get_db_session

        db_session = get_db_session()

        # Test that the tables exist
        tables_test = {}
        models_to_test = [User, Printer, Job, Stats, SystemLog, GuestRequest, UserPermission, Notification]

        for model in models_to_test:
            try:
                count = db_session.query(model).count()
                tables_test[model.__tablename__] = {"exists": True, "count": count}
            except Exception as e:
                tables_test[model.__tablename__] = {"exists": False, "error": str(e)}

        existing_tables = sum(1 for t in tables_test.values() if t.get("exists"))
        total_tables = len(tables_test)

        if existing_tables == total_tables:
            results["table_integrity"] = {
                "status": "SUCCESS",
                "message": f"All {total_tables} tables exist and are accessible"
            }
        else:
            results["table_integrity"] = {
                "status": "FAILED",
                "message": f"Only {existing_tables}/{total_tables} tables accessible"
            }

        # Test the database constraints
        try:
            # Check the foreign key constraints
            db_session.execute("PRAGMA foreign_key_check")
            results["database_constraints"] = {"status": "SUCCESS", "message": "Foreign key constraints OK"}
        except Exception as e:
            results["database_constraints"] = {"status": "WARNING", "message": f"Constraint check error: {str(e)}"}

        db_session.close()

    except Exception as e:
        results["database_integrity"] = {"status": "FAILED", "message": f"DB integrity test failed: {str(e)}"}

    return results

def create_test_data():
    """Creates test data if necessary"""
    results = {}

    print("🧪 Creating test data...")

    try:
        from models import User, Printer, Job, get_db_session

        db_session = get_db_session()

        # Check whether a test printer exists
        test_printer = db_session.query(Printer).filter(Printer.name.like("Test%")).first()

        if not test_printer:
            # Create a test printer
            test_printer = Printer(
                name="Test Drucker 1",
                model="Test Model",
                location="Test Labor",
                ip_address="192.168.1.100",
                mac_address="00:11:22:33:44:55",
                plug_ip="192.168.1.101",
                plug_username="test_user",
                plug_password="test_pass",
                status="offline"
            )
            db_session.add(test_printer)
            db_session.commit()

            results["test_printer"] = {"status": "SUCCESS", "message": "Test printer created"}
        else:
            results["test_printer"] = {"status": "SUCCESS", "message": "Test printer already present"}

        # Check whether the test user exists
        test_user = db_session.query(User).filter(User.username == "testuser").first()

        if not test_user:
            # Create a test user
            test_user = User(
                username="testuser",
                email="test@test.com",
                name="Test Benutzer",
                role="user"
            )
            test_user.set_password("testpass")
            db_session.add(test_user)
            db_session.commit()

            results["test_user"] = {"status": "SUCCESS", "message": "Test user created"}
        else:
            results["test_user"] = {"status": "SUCCESS", "message": "Test user already present"}

        db_session.close()

    except Exception as e:
        results["test_data_creation"] = {"status": "FAILED", "message": f"Test data creation failed: {str(e)}"}

    return results

def run_comprehensive_test():
    """Runs all tests and prints the results"""
    print("🚀 Starting comprehensive system functionality test for the MYP platform\n")
    print("=" * 70)

    all_results = {}

    # Internal components
    all_results.update(test_internal_components())
    print()

    # File/directory structure
    all_results.update(test_file_structure())
    print()

    # Database integrity
    all_results.update(test_database_integrity())
    print()

    # Create test data
    all_results.update(create_test_data())
    print()

    # API endpoints (only if the server is running)
    all_results.update(test_api_endpoints())
    print()

    # Summarize the results
    print("=" * 70)
    print("📊 TEST RESULTS SUMMARY")
    print("=" * 70)

    success_count = 0
    warning_count = 0
    failed_count = 0
    skipped_count = 0

    for test_name, result in all_results.items():
        status = result["status"]
        message = result["message"]

        if status == "SUCCESS":
            print(f"✅ {test_name}: {message}")
            success_count += 1
        elif status == "WARNING":
            print(f"⚠️ {test_name}: {message}")
            warning_count += 1
        elif status == "FAILED":
            print(f"❌ {test_name}: {message}")
            failed_count += 1
        elif status == "SKIPPED":
            print(f"⏭️ {test_name}: {message}")
            skipped_count += 1

    total_tests = len(all_results)

    print("\n" + "=" * 70)
    print("📈 STATISTICS")
    print("=" * 70)
    print(f"Total: {total_tests} tests")
    print(f"✅ Passed: {success_count}")
    print(f"⚠️ Warnings: {warning_count}")
    print(f"❌ Failed: {failed_count}")
    print(f"⏭️ Skipped: {skipped_count}")

    # Recommendations
    print("\n" + "=" * 70)
    print("💡 RECOMMENDATIONS")
    print("=" * 70)

    if failed_count == 0 and warning_count <= 2:
        print("🎉 The system is fully functional!")
        print("   All critical components are working properly.")
    elif failed_count == 0:
        print("✅ The system is functional overall.")
        print("   Some warnings should be looked at.")
    else:
        print("⚠️ The system has critical problems.")
        print("   Failed tests must be fixed.")

    # Save the results to a JSON file
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    results_file = f"test_results_{timestamp}.json"

    with open(results_file, "w", encoding="utf-8") as f:
        json.dump({
            "timestamp": datetime.now().isoformat(),
            "summary": {
                "total": total_tests,
                "success": success_count,
                "warnings": warning_count,
                "failed": failed_count,
                "skipped": skipped_count
            },
            "detailed_results": all_results
        }, f, indent=2, ensure_ascii=False)

    print(f"\n📄 Detailed results saved to: {results_file}")

    return failed_count == 0

if __name__ == "__main__":
    success = run_comprehensive_test()
    sys.exit(0 if success else 1)
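Note: the raw string passed to db_session.execute() in the constraint check works on SQLAlchemy 1.x; on 2.x, statements must be wrapped in text() or the call raises. A sketch of the 2.x form:

    from sqlalchemy import text
    db_session.execute(text("PRAGMA foreign_key_check"))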
212
backend/app - Kopie/utils/test_tapo_direkt.py
Normal file
@@ -0,0 +1,212 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
DIRECT TEST for TP-Link Tapo P110 smart plugs
Bypasses ping commands and tests a direct TCP connection instead
"""

import sys
import os
import socket
import time
from datetime import datetime

# Credentials for the Tapo plugs
TAPO_USERNAME = "till.tomczak@mercedes-benz.com"
TAPO_PASSWORD = "744563017196A"

# Default IPs for Tapo plugs
# (if unavailable, adjust these to the actual IPs in your network)
TAPO_IPS = [
    # Typical IP ranges
    "192.168.1.100",
    "192.168.1.101",
    "192.168.1.102",
    "192.168.1.103",
    "192.168.1.104",
    "192.168.1.105",
    "192.168.0.100",
    "192.168.0.101",
    "192.168.0.102",
    "192.168.0.103",
    "192.168.0.104",
    "192.168.0.105",

    # Specific to the Mercedes-Benz network
    "10.0.0.100",
    "10.0.0.101",
    "10.0.0.102",
    "10.0.0.103",
    "10.0.0.104",
    "10.0.0.105",

    # Additional possible IPs
    "192.168.178.100",
    "192.168.178.101",
    "192.168.178.102",
    "192.168.178.103",
    "192.168.178.104",
    "192.168.178.105",
]

def log_message(message, level="INFO"):
    """Log a message with a timestamp"""
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print(f"[{timestamp}] [{level}] {message}")

def check_tcp_connection(host, port=80, timeout=1):
    """
    Checks whether a TCP connection to a host and port is possible.
    Avoids ping and its charmap issues.

    Args:
        host: hostname or IP address
        port: TCP port (default: 80)
        timeout: timeout in seconds

    Returns:
        bool: True if the connection succeeded
    """
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        result = sock.connect_ex((host, port))
        sock.close()
        return result == 0
    except Exception:
        return False

def test_tapo_connection():
    """
    Tests the connection to TP-Link Tapo P110 smart plugs.
    """
    log_message("🔄 Checking whether the PyP100 module is installed...")

    try:
        from PyP100 import PyP110
        log_message("✅ PyP100 module imported successfully")
    except ImportError:
        log_message("❌ PyP100 module not installed", "ERROR")
        log_message("   Installing now via: pip install PyP100==0.1.2")

        try:
            import subprocess
            subprocess.run([sys.executable, "-m", "pip", "install", "PyP100==0.1.2"], check=True)
            log_message("✅ PyP100 module installed successfully")

            # Import again
            from PyP100 import PyP110
        except Exception as e:
            log_message(f"❌ Error installing PyP100: {str(e)}", "ERROR")
            log_message("   Please install manually: pip install PyP100==0.1.2")
            return

    log_message("🔍 Starting the Tapo plug test...")
    log_message(f"🔐 Credentials: {TAPO_USERNAME} / {TAPO_PASSWORD}")

    successful_connections = 0
    found_ips = []

    for ip in TAPO_IPS:
        log_message(f"🔄 Testing IP address: {ip}")

        # TCP connection test for basic connectivity (port 80 for HTTP)
        conn_success = check_tcp_connection(ip, port=80)

        if not conn_success:
            # Alternatively test port 443 for HTTPS
            conn_success = check_tcp_connection(ip, port=443)

        if not conn_success:
            log_message(f"  ❌ IP {ip} not reachable (connection failed)")
            continue

        log_message(f"  ✅ IP {ip} is reachable (TCP connection succeeded)")

        # Test the Tapo connection
        try:
            log_message(f"  🔄 Connecting to Tapo plug {ip}...")
            p110 = PyP110.P110(ip, TAPO_USERNAME, TAPO_PASSWORD)
            p110.handshake()  # authentication
            p110.login()      # login

            # Fetch device information
            device_info = p110.getDeviceInfo()

            # Read the state
            is_on = device_info.get('device_on', False)
            nickname = device_info.get('nickname', "Unknown")

            log_message(f"  ✅ Connection to Tapo plug '{nickname}' ({ip}) succeeded")
            log_message(f"  📱 Device name: {nickname}")
            log_message(f"  ⚡ State: {'On' if is_on else 'Off'}")

            if 'on_time' in device_info:
                on_time = device_info.get('on_time', 0)
                hours, minutes = divmod(on_time // 60, 60)
                log_message(f"  ⏱️ Uptime: {hours}h {minutes}m")

            successful_connections += 1
            found_ips.append(ip)

            # Exercise the plug: ON/OFF
            if len(sys.argv) > 1 and sys.argv[1] == '--toggle':
                if is_on:
                    log_message(f"  🔄 Switching plug {nickname} OFF...")
                    p110.turnOff()
                    log_message("  ✅ Plug switched off")
                else:
                    log_message(f"  🔄 Switching plug {nickname} ON...")
                    p110.turnOn()
                    log_message("  ✅ Plug switched on")

                # Short pause
                time.sleep(1)

                # Read the state again
                device_info = p110.getDeviceInfo()
                is_on = device_info.get('device_on', False)
                log_message(f"  ⚡ New state: {'On' if is_on else 'Off'}")

        except Exception as e:
            log_message(f"  ❌ Connection to Tapo plug {ip} failed: {str(e)}", "ERROR")

    # Summary
    log_message("\n📊 Summary:")
    log_message(f"  Tested IPs: {len(TAPO_IPS)}")
    log_message(f"  Tapo plugs found: {successful_connections}")

    if successful_connections > 0:
        log_message("✅ Tapo plugs found and tested successfully!")
        log_message(f"📝 Found IPs: {found_ips}")

        # Output for the configuration
        log_message("\n🔧 Configuration for settings.py:")
        log_message(f"""
# TP-Link Tapo default credentials
TAPO_USERNAME = "{TAPO_USERNAME}"
TAPO_PASSWORD = "{TAPO_PASSWORD}"

# Enable automatic plug discovery
TAPO_AUTO_DISCOVERY = True

# Default plug IPs
DEFAULT_TAPO_IPS = {found_ips}
""")
    else:
        log_message("❌ No Tapo plugs found!", "ERROR")
        log_message("   Please check the IP addresses and credentials")

        # Troubleshooting tips
        log_message("\n🔧 Troubleshooting tips:")
        log_message("1. Make sure the plugs are connected to the WLAN")
        log_message("2. Check the IP addresses in the Tapo app or on the router")
        log_message("3. Make sure the credentials are correct")
        log_message("4. Check whether the plugs are reachable via the Tapo app")
        log_message("5. Power-cycle the plugs (unplug and plug back in)")

if __name__ == "__main__":
    print("\n====== TAPO P110 DIRECT TEST (NO PING) ======\n")
    test_tapo_connection()
    print("\n====== TEST COMPLETE ======\n")
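A possible extension, not part of the commit: instead of maintaining the fixed TAPO_IPS list, an entire /24 can be probed in parallel with the same check_tcp_connection helper. Subnet, port, and worker count are assumptions to adjust per site:

    import ipaddress
    from concurrent.futures import ThreadPoolExecutor

    def scan_subnet(subnet="192.168.0.0/24", port=80):
        """Return every host in `subnet` that accepts a TCP connection on `port`."""
        hosts = [str(h) for h in ipaddress.ip_network(subnet).hosts()]
        with ThreadPoolExecutor(max_workers=64) as pool:
            hits = pool.map(lambda ip: ip if check_tcp_connection(ip, port) else None, hosts)
        return [ip for ip in hits if ip]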
132
backend/app - Kopie/utils/test_tapo_sofort.py
Normal file
@@ -0,0 +1,132 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
IMMEDIATE TEST for TP-Link Tapo P110 smart plugs
Uses PyP100 directly with hardcoded credentials
"""

import os
import sys
import time
from datetime import datetime

# Tapo credentials hardcoded directly (as in the working versions)
os.environ["TAPO_USERNAME"] = "till.tomczak@mercedes-benz.com"
os.environ["TAPO_PASSWORD"] = "744563017196A"  # the trailing 'A' matters

# Plug IPs
TAPO_IPS = [
    "192.168.0.103",
    "192.168.0.104",
    "192.168.0.100",
    "192.168.0.101",
    "192.168.0.102"
]

def log(msg):
    """Logs a message with a timestamp"""
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f"[{timestamp}] {msg}")

def test_connection():
    """Tests the connection to the plugs"""
    log("🔄 Testing the PyP100 import...")

    try:
        from PyP100 import PyP100
        log("✅ PyP100 module imported successfully")
    except ImportError:
        log("❌ PyP100 module not found. Installing it...")
        try:
            import subprocess
            subprocess.run([sys.executable, "-m", "pip", "install", "PyP100==0.0.12"], check=True)
            from PyP100 import PyP100
            log("✅ PyP100 module installed")
        except Exception as e:
            log(f"❌ Installation error: {str(e)}")
            return False

    # Read the credentials from the environment variables
    username = os.environ.get("TAPO_USERNAME")
    password = os.environ.get("TAPO_PASSWORD")

    log(f"👤 Username: {username}")
    log(f"🔑 Password: {password}")

    # Try each IP
    success = False

    for ip in TAPO_IPS:
        log(f"🔄 Testing plug with IP: {ip}")

        try:
            # Important: use PyP100 (not PyP110), as in the working versions
            p100 = PyP100.P100(ip, username, password)

            # Handshake and login
            log("  🔄 Handshake...")
            p100.handshake()

            log("  🔄 Login...")
            p100.login()

            # Query the state
            log("  🔄 Querying state...")
            device_info = p100.getDeviceInfo()

            # Success!
            state = "On" if device_info.get("device_on", False) else "Off"
            log(f"✅ SUCCESS! Plug {ip} connected")
            log(f"  📱 Name: {device_info.get('nickname', 'Unknown')}")
            log(f"  ⚡ State: {state}")

            # Toggle the plug if requested
            if "--toggle" in sys.argv:
                if device_info.get("device_on", False):
                    log("  🔄 Switching plug OFF...")
                    p100.turnOff()
                else:
                    log("  🔄 Switching plug ON...")
                    p100.turnOn()

                time.sleep(1)

                # Fetch the new state
                device_info = p100.getDeviceInfo()
                state = "On" if device_info.get("device_on", False) else "Off"
                log(f"  ⚡ New state: {state}")

            success = True

            # Print the configuration for settings.py
            log("\n✅ CONFIGURATION FOR SETTINGS.PY:")
            log(f"""
# TP-Link Tapo default credentials
TAPO_USERNAME = "{username}"
TAPO_PASSWORD = "{password}"

# Default plug IPs
DEFAULT_TAPO_IPS = ["{ip}"]
""")

            # Only test the first plug that works
            break

        except Exception as e:
            log(f"❌ Error with plug {ip}: {str(e)}")

    if not success:
        log("\n❌ No Tapo plug could be connected!")
        log("Check the following possible causes:")
        log("1. Plugs are not plugged in or not connected to the WLAN")
        log("2. IP addresses are wrong")
        log("3. Credentials are wrong (check the trailing 'A' in the password)")
        log("4. Network problems prevent access")

    return success

if __name__ == "__main__":
    print("\n====== TAPO P110 IMMEDIATE TEST ======\n")
    test_connection()
    print("\n====== TEST FINISHED ======\n")
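The script writes the credentials into os.environ and reads them straight back. A sketch of reading them from the real environment instead, so the password does not live in the source (same variable names as above):

    username = os.environ.get("TAPO_USERNAME")
    password = os.environ.get("TAPO_PASSWORD")
    if not username or not password:
        raise SystemExit("TAPO_USERNAME and TAPO_PASSWORD must be set")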
60
backend/app - Kopie/utils/update_printer_locations.py
Normal file
@@ -0,0 +1,60 @@
#!/usr/bin/env python3.11
"""
Script for updating the printer locations in the database.
Changes every location from "Labor" to "Werk 040 - Berlin - TBA".
"""

import sys
import os
sys.path.append('.')

from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

def update_printer_locations():
    """Updates all printer locations to 'Werk 040 - Berlin - TBA'."""

    print("=== Updating printer locations ===")

    try:
        db = DatabaseManager()
        session = db.get_session()

        # Fetch all printers
        all_printers = session.query(Printer).all()
        print(f"Printers found: {len(all_printers)}")

        if not all_printers:
            print("No printers found in the database.")
            session.close()
            return

        # New location label
        new_location = "Werk 040 - Berlin - TBA"
        updated_count = 0

        # Walk all printers and update the location
        for printer in all_printers:
            old_location = printer.location
            printer.location = new_location

            print(f"✅ {printer.name}: '{old_location}' → '{new_location}'")
            updated_count += 1

        # Persist the changes
        session.commit()
        session.close()

        print(f"\n✅ {updated_count} printer locations updated successfully")
        print(f"New location: {new_location}")
        print("Location update complete!")

    except Exception as e:
        print(f"❌ Error during the location update: {e}")
        if 'session' in locals():
            session.rollback()
            session.close()

if __name__ == "__main__":
    update_printer_locations()
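The per-row loop issues one UPDATE per printer; SQLAlchemy can do the same in a single bulk statement. A sketch, assuming the same session object:

    updated_count = session.query(Printer).update(
        {Printer.location: "Werk 040 - Berlin - TBA"},
        synchronize_session=False  # skip reconciling already-loaded objects
    )
    session.commit()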
110
backend/app - Kopie/utils/update_printers.py
Normal file
@@ -0,0 +1,110 @@
#!/usr/bin/env python3
"""
Script for synchronizing the printers in the database with the hardcoded printers.
Sets the status based on the configuration.
"""

import sys
import os
sys.path.append('.')

from config.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

def update_printer_status():
    """Updates the status of all printers based on the hardcoded configuration."""

    print("=== Printer status update ===")
    print(f"Hardcoded printers: {len(PRINTERS)}")

    try:
        db = DatabaseManager()
        session = db.get_session()

        # Fetch all printers from the database
        printers = session.query(Printer).all()
        print(f"Printers in database: {len(printers)}")

        updated_count = 0

        for printer in printers:
            # Check whether the printer exists in the hardcoded configuration
            if printer.name in PRINTERS:
                # Printer is configured -> online/available
                old_status = printer.status
                printer.status = "available"
                printer.active = True

                # Update the IP address from the configuration
                config_ip = PRINTERS[printer.name]["ip"]
                if printer.ip_address != config_ip:
                    printer.ip_address = config_ip

                print(f"✅ {printer.name}: {old_status} -> available (IP: {config_ip})")
                updated_count += 1
            else:
                # Printer not configured -> offline
                old_status = printer.status
                printer.status = "offline"
                printer.active = False
                print(f"❌ {printer.name}: {old_status} -> offline")
                updated_count += 1

        # Persist the changes
        session.commit()
        session.close()

        print(f"\n✅ {updated_count} printers updated")
        print("Status update complete!")

    except Exception as e:
        print(f"❌ Error during the update: {e}")
        if 'session' in locals():
            session.rollback()
            session.close()

def list_printer_status():
    """Prints the current status of all printers."""

    print("\n=== Current printer status ===")

    try:
        db = DatabaseManager()
        session = db.get_session()

        printers = session.query(Printer).all()

        if not printers:
            print("No printers found in the database.")
            return

        print(f"{'Name':<15} {'Status':<12} {'Active':<8} {'IP address':<15} {'Configured':<12}")
        print("-" * 70)

        for printer in printers:
            configured = "✅" if printer.name in PRINTERS else "❌"
            active_str = "✅" if printer.active else "❌"

            print(f"{printer.name:<15} {printer.status:<12} {active_str:<8} {printer.ip_address or 'N/A':<15} {configured:<12}")

        session.close()

    except Exception as e:
        print(f"❌ Error while fetching: {e}")
        if 'session' in locals():
            session.close()

if __name__ == "__main__":
    print("Printer status management")
    print("=" * 30)

    # Show the current status
    list_printer_status()

    # Update the status
    update_printer_status()

    # Show the new status
    list_printer_status()
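Both helpers guard their cleanup with the `if 'session' in locals()` pattern; a try/finally sketch avoids that check entirely (assuming the same DatabaseManager API):

    session = DatabaseManager().get_session()
    try:
        printers = session.query(Printer).all()
        # ... apply the status updates ...
        session.commit()
    finally:
        session.close()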
295
backend/app - Kopie/utils/update_requirements.py
Normal file
@@ -0,0 +1,295 @@
#!/usr/bin/env python3
"""
MYP Platform - Requirements Update Script
Updates the requirements based on the imports that are actually used
"""

import os
import sys
import subprocess
import ast
import importlib.util
from pathlib import Path
from typing import Set, List, Dict

def get_imports_from_file(file_path: Path) -> Set[str]:
    """Extracts all import statements from a Python file."""
    imports = set()

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()

        tree = ast.parse(content)

        for node in ast.walk(tree):
            if isinstance(node, ast.Import):
                for alias in node.names:
                    imports.add(alias.name.split('.')[0])
            elif isinstance(node, ast.ImportFrom):
                if node.module:
                    imports.add(node.module.split('.')[0])

    except Exception as e:
        print(f"Error parsing {file_path}: {e}")

    return imports

def get_all_imports(project_root: Path) -> Set[str]:
    """Collects all imports from the project."""
    all_imports = set()

    # Analyze the important files
    important_files = [
        'app.py',
        'models.py',
        'utils/rate_limiter.py',
        'utils/job_scheduler.py',
        'utils/queue_manager.py',
        'utils/ssl_manager.py',
        'utils/security.py',
        'utils/permissions.py',
        'utils/analytics.py',
        'utils/template_helpers.py',
        'utils/logging_config.py'
    ]

    for file_path in important_files:
        full_path = project_root / file_path
        if full_path.exists():
            imports = get_imports_from_file(full_path)
            all_imports.update(imports)
            print(f"✓ Analyzed: {file_path} ({len(imports)} imports)")

    return all_imports

def get_package_mapping() -> Dict[str, str]:
    """Mapping from import names to PyPI package names."""
    return {
        'flask': 'Flask',
        'flask_login': 'Flask-Login',
        'flask_wtf': 'Flask-WTF',
        'sqlalchemy': 'SQLAlchemy',
        'werkzeug': 'Werkzeug',
        'bcrypt': 'bcrypt',
        'cryptography': 'cryptography',
        'PyP100': 'PyP100',
        'redis': 'redis',
        'requests': 'requests',
        'jinja2': 'Jinja2',
        'markupsafe': 'MarkupSafe',
        'itsdangerous': 'itsdangerous',
        'psutil': 'psutil',
        'click': 'click',
        'blinker': 'blinker',
        'pywin32': 'pywin32',
        'pytest': 'pytest',
        'gunicorn': 'gunicorn'
    }

def get_current_versions() -> Dict[str, str]:
    """Fetches the currently installed versions."""
    versions = {}

    try:
        result = subprocess.run(['pip', 'list', '--format=freeze'],
                                capture_output=True, text=True,
                                encoding='utf-8', errors='replace')

        for line in result.stdout.strip().split('\n'):
            if '==' in line:
                package, version = line.split('==', 1)
                versions[package.lower()] = version

    except Exception as e:
        print(f"Error fetching the versions: {e}")

    return versions

def check_package_availability(package: str, version: str = None) -> bool:
    """Checks whether a package is available in the given version."""
    try:
        if version:
            cmd = ['pip', 'index', 'versions', package]
        else:
            cmd = ['pip', 'show', package]

        result = subprocess.run(cmd, capture_output=True, text=True,
                                encoding='utf-8', errors='replace')
        return result.returncode == 0

    except Exception:
        return False

def generate_requirements(imports: Set[str], versions: Dict[str, str]) -> List[str]:
    """Generates the requirements list."""
    package_mapping = get_package_mapping()
    requirements = []

    # Standard-library modules that do not need to be installed
    stdlib_modules = {
        'os', 'sys', 'logging', 'atexit', 'datetime', 'time', 'subprocess',
        'json', 'signal', 'threading', 'functools', 'typing', 'contextlib',
        'secrets', 'hashlib', 'calendar', 'random', 'socket', 'ipaddress',
        'enum', 'dataclasses', 'concurrent', 'collections'
    }

    # Exclude local modules
    local_modules = {
        'models', 'utils', 'config', 'blueprints'
    }

    # Consider external packages only
    external_imports = imports - stdlib_modules - local_modules

    for import_name in sorted(external_imports):
        package_name = package_mapping.get(import_name, import_name)

        # Take the version from the installed packages
        version = versions.get(package_name.lower())

        if version and check_package_availability(package_name, version):
            if package_name == 'pywin32':
                requirements.append(f"{package_name}=={version}; sys_platform == \"win32\"")
            else:
                requirements.append(f"{package_name}=={version}")
        else:
            print(f"⚠️ Package {package_name} not found or version unknown")

    return requirements

def write_requirements_file(requirements: List[str], output_file: Path):
    """Writes the requirements to a file."""
    header = """# MYP Platform - Python Dependencies
# Based on the imports actually used in app.py
# Automatically generated on: {date}
# Install with: pip install -r requirements.txt

# ===== CORE FLASK FRAMEWORK =====
# Used directly in app.py
{flask_requirements}

# ===== DATABASE =====
# SQLAlchemy for database operations (models.py, app.py)
{db_requirements}

# ===== SECURITY AND AUTHENTICATION =====
# Werkzeug for password hashing and utilities (app.py)
{security_requirements}

# ===== SMART PLUG CONTROL =====
# PyP100 for TP-Link Tapo smart plugs (utils/job_scheduler.py)
{smartplug_requirements}

# ===== RATE LIMITING AND CACHING =====
# Redis for rate limiting (utils/rate_limiter.py) - optional
{cache_requirements}

# ===== HTTP REQUESTS =====
# Requests for HTTP calls (utils/queue_manager.py, utils/debug_drucker_erkennung.py)
{http_requirements}

# ===== TEMPLATE ENGINE =====
# Jinja2 and MarkupSafe (installed automatically with Flask, but explicit for utils/template_helpers.py)
{template_requirements}

# ===== SYSTEM MONITORING =====
# psutil for system monitoring (utils/debug_utils.py, utils/debug_cli.py)
{monitoring_requirements}

# ===== ADDITIONAL CORE DEPENDENCIES =====
# Click for CLI commands (comes with Flask)
{core_requirements}

# ===== WINDOWS-SPECIFIC DEPENDENCIES =====
# Only required on Windows systems
{windows_requirements}

# ===== OPTIONAL: DEVELOPMENT AND TESTING =====
# Development environment only
{dev_requirements}

# ===== OPTIONAL: PRODUCTION SERVER =====
# Gunicorn for the production environment
{prod_requirements}
"""

    # Categorize the requirements
    flask_reqs = [r for r in requirements if any(x in r.lower() for x in ['flask'])]
    db_reqs = [r for r in requirements if 'SQLAlchemy' in r]
    security_reqs = [r for r in requirements if any(x in r for x in ['Werkzeug', 'bcrypt', 'cryptography'])]
    smartplug_reqs = [r for r in requirements if 'PyP100' in r]
    cache_reqs = [r for r in requirements if 'redis' in r]
    http_reqs = [r for r in requirements if 'requests' in r]
    template_reqs = [r for r in requirements if any(x in r for x in ['Jinja2', 'MarkupSafe', 'itsdangerous'])]
    monitoring_reqs = [r for r in requirements if 'psutil' in r]
    core_reqs = [r for r in requirements if any(x in r for x in ['click', 'blinker'])]
    windows_reqs = [r for r in requirements if 'sys_platform' in r]

    from datetime import datetime

    content = header.format(
        date=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        flask_requirements='\n'.join(flask_reqs) or '# No Flask-specific requirements',
        db_requirements='\n'.join(db_reqs) or '# No database requirements',
        security_requirements='\n'.join(security_reqs) or '# No security requirements',
        smartplug_requirements='\n'.join(smartplug_reqs) or '# No smart plug requirements',
        cache_requirements='\n'.join(cache_reqs) or '# No cache requirements',
        http_requirements='\n'.join(http_reqs) or '# No HTTP requirements',
        template_requirements='\n'.join(template_reqs) or '# No template requirements',
        monitoring_requirements='\n'.join(monitoring_reqs) or '# No monitoring requirements',
        core_requirements='\n'.join(core_reqs) or '# No core requirements',
        windows_requirements='\n'.join(windows_reqs) or '# No Windows requirements',
        dev_requirements='pytest==8.3.4; extra == "dev"\npytest-cov==6.0.0; extra == "dev"',
        prod_requirements='gunicorn==23.0.0; extra == "prod"'
    )

    with open(output_file, 'w', encoding='utf-8') as f:
        f.write(content)

def main():
    """Main entry point."""
    print("🔄 MYP Platform Requirements Update")
    print("=" * 50)

    # Determine the project root
    project_root = Path(__file__).parent

    print(f"📁 Project directory: {project_root}")

    # Collect the imports
    print("\n📋 Collecting imports from the important files...")
    imports = get_all_imports(project_root)

    print(f"\n📦 External imports found: {len(imports)}")
    for imp in sorted(imports):
        print(f"  - {imp}")

    # Fetch the current versions
    print("\n🔍 Checking installed versions...")
    versions = get_current_versions()

    # Generate the requirements
    print("\n⚙️ Generating requirements...")
    requirements = generate_requirements(imports, versions)

    # Write the requirements file
    output_file = project_root / 'requirements.txt'
    write_requirements_file(requirements, output_file)

    print(f"\n✅ Requirements updated: {output_file}")
    print(f"📊 {len(requirements)} packages in requirements.txt")

    # Summary
    print("\n📋 Generated requirements:")
    for req in requirements:
        print(f"  - {req}")

    print("\n🎉 Requirements update complete!")
    print("\nNext steps:")
    print("1. pip install -r requirements.txt")
    print("2. Test the application")
    print("3. Adjust requirements-dev.txt and requirements-prod.txt as needed")

if __name__ == "__main__":
    main()
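After regenerating the file, the result can be validated with pip itself:

    pip install -r requirements.txt
    pip check   # verifies that the installed set has no conflicting dependencies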
353
backend/app - Kopie/utils/windows_fixes.py
Normal file
@@ -0,0 +1,353 @@
|
||||
"""
|
||||
Windows-spezifische Fixes für Thread- und Socket-Probleme
|
||||
Behebt bekannte Issues mit Flask Auto-Reload auf Windows.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import signal
|
||||
import threading
|
||||
import time
|
||||
import atexit
|
||||
from typing import List, Callable
|
||||
from utils.logging_config import get_logger
|
||||
|
||||
# Logger für Windows-Fixes
|
||||
windows_logger = get_logger("windows_fixes")
|
||||
|
||||
# Globale Flags um doppelte Anwendung zu verhindern
|
||||
_windows_fixes_applied = False
|
||||
_socket_patches_applied = False
|
||||
|
||||
class WindowsThreadManager:
    """
    Manages threads and their orderly shutdown on Windows.
    Fixes socket errors during Flask auto-reload.
    """
    
    def __init__(self):
        self.managed_threads: List[threading.Thread] = []
        self.cleanup_functions: List[Callable] = []
        self.shutdown_event = threading.Event()
        self._lock = threading.Lock()
        self._is_shutting_down = False
        
        # Register signal handlers only on Windows
        if os.name == 'nt':
            self._register_signal_handlers()
    
    def _register_signal_handlers(self):
        """Registers Windows-specific signal handlers."""
        try:
            signal.signal(signal.SIGINT, self._signal_handler)
            signal.signal(signal.SIGTERM, self._signal_handler)
            # Windows-specific SIGBREAK (Ctrl+Break)
            if hasattr(signal, 'SIGBREAK'):
                signal.signal(signal.SIGBREAK, self._signal_handler)
            windows_logger.debug("✅ Windows signal handlers registered")
        except Exception as e:
            windows_logger.warning(f"⚠️ Signal handlers could not be registered: {str(e)}")
    
    def _signal_handler(self, sig, frame):
        """Signal handler for an orderly shutdown."""
        if not self._is_shutting_down:
            windows_logger.warning(f"🛑 Windows signal {sig} received - initiating shutdown")
            self.shutdown_all()
    
    def register_thread(self, thread: threading.Thread):
        """Registers a thread for orderly termination."""
        with self._lock:
            if thread not in self.managed_threads:
                self.managed_threads.append(thread)
                windows_logger.debug(f"📝 Thread {thread.name} registered")
    
    def register_cleanup_function(self, func: Callable):
        """Registers a cleanup function."""
        with self._lock:
            if func not in self.cleanup_functions:
                self.cleanup_functions.append(func)
                windows_logger.debug("📝 Cleanup function registered")
    
    def shutdown_all(self):
        """Stops all managed threads and runs the cleanup functions."""
        if self._is_shutting_down:
            return
        
        with self._lock:
            self._is_shutting_down = True
        
        windows_logger.info("🔄 Starting Windows thread shutdown...")
        
        # Set the shutdown event so threads can leave their work loops
        self.shutdown_event.set()
        
        # Run the cleanup functions
        for func in self.cleanup_functions:
            try:
                windows_logger.debug(f"🧹 Running cleanup function: {func.__name__}")
                func()
            except Exception as e:
                windows_logger.error(f"❌ Error in cleanup function {func.__name__}: {str(e)}")
        
        # Stop the threads
        active_threads = [t for t in self.managed_threads if t.is_alive()]
        if active_threads:
            windows_logger.info(f"⏳ Waiting for {len(active_threads)} active threads...")
            
            for thread in active_threads:
                try:
                    windows_logger.debug(f"🔄 Stopping thread: {thread.name}")
                    thread.join(timeout=5)
                    
                    if thread.is_alive():
                        windows_logger.warning(f"⚠️ Thread {thread.name} could not be stopped cleanly")
                    else:
                        windows_logger.debug(f"✅ Thread {thread.name} stopped successfully")
                except Exception as e:
                    windows_logger.error(f"❌ Error stopping thread {thread.name}: {str(e)}")
        
        windows_logger.info("✅ Windows thread shutdown complete")


# Global singleton instance
_windows_thread_manager = None

def get_windows_thread_manager() -> WindowsThreadManager:
    """Returns the global Windows thread manager instance."""
    global _windows_thread_manager
    if _windows_thread_manager is None:
        _windows_thread_manager = WindowsThreadManager()
    return _windows_thread_manager


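# Illustrative usage sketch (not part of the original module; the worker
# function and its name are assumptions): register a background thread and a
# cleanup function so shutdown_all() can stop them on SIGINT/SIGTERM/SIGBREAK.
#
#     manager = get_windows_thread_manager()
#     worker = threading.Thread(target=poll_printer_status,  # hypothetical worker
#                               name="printer-status-poller", daemon=True)
#     worker.start()
#     manager.register_thread(worker)
#     manager.register_cleanup_function(lambda: windows_logger.info("cleanup done"))

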
def fix_windows_socket_issues():
    """
    Applies Windows-specific socket fixes.
    Simplified, safe version without monkey-patching.
    """
    global _socket_patches_applied
    
    if os.name != 'nt':
        return
    
    if _socket_patches_applied:
        windows_logger.debug("⏭️ Socket patches already applied")
        return
    
    try:
        # SAFER alternative: only set TCP socket options, no monkey-patching
        import socket
        
        # Extend the socket class with a helper method
        if not hasattr(socket.socket, 'windows_bind_with_reuse'):
            
            def windows_bind_with_reuse(self, address):
                """Windows-optimized bind method with SO_REUSEADDR."""
                try:
                    # Enable SO_REUSEADDR so the port can be rebound quickly
                    self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                    windows_logger.debug(f"SO_REUSEADDR enabled for socket {address}")
                except Exception as e:
                    windows_logger.debug(f"SO_REUSEADDR could not be set: {str(e)}")
                
                # Perform the standard bind
                return self.bind(address)
            
            # Attach the helper without overriding the original bind method
            socket.socket.windows_bind_with_reuse = windows_bind_with_reuse
        
        # Set global socket options for better Windows compatibility
        socket.setdefaulttimeout(30)  # 30-second default timeout
        
        _socket_patches_applied = True
        windows_logger.debug("✅ Windows socket optimizations applied (safe)")
        
    except Exception as e:
        windows_logger.warning(f"⚠️ Socket optimizations could not be applied: {str(e)}")


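# Illustrative sketch (not part of the original module; host and port are
# assumptions) of how the helper attached above would be used on a listener:
#
#     import socket
#     srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#     srv.windows_bind_with_reuse(('0.0.0.0', 5000))  # enables SO_REUSEADDR, then binds
#     srv.listen(5)

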
def apply_safe_socket_options():
    """
    Applies safe socket options for Windows without monkey-patching.
    """
    if os.name != 'nt':
        return
    
    try:
        import socket
        
        # Safe socket defaults for Windows
        if hasattr(socket, 'TCP_NODELAY'):
            # TCP_NODELAY improves latency, but it is a per-socket option,
            # so it is set on individual sockets rather than globally here
            pass
        
        windows_logger.debug("✅ Safe socket options applied")
        
    except Exception as e:
        windows_logger.debug(f"Socket options could not be set: {str(e)}")


def setup_windows_environment():
    """
    Sets up the Windows environment for better Flask compatibility.
    """
    if os.name != 'nt':
        return
    
    try:
        # Environment variables for better Windows compatibility
        os.environ['PYTHONIOENCODING'] = 'utf-8'
        os.environ['PYTHONUTF8'] = '1'
        
        windows_logger.debug("✅ Windows environment optimized")
        
    except Exception as e:
        windows_logger.warning(f"⚠️ Windows environment could not be optimized: {str(e)}")


def is_flask_reloader_process() -> bool:
    """
    Checks whether the current process is the Flask reloader (watcher) process.
    Werkzeug sets WERKZEUG_RUN_MAIN='true' only in the reloaded worker process.
    """
    return os.environ.get('WERKZEUG_RUN_MAIN') != 'true'


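# Illustrative usage sketch (not part of the original module): start background
# work only in the reloaded worker, so the reloader's watcher process does not
# spawn it a second time. Caveat: without the reloader, WERKZEUG_RUN_MAIN is
# never set and this function returns True in the only process as well.
#
#     if not is_flask_reloader_process():
#         get_windows_thread_manager().register_thread(worker)  # hypothetical worker

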
def apply_all_windows_fixes():
    """Applies all Windows-specific fixes."""
    global _windows_fixes_applied
    
    if _windows_fixes_applied:
        return
    
    try:
        logger.info("🔧 Applying Windows-specific fixes...")
        
        # 1. Encoding fixes
        apply_encoding_fixes()
        
        # 2. Threading fixes
        apply_threading_fixes()
        
        # 3. Signal handler fixes
        apply_signal_fixes()
        
        # 4. Subprocess patch for UTF-8 encoding
        patch_subprocess()
        
        # 5. Global subprocess patch for modules that are already imported
        apply_global_subprocess_patch()
        
        _windows_fixes_applied = True
        logger.info("✅ All Windows fixes applied successfully")
        
    except Exception as e:
        logger.error(f"❌ Error applying Windows fixes: {str(e)}")
        raise e


# ===== SAFE SUBPROCESS WRAPPER =====

def safe_subprocess_run(*args, **kwargs):
    """
    Safe subprocess.run wrapper for Windows with UTF-8 encoding.
    Prevents charmap errors through explicit encoding settings.
    """
    import subprocess
    
    # Default to UTF-8 whenever text mode is requested
    if 'encoding' not in kwargs and kwargs.get('text', False):
        kwargs['encoding'] = 'utf-8'
        kwargs['errors'] = 'replace'
    
    # Apply a default timeout if none was given
    if 'timeout' not in kwargs:
        kwargs['timeout'] = 30
    
    try:
        return subprocess.run(*args, **kwargs)
    except subprocess.TimeoutExpired as e:
        logger.warning(f"Subprocess timeout after {kwargs.get('timeout', 30)}s: {' '.join(args[0]) if args and isinstance(args[0], list) else str(args)}")
        raise e
    except UnicodeDecodeError as e:
        logger.error(f"Unicode decode error in subprocess: {str(e)}")
        # Fallback: retry in binary mode (without text decoding)
        kwargs_fallback = kwargs.copy()
        kwargs_fallback.pop('text', None)
        kwargs_fallback.pop('encoding', None)
        kwargs_fallback.pop('errors', None)
        return subprocess.run(*args, **kwargs_fallback)
    except Exception as e:
        logger.error(f"Subprocess error: {str(e)}")
        raise e


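# Illustrative usage sketch (the command is an assumption, not from the
# original module): run a short command in text mode; UTF-8 decoding and the
# 30-second default timeout are applied automatically.
#
#     result = safe_subprocess_run(['ping', '-n', '1', 'localhost'],
#                                  capture_output=True, text=True)
#     print(result.returncode, result.stdout)

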
# ===== SUBPROCESS MONKEY PATCH =====

def patch_subprocess():
    """
    Patches subprocess.run and subprocess.Popen to use safe encoding
    settings automatically.
    """
    import subprocess
    
    # Save the original functions (only on the first call)
    if not hasattr(subprocess, '_original_run'):
        subprocess._original_run = subprocess.run
        subprocess._original_popen = subprocess.Popen
    
    def patched_run(*args, **kwargs):
        # Automatically apply UTF-8 encoding when text=True is set
        if kwargs.get('text', False) and 'encoding' not in kwargs:
            kwargs['encoding'] = 'utf-8'
            kwargs['errors'] = 'replace'
        
        return subprocess._original_run(*args, **kwargs)
    
    def patched_popen(*args, **kwargs):
        # Automatically apply UTF-8 encoding when text=True is set
        if kwargs.get('text', False) and 'encoding' not in kwargs:
            kwargs['encoding'] = 'utf-8'
            kwargs['errors'] = 'replace'
        
        # Also cover universal_newlines (older Python versions)
        if kwargs.get('universal_newlines', False) and 'encoding' not in kwargs:
            kwargs['encoding'] = 'utf-8'
            kwargs['errors'] = 'replace'
        
        return subprocess._original_popen(*args, **kwargs)
    
    subprocess.run = patched_run
    subprocess.Popen = patched_popen
    logger.info("✅ subprocess patched automatically for UTF-8 encoding (run + Popen)")


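# Illustrative sketch of the patch's effect (the command is an assumption):
# after patch_subprocess(), text-mode calls decode output as UTF-8 with
# errors='replace' instead of the strict Windows 'charmap' codec, so
# non-ASCII output no longer aborts the call.
#
#     patch_subprocess()
#     import subprocess
#     out = subprocess.run(['cmd', '/c', 'echo', 'Grüße'],
#                          capture_output=True, text=True).stdout

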
# ===== GLOBAL SUBPROCESS PATCH =====

def apply_global_subprocess_patch():
    """
    Applies the subprocess patch globally, including for modules that were
    already imported.
    """
    import sys
    import subprocess
    
    # Patch the subprocess module itself
    patch_subprocess()
    
    # Refresh the reference in already-imported modules; iterate over a
    # snapshot because sys.modules may change during iteration
    for module_name, module in list(sys.modules.items()):
        if hasattr(module, 'subprocess') and module.subprocess is subprocess:
            # The module uses the shared subprocess module - re-point it
            module.subprocess = subprocess
            logger.debug(f"✅ subprocess patched in module {module_name}")
    
    logger.info("✅ Global subprocess patch applied")


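# Illustrative usage sketch (not part of the original module): an application
# entry point would typically call this once, before spawning any workers, so
# every module sees the patched subprocess functions.
#
#     if os.name == 'nt':
#         apply_global_subprocess_patch()

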
# ===== EXPORT SAFE SUBPROCESS =====

# Export the safe subprocess helpers
__all__.append('safe_subprocess_run')
__all__.append('patch_subprocess')
__all__.append('apply_global_subprocess_patch')


# Apply the Windows fixes automatically on import (only once). This block has
# to run at the end of the module, after patch_subprocess is defined;
# otherwise the early patch below would raise a NameError.
if os.name == 'nt' and not _windows_fixes_applied:
    # Very early subprocess patch for immediate effect
    try:
        import subprocess
        if not hasattr(subprocess, '_early_patched'):
            patch_subprocess()
            subprocess._early_patched = True
            logger.info("✅ Early subprocess patch applied on import")
    except Exception as e:
        logger.warning(f"⚠️ Early subprocess patch failed: {str(e)}")
    
    apply_all_windows_fixes()