Consolidate configuration into backend/utils (add utils/config.py and utils/settings.py, replace config.settings imports with utils.settings)

This commit is contained in:
2025-06-05 01:34:10 +02:00
parent 0ae23e5272
commit 375c48d72f
478 changed files with 11113 additions and 231267 deletions

Binary file not shown.

View File

@ -7,7 +7,7 @@ import sys
import os
sys.path.append('.')
from config.settings import PRINTERS
from utils.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

View File

@ -7,7 +7,7 @@ import sys
import os
sys.path.append('.')
from config.settings import PRINTERS
from utils.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

319
backend/utils/config.py Normal file
View File

@ -0,0 +1,319 @@
"""
Zentrale Konfiguration für das 3D-Druck-Management-System
Diese Datei konsolidiert alle Konfigurationseinstellungen aus dem ehemaligen config-Ordner.
"""
import os
import json
from datetime import timedelta
# ===== BASIS-KONFIGURATION =====
def get_env_variable(name: str, default: str = None) -> str:
    """Read an environment variable, falling back to *default* when unset.

    Args:
        name: Name of the environment variable.
        default: Value returned when the variable is not set.

    Returns:
        str: The variable's value, or *default* when it is absent.
    """
    value = os.environ.get(name)
    return default if value is None else value
# Secret key for Flask sessions and CSRF protection.
# SECURITY: prefer supplying SECRET_KEY via the environment; the hard-coded
# fallback is kept only for backward compatibility and should be rotated.
SECRET_KEY = get_env_variable("SECRET_KEY", "7445630171969DFAC92C53CEC92E67A9CB2E00B3CB2F")

# Path configuration
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.dirname(BASE_DIR)
DATABASE_PATH = os.path.join(BASE_DIR, "instance", "printer_manager.db")

# ===== SMART PLUG CONFIGURATION =====
# TP-Link Tapo P110 default credentials.
# SECURITY: hard-coded credentials are a liability; the environment variables
# TAPO_USERNAME / TAPO_PASSWORD take precedence when set.
TAPO_USERNAME = get_env_variable("TAPO_USERNAME", "till.tomczak@mercedes-benz.com")
TAPO_PASSWORD = get_env_variable("TAPO_PASSWORD", "744563017196A")

# Automatic smart-plug discovery
TAPO_AUTO_DISCOVERY = True

# Default smart-plug IP addresses
DEFAULT_TAPO_IPS = [
    "192.168.0.103",
    "192.168.0.104",
    "192.168.0.100",
    "192.168.0.101",
    "192.168.0.102",
    "192.168.0.105"
]

# Timeout configuration for Tapo connections
TAPO_TIMEOUT = 10        # seconds
TAPO_RETRY_COUNT = 3     # number of retry attempts

# ===== PRINTER CONFIGURATION =====
PRINTERS = {
    "Printer 1": {"ip": "192.168.0.100"},
    "Printer 2": {"ip": "192.168.0.101"},
    "Printer 3": {"ip": "192.168.0.102"},
    "Printer 4": {"ip": "192.168.0.103"},
    "Printer 5": {"ip": "192.168.0.104"},
    "Printer 6": {"ip": "192.168.0.106"}
}

# ===== LOGGING CONFIGURATION =====
LOG_DIR = os.path.join(BASE_DIR, "logs")
LOG_SUBDIRS = ["app", "scheduler", "auth", "jobs", "printers", "errors", "admin", "admin_api",
               "user", "kiosk", "guest", "uploads", "sessions", "maintenance", "analytics",
               "security", "database", "queue_manager", "printer_monitor"]
LOG_LEVEL = get_env_variable("LOG_LEVEL", "INFO")
LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
LOG_FILE_MAX_BYTES = 10 * 1024 * 1024  # 10MB
LOG_BACKUP_COUNT = 5

# ===== FLASK CONFIGURATION =====
FLASK_HOST = get_env_variable("FLASK_HOST", "0.0.0.0")
FLASK_PORT = int(get_env_variable("FLASK_PORT", "5000"))
FLASK_FALLBACK_PORT = 8080
FLASK_DEBUG = get_env_variable("FLASK_DEBUG", "False").lower() in ("true", "1", "yes")
SESSION_LIFETIME = timedelta(hours=2)

# ===== UPLOAD CONFIGURATION =====
UPLOAD_FOLDER = os.path.join(BASE_DIR, "uploads")
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'gcode', '3mf', 'stl', 'obj', 'amf'}
MAX_CONTENT_LENGTH = 16 * 1024 * 1024  # 16MB maximum request size
MAX_FILE_SIZE = 16 * 1024 * 1024       # 16MB maximum file size for the drag & drop system

# ===== ENVIRONMENT =====
ENVIRONMENT = get_env_variable("MYP_ENVIRONMENT", "development")

# ===== SSL CONFIGURATION =====
SSL_ENABLED = get_env_variable("MYP_SSL_ENABLED", "False").lower() in ("true", "1", "yes")
SSL_CERT_PATH = os.path.join(BASE_DIR, "certs", "myp.crt")
SSL_KEY_PATH = os.path.join(BASE_DIR, "certs", "myp.key")
SSL_HOSTNAME = get_env_variable("MYP_SSL_HOSTNAME", "localhost")

# ===== SCHEDULER CONFIGURATION =====
SCHEDULER_INTERVAL = 60  # seconds
SCHEDULER_ENABLED = get_env_variable("SCHEDULER_ENABLED", "True").lower() in ("true", "1", "yes")

# ===== DATABASE CONFIGURATION =====
DATABASE_URL = get_env_variable("DATABASE_URL", f"sqlite:///{DATABASE_PATH}")
SQLALCHEMY_DATABASE_URI = DATABASE_URL
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ENGINE_OPTIONS = {
    'pool_pre_ping': True,
    'pool_recycle': 300,
    'connect_args': {
        # SQLite is accessed from multiple threads (scheduler + web workers).
        'check_same_thread': False
    }
}

# ===== SECURITY CONFIGURATION =====
# CSRF protection
WTF_CSRF_ENABLED = True
WTF_CSRF_TIME_LIMIT = 3600  # 1 hour

# Session security
SESSION_COOKIE_SECURE = SSL_ENABLED  # only over HTTPS
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SAMESITE = 'Lax'
PERMANENT_SESSION_LIFETIME = SESSION_LIFETIME

# Security headers
SECURITY_HEADERS = {
    'Content-Security-Policy': (
        "default-src 'self'; "
        "script-src 'self' 'unsafe-eval' 'unsafe-inline'; "
        "script-src-elem 'self' 'unsafe-inline'; "
        "style-src 'self' 'unsafe-inline'; "
        "font-src 'self'; "
        "img-src 'self' data:; "
        "connect-src 'self'; "
        "worker-src 'self' blob:; "
        "frame-src 'none'; "
        "object-src 'none'; "
        "base-uri 'self'; "
        "form-action 'self'; "
        "frame-ancestors 'none';"
    ),
    'X-Content-Type-Options': 'nosniff',
    'X-Frame-Options': 'DENY',
    'X-XSS-Protection': '1; mode=block',
    'Referrer-Policy': 'strict-origin-when-cross-origin',
    'Permissions-Policy': 'geolocation=(), microphone=(), camera=()'
}

# Only add the HSTS header when SSL is enabled.
if SSL_ENABLED:
    SECURITY_HEADERS['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'

# Rate limiting
RATE_LIMITS = {
    'default': "200 per day, 50 per hour",
    'login': "5 per minute",
    'api': "100 per hour",
    'admin': "500 per hour"
}

# ===== MAIL CONFIGURATION (optional) =====
MAIL_SERVER = get_env_variable('MAIL_SERVER')
MAIL_PORT = int(get_env_variable('MAIL_PORT', '587'))
MAIL_USE_TLS = get_env_variable('MAIL_USE_TLS', 'true').lower() in ['true', 'on', '1']
MAIL_USERNAME = get_env_variable('MAIL_USERNAME')
MAIL_PASSWORD = get_env_variable('MAIL_PASSWORD')
# ===== HILFSFUNKTIONEN =====
def get_log_file(category: str) -> str:
    """Resolve the log-file path for *category*.

    Unknown categories fall back to the generic "app" log.

    Args:
        category: Log category (expected to be one of LOG_SUBDIRS).

    Returns:
        str: Path to the category's log file.
    """
    effective = category if category in LOG_SUBDIRS else "app"
    return os.path.join(LOG_DIR, effective, f"{effective}.log")
def ensure_log_directories():
    """Create the base log directory and every category sub-directory."""
    targets = [LOG_DIR] + [os.path.join(LOG_DIR, sub) for sub in LOG_SUBDIRS]
    for path in targets:
        os.makedirs(path, exist_ok=True)
def ensure_database_directory():
    """Create the directory that holds the SQLite database file, if needed."""
    parent = os.path.dirname(DATABASE_PATH)
    if parent:
        os.makedirs(parent, exist_ok=True)
def ensure_ssl_directory():
    """Create the directory for the SSL certificate/key pair, if needed.

    ``os.makedirs(..., exist_ok=True)`` is idempotent, so the previous
    ``os.path.exists`` pre-check was redundant and race-prone (TOCTOU);
    it has been removed.
    """
    ssl_dir = os.path.dirname(SSL_CERT_PATH)
    if ssl_dir:
        os.makedirs(ssl_dir, exist_ok=True)
def ensure_upload_directory():
    """Create the upload directory and its standard sub-directories.

    ``exist_ok=True`` makes a separate existence pre-check unnecessary and
    avoids a TOCTOU race; sub-directories are always (re)ensured so a
    partially deleted tree heals itself.
    """
    os.makedirs(UPLOAD_FOLDER, exist_ok=True)
    # Standard sub-directories used by the upload subsystem.
    subdirs = ["jobs", "avatars", "assets", "logs", "backups", "temp", "guests"]
    for subdir in subdirs:
        os.makedirs(os.path.join(UPLOAD_FOLDER, subdir), exist_ok=True)
def get_security_headers():
    """Return the configured HTTP security headers (SECURITY_HEADERS dict)."""
    return SECURITY_HEADERS
def create_simple_ssl_cert():
    """
    Create a Mercedes-Benz SSL certificate via the project's SSL manager.

    Returns:
        bool: True when the certificate was generated successfully,
        False when the SSL manager is unavailable or generation failed.
    """
    try:
        # Imported lazily so this module stays importable without the SSL stack.
        from utils.ssl_manager import ssl_manager
        success = ssl_manager.generate_mercedes_certificate()
        if success:
            print(f"Mercedes-Benz SSL-Zertifikat erfolgreich erstellt: {SSL_CERT_PATH}")
            return True
        else:
            print("Fehler beim Erstellen des Mercedes-Benz SSL-Zertifikats")
            return False
    except ImportError as e:
        # SSL manager module not present in this deployment.
        print(f"SSL-Manager nicht verfügbar: {e}")
        return False
    except Exception as e:
        # Best-effort: report and signal failure rather than crashing startup.
        print(f"Fehler beim Erstellen der SSL-Zertifikate: {e}")
        return False
# ===== KONFIGURATIONSKLASSEN FÜR VERSCHIEDENE UMGEBUNGEN =====
class Config:
    """Base configuration class.

    Mirrors the module-level constants as class attributes so the values can
    be consumed via Flask's ``app.config.from_object(...)`` mechanism.
    """
    # Adopt all settings from the module-level constants.
    SECRET_KEY = SECRET_KEY
    DATABASE_URL = DATABASE_URL
    SQLALCHEMY_DATABASE_URI = SQLALCHEMY_DATABASE_URI
    SQLALCHEMY_TRACK_MODIFICATIONS = SQLALCHEMY_TRACK_MODIFICATIONS
    SQLALCHEMY_ENGINE_OPTIONS = SQLALCHEMY_ENGINE_OPTIONS
    UPLOAD_FOLDER = UPLOAD_FOLDER
    MAX_CONTENT_LENGTH = MAX_CONTENT_LENGTH
    ALLOWED_EXTENSIONS = ALLOWED_EXTENSIONS
    WTF_CSRF_ENABLED = WTF_CSRF_ENABLED
    WTF_CSRF_TIME_LIMIT = WTF_CSRF_TIME_LIMIT
    SESSION_COOKIE_SECURE = SESSION_COOKIE_SECURE
    SESSION_COOKIE_HTTPONLY = SESSION_COOKIE_HTTPONLY
    SESSION_COOKIE_SAMESITE = SESSION_COOKIE_SAMESITE
    PERMANENT_SESSION_LIFETIME = PERMANENT_SESSION_LIFETIME
    LOG_LEVEL = LOG_LEVEL
    LOG_FILE_MAX_BYTES = LOG_FILE_MAX_BYTES
    LOG_BACKUP_COUNT = LOG_BACKUP_COUNT
    SCHEDULER_ENABLED = SCHEDULER_ENABLED
    SCHEDULER_INTERVAL = SCHEDULER_INTERVAL
    SSL_ENABLED = SSL_ENABLED
    SSL_CERT_PATH = SSL_CERT_PATH
    SSL_KEY_PATH = SSL_KEY_PATH
    DEFAULT_PORT = FLASK_PORT
    DEFAULT_HOST = FLASK_HOST

    @staticmethod
    def init_app(app):
        """Hook for environment-specific app initialisation (no-op in the base class)."""
        pass
class DevelopmentConfig(Config):
    """Development environment configuration."""
    DEBUG = True
    TESTING = False
    LOG_LEVEL = 'DEBUG'
    SESSION_COOKIE_SECURE = False
    WTF_CSRF_ENABLED = False  # disabled for easier API testing
class TestingConfig(Config):
    """Test environment configuration (in-memory database, no CSRF)."""
    TESTING = True
    DEBUG = True
    DATABASE_URL = 'sqlite:///:memory:'
    SQLALCHEMY_DATABASE_URI = DATABASE_URL
    WTF_CSRF_ENABLED = False
    PERMANENT_SESSION_LIFETIME = timedelta(minutes=5)
class ProductionConfig(Config):
    """Production environment configuration (SSL on, CSRF on, terse logging)."""
    DEBUG = False
    TESTING = False
    SESSION_COOKIE_SECURE = True
    WTF_CSRF_ENABLED = True
    LOG_LEVEL = 'WARNING'
    SSL_ENABLED = True
# Mapping from environment name to configuration class.
config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    'production': ProductionConfig,
    'default': DevelopmentConfig
}

def get_config_by_name(config_name):
    """Return the configuration class for *config_name*, falling back to the default."""
    return config.get(config_name, config['default'])

View File

@ -16,7 +16,7 @@ from sqlalchemy import text, create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.pool import StaticPool
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
from utils.logging_config import get_logger
logger = get_logger("database_cleanup")

View File

@ -9,7 +9,7 @@ import logging
from typing import List, Dict, Any
from datetime import datetime
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
from models import init_db
logger = logging.getLogger(__name__)

View File

@ -20,7 +20,7 @@ sys.path.insert(0, app_dir)
# Alternative Datenbankpfad-Definition falls Import fehlschlägt
DATABASE_PATH = None
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
except ImportError:
# Fallback: Datenbankpfad manuell setzen
DATABASE_PATH = os.path.join(app_dir, "database", "myp.db")

View File

@ -15,7 +15,7 @@ from pathlib import Path
from sqlalchemy import text
from sqlalchemy.engine import Engine
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
from utils.logging_config import get_logger
from models import get_cached_session, create_optimized_engine

View File

@ -112,7 +112,7 @@ def print_section(message):
def get_database_path():
"""Gibt den Pfad zur Datenbank zurück."""
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
return DATABASE_PATH
except ImportError:
# Fallback auf Standard-Pfad
@ -157,7 +157,7 @@ def check_database():
def check_log_files():
"""Prüft die Log-Dateien und zeigt die neuesten Einträge an."""
try:
from config.settings import LOG_DIR, LOG_SUBDIRS
from utils.settings import LOG_DIR, LOG_SUBDIRS
if not os.path.exists(LOG_DIR):
print_error(f"Log-Verzeichnis nicht gefunden: {LOG_DIR}")
@ -227,7 +227,7 @@ def check_environment():
try:
# Flask-Konfiguration prüfen
print_info("Flask-Konfiguration:")
from config.settings import FLASK_HOST, FLASK_PORT, FLASK_DEBUG, SSL_ENABLED
from utils.settings import FLASK_HOST, FLASK_PORT, FLASK_DEBUG, SSL_ENABLED
print(f" Host: {FLASK_HOST}")
print(f" Port: {FLASK_PORT}")
@ -305,7 +305,7 @@ def scan_printer(ip_address, timeout=5):
print(" 🔌 Smart Plug Test: ", end="")
try:
# Standardmäßig Anmeldeinformationen aus der Konfiguration verwenden
from config.settings import TAPO_USERNAME, TAPO_PASSWORD
from utils.settings import TAPO_USERNAME, TAPO_PASSWORD
p110 = PyP110.P110(ip_address, TAPO_USERNAME, TAPO_PASSWORD)
p110.handshake()
@ -579,7 +579,7 @@ def show_logs():
print_header("Log-Analyse")
try:
from config.settings import LOG_DIR, LOG_SUBDIRS
from utils.settings import LOG_DIR, LOG_SUBDIRS
if not os.path.exists(LOG_DIR):
print_error(f"Log-Verzeichnis nicht gefunden: {LOG_DIR}")

View File

@ -34,7 +34,7 @@ def test_database_connection():
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
db_file = DATABASE_PATH
except ImportError:
# Fallback für lokale Ausführung
@ -134,7 +134,7 @@ def test_network_connectivity():
try:
# Verwende konfigurierten Datenbankpfad
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
db_file = DATABASE_PATH
except ImportError:
db_file = os.path.join('database', 'myp.db')
@ -207,7 +207,7 @@ def test_tapo_connections():
try:
# Verwende konfigurierten Datenbankpfad
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
db_file = DATABASE_PATH
except ImportError:
db_file = os.path.join('database', 'myp.db')

View File

@ -23,7 +23,7 @@ from flask_login import current_user
from utils.logging_config import get_logger
from models import Job, Printer, JobOrder, get_db_session
from utils.file_manager import save_job_file, save_temp_file
from config.settings import ALLOWED_EXTENSIONS, MAX_FILE_SIZE, UPLOAD_FOLDER
from utils.settings import ALLOWED_EXTENSIONS, MAX_FILE_SIZE, UPLOAD_FOLDER
logger = get_logger("drag_drop")

View File

@ -8,7 +8,7 @@ import shutil
from datetime import datetime
from werkzeug.utils import secure_filename
from typing import Optional, Tuple, Dict, List
from config.settings import UPLOAD_FOLDER, ALLOWED_EXTENSIONS
from utils.settings import UPLOAD_FOLDER, ALLOWED_EXTENSIONS
class FileManager:
"""

View File

@ -11,7 +11,7 @@ from datetime import datetime
# Pfad zur App hinzufügen
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
def fix_users_table_immediate():
"""Repariert die users Tabelle sofort."""

View File

@ -24,7 +24,7 @@ from functools import wraps
from werkzeug.datastructures import FileStorage
from utils.logging_config import get_logger
from config.settings import ALLOWED_EXTENSIONS, MAX_FILE_SIZE
from utils.settings import ALLOWED_EXTENSIONS, MAX_FILE_SIZE
logger = get_logger("validation")

View File

@ -9,7 +9,7 @@ from sqlalchemy.orm import joinedload
from utils.logging_config import get_logger
from models import Job, Printer, get_db_session
from config.settings import TAPO_USERNAME, TAPO_PASSWORD
from utils.settings import TAPO_USERNAME, TAPO_PASSWORD
# Lazy logger initialization
_logger = None
@ -308,7 +308,7 @@ class BackgroundTaskScheduler:
return False
# IMMER globale Anmeldedaten verwenden (da diese funktionieren)
from config.settings import TAPO_USERNAME, TAPO_PASSWORD
from utils.settings import TAPO_USERNAME, TAPO_PASSWORD
username = TAPO_USERNAME
password = TAPO_PASSWORD
self.logger.debug(f"🔧 Verwende globale Tapo-Anmeldedaten für {ip}")
@ -682,7 +682,7 @@ def test_tapo_connection(ip_address: str, username: str = None, password: str =
# Verwende globale Anmeldedaten falls nicht angegeben
if not username or not password:
from config.settings import TAPO_USERNAME, TAPO_PASSWORD
from utils.settings import TAPO_USERNAME, TAPO_PASSWORD
username = TAPO_USERNAME
password = TAPO_PASSWORD
logger.debug(f"Verwende globale Tapo-Anmeldedaten für {ip_address}")

View File

@ -147,10 +147,10 @@ def setup_logging(log_level: str = "INFO", base_log_dir: str = None) -> None:
root_logger.addHandler(console_handler)
_logging_initialized = True
print(f" Logging-System erfolgreich initialisiert (Level: {log_level})")
print(f"[OK] Logging-System erfolgreich initialisiert (Level: {log_level})")
except Exception as e:
print(f" KRITISCHER FEHLER bei Logging-Initialisierung: {e}")
print(f"[ERROR] KRITISCHER FEHLER bei Logging-Initialisierung: {e}")
# Notfall-Konfiguration
logging.basicConfig(
level=getattr(logging, log_level.upper(), logging.INFO),
@ -221,7 +221,7 @@ def get_logger(name: str, log_level: str = None) -> logging.Logger:
return logger
except Exception as e:
print(f" Fehler beim Erstellen des Loggers '{name}': {e}")
print(f"[ERROR] Fehler beim Erstellen des Loggers '{name}': {e}")
# Fallback: Einfacher Logger ohne File-Handler
fallback_logger = logging.getLogger(name)
if name not in _logger_registry:
@ -257,7 +257,7 @@ def measure_execution_time(logger: logging.Logger = None, task_name: str = "Task
execution_time = (time.time() - start_time) * 1000 # in Millisekunden
# Protokolliere Erfolg
log.info(f" {task_name} '{func.__name__}' erfolgreich in {execution_time:.2f}ms")
log.info(f"[OK] {task_name} '{func.__name__}' erfolgreich in {execution_time:.2f}ms")
return result
@ -266,7 +266,7 @@ def measure_execution_time(logger: logging.Logger = None, task_name: str = "Task
execution_time = (time.time() - start_time) * 1000
# Protokolliere Fehler
log.error(f" {task_name} '{func.__name__}' fehlgeschlagen nach {execution_time:.2f}ms: {str(e)}")
log.error(f"[ERROR] {task_name} '{func.__name__}' fehlgeschlagen nach {execution_time:.2f}ms: {str(e)}")
# Exception weiterleiten
raise
@ -284,7 +284,7 @@ def log_startup_info():
try:
startup_logger.info("=" * 50)
startup_logger.info("🚀 MYP Platform Backend wird gestartet...")
startup_logger.info("[START] MYP Platform Backend wird gestartet...")
startup_logger.info(f"🐍 Python Version: {sys.version}")
startup_logger.info(f"💻 Betriebssystem: {os.name} ({sys.platform})")
startup_logger.info(f"📁 Arbeitsverzeichnis: {os.getcwd()}")
@ -298,7 +298,7 @@ def log_startup_info():
startup_logger.info("=" * 50)
except Exception as e:
print(f" Fehler beim Startup-Logging: {e}")
print(f"[ERROR] Fehler beim Startup-Logging: {e}")
def debug_request(logger: logging.Logger, request) -> None:
"""
@ -322,7 +322,7 @@ def debug_request(logger: logging.Logger, request) -> None:
logger.debug(f"📝 Form-Daten: {safe_form}")
except Exception as e:
logger.error(f" Fehler beim Request-Debugging: {str(e)}")
logger.error(f"[ERROR] Fehler beim Request-Debugging: {str(e)}")
def debug_response(logger: logging.Logger, response, duration_ms: Optional[float] = None) -> None:
"""
@ -334,7 +334,7 @@ def debug_response(logger: logging.Logger, response, duration_ms: Optional[float
duration_ms: Optionale Ausführungszeit in Millisekunden
"""
try:
status_emoji = "" if response.status_code < 400 else "" if response.status_code >= 500 else "⚠️"
status_emoji = "[OK]" if response.status_code < 400 else "[ERROR]" if response.status_code >= 500 else "[WARN]"
log_msg = f"📤 RESPONSE: {status_emoji} {response.status_code}"
@ -345,7 +345,7 @@ def debug_response(logger: logging.Logger, response, duration_ms: Optional[float
logger.debug(f"📏 Content-Length: {response.content_length or 'Unbekannt'}")
except Exception as e:
logger.error(f" Fehler beim Response-Debugging: {str(e)}")
logger.error(f"[ERROR] Fehler beim Response-Debugging: {str(e)}")
# ===== NOTFALL-LOGGING =====
@ -371,4 +371,4 @@ if __name__ != "__main__":
try:
setup_logging()
except Exception as e:
print(f" Auto-Initialisierung des Logging-Systems fehlgeschlagen: {e}")
print(f"[ERROR] Auto-Initialisierung des Logging-Systems fehlgeschlagen: {e}")

View File

@ -0,0 +1,374 @@
# -*- coding: utf-8 -*-
"""
Windows-sichere Logging-Konfiguration für MYP Platform
======================================================
Robuste Logging-Konfiguration mit Windows-spezifischen Fixes für File-Locking-Probleme.
"""
import os
import sys
import time
import logging
import threading
from datetime import datetime
from functools import wraps
from typing import Optional, Dict, Any
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
# ===== WINDOWS-SICHERE LOGGING-KLASSE =====
class WindowsSafeRotatingFileHandler(RotatingFileHandler):
    """
    Windows-safe implementation of RotatingFileHandler.

    Works around the WinError 32 problem caused by concurrent access to the
    log file while it is being rotated.
    """
    def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
        # Default to UTF-8 so non-ASCII log messages are written safely.
        if encoding is None:
            encoding = 'utf-8'
        # Only engage the safe-rotation path on Windows (os.name == 'nt').
        self._windows_safe_mode = os.name == 'nt'
        self._rotation_lock = threading.Lock()
        super().__init__(filename, mode, maxBytes, backupCount, encoding, delay)

    def doRollover(self):
        """
        Windows-safe log rotation with improved error handling.

        On non-Windows systems this defers to the stock implementation;
        on Windows it serialises rotation behind a lock and retries with
        exponential backoff, falling back to a fresh log file if the
        rotation keeps failing.
        """
        if not self._windows_safe_mode:
            # Regular rotation on Unix systems.
            return super().doRollover()
        # Windows-specific safe rotation, serialised via the instance lock.
        with self._rotation_lock:
            try:
                if self.stream:
                    self.stream.close()
                    self.stream = None
                # Brief pause before attempting rotation, giving other
                # handles a chance to release the file.
                time.sleep(0.1)
                # Retry the rotation several times with exponential backoff.
                max_attempts = 5
                for attempt in range(max_attempts):
                    try:
                        # Perform the actual rotation.
                        super().doRollover()
                        break
                    except (PermissionError, OSError) as e:
                        if attempt == max_attempts - 1:
                            # Final attempt failed: open a new log file
                            # instead of rotating.
                            print(f"WARNUNG: Log-Rotation fehlgeschlagen - erstelle neue Datei: {e}")
                            self._create_new_log_file()
                            break
                        else:
                            # Back off exponentially between attempts.
                            wait_time = 0.5 * (2 ** attempt)
                            time.sleep(wait_time)
            except Exception as e:
                print(f"KRITISCHER FEHLER bei Log-Rotation: {e}")
                # Emergency fallback: start a fresh log file.
                self._create_new_log_file()

    def _create_new_log_file(self):
        """
        Create a new log file as a fallback when rotation fails.

        Appends a timestamp to the base filename so the locked file is
        left untouched.
        """
        try:
            # Add a timestamp to the file name to side-step the locked file.
            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            base_name, ext = os.path.splitext(self.baseFilename)
            new_filename = f"{base_name}_{timestamp}{ext}"
            # Open the new file as the handler's stream.
            self.baseFilename = new_filename
            self.stream = self._open()
        except Exception as e:
            print(f"NOTFALL: Kann keine neue Log-Datei erstellen: {e}")
            # Last resort: write to the console.
            self.stream = sys.stderr
# ===== GLOBAL LOGGING CONFIGURATION =====

# Logger registry implementing a per-name singleton pattern.
_logger_registry: Dict[str, logging.Logger] = {}
_logging_initialized = False  # guarded by _init_lock
_init_lock = threading.Lock()
def setup_logging(log_level: str = "INFO", base_log_dir: str = None) -> None:
    """
    Initialise the central logging system with a Windows-safe configuration.

    Idempotent: subsequent calls return immediately once initialisation has
    succeeded (guarded by a module-level lock).

    Args:
        log_level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).
        base_log_dir: Base directory for log files; defaults to ``../logs``
            relative to this module.
    """
    global _logging_initialized
    with _init_lock:
        if _logging_initialized:
            return
        try:
            # Determine the log directory.
            if base_log_dir is None:
                current_dir = os.path.dirname(os.path.abspath(__file__))
                base_log_dir = os.path.join(current_dir, '..', 'logs')
            # Create the per-category log directories.
            log_dirs = ['app', 'auth', 'jobs', 'printers', 'scheduler', 'errors']
            for log_dir in log_dirs:
                full_path = os.path.join(base_log_dir, log_dir)
                os.makedirs(full_path, exist_ok=True)
            # Configure the root logger.
            root_logger = logging.getLogger()
            root_logger.setLevel(getattr(logging, log_level.upper(), logging.INFO))
            # Remove existing handlers to avoid duplicate output.
            for handler in root_logger.handlers[:]:
                root_logger.removeHandler(handler)
            # Console handler for warnings and above only.
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setLevel(logging.WARNING)
            # FIX: the old format string emitted %(levelname)s twice
            # ("[%(levelname)s] %(levelname)s" — an emoji-strip artifact);
            # emit it once.
            console_formatter = logging.Formatter(
                '%(asctime)s - %(name)s - [%(levelname)s] - %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            )
            console_handler.setFormatter(console_formatter)
            root_logger.addHandler(console_handler)
            _logging_initialized = True
            print(f"✅ Logging-System erfolgreich initialisiert (Level: {log_level})")
        except Exception as e:
            print(f"❌ KRITISCHER FEHLER bei Logging-Initialisierung: {e}")
            # Emergency fallback configuration.
            logging.basicConfig(
                level=getattr(logging, log_level.upper(), logging.INFO),
                format='%(asctime)s - %(name)s - [%(levelname)s] - %(message)s',
                handlers=[logging.StreamHandler(sys.stdout)]
            )
            _logging_initialized = True
def get_logger(name: str, log_level: str = None) -> logging.Logger:
    """
    Create or return a configured logger (singleton per name).

    Args:
        name: Logger name (e.g. 'app', 'auth', 'jobs'); also used as the
            log sub-directory and file name.
        log_level: Optional per-logger log level override.

    Returns:
        logging.Logger: The configured logger instance.
    """
    global _logger_registry
    # Ensure the logging system has been initialised.
    if not _logging_initialized:
        setup_logging()
    # Return the cached logger when one already exists.
    if name in _logger_registry:
        return _logger_registry[name]
    try:
        # Create a new logger.
        logger = logging.getLogger(name)
        # Apply a specific level when requested.
        if log_level:
            logger.setLevel(getattr(logging, log_level.upper(), logging.INFO))
        # Create the file handler with Windows-safe rotation.
        log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'logs', name)
        os.makedirs(log_dir, exist_ok=True)
        log_file = os.path.join(log_dir, f'{name}.log')
        # Windows-safe RotatingFileHandler (10MB per file, 5 backups).
        file_handler = WindowsSafeRotatingFileHandler(
            log_file,
            maxBytes=10*1024*1024,  # 10MB
            backupCount=5,
            encoding='utf-8'
        )
        # FIX: the old format string duplicated %(name)s and %(levelname)s
        # ("[%(name)s] %(name)s - [%(levelname)s] %(levelname)s" — an
        # emoji-strip artifact); emit each token once.
        file_formatter = logging.Formatter(
            '%(asctime)s - [%(name)s] - [%(levelname)s] - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        file_handler.setFormatter(file_formatter)
        # Attach the handler.
        logger.addHandler(file_handler)
        # Prevent propagation to the root logger to avoid duplicate output.
        logger.propagate = False
        # Cache in the registry.
        _logger_registry[name] = logger
        return logger
    except Exception as e:
        print(f"❌ Fehler beim Erstellen des Loggers '{name}': {e}")
        # Fallback: plain logger without a file handler.
        fallback_logger = logging.getLogger(name)
        if name not in _logger_registry:
            _logger_registry[name] = fallback_logger
        return fallback_logger
# ===== PERFORMANCE-MEASUREMENT DECORATOR =====
def measure_execution_time(logger: logging.Logger = None, task_name: str = "Task"):
    """
    Decorator that measures and logs a function's execution time.

    Args:
        logger: Logger used for the output; defaults to the "performance" logger.
        task_name: Label for the task in log messages.

    Returns:
        The decorator function.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            log = logger if logger is not None else get_logger("performance")
            started = time.time()
            try:
                result = func(*args, **kwargs)
            except Exception as e:
                # Measure and report the elapsed time even on failure,
                # then re-raise for the caller.
                elapsed_ms = (time.time() - started) * 1000
                log.error(f"❌ {task_name} '{func.__name__}' fehlgeschlagen nach {elapsed_ms:.2f}ms: {str(e)}")
                raise
            elapsed_ms = (time.time() - started) * 1000  # milliseconds
            log.info(f"✅ {task_name} '{func.__name__}' erfolgreich in {elapsed_ms:.2f}ms")
            return result
        return wrapper
    return decorator
# ===== STARTUP/DEBUG LOGGING =====
def log_startup_info():
    """
    Log system startup information (Python version, OS, working directory,
    start time) via the "startup" logger; failures fall back to print.
    """
    startup_logger = get_logger("startup")
    try:
        startup_logger.info("=" * 50)
        startup_logger.info("🚀 MYP Platform Backend wird gestartet...")
        startup_logger.info(f"🐍 Python Version: {sys.version}")
        startup_logger.info(f"💻 Betriebssystem: {os.name} ({sys.platform})")
        startup_logger.info(f"📁 Arbeitsverzeichnis: {os.getcwd()}")
        startup_logger.info(f"⏰ Startzeit: {datetime.now().isoformat()}")
        # Windows-specific information.
        if os.name == 'nt':
            startup_logger.info("🪟 Windows-Modus: Aktiviert")
            startup_logger.info("🔒 Windows-sichere Log-Rotation: Aktiviert")
        startup_logger.info("=" * 50)
    except Exception as e:
        # Never let startup logging break the startup itself.
        print(f"❌ Fehler beim Startup-Logging: {e}")
def debug_request(logger: logging.Logger, request) -> None:
    """
    Log detailed debug information about an incoming request.

    Args:
        logger: Logger used for the output.
        request: Flask request object.
    """
    try:
        for line in (
            f"📨 REQUEST: {request.method} {request.path}",
            f"🌐 Remote-Adresse: {request.remote_addr}",
            f"🔤 Content-Type: {request.content_type}",
        ):
            logger.debug(line)
        if request.args:
            logger.debug(f"❓ Query-Parameter: {dict(request.args)}")
        if request.form and logger.level <= logging.DEBUG:
            # Mask anything that looks like a password before logging form data.
            safe_form = {}
            for key, value in request.form.items():
                safe_form[key] = '***' if 'password' in key.lower() else value
            logger.debug(f"📝 Form-Daten: {safe_form}")
    except Exception as e:
        logger.error(f"❌ Fehler beim Request-Debugging: {str(e)}")
def debug_response(logger: logging.Logger, response, duration_ms: Optional[float] = None) -> None:
    """
    Log detailed debug information about an outgoing response.

    Args:
        logger: Logger used for the output.
        response: Flask response object.
        duration_ms: Optional execution time in milliseconds.
    """
    try:
        code = response.status_code
        # 2xx/3xx -> ok, 4xx -> warning, 5xx -> error marker.
        if code >= 500:
            status_emoji = "❌"
        elif code >= 400:
            status_emoji = "⚠️"
        else:
            status_emoji = "✅"
        log_msg = f"📤 RESPONSE: {status_emoji} {code}"
        if duration_ms is not None:
            log_msg += f" ({duration_ms:.2f}ms)"
        logger.debug(log_msg)
        logger.debug(f"📏 Content-Length: {response.content_length or 'Unbekannt'}")
    except Exception as e:
        logger.error(f"❌ Fehler beim Response-Debugging: {str(e)}")
# ===== NOTFALL-LOGGING =====
def emergency_log(message: str, level: str = "ERROR") -> None:
    """
    Emergency logging that works even when the main logging system fails.

    Args:
        message: The message to log.
        level: Log level name (e.g. "ERROR", "WARNING").
    """
    try:
        # Try the regular logging pipeline first.
        logger = get_logger("emergency")
        getattr(logger, level.lower(), logger.error)(message)
    except Exception:
        # FIX: was a bare `except:` which also traps SystemExit and
        # KeyboardInterrupt; Exception covers every logging failure.
        # Fall back to plain print so the message is never lost.
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print(f"[NOTFALL {timestamp}] [{level}] {message}")
# Auto-initialisation on import: configure logging as soon as any module
# imports this one (skipped when the file is executed directly as a script).
if __name__ != "__main__":
    try:
        setup_logging()
    except Exception as e:
        # Never let a logging failure break the importing module.
        print(f"❌ Auto-Initialisierung des Logging-Systems fehlgeschlagen: {e}")

View File

@ -13,7 +13,7 @@ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from models import init_db, get_cached_session, GuestRequest, UserPermission, Notification, User
from utils.logging_config import get_logger
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
logger = get_logger("migrate")

View File

@ -17,7 +17,7 @@ import os
from models import get_db_session, Printer, PlugStatusLog
from utils.logging_config import get_logger
from config.settings import PRINTERS, TAPO_USERNAME, TAPO_PASSWORD, DEFAULT_TAPO_IPS, TAPO_AUTO_DISCOVERY
from utils.settings import PRINTERS, TAPO_USERNAME, TAPO_PASSWORD, DEFAULT_TAPO_IPS, TAPO_AUTO_DISCOVERY
# TP-Link Tapo P110 Unterstützung hinzufügen
try:

View File

@ -12,7 +12,7 @@ from datetime import datetime
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
except ImportError:
# Fallback falls Import fehlschlägt
DATABASE_PATH = "database/myp.db"

344
backend/utils/settings.py Normal file
View File

@ -0,0 +1,344 @@
"""
Zentrale Konfigurationsdatei für das 3D-Druck-Management-System
Diese Datei enthält alle Konfigurationseinstellungen, die zuvor im config-Ordner waren.
"""
import os
import json
from datetime import timedelta
def get_env_variable(name: str, default: str = None) -> str:
    """
    Read an environment variable, falling back to *default* when unset.

    Args:
        name: Name of the environment variable.
        default: Value returned when the variable is not set.

    Returns:
        str: The variable's value, or *default* (which may be None).
    """
    return os.getenv(name, default)
# ===== BASIC CONFIGURATION =====
# NOTE(review): the secret key and the Tapo credentials below are hardcoded;
# they should be moved to environment variables / a secrets store — TODO confirm.
SECRET_KEY = "7445630171969DFAC92C53CEC92E67A9CB2E00B3CB2F"
# Dynamic paths derived from this file's location (utils/ -> project root)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))  # project root
DATABASE_PATH = os.path.join(BASE_DIR, "instance", "printer_manager.db")  # SQLite database file
# ===== SMART PLUG CONFIGURATION =====
# TP-Link Tapo P110 default credentials (shared by all plugs)
TAPO_USERNAME = "till.tomczak@mercedes-benz.com"
TAPO_PASSWORD = "744563017196A"
# Enable automatic discovery of smart plugs on the network
TAPO_AUTO_DISCOVERY = True
# Default plug IPs (these can later be overridden by values stored in the database)
DEFAULT_TAPO_IPS = [
    "192.168.0.103",  # reachable plug according to tests
    "192.168.0.104",  # reachable plug according to tests
    "192.168.0.100",
    "192.168.0.101",
    "192.168.0.102",
    "192.168.0.105"
]
# Timeout configuration for Tapo connections
TAPO_TIMEOUT = 10  # seconds
TAPO_RETRY_COUNT = 3  # number of retry attempts
# ===== PRINTER CONFIGURATION =====
# Static name -> connection mapping of the managed 3D printers.
# NOTE(review): several IPs overlap with DEFAULT_TAPO_IPS above — presumably
# each printer shares an address with its smart plug; verify intended.
PRINTERS = {
    "Printer 1": {"ip": "192.168.0.100"},
    "Printer 2": {"ip": "192.168.0.101"},
    "Printer 3": {"ip": "192.168.0.102"},
    "Printer 4": {"ip": "192.168.0.103"},
    "Printer 5": {"ip": "192.168.0.104"},
    "Printer 6": {"ip": "192.168.0.106"}
}
# ===== LOGGING CONFIGURATION =====
LOG_DIR = os.path.join(BASE_DIR, "logs")
# One subdirectory per log category; get_log_file() in this module maps
# category names to paths inside these directories.
LOG_SUBDIRS = ["app", "scheduler", "auth", "jobs", "printers", "errors", "user", "kiosk",
               "admin", "admin_api", "guest", "analytics", "uploads", "sessions"]
LOG_LEVEL = "INFO"
LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
LOG_FILE_MAX_BYTES = 10 * 1024 * 1024  # 10MB per log file before rotation
LOG_BACKUP_COUNT = 5  # number of rotated backup files kept
# ===== FLASK CONFIGURATION =====
FLASK_HOST = "0.0.0.0"
FLASK_PORT = 443  # can be changed to 8443 for non-privileged ports
FLASK_FALLBACK_PORT = 8080
FLASK_DEBUG = False  # must stay False in production!
SESSION_LIFETIME = timedelta(hours=2)  # session duration
# ===== UPLOAD CONFIGURATION =====
UPLOAD_FOLDER = os.path.join(BASE_DIR, "uploads")
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'gcode', '3mf', 'stl', 'obj', 'amf'}
MAX_CONTENT_LENGTH = 16 * 1024 * 1024  # 16MB maximum request size
MAX_FILE_SIZE = 16 * 1024 * 1024  # 16MB maximum file size for the drag & drop system
# ===== ENVIRONMENT CONFIGURATION =====
ENVIRONMENT = get_env_variable("MYP_ENVIRONMENT", "development")
# ===== SSL CONFIGURATION =====
SSL_ENABLED = get_env_variable("MYP_SSL_ENABLED", "True").lower() in ("true", "1", "yes")
SSL_CERT_PATH = os.path.join(BASE_DIR, "certs", "myp.crt")
SSL_KEY_PATH = os.path.join(BASE_DIR, "certs", "myp.key")
SSL_HOSTNAME = get_env_variable("MYP_SSL_HOSTNAME", "localhost")
# ===== SCHEDULER CONFIGURATION =====
SCHEDULER_INTERVAL = 60  # seconds between scheduler runs
SCHEDULER_ENABLED = True
# ===== DATABASE CONFIGURATION =====
DB_ENGINE = f"sqlite:///{DATABASE_PATH}"
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ENGINE_OPTIONS = {
    'pool_pre_ping': True,  # verify pooled connections before use
    'pool_recycle': 300,  # recycle connections after 5 minutes
}
# ===== SECURITY CONFIGURATION =====
WTF_CSRF_ENABLED = True
WTF_CSRF_TIME_LIMIT = 3600  # 1 hour
SESSION_COOKIE_SECURE = SSL_ENABLED  # only send cookies over HTTPS
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SAMESITE = 'Lax'
# ===== E-MAIL CONFIGURATION (optional) =====
MAIL_SERVER = get_env_variable('MAIL_SERVER')
MAIL_PORT = int(get_env_variable('MAIL_PORT', '587'))
MAIL_USE_TLS = get_env_variable('MAIL_USE_TLS', 'true').lower() in ['true', 'on', '1']
MAIL_USERNAME = get_env_variable('MAIL_USERNAME')
MAIL_PASSWORD = get_env_variable('MAIL_PASSWORD')
# ===== HELPER FUNCTIONS =====
def get_log_file(category: str) -> str:
    """
    Return the log file path for a given category.

    Unknown categories are mapped to the generic "app" log so callers
    always receive a valid path.

    Args:
        category: Log category (app, scheduler, auth, jobs, printers, errors, ...).

    Returns:
        str: Path of the category's log file.
    """
    safe_category = category if category in LOG_SUBDIRS else "app"
    return os.path.join(LOG_DIR, safe_category, f"{safe_category}.log")
def ensure_log_directories():
    """Create every required log directory (base dir plus all category subdirs)."""
    targets = [LOG_DIR] + [os.path.join(LOG_DIR, category) for category in LOG_SUBDIRS]
    for target in targets:
        os.makedirs(target, exist_ok=True)
def ensure_database_directory():
    """Create the directory that will hold the SQLite database file, if any."""
    parent = os.path.dirname(DATABASE_PATH)
    if parent:
        os.makedirs(parent, exist_ok=True)
def ensure_ssl_directory():
    """Create the directory for the SSL certificate/key files if needed.

    Relies solely on ``os.makedirs(..., exist_ok=True)`` instead of a prior
    ``os.path.exists`` check, which was redundant and racy (TOCTOU) when
    several processes start at the same time.
    """
    ssl_dir = os.path.dirname(SSL_CERT_PATH)
    if ssl_dir:
        os.makedirs(ssl_dir, exist_ok=True)
def ensure_upload_directory():
    """Create the upload folder and all of its type-specific subfolders.

    Bug fix: previously the subfolders were only created when the base upload
    folder was missing entirely; if the base folder existed but a subfolder
    had been deleted, it was never recreated. Every path is now ensured
    idempotently on each call.
    """
    os.makedirs(UPLOAD_FOLDER, exist_ok=True)
    # Subfolders for the different upload types
    for subdir in ['jobs', 'guests', 'avatars', 'assets', 'logs', 'backups', 'temp']:
        os.makedirs(os.path.join(UPLOAD_FOLDER, subdir), exist_ok=True)
def get_ssl_context():
    """
    Return the SSL context for Flask when SSL is enabled.

    Returns:
        tuple | None: (certificate path, key path) when SSL is enabled and
        both files exist (or could be created in debug mode), otherwise None.
    """
    if not SSL_ENABLED:
        return None

    def _certs_exist() -> bool:
        # Both the certificate and the private key must be present.
        return os.path.exists(SSL_CERT_PATH) and os.path.exists(SSL_KEY_PATH)

    if not _certs_exist():
        ensure_ssl_directory()
        if not FLASK_DEBUG:
            # Outside debug mode we never generate certificates on the fly.
            print("WARNUNG: SSL-Zertifikate nicht gefunden und Nicht-Debug-Modus. SSL wird deaktiviert.")
            return None
        # In development, try to create simple self-signed certificates.
        print("SSL-Zertifikate nicht gefunden. Erstelle einfache selbstsignierte Zertifikate...")
        try:
            create_simple_ssl_cert()
            if not _certs_exist():
                print("Konnte keine SSL-Zertifikate erstellen.")
                return None
        except Exception as e:
            print(f"Fehler beim Erstellen der SSL-Zertifikate: {e}")
            return None

    return (SSL_CERT_PATH, SSL_KEY_PATH)
def create_simple_ssl_cert():
    """
    Create a Mercedes-Benz SSL certificate via the project's SSL manager.

    Returns:
        bool: True if the certificate was created, False on any failure.
        (The failure paths previously returned None; False keeps the same
        truthiness while giving a consistent bool interface.)
    """
    try:
        # Local import: the SSL manager is only needed here and may be absent.
        from utils.ssl_manager import ssl_manager
        success = ssl_manager.generate_mercedes_certificate()
        if success:
            print(f"Mercedes-Benz SSL-Zertifikat erfolgreich erstellt: {SSL_CERT_PATH}")
            return True
        print("Fehler beim Erstellen des Mercedes-Benz SSL-Zertifikats")
        return False
    except ImportError as e:
        print(f"SSL-Manager nicht verfügbar: {e}")
        return False
    except Exception as e:
        print(f"Fehler beim Erstellen der SSL-Zertifikate: {e}")
        return False
# ===== CONFIGURATION CLASSES FOR THE DIFFERENT ENVIRONMENTS =====
class Config:
    """Base configuration: snapshots the module-level settings as class
    attributes so they can be consumed as a single configuration object
    (e.g. via Flask's ``app.config.from_object``)."""
    # Security / session settings
    SECRET_KEY = SECRET_KEY
    PERMANENT_SESSION_LIFETIME = SESSION_LIFETIME
    SESSION_COOKIE_SECURE = SESSION_COOKIE_SECURE
    SESSION_COOKIE_HTTPONLY = SESSION_COOKIE_HTTPONLY
    SESSION_COOKIE_SAMESITE = SESSION_COOKIE_SAMESITE
    # Database settings
    SQLALCHEMY_DATABASE_URI = DB_ENGINE
    SQLALCHEMY_TRACK_MODIFICATIONS = SQLALCHEMY_TRACK_MODIFICATIONS
    SQLALCHEMY_ENGINE_OPTIONS = SQLALCHEMY_ENGINE_OPTIONS
    # Upload settings
    UPLOAD_FOLDER = UPLOAD_FOLDER
    MAX_CONTENT_LENGTH = MAX_CONTENT_LENGTH
    ALLOWED_EXTENSIONS = ALLOWED_EXTENSIONS
    # CSRF settings
    WTF_CSRF_ENABLED = WTF_CSRF_ENABLED
    WTF_CSRF_TIME_LIMIT = WTF_CSRF_TIME_LIMIT
    # Logging settings
    LOG_LEVEL = LOG_LEVEL
    LOG_FILE_MAX_BYTES = LOG_FILE_MAX_BYTES
    LOG_BACKUP_COUNT = LOG_BACKUP_COUNT
    # Scheduler settings
    SCHEDULER_ENABLED = SCHEDULER_ENABLED
    SCHEDULER_INTERVAL = SCHEDULER_INTERVAL
    # SSL settings
    SSL_ENABLED = SSL_ENABLED
    SSL_CERT_PATH = SSL_CERT_PATH
    SSL_KEY_PATH = SSL_KEY_PATH
    # Server bind settings
    DEFAULT_PORT = FLASK_PORT
    DEFAULT_HOST = FLASK_HOST
    @staticmethod
    def init_app(app):
        """Initialise the application with this configuration (hook for subclasses)."""
        pass
class DevelopmentConfig(Config):
    """Development environment configuration (verbose logging, relaxed security)."""
    DEBUG = True
    TESTING = False
    LOG_LEVEL = 'DEBUG'
    SESSION_COOKIE_SECURE = False  # allow plain HTTP during local development
    WTF_CSRF_ENABLED = False  # simplifies API testing without CSRF tokens
    @staticmethod
    def init_app(app):
        """Run base init, then switch the root logger to DEBUG output."""
        Config.init_app(app)
        import logging
        logging.basicConfig(level=logging.DEBUG)
class TestingConfig(Config):
    """Test environment configuration (in-memory DB, short sessions, no CSRF)."""
    TESTING = True
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'sqlite:///:memory:'  # fresh in-memory DB per test run
    WTF_CSRF_ENABLED = False
    PERMANENT_SESSION_LIFETIME = timedelta(minutes=5)
    @staticmethod
    def init_app(app):
        """No extra initialisation beyond the base hook."""
        Config.init_app(app)
class ProductionConfig(Config):
    """Production configuration: hardened cookies, CSRF on, WARNING-level logging."""
    DEBUG = False
    TESTING = False
    SESSION_COOKIE_SECURE = True  # requires HTTPS
    WTF_CSRF_ENABLED = True
    LOG_LEVEL = 'WARNING'
    SSL_ENABLED = True
    @staticmethod
    def init_app(app):
        """Attach a rotating WARNING-level file handler to the app logger."""
        Config.init_app(app)
        # Production-specific initialisation
        import logging
        from logging.handlers import RotatingFileHandler
        # Make sure the log directory tree exists before opening the file
        ensure_log_directories()
        # File logging for production
        handler = RotatingFileHandler(
            get_log_file('app'),
            maxBytes=Config.LOG_FILE_MAX_BYTES,
            backupCount=Config.LOG_BACKUP_COUNT,
        )
        handler.setLevel(logging.WARNING)
        handler.setFormatter(logging.Formatter(LOG_FORMAT))
        app.logger.addHandler(handler)
        app.logger.setLevel(logging.WARNING)
# Lookup table mapping environment names to their configuration classes.
config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    'production': ProductionConfig,
    'default': DevelopmentConfig
}

def get_config_by_name(config_name):
    """
    Resolve a configuration class by its environment name.

    Args:
        config_name (str): Name of the configuration ('development',
            'testing', 'production').

    Returns:
        Config: The matching configuration class, or the default one for
        unknown names.
    """
    try:
        return config[config_name]
    except KeyError:
        return config['default']

View File

@ -8,7 +8,7 @@ import os
import sys
sys.path.append('.')
from config.settings import PRINTERS
from utils.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime

View File

@ -25,7 +25,7 @@ class SSLManager:
cert_path: Pfad zum SSL-Zertifikat
key_path: Pfad zum SSL-Schlüssel
"""
from config.settings import SSL_CERT_PATH, SSL_KEY_PATH
from utils.settings import SSL_CERT_PATH, SSL_KEY_PATH
self.cert_path = cert_path or SSL_CERT_PATH
self.key_path = key_path or SSL_KEY_PATH

View File

@ -16,7 +16,7 @@ app_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, app_dir)
from utils.database_cleanup import DatabaseCleanupManager, safe_database_cleanup
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
from utils.logging_config import get_logger
logger = get_logger("database_cleanup_test")

View File

@ -26,7 +26,7 @@ def test_internal_components():
# Test 1: Importiere kritische Module
try:
from models import User, Printer, Job, get_db_session, init_database
from config.settings import SECRET_KEY, DATABASE_PATH
from utils.settings import SECRET_KEY, DATABASE_PATH
from utils.logging_config import get_logger
results["module_imports"] = {"status": "SUCCESS", "message": "Alle kritischen Module importiert"}
except Exception as e:

View File

@ -398,7 +398,7 @@ class TimeoutForceQuitManager:
def _force_wal_shm_cleanup(self):
"""Aggressive Bereinigung von WAL/SHM-Dateien"""
try:
from config.settings import DATABASE_PATH
from utils.settings import DATABASE_PATH
logger.info("🧹 Force WAL/SHM-Cleanup...")

View File

@ -8,7 +8,7 @@ import sys
import os
sys.path.append('.')
from config.settings import PRINTERS
from utils.settings import PRINTERS
from database.db_manager import DatabaseManager
from models import Printer
from datetime import datetime