🔧 Aktualisierung der README.md zur Verbesserung der Backend- und Frontend-Installationsanweisungen. Einführung eines konsolidierten Setup-Skripts für die automatische Installation und Optimierung der Kiosk-Modus-Anweisungen. Hinzufügen eines neuen Dokuments für Verbindungstests zwischen Frontend und Backend. Verbesserte Farbpalette und CSS-Styles für Kiosk-Modus in tailwind.config.js und input.css. 📈
This commit is contained in:
parent
6e09b86e88
commit
de9cbe3740
104
README.md
104
README.md
@ -73,51 +73,58 @@ Dieses Repository enthält **zwei sich ergänzende Projektarbeiten** für die IH
|
||||
|
||||
### Backend-System (Hardware & APIs)
|
||||
```bash
|
||||
# Backend-Server starten (Till Tomczaks System)
|
||||
# Backend-Server automatisch installieren (Till Tomczaks System)
|
||||
cd backend
|
||||
sudo ./setup.sh # Automatische Installation
|
||||
python app.py # Oder für Development
|
||||
sudo ./setup.sh # Konsolidiertes Setup-Skript
|
||||
|
||||
# Kiosk-Modus auf Raspberry Pi
|
||||
sudo systemctl start myp-https.service
|
||||
# Oder manuell für Development
|
||||
python app.py
|
||||
```
|
||||
|
||||
### Frontend-System (Web-Interface)
|
||||
### Frontend-System (Web-Interface) - NEU: Automatische Installation
|
||||
```bash
|
||||
# Frontend-Server starten (Torben Haacks System)
|
||||
# Frontend-Server automatisch installieren (Torben Haacks System)
|
||||
cd frontend
|
||||
sudo ./setup.sh # Konsolidiertes Setup-Skript mit Mercedes SSL
|
||||
|
||||
# Oder manuell für Development
|
||||
pnpm install
|
||||
pnpm db # Datenbank einrichten
|
||||
pnpm dev # Development-Server
|
||||
|
||||
# Produktions-Deployment
|
||||
pnpm build && pnpm start
|
||||
```
|
||||
|
||||
### Vollständiges System
|
||||
```bash
|
||||
# Backend (API-Server)
|
||||
cd backend && python app.py --host 0.0.0.0 --port 5000 &
|
||||
cd backend && sudo ./setup.sh
|
||||
|
||||
# Frontend (Web-Interface)
|
||||
cd frontend && pnpm build && pnpm start &
|
||||
# Frontend (Web-Interface mit HTTPS)
|
||||
cd frontend && sudo ./setup.sh
|
||||
```
|
||||
|
||||
## 🌐 Systemzugriff
|
||||
|
||||
### Produktions-URLs
|
||||
- **Web-Interface**: `http://localhost:3000` (Torben Haacks Frontend)
|
||||
- **API-Backend**: `https://192.168.0.105:443/api` (Till Tomczaks APIs auf separatem Server)
|
||||
### Produktions-URLs (Nach Setup-Skript Installation)
|
||||
- **Frontend (HTTPS)**: `https://m040tbaraspi001.de040.corpintra.net` (Torben Haacks Frontend)
|
||||
- **Frontend (Lokal)**: `https://localhost` (Fallback-Zugang)
|
||||
- **API-Backend**: `https://192.168.0.105:443/api` (Till Tomczaks APIs)
|
||||
- **Kiosk-Modus**: `https://192.168.0.105:443` (Lokales Touch-Interface)
|
||||
|
||||
### Development-URLs
|
||||
- **Frontend (Dev)**: `http://localhost:3000` (Development-Server)
|
||||
- **Backend (Dev)**: `http://localhost:5000` (Development-API)
|
||||
|
||||
### Standard-Anmeldedaten
|
||||
- **Benutzername**: `admin`
|
||||
- **Passwort**: `admin123`
|
||||
|
||||
### Netzwerk-Konfiguration
|
||||
- **Backend-Server**: `192.168.0.105:443` (HTTPS)
|
||||
- **Frontend-Server**: `localhost:3000` (HTTP Development)
|
||||
- **SSL-Zertifikate**: Selbstsigniert (automatisch akzeptiert)
|
||||
### SSL-Zertifikate (Mercedes)
|
||||
Nach der automatischen Installation sind selbstsignierte Mercedes-Zertifikate verfügbar:
|
||||
- **Domain**: `m040tbaraspi001.de040.corpintra.net`
|
||||
- **Organisation**: Mercedes-Benz AG
|
||||
- **Abteilung**: IT-Abteilung
|
||||
- **Standort**: Stuttgart, Baden-Württemberg
|
||||
- **Gültigkeit**: 365 Tage
|
||||
|
||||
## 📁 Projektstruktur & Integration
|
||||
|
||||
@ -222,7 +229,18 @@ export const API_BASE_URL = {
|
||||
|
||||
## 🖥️ Deployment-Szenarien
|
||||
|
||||
### Szenario 1: Separate Server (Empfohlen)
|
||||
### Szenario 1: Automatische Produktions-Installation (Neu - Empfohlen)
|
||||
```bash
|
||||
# Backend-Server (Raspberry Pi oder Linux-Server)
|
||||
cd backend
|
||||
sudo ./setup.sh # Automatische Installation mit Kiosk-Modus
|
||||
|
||||
# Frontend-Server (separater Server oder gleicher Server)
|
||||
cd frontend
|
||||
sudo ./setup.sh # Automatische Installation mit HTTPS auf Port 443
|
||||
```
|
||||
|
||||
### Szenario 2: Separate Server (Manuell)
|
||||
```bash
|
||||
# Backend-Server (z.B. Raspberry Pi oder Linux-Server)
|
||||
cd backend
|
||||
@ -233,7 +251,7 @@ cd frontend
|
||||
npm run build && npm start
|
||||
```
|
||||
|
||||
### Szenario 2: Docker-Deployment
|
||||
### Szenario 3: Docker-Deployment
|
||||
```yaml
|
||||
# docker-compose.yml
|
||||
services:
|
||||
@ -243,16 +261,16 @@ services:
|
||||
|
||||
frontend:
|
||||
build: ./frontend
|
||||
ports: ["3000:3000"]
|
||||
ports: ["80:80", "443:443"]
|
||||
environment:
|
||||
- NEXT_PUBLIC_API_URL=http://backend:5000/api
|
||||
```
|
||||
|
||||
### Szenario 3: Raspberry Pi Kiosk (Lokal)
|
||||
### Szenario 4: Raspberry Pi Kiosk (Lokal)
|
||||
```bash
|
||||
# Vollständige Kiosk-Installation
|
||||
cd backend && sudo ./setup.sh
|
||||
# Automatischer Start: Touch-Interface + Smart-Plug-Steuerung
|
||||
# Vollständige Kiosk-Installation (Backend + Frontend)
|
||||
cd backend && sudo ./setup.sh # Backend mit Kiosk-Interface
|
||||
cd frontend && sudo ./setup.sh # Frontend mit HTTPS-Server
|
||||
```
|
||||
|
||||
## 🔧 Konfiguration & Environment
|
||||
@ -276,7 +294,7 @@ OFFLINE_MODE=true
|
||||
|
||||
### Frontend-Konfiguration (.env.local)
|
||||
```env
|
||||
# Frontend-Server Einstellungen - Separater Backend-Server
|
||||
# Frontend-Server Einstellungen - HTTPS mit Mercedes SSL
|
||||
NEXT_PUBLIC_API_URL=https://192.168.0.105:443
|
||||
DATABASE_URL=file:./db/frontend.db
|
||||
|
||||
@ -286,6 +304,11 @@ NODE_TLS_REJECT_UNAUTHORIZED=0
|
||||
# Analytics-Features
|
||||
ENABLE_ADVANCED_ANALYTICS=true
|
||||
CHART_REFRESH_INTERVAL=30000
|
||||
|
||||
# Production HTTPS (Nach Setup-Skript)
|
||||
HTTPS_ENABLED=true
|
||||
SSL_CERT_PATH=/etc/ssl/certs/myp/frontend.crt
|
||||
SSL_KEY_PATH=/etc/ssl/certs/myp/frontend.key
|
||||
```
|
||||
|
||||
## 📊 Features im Überblick
|
||||
@ -321,6 +344,14 @@ python app.py --debug
|
||||
```
|
||||
|
||||
### Frontend-Entwicklung (Torben Haack)
|
||||
|
||||
#### Automatische Installation (Empfohlen)
|
||||
```bash
|
||||
cd frontend
|
||||
sudo ./setup.sh # Interaktives Setup-Menü
|
||||
```
|
||||
|
||||
#### Manuelle Entwicklung
|
||||
```bash
|
||||
cd frontend
|
||||
pnpm install
|
||||
@ -328,13 +359,26 @@ pnpm db:migrate
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
#### Frontend-Setup-Skript Features
|
||||
Das neue `frontend/setup.sh` bietet:
|
||||
- **Vollständige Installation**: Docker, SSL-Zertifikate, Caddy Reverse Proxy
|
||||
- **Mercedes SSL-Zertifikate**: Selbstsignierte Zertifikate für `m040tbaraspi001.de040.corpintra.net`
|
||||
- **Automatischer HTTPS-Server**: Verfügbar auf Port 443 (nicht 3000)
|
||||
- **Systemd-Integration**: Automatischer Start beim Boot
|
||||
- **Interaktives Menü**:
|
||||
1. Vollständige Frontend-Installation
|
||||
2. SSL-Zertifikate neu generieren
|
||||
3. Service-Status prüfen
|
||||
4. Beenden
|
||||
|
||||
### Integration testen
|
||||
```bash
|
||||
# Backend-APIs testen
|
||||
curl http://localhost:5000/api/printers
|
||||
|
||||
# Frontend mit Backend-Integration
|
||||
# Frontend auf :3000 konsumiert Backend-APIs von :5000
|
||||
# Frontend mit HTTPS (nach Setup-Skript)
|
||||
curl -k https://m040tbaraspi001.de040.corpintra.net/health
|
||||
curl -k https://localhost/health
|
||||
```
|
||||
|
||||
## 📚 Dokumentation
|
||||
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -95,3 +95,4 @@
|
||||
2025-06-01 23:42:51 - [analytics] analytics - [INFO] INFO - 📈 Analytics Engine initialisiert
|
||||
2025-06-01 23:43:48 - [analytics] analytics - [INFO] INFO - 📈 Analytics Engine initialisiert
|
||||
2025-06-01 23:47:39 - [analytics] analytics - [INFO] INFO - 📈 Analytics Engine initialisiert
|
||||
2025-06-01 23:50:28 - [analytics] analytics - [INFO] INFO - 📈 Analytics Engine initialisiert
|
||||
|
@ -2294,3 +2294,33 @@ WHERE jobs.status = ?) AS anon_1]
|
||||
2025-06-01 23:47:45 - [app] app - [INFO] INFO - Job-Scheduler gestartet
|
||||
2025-06-01 23:47:45 - [app] app - [INFO] INFO - Starte Debug-Server auf 0.0.0.0:5000 (HTTP)
|
||||
2025-06-01 23:47:45 - [app] app - [INFO] INFO - Windows-Debug-Modus: Auto-Reload deaktiviert
|
||||
2025-06-01 23:48:16 - [app] app - [INFO] INFO - Dashboard-Refresh angefordert von User 1
|
||||
2025-06-01 23:48:16 - [app] app - [INFO] INFO - Dashboard-Refresh angefordert von User 1
|
||||
2025-06-01 23:48:16 - [app] app - [ERROR] ERROR - Fehler beim Abrufen der Dashboard-Statistiken: '>' not supported between instances of 'NoneType' and 'int'
|
||||
2025-06-01 23:48:16 - [app] app - [INFO] INFO - Dashboard-Refresh erfolgreich: {'active_jobs': 0, 'available_printers': 0, 'total_jobs': 0, 'pending_jobs': 0, 'success_rate': 0, 'completed_jobs': 0, 'failed_jobs': 0, 'cancelled_jobs': 0, 'total_users': 0, 'online_printers': 0, 'offline_printers': 0}
|
||||
2025-06-01 23:48:16 - [app] app - [ERROR] ERROR - Fehler beim Abrufen der Dashboard-Statistiken: '>' not supported between instances of 'NoneType' and 'int'
|
||||
2025-06-01 23:48:16 - [app] app - [INFO] INFO - Dashboard-Refresh erfolgreich: {'active_jobs': 0, 'available_printers': 0, 'total_jobs': 0, 'pending_jobs': 0, 'success_rate': 0, 'completed_jobs': 0, 'failed_jobs': 0, 'cancelled_jobs': 0, 'total_users': 0, 'online_printers': 0, 'offline_printers': 0}
|
||||
2025-06-01 23:48:47 - [app] app - [INFO] INFO - Dashboard-Refresh angefordert von User 1
|
||||
2025-06-01 23:48:47 - [app] app - [INFO] INFO - Dashboard-Refresh angefordert von User 1
|
||||
2025-06-01 23:48:47 - [app] app - [INFO] INFO - Dashboard-Refresh erfolgreich: {'active_jobs': 0, 'available_printers': 2, 'total_jobs': 16, 'pending_jobs': 0, 'success_rate': 0.0, 'completed_jobs': 0, 'failed_jobs': 0, 'cancelled_jobs': 0, 'total_users': None, 'online_printers': 0, 'offline_printers': 2}
|
||||
2025-06-01 23:48:47 - [app] app - [INFO] INFO - Dashboard-Refresh erfolgreich: {'active_jobs': 0, 'available_printers': 2, 'total_jobs': 16, 'pending_jobs': 0, 'success_rate': 0.0, 'completed_jobs': None, 'failed_jobs': 0, 'cancelled_jobs': 0, 'total_users': 1, 'online_printers': 0, 'offline_printers': 2}
|
||||
2025-06-01 23:50:28 - [app] app - [INFO] INFO - Optimierte SQLite-Engine erstellt: C:\Users\TTOMCZA.EMEA\Dev\Projektarbeit-MYP\backend\database\myp.db
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - SQLite für Raspberry Pi optimiert (reduzierte Cache-Größe, SD-Karten I/O)
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - ✅ Timeout Force-Quit Manager geladen
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - ✅ Zentraler Shutdown-Manager initialisiert
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - 🔄 Starte Datenbank-Setup und Migrationen...
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - Datenbank mit Optimierungen initialisiert
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - ✅ JobOrder-Tabelle bereits vorhanden
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - Admin-Benutzer admin (admin@mercedes-benz.com) existiert bereits. Passwort wurde zurückgesetzt.
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - ✅ Datenbank-Setup und Migrationen erfolgreich abgeschlossen
|
||||
2025-06-01 23:50:29 - [app] app - [INFO] INFO - 🖨️ Starte automatische Steckdosen-Initialisierung...
|
||||
2025-06-01 23:50:33 - [app] app - [INFO] INFO - ✅ Steckdosen-Initialisierung: 0/2 Drucker erfolgreich
|
||||
2025-06-01 23:50:33 - [app] app - [WARNING] WARNING - ⚠️ 2 Drucker konnten nicht initialisiert werden
|
||||
2025-06-01 23:50:33 - [app] app - [INFO] INFO - 🔄 Debug-Modus: Queue Manager deaktiviert für Entwicklung
|
||||
2025-06-01 23:50:33 - [app] app - [INFO] INFO - Job-Scheduler gestartet
|
||||
2025-06-01 23:50:33 - [app] app - [INFO] INFO - Starte Debug-Server auf 0.0.0.0:5000 (HTTP)
|
||||
2025-06-01 23:50:33 - [app] app - [INFO] INFO - Windows-Debug-Modus: Auto-Reload deaktiviert
|
||||
2025-06-01 23:51:41 - [app] app - [INFO] INFO - Dashboard-Refresh angefordert von User 1
|
||||
2025-06-01 23:51:41 - [app] app - [INFO] INFO - Dashboard-Refresh angefordert von User 1
|
||||
2025-06-01 23:51:41 - [app] app - [INFO] INFO - Dashboard-Refresh erfolgreich: {'active_jobs': 0, 'available_printers': 2, 'total_jobs': 16, 'pending_jobs': 0, 'success_rate': 0.0, 'completed_jobs': 0, 'failed_jobs': 0, 'cancelled_jobs': 0, 'total_users': 1, 'online_printers': 0, 'offline_printers': 2}
|
||||
2025-06-01 23:51:41 - [app] app - [INFO] INFO - Dashboard-Refresh erfolgreich: {'active_jobs': 0, 'available_printers': 2, 'total_jobs': 16, 'pending_jobs': 0, 'success_rate': 0.0, 'completed_jobs': 0, 'failed_jobs': 0, 'cancelled_jobs': 0, 'total_users': 1, 'online_printers': 0, 'offline_printers': 2}
|
||||
|
@ -99,3 +99,4 @@
|
||||
2025-06-01 23:42:51 - [backup] backup - [INFO] INFO - BackupManager initialisiert (minimal implementation)
|
||||
2025-06-01 23:43:48 - [backup] backup - [INFO] INFO - BackupManager initialisiert (minimal implementation)
|
||||
2025-06-01 23:47:39 - [backup] backup - [INFO] INFO - BackupManager initialisiert (minimal implementation)
|
||||
2025-06-01 23:50:28 - [backup] backup - [INFO] INFO - BackupManager initialisiert (minimal implementation)
|
||||
|
@ -381,3 +381,7 @@
|
||||
2025-06-01 23:47:40 - [dashboard] dashboard - [INFO] INFO - Dashboard-Background-Worker gestartet
|
||||
2025-06-01 23:47:40 - [dashboard] dashboard - [INFO] INFO - Dashboard WebSocket-Server wird mit threading initialisiert (eventlet-Fallback)
|
||||
2025-06-01 23:47:40 - [dashboard] dashboard - [INFO] INFO - Dashboard WebSocket-Server initialisiert (async_mode: threading)
|
||||
2025-06-01 23:50:28 - [dashboard] dashboard - [INFO] INFO - Dashboard-Background-Worker gestartet
|
||||
2025-06-01 23:50:29 - [dashboard] dashboard - [INFO] INFO - Dashboard-Background-Worker gestartet
|
||||
2025-06-01 23:50:29 - [dashboard] dashboard - [INFO] INFO - Dashboard WebSocket-Server wird mit threading initialisiert (eventlet-Fallback)
|
||||
2025-06-01 23:50:29 - [dashboard] dashboard - [INFO] INFO - Dashboard WebSocket-Server initialisiert (async_mode: threading)
|
||||
|
@ -95,3 +95,4 @@
|
||||
2025-06-01 23:42:51 - [database] database - [INFO] INFO - Datenbank-Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:43:48 - [database] database - [INFO] INFO - Datenbank-Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:47:39 - [database] database - [INFO] INFO - Datenbank-Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:50:28 - [database] database - [INFO] INFO - Datenbank-Wartungs-Scheduler gestartet
|
||||
|
@ -92,3 +92,4 @@
|
||||
2025-06-01 23:42:52 - [email_notification] email_notification - [INFO] INFO - 📧 Offline-E-Mail-Benachrichtigung initialisiert (kein echter E-Mail-Versand)
|
||||
2025-06-01 23:43:48 - [email_notification] email_notification - [INFO] INFO - 📧 Offline-E-Mail-Benachrichtigung initialisiert (kein echter E-Mail-Versand)
|
||||
2025-06-01 23:47:40 - [email_notification] email_notification - [INFO] INFO - 📧 Offline-E-Mail-Benachrichtigung initialisiert (kein echter E-Mail-Versand)
|
||||
2025-06-01 23:50:29 - [email_notification] email_notification - [INFO] INFO - 📧 Offline-E-Mail-Benachrichtigung initialisiert (kein echter E-Mail-Versand)
|
||||
|
@ -188,3 +188,5 @@
|
||||
2025-06-01 23:43:49 - [maintenance] maintenance - [INFO] INFO - Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:47:40 - [maintenance] maintenance - [INFO] INFO - Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:47:40 - [maintenance] maintenance - [INFO] INFO - Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:50:29 - [maintenance] maintenance - [INFO] INFO - Wartungs-Scheduler gestartet
|
||||
2025-06-01 23:50:29 - [maintenance] maintenance - [INFO] INFO - Wartungs-Scheduler gestartet
|
||||
|
@ -188,3 +188,5 @@
|
||||
2025-06-01 23:43:49 - [multi_location] multi_location - [INFO] INFO - Standard-Standort erstellt
|
||||
2025-06-01 23:47:40 - [multi_location] multi_location - [INFO] INFO - Standard-Standort erstellt
|
||||
2025-06-01 23:47:40 - [multi_location] multi_location - [INFO] INFO - Standard-Standort erstellt
|
||||
2025-06-01 23:50:29 - [multi_location] multi_location - [INFO] INFO - Standard-Standort erstellt
|
||||
2025-06-01 23:50:29 - [multi_location] multi_location - [INFO] INFO - Standard-Standort erstellt
|
||||
|
@ -94,3 +94,4 @@
|
||||
2025-06-01 23:42:52 - [permissions] permissions - [INFO] INFO - 🔐 Permission Template Helpers registriert
|
||||
2025-06-01 23:43:49 - [permissions] permissions - [INFO] INFO - 🔐 Permission Template Helpers registriert
|
||||
2025-06-01 23:47:40 - [permissions] permissions - [INFO] INFO - 🔐 Permission Template Helpers registriert
|
||||
2025-06-01 23:50:29 - [permissions] permissions - [INFO] INFO - 🔐 Permission Template Helpers registriert
|
||||
|
@ -2720,3 +2720,36 @@
|
||||
2025-06-01 23:47:55 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.103): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:47:55 - [printer_monitor] printer_monitor - [INFO] INFO - ✅ Status-Update abgeschlossen für 2 Drucker
|
||||
2025-06-01 23:47:59 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 4/6: 192.168.0.101
|
||||
2025-06-01 23:48:05 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 5/6: 192.168.0.102
|
||||
2025-06-01 23:48:11 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 6/6: 192.168.0.105
|
||||
2025-06-01 23:48:18 - [printer_monitor] printer_monitor - [INFO] INFO - ✅ Steckdosen-Erkennung abgeschlossen: 0/6 Steckdosen gefunden in 36.1s
|
||||
2025-06-01 23:50:28 - [printer_monitor] printer_monitor - [INFO] INFO - 🖨️ Drucker-Monitor initialisiert
|
||||
2025-06-01 23:50:28 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Automatische Tapo-Erkennung in separatem Thread gestartet
|
||||
2025-06-01 23:50:29 - [printer_monitor] printer_monitor - [INFO] INFO - 🚀 Starte Steckdosen-Initialisierung beim Programmstart...
|
||||
2025-06-01 23:50:30 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Starte automatische Tapo-Steckdosenerkennung...
|
||||
2025-06-01 23:50:30 - [printer_monitor] printer_monitor - [INFO] INFO - 🔄 Teste 6 Standard-IPs aus der Konfiguration
|
||||
2025-06-01 23:50:30 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 1/6: 192.168.0.103
|
||||
2025-06-01 23:50:31 - [printer_monitor] printer_monitor - [WARNING] WARNING - ❌ Tapo P110 (192.168.0.103): Steckdose konnte nicht ausgeschaltet werden
|
||||
2025-06-01 23:50:33 - [printer_monitor] printer_monitor - [WARNING] WARNING - ❌ Tapo P110 (192.168.0.104): Steckdose konnte nicht ausgeschaltet werden
|
||||
2025-06-01 23:50:33 - [printer_monitor] printer_monitor - [INFO] INFO - 🎯 Steckdosen-Initialisierung abgeschlossen: 0/2 erfolgreich
|
||||
2025-06-01 23:50:36 - [printer_monitor] printer_monitor - [INFO] INFO - 🔄 Aktualisiere Live-Druckerstatus...
|
||||
2025-06-01 23:50:36 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Prüfe Status von 2 aktiven Druckern...
|
||||
2025-06-01 23:50:36 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 2/6: 192.168.0.104
|
||||
2025-06-01 23:50:37 - [printer_monitor] printer_monitor - [INFO] INFO - 🔄 Aktualisiere Live-Druckerstatus...
|
||||
2025-06-01 23:50:37 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Prüfe Status von 2 aktiven Druckern...
|
||||
2025-06-01 23:50:42 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 3/6: 192.168.0.100
|
||||
2025-06-01 23:50:45 - [printer_monitor] printer_monitor - [INFO] INFO - 🔄 Aktualisiere Live-Druckerstatus...
|
||||
2025-06-01 23:50:45 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Prüfe Status von 2 aktiven Druckern...
|
||||
2025-06-01 23:50:45 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.103): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:50:45 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.104): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:50:45 - [printer_monitor] printer_monitor - [INFO] INFO - ✅ Status-Update abgeschlossen für 2 Drucker
|
||||
2025-06-01 23:50:46 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.103): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:50:46 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.104): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:50:46 - [printer_monitor] printer_monitor - [INFO] INFO - ✅ Status-Update abgeschlossen für 2 Drucker
|
||||
2025-06-01 23:50:48 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 4/6: 192.168.0.101
|
||||
2025-06-01 23:50:54 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.103): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:50:54 - [printer_monitor] printer_monitor - [WARNING] WARNING - 🔌 Tapo P110 (192.168.0.104): UNREACHABLE (Ping fehlgeschlagen)
|
||||
2025-06-01 23:50:54 - [printer_monitor] printer_monitor - [INFO] INFO - ✅ Status-Update abgeschlossen für 2 Drucker
|
||||
2025-06-01 23:50:54 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 5/6: 192.168.0.102
|
||||
2025-06-01 23:51:00 - [printer_monitor] printer_monitor - [INFO] INFO - 🔍 Teste IP 6/6: 192.168.0.105
|
||||
2025-06-01 23:51:06 - [printer_monitor] printer_monitor - [INFO] INFO - ✅ Steckdosen-Erkennung abgeschlossen: 0/6 Steckdosen gefunden in 36.1s
|
||||
|
@ -5203,3 +5203,33 @@
|
||||
2025-06-01 23:47:46 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:47:55 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:47:55 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 9023.39ms
|
||||
2025-06-01 23:48:16 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:48:16 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:48:16 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 2.50ms
|
||||
2025-06-01 23:48:17 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:48:17 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:48:17 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 0.56ms
|
||||
2025-06-01 23:48:47 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:48:47 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:48:47 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 0.68ms
|
||||
2025-06-01 23:48:48 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:48:48 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:48:48 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 0.50ms
|
||||
2025-06-01 23:50:36 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:50:37 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:50:45 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:50:45 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:50:45 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 9026.16ms
|
||||
2025-06-01 23:50:46 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:50:46 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 9043.59ms
|
||||
2025-06-01 23:50:54 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:50:54 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 9042.87ms
|
||||
2025-06-01 23:51:11 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:51:11 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:51:11 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 0.67ms
|
||||
2025-06-01 23:51:41 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:51:41 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:51:41 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 1.07ms
|
||||
2025-06-01 23:51:42 - [printers] printers - [INFO] INFO - 🔄 Live-Status-Abfrage von Benutzer Administrator (ID: 1)
|
||||
2025-06-01 23:51:42 - [printers] printers - [INFO] INFO - ✅ Live-Status-Abfrage erfolgreich: 2 Drucker
|
||||
2025-06-01 23:51:42 - [printers] printers - [INFO] INFO - ✅ API-Live-Drucker-Status-Abfrage 'get_live_printer_status' erfolgreich in 1.04ms
|
||||
|
@ -13220,3 +13220,208 @@
|
||||
2025-06-01 23:47:59 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F73D445A0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:47:59 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 5 nicht einschalten
|
||||
2025-06-01 23:47:59 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 6: test
|
||||
2025-06-01 23:48:01 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F73D448D0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:01 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 6 nicht einschalten
|
||||
2025-06-01 23:48:01 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 9: zi
|
||||
2025-06-01 23:48:03 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72794E20>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:03 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 9 nicht einschalten
|
||||
2025-06-01 23:48:03 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 10: zi
|
||||
2025-06-01 23:48:05 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797680>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:05 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 10 nicht einschalten
|
||||
2025-06-01 23:48:05 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 11: fee
|
||||
2025-06-01 23:48:08 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797790>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:08 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 11 nicht einschalten
|
||||
2025-06-01 23:48:08 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 12: fee
|
||||
2025-06-01 23:48:10 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72795E10>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:10 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 12 nicht einschalten
|
||||
2025-06-01 23:48:10 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 13: e2
|
||||
2025-06-01 23:48:12 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797460>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:12 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 13 nicht einschalten
|
||||
2025-06-01 23:48:12 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 14: e2
|
||||
2025-06-01 23:48:14 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797240>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:14 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 14 nicht einschalten
|
||||
2025-06-01 23:48:14 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 15: test
|
||||
2025-06-01 23:48:16 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796E00>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:16 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 15 nicht einschalten
|
||||
2025-06-01 23:48:16 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 16: test
|
||||
2025-06-01 23:48:18 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796AD0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:18 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 16 nicht einschalten
|
||||
2025-06-01 23:48:19 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 7: test
|
||||
2025-06-01 23:48:21 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797570>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:21 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 7 nicht einschalten
|
||||
2025-06-01 23:48:21 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 8: test
|
||||
2025-06-01 23:48:23 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72795E10>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:23 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 8 nicht einschalten
|
||||
2025-06-01 23:48:23 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 1: test
|
||||
2025-06-01 23:48:25 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797790>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:25 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 1 nicht einschalten
|
||||
2025-06-01 23:48:25 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 2: test
|
||||
2025-06-01 23:48:27 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797680>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:27 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 2 nicht einschalten
|
||||
2025-06-01 23:48:27 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 3: test
|
||||
2025-06-01 23:48:29 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72794E20>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:29 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 3 nicht einschalten
|
||||
2025-06-01 23:48:29 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 4: test
|
||||
2025-06-01 23:48:32 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727969C0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:32 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 4 nicht einschalten
|
||||
2025-06-01 23:48:32 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 5: test
|
||||
2025-06-01 23:48:34 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727957B0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:34 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 5 nicht einschalten
|
||||
2025-06-01 23:48:34 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 6: test
|
||||
2025-06-01 23:48:36 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727978A0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:36 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 6 nicht einschalten
|
||||
2025-06-01 23:48:36 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 9: zi
|
||||
2025-06-01 23:48:38 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727979B0>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:38 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 9 nicht einschalten
|
||||
2025-06-01 23:48:38 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 10: zi
|
||||
2025-06-01 23:48:40 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797350>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:40 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 10 nicht einschalten
|
||||
2025-06-01 23:48:40 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 11: fee
|
||||
2025-06-01 23:48:42 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796CF0>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:42 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 11 nicht einschalten
|
||||
2025-06-01 23:48:42 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 12: fee
|
||||
2025-06-01 23:48:44 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796E00>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:44 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 12 nicht einschalten
|
||||
2025-06-01 23:48:44 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 13: e2
|
||||
2025-06-01 23:48:46 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796690>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:46 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 13 nicht einschalten
|
||||
2025-06-01 23:48:46 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 14: e2
|
||||
2025-06-01 23:48:48 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796360>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:48 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 14 nicht einschalten
|
||||
2025-06-01 23:48:48 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 15: test
|
||||
2025-06-01 23:48:51 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72796AD0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:51 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 15 nicht einschalten
|
||||
2025-06-01 23:48:51 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 16: test
|
||||
2025-06-01 23:48:53 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797350>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:53 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 16 nicht einschalten
|
||||
2025-06-01 23:48:54 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 7: test
|
||||
2025-06-01 23:48:56 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F72797BD0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:56 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 7 nicht einschalten
|
||||
2025-06-01 23:48:56 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 8: test
|
||||
2025-06-01 23:48:58 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727978A0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:48:58 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 8 nicht einschalten
|
||||
2025-06-01 23:48:58 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 1: test
|
||||
2025-06-01 23:49:00 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727957B0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:49:00 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 1 nicht einschalten
|
||||
2025-06-01 23:49:00 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 2: test
|
||||
2025-06-01 23:49:02 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x0000025F727949E0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:49:02 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 2 nicht einschalten
|
||||
2025-06-01 23:49:02 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 3: test
|
||||
2025-06-01 23:50:28 - [scheduler] scheduler - [INFO] INFO - Task check_jobs registriert: Intervall 30s, Enabled: True
|
||||
2025-06-01 23:50:33 - [scheduler] scheduler - [INFO] INFO - Scheduler-Thread gestartet
|
||||
2025-06-01 23:50:33 - [scheduler] scheduler - [INFO] INFO - Scheduler gestartet
|
||||
2025-06-01 23:50:33 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 7: test
|
||||
2025-06-01 23:50:35 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2245B9450>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:35 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 7 nicht einschalten
|
||||
2025-06-01 23:50:35 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 8: test
|
||||
2025-06-01 23:50:37 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C22457F5C0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:37 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 8 nicht einschalten
|
||||
2025-06-01 23:50:37 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 1: test
|
||||
2025-06-01 23:50:40 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246E63F0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:40 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 1 nicht einschalten
|
||||
2025-06-01 23:50:40 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 2: test
|
||||
2025-06-01 23:50:42 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246F3530>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:42 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 2 nicht einschalten
|
||||
2025-06-01 23:50:42 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 3: test
|
||||
2025-06-01 23:50:44 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627790>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:44 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 3 nicht einschalten
|
||||
2025-06-01 23:50:44 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 4: test
|
||||
2025-06-01 23:50:46 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C225C04160>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:46 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 4 nicht einschalten
|
||||
2025-06-01 23:50:46 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 5: test
|
||||
2025-06-01 23:50:48 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626030>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:48 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 5 nicht einschalten
|
||||
2025-06-01 23:50:48 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 6: test
|
||||
2025-06-01 23:50:50 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627570>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:50 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 6 nicht einschalten
|
||||
2025-06-01 23:50:50 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 9: zi
|
||||
2025-06-01 23:50:52 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246247C0>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:52 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 9 nicht einschalten
|
||||
2025-06-01 23:50:52 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 10: zi
|
||||
2025-06-01 23:50:54 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627680>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:54 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 10 nicht einschalten
|
||||
2025-06-01 23:50:54 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 11: fee
|
||||
2025-06-01 23:50:56 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246278A0>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:56 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 11 nicht einschalten
|
||||
2025-06-01 23:50:56 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 12: fee
|
||||
2025-06-01 23:50:58 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626030>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:50:58 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 12 nicht einschalten
|
||||
2025-06-01 23:50:58 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 13: e2
|
||||
2025-06-01 23:51:01 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627460>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:01 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 13 nicht einschalten
|
||||
2025-06-01 23:51:01 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 14: e2
|
||||
2025-06-01 23:51:03 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246279B0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:03 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 14 nicht einschalten
|
||||
2025-06-01 23:51:03 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 15: test
|
||||
2025-06-01 23:51:05 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246278A0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:05 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 15 nicht einschalten
|
||||
2025-06-01 23:51:05 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 16: test
|
||||
2025-06-01 23:51:07 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627790>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:07 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 16 nicht einschalten
|
||||
2025-06-01 23:51:08 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 7: test
|
||||
2025-06-01 23:51:10 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627CE0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:10 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 7 nicht einschalten
|
||||
2025-06-01 23:51:10 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 8: test
|
||||
2025-06-01 23:51:12 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626F10>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:12 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 8 nicht einschalten
|
||||
2025-06-01 23:51:12 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 1: test
|
||||
2025-06-01 23:51:14 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627F00>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:14 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 1 nicht einschalten
|
||||
2025-06-01 23:51:14 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 2: test
|
||||
2025-06-01 23:51:16 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626250>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:16 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 2 nicht einschalten
|
||||
2025-06-01 23:51:16 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 3: test
|
||||
2025-06-01 23:51:18 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627680>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:18 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 3 nicht einschalten
|
||||
2025-06-01 23:51:18 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 4: test
|
||||
2025-06-01 23:51:20 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626360>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:20 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 4 nicht einschalten
|
||||
2025-06-01 23:51:20 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 5: test
|
||||
2025-06-01 23:51:22 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626030>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:22 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 5 nicht einschalten
|
||||
2025-06-01 23:51:22 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 6: test
|
||||
2025-06-01 23:51:25 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224625040>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:25 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 6 nicht einschalten
|
||||
2025-06-01 23:51:25 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 9: zi
|
||||
2025-06-01 23:51:27 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246259D0>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:27 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 9 nicht einschalten
|
||||
2025-06-01 23:51:27 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 10: zi
|
||||
2025-06-01 23:51:29 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224625D00>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:29 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 10 nicht einschalten
|
||||
2025-06-01 23:51:29 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 11: fee
|
||||
2025-06-01 23:51:31 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626AD0>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:31 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 11 nicht einschalten
|
||||
2025-06-01 23:51:31 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 12: fee
|
||||
2025-06-01 23:51:33 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.104: HTTPConnectionPool(host='192.168.0.104', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224625040>, 'Connection to 192.168.0.104 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:33 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 12 nicht einschalten
|
||||
2025-06-01 23:51:33 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 13: e2
|
||||
2025-06-01 23:51:35 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626030>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:35 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 13 nicht einschalten
|
||||
2025-06-01 23:51:35 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 14: e2
|
||||
2025-06-01 23:51:37 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626360>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:37 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 14 nicht einschalten
|
||||
2025-06-01 23:51:37 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 15: test
|
||||
2025-06-01 23:51:39 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627680>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:39 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 15 nicht einschalten
|
||||
2025-06-01 23:51:39 - [scheduler] scheduler - [INFO] INFO - ⚡ Starte Sofort-Job 16: test
|
||||
2025-06-01 23:51:41 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626250>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:41 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Sofort-Job 16 nicht einschalten
|
||||
2025-06-01 23:51:42 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 7: test
|
||||
2025-06-01 23:51:44 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224626580>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:44 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 7 nicht einschalten
|
||||
2025-06-01 23:51:44 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 8: test
|
||||
2025-06-01 23:51:47 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224627680>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:47 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 8 nicht einschalten
|
||||
2025-06-01 23:51:47 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 1: test
|
||||
2025-06-01 23:51:49 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224625AE0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:49 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 1 nicht einschalten
|
||||
2025-06-01 23:51:49 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 2: test
|
||||
2025-06-01 23:51:51 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246259D0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:51 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 2 nicht einschalten
|
||||
2025-06-01 23:51:51 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 3: test
|
||||
2025-06-01 23:51:53 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C2246268B0>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:53 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 3 nicht einschalten
|
||||
2025-06-01 23:51:53 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 4: test
|
||||
2025-06-01 23:51:55 - [scheduler] scheduler - [ERROR] ERROR - ❌ Fehler beim einschalten der Tapo-Steckdose 192.168.0.103: HTTPConnectionPool(host='192.168.0.103', port=80): Max retries exceeded with url: /app (Caused by ConnectTimeoutError(<urllib3.connection.HTTPConnection object at 0x000002C224625E10>, 'Connection to 192.168.0.103 timed out. (connect timeout=2)'))
|
||||
2025-06-01 23:51:55 - [scheduler] scheduler - [ERROR] ERROR - ❌ Konnte Steckdose für Job 4 nicht einschalten
|
||||
2025-06-01 23:51:55 - [scheduler] scheduler - [INFO] INFO - 🚀 Starte geplanten Job 5: test
|
||||
|
@ -94,3 +94,4 @@
|
||||
2025-06-01 23:42:52 - [security] security - [INFO] INFO - 🔒 Security System initialisiert
|
||||
2025-06-01 23:43:49 - [security] security - [INFO] INFO - 🔒 Security System initialisiert
|
||||
2025-06-01 23:47:40 - [security] security - [INFO] INFO - 🔒 Security System initialisiert
|
||||
2025-06-01 23:50:29 - [security] security - [INFO] INFO - 🔒 Security System initialisiert
|
||||
|
@ -180,3 +180,4 @@
|
||||
2025-06-01 23:42:52 - [shutdown_manager] shutdown_manager - [INFO] INFO - 🔧 Shutdown-Manager initialisiert
|
||||
2025-06-01 23:43:49 - [shutdown_manager] shutdown_manager - [INFO] INFO - 🔧 Shutdown-Manager initialisiert
|
||||
2025-06-01 23:47:40 - [shutdown_manager] shutdown_manager - [INFO] INFO - 🔧 Shutdown-Manager initialisiert
|
||||
2025-06-01 23:50:29 - [shutdown_manager] shutdown_manager - [INFO] INFO - 🔧 Shutdown-Manager initialisiert
|
||||
|
@ -854,3 +854,12 @@
|
||||
2025-06-01 23:47:40 - [startup] startup - [INFO] INFO - 🪟 Windows-Modus: Aktiviert
|
||||
2025-06-01 23:47:40 - [startup] startup - [INFO] INFO - 🔒 Windows-sichere Log-Rotation: Aktiviert
|
||||
2025-06-01 23:47:40 - [startup] startup - [INFO] INFO - ==================================================
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - ==================================================
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - 🚀 MYP Platform Backend wird gestartet...
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - 🐍 Python Version: 3.13.3 (tags/v3.13.3:6280bb5, Apr 8 2025, 14:47:33) [MSC v.1943 64 bit (AMD64)]
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - 💻 Betriebssystem: nt (win32)
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - 📁 Arbeitsverzeichnis: C:\Users\TTOMCZA.EMEA\Dev\Projektarbeit-MYP\backend
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - ⏰ Startzeit: 2025-06-01T23:50:29.083233
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - 🪟 Windows-Modus: Aktiviert
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - 🔒 Windows-sichere Log-Rotation: Aktiviert
|
||||
2025-06-01 23:50:29 - [startup] startup - [INFO] INFO - ==================================================
|
||||
|
@ -407,3 +407,7 @@
|
||||
2025-06-01 23:47:39 - [windows_fixes] windows_fixes - [INFO] INFO - ✅ Subprocess automatisch gepatcht für UTF-8 Encoding (run + Popen)
|
||||
2025-06-01 23:47:39 - [windows_fixes] windows_fixes - [INFO] INFO - ✅ Globaler subprocess-Patch angewendet
|
||||
2025-06-01 23:47:39 - [windows_fixes] windows_fixes - [INFO] INFO - ✅ Alle Windows-Fixes erfolgreich angewendet
|
||||
2025-06-01 23:50:28 - [windows_fixes] windows_fixes - [INFO] INFO - 🔧 Wende Windows-spezifische Fixes an...
|
||||
2025-06-01 23:50:28 - [windows_fixes] windows_fixes - [INFO] INFO - ✅ Subprocess automatisch gepatcht für UTF-8 Encoding (run + Popen)
|
||||
2025-06-01 23:50:28 - [windows_fixes] windows_fixes - [INFO] INFO - ✅ Globaler subprocess-Patch angewendet
|
||||
2025-06-01 23:50:28 - [windows_fixes] windows_fixes - [INFO] INFO - ✅ Alle Windows-Fixes erfolgreich angewendet
|
||||
|
File diff suppressed because it is too large
Load Diff
2
backend/static/css/tailwind.min.css
vendored
2
backend/static/css/tailwind.min.css
vendored
File diff suppressed because one or more lines are too long
@ -11,38 +11,184 @@ module.exports = {
|
||||
],
|
||||
darkMode: 'class',
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
// Minimale Farbpalette für Kiosk-Modus
|
||||
primary: '#0073ce',
|
||||
'primary-dark': '#005a9f',
|
||||
surface: '#ffffff',
|
||||
muted: '#6b7280'
|
||||
// Reduzierte Farbpalette
|
||||
colors: {
|
||||
transparent: 'transparent',
|
||||
current: 'currentColor',
|
||||
white: '#ffffff',
|
||||
black: '#000000',
|
||||
primary: '#0073ce',
|
||||
'primary-dark': '#005a9f',
|
||||
gray: {
|
||||
50: '#fafbfc',
|
||||
100: '#f3f5f7',
|
||||
200: '#e5e7eb',
|
||||
300: '#d1d5db',
|
||||
400: '#9ca3af',
|
||||
500: '#6b7280',
|
||||
600: '#4b5563',
|
||||
700: '#374151',
|
||||
800: '#1f2937',
|
||||
900: '#111827',
|
||||
},
|
||||
spacing: {
|
||||
'18': '4.5rem',
|
||||
'88': '22rem'
|
||||
blue: {
|
||||
500: '#0073ce',
|
||||
600: '#005a9f',
|
||||
},
|
||||
fontFamily: {
|
||||
'sans': ['system-ui', '-apple-system', 'sans-serif']
|
||||
green: {
|
||||
100: '#d1fae5',
|
||||
600: '#065f46',
|
||||
},
|
||||
backdropBlur: {
|
||||
xs: '2px'
|
||||
}
|
||||
red: {
|
||||
100: '#fee2e2',
|
||||
600: '#991b1b',
|
||||
},
|
||||
},
|
||||
|
||||
// Reduzierte Spacing-Skala
|
||||
spacing: {
|
||||
'0': '0',
|
||||
'1': '0.25rem',
|
||||
'2': '0.5rem',
|
||||
'3': '0.75rem',
|
||||
'4': '1rem',
|
||||
'5': '1.25rem',
|
||||
'6': '1.5rem',
|
||||
'8': '2rem',
|
||||
'10': '2.5rem',
|
||||
'12': '3rem',
|
||||
'16': '4rem',
|
||||
'20': '5rem',
|
||||
},
|
||||
|
||||
// Reduzierte Font-Größen
|
||||
fontSize: {
|
||||
'xs': '0.75rem',
|
||||
'sm': '0.875rem',
|
||||
'base': '1rem',
|
||||
'lg': '1.125rem',
|
||||
'xl': '1.25rem',
|
||||
'2xl': '1.5rem',
|
||||
'3xl': '1.875rem',
|
||||
},
|
||||
|
||||
// Minimale Border-Radius
|
||||
borderRadius: {
|
||||
'none': '0',
|
||||
'sm': '2px',
|
||||
'DEFAULT': '6px',
|
||||
'lg': '8px',
|
||||
'xl': '12px',
|
||||
'full': '9999px',
|
||||
},
|
||||
|
||||
// Reduzierte Schatten
|
||||
boxShadow: {
|
||||
'sm': '0 2px 4px rgba(0,0,0,0.05)',
|
||||
'DEFAULT': '0 2px 4px rgba(0,0,0,0.05)',
|
||||
'lg': '0 4px 8px rgba(0,0,0,0.1)',
|
||||
},
|
||||
|
||||
// Minimale Transitions
|
||||
transitionDuration: {
|
||||
'75': '75ms',
|
||||
'100': '100ms',
|
||||
'150': '150ms',
|
||||
'200': '200ms',
|
||||
},
|
||||
|
||||
extend: {}
|
||||
},
|
||||
|
||||
// Deaktivierte Utilities für bessere Performance
|
||||
corePlugins: {
|
||||
// Nicht benötigte Features deaktivieren
|
||||
animation: false, // Animationen werden manuell gemacht
|
||||
backdropBlur: false, // Nicht für Kiosk benötigt
|
||||
backdropBrightness: false,
|
||||
backdropContrast: false,
|
||||
backdropFilter: false,
|
||||
backdropGrayscale: false,
|
||||
backdropHueRotate: false,
|
||||
backdropInvert: false,
|
||||
backdropOpacity: false,
|
||||
backdropSaturate: false,
|
||||
backdropSepia: false,
|
||||
blur: false,
|
||||
brightness: false,
|
||||
contrast: false,
|
||||
dropShadow: false,
|
||||
filter: false,
|
||||
grayscale: false,
|
||||
hueRotate: false,
|
||||
invert: false,
|
||||
saturate: false,
|
||||
sepia: false,
|
||||
|
||||
// Touch-spezifische Features deaktivieren
|
||||
touchAction: false,
|
||||
|
||||
// Nicht benötigte Layout-Features
|
||||
aspectRatio: false,
|
||||
backdropFilter: false,
|
||||
|
||||
// Reduzierte Transform-Features
|
||||
scale: false,
|
||||
skew: false,
|
||||
transformOrigin: false,
|
||||
},
|
||||
|
||||
// Kiosk-spezifische Plugins deaktivieren
|
||||
plugins: [],
|
||||
|
||||
// Aggressive Purge-Konfiguration
|
||||
purge: {
|
||||
enabled: true,
|
||||
content: [
|
||||
'./templates/**/*.html',
|
||||
'./static/js/**/*.js',
|
||||
],
|
||||
// Aggressive Purging
|
||||
options: {
|
||||
safelist: [
|
||||
// Nur essenzielle Klassen behalten
|
||||
'container',
|
||||
'flex',
|
||||
'grid',
|
||||
'hidden',
|
||||
'block',
|
||||
'inline',
|
||||
'w-full',
|
||||
'h-full',
|
||||
'text-center',
|
||||
'font-bold',
|
||||
'text-primary',
|
||||
'bg-white',
|
||||
'border',
|
||||
'rounded',
|
||||
'p-4',
|
||||
'm-4',
|
||||
'btn',
|
||||
'card',
|
||||
'nav',
|
||||
'header',
|
||||
'status-online',
|
||||
'status-offline',
|
||||
'status-printing',
|
||||
],
|
||||
// Dynamische Klassen-Erkennung
|
||||
defaultExtractor: content => content.match(/[\w-/:]+(?<!:)/g) || [],
|
||||
}
|
||||
},
|
||||
plugins: [],
|
||||
// Aggressive Purging für minimale Bundle-Größe
|
||||
safelist: [
|
||||
'bg-white',
|
||||
'bg-gray-800',
|
||||
'text-gray-900',
|
||||
'text-white',
|
||||
'border-gray-200',
|
||||
'border-gray-600',
|
||||
'rounded-lg',
|
||||
'px-4',
|
||||
'py-2',
|
||||
'transition-colors'
|
||||
]
|
||||
|
||||
// Performance-Optimierungen
|
||||
future: {
|
||||
removeDeprecatedGapUtilities: true,
|
||||
purgeLayersByDefault: true,
|
||||
},
|
||||
|
||||
// Experimental Features für bessere Performance
|
||||
experimental: {
|
||||
optimizeUniversalDefaults: true,
|
||||
},
|
||||
}
|
382
docs/FRONTEND_INSTALLATION.md
Normal file
382
docs/FRONTEND_INSTALLATION.md
Normal file
@ -0,0 +1,382 @@
|
||||
# MYP Frontend - Installationsanleitung
|
||||
|
||||
## Übersicht
|
||||
|
||||
Das MYP Frontend ist eine Next.js-Anwendung, die mit Docker und Caddy als Reverse Proxy betrieben wird. Das konsolidierte Setup-Skript automatisiert die komplette Installation und Konfiguration.
|
||||
|
||||
## Systemanforderungen
|
||||
|
||||
- **Betriebssystem**: Debian/Raspbian (Raspberry Pi OS)
|
||||
- **Hardware**: Raspberry Pi 4 oder höher (empfohlen)
|
||||
- **RAM**: Mindestens 2GB
|
||||
- **Speicher**: Mindestens 8GB freier Speicherplatz
|
||||
- **Netzwerk**: Internetverbindung für Installation
|
||||
|
||||
## Schnellstart
|
||||
|
||||
### Automatische Installation
|
||||
|
||||
```bash
|
||||
# Als Root ausführen
|
||||
sudo bash frontend/setup.sh
|
||||
```
|
||||
|
||||
Das Skript bietet ein interaktives Menü mit folgenden Optionen:
|
||||
|
||||
1. **Vollständige Frontend-Installation** - Komplette Neuinstallation
|
||||
2. **SSL-Zertifikate neu generieren** - Erneuert nur die Zertifikate
|
||||
3. **Service-Status prüfen** - Diagnose und Monitoring
|
||||
4. **Beenden** - Script verlassen
|
||||
|
||||
## Detaillierte Installationsschritte
|
||||
|
||||
### 1. System-Vorbereitung
|
||||
|
||||
Das Skript führt automatisch folgende Schritte durch:
|
||||
|
||||
- **System-Update**: Paketlisten aktualisieren
|
||||
- **Grundtools installieren**: curl, wget, git, openssl, etc.
|
||||
- **Internetverbindung prüfen**: Verfügbarkeit der benötigten Ressourcen
|
||||
|
||||
### 2. Docker-Installation
|
||||
|
||||
- **Docker CE**: Container-Runtime
|
||||
- **Docker Compose**: Multi-Container-Orchestrierung
|
||||
- **Service-Aktivierung**: Automatischer Start beim Boot
|
||||
|
||||
### 3. SSL-Zertifikate (Mercedes)
|
||||
|
||||
Das Skript erstellt selbstsignierte SSL-Zertifikate mit folgenden Eigenschaften:
|
||||
|
||||
**Zertifikat-Details:**
|
||||
|
||||
- **Organisation**: Mercedes-Benz AG
|
||||
- **Abteilung**: IT-Abteilung
|
||||
- **Standort**: Stuttgart, Baden-Württemberg, Deutschland
|
||||
- **Primäre Domain**: `m040tbaraspi001.de040.corpintra.net`
|
||||
|
||||
**Subject Alternative Names (SAN):**
|
||||
|
||||
- `m040tbaraspi001.de040.corpintra.net`
|
||||
- `m040tbaraspi001`
|
||||
- `localhost`
|
||||
- `raspberrypi`
|
||||
- `127.0.0.1`
|
||||
- `192.168.0.109`
|
||||
|
||||
**Speicherorte:**
|
||||
|
||||
- Zertifikat: `/etc/ssl/certs/myp/frontend.crt`
|
||||
- Privater Schlüssel: `/etc/ssl/certs/myp/frontend.key`
|
||||
- System-CA-Store: `/usr/local/share/ca-certificates/`
|
||||
|
||||
### 4. Frontend-Deployment
|
||||
|
||||
- **Zielverzeichnis**: `/opt/myp-frontend`
|
||||
- **Dateisynchronisation**: Exclude node_modules, .git, ssl
|
||||
- **Berechtigungen**: Root-Ownership mit korrekten Permissions
|
||||
|
||||
### 5. Docker Compose Konfiguration
|
||||
|
||||
**Services:**
|
||||
|
||||
1. **frontend-app**
|
||||
|
||||
- Build: Next.js Application
|
||||
- Port: 3000 (intern)
|
||||
- Environment: Production
|
||||
- Health Check: `/health` Endpoint
|
||||
2. **caddy**
|
||||
|
||||
- Image: caddy:latest
|
||||
- Ports: 80 (HTTP), 443 (HTTPS)
|
||||
- SSL-Termination und Reverse Proxy
|
||||
- Security Headers und Compression
|
||||
3. **db**
|
||||
|
||||
- SQLite-Container mit persistenten Volumes
|
||||
- Datenverzeichnis: `/data`
|
||||
|
||||
**Volumes:**
|
||||
|
||||
- `caddy_data`: Caddy-Daten persistent
|
||||
- `caddy_config`: Caddy-Konfiguration
|
||||
- `db_data`: Datenbank-Dateien
|
||||
|
||||
### 6. Systemd-Service
|
||||
|
||||
**Service-Name**: `myp-frontend.service`
|
||||
|
||||
**Eigenschaften:**
|
||||
|
||||
- **Auto-Start**: Beim Boot aktiviert
|
||||
- **Dependencies**: Startet nach Docker
|
||||
- **Management**: Standard systemctl-Befehle
|
||||
- **Logging**: Journald-Integration
|
||||
- **Sicherheit**: Sandboxed mit eingeschränkten Berechtigungen
|
||||
|
||||
### 7. Firewall-Konfiguration
|
||||
|
||||
**UFW-Regeln:**
|
||||
|
||||
- **SSH**: Port 22 (Remote-Zugang)
|
||||
- **HTTP**: Port 80 (Redirect zu HTTPS)
|
||||
- **HTTPS**: Port 443 (Hauptzugang)
|
||||
- **Lokale Verbindungen**: 127.0.0.1, ::1
|
||||
- **Mercedes-Netzwerke**: 192.168.0.0/16, 10.0.0.0/8, 172.16.0.0/12
|
||||
|
||||
## Zugang zur Anwendung
|
||||
|
||||
Nach erfolgreicher Installation ist das Frontend verfügbar unter:
|
||||
|
||||
- **Primär**: https://m040tbaraspi001.de040.corpintra.net
|
||||
- **Lokal**: https://localhost
|
||||
- **IP-Direkt**: https://192.168.0.109
|
||||
|
||||
## Service-Management
|
||||
|
||||
### Status prüfen
|
||||
|
||||
```bash
|
||||
# Systemd Service Status
|
||||
sudo systemctl status myp-frontend
|
||||
|
||||
# Docker Container Status
|
||||
cd /opt/myp-frontend
|
||||
sudo docker compose ps
|
||||
|
||||
# Live-Logs anzeigen
|
||||
sudo docker compose logs -f
|
||||
```
|
||||
|
||||
### Service-Kontrolle
|
||||
|
||||
```bash
|
||||
# Service starten
|
||||
sudo systemctl start myp-frontend
|
||||
|
||||
# Service stoppen
|
||||
sudo systemctl stop myp-frontend
|
||||
|
||||
# Service neustarten
|
||||
sudo systemctl restart myp-frontend
|
||||
|
||||
# Service deaktivieren
|
||||
sudo systemctl disable myp-frontend
|
||||
```
|
||||
|
||||
### Container-Management
|
||||
|
||||
```bash
|
||||
# Alle Container neustarten
|
||||
cd /opt/myp-frontend
|
||||
sudo docker compose restart
|
||||
|
||||
# Einzelnen Container neustarten
|
||||
sudo docker compose restart caddy
|
||||
sudo docker compose restart frontend-app
|
||||
|
||||
# Container-Logs einzeln
|
||||
sudo docker compose logs caddy
|
||||
sudo docker compose logs frontend-app
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Häufige Probleme
|
||||
|
||||
#### 1. Frontend nicht erreichbar
|
||||
|
||||
**Diagnose:**
|
||||
|
||||
```bash
|
||||
# Service-Status prüfen
|
||||
sudo systemctl status myp-frontend
|
||||
|
||||
# Container-Status prüfen
|
||||
cd /opt/myp-frontend
|
||||
sudo docker compose ps
|
||||
|
||||
# Netzwerk-Test
|
||||
curl -k -I https://localhost/health
|
||||
```
|
||||
|
||||
**Lösungsansätze:**
|
||||
|
||||
- Container neu starten: `sudo docker compose restart`
|
||||
- Service neu starten: `sudo systemctl restart myp-frontend`
|
||||
- Logs prüfen: `sudo docker compose logs`
|
||||
|
||||
#### 2. SSL-Zertifikat-Probleme
|
||||
|
||||
**Diagnose:**
|
||||
|
||||
```bash
|
||||
# Zertifikat-Gültigkeit prüfen
|
||||
openssl x509 -in /etc/ssl/certs/myp/frontend.crt -noout -dates
|
||||
|
||||
# SSL-Verbindung testen
|
||||
openssl s_client -connect localhost:443 -servername m040tbaraspi001.de040.corpintra.net
|
||||
```
|
||||
|
||||
**Lösung:**
|
||||
|
||||
```bash
|
||||
# Zertifikate neu generieren
|
||||
sudo bash frontend/setup.sh
|
||||
# Dann Option 2 wählen (SSL-Zertifikate neu generieren)
|
||||
```
|
||||
|
||||
#### 3. Docker-Build Fehler
|
||||
|
||||
**Diagnose:**
|
||||
|
||||
```bash
|
||||
# Build-Logs prüfen
|
||||
cd /opt/myp-frontend
|
||||
sudo docker compose build --no-cache
|
||||
|
||||
# Speicherplatz prüfen
|
||||
df -h
|
||||
```
|
||||
|
||||
**Lösungsansätze:**
|
||||
|
||||
- Docker aufräumen: `sudo docker system prune -a`
|
||||
- Neuinstallation: Frontend-Verzeichnis löschen und setup.sh erneut ausführen
|
||||
|
||||
#### 4. Port-Konflikte
|
||||
|
||||
**Diagnose:**
|
||||
|
||||
```bash
|
||||
# Port-Belegung prüfen
|
||||
sudo netstat -tlnp | grep -E ':(80|443)'
|
||||
sudo ss -tlnp | grep -E ':(80|443)'
|
||||
```
|
||||
|
||||
**Lösung:**
|
||||
|
||||
- Konflikt-Services stoppen
|
||||
- UFW-Regeln prüfen: `sudo ufw status`
|
||||
|
||||
### Log-Dateien
|
||||
|
||||
**Installation:**
|
||||
|
||||
- `/var/log/myp-frontend-install.log`
|
||||
|
||||
**Caddy:**
|
||||
|
||||
- `/var/log/caddy/access.log`
|
||||
- `/var/log/caddy/error.log`
|
||||
|
||||
**Docker:**
|
||||
|
||||
```bash
|
||||
# Container-Logs
|
||||
sudo docker compose logs frontend-app
|
||||
sudo docker compose logs caddy
|
||||
|
||||
# Systemd-Journal
|
||||
sudo journalctl -u myp-frontend -f
|
||||
```
|
||||
|
||||
## Sicherheitshinweise
|
||||
|
||||
### SSL/TLS
|
||||
|
||||
- Selbstsignierte Zertifikate erfordern Ausnahmen im Browser
|
||||
- Für Produktion sollten CA-signierte Zertifikate verwendet werden
|
||||
- Automatische HTTP-zu-HTTPS-Weiterleitung ist aktiviert
|
||||
|
||||
### Firewall
|
||||
|
||||
- Restriktive Einstellungen - nur notwendige Ports geöffnet
|
||||
- Mercedes-interne Netzwerke sind zugelassen
|
||||
- Externe Zugriffe werden standardmäßig blockiert
|
||||
|
||||
### Container-Sicherheit
|
||||
|
||||
- Container laufen mit minimalen Berechtigungen
|
||||
- Sensitive Dateien sind read-only gemountet
|
||||
- Netzwerk-Isolation zwischen Containern
|
||||
|
||||
## Backup und Wartung
|
||||
|
||||
### Backup wichtiger Dateien
|
||||
|
||||
```bash
|
||||
# SSL-Zertifikate
|
||||
sudo cp -r /etc/ssl/certs/myp /backup/ssl-certificates-$(date +%Y%m%d)
|
||||
|
||||
# Anwendungsdaten
|
||||
sudo cp -r /opt/myp-frontend /backup/frontend-$(date +%Y%m%d)
|
||||
|
||||
# Docker Volumes
|
||||
sudo docker run --rm -v myp-frontend_db_data:/data -v /backup:/backup alpine \
|
||||
tar czf /backup/db-data-$(date +%Y%m%d).tar.gz -C /data .
|
||||
```
|
||||
|
||||
### Regelmäßige Wartung
|
||||
|
||||
```bash
|
||||
# Docker-System aufräumen (monatlich)
|
||||
sudo docker system prune -f
|
||||
|
||||
# Log-Dateien rotieren (wöchentlich)
|
||||
sudo logrotate -f /etc/logrotate.conf
|
||||
|
||||
# SSL-Zertifikat-Ablauf prüfen (monatlich)
|
||||
openssl x509 -in /etc/ssl/certs/myp/frontend.crt -noout -dates
|
||||
```
|
||||
|
||||
## Performance-Optimierung
|
||||
|
||||
### Raspberry Pi spezifisch
|
||||
|
||||
- **Memory Swappiness**: Reduziert auf 10
|
||||
- **I/O Scheduler**: Optimiert für SD-Karten
|
||||
- **CPU Scheduling**: Verbesserte Interaktivität
|
||||
- **Caching**: Effiziente Nutzung des verfügbaren RAMs
|
||||
|
||||
### Container-Optimierung
|
||||
|
||||
- **Multi-Stage Builds**: Reduzierte Image-Größen
|
||||
- **Health Checks**: Automatische Fehlerbehandlung
|
||||
- **Resource Limits**: Verhindert Memory-Leaks
|
||||
- **Restart Policies**: Automatische Wiederherstellung
|
||||
|
||||
## Support und Weiterentwicklung
|
||||
|
||||
### Konfiguration anpassen
|
||||
|
||||
- **Caddy**: `frontend/docker/caddy/Caddyfile` bearbeiten
|
||||
- **Docker Compose**: `docker-compose.yml` in `/opt/myp-frontend`
|
||||
- **SSL**: Setup-Skript mit Option 2 für neue Zertifikate
|
||||
|
||||
### Monitoring
|
||||
|
||||
- **Health Endpoints**: `/health` für Verfügbarkeitsprüfung
|
||||
- **Logs**: Strukturierte JSON-Logs von Caddy
|
||||
- **Metrics**: Docker-Container-Statistiken
|
||||
|
||||
### Updates
|
||||
|
||||
```bash
|
||||
# Frontend-Code aktualisieren
|
||||
cd /opt/myp-frontend
|
||||
sudo git pull origin main
|
||||
sudo docker compose build --no-cache
|
||||
sudo docker compose up -d
|
||||
|
||||
# System-Updates
|
||||
sudo apt update && sudo apt upgrade -y
|
||||
sudo reboot
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**Version**: 1.0.0
|
||||
**Letzte Aktualisierung**: $(date '+%Y-%m-%d')
|
||||
**Autor**: MYP Development Team
|
296
docs/FRONTEND_SETUP_CHANGELOG.md
Normal file
296
docs/FRONTEND_SETUP_CHANGELOG.md
Normal file
@ -0,0 +1,296 @@
|
||||
# Frontend Setup - Changelog und Verbesserungen
|
||||
|
||||
## Übersicht der Änderungen
|
||||
|
||||
**Datum**: 2025-01-06
|
||||
**Bearbeiter**: Claude AI Assistant
|
||||
**Auftrag**: Konsolidierung der Frontend-Installation mit Mercedes SSL-Zertifikaten
|
||||
|
||||
## ✅ Durchgeführte Änderungen
|
||||
|
||||
### 1. Bereinigung unnötiger Skripte
|
||||
|
||||
#### Gelöschte Dateien:
|
||||
|
||||
- `frontend/docker/legacy_deploy.sh` - Veraltetes Deployment-Skript
|
||||
|
||||
#### Begründung:
|
||||
|
||||
- Das legacy Skript war nicht mehr zeitgemäß
|
||||
- Funktionalität wird durch das neue konsolidierte Setup-Skript ersetzt
|
||||
- Reduziert Verwirrung und Wartungsaufwand
|
||||
|
||||
### 2. Neues konsolidiertes Setup-Skript
|
||||
|
||||
#### Erstellt: `frontend/setup.sh`
|
||||
|
||||
- **Größe**: ~700 Zeilen umfassendes Bash-Skript
|
||||
- **Struktur**: Nach Vorbild des Backend `setup.sh` erstellt
|
||||
- **Funktionalität**: Vollständige Frontend-Installation und -Konfiguration
|
||||
|
||||
#### Kernfunktionen:
|
||||
|
||||
1. **Systemvalidierung**
|
||||
|
||||
- Root-Berechtigung prüfen
|
||||
- Debian/Raspbian-System erkennen
|
||||
- Internetverbindung testen
|
||||
2. **Docker-Installation**
|
||||
|
||||
- Docker CE automatisch installieren
|
||||
- Docker Compose Plugin einrichten
|
||||
- Service-Aktivierung für Boot-Start
|
||||
3. **Mercedes SSL-Zertifikate**
|
||||
|
||||
- Selbstsignierte Zertifikate generieren
|
||||
- Subject Alternative Names (SAN) konfigurieren
|
||||
- System-CA-Store Integration
|
||||
4. **Frontend-Deployment**
|
||||
|
||||
- Anwendung nach `/opt/myp-frontend` kopieren
|
||||
- Docker Compose Konfiguration erstellen
|
||||
- Systemd Service einrichten
|
||||
5. **Systemintegration**
|
||||
|
||||
- UFW-Firewall konfigurieren
|
||||
- Automatischer Boot-Start
|
||||
- Umfassende Tests und Monitoring
|
||||
|
||||
### 3. Mercedes SSL-Zertifikat Konfiguration
|
||||
|
||||
#### Zertifikat-Details:
|
||||
|
||||
```
|
||||
Organisation: Mercedes-Benz AG
|
||||
Abteilung: IT-Abteilung
|
||||
Standort: Stuttgart, Baden-Württemberg, Deutschland
|
||||
Land: DE
|
||||
Primäre Domain: m040tbaraspi001.de040.corpintra.net
|
||||
```
|
||||
|
||||
#### Subject Alternative Names (SAN):
|
||||
|
||||
- `m040tbaraspi001.de040.corpintra.net`
|
||||
- `m040tbaraspi001`
|
||||
- `localhost`
|
||||
- `raspberrypi`
|
||||
- `127.0.0.1`
|
||||
- `192.168.0.109`
|
||||
|
||||
#### Speicherorte:
|
||||
|
||||
- Zertifikat: `/etc/ssl/certs/myp/frontend.crt`
|
||||
- Privater Schlüssel: `/etc/ssl/certs/myp/frontend.key`
|
||||
- OpenSSL Konfiguration: `/etc/ssl/certs/myp/openssl.conf`
|
||||
|
||||
### 4. Docker Compose Konfiguration
|
||||
|
||||
#### Erstellt automatisch: `docker-compose.yml`
|
||||
|
||||
```yaml
|
||||
services:
|
||||
frontend-app: # Next.js Application (Port 3000 intern)
|
||||
caddy: # Reverse Proxy (Port 80/443)
|
||||
db: # SQLite Container mit persistenten Volumes
|
||||
```
|
||||
|
||||
#### Volumes:
|
||||
|
||||
- `caddy_data`: Caddy-Daten persistent
|
||||
- `caddy_config`: Caddy-Konfiguration
|
||||
- `db_data`: Datenbank-Dateien
|
||||
|
||||
### 5. Systemd Service Integration
|
||||
|
||||
#### Service-Name: `myp-frontend.service`
|
||||
|
||||
- **Auto-Start**: Beim Boot aktiviert
|
||||
- **Dependencies**: Startet nach Docker-Service
|
||||
- **Working Directory**: `/opt/myp-frontend`
|
||||
- **Restart Policy**: Automatische Wiederherstellung bei Fehlern
|
||||
- **Sicherheit**: Sandboxed mit eingeschränkten Berechtigungen
|
||||
|
||||
### 6. Firewall-Konfiguration (UFW)
|
||||
|
||||
#### Erlaubte Verbindungen:
|
||||
|
||||
- **SSH**: Port 22 (Remote-Zugang)
|
||||
- **HTTP**: Port 80 (Redirect zu HTTPS)
|
||||
- **HTTPS**: Port 443 (Hauptzugang)
|
||||
- **Lokale Verbindungen**: 127.0.0.1, ::1
|
||||
- **Mercedes-Netzwerke**: 192.168.0.0/16, 10.0.0.0/8, 172.16.0.0/12
|
||||
|
||||
### 7. Interaktives Setup-Menü
|
||||
|
||||
#### Optionen:
|
||||
|
||||
1. **Vollständige Frontend-Installation**
|
||||
|
||||
- Docker, SSL-Zertifikate, Caddy, Services
|
||||
- Komplette Neuinstallation für Produktionsumgebung
|
||||
2. **SSL-Zertifikate neu generieren**
|
||||
|
||||
- Erneuert nur die Zertifikate
|
||||
- Services-Neustart inklusive
|
||||
3. **Service-Status prüfen**
|
||||
|
||||
- Systemd Service Status
|
||||
- Docker Container Gesundheit
|
||||
- Netzwerk-Tests und SSL-Validierung
|
||||
4. **Beenden**
|
||||
|
||||
- Skript verlassen
|
||||
|
||||
### 8. Umfassende Dokumentation
|
||||
|
||||
#### Erstellt: `docs/FRONTEND_INSTALLATION.md`
|
||||
|
||||
- **Umfang**: ~300 Zeilen detaillierte Dokumentation
|
||||
- **Inhalt**:
|
||||
- Schnellstart-Anleitung
|
||||
- Detaillierte Installationsschritte
|
||||
- Service-Management
|
||||
- Troubleshooting-Guide
|
||||
- Backup und Wartung
|
||||
- Performance-Optimierung
|
||||
|
||||
#### Aktualisiert: `README.md`
|
||||
|
||||
- Neue Schnellstart-Sektion mit automatischer Installation
|
||||
- Frontend-Setup-Skript Features dokumentiert
|
||||
- SSL-Zertifikat-Informationen hinzugefügt
|
||||
- Deployment-Szenarien erweitert
|
||||
|
||||
## 🔄 Migrationshinweise
|
||||
|
||||
### Von altem System zu neuem Setup
|
||||
|
||||
#### Alte Methode (Manuell):
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
pnpm install
|
||||
pnpm build
|
||||
pnpm start # Port 3000
|
||||
```
|
||||
|
||||
#### Neue Methode (Automatisch):
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
sudo ./setup.sh # Option 1 wählen
|
||||
# Verfügbar unter https://m040tbaraspi001.de040.corpintra.net
|
||||
```
|
||||
|
||||
#### Vorteile der neuen Methode:
|
||||
|
||||
- **Produktions-ready**: HTTPS auf Port 443 statt HTTP auf Port 3000
|
||||
- **Automatisiert**: Komplette Installation ohne manuelle Schritte
|
||||
- **Sicher**: SSL-verschlüsselt mit Mercedes-Zertifikaten
|
||||
- **Wartungsfreundlich**: Systemd-Integration für Service-Management
|
||||
- **Skalierbar**: Docker-basiert für bessere Ressourcenverwaltung
|
||||
|
||||
## 🧪 Tests und Validierung
|
||||
|
||||
### Automatische Tests im Setup-Skript:
|
||||
|
||||
1. **System-Validierung**
|
||||
|
||||
- Root-Berechtigung
|
||||
- Debian/Raspbian-Erkennung
|
||||
- Internetverbindung
|
||||
2. **Installation-Verification**
|
||||
|
||||
- Docker-Service Status
|
||||
- SSL-Zertifikat Gültigkeit
|
||||
- Container-Gesundheit
|
||||
3. **Netzwerk-Tests**
|
||||
|
||||
- HTTPS-Verbindung zu Domain
|
||||
- localhost-Fallback
|
||||
- Health-Endpoint Verfügbarkeit
|
||||
4. **Service-Integration**
|
||||
|
||||
- Systemd Service Status
|
||||
- Automatischer Boot-Start
|
||||
- Log-Dateien Zugriff
|
||||
|
||||
## 📊 Performance-Verbesserungen
|
||||
|
||||
### Container-Optimierungen:
|
||||
|
||||
- **Multi-Stage Builds**: Reduzierte Image-Größen
|
||||
- **Health Checks**: Automatische Fehlerbehandlung (30s Intervall)
|
||||
- **Resource Limits**: Verhindert Memory-Leaks
|
||||
- **Restart Policies**: Automatische Wiederherstellung
|
||||
|
||||
### Raspberry Pi spezifisch:
|
||||
|
||||
- **Memory Management**: Swappiness reduziert auf 10
|
||||
- **I/O Scheduler**: Optimiert für SD-Karten
|
||||
- **CPU Scheduling**: Verbesserte Interaktivität für Touch-Interface
|
||||
- **Caching**: Effiziente RAM-Nutzung
|
||||
|
||||
## 🔐 Sicherheitsverbesserungen
|
||||
|
||||
### SSL/TLS:
|
||||
|
||||
- **4096-bit RSA**: Stärkere Verschlüsselung als Standard 2048-bit
|
||||
- **Subject Alternative Names**: Unterstützt mehrere Domain-Namen
|
||||
- **System-CA-Integration**: Automatische Vertrauensstellung
|
||||
- **HTTP-zu-HTTPS-Redirect**: Automatische Sicherheitsweiterleitung
|
||||
|
||||
### Container-Sicherheit:
|
||||
|
||||
- **Sandboxed Execution**: NoNewPrivileges, PrivateTmp
|
||||
- **Read-Only Mounts**: SSL-Zertifikate schreibgeschützt
|
||||
- **Network Isolation**: Container-zu-Container Kommunikation begrenzt
|
||||
- **Privilege Restriction**: Minimale erforderliche Berechtigungen
|
||||
|
||||
### Firewall:
|
||||
|
||||
- **Restriktive Standard-Policy**: Deny incoming, allow outgoing
|
||||
- **Whitelist-Ansatz**: Nur explizit erlaubte Verbindungen
|
||||
- **Netzwerk-Segmentierung**: Mercedes-interne Bereiche definiert
|
||||
|
||||
## 🚀 Nächste Schritte
|
||||
|
||||
### Geplante Erweiterungen:
|
||||
|
||||
1. **Monitoring-Dashboard**: Integration von Prometheus/Grafana für Container-Metriken
|
||||
2. **Automatische Updates**: Rolling-Updates für Container ohne Downtime
|
||||
3. **Backup-Automation**: Regelmäßige Sicherung von Datenbank und Konfiguration
|
||||
4. **Load Balancing**: Horizontal-Skalierung bei höherer Last
|
||||
|
||||
### Wartungsaufgaben:
|
||||
|
||||
1. **Zertifikat-Rotation**: Automatische Erneuerung vor Ablauf
|
||||
2. **Log-Rotation**: Automatisches Archivieren großer Log-Dateien
|
||||
3. **Security Updates**: Regelmäßige Container-Image Updates
|
||||
4. **Performance-Monitoring**: Überwachung von Ressourcenverbrauch
|
||||
|
||||
## 📝 Fazit
|
||||
|
||||
Die Konsolidierung des Frontend-Setups bietet erhebliche Verbesserungen:
|
||||
|
||||
### ✅ Erreichte Ziele:
|
||||
|
||||
- **Ein einziges Setup-Skript**: Ersetzt alle manuellen Installationsschritte
|
||||
- **Produktions-ready**: HTTPS unter korrekter Mercedes-Domain verfügbar
|
||||
- **Automatisiert**: Minimaler manueller Aufwand für Deployment
|
||||
- **Dokumentiert**: Umfassende Anleitung für alle Szenarien
|
||||
- **Sicher**: SSL-verschlüsselt mit Mercedes-konformen Zertifikaten
|
||||
|
||||
### 🎯 Bewertung:
|
||||
|
||||
- **Wartbarkeit**: Deutlich verbessert durch Systemd-Integration
|
||||
- **Skalierbarkeit**: Docker-basiert für flexible Ressourcenverteilung
|
||||
- **Sicherheit**: Produktions-konforme SSL-Verschlüsselung
|
||||
- **Benutzerfreundlichkeit**: Interaktives Menü für alle Administrationsaufgaben
|
||||
|
||||
---
|
||||
|
||||
**Erstellt**: 2025-01-06
|
||||
**Autor**: Claude AI Assistant
|
||||
**Version**: 1.0.0
|
||||
**Zweck**: Dokumentation der Frontend-Setup-Konsolidierung für MYP-Projekt
|
@ -1 +1,298 @@
|
||||
|
||||
# Verbindungstest für Frontend-Backend Integration
|
||||
|
||||
## Übersicht
|
||||
|
||||
Anleitung zum Testen der Verbindung zwischen Frontend und Backend auf dem separaten Server `https://192.168.0.105:443`.
|
||||
|
||||
## 🔍 Schnelltest
|
||||
|
||||
### 1. Backend-Erreichbarkeit prüfen
|
||||
|
||||
```bash
|
||||
# Basis-Verbindung testen
|
||||
ping 192.168.0.105
|
||||
|
||||
# HTTPS-Port testen
|
||||
telnet 192.168.0.105 443
|
||||
|
||||
# SSL-Verbindung testen (ignoriert Zertifikatsfehler)
|
||||
curl -k https://192.168.0.105:443/health
|
||||
```
|
||||
|
||||
### 2. API-Endpunkte testen
|
||||
|
||||
```bash
|
||||
# Health Check
|
||||
curl -k https://192.168.0.105:443/health
|
||||
|
||||
# Drucker abrufen
|
||||
curl -k https://192.168.0.105:443/api/printers
|
||||
|
||||
# Jobs abrufen
|
||||
curl -k https://192.168.0.105:443/api/jobs
|
||||
|
||||
# Mit formatierter Ausgabe
|
||||
curl -k https://192.168.0.105:443/api/printers | jq .
|
||||
```
|
||||
|
||||
### 3. Frontend-Verbindung testen
|
||||
|
||||
```bash
|
||||
# Frontend starten
|
||||
cd frontend
|
||||
pnpm dev
|
||||
|
||||
# In Browser öffnen
|
||||
http://localhost:3000
|
||||
|
||||
# Browser-Konsole prüfen (F12)
|
||||
fetch('/api/health').then(r => r.json()).then(console.log)
|
||||
```
|
||||
|
||||
## 🔧 Fehlerbehebung
|
||||
|
||||
### SSL-Zertifikat-Probleme
|
||||
|
||||
**Problem**: `SSL_ERROR_SELF_SIGNED_CERT`
|
||||
|
||||
**Lösung**:
|
||||
1. Backend direkt im Browser öffnen: `https://192.168.0.105:443`
|
||||
2. Zertifikat manuell akzeptieren
|
||||
3. "Erweitert" → "Weiter zu 192.168.0.105"
|
||||
|
||||
**Alternative**:
|
||||
```bash
|
||||
# curl mit ignoriertem SSL
|
||||
curl -k https://192.168.0.105:443/health
|
||||
```
|
||||
|
||||
### Netzwerk-Verbindungsprobleme
|
||||
|
||||
**Problem**: `Connection refused` oder `Network unreachable`
|
||||
|
||||
**Diagnose**:
|
||||
```bash
|
||||
# 1. IP-Erreichbarkeit prüfen
|
||||
ping 192.168.0.105
|
||||
|
||||
# 2. Port-Verfügbarkeit prüfen
|
||||
nmap -p 443 192.168.0.105
|
||||
|
||||
# 3. Firewall-Status prüfen (auf Backend-Server)
|
||||
sudo ufw status
|
||||
```
|
||||
|
||||
**Lösung**:
|
||||
```bash
|
||||
# Auf Backend-Server (192.168.0.105):
|
||||
sudo ufw allow 443/tcp
|
||||
sudo ufw allow from 192.168.0.0/24 to any port 443
|
||||
```
|
||||
|
||||
### CORS-Probleme
|
||||
|
||||
**Problem**: `Access-Control-Allow-Origin` Fehler
|
||||
|
||||
**Diagnose**:
|
||||
```bash
|
||||
# CORS-Header prüfen
|
||||
curl -k -H "Origin: http://localhost:3000" \
|
||||
-H "Access-Control-Request-Method: GET" \
|
||||
-H "Access-Control-Request-Headers: Content-Type" \
|
||||
-X OPTIONS \
|
||||
https://192.168.0.105:443/api/printers
|
||||
```
|
||||
|
||||
**Lösung**: Backend-CORS-Konfiguration prüfen
|
||||
```python
|
||||
# Auf Backend-Server: app.py
|
||||
from flask_cors import CORS
|
||||
|
||||
app = Flask(__name__)
|
||||
CORS(app, origins=['http://localhost:3000', 'http://192.168.0.*'])
|
||||
```
|
||||
|
||||
### Backend-Service-Probleme
|
||||
|
||||
**Problem**: Backend antwortet nicht
|
||||
|
||||
**Diagnose**:
|
||||
```bash
|
||||
# Auf Backend-Server (192.168.0.105):
|
||||
sudo systemctl status myp-backend
|
||||
sudo journalctl -u myp-backend -f
|
||||
```
|
||||
|
||||
**Lösung**:
|
||||
```bash
|
||||
# Service neu starten
|
||||
sudo systemctl restart myp-backend
|
||||
|
||||
# Oder manuell starten
|
||||
cd /path/to/backend
|
||||
python app.py --host 0.0.0.0 --port 443
|
||||
```
|
||||
|
||||
## 📊 Monitoring
|
||||
|
||||
### Real-time Verbindungsüberwachung
|
||||
|
||||
```bash
|
||||
# Kontinuierlicher Health Check
|
||||
watch -n 5 'curl -k -s https://192.168.0.105:443/health | jq .'
|
||||
|
||||
# Netzwerk-Latenz überwachen
|
||||
ping -c 10 192.168.0.105
|
||||
|
||||
# Port-Monitoring
|
||||
nmap -p 443 192.168.0.105
|
||||
```
|
||||
|
||||
### Frontend-Logs überwachen
|
||||
|
||||
```bash
|
||||
# Frontend-Logs in Echtzeit
|
||||
cd frontend
|
||||
pnpm dev
|
||||
|
||||
# Browser-Konsole (F12) überwachen:
|
||||
console.log('Backend URL:', API_BASE_URL);
|
||||
```
|
||||
|
||||
### Backend-Logs überwachen
|
||||
|
||||
```bash
|
||||
# Auf Backend-Server:
|
||||
tail -f /var/log/myp-backend.log
|
||||
sudo journalctl -u myp-backend -f
|
||||
```
|
||||
|
||||
## 🧪 Automatisierte Tests
|
||||
|
||||
### Frontend-Test-Script erstellen
|
||||
|
||||
```javascript
|
||||
// test-backend-connection.js
|
||||
const API_BASE_URL = 'https://192.168.0.105:443';
|
||||
|
||||
async function testConnection() {
|
||||
try {
|
||||
// Health Check
|
||||
const health = await fetch(`${API_BASE_URL}/health`, {
|
||||
method: 'GET',
|
||||
});
|
||||
console.log('✅ Health Check:', await health.json());
|
||||
|
||||
// Printers API
|
||||
const printers = await fetch(`${API_BASE_URL}/api/printers`);
|
||||
console.log('✅ Printers API:', await printers.json());
|
||||
|
||||
// Jobs API
|
||||
const jobs = await fetch(`${API_BASE_URL}/api/jobs`);
|
||||
console.log('✅ Jobs API:', await jobs.json());
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Verbindungsfehler:', error);
|
||||
}
|
||||
}
|
||||
|
||||
testConnection();
|
||||
```
|
||||
|
||||
```bash
|
||||
# Test ausführen
|
||||
node test-backend-connection.js
|
||||
```
|
||||
|
||||
### Backend-Test-Script
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# test-backend.sh
|
||||
|
||||
echo "🔍 Backend-Verbindungstest"
|
||||
echo "=========================="
|
||||
|
||||
# 1. Ping-Test
|
||||
echo "1. Ping-Test..."
|
||||
if ping -c 1 192.168.0.105 > /dev/null; then
|
||||
echo "✅ Server erreichbar"
|
||||
else
|
||||
echo "❌ Server nicht erreichbar"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 2. Port-Test
|
||||
echo "2. Port 443 Test..."
|
||||
if nc -z 192.168.0.105 443; then
|
||||
echo "✅ Port 443 offen"
|
||||
else
|
||||
echo "❌ Port 443 nicht erreichbar"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 3. SSL-Test
|
||||
echo "3. SSL-Verbindungstest..."
|
||||
if curl -k -s https://192.168.0.105:443/health > /dev/null; then
|
||||
echo "✅ SSL-Verbindung erfolgreich"
|
||||
else
|
||||
echo "❌ SSL-Verbindung fehlgeschlagen"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 4. API-Tests
|
||||
echo "4. API-Endpunkt-Tests..."
|
||||
if curl -k -s https://192.168.0.105:443/api/printers > /dev/null; then
|
||||
echo "✅ Drucker-API verfügbar"
|
||||
else
|
||||
echo "❌ Drucker-API nicht verfügbar"
|
||||
fi
|
||||
|
||||
if curl -k -s https://192.168.0.105:443/api/jobs > /dev/null; then
|
||||
echo "✅ Jobs-API verfügbar"
|
||||
else
|
||||
echo "❌ Jobs-API nicht verfügbar"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "🎉 Verbindungstest abgeschlossen!"
|
||||
```
|
||||
|
||||
```bash
|
||||
# Test ausführen
|
||||
chmod +x test-backend.sh
|
||||
./test-backend.sh
|
||||
```
|
||||
|
||||
## 📋 Checkliste
|
||||
|
||||
### Vor dem ersten Start
|
||||
|
||||
- [ ] Backend-Server unter `192.168.0.105` läuft
|
||||
- [ ] Port 443 ist geöffnet und erreichbar
|
||||
- [ ] SSL-Zertifikat ist konfiguriert (selbstsigniert OK)
|
||||
- [ ] CORS ist für Frontend-Domain konfiguriert
|
||||
- [ ] API-Endpunkte sind verfügbar
|
||||
|
||||
### Bei Problemen prüfen
|
||||
|
||||
- [ ] Netzwerk-Konnektivität (`ping 192.168.0.105`)
|
||||
- [ ] Port-Verfügbarkeit (`telnet 192.168.0.105 443`)
|
||||
- [ ] SSL-Zertifikat manuell akzeptiert
|
||||
- [ ] Backend-Service läuft (`systemctl status myp-backend`)
|
||||
- [ ] Firewall-Regeln korrekt konfiguriert
|
||||
- [ ] Frontend-Konfiguration korrekt (`API_BASE_URL`)
|
||||
|
||||
### Bei erfolgreicher Verbindung
|
||||
|
||||
- [ ] Health Check gibt Status zurück
|
||||
- [ ] Drucker-API gibt Daten zurück
|
||||
- [ ] Jobs-API ist funktional
|
||||
- [ ] Frontend kann Backend-Daten anzeigen
|
||||
- [ ] Keine CORS-Fehler in Browser-Konsole
|
||||
|
||||
---
|
||||
|
||||
**Ziel**: Nahtlose Kommunikation zwischen Frontend (`localhost:3000`) und Backend (`https://192.168.0.105:443`)
|
||||
**Status**: Bereit für Integration
|
||||
**Support**: Siehe Browser-Konsole und Backend-Logs bei Problemen
|
695
frontend/setup.sh
Normal file
695
frontend/setup.sh
Normal file
@ -0,0 +1,695 @@
|
||||
#!/bin/bash
|
||||
|
||||
# ===================================================================
|
||||
# MYP Frontend - KONSOLIDIERTES SETUP-SKRIPT
|
||||
# Automatische Installation und Konfiguration des Frontend-Servers
|
||||
# Optimiert für Debian/Linux mit Docker und Caddy Reverse Proxy
|
||||
# HTTPS mit Mercedes SSL-Zertifikaten auf m040tbaraspi001.de040.corpintra.net
|
||||
# Version: 1.0.0
|
||||
# ===================================================================
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# =========================== GLOBALE KONFIGURATION ===========================
|
||||
readonly APP_NAME="MYP Frontend"
|
||||
readonly APP_VERSION="1.0.0"
|
||||
readonly FRONTEND_DIR="/opt/myp-frontend"
|
||||
readonly SSL_DIR="/etc/ssl/certs/myp"
|
||||
readonly DOCKER_COMPOSE_SERVICE="myp-frontend"
|
||||
readonly DOMAIN="m040tbaraspi001.de040.corpintra.net"
|
||||
readonly CURRENT_DIR="$(pwd)"
|
||||
readonly INSTALL_LOG="/var/log/myp-frontend-install.log"
|
||||
readonly CADDY_LOG_DIR="/var/log/caddy"
|
||||
|
||||
# Farben für Ausgabe
|
||||
readonly RED='\033[0;31m'
|
||||
readonly GREEN='\033[0;32m'
|
||||
readonly YELLOW='\033[1;33m'
|
||||
readonly BLUE='\033[0;34m'
|
||||
readonly PURPLE='\033[0;35m'
|
||||
readonly CYAN='\033[0;36m'
|
||||
readonly NC='\033[0m'
|
||||
|
||||
# =========================== LOGGING-FUNKTIONEN ===========================
|
||||
log() {
|
||||
echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')] $1${NC}" | tee -a "$INSTALL_LOG"
|
||||
}
|
||||
|
||||
error() {
|
||||
echo -e "${RED}[FEHLER] $1${NC}" | tee -a "$INSTALL_LOG"
|
||||
exit 1
|
||||
}
|
||||
|
||||
warning() {
|
||||
echo -e "${YELLOW}[WARNUNG] $1${NC}" | tee -a "$INSTALL_LOG"
|
||||
}
|
||||
|
||||
info() {
|
||||
echo -e "${BLUE}[INFO] $1${NC}" | tee -a "$INSTALL_LOG"
|
||||
}
|
||||
|
||||
progress() {
|
||||
echo -e "${PURPLE}[FORTSCHRITT] $1${NC}" | tee -a "$INSTALL_LOG"
|
||||
}
|
||||
|
||||
success() {
|
||||
echo -e "${CYAN}[ERFOLG] $1${NC}" | tee -a "$INSTALL_LOG"
|
||||
}
|
||||
|
||||
# =========================== SYSTEM-VALIDIERUNG ===========================
|
||||
check_root() {
|
||||
if [ "$EUID" -ne 0 ]; then
|
||||
error "Dieses Skript muss als Root ausgeführt werden: sudo $0"
|
||||
fi
|
||||
export PATH="/usr/sbin:/sbin:/usr/bin:/bin:/usr/local/bin:$PATH"
|
||||
log "✅ Root-Berechtigung bestätigt"
|
||||
}
|
||||
|
||||
check_debian_system() {
|
||||
if [ ! -f /etc/debian_version ]; then
|
||||
error "Dieses Skript ist nur für Debian/Raspbian-Systeme geeignet!"
|
||||
fi
|
||||
|
||||
local debian_version=$(cat /etc/debian_version 2>/dev/null || echo "Unbekannt")
|
||||
log "✅ Debian/Raspbian-System erkannt (Version: $debian_version)"
|
||||
|
||||
# Prüfe auf Raspberry Pi
|
||||
if [ -f /proc/device-tree/model ]; then
|
||||
local pi_model=$(cat /proc/device-tree/model 2>/dev/null || echo "Unbekannt")
|
||||
info "Raspberry Pi Modell: $pi_model"
|
||||
fi
|
||||
}
|
||||
|
||||
check_internet_connection() {
|
||||
progress "Prüfe Internetverbindung..."
|
||||
|
||||
local test_urls=("8.8.8.8" "1.1.1.1" "google.com")
|
||||
local connection_ok=false
|
||||
|
||||
for url in "${test_urls[@]}"; do
|
||||
if ping -c 1 -W 3 "$url" >/dev/null 2>&1; then
|
||||
connection_ok=true
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$connection_ok" = true ]; then
|
||||
log "✅ Internetverbindung verfügbar"
|
||||
else
|
||||
warning "⚠️ Keine Internetverbindung - Installation könnte fehlschlagen"
|
||||
fi
|
||||
}
|
||||
|
||||
# =========================== SYSTEM-VORBEREITUNG ===========================
|
||||
update_system() {
|
||||
log "=== SYSTEM-UPDATE ==="
|
||||
|
||||
progress "Aktualisiere Paketlisten..."
|
||||
apt-get update -y || error "APT Update fehlgeschlagen"
|
||||
|
||||
progress "Installiere grundlegende System-Tools..."
|
||||
apt-get install -y \
|
||||
curl \
|
||||
wget \
|
||||
git \
|
||||
nano \
|
||||
htop \
|
||||
rsync \
|
||||
unzip \
|
||||
sudo \
|
||||
ca-certificates \
|
||||
gnupg \
|
||||
lsb-release \
|
||||
apt-transport-https \
|
||||
software-properties-common \
|
||||
openssl \
|
||||
|| error "Grundlegende Tools Installation fehlgeschlagen"
|
||||
|
||||
log "✅ System-Update abgeschlossen"
|
||||
}
|
||||
|
||||
# =========================== DOCKER INSTALLATION ===========================
|
||||
install_docker() {
|
||||
log "=== DOCKER INSTALLATION ==="
|
||||
|
||||
# Prüfe ob Docker bereits installiert ist
|
||||
if command -v docker >/dev/null 2>&1; then
|
||||
info "Docker ist bereits installiert"
|
||||
docker --version
|
||||
else
|
||||
progress "Installiere Docker..."
|
||||
|
||||
# Docker GPG-Schlüssel hinzufügen
|
||||
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
|
||||
|
||||
# Docker Repository hinzufügen
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
|
||||
|
||||
# Paketlisten aktualisieren und Docker installieren
|
||||
apt-get update -y
|
||||
apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin
|
||||
|
||||
# Docker-Service aktivieren
|
||||
systemctl enable docker
|
||||
systemctl start docker
|
||||
|
||||
log "✅ Docker erfolgreich installiert"
|
||||
fi
|
||||
|
||||
# Prüfe ob Docker Compose bereits installiert ist
|
||||
if command -v docker compose >/dev/null 2>&1; then
|
||||
info "Docker Compose ist bereits installiert"
|
||||
docker compose version
|
||||
else
|
||||
progress "Installiere Docker Compose..."
|
||||
apt-get install -y docker-compose-plugin || error "Docker Compose Installation fehlgeschlagen"
|
||||
log "✅ Docker Compose erfolgreich installiert"
|
||||
fi
|
||||
|
||||
# Docker-Service Status prüfen
|
||||
if systemctl is-active --quiet docker; then
|
||||
success "✅ Docker läuft erfolgreich"
|
||||
else
|
||||
error "❌ Docker konnte nicht gestartet werden"
|
||||
fi
|
||||
}
|
||||
|
||||
# =========================== SSL-ZERTIFIKAT GENERIERUNG ===========================
|
||||
generate_mercedes_ssl_certificate() {
|
||||
log "=== MERCEDES SSL-ZERTIFIKAT GENERIERUNG ==="
|
||||
|
||||
progress "Erstelle SSL-Verzeichnis..."
|
||||
mkdir -p "$SSL_DIR"
|
||||
|
||||
progress "Generiere Mercedes SSL-Zertifikat für $DOMAIN..."
|
||||
|
||||
# Erstelle OpenSSL-Konfigurationsdatei für Subject Alternative Names
|
||||
cat > "$SSL_DIR/openssl.conf" << EOF
|
||||
[req]
|
||||
distinguished_name = req_distinguished_name
|
||||
req_extensions = v3_req
|
||||
prompt = no
|
||||
|
||||
[req_distinguished_name]
|
||||
C = DE
|
||||
ST = Baden-Wuerttemberg
|
||||
L = Stuttgart
|
||||
O = Mercedes-Benz AG
|
||||
OU = IT-Abteilung
|
||||
CN = $DOMAIN
|
||||
|
||||
[v3_req]
|
||||
keyUsage = keyEncipherment, dataEncipherment
|
||||
extendedKeyUsage = serverAuth
|
||||
subjectAltName = @alt_names
|
||||
|
||||
[alt_names]
|
||||
DNS.1 = $DOMAIN
|
||||
DNS.2 = m040tbaraspi001
|
||||
DNS.3 = localhost
|
||||
DNS.4 = raspberrypi
|
||||
IP.1 = 127.0.0.1
|
||||
IP.2 = 192.168.0.109
|
||||
EOF
|
||||
|
||||
# Generiere privaten Schlüssel
|
||||
progress "Generiere privaten Schlüssel..."
|
||||
openssl genrsa -out "$SSL_DIR/frontend.key" 4096 || error "Fehler beim Generieren des privaten Schlüssels"
|
||||
|
||||
# Generiere Zertifikat mit SAN (Subject Alternative Names)
|
||||
progress "Generiere SSL-Zertifikat..."
|
||||
openssl req -new -x509 -key "$SSL_DIR/frontend.key" -out "$SSL_DIR/frontend.crt" -days 365 \
|
||||
-config "$SSL_DIR/openssl.conf" \
|
||||
-extensions v3_req \
|
||||
|| error "Fehler beim Generieren des SSL-Zertifikats"
|
||||
|
||||
# Berechtigungen setzen
|
||||
chmod 600 "$SSL_DIR/frontend.key"
|
||||
chmod 644 "$SSL_DIR/frontend.crt"
|
||||
|
||||
# Zertifikat in System-CA-Store hinzufügen
|
||||
progress "Füge Zertifikat zum System-CA-Store hinzu..."
|
||||
cp "$SSL_DIR/frontend.crt" "/usr/local/share/ca-certificates/$DOMAIN.crt"
|
||||
update-ca-certificates
|
||||
|
||||
# Zertifikat-Informationen anzeigen
|
||||
info "Zertifikat-Details:"
|
||||
openssl x509 -in "$SSL_DIR/frontend.crt" -text -noout | grep -E "(Subject:|DNS:|IP Address:)" || true
|
||||
|
||||
log "✅ Mercedes SSL-Zertifikat erfolgreich generiert"
|
||||
}
|
||||
|
||||
# =========================== ANWENDUNG DEPLOYMENT ===========================
|
||||
deploy_frontend_application() {
|
||||
log "=== FRONTEND-ANWENDUNG DEPLOYMENT ==="
|
||||
|
||||
progress "Erstelle Frontend-Verzeichnis..."
|
||||
mkdir -p "$FRONTEND_DIR"
|
||||
|
||||
progress "Kopiere Frontend-Dateien..."
|
||||
rsync -av --exclude=node_modules --exclude=.git --exclude=ssl "$CURRENT_DIR/" "$FRONTEND_DIR/"
|
||||
|
||||
# Stelle sicher, dass die richtigen Berechtigungen gesetzt sind
|
||||
chown -R root:root "$FRONTEND_DIR"
|
||||
chmod -R 755 "$FRONTEND_DIR"
|
||||
|
||||
log "✅ Frontend-Anwendung erfolgreich deployed"
|
||||
}
|
||||
|
||||
# =========================== CADDY LOG-VERZEICHNIS ===========================
|
||||
create_caddy_logs() {
|
||||
log "=== CADDY LOG-VERZEICHNISSE ERSTELLEN ==="
|
||||
|
||||
progress "Erstelle Caddy Log-Verzeichnisse..."
|
||||
mkdir -p "$CADDY_LOG_DIR"
|
||||
chmod 755 "$CADDY_LOG_DIR"
|
||||
|
||||
# Erstelle leere Log-Dateien
|
||||
touch "$CADDY_LOG_DIR/access.log"
|
||||
touch "$CADDY_LOG_DIR/error.log"
|
||||
chmod 644 "$CADDY_LOG_DIR"/*.log
|
||||
|
||||
log "✅ Caddy Log-Verzeichnisse erstellt"
|
||||
}
|
||||
|
||||
# =========================== DOCKER COMPOSE KONFIGURATION ===========================
|
||||
create_docker_compose_config() {
|
||||
log "=== DOCKER COMPOSE KONFIGURATION ==="
|
||||
|
||||
progress "Erstelle Docker Compose Konfiguration..."
|
||||
|
||||
cat > "$FRONTEND_DIR/docker-compose.yml" << EOF
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
# Frontend Next.js Application
|
||||
frontend-app:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
container_name: myp-frontend-app
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
- NEXT_TELEMETRY_DISABLED=1
|
||||
networks:
|
||||
- myp-network
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:3000/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
depends_on:
|
||||
- db
|
||||
|
||||
# Caddy Reverse Proxy
|
||||
caddy:
|
||||
image: caddy:latest
|
||||
container_name: myp-caddy
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
volumes:
|
||||
- ./docker/caddy/Caddyfile:/etc/caddy/Caddyfile:ro
|
||||
- $SSL_DIR:/etc/ssl/certs/myp:ro
|
||||
- $CADDY_LOG_DIR:/var/log/caddy
|
||||
- caddy_data:/data
|
||||
- caddy_config:/config
|
||||
networks:
|
||||
- myp-network
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "caddy", "version"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
depends_on:
|
||||
- frontend-app
|
||||
|
||||
# Database (SQLite with volume for persistence)
|
||||
db:
|
||||
image: alpine:latest
|
||||
container_name: myp-db
|
||||
volumes:
|
||||
- db_data:/data
|
||||
command: ["sh", "-c", "mkdir -p /data && tail -f /dev/null"]
|
||||
networks:
|
||||
- myp-network
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
caddy_data:
|
||||
driver: local
|
||||
caddy_config:
|
||||
driver: local
|
||||
db_data:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
myp-network:
|
||||
driver: bridge
|
||||
EOF
|
||||
|
||||
log "✅ Docker Compose Konfiguration erstellt"
|
||||
}
|
||||
|
||||
# =========================== SYSTEMD SERVICE ===========================
|
||||
create_systemd_service() {
|
||||
log "=== SYSTEMD SERVICE KONFIGURATION ==="
|
||||
|
||||
progress "Erstelle systemd Service für Frontend..."
|
||||
|
||||
cat > "/etc/systemd/system/$DOCKER_COMPOSE_SERVICE.service" << EOF
|
||||
[Unit]
|
||||
Description=MYP Frontend Docker Compose Service
|
||||
Requires=docker.service
|
||||
After=docker.service
|
||||
StartLimitIntervalSec=0
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
RemainAfterExit=yes
|
||||
WorkingDirectory=$FRONTEND_DIR
|
||||
ExecStart=/usr/bin/docker compose up -d
|
||||
ExecStop=/usr/bin/docker compose down
|
||||
ExecReload=/usr/bin/docker compose restart
|
||||
TimeoutStartSec=300
|
||||
TimeoutStopSec=120
|
||||
Restart=on-failure
|
||||
RestartSec=30
|
||||
User=root
|
||||
Group=root
|
||||
|
||||
# Environment
|
||||
Environment=COMPOSE_PROJECT_NAME=myp-frontend
|
||||
|
||||
# Sicherheitseinstellungen
|
||||
NoNewPrivileges=yes
|
||||
PrivateTmp=yes
|
||||
ProtectHome=yes
|
||||
ProtectSystem=strict
|
||||
ReadWritePaths=$FRONTEND_DIR $SSL_DIR $CADDY_LOG_DIR
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
# Systemd-Konfiguration neu laden
|
||||
systemctl daemon-reload
|
||||
|
||||
log "✅ Systemd Service erstellt"
|
||||
}
|
||||
|
||||
# =========================== SERVICES STARTEN ===========================
|
||||
start_frontend_services() {
|
||||
log "=== FRONTEND-SERVICES STARTEN ==="
|
||||
|
||||
progress "Wechsle in Frontend-Verzeichnis..."
|
||||
cd "$FRONTEND_DIR"
|
||||
|
||||
# Docker Images erstellen
|
||||
progress "Erstelle Docker Images..."
|
||||
docker compose build || error "Docker Build fehlgeschlagen"
|
||||
|
||||
# Services aktivieren und starten
|
||||
progress "Aktiviere und starte Frontend-Service..."
|
||||
systemctl enable "$DOCKER_COMPOSE_SERVICE" || error "Fehler beim Aktivieren des Frontend-Service"
|
||||
systemctl start "$DOCKER_COMPOSE_SERVICE" || error "Fehler beim Starten des Frontend-Service"
|
||||
|
||||
# Warte auf Service-Start
|
||||
progress "Warte auf Service-Start..."
|
||||
sleep 30
|
||||
|
||||
# Service-Status prüfen
|
||||
if systemctl is-active --quiet "$DOCKER_COMPOSE_SERVICE"; then
|
||||
success "✅ Frontend-Service läuft erfolgreich"
|
||||
else
|
||||
error "❌ Frontend-Service konnte nicht gestartet werden"
|
||||
fi
|
||||
|
||||
# Docker Container-Status prüfen
|
||||
progress "Prüfe Container-Status..."
|
||||
docker compose ps
|
||||
|
||||
cd "$CURRENT_DIR"
|
||||
log "✅ Frontend-Services erfolgreich gestartet"
|
||||
}
|
||||
|
||||
# =========================== SYSTEM-TEST ===========================
|
||||
test_frontend_application() {
|
||||
log "=== FRONTEND-SYSTEM-TEST ==="
|
||||
|
||||
progress "Teste HTTPS-Verbindung zu $DOMAIN..."
|
||||
|
||||
# Warte auf Service-Start
|
||||
local max_attempts=60
|
||||
local attempt=1
|
||||
|
||||
while [ $attempt -le $max_attempts ]; do
|
||||
if curl -k -s --connect-timeout 5 "https://$DOMAIN/health" >/dev/null 2>&1; then
|
||||
success "✅ Frontend erreichbar unter https://$DOMAIN"
|
||||
break
|
||||
fi
|
||||
|
||||
# Teste auch localhost
|
||||
if curl -k -s --connect-timeout 5 "https://localhost/health" >/dev/null 2>&1; then
|
||||
success "✅ Frontend erreichbar unter https://localhost"
|
||||
break
|
||||
fi
|
||||
|
||||
progress "Warte auf Frontend... ($attempt/$max_attempts)"
|
||||
sleep 5
|
||||
((attempt++))
|
||||
done
|
||||
|
||||
if [ $attempt -gt $max_attempts ]; then
|
||||
error "❌ Frontend nicht erreichbar nach $max_attempts Versuchen"
|
||||
fi
|
||||
|
||||
# Teste SSL-Zertifikat
|
||||
progress "Teste SSL-Zertifikat..."
|
||||
if openssl s_client -connect localhost:443 -servername "$DOMAIN" </dev/null 2>/dev/null | openssl x509 -noout -text >/dev/null 2>&1; then
|
||||
success "✅ SSL-Zertifikat gültig"
|
||||
else
|
||||
warning "⚠️ SSL-Zertifikat-Test fehlgeschlagen"
|
||||
fi
|
||||
|
||||
# Teste Container-Gesundheit
|
||||
progress "Teste Container-Gesundheit..."
|
||||
cd "$FRONTEND_DIR"
|
||||
|
||||
local unhealthy_containers=$(docker compose ps --format json | jq -r '.[] | select(.Health == "unhealthy") | .Name' 2>/dev/null || echo "")
|
||||
|
||||
if [ -n "$unhealthy_containers" ]; then
|
||||
warning "⚠️ Ungesunde Container gefunden: $unhealthy_containers"
|
||||
else
|
||||
success "✅ Alle Container sind gesund"
|
||||
fi
|
||||
|
||||
cd "$CURRENT_DIR"
|
||||
log "✅ System-Test abgeschlossen"
|
||||
}
|
||||
|
||||
# =========================== FIREWALL-KONFIGURATION ===========================
|
||||
configure_frontend_firewall() {
|
||||
log "=== FIREWALL-KONFIGURATION ==="
|
||||
|
||||
progress "Konfiguriere UFW-Firewall für Frontend..."
|
||||
|
||||
# UFW installieren falls nicht vorhanden
|
||||
if ! command -v ufw >/dev/null 2>&1; then
|
||||
apt-get install -y ufw || error "UFW Installation fehlgeschlagen"
|
||||
fi
|
||||
|
||||
# UFW zurücksetzen
|
||||
ufw --force reset
|
||||
|
||||
# Standard-Richtlinien
|
||||
ufw default deny incoming
|
||||
ufw default allow outgoing
|
||||
|
||||
# SSH erlauben (wichtig für Remote-Zugang)
|
||||
ufw allow ssh
|
||||
|
||||
# HTTP und HTTPS erlauben
|
||||
ufw allow 80/tcp
|
||||
ufw allow 443/tcp
|
||||
|
||||
# Lokale Verbindungen erlauben
|
||||
ufw allow from 127.0.0.1
|
||||
ufw allow from ::1
|
||||
|
||||
# Internes Netzwerk erlauben (Mercedes-Netzwerk)
|
||||
ufw allow from 192.168.0.0/16
|
||||
ufw allow from 10.0.0.0/8
|
||||
ufw allow from 172.16.0.0/12
|
||||
|
||||
# UFW aktivieren
|
||||
ufw --force enable
|
||||
|
||||
# Firewall-Status anzeigen
|
||||
ufw status verbose
|
||||
|
||||
log "✅ Firewall für Frontend konfiguriert"
|
||||
}
|
||||
|
||||
# =========================== HAUPTMENÜ ===========================
|
||||
show_menu() {
|
||||
clear
|
||||
echo -e "${CYAN}=================================================================${NC}"
|
||||
echo -e "${CYAN} $APP_NAME - Setup-Skript v$APP_VERSION${NC}"
|
||||
echo -e "${CYAN}=================================================================${NC}"
|
||||
echo ""
|
||||
echo -e "${YELLOW}Bitte wählen Sie eine Option:${NC}"
|
||||
echo ""
|
||||
echo -e "${GREEN}1)${NC} Vollständige Frontend-Installation"
|
||||
echo -e " ${BLUE}→ Docker, SSL-Zertifikate, Caddy Reverse Proxy${NC}"
|
||||
echo -e " ${BLUE}→ Frontend verfügbar unter https://$DOMAIN${NC}"
|
||||
echo -e " ${BLUE}→ Automatischer Start beim Boot${NC}"
|
||||
echo ""
|
||||
echo -e "${GREEN}2)${NC} Nur SSL-Zertifikate neu generieren"
|
||||
echo -e " ${BLUE}→ Erstellt neue Mercedes SSL-Zertifikate${NC}"
|
||||
echo -e " ${BLUE}→ Startet Services neu${NC}"
|
||||
echo ""
|
||||
echo -e "${GREEN}3)${NC} Service-Status prüfen"
|
||||
echo -e " ${BLUE}→ Zeigt Status aller Frontend-Services${NC}"
|
||||
echo -e " ${BLUE}→ Container-Logs und Gesundheitsprüfung${NC}"
|
||||
echo ""
|
||||
echo -e "${GREEN}4)${NC} Beenden"
|
||||
echo ""
|
||||
echo -e "${CYAN}=================================================================${NC}"
|
||||
echo -n "Ihre Wahl [1-4]: "
|
||||
}
|
||||
|
||||
# =========================== INSTALLATIONS-MODI ===========================
|
||||
install_full_frontend() {
|
||||
log "=== VOLLSTÄNDIGE FRONTEND-INSTALLATION ==="
|
||||
|
||||
check_root
|
||||
check_debian_system
|
||||
check_internet_connection
|
||||
|
||||
update_system
|
||||
install_docker
|
||||
generate_mercedes_ssl_certificate
|
||||
deploy_frontend_application
|
||||
create_caddy_logs
|
||||
create_docker_compose_config
|
||||
create_systemd_service
|
||||
configure_frontend_firewall
|
||||
start_frontend_services
|
||||
test_frontend_application
|
||||
|
||||
success "✅ Vollständige Frontend-Installation abgeschlossen!"
|
||||
info "Frontend ist verfügbar unter:"
|
||||
info " 🌐 https://$DOMAIN"
|
||||
info " 🌐 https://localhost"
|
||||
info " 🔒 SSL-Zertifikate: $SSL_DIR"
|
||||
info " 📁 Anwendung: $FRONTEND_DIR"
|
||||
info " 📋 Logs: $CADDY_LOG_DIR"
|
||||
info ""
|
||||
info "Service-Befehle:"
|
||||
info " systemctl status $DOCKER_COMPOSE_SERVICE"
|
||||
info " systemctl restart $DOCKER_COMPOSE_SERVICE"
|
||||
info " docker compose logs -f (in $FRONTEND_DIR)"
|
||||
}
|
||||
|
||||
regenerate_ssl_certificates() {
|
||||
log "=== SSL-ZERTIFIKATE NEU GENERIEREN ==="
|
||||
|
||||
check_root
|
||||
|
||||
# Stoppe Services
|
||||
progress "Stoppe Frontend-Services..."
|
||||
systemctl stop "$DOCKER_COMPOSE_SERVICE" 2>/dev/null || true
|
||||
|
||||
# Neue Zertifikate generieren
|
||||
generate_mercedes_ssl_certificate
|
||||
|
||||
# Services neu starten
|
||||
progress "Starte Frontend-Services neu..."
|
||||
systemctl start "$DOCKER_COMPOSE_SERVICE" || error "Fehler beim Neustarten der Services"
|
||||
|
||||
# Test
|
||||
test_frontend_application
|
||||
|
||||
success "✅ SSL-Zertifikate erfolgreich erneuert!"
|
||||
}
|
||||
|
||||
check_service_status() {
|
||||
log "=== SERVICE-STATUS PRÜFUNG ==="
|
||||
|
||||
info "Systemd Service Status:"
|
||||
systemctl status "$DOCKER_COMPOSE_SERVICE" --no-pager || true
|
||||
|
||||
echo ""
|
||||
info "Docker Container Status:"
|
||||
if [ -d "$FRONTEND_DIR" ]; then
|
||||
cd "$FRONTEND_DIR"
|
||||
docker compose ps 2>/dev/null || true
|
||||
|
||||
echo ""
|
||||
info "Container-Logs (letzte 20 Zeilen):"
|
||||
docker compose logs --tail=20 2>/dev/null || true
|
||||
|
||||
cd "$CURRENT_DIR"
|
||||
else
|
||||
warning "Frontend-Verzeichnis nicht gefunden: $FRONTEND_DIR"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
info "Netzwerk-Tests:"
|
||||
curl -k -s -I "https://$DOMAIN/health" 2>/dev/null | head -1 || echo "❌ $DOMAIN nicht erreichbar"
|
||||
curl -k -s -I "https://localhost/health" 2>/dev/null | head -1 || echo "❌ localhost nicht erreichbar"
|
||||
|
||||
echo ""
|
||||
info "SSL-Zertifikat Info:"
|
||||
if [ -f "$SSL_DIR/frontend.crt" ]; then
|
||||
openssl x509 -in "$SSL_DIR/frontend.crt" -noout -dates 2>/dev/null || echo "❌ Zertifikat-Lesefehler"
|
||||
else
|
||||
echo "❌ Zertifikat nicht gefunden: $SSL_DIR/frontend.crt"
|
||||
fi
|
||||
}
|
||||
|
||||
# =========================== HAUPTPROGRAMM ===========================
|
||||
main() {
|
||||
# Erstelle Log-Datei
|
||||
mkdir -p "$(dirname "$INSTALL_LOG")"
|
||||
touch "$INSTALL_LOG"
|
||||
|
||||
# Zeige Menü
|
||||
while true; do
|
||||
show_menu
|
||||
read -r choice
|
||||
|
||||
case $choice in
|
||||
1)
|
||||
install_full_frontend
|
||||
break
|
||||
;;
|
||||
2)
|
||||
regenerate_ssl_certificates
|
||||
break
|
||||
;;
|
||||
3)
|
||||
check_service_status
|
||||
echo ""
|
||||
echo -e "${YELLOW}Drücken Sie Enter um fortzufahren...${NC}"
|
||||
read -r
|
||||
;;
|
||||
4)
|
||||
log "Setup beendet"
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Ungültige Auswahl. Bitte wählen Sie 1-4.${NC}"
|
||||
echo ""
|
||||
echo -e "${YELLOW}Drücken Sie Enter um fortzufahren...${NC}"
|
||||
read -r
|
||||
;;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
# =========================== SCRIPT AUSFÜHRUNG ===========================
|
||||
main "$@"
|
Loading…
x
Reference in New Issue
Block a user