"feat: Integrate new charting library in frontend"

@@ -822,7 +822,7 @@ def user_update_profile():
 @login_required
 def user_api_update_settings():
     """API endpoint for settings updates (JSON)"""
-    return user_update_profile()
+    return user_update_settings()

 @app.route("/user/update-settings", methods=["POST"])
 @login_required

@@ -2743,6 +2743,223 @@ def get_stats():
        app_logger.error(f"Fehler beim Abrufen der Statistiken: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/stats/charts/job-status", methods=["GET"])
@login_required
def get_job_status_chart_data():
    """Returns chart data for the job status distribution."""
    try:
        db_session = get_db_session()

        # Count jobs by status
        job_status_counts = {
            'completed': db_session.query(Job).filter(Job.status == 'completed').count(),
            'failed': db_session.query(Job).filter(Job.status == 'failed').count(),
            'cancelled': db_session.query(Job).filter(Job.status == 'cancelled').count(),
            'running': db_session.query(Job).filter(Job.status == 'running').count(),
            'scheduled': db_session.query(Job).filter(Job.status == 'scheduled').count()
        }

        db_session.close()

        chart_data = {
            'labels': ['Abgeschlossen', 'Fehlgeschlagen', 'Abgebrochen', 'Läuft', 'Geplant'],
            'datasets': [{
                'label': 'Anzahl Jobs',
                'data': [
                    job_status_counts['completed'],
                    job_status_counts['failed'],
                    job_status_counts['cancelled'],
                    job_status_counts['running'],
                    job_status_counts['scheduled']
                ],
                'backgroundColor': [
                    '#10b981',  # green for completed
                    '#ef4444',  # red for failed
                    '#6b7280',  # grey for cancelled
                    '#3b82f6',  # blue for running
                    '#f59e0b'   # orange for scheduled
                ]
            }]
        }

        return jsonify(chart_data)

    except Exception as e:
        app_logger.error(f"Fehler beim Abrufen der Job-Status-Diagrammdaten: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500
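
The labels/datasets payload above is the structure that chart libraries such as Chart.js consume directly, which is what "integrate new charting library in frontend" relies on. A minimal smoke-test sketch, not part of the commit: it assumes the Flask app defined in this file is importable as `app` from a module named `app`, and that `login(client)` is a hypothetical helper that authenticates the test client, since every new route is guarded by @login_required.

# Sketch only: verify the chart payload shape with Flask's built-in test client.
from app import app  # assumption: module name

def smoke_test_job_status_chart():
    with app.test_client() as client:
        # login(client)  # hypothetical auth helper; required because of @login_required
        payload = client.get("/api/stats/charts/job-status").get_json()
        assert set(payload) == {"labels", "datasets"}
        dataset = payload["datasets"][0]
        # labels, data and backgroundColor are parallel arrays, one entry per status
        assert len(payload["labels"]) == len(dataset["data"]) == len(dataset["backgroundColor"])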
@app.route("/api/stats/charts/printer-usage", methods=["GET"])
|
||||
@login_required
|
||||
def get_printer_usage_chart_data():
|
||||
"""Gibt Diagrammdaten für Drucker-Nutzung zurück."""
|
||||
try:
|
||||
db_session = get_db_session()
|
||||
|
||||
# Drucker mit Job-Anzahl
|
||||
printer_usage = db_session.query(
|
||||
Printer.name,
|
||||
func.count(Job.id).label('job_count')
|
||||
).outerjoin(Job).group_by(Printer.id, Printer.name).all()
|
||||
|
||||
db_session.close()
|
||||
|
||||
chart_data = {
|
||||
'labels': [usage[0] for usage in printer_usage],
|
||||
'datasets': [{
|
||||
'label': 'Anzahl Jobs',
|
||||
'data': [usage[1] for usage in printer_usage],
|
||||
'backgroundColor': '#3b82f6',
|
||||
'borderColor': '#1d4ed8',
|
||||
'borderWidth': 1
|
||||
}]
|
||||
}
|
||||
|
||||
return jsonify(chart_data)
|
||||
|
||||
except Exception as e:
|
||||
app_logger.error(f"Fehler beim Abrufen der Drucker-Nutzung-Diagrammdaten: {str(e)}")
|
||||
return jsonify({"error": "Interner Serverfehler"}), 500
|
||||
|
||||
@app.route("/api/stats/charts/jobs-timeline", methods=["GET"])
|
||||
@login_required
|
||||
def get_jobs_timeline_chart_data():
|
||||
"""Gibt Diagrammdaten für Jobs-Timeline der letzten 30 Tage zurück."""
|
||||
try:
|
||||
db_session = get_db_session()
|
||||
|
||||
# Letzte 30 Tage
|
||||
end_date = datetime.now().date()
|
||||
start_date = end_date - timedelta(days=30)
|
||||
|
||||
# Jobs pro Tag der letzten 30 Tage
|
||||
daily_jobs = db_session.query(
|
||||
func.date(Job.created_at).label('date'),
|
||||
func.count(Job.id).label('count')
|
||||
).filter(
|
||||
func.date(Job.created_at) >= start_date,
|
||||
func.date(Job.created_at) <= end_date
|
||||
).group_by(func.date(Job.created_at)).all()
|
||||
|
||||
# Alle Tage füllen (auch ohne Jobs)
|
||||
date_dict = {job_date: count for job_date, count in daily_jobs}
|
||||
|
||||
labels = []
|
||||
data = []
|
||||
current_date = start_date
|
||||
|
||||
while current_date <= end_date:
|
||||
labels.append(current_date.strftime('%d.%m'))
|
||||
data.append(date_dict.get(current_date, 0))
|
||||
current_date += timedelta(days=1)
|
||||
|
||||
db_session.close()
|
||||
|
||||
chart_data = {
|
||||
'labels': labels,
|
||||
'datasets': [{
|
||||
'label': 'Jobs pro Tag',
|
||||
'data': data,
|
||||
'fill': True,
|
||||
'backgroundColor': 'rgba(59, 130, 246, 0.1)',
|
||||
'borderColor': '#3b82f6',
|
||||
'tension': 0.4
|
||||
}]
|
||||
}
|
||||
|
||||
return jsonify(chart_data)
|
||||
|
||||
except Exception as e:
|
||||
app_logger.error(f"Fehler beim Abrufen der Jobs-Timeline-Diagrammdaten: {str(e)}")
|
||||
return jsonify({"error": "Interner Serverfehler"}), 500
|
||||
|
||||
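
One thing to watch in the timeline endpoint: depending on the database backend, func.date() may come back as a string (for example on SQLite) rather than a datetime.date, in which case the date_dict.get(current_date, 0) lookup would never match and every day would read as 0. A small normalization sketch, illustration only and not part of the commit:

# Coerce whatever the backend returned for func.date() into a datetime.date,
# so the per-day lookup works the same way on SQLite and PostgreSQL.
from datetime import date, datetime

def as_date(value):
    if isinstance(value, date):
        return value
    return datetime.strptime(str(value), "%Y-%m-%d").date()

# date_dict = {as_date(job_date): count for job_date, count in daily_jobs}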
@app.route("/api/stats/charts/user-activity", methods=["GET"])
|
||||
@login_required
|
||||
def get_user_activity_chart_data():
|
||||
"""Gibt Diagrammdaten für Top-Benutzer-Aktivität zurück."""
|
||||
try:
|
||||
db_session = get_db_session()
|
||||
|
||||
# Top 10 Benutzer nach Job-Anzahl
|
||||
top_users = db_session.query(
|
||||
User.username,
|
||||
func.count(Job.id).label('job_count')
|
||||
).join(Job).group_by(
|
||||
User.id, User.username
|
||||
).order_by(
|
||||
func.count(Job.id).desc()
|
||||
).limit(10).all()
|
||||
|
||||
db_session.close()
|
||||
|
||||
chart_data = {
|
||||
'labels': [user[0] for user in top_users],
|
||||
'datasets': [{
|
||||
'label': 'Anzahl Jobs',
|
||||
'data': [user[1] for user in top_users],
|
||||
'backgroundColor': '#8b5cf6',
|
||||
'borderColor': '#7c3aed',
|
||||
'borderWidth': 1
|
||||
}]
|
||||
}
|
||||
|
||||
return jsonify(chart_data)
|
||||
|
||||
except Exception as e:
|
||||
app_logger.error(f"Fehler beim Abrufen der Benutzer-Aktivität-Diagrammdaten: {str(e)}")
|
||||
return jsonify({"error": "Interner Serverfehler"}), 500
|
||||
|
||||
@app.route("/api/stats/export", methods=["GET"])
|
||||
@login_required
|
||||
def export_stats():
|
||||
"""Exportiert Statistiken als CSV."""
|
||||
try:
|
||||
db_session = get_db_session()
|
||||
|
||||
# Basis-Statistiken sammeln
|
||||
total_users = db_session.query(User).count()
|
||||
total_printers = db_session.query(Printer).count()
|
||||
total_jobs = db_session.query(Job).count()
|
||||
completed_jobs = db_session.query(Job).filter(Job.status == "completed").count()
|
||||
failed_jobs = db_session.query(Job).filter(Job.status == "failed").count()
|
||||
|
||||
# CSV-Inhalt erstellen
|
||||
import io
|
||||
import csv
|
||||
|
||||
output = io.StringIO()
|
||||
writer = csv.writer(output)
|
||||
|
||||
# Header
|
||||
writer.writerow(['Metrik', 'Wert'])
|
||||
|
||||
# Daten
|
||||
writer.writerow(['Gesamte Benutzer', total_users])
|
||||
writer.writerow(['Gesamte Drucker', total_printers])
|
||||
writer.writerow(['Gesamte Jobs', total_jobs])
|
||||
writer.writerow(['Abgeschlossene Jobs', completed_jobs])
|
||||
writer.writerow(['Fehlgeschlagene Jobs', failed_jobs])
|
||||
writer.writerow(['Erfolgsrate (%)', round((completed_jobs / total_jobs * 100), 2) if total_jobs > 0 else 0])
|
||||
writer.writerow(['Exportiert am', datetime.now().strftime('%d.%m.%Y %H:%M:%S')])
|
||||
|
||||
db_session.close()
|
||||
|
||||
# Response vorbereiten
|
||||
output.seek(0)
|
||||
|
||||
response = Response(
|
||||
output.getvalue(),
|
||||
mimetype='text/csv',
|
||||
headers={
|
||||
'Content-Disposition': f'attachment; filename=statistiken_{datetime.now().strftime("%Y%m%d_%H%M%S")}.csv'
|
||||
}
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
app_logger.error(f"Fehler beim Exportieren der Statistiken: {str(e)}")
|
||||
return jsonify({"error": "Interner Serverfehler"}), 500
|
||||
|
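
A brief sketch, not part of the commit, of how the CSV export could be fetched and parsed, again assuming an importable `app` module and an already authenticated test client:

# Sketch only: download the CSV export and read it back with the csv module.
import csv
import io

from app import app  # assumption: the Flask app above lives in a module named `app`

def fetch_stats_csv():
    with app.test_client() as client:
        # the client must carry a valid login session because of @login_required
        response = client.get("/api/stats/export")
        rows = list(csv.reader(io.StringIO(response.get_data(as_text=True))))
        assert rows[0] == ['Metrik', 'Wert']   # header row written above
        return dict(rows[1:])                  # e.g. {'Gesamte Benutzer': '42', ...}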

@app.route("/api/admin/users", methods=["GET"])
@login_required
def get_users():

@@ -3895,137 +4112,57 @@ def admin_update_user_form(user_id):
def admin_update_printer_form(printer_id):
    """Updates a printer via an HTML form (admins only)."""
    if not current_user.is_admin:
import os
import sys
import logging
import atexit
from datetime import datetime, timedelta
from flask import Flask, render_template, request, jsonify, redirect, url_for, flash, send_file, abort, session, make_response
from flask_login import LoginManager, login_user, logout_user, login_required, current_user
from flask_wtf import CSRFProtect
from flask_wtf.csrf import CSRFError
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash, check_password_hash
from sqlalchemy.orm import sessionmaker, joinedload
from sqlalchemy import func, text
from functools import wraps
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List, Dict, Tuple
import time
import subprocess
import json
import signal
from contextlib import contextmanager

# Import Windows-specific fixes early (safe version)
if os.name == 'nt':
    try:
        from utils.windows_fixes import get_windows_thread_manager
        # apply_all_windows_fixes() runs automatically on import
        print("✅ Windows-Fixes (sichere Version) geladen")
    except ImportError as e:
        # Fallback if windows_fixes is not available
        get_windows_thread_manager = None
        print(f"⚠️ Windows-Fixes nicht verfügbar: {str(e)}")
else:
    get_windows_thread_manager = None

# Local imports
from models import init_database, create_initial_admin, User, Printer, Job, Stats, SystemLog, get_db_session, GuestRequest, UserPermission, Notification
from utils.logging_config import setup_logging, get_logger, measure_execution_time, log_startup_info, debug_request, debug_response
from utils.job_scheduler import JobScheduler, get_job_scheduler
from utils.queue_manager import start_queue_manager, stop_queue_manager, get_queue_manager
from config.settings import SECRET_KEY, UPLOAD_FOLDER, ALLOWED_EXTENSIONS, ENVIRONMENT, SESSION_LIFETIME, SCHEDULER_ENABLED, SCHEDULER_INTERVAL, TAPO_USERNAME, TAPO_PASSWORD
from utils.file_manager import file_manager, save_job_file, save_guest_file, save_avatar_file, delete_file as delete_file_safe

# Import blueprints
from blueprints.guest import guest_blueprint
from blueprints.calendar import calendar_blueprint
from blueprints.users import users_blueprint

# Import the scheduler if available
try:
    from utils.job_scheduler import scheduler
except ImportError:
    scheduler = None

# Import the SSL context if available
try:
    from utils.ssl_config import get_ssl_context
except ImportError:
    def get_ssl_context():
        return None

# Import template helpers if available
try:
    from utils.template_helpers import register_template_helpers
except ImportError:
    def register_template_helpers(app):
        pass

# Import the database monitor and backup manager if available
try:
    from utils.database_monitor import DatabaseMonitor
    database_monitor = DatabaseMonitor()
except ImportError:
    database_monitor = None

try:
    from utils.backup_manager import BackupManager
    backup_manager = BackupManager()
except ImportError:
    backup_manager = None

# Import the newer subsystems
from utils.rate_limiter import limit_requests, rate_limiter, cleanup_rate_limiter
from utils.security import init_security, require_secure_headers, security_check
from utils.permissions import init_permission_helpers, require_permission, Permission, check_permission
from utils.analytics import analytics_engine, track_event, get_dashboard_stats

# Import the printer monitor
from utils.printer_monitor import printer_monitor

# Initialize the Flask app
app = Flask(__name__)
app.secret_key = SECRET_KEY
app.config["PERMANENT_SESSION_LIFETIME"] = SESSION_LIFETIME
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["WTF_CSRF_ENABLED"] = True

# Initialize CSRF protection
csrf = CSRFProtect(app)

# Initialize the security system
app = init_security(app)

# Register permission template helpers
init_permission_helpers(app)

# Register template helpers
register_template_helpers(app)

# CSRF error handler - corrected version for Flask-WTF 1.2.1+
@app.errorhandler(CSRFError)
def csrf_error(error):
    """Handles CSRF errors and returns detailed information."""
    app_logger.error(f"CSRF-Fehler für {request.path}: {error}")
    flash("Sie haben keine Berechtigung für den Admin-Bereich.", "error")
    return redirect(url_for("index"))

    if request.path.startswith('/api/'):
        # For API requests: JSON response
        return jsonify({
            "error": "CSRF-Token fehlt oder ungültig",
            "reason": str(error),
            "help": "Fügen Sie ein gültiges CSRF-Token zu Ihrer Anfrage hinzu"
        }), 400
    else:
        # For regular requests: redirect back to the originating page
        flash("Sicherheitsfehler: Anfrage wurde abgelehnt. Bitte versuchen Sie es erneut.", "error")
        return redirect(request.url)

# Register blueprints
app.register_blueprint(guest_blueprint)
app.register_blueprint(calendar_blueprint)
app.register_blueprint(users_blueprint)
    try:
        # Read the form data
        name = request.form.get("name", "").strip()
        ip_address = request.form.get("ip_address", "").strip()
        model = request.form.get("model", "").strip()
        location = request.form.get("location", "").strip()
        description = request.form.get("description", "").strip()
        status = request.form.get("status", "available").strip()

        # Check required fields
        if not name or not ip_address:
            flash("Name und IP-Adresse sind erforderlich.", "error")
            return redirect(url_for("admin_edit_printer_page", printer_id=printer_id))

        # Validate the IP address
        import re
        ip_pattern = r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
        if not re.match(ip_pattern, ip_address):
            flash("Ungültige IP-Adresse.", "error")
            return redirect(url_for("admin_edit_printer_page", printer_id=printer_id))

        db_session = get_db_session()

        printer = db_session.query(Printer).get(printer_id)
        if not printer:
            db_session.close()
            flash("Drucker nicht gefunden.", "error")
            return redirect(url_for("admin_page", tab="printers"))

        # Update the printer
        printer.name = name
        printer.model = model
        printer.location = location
        printer.description = description
        printer.plug_ip = ip_address
        printer.status = status

        db_session.commit()
        db_session.close()

        printers_logger.info(f"Drucker '{printer.name}' (ID: {printer_id}) aktualisiert von Admin {current_user.id}")
        flash(f"Drucker '{printer.name}' erfolgreich aktualisiert.", "success")
        return redirect(url_for("admin_page", tab="printers"))

    except Exception as e:
        printers_logger.error(f"Fehler beim Aktualisieren eines Druckers über Form: {str(e)}")
        flash("Fehler beim Aktualisieren des Druckers.", "error")
        return redirect(url_for("admin_edit_printer_page", printer_id=printer_id))
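
For reference, a tiny standalone illustration, not part of the commit, of what the IPv4 pattern used above accepts and rejects:

# Illustration only: the same pattern used in admin_update_printer_form.
import re

ip_pattern = r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'
assert re.match(ip_pattern, "192.168.0.104")
assert not re.match(ip_pattern, "300.1.1.1")  # octets above 255 are rejected
assert not re.match(ip_pattern, "10.0.0")     # too few octets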

# Initialize the login manager
login_manager = LoginManager()

@@ -4719,7 +4856,7 @@ def user_update_profile():
 @login_required
 def user_api_update_settings():
     """API endpoint for settings updates (JSON)"""
-    return user_update_profile()
+    return user_update_settings()

 @app.route("/user/update-settings", methods=["POST"])
 @login_required

@@ -6640,6 +6777,223 @@ def get_stats():
        app_logger.error(f"Fehler beim Abrufen der Statistiken: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/stats/charts/job-status", methods=["GET"])
@login_required
def get_job_status_chart_data():
    """Returns chart data for the job status distribution."""
    try:
        db_session = get_db_session()

        # Count jobs by status
        job_status_counts = {
            'completed': db_session.query(Job).filter(Job.status == 'completed').count(),
            'failed': db_session.query(Job).filter(Job.status == 'failed').count(),
            'cancelled': db_session.query(Job).filter(Job.status == 'cancelled').count(),
            'running': db_session.query(Job).filter(Job.status == 'running').count(),
            'scheduled': db_session.query(Job).filter(Job.status == 'scheduled').count()
        }

        db_session.close()

        chart_data = {
            'labels': ['Abgeschlossen', 'Fehlgeschlagen', 'Abgebrochen', 'Läuft', 'Geplant'],
            'datasets': [{
                'label': 'Anzahl Jobs',
                'data': [
                    job_status_counts['completed'],
                    job_status_counts['failed'],
                    job_status_counts['cancelled'],
                    job_status_counts['running'],
                    job_status_counts['scheduled']
                ],
                'backgroundColor': [
                    '#10b981',  # green for completed
                    '#ef4444',  # red for failed
                    '#6b7280',  # grey for cancelled
                    '#3b82f6',  # blue for running
                    '#f59e0b'   # orange for scheduled
                ]
            }]
        }

        return jsonify(chart_data)

    except Exception as e:
        app_logger.error(f"Fehler beim Abrufen der Job-Status-Diagrammdaten: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/stats/charts/printer-usage", methods=["GET"])
@login_required
def get_printer_usage_chart_data():
    """Returns chart data for printer usage."""
    try:
        db_session = get_db_session()

        # Printers with their job counts
        printer_usage = db_session.query(
            Printer.name,
            func.count(Job.id).label('job_count')
        ).outerjoin(Job).group_by(Printer.id, Printer.name).all()

        db_session.close()

        chart_data = {
            'labels': [usage[0] for usage in printer_usage],
            'datasets': [{
                'label': 'Anzahl Jobs',
                'data': [usage[1] for usage in printer_usage],
                'backgroundColor': '#3b82f6',
                'borderColor': '#1d4ed8',
                'borderWidth': 1
            }]
        }

        return jsonify(chart_data)

    except Exception as e:
        app_logger.error(f"Fehler beim Abrufen der Drucker-Nutzung-Diagrammdaten: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/stats/charts/jobs-timeline", methods=["GET"])
@login_required
def get_jobs_timeline_chart_data():
    """Returns chart data for the jobs timeline of the last 30 days."""
    try:
        db_session = get_db_session()

        # Last 30 days
        end_date = datetime.now().date()
        start_date = end_date - timedelta(days=30)

        # Jobs per day over the last 30 days
        daily_jobs = db_session.query(
            func.date(Job.created_at).label('date'),
            func.count(Job.id).label('count')
        ).filter(
            func.date(Job.created_at) >= start_date,
            func.date(Job.created_at) <= end_date
        ).group_by(func.date(Job.created_at)).all()

        # Fill in every day (including days without jobs)
        date_dict = {job_date: count for job_date, count in daily_jobs}

        labels = []
        data = []
        current_date = start_date

        while current_date <= end_date:
            labels.append(current_date.strftime('%d.%m'))
            data.append(date_dict.get(current_date, 0))
            current_date += timedelta(days=1)

        db_session.close()

        chart_data = {
            'labels': labels,
            'datasets': [{
                'label': 'Jobs pro Tag',
                'data': data,
                'fill': True,
                'backgroundColor': 'rgba(59, 130, 246, 0.1)',
                'borderColor': '#3b82f6',
                'tension': 0.4
            }]
        }

        return jsonify(chart_data)

    except Exception as e:
        app_logger.error(f"Fehler beim Abrufen der Jobs-Timeline-Diagrammdaten: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/stats/charts/user-activity", methods=["GET"])
@login_required
def get_user_activity_chart_data():
    """Returns chart data for top user activity."""
    try:
        db_session = get_db_session()

        # Top 10 users by job count
        top_users = db_session.query(
            User.username,
            func.count(Job.id).label('job_count')
        ).join(Job).group_by(
            User.id, User.username
        ).order_by(
            func.count(Job.id).desc()
        ).limit(10).all()

        db_session.close()

        chart_data = {
            'labels': [user[0] for user in top_users],
            'datasets': [{
                'label': 'Anzahl Jobs',
                'data': [user[1] for user in top_users],
                'backgroundColor': '#8b5cf6',
                'borderColor': '#7c3aed',
                'borderWidth': 1
            }]
        }

        return jsonify(chart_data)

    except Exception as e:
        app_logger.error(f"Fehler beim Abrufen der Benutzer-Aktivität-Diagrammdaten: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/stats/export", methods=["GET"])
@login_required
def export_stats():
    """Exports statistics as CSV."""
    try:
        db_session = get_db_session()

        # Collect basic statistics
        total_users = db_session.query(User).count()
        total_printers = db_session.query(Printer).count()
        total_jobs = db_session.query(Job).count()
        completed_jobs = db_session.query(Job).filter(Job.status == "completed").count()
        failed_jobs = db_session.query(Job).filter(Job.status == "failed").count()

        # Build the CSV content
        import io
        import csv

        output = io.StringIO()
        writer = csv.writer(output)

        # Header
        writer.writerow(['Metrik', 'Wert'])

        # Data rows
        writer.writerow(['Gesamte Benutzer', total_users])
        writer.writerow(['Gesamte Drucker', total_printers])
        writer.writerow(['Gesamte Jobs', total_jobs])
        writer.writerow(['Abgeschlossene Jobs', completed_jobs])
        writer.writerow(['Fehlgeschlagene Jobs', failed_jobs])
        writer.writerow(['Erfolgsrate (%)', round((completed_jobs / total_jobs * 100), 2) if total_jobs > 0 else 0])
        writer.writerow(['Exportiert am', datetime.now().strftime('%d.%m.%Y %H:%M:%S')])

        db_session.close()

        # Prepare the response
        output.seek(0)

        response = Response(
            output.getvalue(),
            mimetype='text/csv',
            headers={
                'Content-Disposition': f'attachment; filename=statistiken_{datetime.now().strftime("%Y%m%d_%H%M%S")}.csv'
            }
        )

        return response

    except Exception as e:
        app_logger.error(f"Fehler beim Exportieren der Statistiken: {str(e)}")
        return jsonify({"error": "Interner Serverfehler"}), 500

@app.route("/api/admin/users", methods=["GET"])
@login_required
def get_users():

@@ -9158,132 +9512,6 @@ def auto_optimize_jobs():
            'error': f'Optimierung fehlgeschlagen: {str(e)}'
        }), 500

@app.route('/api/jobs/batch-operation', methods=['POST'])
@login_required
def perform_batch_operation():
    """Applies a batch operation to multiple jobs."""
    try:
        data = request.get_json()
        job_ids = data.get('job_ids', [])
        operation = data.get('operation', '')

        if not job_ids:
            return jsonify({
                'success': False,
                'error': 'Keine Job-IDs für Batch-Operation angegeben'
            }), 400

        if not operation:
            return jsonify({
                'success': False,
                'error': 'Keine Operation für Batch-Verarbeitung angegeben'
            }), 400

        db_session = get_db_session()

        # Fetch the jobs (own jobs only, unless the user has admin rights)
        if current_user.is_admin:
            jobs = db_session.query(Job).filter(Job.id.in_(job_ids)).all()
        else:
            jobs = db_session.query(Job).filter(
                Job.id.in_(job_ids),
                Job.user_id == int(current_user.id)
            ).all()

        if not jobs:
            db_session.close()
            return jsonify({
                'success': False,
                'error': 'Keine berechtigten Jobs für Batch-Operation gefunden'
            }), 403

        processed_count = 0
        error_count = 0

        # Apply the batch operation
        for job in jobs:
            try:
                if operation == 'start':
                    if job.status in ['queued', 'pending']:
                        job.status = 'running'
                        job.start_time = datetime.now()
                        processed_count += 1

                elif operation == 'pause':
                    if job.status == 'running':
                        job.status = 'paused'
                        processed_count += 1

                elif operation == 'cancel':
                    if job.status in ['queued', 'pending', 'running', 'paused']:
                        job.status = 'cancelled'
                        job.end_time = datetime.now()
                        processed_count += 1

                elif operation == 'delete':
                    if job.status in ['completed', 'cancelled', 'failed']:
                        db_session.delete(job)
                        processed_count += 1

                elif operation == 'restart':
                    if job.status in ['failed', 'cancelled']:
                        job.status = 'queued'
                        job.start_time = None
                        job.end_time = None
                        processed_count += 1

                elif operation == 'priority_high':
                    job.priority = 'high'
                    processed_count += 1

                elif operation == 'priority_normal':
                    job.priority = 'normal'
                    processed_count += 1

                else:
                    jobs_logger.warning(f"Unbekannte Batch-Operation: {operation}")
                    error_count += 1

            except Exception as job_error:
                jobs_logger.error(f"Fehler bei Job {job.id} in Batch-Operation {operation}: {str(job_error)}")
                error_count += 1

        db_session.commit()

        # Create a system log entry
        log_entry = SystemLog(
            level='INFO',
            component='batch_operations',
            message=f'Batch-Operation "{operation}" durchgeführt: {processed_count} Jobs verarbeitet',
            user_id=current_user.id,
            details=json.dumps({
                'operation': operation,
                'processed_jobs': processed_count,
                'error_count': error_count,
                'job_ids': job_ids
            })
        )
        db_session.add(log_entry)
        db_session.commit()
        db_session.close()

        jobs_logger.info(f"Batch-Operation {operation} durchgeführt: {processed_count} Jobs verarbeitet, {error_count} Fehler")

        return jsonify({
            'success': True,
            'processed_jobs': processed_count,
            'error_count': error_count,
            'operation': operation,
            'message': f'Batch-Operation erfolgreich: {processed_count} Jobs verarbeitet'
        })

    except Exception as e:
        app_logger.error(f"Fehler bei Batch-Operation: {str(e)}")
        return jsonify({
            'success': False,
            'error': f'Batch-Operation fehlgeschlagen: {str(e)}'
        }), 500
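
For context, a hedged sketch, not part of the commit, of the request a client would send to this endpoint. The operation names come from the branches above; the host, port and authenticated session are assumptions, and with WTF_CSRF_ENABLED a CSRF token header may also be required.

# Illustration only: call the batch endpoint with requests.
import requests

session = requests.Session()  # assumed to already carry a valid login cookie
resp = session.post(
    "http://localhost:5000/api/jobs/batch-operation",   # assumed host and port
    json={"job_ids": [12, 13, 14], "operation": "cancel"},
)
print(resp.status_code, resp.json())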

@app.route('/api/optimization/settings', methods=['GET', 'POST'])
@login_required
def optimization_settings():