"""Admin blueprint: dashboard, audit log, backups, security and license-server routes."""
import os
|
|
from datetime import datetime, timedelta
|
|
from zoneinfo import ZoneInfo
|
|
from pathlib import Path
|
|
from flask import Blueprint, render_template, request, redirect, session, url_for, flash, send_file, jsonify, current_app
|
|
|
|
import config
|
|
from auth.decorators import login_required
|
|
from utils.audit import log_audit
|
|
from utils.backup import create_backup, restore_backup
|
|
from utils.network import get_client_ip
|
|
from db import get_connection, get_db_connection, get_db_cursor, execute_query
|
|
from utils.export import create_excel_export, prepare_audit_export_data
|
|
|
|
# Create Blueprint
|
|
admin_bp = Blueprint('admin', __name__)
|
|
|
|
|
|
@admin_bp.route("/")
@login_required
def dashboard():
    """Render the admin dashboard.

    Collects basic license/customer/session statistics plus the top
    licenses and the latest audit-log entries.  On any error the user is
    redirected to the login page with a flash message instead of seeing
    a broken dashboard.
    """
    try:
        conn = get_connection()
        cur = conn.cursor()
        try:
            # COUNT(*) always returns exactly one row, so fetchone()[0] is
            # safe.  The previous `cur.rowcount > 0` guards were removed:
            # rowcount is not a reliable signal after a SELECT in DB-API
            # drivers, and fetchall() already yields [] when empty.
            cur.execute("SELECT COUNT(*) FROM licenses WHERE is_active = true")
            active_licenses = cur.fetchone()[0]

            cur.execute("SELECT COUNT(*) FROM customers")
            total_customers = cur.fetchone()[0]

            cur.execute("SELECT COUNT(*) FROM sessions WHERE is_active = true")
            active_sessions = cur.fetchone()[0]

            # Top 10 licenses by number of sessions (simplified overview).
            cur.execute("""
                SELECT
                    l.license_key,
                    c.name as customer_name,
                    COUNT(s.id) as session_count
                FROM licenses l
                LEFT JOIN customers c ON l.customer_id = c.id
                LEFT JOIN sessions s ON l.id = s.license_id
                GROUP BY l.license_key, c.name
                ORDER BY session_count DESC
                LIMIT 10
            """)
            top_licenses = cur.fetchall()

            # Most recent audit-log entries (simplified).
            cur.execute("""
                SELECT
                    id,
                    timestamp,
                    username,
                    action,
                    additional_info
                FROM audit_log
                ORDER BY timestamp DESC
                LIMIT 10
            """)
            recent_activities = cur.fetchall()

            # Stats object for the template.  The zero/static values are
            # placeholders the template expects until the corresponding
            # queries are implemented.
            # NOTE(review): 'total_licenses' is populated with the count of
            # *active* licenses — presumably intentional, but verify.
            stats = {
                'total_customers': total_customers,
                'total_licenses': active_licenses,
                'active_sessions': active_sessions,
                'active_licenses': active_licenses,
                'full_licenses': 0,
                'test_licenses': 0,
                'test_data_count': 0,
                'test_customers_count': 0,
                'test_resources_count': 0,
                'expired_licenses': 0,
                'inactive_licenses': 0,
                'last_backup': None,
                'security_level': 'success',
                'security_level_text': 'Sicher',
                'blocked_ips_count': 0,
                'failed_attempts_today': 0,
                'recent_security_events': [],
                'expiring_licenses': [],
                'recent_licenses': []
            }

            # Resource stats: one all-zero entry per resource type; the
            # template relies on these exact keys being present.
            empty_pool = {
                'available': 0,
                'allocated': 0,
                'quarantine': 0,
                'total': 0,
                'available_percent': 100
            }
            resource_stats = {
                rtype: dict(empty_pool) for rtype in ('domain', 'ipv4', 'phone')
            }

            license_distribution = []
            hourly_sessions = []

            return render_template('dashboard.html',
                                   stats=stats,
                                   top_licenses=top_licenses,
                                   recent_activities=recent_activities,
                                   license_distribution=license_distribution,
                                   hourly_sessions=hourly_sessions,
                                   resource_stats=resource_stats,
                                   username=session.get('username'))
        finally:
            cur.close()
            conn.close()

    except Exception as e:
        current_app.logger.error(f"Dashboard error: {str(e)}")
        current_app.logger.error(f"Error type: {type(e).__name__}")
        import traceback
        current_app.logger.error(f"Traceback: {traceback.format_exc()}")
        flash(f'Dashboard-Fehler: {str(e)}', 'error')
        return redirect(url_for('auth.login'))
|
|
|
|
|
|
@admin_bp.route("/audit")
@login_required
def audit_log():
    """Paginated, filterable view of the audit log.

    Supports a free-text search over several columns plus exact-match
    filters on action and entity type.  Results are paginated 50 per page.
    """
    import json  # hoisted: was re-imported on every loop iteration below

    page = request.args.get('page', 1, type=int)
    per_page = 50
    search = request.args.get('search', '')
    action_filter = request.args.get('action', '')
    entity_filter = request.args.get('entity', '')

    conn = get_connection()
    cur = conn.cursor()

    try:
        # Base query; `WHERE 1=1` lets filters be appended uniformly.
        query = """
            SELECT
                id,
                timestamp AT TIME ZONE 'Europe/Berlin' as timestamp,
                username,
                action,
                entity_type,
                entity_id,
                old_values::text,
                new_values::text,
                ip_address,
                user_agent,
                additional_info
            FROM audit_log
            WHERE 1=1
        """
        params = []

        # Free-text search across several columns (one param per column).
        if search:
            query += """ AND (
                username ILIKE %s OR
                action ILIKE %s OR
                entity_type ILIKE %s OR
                additional_info ILIKE %s OR
                ip_address ILIKE %s
            )"""
            search_param = f"%{search}%"
            params.extend([search_param] * 5)

        # Exact-match action filter.
        if action_filter:
            query += " AND action = %s"
            params.append(action_filter)

        # Exact-match entity filter.
        if entity_filter:
            query += " AND entity_type = %s"
            params.append(entity_filter)

        # Total matching rows (before pagination).
        count_query = f"SELECT COUNT(*) FROM ({query}) as filtered"
        cur.execute(count_query, params)
        total_count = cur.fetchone()[0]

        # Add pagination to the main query.
        query += " ORDER BY timestamp DESC LIMIT %s OFFSET %s"
        params.extend([per_page, (page - 1) * per_page])

        cur.execute(query, params)
        logs = cur.fetchall()

        # Distinct values for the filter dropdowns.
        cur.execute("SELECT DISTINCT action FROM audit_log ORDER BY action")
        actions = [row[0] for row in cur.fetchall()]

        cur.execute("SELECT DISTINCT entity_type FROM audit_log ORDER BY entity_type")
        entities = [row[0] for row in cur.fetchall()]

        # Pagination info (ceiling division).
        total_pages = (total_count + per_page - 1) // per_page

        def _maybe_json(value):
            """Decode a JSON text column; fall back to the raw value.

            FIX: previously used bare `except:` clauses, which would also
            swallow KeyboardInterrupt/SystemExit; now only the decode
            failures are caught.
            """
            if not value:
                return None
            try:
                return json.loads(value)
            except (TypeError, ValueError):
                return value

        # Convert rows to dicts for easier template access.
        columns = ('id', 'timestamp', 'username', 'action', 'entity_type',
                   'entity_id', 'old_values', 'new_values', 'ip_address',
                   'user_agent', 'additional_info')
        audit_logs = []
        for log in logs:
            entry = dict(zip(columns, log))
            entry['old_values'] = _maybe_json(entry['old_values'])
            entry['new_values'] = _maybe_json(entry['new_values'])
            audit_logs.append(entry)

        return render_template('audit_log.html',
                               logs=audit_logs,
                               page=page,
                               total_pages=total_pages,
                               total_count=total_count,
                               search=search,
                               action_filter=action_filter,
                               entity_filter=entity_filter,
                               actions=actions,
                               entities=entities,
                               username=session.get('username'))

    finally:
        cur.close()
        conn.close()
|
|
|
|
|
|
@admin_bp.route("/backups")
@login_required
def backups():
    """List all recorded backups and flag whether each file still exists."""
    db_conn = get_connection()
    db_cur = db_conn.cursor()

    try:
        # Fetch every backup record, newest first.
        db_cur.execute("""
            SELECT
                id,
                filename,
                created_at AT TIME ZONE 'Europe/Berlin' as created_at,
                filesize,
                backup_type,
                status,
                created_by,
                duration_seconds,
                tables_count,
                records_count,
                error_message,
                is_encrypted
            FROM backup_history
            ORDER BY created_at DESC
        """)

        # Column names in SELECT order, used to build dicts for the template.
        columns = ('id', 'filename', 'created_at', 'filesize', 'backup_type',
                   'status', 'created_by', 'duration_seconds', 'tables_count',
                   'records_count', 'error_message', 'is_encrypted')

        backups_with_status = []
        for row in db_cur.fetchall():
            entry = dict(zip(columns, row))
            # Mark whether the backup file is still present on disk.
            name = entry['filename']
            entry['file_exists'] = bool(name) and (config.BACKUP_DIR / name).exists()
            backups_with_status.append(entry)

        return render_template('backups.html',
                               backups=backups_with_status,
                               username=session.get('username'))

    finally:
        db_cur.close()
        db_conn.close()
|
|
|
|
|
|
@admin_bp.route("/backup/create", methods=["POST"])
@login_required
def create_backup_route():
    """Create a manual backup.

    Returns JSON with a success flag and a human-readable message;
    HTTP 500 on failure.  (The previous local `from flask import jsonify`
    was redundant — jsonify is already imported at module level.)
    """
    success, result = create_backup(backup_type="manual", created_by=session.get('username'))

    if success:
        return jsonify({
            'success': True,
            'message': f'Backup erfolgreich erstellt: {result}'
        })
    return jsonify({
        'success': False,
        'message': f'Backup fehlgeschlagen: {result}'
    }), 500
|
|
|
|
|
|
@admin_bp.route("/backup/restore/<int:backup_id>", methods=["POST"])
@login_required
def restore_backup_route(backup_id):
    """Restore a backup, optionally decrypting it with a user-supplied key.

    Returns JSON with a success flag and message; HTTP 500 on failure.
    (The previous local `from flask import jsonify` was redundant —
    jsonify is already imported at module level.)
    """
    encryption_key = request.form.get('encryption_key')

    success, message = restore_backup(backup_id, encryption_key)

    if success:
        return jsonify({
            'success': True,
            'message': message
        })
    return jsonify({
        'success': False,
        'message': f'Wiederherstellung fehlgeschlagen: {message}'
    }), 500
|
|
|
|
|
|
@admin_bp.route("/backup/download/<int:backup_id>")
@login_required
def download_backup(backup_id):
    """Send a backup file to the browser as an attachment.

    Redirects back to the backup list with a flash message when either
    the database record or the file on disk is missing.
    """
    conn = get_connection()
    cur = conn.cursor()

    try:
        # Look up the backup record.
        cur.execute("SELECT filename, filepath FROM backup_history WHERE id = %s", (backup_id,))
        result = cur.fetchone()

        if not result:
            flash('Backup nicht gefunden', 'error')
            return redirect(url_for('admin.backups'))

        filename, filepath = result
        filepath = Path(filepath)

        if not filepath.exists():
            flash('Backup-Datei nicht gefunden', 'error')
            return redirect(url_for('admin.backups'))

        # FIX: the audit message previously contained the literal text
        # "(unknown)" instead of the downloaded file's name.
        log_audit('BACKUP_DOWNLOAD', 'backup', backup_id,
                  additional_info=f"Backup heruntergeladen: {filename}")

        return send_file(filepath, as_attachment=True, download_name=filename)

    finally:
        cur.close()
        conn.close()
|
|
|
|
|
|
@admin_bp.route("/backup/delete/<int:backup_id>", methods=["DELETE"])
@login_required
def delete_backup(backup_id):
    """Delete a backup: remove the file on disk, then the history row.

    Returns JSON; 404 when the record is unknown, 500 when the file
    cannot be removed or the DB delete fails.
    """
    conn = get_connection()
    cur = conn.cursor()

    try:
        # Look up the backup record.
        cur.execute("SELECT filename, filepath FROM backup_history WHERE id = %s", (backup_id,))
        result = cur.fetchone()

        if not result:
            return jsonify({'success': False, 'message': 'Backup nicht gefunden'}), 404

        filename, filepath = result
        filepath = Path(filepath)

        # Remove the file first; abort without touching the DB row if
        # deletion fails so the record still points at the stray file.
        if filepath.exists():
            try:
                filepath.unlink()
            except Exception as e:
                return jsonify({'success': False, 'message': f'Fehler beim Löschen der Datei: {str(e)}'}), 500

        # Remove the database entry.
        cur.execute("DELETE FROM backup_history WHERE id = %s", (backup_id,))
        conn.commit()

        # FIX: the audit message previously contained the literal text
        # "(unknown)" instead of the deleted file's name.
        log_audit('BACKUP_DELETE', 'backup', backup_id,
                  additional_info=f"Backup gelöscht: {filename}")

        return jsonify({'success': True, 'message': 'Backup erfolgreich gelöscht'})

    except Exception as e:
        conn.rollback()
        return jsonify({'success': False, 'message': str(e)}), 500

    finally:
        cur.close()
        conn.close()
|
|
|
|
|
|
@admin_bp.route("/security/blocked-ips")
@login_required
def blocked_ips():
    """Show currently blocked IPs plus the 100 most recent login attempts."""
    db_conn = get_connection()
    db_cur = db_conn.cursor()

    try:
        # IPs whose block window has not yet expired.
        db_cur.execute("""
            SELECT
                ip_address,
                attempt_count,
                last_attempt AT TIME ZONE 'Europe/Berlin' as last_attempt,
                blocked_until AT TIME ZONE 'Europe/Berlin' as blocked_until,
                last_username_tried,
                last_error_message
            FROM login_attempts
            WHERE blocked_until IS NOT NULL AND blocked_until > CURRENT_TIMESTAMP
            ORDER BY blocked_until DESC
        """)
        currently_blocked = db_cur.fetchall()

        # Recent login attempts, blocked or not.
        db_cur.execute("""
            SELECT
                ip_address,
                attempt_count,
                last_attempt AT TIME ZONE 'Europe/Berlin' as last_attempt,
                blocked_until AT TIME ZONE 'Europe/Berlin' as blocked_until,
                last_username_tried,
                last_error_message
            FROM login_attempts
            ORDER BY last_attempt DESC
            LIMIT 100
        """)
        recent_attempts = db_cur.fetchall()

        return render_template('blocked_ips.html',
                               blocked_ips=currently_blocked,
                               all_attempts=recent_attempts,
                               username=session.get('username'))

    finally:
        db_cur.close()
        db_conn.close()
|
|
|
|
|
|
@admin_bp.route("/security/unblock-ip", methods=["POST"])
@login_required
def unblock_ip():
    """Clear the block on a single IP address and log the action."""
    ip_address = request.form.get('ip_address')

    # Guard clause: nothing to do without an IP.
    if not ip_address:
        flash('Keine IP-Adresse angegeben', 'error')
        return redirect(url_for('admin.blocked_ips'))

    db_conn = get_connection()
    db_cur = db_conn.cursor()

    try:
        db_cur.execute("""
            UPDATE login_attempts
            SET blocked_until = NULL
            WHERE ip_address = %s
        """, (ip_address,))

        if db_cur.rowcount:
            db_conn.commit()
            flash(f'IP-Adresse {ip_address} wurde entsperrt', 'success')
            log_audit('UNBLOCK_IP', 'security',
                      additional_info=f"IP-Adresse entsperrt: {ip_address}")
        else:
            # No matching row — nothing was unblocked.
            flash(f'IP-Adresse {ip_address} nicht gefunden', 'warning')

    except Exception as e:
        db_conn.rollback()
        flash(f'Fehler beim Entsperren: {str(e)}', 'error')

    finally:
        db_cur.close()
        db_conn.close()

    return redirect(url_for('admin.blocked_ips'))
|
|
|
|
|
|
@admin_bp.route("/security/clear-attempts", methods=["POST"])
@login_required
def clear_attempts():
    """Delete every recorded login attempt (blocked and unblocked alike)."""
    db_conn = get_connection()
    db_cur = db_conn.cursor()

    try:
        db_cur.execute("DELETE FROM login_attempts")
        deleted = db_cur.rowcount
        db_conn.commit()

        flash(f'{deleted} Login-Versuche wurden gelöscht', 'success')
        log_audit('CLEAR_LOGIN_ATTEMPTS', 'security',
                  additional_info=f"{deleted} Login-Versuche gelöscht")

    except Exception as e:
        db_conn.rollback()
        flash(f'Fehler beim Löschen: {str(e)}', 'error')

    finally:
        db_cur.close()
        db_conn.close()

    return redirect(url_for('admin.blocked_ips'))
|
|
|
|
|
|
# ===================== LICENSE SERVER MONITORING ROUTES =====================
|
|
|
|
@admin_bp.route("/lizenzserver/monitor")
@login_required
def license_monitor():
    """Legacy entry point: forwards to the new analytics page."""
    target = url_for('monitoring.analytics')
    return redirect(target)
|
|
|
|
|
|
@admin_bp.route("/lizenzserver/analytics")
@login_required
def license_analytics():
    """License usage analytics over a selectable time window.

    The `days` query parameter (default 30) controls the look-back window
    for trends, per-license metrics and revenue analysis.
    """
    try:
        conn = get_connection()
        cur = conn.cursor()

        # Time range from query params.
        days = int(request.args.get('days', 30))

        # Usage trends over time.
        # FIX: placeholders must never appear inside a quoted SQL literal
        # (the old `INTERVAL '%s days'`); interval arithmetic with a real
        # parameter is the documented-safe form.
        cur.execute("""
            SELECT DATE(timestamp) as date,
                   COUNT(DISTINCT license_id) as unique_licenses,
                   COUNT(DISTINCT hardware_id) as unique_devices,
                   COUNT(*) as total_validations
            FROM license_heartbeats
            WHERE timestamp > NOW() - INTERVAL '1 day' * %s
            GROUP BY date
            ORDER BY date
        """, (days,))
        usage_trends = cur.fetchall()

        # License performance metrics.
        # NOTE(review): the WHERE condition on lh.timestamp effectively
        # turns the LEFT JOIN into an inner join (licenses without recent
        # heartbeats drop out) — kept as-is to preserve behavior.
        cur.execute("""
            SELECT l.id, l.license_key, c.name as customer_name,
                   COUNT(DISTINCT lh.hardware_id) as device_count,
                   l.max_devices,
                   COUNT(*) as total_validations,
                   COUNT(DISTINCT DATE(lh.timestamp)) as active_days,
                   MIN(lh.timestamp) as first_seen,
                   MAX(lh.timestamp) as last_seen
            FROM licenses l
            JOIN customers c ON l.customer_id = c.id
            LEFT JOIN license_heartbeats lh ON l.id = lh.license_id
            WHERE lh.timestamp > NOW() - INTERVAL '1 day' * %s
            GROUP BY l.id, l.license_key, c.name, l.max_devices
            ORDER BY total_validations DESC
        """, (days,))
        license_metrics = cur.fetchall()

        # Device distribution per configured device limit (fixed 30-day
        # window, independent of `days`).
        cur.execute("""
            SELECT l.max_devices as limit,
                   COUNT(*) as license_count,
                   AVG(device_count) as avg_usage
            FROM licenses l
            LEFT JOIN (
                SELECT license_id, COUNT(DISTINCT hardware_id) as device_count
                FROM license_heartbeats
                WHERE timestamp > NOW() - INTERVAL '30 days'
                GROUP BY license_id
            ) usage ON l.id = usage.license_id
            WHERE l.is_active = true
            GROUP BY l.max_devices
            ORDER BY l.max_devices
        """)
        device_distribution = cur.fetchall()

        # Revenue analysis by license type.
        cur.execute("""
            SELECT l.license_type,
                   COUNT(DISTINCT l.id) as license_count,
                   COUNT(DISTINCT CASE WHEN lh.license_id IS NOT NULL THEN l.id END) as active_licenses,
                   COUNT(DISTINCT lh.hardware_id) as total_devices
            FROM licenses l
            LEFT JOIN license_heartbeats lh ON l.id = lh.license_id
                AND lh.timestamp > NOW() - INTERVAL '1 day' * %s
            GROUP BY l.license_type
        """, (days,))
        revenue_analysis = cur.fetchall()

        return render_template('license_analytics.html',
                               days=days,
                               usage_trends=usage_trends,
                               license_metrics=license_metrics,
                               device_distribution=device_distribution,
                               revenue_analysis=revenue_analysis
                               )

    except Exception as e:
        flash(f'Fehler beim Laden der Analytics-Daten: {str(e)}', 'error')
        return render_template('license_analytics.html', days=30)
    finally:
        # conn/cur may be unbound if get_connection() itself raised.
        if 'cur' in locals():
            cur.close()
        if 'conn' in locals():
            conn.close()
|
|
|
|
|
|
@admin_bp.route("/lizenzserver/anomalies")
@login_required
def license_anomalies():
    """List detected anomalies with severity/resolved filters and stats."""
    try:
        conn = get_connection()
        cur = conn.cursor()

        # Filter parameters from the query string.
        severity = request.args.get('severity', 'all')
        resolved = request.args.get('resolved', 'false')

        # Base query; filter clauses are appended below.
        query = """
            SELECT ad.*, l.license_key, c.name as customer_name, c.email
            FROM anomaly_detections ad
            LEFT JOIN licenses l ON ad.license_id = l.id
            LEFT JOIN customers c ON l.customer_id = c.id
            WHERE 1=1
        """
        params = []

        if severity != 'all':
            query += " AND ad.severity = %s"
            params.append(severity)

        # 'false'/'true' narrow the result; any other value means "all".
        # Only these two whitelisted literals are ever interpolated.
        if resolved in ('false', 'true'):
            query += f" AND ad.resolved = {resolved}"

        query += " ORDER BY ad.detected_at DESC LIMIT 100"

        cur.execute(query, params)
        anomalies = cur.fetchall()

        # Unresolved anomaly counts grouped by type and severity.
        cur.execute("""
            SELECT anomaly_type, severity, COUNT(*) as count
            FROM anomaly_detections
            WHERE resolved = false
            GROUP BY anomaly_type, severity
            ORDER BY count DESC
        """)
        anomaly_stats = cur.fetchall()

        return render_template('license_anomalies.html',
                               anomalies=anomalies,
                               anomaly_stats=anomaly_stats,
                               severity=severity,
                               resolved=resolved
                               )

    except Exception as e:
        flash(f'Fehler beim Laden der Anomalie-Daten: {str(e)}', 'error')
        return render_template('license_anomalies.html',
                               anomalies=[],
                               anomaly_stats=[],
                               severity='all',
                               resolved='false'
                               )
    finally:
        if 'cur' in locals():
            cur.close()
        if 'conn' in locals():
            conn.close()
|
|
|
|
|
|
@admin_bp.route("/lizenzserver/anomaly/<anomaly_id>/resolve", methods=["POST"])
@login_required
def resolve_anomaly(anomaly_id):
    """Mark an anomaly as resolved, recording who resolved it and how."""
    try:
        conn = get_connection()
        cur = conn.cursor()

        action_taken = request.form.get('action_taken', '')
        resolver = session.get('username')

        # Stamp the record with resolver, timestamp and the action taken.
        cur.execute("""
            UPDATE anomaly_detections
            SET resolved = true,
                resolved_at = NOW(),
                resolved_by = %s,
                action_taken = %s
            WHERE id = %s
        """, (resolver, action_taken, str(anomaly_id)))
        conn.commit()

        flash('Anomalie wurde als behoben markiert', 'success')
        log_audit('RESOLVE_ANOMALY', 'license_server', entity_id=str(anomaly_id),
                  additional_info=f"Action: {action_taken}")

    except Exception as e:
        if 'conn' in locals():
            conn.rollback()
        flash(f'Fehler beim Beheben der Anomalie: {str(e)}', 'error')
    finally:
        if 'cur' in locals():
            cur.close()
        if 'conn' in locals():
            conn.close()

    return redirect(url_for('admin.license_anomalies'))
|
|
|
|
|
|
@admin_bp.route("/lizenzserver/config")
@login_required
def license_config():
    """License server configuration: feature flags, API clients, rate limits."""
    try:
        conn = get_connection()
        cur = conn.cursor()

        def _rows(sql):
            # Run a read-only query and return all rows.
            cur.execute(sql)
            return cur.fetchall()

        feature_flags = _rows("""
            SELECT * FROM feature_flags
            ORDER BY feature_name
        """)

        api_clients = _rows("""
            SELECT id, client_name, api_key, is_active, created_at
            FROM api_clients
            ORDER BY created_at DESC
        """)

        rate_limits = _rows("""
            SELECT * FROM api_rate_limits
            ORDER BY api_key
        """)

        return render_template('license_config.html',
                               feature_flags=feature_flags,
                               api_clients=api_clients,
                               rate_limits=rate_limits
                               )

    except Exception as e:
        flash(f'Fehler beim Laden der Konfiguration: {str(e)}', 'error')
        return render_template('license_config.html')
    finally:
        if 'cur' in locals():
            cur.close()
        if 'conn' in locals():
            conn.close()
|
|
|
|
|
|
@admin_bp.route("/lizenzserver/config/feature-flag/<int:flag_id>", methods=["POST"])
@login_required
def update_feature_flag(flag_id):
    """Update a feature flag's enabled state and rollout percentage.

    Robustness fixes: non-numeric rollout input falls back to 0 instead
    of raising, and the percentage is clamped to the valid 0-100 range.
    """
    try:
        conn = get_connection()
        cur = conn.cursor()

        is_enabled = request.form.get('is_enabled') == 'on'
        try:
            rollout_percentage = int(request.form.get('rollout_percentage', 0))
        except (TypeError, ValueError):
            rollout_percentage = 0
        rollout_percentage = max(0, min(100, rollout_percentage))

        cur.execute("""
            UPDATE feature_flags
            SET is_enabled = %s,
                rollout_percentage = %s,
                updated_at = NOW()
            WHERE id = %s
        """, (is_enabled, rollout_percentage, flag_id))

        conn.commit()

        flash('Feature Flag wurde aktualisiert', 'success')
        log_audit('UPDATE_FEATURE_FLAG', 'license_server', entity_id=flag_id)

    except Exception as e:
        if 'conn' in locals():
            conn.rollback()
        flash(f'Fehler beim Aktualisieren: {str(e)}', 'error')
    finally:
        if 'cur' in locals():
            cur.close()
        if 'conn' in locals():
            conn.close()

    return redirect(url_for('admin.license_config'))
|
|
|
|
|
|
@admin_bp.route("/api/admin/lizenzserver/live-stats")
@login_required
def license_live_stats():
    """JSON endpoint with real-time license stats for AJAX polling."""
    try:
        conn = get_connection()
        cur = conn.cursor()

        # Aggregate activity over the last minute.
        cur.execute("""
            SELECT COUNT(DISTINCT license_id) as active_licenses,
                   COUNT(*) as validations_per_minute,
                   COUNT(DISTINCT hardware_id) as active_devices
            FROM license_heartbeats
            WHERE timestamp > NOW() - INTERVAL '1 minute'
        """)
        active_licenses, validations_per_minute, active_devices = cur.fetchone()

        # Five most recent validations.
        cur.execute("""
            SELECT l.license_key, lh.hardware_id, lh.ip_address, lh.timestamp
            FROM license_heartbeats lh
            JOIN licenses l ON lh.license_id = l.id
            ORDER BY lh.timestamp DESC
            LIMIT 5
        """)

        def _as_entry(key, hw_id, ip, ts):
            # Keys and hardware ids are truncated so the UI never shows
            # full identifiers.
            return {
                'license_key': key[:8] + '...',
                'hardware_id': hw_id[:8] + '...',
                'ip_address': ip or 'Unknown',
                'timestamp': ts.strftime('%H:%M:%S')
            }

        return jsonify({
            'active_licenses': active_licenses or 0,
            'validations_per_minute': validations_per_minute or 0,
            'active_devices': active_devices or 0,
            'latest_validations': [_as_entry(*row) for row in cur.fetchall()]
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500
    finally:
        if 'cur' in locals():
            cur.close()
        if 'conn' in locals():
            conn.close()
|
|
|
|
|
|
@admin_bp.route("/api/admin/license/auth-token")
@login_required
def get_analytics_token():
    """Issue a short-lived JWT for accessing the Analytics Service.

    The token is bound to the current session user and expires after
    one hour.
    """
    import jwt
    from datetime import datetime, timedelta, timezone

    # FIX: datetime.utcnow() is deprecated and returns a naive datetime;
    # use an aware UTC timestamp (PyJWT handles aware datetimes for
    # exp/iat).  Capturing `now` once also keeps exp and iat consistent.
    now = datetime.now(timezone.utc)

    payload = {
        'sub': session.get('user_id', 'admin'),
        'type': 'analytics_access',
        'exp': now + timedelta(hours=1),
        'iat': now
    }

    # Must match the secret configured in the analytics service.
    # SECURITY: the 'your-secret-key' fallback is unsafe outside local
    # development — ensure JWT_SECRET is always set in the environment.
    jwt_secret = os.environ.get('JWT_SECRET', 'your-secret-key')
    token = jwt.encode(payload, jwt_secret, algorithm='HS256')

    return jsonify({'token': token})
|