import os
import psycopg2
from psycopg2.extras import Json
from flask import Flask, render_template, request, redirect, session, url_for, send_file, jsonify, flash
from flask_session import Session
from functools import wraps
from dotenv import load_dotenv
import pandas as pd
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo
import io
import subprocess
import gzip
from cryptography.fernet import Fernet
from pathlib import Path
import time
from apscheduler.schedulers.background import BackgroundScheduler
import logging
import random
import hashlib
import requests
import secrets
import string
import re
import bcrypt
import pyotp
import qrcode
from io import BytesIO
import base64
import json
from werkzeug.middleware.proxy_fix import ProxyFix
from openpyxl.utils import get_column_letter

load_dotenv()

app = Flask(__name__)
app.config['SECRET_KEY'] = os.urandom(24)
app.config['SESSION_TYPE'] = 'filesystem'
app.config['JSON_AS_ASCII'] = False  # JSON-Ausgabe mit UTF-8
app.config['JSONIFY_MIMETYPE'] = 'application/json; charset=utf-8'
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(minutes=5)  # 5 Minuten Session-Timeout
app.config['SESSION_COOKIE_HTTPONLY'] = True
app.config['SESSION_COOKIE_SECURE'] = False  # Wird auf True gesetzt wenn HTTPS (intern läuft HTTP)
app.config['SESSION_COOKIE_SAMESITE'] = 'Lax'
app.config['SESSION_COOKIE_NAME'] = 'admin_session'
# WICHTIG: Session-Cookie soll auch nach 5 Minuten ablaufen
app.config['SESSION_REFRESH_EACH_REQUEST'] = False
Session(app)

# ProxyFix für korrekte IP-Adressen hinter Nginx
app.wsgi_app = ProxyFix(
    app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_prefix=1
)

# Backup-Konfiguration
BACKUP_DIR = Path("/app/backups")
BACKUP_DIR.mkdir(exist_ok=True)

# Rate-Limiting Konfiguration
FAIL_MESSAGES = [
    "NOPE!",
    "ACCESS DENIED, TRY HARDER",
    "WRONG! 🚫",
    "COMPUTER SAYS NO",
    "YOU FAILED"
]

MAX_LOGIN_ATTEMPTS = 5
BLOCK_DURATION_HOURS = 24
CAPTCHA_AFTER_ATTEMPTS = 2

# Scheduler für automatische Backups
scheduler = BackgroundScheduler()
scheduler.start()

# Logging konfigurieren
logging.basicConfig(level=logging.INFO)

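# Note on sessions: SECRET_KEY is regenerated from os.urandom() on every process start,
# so existing sessions become invalid after a restart. The 5-minute idle timeout is
# enforced server-side via the 'last_activity' check in the login_required decorator;
# SESSION_REFRESH_EACH_REQUEST = False keeps ordinary requests from silently extending it.
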
# Login decorator
def login_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if 'logged_in' not in session:
            return redirect(url_for('login'))

        # Prüfe ob Session abgelaufen ist
        if 'last_activity' in session:
            last_activity = datetime.fromisoformat(session['last_activity'])
            time_since_activity = datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None) - last_activity

            # Debug-Logging
            app.logger.info(f"Session check for {session.get('username', 'unknown')}: "
                            f"Last activity: {last_activity}, "
                            f"Time since: {time_since_activity.total_seconds()} seconds")

            if time_since_activity > timedelta(minutes=5):
                # Session abgelaufen - Logout
                username = session.get('username', 'unbekannt')
                app.logger.info(f"Session timeout for user {username} - auto logout")
                # Audit-Log für automatischen Logout (vor session.clear()!)
                try:
                    log_audit('AUTO_LOGOUT', 'session', additional_info={'reason': 'Session timeout (5 minutes)', 'username': username})
                except:
                    pass
                session.clear()
                flash('Ihre Sitzung ist abgelaufen. Bitte melden Sie sich erneut an.', 'warning')
                return redirect(url_for('login'))

        # Aktivität NICHT automatisch aktualisieren
        # Nur bei expliziten Benutzeraktionen (wird vom Heartbeat gemacht)
        return f(*args, **kwargs)
    return decorated_function


# DB-Verbindung mit UTF-8 Encoding
def get_connection():
    conn = psycopg2.connect(
        host=os.getenv("POSTGRES_HOST", "postgres"),
        port=os.getenv("POSTGRES_PORT", "5432"),
        dbname=os.getenv("POSTGRES_DB"),
        user=os.getenv("POSTGRES_USER"),
        password=os.getenv("POSTGRES_PASSWORD"),
        options='-c client_encoding=UTF8'
    )
    conn.set_client_encoding('UTF8')
    return conn


# User Authentication Helper Functions
def hash_password(password):
    """Hash a password using bcrypt"""
    return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')


def verify_password(password, hashed):
    """Verify a password against its hash"""
    return bcrypt.checkpw(password.encode('utf-8'), hashed.encode('utf-8'))


def get_user_by_username(username):
    """Get user from database by username"""
    conn = get_connection()
    cur = conn.cursor()
    try:
        cur.execute("""
            SELECT id, username, password_hash, email, totp_secret, totp_enabled,
                   backup_codes, last_password_change, failed_2fa_attempts
            FROM users WHERE username = %s
        """, (username,))
        user = cur.fetchone()
        if user:
            return {
                'id': user[0],
                'username': user[1],
                'password_hash': user[2],
                'email': user[3],
                'totp_secret': user[4],
                'totp_enabled': user[5],
                'backup_codes': user[6],
                'last_password_change': user[7],
                'failed_2fa_attempts': user[8]
            }
        return None
    finally:
        cur.close()
        conn.close()


def generate_totp_secret():
    """Generate a new TOTP secret"""
    return pyotp.random_base32()


def generate_qr_code(username, totp_secret):
    """Generate QR code for TOTP setup"""
    totp_uri = pyotp.totp.TOTP(totp_secret).provisioning_uri(
        name=username,
        issuer_name='V2 Admin Panel'
    )

    qr = qrcode.QRCode(version=1, box_size=10, border=5)
    qr.add_data(totp_uri)
    qr.make(fit=True)

    img = qr.make_image(fill_color="black", back_color="white")
    buf = BytesIO()
    img.save(buf, format='PNG')
    buf.seek(0)

    return base64.b64encode(buf.getvalue()).decode()


def verify_totp(totp_secret, token):
    """Verify a TOTP token"""
    totp = pyotp.TOTP(totp_secret)
    return totp.verify(token, valid_window=1)


def generate_backup_codes(count=8):
    """Generate backup codes for 2FA recovery"""
    codes = []
    for _ in range(count):
        code = ''.join(random.choices(string.ascii_uppercase + string.digits, k=8))
        codes.append(code)
    return codes


def hash_backup_code(code):
    """Hash a backup code for storage"""
    return hashlib.sha256(code.encode()).hexdigest()


def verify_backup_code(code, hashed_codes):
    """Verify a backup code against stored hashes"""
    code_hash = hashlib.sha256(code.encode()).hexdigest()
    return code_hash in hashed_codes

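# Note: verify_totp() passes valid_window=1, so pyotp also accepts the code from the
# directly preceding/following 30-second step to tolerate small clock drift.
# Backup codes are stored as unsalted SHA-256 hashes; that is only workable because
# the codes are random 8-character values rather than user-chosen passwords.
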
# Audit-Log-Funktion
def log_audit(action, entity_type, entity_id=None, old_values=None, new_values=None, additional_info=None):
    """Protokolliert Änderungen im Audit-Log"""
    conn = get_connection()
    cur = conn.cursor()

    try:
        username = session.get('username', 'system')
        ip_address = get_client_ip() if request else None
        user_agent = request.headers.get('User-Agent') if request else None

        # Debug logging
        app.logger.info(f"Audit log - IP address captured: {ip_address}, Action: {action}, User: {username}")

        # Konvertiere Dictionaries zu JSONB
        old_json = Json(old_values) if old_values else None
        new_json = Json(new_values) if new_values else None

        cur.execute("""
            INSERT INTO audit_log
                (username, action, entity_type, entity_id, old_values, new_values,
                 ip_address, user_agent, additional_info)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
        """, (username, action, entity_type, entity_id, old_json, new_json,
              ip_address, user_agent, additional_info))

        conn.commit()
    except Exception as e:
        print(f"Audit log error: {e}")
        conn.rollback()
    finally:
        cur.close()
        conn.close()


# Verschlüsselungs-Funktionen
def get_or_create_encryption_key():
    """Holt oder erstellt einen Verschlüsselungsschlüssel"""
    key_file = BACKUP_DIR / ".backup_key"

    # Versuche Key aus Umgebungsvariable zu lesen
    env_key = os.getenv("BACKUP_ENCRYPTION_KEY")
    if env_key:
        try:
            # Validiere den Key
            Fernet(env_key.encode())
            return env_key.encode()
        except:
            pass

    # Wenn kein gültiger Key in ENV, prüfe Datei
    if key_file.exists():
        return key_file.read_bytes()

    # Erstelle neuen Key
    key = Fernet.generate_key()
    key_file.write_bytes(key)
    logging.info("Neuer Backup-Verschlüsselungsschlüssel erstellt")
    return key

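# Note: without BACKUP_ENCRYPTION_KEY in the environment, the generated Fernet key
# exists only in BACKUP_DIR/.backup_key. If that file is lost (e.g. the volume is
# recreated), previously created encrypted backups can no longer be decrypted.
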
# Backup-Funktionen
def create_backup(backup_type="manual", created_by=None):
    """Erstellt ein verschlüsseltes Backup der Datenbank"""
    start_time = time.time()
    timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime("%Y%m%d_%H%M%S")
    filename = f"backup_v2docker_{timestamp}_encrypted.sql.gz.enc"
    filepath = BACKUP_DIR / filename

    conn = get_connection()
    cur = conn.cursor()

    # Backup-Eintrag erstellen
    cur.execute("""
        INSERT INTO backup_history
            (filename, filepath, backup_type, status, created_by, is_encrypted)
        VALUES (%s, %s, %s, %s, %s, %s)
        RETURNING id
    """, (filename, str(filepath), backup_type, 'in_progress',
          created_by or 'system', True))
    backup_id = cur.fetchone()[0]
    conn.commit()

    try:
        # PostgreSQL Dump erstellen
        dump_command = [
            'pg_dump',
            '-h', os.getenv("POSTGRES_HOST", "postgres"),
            '-p', os.getenv("POSTGRES_PORT", "5432"),
            '-U', os.getenv("POSTGRES_USER"),
            '-d', os.getenv("POSTGRES_DB"),
            '--no-password',
            '--verbose'
        ]

        # PGPASSWORD setzen
        env = os.environ.copy()
        env['PGPASSWORD'] = os.getenv("POSTGRES_PASSWORD")

        # Dump ausführen
        result = subprocess.run(dump_command, capture_output=True, text=True, env=env)

        if result.returncode != 0:
            raise Exception(f"pg_dump failed: {result.stderr}")

        dump_data = result.stdout.encode('utf-8')

        # Komprimieren
        compressed_data = gzip.compress(dump_data)

        # Verschlüsseln
        key = get_or_create_encryption_key()
        f = Fernet(key)
        encrypted_data = f.encrypt(compressed_data)

        # Speichern
        filepath.write_bytes(encrypted_data)

        # Statistiken sammeln
        cur.execute("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public'")
        tables_count = cur.fetchone()[0]

        cur.execute("""
            SELECT SUM(n_live_tup)
            FROM pg_stat_user_tables
        """)
        records_count = cur.fetchone()[0] or 0

        duration = time.time() - start_time
        filesize = filepath.stat().st_size

        # Backup-Eintrag aktualisieren
        cur.execute("""
            UPDATE backup_history
            SET status = %s, filesize = %s, tables_count = %s,
                records_count = %s, duration_seconds = %s
            WHERE id = %s
        """, ('success', filesize, tables_count, records_count, duration, backup_id))

        conn.commit()

        # Audit-Log
        log_audit('BACKUP', 'database', backup_id,
                  additional_info=f"Backup erstellt: {filename} ({filesize} bytes)")

        # E-Mail-Benachrichtigung (wenn konfiguriert)
        send_backup_notification(True, filename, filesize, duration)

        logging.info(f"Backup erfolgreich erstellt: {filename}")
        return True, filename

    except Exception as e:
        # Fehler protokollieren
        cur.execute("""
            UPDATE backup_history
            SET status = %s, error_message = %s, duration_seconds = %s
            WHERE id = %s
        """, ('failed', str(e), time.time() - start_time, backup_id))
        conn.commit()

        logging.error(f"Backup fehlgeschlagen: {e}")
        send_backup_notification(False, filename, error=str(e))

        return False, str(e)

    finally:
        cur.close()
        conn.close()


def restore_backup(backup_id, encryption_key=None):
    """Stellt ein Backup wieder her"""
    conn = get_connection()
    cur = conn.cursor()

    try:
        # Backup-Info abrufen
        cur.execute("""
            SELECT filename, filepath, is_encrypted
            FROM backup_history
            WHERE id = %s
        """, (backup_id,))
        backup_info = cur.fetchone()

        if not backup_info:
            raise Exception("Backup nicht gefunden")

        filename, filepath, is_encrypted = backup_info
        filepath = Path(filepath)

        if not filepath.exists():
            raise Exception("Backup-Datei nicht gefunden")

        # Datei lesen
        encrypted_data = filepath.read_bytes()

        # Entschlüsseln
        if is_encrypted:
            key = encryption_key.encode() if encryption_key else get_or_create_encryption_key()
            try:
                f = Fernet(key)
                compressed_data = f.decrypt(encrypted_data)
            except:
                raise Exception("Entschlüsselung fehlgeschlagen. Falsches Passwort?")
        else:
            compressed_data = encrypted_data

        # Dekomprimieren
        dump_data = gzip.decompress(compressed_data)
        sql_commands = dump_data.decode('utf-8')

        # Bestehende Verbindungen schließen
        cur.close()
        conn.close()

        # Datenbank wiederherstellen
        restore_command = [
            'psql',
            '-h', os.getenv("POSTGRES_HOST", "postgres"),
            '-p', os.getenv("POSTGRES_PORT", "5432"),
            '-U', os.getenv("POSTGRES_USER"),
            '-d', os.getenv("POSTGRES_DB"),
            '--no-password'
        ]

        env = os.environ.copy()
        env['PGPASSWORD'] = os.getenv("POSTGRES_PASSWORD")

        result = subprocess.run(restore_command, input=sql_commands,
                                capture_output=True, text=True, env=env)

        if result.returncode != 0:
            raise Exception(f"Wiederherstellung fehlgeschlagen: {result.stderr}")

        # Audit-Log (neue Verbindung)
        log_audit('RESTORE', 'database', backup_id,
                  additional_info=f"Backup wiederhergestellt: {filename}")

        return True, "Backup erfolgreich wiederhergestellt"

    except Exception as e:
        logging.error(f"Wiederherstellung fehlgeschlagen: {e}")
        return False, str(e)

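# Note: restore_backup() pipes the decrypted dump straight into psql against the live
# database and does not terminate other connections first; a restore is therefore best
# run while the application is otherwise idle, since open transactions can block it.
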
def send_backup_notification(success, filename, filesize=None, duration=None, error=None):
    """Sendet E-Mail-Benachrichtigung (wenn konfiguriert)"""
    if not os.getenv("EMAIL_ENABLED", "false").lower() == "true":
        return

    # E-Mail-Funktion vorbereitet aber deaktiviert
    # TODO: Implementieren wenn E-Mail-Server konfiguriert ist
    logging.info(f"E-Mail-Benachrichtigung vorbereitet: Backup {'erfolgreich' if success else 'fehlgeschlagen'}")


# Scheduled Backup Job
def scheduled_backup():
    """Führt ein geplantes Backup aus"""
    logging.info("Starte geplantes Backup...")
    create_backup(backup_type="scheduled", created_by="scheduler")


# Scheduler konfigurieren - täglich um 3:00 Uhr
scheduler.add_job(
    scheduled_backup,
    'cron',
    hour=3,
    minute=0,
    id='daily_backup',
    replace_existing=True
)

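# Note: this BackgroundScheduler is per process. If the app is served by several worker
# processes (e.g. gunicorn with workers > 1), each worker registers its own
# 'daily_backup' job, so the scheduled backup may run once per worker.
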
# Rate-Limiting Funktionen
def get_client_ip():
    """Ermittelt die echte IP-Adresse des Clients"""
    # Debug logging
    app.logger.info(f"Headers - X-Real-IP: {request.headers.get('X-Real-IP')}, X-Forwarded-For: {request.headers.get('X-Forwarded-For')}, Remote-Addr: {request.remote_addr}")

    # Try X-Real-IP first (set by nginx)
    if request.headers.get('X-Real-IP'):
        return request.headers.get('X-Real-IP')
    # Then X-Forwarded-For
    elif request.headers.get('X-Forwarded-For'):
        # X-Forwarded-For can contain multiple IPs, take the first one
        return request.headers.get('X-Forwarded-For').split(',')[0].strip()
    # Fallback to remote_addr
    else:
        return request.remote_addr

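# Note: X-Real-IP / X-Forwarded-For are only trustworthy because nginx sets them in
# front of this app. If the container were reachable directly, a client could spoof
# these headers and evade the per-IP rate limiting below.
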
def check_ip_blocked(ip_address):
    """Prüft ob eine IP-Adresse gesperrt ist"""
    conn = get_connection()
    cur = conn.cursor()

    cur.execute("""
        SELECT blocked_until FROM login_attempts
        WHERE ip_address = %s AND blocked_until IS NOT NULL
    """, (ip_address,))

    result = cur.fetchone()
    cur.close()
    conn.close()

    if result and result[0]:
        if result[0] > datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None):
            return True, result[0]
    return False, None


def record_failed_attempt(ip_address, username):
    """Zeichnet einen fehlgeschlagenen Login-Versuch auf"""
    conn = get_connection()
    cur = conn.cursor()

    # Random Fehlermeldung
    error_message = random.choice(FAIL_MESSAGES)

    try:
        # Prüfen ob IP bereits existiert
        cur.execute("""
            SELECT attempt_count FROM login_attempts
            WHERE ip_address = %s
        """, (ip_address,))

        result = cur.fetchone()

        if result:
            # Update bestehenden Eintrag
            new_count = result[0] + 1
            blocked_until = None

            if new_count >= MAX_LOGIN_ATTEMPTS:
                blocked_until = datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None) + timedelta(hours=BLOCK_DURATION_HOURS)
                # E-Mail-Benachrichtigung (wenn aktiviert)
                if os.getenv("EMAIL_ENABLED", "false").lower() == "true":
                    send_security_alert_email(ip_address, username, new_count)

            cur.execute("""
                UPDATE login_attempts
                SET attempt_count = %s,
                    last_attempt = CURRENT_TIMESTAMP,
                    blocked_until = %s,
                    last_username_tried = %s,
                    last_error_message = %s
                WHERE ip_address = %s
            """, (new_count, blocked_until, username, error_message, ip_address))
        else:
            # Neuen Eintrag erstellen
            cur.execute("""
                INSERT INTO login_attempts
                    (ip_address, attempt_count, last_username_tried, last_error_message)
                VALUES (%s, 1, %s, %s)
            """, (ip_address, username, error_message))

        conn.commit()

        # Audit-Log
        log_audit('LOGIN_FAILED', 'user',
                  additional_info=f"IP: {ip_address}, User: {username}, Message: {error_message}")

    except Exception as e:
        print(f"Rate limiting error: {e}")
        conn.rollback()
    finally:
        cur.close()
        conn.close()

    return error_message


def reset_login_attempts(ip_address):
    """Setzt die Login-Versuche für eine IP zurück"""
    conn = get_connection()
    cur = conn.cursor()

    try:
        cur.execute("""
            DELETE FROM login_attempts
            WHERE ip_address = %s
        """, (ip_address,))
        conn.commit()
    except Exception as e:
        print(f"Reset attempts error: {e}")
        conn.rollback()
    finally:
        cur.close()
        conn.close()


def get_login_attempts(ip_address):
    """Gibt die Anzahl der Login-Versuche für eine IP zurück"""
    conn = get_connection()
    cur = conn.cursor()

    cur.execute("""
        SELECT attempt_count FROM login_attempts
        WHERE ip_address = %s
    """, (ip_address,))

    result = cur.fetchone()
    cur.close()
    conn.close()

    return result[0] if result else 0


def send_security_alert_email(ip_address, username, attempt_count):
    """Sendet eine Sicherheitswarnung per E-Mail"""
    subject = f"⚠️ SICHERHEITSWARNUNG: {attempt_count} fehlgeschlagene Login-Versuche"
    body = f"""
    WARNUNG: Mehrere fehlgeschlagene Login-Versuche erkannt!

    IP-Adresse: {ip_address}
    Versuchter Benutzername: {username}
    Anzahl Versuche: {attempt_count}
    Zeit: {datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y-%m-%d %H:%M:%S')}

    Die IP-Adresse wurde für 24 Stunden gesperrt.

    Dies ist eine automatische Nachricht vom v2-Docker Admin Panel.
    """

    # TODO: E-Mail-Versand implementieren wenn SMTP konfiguriert
    logging.warning(f"Sicherheitswarnung: {attempt_count} fehlgeschlagene Versuche von IP {ip_address}")
    print(f"E-Mail würde gesendet: {subject}")


def verify_recaptcha(response):
    """Verifiziert die reCAPTCHA v2 Response mit Google"""
    secret_key = os.getenv('RECAPTCHA_SECRET_KEY')

    # Wenn kein Secret Key konfiguriert ist, CAPTCHA als bestanden werten (für PoC)
    if not secret_key:
        logging.warning("RECAPTCHA_SECRET_KEY nicht konfiguriert - CAPTCHA wird übersprungen")
        return True

    # Verifizierung bei Google
    try:
        verify_url = 'https://www.google.com/recaptcha/api/siteverify'
        data = {
            'secret': secret_key,
            'response': response
        }

        # Timeout für Request setzen
        r = requests.post(verify_url, data=data, timeout=5)
        result = r.json()

        # Log für Debugging
        if not result.get('success'):
            logging.warning(f"reCAPTCHA Validierung fehlgeschlagen: {result.get('error-codes', [])}")

        return result.get('success', False)

    except requests.exceptions.RequestException as e:
        logging.error(f"reCAPTCHA Verifizierung fehlgeschlagen: {str(e)}")
        # Bei Netzwerkfehlern CAPTCHA als bestanden werten
        return True
    except Exception as e:
        logging.error(f"Unerwarteter Fehler bei reCAPTCHA: {str(e)}")
        return False


def generate_license_key(license_type='full'):
    """
    Generiert einen Lizenzschlüssel im Format: AF-F-YYYYMM-XXXX-YYYY-ZZZZ

    AF = Account Factory (Produktkennung)
    F/T = F für Fullversion, T für Testversion
    YYYY = Jahr
    MM = Monat
    XXXX-YYYY-ZZZZ = Zufällige alphanumerische Zeichen
    """
    # Erlaubte Zeichen (ohne verwirrende wie 0/O, 1/I/l)
    chars = 'ABCDEFGHJKLMNPQRSTUVWXYZ23456789'

    # Datum-Teil
    now = datetime.now(ZoneInfo("Europe/Berlin"))
    date_part = now.strftime('%Y%m')
    type_char = 'F' if license_type == 'full' else 'T'

    # Zufällige Teile generieren (3 Blöcke à 4 Zeichen)
    parts = []
    for _ in range(3):
        part = ''.join(secrets.choice(chars) for _ in range(4))
        parts.append(part)

    # Key zusammensetzen
    key = f"AF-{type_char}-{date_part}-{parts[0]}-{parts[1]}-{parts[2]}"

    return key


def validate_license_key(key):
    """
    Validiert das License Key Format
    Erwartet: AF-F-YYYYMM-XXXX-YYYY-ZZZZ oder AF-T-YYYYMM-XXXX-YYYY-ZZZZ
    """
    if not key:
        return False

    # Pattern für das neue Format
    # AF- (fest) + F oder T + - + 6 Ziffern (YYYYMM) + - + 4 Zeichen + - + 4 Zeichen + - + 4 Zeichen
    pattern = r'^AF-[FT]-\d{6}-[A-Z0-9]{4}-[A-Z0-9]{4}-[A-Z0-9]{4}$'

    # Großbuchstaben für Vergleich
    return bool(re.match(pattern, key.upper()))

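# Illustrative examples (hypothetical keys, checked against the pattern above):
#   validate_license_key("AF-F-202401-ABCD-EFGH-JK23")  -> True
#   validate_license_key("af-t-202401-abcd-efgh-jk23")  -> True  (compared in upper case)
#   validate_license_key("AF-X-202401-ABCD-EFGH-JK23")  -> False (type must be F or T)
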
@app.route("/login", methods=["GET", "POST"])
|
|
def login():
|
|
# Timing-Attack Schutz - Start Zeit merken
|
|
start_time = time.time()
|
|
|
|
# IP-Adresse ermitteln
|
|
ip_address = get_client_ip()
|
|
|
|
# Prüfen ob IP gesperrt ist
|
|
is_blocked, blocked_until = check_ip_blocked(ip_address)
|
|
if is_blocked:
|
|
time_remaining = (blocked_until - datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None)).total_seconds() / 3600
|
|
error_msg = f"IP GESPERRT! Noch {time_remaining:.1f} Stunden warten."
|
|
return render_template("login.html", error=error_msg, error_type="blocked")
|
|
|
|
# Anzahl bisheriger Versuche
|
|
attempt_count = get_login_attempts(ip_address)
|
|
|
|
if request.method == "POST":
|
|
username = request.form.get("username")
|
|
password = request.form.get("password")
|
|
captcha_response = request.form.get("g-recaptcha-response")
|
|
|
|
# CAPTCHA-Prüfung nur wenn Keys konfiguriert sind
|
|
recaptcha_site_key = os.getenv('RECAPTCHA_SITE_KEY')
|
|
if attempt_count >= CAPTCHA_AFTER_ATTEMPTS and recaptcha_site_key:
|
|
if not captcha_response:
|
|
# Timing-Attack Schutz
|
|
elapsed = time.time() - start_time
|
|
if elapsed < 1.0:
|
|
time.sleep(1.0 - elapsed)
|
|
return render_template("login.html",
|
|
error="CAPTCHA ERFORDERLICH!",
|
|
show_captcha=True,
|
|
error_type="captcha",
|
|
attempts_left=max(0, MAX_LOGIN_ATTEMPTS - attempt_count),
|
|
recaptcha_site_key=recaptcha_site_key)
|
|
|
|
# CAPTCHA validieren
|
|
if not verify_recaptcha(captcha_response):
|
|
# Timing-Attack Schutz
|
|
elapsed = time.time() - start_time
|
|
if elapsed < 1.0:
|
|
time.sleep(1.0 - elapsed)
|
|
return render_template("login.html",
|
|
error="CAPTCHA UNGÜLTIG! Bitte erneut versuchen.",
|
|
show_captcha=True,
|
|
error_type="captcha",
|
|
attempts_left=max(0, MAX_LOGIN_ATTEMPTS - attempt_count),
|
|
recaptcha_site_key=recaptcha_site_key)
|
|
|
|
# Check user in database first, fallback to env vars
|
|
user = get_user_by_username(username)
|
|
login_success = False
|
|
needs_2fa = False
|
|
|
|
if user:
|
|
# Database user authentication
|
|
if verify_password(password, user['password_hash']):
|
|
login_success = True
|
|
needs_2fa = user['totp_enabled']
|
|
else:
|
|
# Fallback to environment variables for backward compatibility
|
|
admin1_user = os.getenv("ADMIN1_USERNAME")
|
|
admin1_pass = os.getenv("ADMIN1_PASSWORD")
|
|
admin2_user = os.getenv("ADMIN2_USERNAME")
|
|
admin2_pass = os.getenv("ADMIN2_PASSWORD")
|
|
|
|
if ((username == admin1_user and password == admin1_pass) or
|
|
(username == admin2_user and password == admin2_pass)):
|
|
login_success = True
|
|
|
|
# Timing-Attack Schutz - Mindestens 1 Sekunde warten
|
|
elapsed = time.time() - start_time
|
|
if elapsed < 1.0:
|
|
time.sleep(1.0 - elapsed)
|
|
|
|
if login_success:
|
|
# Erfolgreicher Login
|
|
if needs_2fa:
|
|
# Store temporary session for 2FA verification
|
|
session['temp_username'] = username
|
|
session['temp_user_id'] = user['id']
|
|
session['awaiting_2fa'] = True
|
|
return redirect(url_for('verify_2fa'))
|
|
else:
|
|
# Complete login without 2FA
|
|
session.permanent = True # Aktiviert das Timeout
|
|
session['logged_in'] = True
|
|
session['username'] = username
|
|
session['user_id'] = user['id'] if user else None
|
|
session['last_activity'] = datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None).isoformat()
|
|
reset_login_attempts(ip_address)
|
|
log_audit('LOGIN_SUCCESS', 'user',
|
|
additional_info=f"Erfolgreiche Anmeldung von IP: {ip_address}")
|
|
return redirect(url_for('dashboard'))
|
|
else:
|
|
# Fehlgeschlagener Login
|
|
error_message = record_failed_attempt(ip_address, username)
|
|
new_attempt_count = get_login_attempts(ip_address)
|
|
|
|
# Prüfen ob jetzt gesperrt
|
|
is_now_blocked, _ = check_ip_blocked(ip_address)
|
|
if is_now_blocked:
|
|
log_audit('LOGIN_BLOCKED', 'security',
|
|
additional_info=f"IP {ip_address} wurde nach {MAX_LOGIN_ATTEMPTS} Versuchen gesperrt")
|
|
|
|
return render_template("login.html",
|
|
error=error_message,
|
|
show_captcha=(new_attempt_count >= CAPTCHA_AFTER_ATTEMPTS and os.getenv('RECAPTCHA_SITE_KEY')),
|
|
error_type="failed",
|
|
attempts_left=max(0, MAX_LOGIN_ATTEMPTS - new_attempt_count),
|
|
recaptcha_site_key=os.getenv('RECAPTCHA_SITE_KEY'))
|
|
|
|
# GET Request
|
|
return render_template("login.html",
|
|
show_captcha=(attempt_count >= CAPTCHA_AFTER_ATTEMPTS and os.getenv('RECAPTCHA_SITE_KEY')),
|
|
attempts_left=max(0, MAX_LOGIN_ATTEMPTS - attempt_count),
|
|
recaptcha_site_key=os.getenv('RECAPTCHA_SITE_KEY'))
|
|
|
|
@app.route("/logout")
|
|
def logout():
|
|
username = session.get('username', 'unknown')
|
|
log_audit('LOGOUT', 'user', additional_info=f"Abmeldung")
|
|
session.pop('logged_in', None)
|
|
session.pop('username', None)
|
|
session.pop('user_id', None)
|
|
session.pop('temp_username', None)
|
|
session.pop('temp_user_id', None)
|
|
session.pop('awaiting_2fa', None)
|
|
return redirect(url_for('login'))
|
|
|
|
@app.route("/verify-2fa", methods=["GET", "POST"])
|
|
def verify_2fa():
|
|
if not session.get('awaiting_2fa'):
|
|
return redirect(url_for('login'))
|
|
|
|
if request.method == "POST":
|
|
token = request.form.get('token', '').replace(' ', '')
|
|
username = session.get('temp_username')
|
|
user_id = session.get('temp_user_id')
|
|
|
|
if not username or not user_id:
|
|
flash('Session expired. Please login again.', 'error')
|
|
return redirect(url_for('login'))
|
|
|
|
user = get_user_by_username(username)
|
|
if not user:
|
|
flash('User not found.', 'error')
|
|
return redirect(url_for('login'))
|
|
|
|
# Check if it's a backup code
|
|
if len(token) == 8 and token.isupper():
|
|
# Try backup code
|
|
backup_codes = json.loads(user['backup_codes']) if user['backup_codes'] else []
|
|
if verify_backup_code(token, backup_codes):
|
|
# Remove used backup code
|
|
code_hash = hash_backup_code(token)
|
|
backup_codes.remove(code_hash)
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
cur.execute("UPDATE users SET backup_codes = %s WHERE id = %s",
|
|
(json.dumps(backup_codes), user_id))
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Complete login
|
|
session.permanent = True
|
|
session['logged_in'] = True
|
|
session['username'] = username
|
|
session['user_id'] = user_id
|
|
session['last_activity'] = datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None).isoformat()
|
|
session.pop('temp_username', None)
|
|
session.pop('temp_user_id', None)
|
|
session.pop('awaiting_2fa', None)
|
|
|
|
flash('Login successful using backup code. Please generate new backup codes.', 'warning')
|
|
log_audit('LOGIN_2FA_BACKUP', 'user', additional_info=f"2FA login with backup code")
|
|
return redirect(url_for('dashboard'))
|
|
else:
|
|
# Try TOTP token
|
|
if verify_totp(user['totp_secret'], token):
|
|
# Complete login
|
|
session.permanent = True
|
|
session['logged_in'] = True
|
|
session['username'] = username
|
|
session['user_id'] = user_id
|
|
session['last_activity'] = datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None).isoformat()
|
|
session.pop('temp_username', None)
|
|
session.pop('temp_user_id', None)
|
|
session.pop('awaiting_2fa', None)
|
|
|
|
log_audit('LOGIN_2FA_SUCCESS', 'user', additional_info=f"2FA login successful")
|
|
return redirect(url_for('dashboard'))
|
|
|
|
# Failed verification
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
cur.execute("UPDATE users SET failed_2fa_attempts = failed_2fa_attempts + 1, last_failed_2fa = %s WHERE id = %s",
|
|
(datetime.now(), user_id))
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
flash('Invalid authentication code. Please try again.', 'error')
|
|
log_audit('LOGIN_2FA_FAILED', 'user', additional_info=f"Failed 2FA attempt")
|
|
|
|
return render_template('verify_2fa.html')
|
|
|
|
@app.route("/profile")
|
|
@login_required
|
|
def profile():
|
|
user = get_user_by_username(session['username'])
|
|
if not user:
|
|
# For environment-based users, redirect with message
|
|
flash('Bitte führen Sie das Migrations-Script aus, um Passwort-Änderung und 2FA zu aktivieren.', 'info')
|
|
return redirect(url_for('dashboard'))
|
|
return render_template('profile.html', user=user)
|
|
|
|
@app.route("/profile/change-password", methods=["POST"])
|
|
@login_required
|
|
def change_password():
|
|
current_password = request.form.get('current_password')
|
|
new_password = request.form.get('new_password')
|
|
confirm_password = request.form.get('confirm_password')
|
|
|
|
user = get_user_by_username(session['username'])
|
|
|
|
# Verify current password
|
|
if not verify_password(current_password, user['password_hash']):
|
|
flash('Current password is incorrect.', 'error')
|
|
return redirect(url_for('profile'))
|
|
|
|
# Check new password
|
|
if new_password != confirm_password:
|
|
flash('New passwords do not match.', 'error')
|
|
return redirect(url_for('profile'))
|
|
|
|
if len(new_password) < 8:
|
|
flash('Password must be at least 8 characters long.', 'error')
|
|
return redirect(url_for('profile'))
|
|
|
|
# Update password
|
|
new_hash = hash_password(new_password)
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
cur.execute("UPDATE users SET password_hash = %s, last_password_change = %s WHERE id = %s",
|
|
(new_hash, datetime.now(), user['id']))
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
log_audit('PASSWORD_CHANGE', 'user', entity_id=user['id'],
|
|
additional_info="Password changed successfully")
|
|
flash('Password changed successfully.', 'success')
|
|
return redirect(url_for('profile'))
|
|
|
|
@app.route("/profile/setup-2fa")
|
|
@login_required
|
|
def setup_2fa():
|
|
user = get_user_by_username(session['username'])
|
|
|
|
if user['totp_enabled']:
|
|
flash('2FA is already enabled for your account.', 'info')
|
|
return redirect(url_for('profile'))
|
|
|
|
# Generate new TOTP secret
|
|
totp_secret = generate_totp_secret()
|
|
session['temp_totp_secret'] = totp_secret
|
|
|
|
# Generate QR code
|
|
qr_code = generate_qr_code(user['username'], totp_secret)
|
|
|
|
return render_template('setup_2fa.html',
|
|
totp_secret=totp_secret,
|
|
qr_code=qr_code)
|
|
|
|
@app.route("/profile/enable-2fa", methods=["POST"])
|
|
@login_required
|
|
def enable_2fa():
|
|
token = request.form.get('token', '').replace(' ', '')
|
|
totp_secret = session.get('temp_totp_secret')
|
|
|
|
if not totp_secret:
|
|
flash('2FA setup session expired. Please try again.', 'error')
|
|
return redirect(url_for('setup_2fa'))
|
|
|
|
# Verify the token
|
|
if not verify_totp(totp_secret, token):
|
|
flash('Invalid authentication code. Please try again.', 'error')
|
|
return redirect(url_for('setup_2fa'))
|
|
|
|
# Generate backup codes
|
|
backup_codes = generate_backup_codes()
|
|
hashed_codes = [hash_backup_code(code) for code in backup_codes]
|
|
|
|
# Enable 2FA
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
cur.execute("""
|
|
UPDATE users
|
|
SET totp_secret = %s, totp_enabled = TRUE, backup_codes = %s
|
|
WHERE username = %s
|
|
""", (totp_secret, json.dumps(hashed_codes), session['username']))
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
session.pop('temp_totp_secret', None)
|
|
|
|
log_audit('2FA_ENABLED', 'user', additional_info="2FA enabled successfully")
|
|
|
|
# Show backup codes
|
|
return render_template('backup_codes.html', backup_codes=backup_codes)
|
|
|
|
@app.route("/profile/disable-2fa", methods=["POST"])
|
|
@login_required
|
|
def disable_2fa():
|
|
password = request.form.get('password')
|
|
user = get_user_by_username(session['username'])
|
|
|
|
# Verify password
|
|
if not verify_password(password, user['password_hash']):
|
|
flash('Incorrect password.', 'error')
|
|
return redirect(url_for('profile'))
|
|
|
|
# Disable 2FA
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
cur.execute("""
|
|
UPDATE users
|
|
SET totp_enabled = FALSE, totp_secret = NULL, backup_codes = NULL
|
|
WHERE username = %s
|
|
""", (session['username'],))
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
log_audit('2FA_DISABLED', 'user', additional_info="2FA disabled")
|
|
flash('2FA has been disabled for your account.', 'success')
|
|
return redirect(url_for('profile'))
|
|
|
|
@app.route("/heartbeat", methods=['POST'])
|
|
@login_required
|
|
def heartbeat():
|
|
"""Endpoint für Session Keep-Alive - aktualisiert last_activity"""
|
|
# Aktualisiere last_activity nur wenn explizit angefordert
|
|
session['last_activity'] = datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None).isoformat()
|
|
# Force session save
|
|
session.modified = True
|
|
|
|
return jsonify({
|
|
'status': 'ok',
|
|
'last_activity': session['last_activity'],
|
|
'username': session.get('username')
|
|
})
|
|
|
|
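# Note: the 5-minute idle timeout is only extended through this endpoint, so the
# frontend is expected to POST /heartbeat on explicit user activity; the exact trigger
# lives in the templates/JS and is not part of this file.
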
@app.route("/api/generate-license-key", methods=['POST'])
|
|
@login_required
|
|
def api_generate_key():
|
|
"""API Endpoint zur Generierung eines neuen Lizenzschlüssels"""
|
|
try:
|
|
# Lizenztyp aus Request holen (default: full)
|
|
data = request.get_json() or {}
|
|
license_type = data.get('type', 'full')
|
|
|
|
# Key generieren
|
|
key = generate_license_key(license_type)
|
|
|
|
# Prüfen ob Key bereits existiert (sehr unwahrscheinlich aber sicher ist sicher)
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Wiederhole bis eindeutiger Key gefunden
|
|
attempts = 0
|
|
while attempts < 10: # Max 10 Versuche
|
|
cur.execute("SELECT 1 FROM licenses WHERE license_key = %s", (key,))
|
|
if not cur.fetchone():
|
|
break # Key ist eindeutig
|
|
key = generate_license_key(license_type)
|
|
attempts += 1
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Log für Audit
|
|
log_audit('GENERATE_KEY', 'license',
|
|
additional_info={'type': license_type, 'key': key})
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'key': key,
|
|
'type': license_type
|
|
})
|
|
|
|
except Exception as e:
|
|
logging.error(f"Fehler bei Key-Generierung: {str(e)}")
|
|
return jsonify({
|
|
'success': False,
|
|
'error': 'Fehler bei der Key-Generierung'
|
|
}), 500
|
|
|
|
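# Illustrative call (assumptions: app reachable on localhost:5000, valid admin_session
# cookie taken from a logged-in browser session):
#   curl -X POST http://localhost:5000/api/generate-license-key \
#        -H "Content-Type: application/json" \
#        -b "admin_session=<session cookie>" \
#        -d '{"type": "test"}'
# Expected shape of the response: {"key": "AF-T-...", "success": true, "type": "test"}
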
@app.route("/api/customers", methods=['GET'])
|
|
@login_required
|
|
def api_customers():
|
|
"""API Endpoint für die Kundensuche mit Select2"""
|
|
try:
|
|
# Suchparameter
|
|
search = request.args.get('q', '').strip()
|
|
page = request.args.get('page', 1, type=int)
|
|
per_page = 20
|
|
customer_id = request.args.get('id', type=int)
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Einzelnen Kunden per ID abrufen
|
|
if customer_id:
|
|
cur.execute("""
|
|
SELECT c.id, c.name, c.email,
|
|
COUNT(l.id) as license_count
|
|
FROM customers c
|
|
LEFT JOIN licenses l ON c.id = l.customer_id
|
|
WHERE c.id = %s
|
|
GROUP BY c.id, c.name, c.email
|
|
""", (customer_id,))
|
|
|
|
customer = cur.fetchone()
|
|
results = []
|
|
if customer:
|
|
results.append({
|
|
'id': customer[0],
|
|
'text': f"{customer[1]} ({customer[2]})",
|
|
'name': customer[1],
|
|
'email': customer[2],
|
|
'license_count': customer[3]
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({
|
|
'results': results,
|
|
'pagination': {'more': False}
|
|
})
|
|
|
|
# SQL Query mit optionaler Suche
|
|
elif search:
|
|
cur.execute("""
|
|
SELECT c.id, c.name, c.email,
|
|
COUNT(l.id) as license_count
|
|
FROM customers c
|
|
LEFT JOIN licenses l ON c.id = l.customer_id
|
|
WHERE LOWER(c.name) LIKE LOWER(%s)
|
|
OR LOWER(c.email) LIKE LOWER(%s)
|
|
GROUP BY c.id, c.name, c.email
|
|
ORDER BY c.name
|
|
LIMIT %s OFFSET %s
|
|
""", (f'%{search}%', f'%{search}%', per_page, (page - 1) * per_page))
|
|
else:
|
|
cur.execute("""
|
|
SELECT c.id, c.name, c.email,
|
|
COUNT(l.id) as license_count
|
|
FROM customers c
|
|
LEFT JOIN licenses l ON c.id = l.customer_id
|
|
GROUP BY c.id, c.name, c.email
|
|
ORDER BY c.name
|
|
LIMIT %s OFFSET %s
|
|
""", (per_page, (page - 1) * per_page))
|
|
|
|
customers = cur.fetchall()
|
|
|
|
# Format für Select2
|
|
results = []
|
|
for customer in customers:
|
|
results.append({
|
|
'id': customer[0],
|
|
'text': f"{customer[1]} - {customer[2]} ({customer[3]} Lizenzen)",
|
|
'name': customer[1],
|
|
'email': customer[2],
|
|
'license_count': customer[3]
|
|
})
|
|
|
|
# Gesamtanzahl für Pagination
|
|
if search:
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM customers
|
|
WHERE LOWER(name) LIKE LOWER(%s)
|
|
OR LOWER(email) LIKE LOWER(%s)
|
|
""", (f'%{search}%', f'%{search}%'))
|
|
else:
|
|
cur.execute("SELECT COUNT(*) FROM customers")
|
|
|
|
total_count = cur.fetchone()[0]
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Select2 Response Format
|
|
return jsonify({
|
|
'results': results,
|
|
'pagination': {
|
|
'more': (page * per_page) < total_count
|
|
}
|
|
})
|
|
|
|
except Exception as e:
|
|
logging.error(f"Fehler bei Kundensuche: {str(e)}")
|
|
return jsonify({
|
|
'results': [],
|
|
'error': 'Fehler bei der Kundensuche'
|
|
}), 500
|
|
|
|
@app.route("/")
|
|
@login_required
|
|
def dashboard():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Statistiken abrufen
|
|
# Gesamtanzahl Kunden (ohne Testdaten)
|
|
cur.execute("SELECT COUNT(*) FROM customers WHERE is_test = FALSE")
|
|
total_customers = cur.fetchone()[0]
|
|
|
|
# Gesamtanzahl Lizenzen (ohne Testdaten)
|
|
cur.execute("SELECT COUNT(*) FROM licenses WHERE is_test = FALSE")
|
|
total_licenses = cur.fetchone()[0]
|
|
|
|
# Aktive Lizenzen (nicht abgelaufen und is_active = true, ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM licenses
|
|
WHERE valid_until >= CURRENT_DATE AND is_active = TRUE AND is_test = FALSE
|
|
""")
|
|
active_licenses = cur.fetchone()[0]
|
|
|
|
# Aktive Sessions
|
|
cur.execute("SELECT COUNT(*) FROM sessions WHERE is_active = TRUE")
|
|
active_sessions_count = cur.fetchone()[0]
|
|
|
|
# Abgelaufene Lizenzen (ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM licenses
|
|
WHERE valid_until < CURRENT_DATE AND is_test = FALSE
|
|
""")
|
|
expired_licenses = cur.fetchone()[0]
|
|
|
|
# Deaktivierte Lizenzen (ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM licenses
|
|
WHERE is_active = FALSE AND is_test = FALSE
|
|
""")
|
|
inactive_licenses = cur.fetchone()[0]
|
|
|
|
# Lizenzen die in den nächsten 30 Tagen ablaufen (ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM licenses
|
|
WHERE valid_until >= CURRENT_DATE
|
|
AND valid_until < CURRENT_DATE + INTERVAL '30 days'
|
|
AND is_active = TRUE
|
|
AND is_test = FALSE
|
|
""")
|
|
expiring_soon = cur.fetchone()[0]
|
|
|
|
# Testlizenzen vs Vollversionen (ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT license_type, COUNT(*)
|
|
FROM licenses
|
|
WHERE is_test = FALSE
|
|
GROUP BY license_type
|
|
""")
|
|
license_types = dict(cur.fetchall())
|
|
|
|
# Anzahl Testdaten
|
|
cur.execute("SELECT COUNT(*) FROM licenses WHERE is_test = TRUE")
|
|
test_data_count = cur.fetchone()[0]
|
|
|
|
# Anzahl Test-Kunden
|
|
cur.execute("SELECT COUNT(*) FROM customers WHERE is_test = TRUE")
|
|
test_customers_count = cur.fetchone()[0]
|
|
|
|
# Anzahl Test-Ressourcen
|
|
cur.execute("SELECT COUNT(*) FROM resource_pools WHERE is_test = TRUE")
|
|
test_resources_count = cur.fetchone()[0]
|
|
|
|
# Letzte 5 erstellten Lizenzen (ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT l.id, l.license_key, c.name, l.valid_until,
|
|
CASE
|
|
WHEN l.is_active = FALSE THEN 'deaktiviert'
|
|
WHEN l.valid_until < CURRENT_DATE THEN 'abgelaufen'
|
|
WHEN l.valid_until < CURRENT_DATE + INTERVAL '30 days' THEN 'läuft bald ab'
|
|
ELSE 'aktiv'
|
|
END as status
|
|
FROM licenses l
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE l.is_test = FALSE
|
|
ORDER BY l.id DESC
|
|
LIMIT 5
|
|
""")
|
|
recent_licenses = cur.fetchall()
|
|
|
|
# Bald ablaufende Lizenzen (nächste 30 Tage, ohne Testdaten)
|
|
cur.execute("""
|
|
SELECT l.id, l.license_key, c.name, l.valid_until,
|
|
l.valid_until - CURRENT_DATE as days_left
|
|
FROM licenses l
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE l.valid_until >= CURRENT_DATE
|
|
AND l.valid_until < CURRENT_DATE + INTERVAL '30 days'
|
|
AND l.is_active = TRUE
|
|
AND l.is_test = FALSE
|
|
ORDER BY l.valid_until
|
|
LIMIT 10
|
|
""")
|
|
expiring_licenses = cur.fetchall()
|
|
|
|
# Letztes Backup
|
|
cur.execute("""
|
|
SELECT created_at, filesize, duration_seconds, backup_type, status
|
|
FROM backup_history
|
|
ORDER BY created_at DESC
|
|
LIMIT 1
|
|
""")
|
|
last_backup_info = cur.fetchone()
|
|
|
|
# Sicherheitsstatistiken
|
|
# Gesperrte IPs
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM login_attempts
|
|
WHERE blocked_until IS NOT NULL AND blocked_until > CURRENT_TIMESTAMP
|
|
""")
|
|
blocked_ips_count = cur.fetchone()[0]
|
|
|
|
# Fehlversuche heute
|
|
cur.execute("""
|
|
SELECT COALESCE(SUM(attempt_count), 0) FROM login_attempts
|
|
WHERE last_attempt::date = CURRENT_DATE
|
|
""")
|
|
failed_attempts_today = cur.fetchone()[0]
|
|
|
|
# Letzte 5 Sicherheitsereignisse
|
|
cur.execute("""
|
|
SELECT
|
|
la.ip_address,
|
|
la.attempt_count,
|
|
la.last_attempt,
|
|
la.blocked_until,
|
|
la.last_username_tried,
|
|
la.last_error_message
|
|
FROM login_attempts la
|
|
ORDER BY la.last_attempt DESC
|
|
LIMIT 5
|
|
""")
|
|
recent_security_events = []
|
|
for event in cur.fetchall():
|
|
recent_security_events.append({
|
|
'ip_address': event[0],
|
|
'attempt_count': event[1],
|
|
'last_attempt': event[2].strftime('%d.%m %H:%M'),
|
|
'blocked_until': event[3].strftime('%d.%m %H:%M') if event[3] and event[3] > datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None) else None,
|
|
'username_tried': event[4],
|
|
'error_message': event[5]
|
|
})
|
|
|
|
# Sicherheitslevel berechnen
|
|
if blocked_ips_count > 5 or failed_attempts_today > 50:
|
|
security_level = 'danger'
|
|
security_level_text = 'KRITISCH'
|
|
elif blocked_ips_count > 2 or failed_attempts_today > 20:
|
|
security_level = 'warning'
|
|
security_level_text = 'ERHÖHT'
|
|
else:
|
|
security_level = 'success'
|
|
security_level_text = 'NORMAL'
|
|
|
|
# Resource Pool Statistiken (nur Live-Daten, keine Testdaten)
|
|
cur.execute("""
|
|
SELECT
|
|
resource_type,
|
|
COUNT(*) FILTER (WHERE status = 'available') as available,
|
|
COUNT(*) FILTER (WHERE status = 'allocated') as allocated,
|
|
COUNT(*) FILTER (WHERE status = 'quarantine') as quarantine,
|
|
COUNT(*) as total
|
|
FROM resource_pools
|
|
WHERE is_test = FALSE
|
|
GROUP BY resource_type
|
|
""")
|
|
|
|
resource_stats = {}
|
|
resource_warning = None
|
|
|
|
for row in cur.fetchall():
|
|
available_percent = round((row[1] / row[4] * 100) if row[4] > 0 else 0, 1)
|
|
resource_stats[row[0]] = {
|
|
'available': row[1],
|
|
'allocated': row[2],
|
|
'quarantine': row[3],
|
|
'total': row[4],
|
|
'available_percent': available_percent,
|
|
'warning_level': 'danger' if row[1] < 50 else 'warning' if row[1] < 100 else 'success'
|
|
}
|
|
|
|
# Warnung bei niedrigem Bestand
|
|
if row[1] < 50:
|
|
if not resource_warning:
|
|
resource_warning = f"Niedriger Bestand bei {row[0].upper()}: nur noch {row[1]} verfügbar!"
|
|
else:
|
|
resource_warning += f" | {row[0].upper()}: {row[1]}"
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
stats = {
|
|
'total_customers': total_customers,
|
|
'total_licenses': total_licenses,
|
|
'active_licenses': active_licenses,
|
|
'expired_licenses': expired_licenses,
|
|
'inactive_licenses': inactive_licenses,
|
|
'expiring_soon': expiring_soon,
|
|
'full_licenses': license_types.get('full', 0),
|
|
'test_licenses': license_types.get('test', 0),
|
|
'test_data_count': test_data_count,
|
|
'test_customers_count': test_customers_count,
|
|
'test_resources_count': test_resources_count,
|
|
'recent_licenses': recent_licenses,
|
|
'expiring_licenses': expiring_licenses,
|
|
'active_sessions': active_sessions_count,
|
|
'last_backup': last_backup_info,
|
|
# Sicherheitsstatistiken
|
|
'blocked_ips_count': blocked_ips_count,
|
|
'failed_attempts_today': failed_attempts_today,
|
|
'recent_security_events': recent_security_events,
|
|
'security_level': security_level,
|
|
'security_level_text': security_level_text,
|
|
'resource_stats': resource_stats
|
|
}
|
|
|
|
return render_template("dashboard.html",
|
|
stats=stats,
|
|
resource_stats=resource_stats,
|
|
resource_warning=resource_warning,
|
|
username=session.get('username'))
|
|
|
|
@app.route("/create", methods=["GET", "POST"])
|
|
@login_required
|
|
def create_license():
|
|
if request.method == "POST":
|
|
customer_id = request.form.get("customer_id")
|
|
license_key = request.form["license_key"].upper() # Immer Großbuchstaben
|
|
license_type = request.form["license_type"]
|
|
valid_from = request.form["valid_from"]
|
|
is_test = request.form.get("is_test") == "on" # Checkbox value
|
|
|
|
# Berechne valid_until basierend auf Laufzeit
|
|
duration = int(request.form.get("duration", 1))
|
|
duration_type = request.form.get("duration_type", "years")
|
|
|
|
from datetime import datetime, timedelta
|
|
from dateutil.relativedelta import relativedelta
|
|
|
|
start_date = datetime.strptime(valid_from, "%Y-%m-%d")
|
|
|
|
if duration_type == "days":
|
|
end_date = start_date + timedelta(days=duration)
|
|
elif duration_type == "months":
|
|
end_date = start_date + relativedelta(months=duration)
|
|
else: # years
|
|
end_date = start_date + relativedelta(years=duration)
|
|
|
|
# Ein Tag abziehen, da der Starttag mitgezählt wird
|
|
end_date = end_date - timedelta(days=1)
|
|
valid_until = end_date.strftime("%Y-%m-%d")
|
|
|
|
# Validiere License Key Format
|
|
if not validate_license_key(license_key):
|
|
flash('Ungültiges License Key Format! Erwartet: AF-YYYYMMFT-XXXX-YYYY-ZZZZ', 'error')
|
|
return redirect(url_for('create_license'))
|
|
|
|
# Resource counts
|
|
domain_count = int(request.form.get("domain_count", 1))
|
|
ipv4_count = int(request.form.get("ipv4_count", 1))
|
|
phone_count = int(request.form.get("phone_count", 1))
|
|
device_limit = int(request.form.get("device_limit", 3))
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
try:
|
|
# Prüfe ob neuer Kunde oder bestehender
|
|
if customer_id == "new":
|
|
# Neuer Kunde
|
|
name = request.form.get("customer_name")
|
|
email = request.form.get("email")
|
|
|
|
if not name:
|
|
flash('Kundenname ist erforderlich!', 'error')
|
|
return redirect(url_for('create_license'))
|
|
|
|
# Prüfe ob E-Mail bereits existiert
|
|
if email:
|
|
cur.execute("SELECT id, name FROM customers WHERE LOWER(email) = LOWER(%s)", (email,))
|
|
existing = cur.fetchone()
|
|
if existing:
|
|
flash(f'E-Mail bereits vergeben für Kunde: {existing[1]}', 'error')
|
|
return redirect(url_for('create_license'))
|
|
|
|
# Kunde einfügen (erbt Test-Status von Lizenz)
|
|
cur.execute("""
|
|
INSERT INTO customers (name, email, is_test, created_at)
|
|
VALUES (%s, %s, %s, NOW())
|
|
RETURNING id
|
|
""", (name, email, is_test))
|
|
customer_id = cur.fetchone()[0]
|
|
customer_info = {'name': name, 'email': email, 'is_test': is_test}
|
|
|
|
# Audit-Log für neuen Kunden
|
|
log_audit('CREATE', 'customer', customer_id,
|
|
new_values={'name': name, 'email': email, 'is_test': is_test})
|
|
else:
|
|
# Bestehender Kunde - hole Infos für Audit-Log
|
|
cur.execute("SELECT name, email, is_test FROM customers WHERE id = %s", (customer_id,))
|
|
customer_data = cur.fetchone()
|
|
if not customer_data:
|
|
flash('Kunde nicht gefunden!', 'error')
|
|
return redirect(url_for('create_license'))
|
|
customer_info = {'name': customer_data[0], 'email': customer_data[1]}
|
|
|
|
# Wenn Kunde Test-Kunde ist, Lizenz auch als Test markieren
|
|
if customer_data[2]: # is_test des Kunden
|
|
is_test = True
|
|
|
|
# Lizenz hinzufügen
|
|
cur.execute("""
|
|
INSERT INTO licenses (license_key, customer_id, license_type, valid_from, valid_until, is_active,
|
|
domain_count, ipv4_count, phone_count, device_limit, is_test)
|
|
VALUES (%s, %s, %s, %s, %s, TRUE, %s, %s, %s, %s, %s)
|
|
RETURNING id
|
|
""", (license_key, customer_id, license_type, valid_from, valid_until,
|
|
domain_count, ipv4_count, phone_count, device_limit, is_test))
|
|
license_id = cur.fetchone()[0]
|
|
|
|
# Ressourcen zuweisen
|
|
try:
|
|
# Prüfe Verfügbarkeit
|
|
cur.execute("""
|
|
SELECT
|
|
(SELECT COUNT(*) FROM resource_pools WHERE resource_type = 'domain' AND status = 'available' AND is_test = %s) as domains,
|
|
(SELECT COUNT(*) FROM resource_pools WHERE resource_type = 'ipv4' AND status = 'available' AND is_test = %s) as ipv4s,
|
|
(SELECT COUNT(*) FROM resource_pools WHERE resource_type = 'phone' AND status = 'available' AND is_test = %s) as phones
|
|
""", (is_test, is_test, is_test))
|
|
available = cur.fetchone()
|
|
|
|
if available[0] < domain_count:
|
|
raise ValueError(f"Nicht genügend Domains verfügbar (benötigt: {domain_count}, verfügbar: {available[0]})")
|
|
if available[1] < ipv4_count:
|
|
raise ValueError(f"Nicht genügend IPv4-Adressen verfügbar (benötigt: {ipv4_count}, verfügbar: {available[1]})")
|
|
if available[2] < phone_count:
|
|
raise ValueError(f"Nicht genügend Telefonnummern verfügbar (benötigt: {phone_count}, verfügbar: {available[2]})")
|
|
|
|
# Domains zuweisen
|
|
if domain_count > 0:
|
|
cur.execute("""
|
|
SELECT id FROM resource_pools
|
|
WHERE resource_type = 'domain' AND status = 'available' AND is_test = %s
|
|
LIMIT %s FOR UPDATE
|
|
""", (is_test, domain_count))
|
|
for (resource_id,) in cur.fetchall():
|
|
cur.execute("""
|
|
UPDATE resource_pools
|
|
SET status = 'allocated', allocated_to_license = %s,
|
|
status_changed_at = CURRENT_TIMESTAMP, status_changed_by = %s
|
|
WHERE id = %s
|
|
""", (license_id, session['username'], resource_id))
|
|
|
|
cur.execute("""
|
|
INSERT INTO license_resources (license_id, resource_id, assigned_by)
|
|
VALUES (%s, %s, %s)
|
|
""", (license_id, resource_id, session['username']))
|
|
|
|
cur.execute("""
|
|
INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
|
|
VALUES (%s, %s, 'allocated', %s, %s)
|
|
""", (resource_id, license_id, session['username'], get_client_ip()))
|
|
|
|
# IPv4s zuweisen
|
|
if ipv4_count > 0:
|
|
cur.execute("""
|
|
SELECT id FROM resource_pools
|
|
WHERE resource_type = 'ipv4' AND status = 'available' AND is_test = %s
|
|
LIMIT %s FOR UPDATE
|
|
""", (is_test, ipv4_count))
|
|
for (resource_id,) in cur.fetchall():
|
|
cur.execute("""
|
|
UPDATE resource_pools
|
|
SET status = 'allocated', allocated_to_license = %s,
|
|
status_changed_at = CURRENT_TIMESTAMP, status_changed_by = %s
|
|
WHERE id = %s
|
|
""", (license_id, session['username'], resource_id))
|
|
|
|
cur.execute("""
|
|
INSERT INTO license_resources (license_id, resource_id, assigned_by)
|
|
VALUES (%s, %s, %s)
|
|
""", (license_id, resource_id, session['username']))
|
|
|
|
cur.execute("""
|
|
INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
|
|
VALUES (%s, %s, 'allocated', %s, %s)
|
|
""", (resource_id, license_id, session['username'], get_client_ip()))
|
|
|
|
# Telefonnummern zuweisen
|
|
if phone_count > 0:
|
|
cur.execute("""
|
|
SELECT id FROM resource_pools
|
|
WHERE resource_type = 'phone' AND status = 'available' AND is_test = %s
|
|
LIMIT %s FOR UPDATE
|
|
""", (is_test, phone_count))
|
|
for (resource_id,) in cur.fetchall():
|
|
cur.execute("""
|
|
UPDATE resource_pools
|
|
SET status = 'allocated', allocated_to_license = %s,
|
|
status_changed_at = CURRENT_TIMESTAMP, status_changed_by = %s
|
|
WHERE id = %s
|
|
""", (license_id, session['username'], resource_id))
|
|
|
|
cur.execute("""
|
|
INSERT INTO license_resources (license_id, resource_id, assigned_by)
|
|
VALUES (%s, %s, %s)
|
|
""", (license_id, resource_id, session['username']))
|
|
|
|
cur.execute("""
|
|
INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
|
|
VALUES (%s, %s, 'allocated', %s, %s)
|
|
""", (resource_id, license_id, session['username'], get_client_ip()))
|
|
|
|
except ValueError as e:
|
|
conn.rollback()
|
|
flash(str(e), 'error')
|
|
return redirect(url_for('create_license'))
|
|
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
log_audit('CREATE', 'license', license_id,
|
|
new_values={
|
|
'license_key': license_key,
|
|
'customer_name': customer_info['name'],
|
|
'customer_email': customer_info['email'],
|
|
'license_type': license_type,
|
|
'valid_from': valid_from,
|
|
'valid_until': valid_until,
|
|
'device_limit': device_limit,
|
|
'is_test': is_test
|
|
})
|
|
|
|
flash(f'Lizenz {license_key} erfolgreich erstellt!', 'success')
|
|
|
|
except Exception as e:
|
|
conn.rollback()
|
|
logging.error(f"Fehler beim Erstellen der Lizenz: {str(e)}")
|
|
flash('Fehler beim Erstellen der Lizenz!', 'error')
|
|
finally:
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Preserve show_test parameter if present
|
|
redirect_url = "/create"
|
|
if request.args.get('show_test') == 'true':
|
|
redirect_url += "?show_test=true"
|
|
return redirect(redirect_url)
|
|
|
|
# Unterstützung für vorausgewählten Kunden
|
|
preselected_customer_id = request.args.get('customer_id', type=int)
|
|
return render_template("index.html", username=session.get('username'), preselected_customer_id=preselected_customer_id)
|
|
|
|
@app.route("/batch", methods=["GET", "POST"])
|
|
@login_required
|
|
def batch_licenses():
|
|
"""Batch-Generierung mehrerer Lizenzen für einen Kunden"""
|
|
if request.method == "POST":
|
|
# Formulardaten
|
|
customer_id = request.form.get("customer_id")
|
|
license_type = request.form["license_type"]
|
|
quantity = int(request.form["quantity"])
|
|
valid_from = request.form["valid_from"]
|
|
is_test = request.form.get("is_test") == "on" # Checkbox value
|
|
|
|
# Berechne valid_until basierend auf Laufzeit
|
|
duration = int(request.form.get("duration", 1))
|
|
duration_type = request.form.get("duration_type", "years")
|
|
|
|
from datetime import datetime, timedelta
|
|
from dateutil.relativedelta import relativedelta
|
|
|
|
start_date = datetime.strptime(valid_from, "%Y-%m-%d")
|
|
|
|
if duration_type == "days":
|
|
end_date = start_date + timedelta(days=duration)
|
|
elif duration_type == "months":
|
|
end_date = start_date + relativedelta(months=duration)
|
|
else: # years
|
|
end_date = start_date + relativedelta(years=duration)
|
|
|
|
# Ein Tag abziehen, da der Starttag mitgezählt wird
|
|
end_date = end_date - timedelta(days=1)
|
|
valid_until = end_date.strftime("%Y-%m-%d")
|
|
|
|

        # Resource counts
        domain_count = int(request.form.get("domain_count", 1))
        ipv4_count = int(request.form.get("ipv4_count", 1))
        phone_count = int(request.form.get("phone_count", 1))
        device_limit = int(request.form.get("device_limit", 3))

        # Sicherheitslimit
        if quantity < 1 or quantity > 100:
            flash('Anzahl muss zwischen 1 und 100 liegen!', 'error')
            return redirect(url_for('batch_licenses'))

        conn = get_connection()
        cur = conn.cursor()

        try:
            # Prüfe ob neuer Kunde oder bestehender
            if customer_id == "new":
                # Neuer Kunde
                name = request.form.get("customer_name")
                email = request.form.get("email")

                if not name:
                    flash('Kundenname ist erforderlich!', 'error')
                    return redirect(url_for('batch_licenses'))

                # Prüfe ob E-Mail bereits existiert
                if email:
                    cur.execute("SELECT id, name FROM customers WHERE LOWER(email) = LOWER(%s)", (email,))
                    existing = cur.fetchone()
                    if existing:
                        flash(f'E-Mail bereits vergeben für Kunde: {existing[1]}', 'error')
                        return redirect(url_for('batch_licenses'))

                # Kunde einfügen (erbt Test-Status von Lizenz)
                cur.execute("""
                    INSERT INTO customers (name, email, is_test, created_at)
                    VALUES (%s, %s, %s, NOW())
                    RETURNING id
                """, (name, email, is_test))
                customer_id = cur.fetchone()[0]

                # Audit-Log für neuen Kunden
                log_audit('CREATE', 'customer', customer_id,
                          new_values={'name': name, 'email': email, 'is_test': is_test})
            else:
                # Bestehender Kunde - hole Infos
                cur.execute("SELECT name, email, is_test FROM customers WHERE id = %s", (customer_id,))
                customer_data = cur.fetchone()
                if not customer_data:
                    flash('Kunde nicht gefunden!', 'error')
                    return redirect(url_for('batch_licenses'))
                name = customer_data[0]
                email = customer_data[1]

                # Wenn Kunde Test-Kunde ist, Lizenzen auch als Test markieren
                if customer_data[2]:  # is_test des Kunden
                    is_test = True

            # Prüfe Ressourcen-Verfügbarkeit für gesamten Batch
            total_domains_needed = domain_count * quantity
            total_ipv4s_needed = ipv4_count * quantity
            total_phones_needed = phone_count * quantity

            cur.execute("""
                SELECT
                    (SELECT COUNT(*) FROM resource_pools WHERE resource_type = 'domain' AND status = 'available' AND is_test = %s) as domains,
                    (SELECT COUNT(*) FROM resource_pools WHERE resource_type = 'ipv4' AND status = 'available' AND is_test = %s) as ipv4s,
                    (SELECT COUNT(*) FROM resource_pools WHERE resource_type = 'phone' AND status = 'available' AND is_test = %s) as phones
            """, (is_test, is_test, is_test))
            available = cur.fetchone()

            if available[0] < total_domains_needed:
                flash(f"Nicht genügend Domains verfügbar (benötigt: {total_domains_needed}, verfügbar: {available[0]})", 'error')
                return redirect(url_for('batch_licenses'))
            if available[1] < total_ipv4s_needed:
                flash(f"Nicht genügend IPv4-Adressen verfügbar (benötigt: {total_ipv4s_needed}, verfügbar: {available[1]})", 'error')
                return redirect(url_for('batch_licenses'))
            if available[2] < total_phones_needed:
                flash(f"Nicht genügend Telefonnummern verfügbar (benötigt: {total_phones_needed}, verfügbar: {available[2]})", 'error')
                return redirect(url_for('batch_licenses'))

            # Lizenzen generieren und speichern
            generated_licenses = []
            for i in range(quantity):
                # Eindeutigen Key generieren
                attempts = 0
                while attempts < 10:
                    license_key = generate_license_key(license_type)
                    cur.execute("SELECT 1 FROM licenses WHERE license_key = %s", (license_key,))
                    if not cur.fetchone():
                        break
                    attempts += 1

                # Lizenz einfügen
                cur.execute("""
                    INSERT INTO licenses (license_key, customer_id, license_type, is_test,
                                          valid_from, valid_until, is_active,
                                          domain_count, ipv4_count, phone_count, device_limit)
                    VALUES (%s, %s, %s, %s, %s, %s, true, %s, %s, %s, %s)
                    RETURNING id
                """, (license_key, customer_id, license_type, is_test, valid_from, valid_until,
                      domain_count, ipv4_count, phone_count, device_limit))
                license_id = cur.fetchone()[0]

                # Ressourcen für diese Lizenz zuweisen
                # Domains
                if domain_count > 0:
                    cur.execute("""
                        SELECT id FROM resource_pools
                        WHERE resource_type = 'domain' AND status = 'available' AND is_test = %s
                        LIMIT %s FOR UPDATE
                    """, (is_test, domain_count))
                    for (resource_id,) in cur.fetchall():
                        cur.execute("""
                            UPDATE resource_pools
                            SET status = 'allocated', allocated_to_license = %s,
                                status_changed_at = CURRENT_TIMESTAMP, status_changed_by = %s
                            WHERE id = %s
                        """, (license_id, session['username'], resource_id))

                        cur.execute("""
                            INSERT INTO license_resources (license_id, resource_id, assigned_by)
                            VALUES (%s, %s, %s)
                        """, (license_id, resource_id, session['username']))

                        cur.execute("""
                            INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
                            VALUES (%s, %s, 'allocated', %s, %s)
                        """, (resource_id, license_id, session['username'], get_client_ip()))

                # IPv4s
                if ipv4_count > 0:
                    cur.execute("""
                        SELECT id FROM resource_pools
                        WHERE resource_type = 'ipv4' AND status = 'available' AND is_test = %s
                        LIMIT %s FOR UPDATE
                    """, (is_test, ipv4_count))
                    for (resource_id,) in cur.fetchall():
                        cur.execute("""
                            UPDATE resource_pools
                            SET status = 'allocated', allocated_to_license = %s,
                                status_changed_at = CURRENT_TIMESTAMP, status_changed_by = %s
                            WHERE id = %s
                        """, (license_id, session['username'], resource_id))

                        cur.execute("""
                            INSERT INTO license_resources (license_id, resource_id, assigned_by)
                            VALUES (%s, %s, %s)
                        """, (license_id, resource_id, session['username']))

                        cur.execute("""
                            INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
                            VALUES (%s, %s, 'allocated', %s, %s)
                        """, (resource_id, license_id, session['username'], get_client_ip()))

                # Telefonnummern
                if phone_count > 0:
                    cur.execute("""
                        SELECT id FROM resource_pools
                        WHERE resource_type = 'phone' AND status = 'available' AND is_test = %s
                        LIMIT %s FOR UPDATE
                    """, (is_test, phone_count))
                    for (resource_id,) in cur.fetchall():
                        cur.execute("""
                            UPDATE resource_pools
                            SET status = 'allocated', allocated_to_license = %s,
                                status_changed_at = CURRENT_TIMESTAMP, status_changed_by = %s
                            WHERE id = %s
                        """, (license_id, session['username'], resource_id))

                        cur.execute("""
                            INSERT INTO license_resources (license_id, resource_id, assigned_by)
                            VALUES (%s, %s, %s)
                        """, (license_id, resource_id, session['username']))

                        cur.execute("""
                            INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
                            VALUES (%s, %s, 'allocated', %s, %s)
                        """, (resource_id, license_id, session['username'], get_client_ip()))

                generated_licenses.append({
                    'id': license_id,
                    'key': license_key,
                    'type': license_type
                })

            conn.commit()

            # Audit-Log
            log_audit('CREATE_BATCH', 'license',
                      new_values={'customer': name, 'quantity': quantity, 'type': license_type},
                      additional_info=f"Batch-Generierung von {quantity} Lizenzen")

            # Session für Export speichern
            session['batch_export'] = {
                'customer': name,
                'email': email,
                'licenses': generated_licenses,
                'valid_from': valid_from,
                'valid_until': valid_until,
                'timestamp': datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None).isoformat()
            }

            flash(f'{quantity} Lizenzen erfolgreich generiert!', 'success')
            return render_template("batch_result.html",
                                   customer=name,
                                   email=email,
                                   licenses=generated_licenses,
                                   valid_from=valid_from,
                                   valid_until=valid_until)

        except Exception as e:
            conn.rollback()
            logging.error(f"Fehler bei Batch-Generierung: {str(e)}")
            flash('Fehler bei der Batch-Generierung!', 'error')
            return redirect(url_for('batch_licenses'))
        finally:
            cur.close()
            conn.close()

    # GET Request
    return render_template("batch_form.html")

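# The /batch/export route below reads the 'batch_export' payload that batch_licenses()
# stores in the session. Illustrative shape (all values are examples):
#   {'customer': 'ACME GmbH', 'email': 'info@acme.example',
#    'licenses': [{'id': 1, 'key': 'FULL-XXXX-XXXX', 'type': 'full'}],
#    'valid_from': '2025-01-01', 'valid_until': '2025-12-31',
#    'timestamp': '2025-01-01T12:00:00'}
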
@app.route("/batch/export")
|
|
@login_required
|
|
def export_batch():
|
|
"""Exportiert die zuletzt generierten Batch-Lizenzen"""
|
|
batch_data = session.get('batch_export')
|
|
if not batch_data:
|
|
flash('Keine Batch-Daten zum Exportieren vorhanden!', 'error')
|
|
return redirect(url_for('batch_licenses'))
|
|
|
|
# CSV generieren
|
|
output = io.StringIO()
|
|
output.write('\ufeff') # UTF-8 BOM für Excel
|
|
|
|
# Header
|
|
output.write(f"Kunde: {batch_data['customer']}\n")
|
|
output.write(f"E-Mail: {batch_data['email']}\n")
|
|
output.write(f"Generiert am: {datetime.fromisoformat(batch_data['timestamp']).strftime('%d.%m.%Y %H:%M')}\n")
|
|
output.write(f"Gültig von: {batch_data['valid_from']} bis {batch_data['valid_until']}\n")
|
|
output.write("\n")
|
|
output.write("Nr;Lizenzschlüssel;Typ\n")
|
|
|
|
# Lizenzen
|
|
for i, license in enumerate(batch_data['licenses'], 1):
|
|
typ_text = "Vollversion" if license['type'] == 'full' else "Testversion"
|
|
output.write(f"{i};{license['key']};{typ_text}\n")
|
|
|
|
output.seek(0)
|
|
|
|
# Audit-Log
|
|
log_audit('EXPORT', 'batch_licenses',
|
|
additional_info=f"Export von {len(batch_data['licenses'])} Batch-Lizenzen")
|
|
|
|
return send_file(
|
|
io.BytesIO(output.getvalue().encode('utf-8-sig')),
|
|
mimetype='text/csv',
|
|
as_attachment=True,
|
|
download_name=f"batch_licenses_{batch_data['customer'].replace(' ', '_')}_{datetime.now(ZoneInfo('Europe/Berlin')).strftime('%Y%m%d_%H%M%S')}.csv"
|
|
)
|
|
|
|
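# Illustrative output of /batch/export (semicolon-separated, values are examples):
#   Kunde: ACME GmbH
#   E-Mail: info@acme.example
#   Generiert am: 01.01.2025 12:00
#   Gültig von: 2025-01-01 bis 2025-12-31
#
#   Nr;Lizenzschlüssel;Typ
#   1;FULL-XXXX-XXXX;Vollversion
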
@app.route("/licenses")
|
|
@login_required
|
|
def licenses():
|
|
# Redirect zur kombinierten Ansicht
|
|
return redirect("/customers-licenses")
|
|
|
|
@app.route("/license/edit/<int:license_id>", methods=["GET", "POST"])
|
|
@login_required
|
|
def edit_license(license_id):
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
if request.method == "POST":
|
|
# Alte Werte für Audit-Log abrufen
|
|
cur.execute("""
|
|
SELECT license_key, license_type, valid_from, valid_until, is_active, is_test, device_limit
|
|
FROM licenses WHERE id = %s
|
|
""", (license_id,))
|
|
old_license = cur.fetchone()
|
|
|
|
# Update license
|
|
license_key = request.form["license_key"]
|
|
license_type = request.form["license_type"]
|
|
valid_from = request.form["valid_from"]
|
|
valid_until = request.form["valid_until"]
|
|
is_active = request.form.get("is_active") == "on"
|
|
is_test = request.form.get("is_test") == "on"
|
|
device_limit = int(request.form.get("device_limit", 3))
|
|
|
|
cur.execute("""
|
|
UPDATE licenses
|
|
SET license_key = %s, license_type = %s, valid_from = %s,
|
|
valid_until = %s, is_active = %s, is_test = %s, device_limit = %s
|
|
WHERE id = %s
|
|
""", (license_key, license_type, valid_from, valid_until, is_active, is_test, device_limit, license_id))
|
|
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
log_audit('UPDATE', 'license', license_id,
|
|
old_values={
|
|
'license_key': old_license[0],
|
|
'license_type': old_license[1],
|
|
'valid_from': str(old_license[2]),
|
|
'valid_until': str(old_license[3]),
|
|
'is_active': old_license[4],
|
|
'is_test': old_license[5],
|
|
'device_limit': old_license[6]
|
|
},
|
|
new_values={
|
|
'license_key': license_key,
|
|
'license_type': license_type,
|
|
'valid_from': valid_from,
|
|
'valid_until': valid_until,
|
|
'is_active': is_active,
|
|
'is_test': is_test,
|
|
'device_limit': device_limit
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Redirect zurück zu customers-licenses mit beibehaltenen Parametern
|
|
redirect_url = "/customers-licenses"
|
|
|
|
# Behalte show_test Parameter bei (aus Form oder GET-Parameter)
|
|
show_test = request.form.get('show_test') or request.args.get('show_test')
|
|
if show_test == 'true':
|
|
redirect_url += "?show_test=true"
|
|
|
|
# Behalte customer_id bei wenn vorhanden
|
|
if request.referrer and 'customer_id=' in request.referrer:
|
|
import re
|
|
match = re.search(r'customer_id=(\d+)', request.referrer)
|
|
if match:
|
|
connector = "&" if "?" in redirect_url else "?"
|
|
redirect_url += f"{connector}customer_id={match.group(1)}"
|
|
|
|
return redirect(redirect_url)
|
|
|
|
# Get license data
|
|
cur.execute("""
|
|
SELECT l.id, l.license_key, c.name, c.email, l.license_type,
|
|
l.valid_from, l.valid_until, l.is_active, c.id, l.is_test, l.device_limit
|
|
FROM licenses l
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE l.id = %s
|
|
""", (license_id,))
|
|
|
|
license = cur.fetchone()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
if not license:
|
|
return redirect("/licenses")
|
|
|
|
return render_template("edit_license.html", license=license, username=session.get('username'))
|
|
|
|
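# Illustrative example for the redirect handling in edit_license(): if the form was
# opened from "/customers-licenses?show_test=true&customer_id=42", the POST branch
# rebuilds "/customers-licenses?show_test=true&customer_id=42" from the submitted
# show_test value and the customer_id extracted from the Referer header, so the user
# returns to the same filtered view.
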
@app.route("/license/delete/<int:license_id>", methods=["POST"])
|
|
@login_required
|
|
def delete_license(license_id):
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Lizenzdetails für Audit-Log abrufen
|
|
cur.execute("""
|
|
SELECT l.license_key, c.name, l.license_type
|
|
FROM licenses l
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE l.id = %s
|
|
""", (license_id,))
|
|
license_info = cur.fetchone()
|
|
|
|
cur.execute("DELETE FROM licenses WHERE id = %s", (license_id,))
|
|
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
if license_info:
|
|
log_audit('DELETE', 'license', license_id,
|
|
old_values={
|
|
'license_key': license_info[0],
|
|
'customer_name': license_info[1],
|
|
'license_type': license_info[2]
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return redirect("/licenses")
|
|
|
|
@app.route("/customers")
|
|
@login_required
|
|
def customers():
|
|
# Redirect zur kombinierten Ansicht
|
|
return redirect("/customers-licenses")
|
|
|
|
@app.route("/customer/edit/<int:customer_id>", methods=["GET", "POST"])
|
|
@login_required
|
|
def edit_customer(customer_id):
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
if request.method == "POST":
|
|
# Alte Werte für Audit-Log abrufen
|
|
cur.execute("SELECT name, email, is_test FROM customers WHERE id = %s", (customer_id,))
|
|
old_customer = cur.fetchone()
|
|
|
|
# Update customer
|
|
name = request.form["name"]
|
|
email = request.form["email"]
|
|
is_test = request.form.get("is_test") == "on"
|
|
|
|
cur.execute("""
|
|
UPDATE customers
|
|
SET name = %s, email = %s, is_test = %s
|
|
WHERE id = %s
|
|
""", (name, email, is_test, customer_id))
|
|
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
log_audit('UPDATE', 'customer', customer_id,
|
|
old_values={
|
|
'name': old_customer[0],
|
|
'email': old_customer[1],
|
|
'is_test': old_customer[2]
|
|
},
|
|
new_values={
|
|
'name': name,
|
|
'email': email,
|
|
'is_test': is_test
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Redirect zurück zu customers-licenses mit beibehaltenen Parametern
|
|
redirect_url = "/customers-licenses"
|
|
|
|
# Behalte show_test Parameter bei (aus Form oder GET-Parameter)
|
|
show_test = request.form.get('show_test') or request.args.get('show_test')
|
|
if show_test == 'true':
|
|
redirect_url += "?show_test=true"
|
|
|
|
# Behalte customer_id bei (immer der aktuelle Kunde)
|
|
connector = "&" if "?" in redirect_url else "?"
|
|
redirect_url += f"{connector}customer_id={customer_id}"
|
|
|
|
return redirect(redirect_url)
|
|
|
|
# Get customer data with licenses
|
|
cur.execute("""
|
|
SELECT id, name, email, created_at, is_test FROM customers WHERE id = %s
|
|
""", (customer_id,))
|
|
|
|
customer = cur.fetchone()
|
|
if not customer:
|
|
cur.close()
|
|
conn.close()
|
|
return "Kunde nicht gefunden", 404
|
|
|
|
|
|
# Get customer's licenses
|
|
cur.execute("""
|
|
SELECT id, license_key, license_type, valid_from, valid_until, is_active
|
|
FROM licenses
|
|
WHERE customer_id = %s
|
|
ORDER BY valid_until DESC
|
|
""", (customer_id,))
|
|
|
|
licenses = cur.fetchall()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
if not customer:
|
|
return redirect("/customers-licenses")
|
|
|
|
return render_template("edit_customer.html", customer=customer, licenses=licenses, username=session.get('username'))
|
|
|
|
@app.route("/customer/create", methods=["GET", "POST"])
|
|
@login_required
|
|
def create_customer():
|
|
"""Erstellt einen neuen Kunden ohne Lizenz"""
|
|
if request.method == "POST":
|
|
name = request.form.get('name')
|
|
email = request.form.get('email')
|
|
is_test = request.form.get('is_test') == 'on'
|
|
|
|
if not name or not email:
|
|
flash("Name und E-Mail sind Pflichtfelder!", "error")
|
|
return render_template("create_customer.html", username=session.get('username'))
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
try:
|
|
# Prüfen ob E-Mail bereits existiert
|
|
cur.execute("SELECT id, name FROM customers WHERE email = %s", (email,))
|
|
existing = cur.fetchone()
|
|
if existing:
|
|
flash(f"Ein Kunde mit der E-Mail '{email}' existiert bereits: {existing[1]}", "error")
|
|
return render_template("create_customer.html", username=session.get('username'))
|
|
|
|
# Kunde erstellen
|
|
cur.execute("""
|
|
INSERT INTO customers (name, email, created_at, is_test)
|
|
VALUES (%s, %s, %s, %s) RETURNING id
|
|
""", (name, email, datetime.now(), is_test))
|
|
|
|
customer_id = cur.fetchone()[0]
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
log_audit('CREATE', 'customer', customer_id,
|
|
new_values={
|
|
'name': name,
|
|
'email': email,
|
|
'is_test': is_test
|
|
})
|
|
|
|
flash(f"Kunde '{name}' wurde erfolgreich angelegt!", "success")
|
|
return redirect(f"/customer/edit/{customer_id}")
|
|
|
|
except Exception as e:
|
|
conn.rollback()
|
|
flash(f"Fehler beim Anlegen des Kunden: {str(e)}", "error")
|
|
return render_template("create_customer.html", username=session.get('username'))
|
|
finally:
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# GET Request - Formular anzeigen
|
|
return render_template("create_customer.html", username=session.get('username'))
|
|
|
|
@app.route("/customer/delete/<int:customer_id>", methods=["POST"])
|
|
@login_required
|
|
def delete_customer(customer_id):
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Prüfen ob Kunde Lizenzen hat
|
|
cur.execute("SELECT COUNT(*) FROM licenses WHERE customer_id = %s", (customer_id,))
|
|
license_count = cur.fetchone()[0]
|
|
|
|
if license_count > 0:
|
|
# Kunde hat Lizenzen - nicht löschen
|
|
cur.close()
|
|
conn.close()
|
|
return redirect("/customers")
|
|
|
|
# Kundendetails für Audit-Log abrufen
|
|
cur.execute("SELECT name, email FROM customers WHERE id = %s", (customer_id,))
|
|
customer_info = cur.fetchone()
|
|
|
|
# Kunde löschen wenn keine Lizenzen vorhanden
|
|
cur.execute("DELETE FROM customers WHERE id = %s", (customer_id,))
|
|
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
if customer_info:
|
|
log_audit('DELETE', 'customer', customer_id,
|
|
old_values={
|
|
'name': customer_info[0],
|
|
'email': customer_info[1]
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return redirect("/customers")
|
|
|
|
@app.route("/customers-licenses")
|
|
@login_required
|
|
def customers_licenses():
|
|
"""Kombinierte Ansicht für Kunden und deren Lizenzen"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Hole alle Kunden mit Lizenzstatistiken (inkl. Testkunden wenn gewünscht)
|
|
show_test = request.args.get('show_test', 'false').lower() == 'true'
|
|
|
|
query = """
|
|
SELECT
|
|
c.id,
|
|
c.name,
|
|
c.email,
|
|
c.created_at,
|
|
COUNT(l.id) as total_licenses,
|
|
COUNT(CASE WHEN l.is_active = TRUE AND l.valid_until >= CURRENT_DATE THEN 1 END) as active_licenses,
|
|
COUNT(CASE WHEN l.valid_until < CURRENT_DATE THEN 1 END) as expired_licenses
|
|
FROM customers c
|
|
LEFT JOIN licenses l ON c.id = l.customer_id
|
|
"""
|
|
|
|
if not show_test:
|
|
query += " WHERE c.is_test = FALSE"
|
|
|
|
query += """
|
|
GROUP BY c.id, c.name, c.email, c.created_at
|
|
ORDER BY c.name
|
|
"""
|
|
|
|
cur.execute(query)
|
|
customers = cur.fetchall()
|
|
|
|
# Hole ausgewählten Kunden nur wenn explizit in URL angegeben
|
|
selected_customer_id = request.args.get('customer_id', type=int)
|
|
licenses = []
|
|
selected_customer = None
|
|
|
|
if customers and selected_customer_id:
|
|
# Hole Daten des ausgewählten Kunden
|
|
for customer in customers:
|
|
if customer[0] == selected_customer_id:
|
|
selected_customer = customer
|
|
break
|
|
|
|
# Hole Lizenzen des ausgewählten Kunden
|
|
if selected_customer:
|
|
cur.execute("""
|
|
SELECT
|
|
l.id,
|
|
l.license_key,
|
|
l.license_type,
|
|
l.valid_from,
|
|
l.valid_until,
|
|
l.is_active,
|
|
CASE
|
|
WHEN l.is_active = FALSE THEN 'deaktiviert'
|
|
WHEN l.valid_until < CURRENT_DATE THEN 'abgelaufen'
|
|
WHEN l.valid_until < CURRENT_DATE + INTERVAL '30 days' THEN 'läuft bald ab'
|
|
ELSE 'aktiv'
|
|
END as status,
|
|
l.domain_count,
|
|
l.ipv4_count,
|
|
l.phone_count,
|
|
l.device_limit,
|
|
(SELECT COUNT(*) FROM device_registrations WHERE license_id = l.id AND is_active = TRUE) as active_devices,
|
|
-- Actual resource counts
|
|
(SELECT COUNT(*) FROM license_resources lr
|
|
JOIN resource_pools rp ON lr.resource_id = rp.id
|
|
WHERE lr.license_id = l.id AND lr.is_active = true AND rp.resource_type = 'domain') as actual_domain_count,
|
|
(SELECT COUNT(*) FROM license_resources lr
|
|
JOIN resource_pools rp ON lr.resource_id = rp.id
|
|
WHERE lr.license_id = l.id AND lr.is_active = true AND rp.resource_type = 'ipv4') as actual_ipv4_count,
|
|
(SELECT COUNT(*) FROM license_resources lr
|
|
JOIN resource_pools rp ON lr.resource_id = rp.id
|
|
WHERE lr.license_id = l.id AND lr.is_active = true AND rp.resource_type = 'phone') as actual_phone_count
|
|
FROM licenses l
|
|
WHERE l.customer_id = %s
|
|
ORDER BY l.created_at DESC, l.id DESC
|
|
""", (selected_customer_id,))
|
|
licenses = cur.fetchall()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return render_template("customers_licenses.html",
|
|
customers=customers,
|
|
selected_customer=selected_customer,
|
|
selected_customer_id=selected_customer_id,
|
|
licenses=licenses,
|
|
show_test=show_test)
|
|
|
|
@app.route("/api/customer/<int:customer_id>/licenses")
|
|
@login_required
|
|
def api_customer_licenses(customer_id):
|
|
"""API-Endpoint für AJAX-Abruf der Lizenzen eines Kunden"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Hole Lizenzen des Kunden
|
|
cur.execute("""
|
|
SELECT
|
|
l.id,
|
|
l.license_key,
|
|
l.license_type,
|
|
l.valid_from,
|
|
l.valid_until,
|
|
l.is_active,
|
|
CASE
|
|
WHEN l.is_active = FALSE THEN 'deaktiviert'
|
|
WHEN l.valid_until < CURRENT_DATE THEN 'abgelaufen'
|
|
WHEN l.valid_until < CURRENT_DATE + INTERVAL '30 days' THEN 'läuft bald ab'
|
|
ELSE 'aktiv'
|
|
END as status,
|
|
l.domain_count,
|
|
l.ipv4_count,
|
|
l.phone_count,
|
|
l.device_limit,
|
|
(SELECT COUNT(*) FROM device_registrations WHERE license_id = l.id AND is_active = TRUE) as active_devices,
|
|
-- Actual resource counts
|
|
(SELECT COUNT(*) FROM license_resources lr
|
|
JOIN resource_pools rp ON lr.resource_id = rp.id
|
|
WHERE lr.license_id = l.id AND lr.is_active = true AND rp.resource_type = 'domain') as actual_domain_count,
|
|
(SELECT COUNT(*) FROM license_resources lr
|
|
JOIN resource_pools rp ON lr.resource_id = rp.id
|
|
WHERE lr.license_id = l.id AND lr.is_active = true AND rp.resource_type = 'ipv4') as actual_ipv4_count,
|
|
(SELECT COUNT(*) FROM license_resources lr
|
|
JOIN resource_pools rp ON lr.resource_id = rp.id
|
|
WHERE lr.license_id = l.id AND lr.is_active = true AND rp.resource_type = 'phone') as actual_phone_count
|
|
FROM licenses l
|
|
WHERE l.customer_id = %s
|
|
ORDER BY l.created_at DESC, l.id DESC
|
|
""", (customer_id,))
|
|
|
|
licenses = []
|
|
for row in cur.fetchall():
|
|
license_id = row[0]
|
|
|
|
# Hole die konkreten zugewiesenen Ressourcen für diese Lizenz
|
|
cur.execute("""
|
|
SELECT rp.id, rp.resource_type, rp.resource_value, lr.assigned_at
|
|
FROM resource_pools rp
|
|
JOIN license_resources lr ON rp.id = lr.resource_id
|
|
WHERE lr.license_id = %s AND lr.is_active = true
|
|
ORDER BY rp.resource_type, rp.resource_value
|
|
""", (license_id,))
|
|
|
|
resources = {
|
|
'domains': [],
|
|
'ipv4s': [],
|
|
'phones': []
|
|
}
|
|
|
|
for res_row in cur.fetchall():
|
|
resource_info = {
|
|
'id': res_row[0],
|
|
'value': res_row[2],
|
|
'assigned_at': res_row[3].strftime('%d.%m.%Y') if res_row[3] else ''
|
|
}
|
|
|
|
if res_row[1] == 'domain':
|
|
resources['domains'].append(resource_info)
|
|
elif res_row[1] == 'ipv4':
|
|
resources['ipv4s'].append(resource_info)
|
|
elif res_row[1] == 'phone':
|
|
resources['phones'].append(resource_info)
|
|
|
|
licenses.append({
|
|
'id': row[0],
|
|
'license_key': row[1],
|
|
'license_type': row[2],
|
|
'valid_from': row[3].strftime('%d.%m.%Y') if row[3] else '',
|
|
'valid_until': row[4].strftime('%d.%m.%Y') if row[4] else '',
|
|
'is_active': row[5],
|
|
'status': row[6],
|
|
'domain_count': row[7], # limit
|
|
'ipv4_count': row[8], # limit
|
|
'phone_count': row[9], # limit
|
|
'device_limit': row[10],
|
|
'active_devices': row[11],
|
|
'actual_domain_count': row[12], # actual count
|
|
'actual_ipv4_count': row[13], # actual count
|
|
'actual_phone_count': row[14], # actual count
|
|
'resources': resources
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'licenses': licenses,
|
|
'count': len(licenses)
|
|
})
|
|
|
|
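# Illustrative response shape of /api/customer/<id>/licenses (field values are examples):
#   {"success": true, "count": 1,
#    "licenses": [{"id": 7, "license_key": "FULL-XXXX-XXXX", "status": "aktiv",
#                  "domain_count": 1, "actual_domain_count": 1,
#                  "resources": {"domains": [{"id": 3, "value": "example.com",
#                                             "assigned_at": "01.01.2025"}],
#                                "ipv4s": [], "phones": []}}]}
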
@app.route("/api/customer/<int:customer_id>/quick-stats")
|
|
@login_required
|
|
def api_customer_quick_stats(customer_id):
|
|
"""API-Endpoint für Schnellstatistiken eines Kunden"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Hole Kundenstatistiken
|
|
cur.execute("""
|
|
SELECT
|
|
COUNT(l.id) as total_licenses,
|
|
COUNT(CASE WHEN l.is_active = TRUE AND l.valid_until >= CURRENT_DATE THEN 1 END) as active_licenses,
|
|
COUNT(CASE WHEN l.valid_until < CURRENT_DATE THEN 1 END) as expired_licenses,
|
|
COUNT(CASE WHEN l.valid_until < CURRENT_DATE + INTERVAL '30 days' AND l.valid_until >= CURRENT_DATE THEN 1 END) as expiring_soon
|
|
FROM licenses l
|
|
WHERE l.customer_id = %s
|
|
""", (customer_id,))
|
|
|
|
stats = cur.fetchone()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'stats': {
|
|
'total': stats[0],
|
|
'active': stats[1],
|
|
'expired': stats[2],
|
|
'expiring_soon': stats[3]
|
|
}
|
|
})
|
|
|
|
@app.route("/api/license/<int:license_id>/quick-edit", methods=['POST'])
|
|
@login_required
|
|
def api_license_quick_edit(license_id):
|
|
"""API-Endpoint für schnelle Lizenz-Bearbeitung"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
try:
|
|
data = request.get_json()
|
|
|
|
# Hole alte Werte für Audit-Log
|
|
cur.execute("""
|
|
SELECT is_active, valid_until, license_type
|
|
FROM licenses WHERE id = %s
|
|
""", (license_id,))
|
|
old_values = cur.fetchone()
|
|
|
|
if not old_values:
|
|
return jsonify({'success': False, 'error': 'Lizenz nicht gefunden'}), 404
|
|
|
|
# Update-Felder vorbereiten
|
|
updates = []
|
|
params = []
|
|
new_values = {}
|
|
|
|
if 'is_active' in data:
|
|
updates.append("is_active = %s")
|
|
params.append(data['is_active'])
|
|
new_values['is_active'] = data['is_active']
|
|
|
|
if 'valid_until' in data:
|
|
updates.append("valid_until = %s")
|
|
params.append(data['valid_until'])
|
|
new_values['valid_until'] = data['valid_until']
|
|
|
|
if 'license_type' in data:
|
|
updates.append("license_type = %s")
|
|
params.append(data['license_type'])
|
|
new_values['license_type'] = data['license_type']
|
|
|
|
if updates:
|
|
params.append(license_id)
|
|
cur.execute(f"""
|
|
UPDATE licenses
|
|
SET {', '.join(updates)}
|
|
WHERE id = %s
|
|
""", params)
|
|
|
|
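        # Illustrative example: a JSON body of {"is_active": false, "valid_until": "2025-12-31"}
        # builds "UPDATE licenses SET is_active = %s, valid_until = %s WHERE id = %s"
        # with params [False, "2025-12-31", license_id]; unknown keys are ignored.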
        conn.commit()

        # Audit-Log
        log_audit('UPDATE', 'license', license_id,
                  old_values={
                      'is_active': old_values[0],
                      'valid_until': old_values[1].isoformat() if old_values[1] else None,
                      'license_type': old_values[2]
                  },
                  new_values=new_values)

        cur.close()
        conn.close()

        return jsonify({'success': True})

    except Exception as e:
        conn.rollback()
        cur.close()
        conn.close()
        return jsonify({'success': False, 'error': str(e)}), 500

@app.route("/api/license/<int:license_id>/resources")
|
|
@login_required
|
|
def api_license_resources(license_id):
|
|
"""API-Endpoint für detaillierte Ressourcen-Informationen einer Lizenz"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
try:
|
|
# Hole die konkreten zugewiesenen Ressourcen für diese Lizenz
|
|
cur.execute("""
|
|
SELECT rp.id, rp.resource_type, rp.resource_value, lr.assigned_at
|
|
FROM resource_pools rp
|
|
JOIN license_resources lr ON rp.id = lr.resource_id
|
|
WHERE lr.license_id = %s AND lr.is_active = true
|
|
ORDER BY rp.resource_type, rp.resource_value
|
|
""", (license_id,))
|
|
|
|
resources = {
|
|
'domains': [],
|
|
'ipv4s': [],
|
|
'phones': []
|
|
}
|
|
|
|
for row in cur.fetchall():
|
|
resource_info = {
|
|
'id': row[0],
|
|
'value': row[2],
|
|
'assigned_at': row[3].strftime('%d.%m.%Y') if row[3] else ''
|
|
}
|
|
|
|
if row[1] == 'domain':
|
|
resources['domains'].append(resource_info)
|
|
elif row[1] == 'ipv4':
|
|
resources['ipv4s'].append(resource_info)
|
|
elif row[1] == 'phone':
|
|
resources['phones'].append(resource_info)
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'resources': resources
|
|
})
|
|
|
|
except Exception as e:
|
|
cur.close()
|
|
conn.close()
|
|
return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
@app.route("/sessions")
|
|
@login_required
|
|
def sessions():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Sortierparameter
|
|
active_sort = request.args.get('active_sort', 'last_heartbeat')
|
|
active_order = request.args.get('active_order', 'desc')
|
|
ended_sort = request.args.get('ended_sort', 'ended_at')
|
|
ended_order = request.args.get('ended_order', 'desc')
|
|
|
|
# Whitelist für erlaubte Sortierfelder - Aktive Sessions
|
|
active_sort_fields = {
|
|
'customer': 'c.name',
|
|
'license': 'l.license_key',
|
|
'ip': 's.ip_address',
|
|
'started': 's.started_at',
|
|
'last_heartbeat': 's.last_heartbeat',
|
|
'inactive': 'minutes_inactive'
|
|
}
|
|
|
|
# Whitelist für erlaubte Sortierfelder - Beendete Sessions
|
|
ended_sort_fields = {
|
|
'customer': 'c.name',
|
|
'license': 'l.license_key',
|
|
'ip': 's.ip_address',
|
|
'started': 's.started_at',
|
|
'ended_at': 's.ended_at',
|
|
'duration': 'duration_minutes'
|
|
}
|
|
|
|
# Validierung
|
|
if active_sort not in active_sort_fields:
|
|
active_sort = 'last_heartbeat'
|
|
if ended_sort not in ended_sort_fields:
|
|
ended_sort = 'ended_at'
|
|
if active_order not in ['asc', 'desc']:
|
|
active_order = 'desc'
|
|
if ended_order not in ['asc', 'desc']:
|
|
ended_order = 'desc'
|
|
|
|
# Aktive Sessions abrufen
|
|
cur.execute(f"""
|
|
SELECT s.id, s.session_id, l.license_key, c.name, s.ip_address,
|
|
s.user_agent, s.started_at, s.last_heartbeat,
|
|
EXTRACT(EPOCH FROM (NOW() - s.last_heartbeat))/60 as minutes_inactive
|
|
FROM sessions s
|
|
JOIN licenses l ON s.license_id = l.id
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE s.is_active = TRUE
|
|
ORDER BY {active_sort_fields[active_sort]} {active_order.upper()}
|
|
""")
|
|
active_sessions = cur.fetchall()
|
|
|
|
# Inaktive Sessions der letzten 24 Stunden
|
|
cur.execute(f"""
|
|
SELECT s.id, s.session_id, l.license_key, c.name, s.ip_address,
|
|
s.started_at, s.ended_at,
|
|
EXTRACT(EPOCH FROM (s.ended_at - s.started_at))/60 as duration_minutes
|
|
FROM sessions s
|
|
JOIN licenses l ON s.license_id = l.id
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE s.is_active = FALSE
|
|
AND s.ended_at > NOW() - INTERVAL '24 hours'
|
|
ORDER BY {ended_sort_fields[ended_sort]} {ended_order.upper()}
|
|
LIMIT 50
|
|
""")
|
|
recent_sessions = cur.fetchall()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return render_template("sessions.html",
|
|
active_sessions=active_sessions,
|
|
recent_sessions=recent_sessions,
|
|
active_sort=active_sort,
|
|
active_order=active_order,
|
|
ended_sort=ended_sort,
|
|
ended_order=ended_order,
|
|
username=session.get('username'))
|
|
|
|
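# Note on the f-string ORDER BY in sessions() above: interpolating into SQL is only safe
# here because both the column ('customer' -> 'c.name', ...) and the direction
# ('asc'/'desc') are mapped through fixed whitelists first, so user input never reaches
# the query text directly.
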
@app.route("/session/end/<int:session_id>", methods=["POST"])
|
|
@login_required
|
|
def end_session(session_id):
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Session beenden
|
|
cur.execute("""
|
|
UPDATE sessions
|
|
SET is_active = FALSE, ended_at = NOW()
|
|
WHERE id = %s AND is_active = TRUE
|
|
""", (session_id,))
|
|
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return redirect("/sessions")
|
|
|
|
@app.route("/export/licenses")
|
|
@login_required
|
|
def export_licenses():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Alle Lizenzen mit Kundeninformationen abrufen (ohne Testdaten, außer explizit gewünscht)
|
|
include_test = request.args.get('include_test', 'false').lower() == 'true'
|
|
customer_id = request.args.get('customer_id', type=int)
|
|
|
|
query = """
|
|
SELECT l.id, l.license_key, c.name as customer_name, c.email as customer_email,
|
|
l.license_type, l.valid_from, l.valid_until, l.is_active, l.is_test,
|
|
CASE
|
|
WHEN l.is_active = FALSE THEN 'Deaktiviert'
|
|
WHEN l.valid_until < CURRENT_DATE THEN 'Abgelaufen'
|
|
WHEN l.valid_until < CURRENT_DATE + INTERVAL '30 days' THEN 'Läuft bald ab'
|
|
ELSE 'Aktiv'
|
|
END as status
|
|
FROM licenses l
|
|
JOIN customers c ON l.customer_id = c.id
|
|
"""
|
|
|
|
# Build WHERE clause
|
|
where_conditions = []
|
|
params = []
|
|
|
|
if not include_test:
|
|
where_conditions.append("l.is_test = FALSE")
|
|
|
|
if customer_id:
|
|
where_conditions.append("l.customer_id = %s")
|
|
params.append(customer_id)
|
|
|
|
if where_conditions:
|
|
query += " WHERE " + " AND ".join(where_conditions)
|
|
|
|
query += " ORDER BY l.id"
|
|
|
|
cur.execute(query, params)
|
|
|
|
# Spaltennamen
|
|
columns = ['ID', 'Lizenzschlüssel', 'Kunde', 'E-Mail', 'Typ',
|
|
'Gültig von', 'Gültig bis', 'Aktiv', 'Testdaten', 'Status']
|
|
|
|
# Daten in DataFrame
|
|
data = cur.fetchall()
|
|
df = pd.DataFrame(data, columns=columns)
|
|
|
|
# Datumsformatierung
|
|
df['Gültig von'] = pd.to_datetime(df['Gültig von']).dt.strftime('%d.%m.%Y')
|
|
df['Gültig bis'] = pd.to_datetime(df['Gültig bis']).dt.strftime('%d.%m.%Y')
|
|
|
|
# Typ und Aktiv Status anpassen
|
|
df['Typ'] = df['Typ'].replace({'full': 'Vollversion', 'test': 'Testversion'})
|
|
df['Aktiv'] = df['Aktiv'].replace({True: 'Ja', False: 'Nein'})
|
|
df['Testdaten'] = df['Testdaten'].replace({True: 'Ja', False: 'Nein'})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Export Format
|
|
export_format = request.args.get('format', 'excel')
|
|
|
|
# Audit-Log
|
|
log_audit('EXPORT', 'license',
|
|
additional_info=f"Export aller Lizenzen als {export_format.upper()}")
|
|
filename = f'lizenzen_export_{datetime.now(ZoneInfo("Europe/Berlin")).strftime("%Y%m%d_%H%M%S")}'
|
|
|
|
if export_format == 'csv':
|
|
# CSV Export
|
|
output = io.StringIO()
|
|
df.to_csv(output, index=False, encoding='utf-8-sig', sep=';')
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
io.BytesIO(output.getvalue().encode('utf-8-sig')),
|
|
mimetype='text/csv',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.csv'
|
|
)
|
|
else:
|
|
# Excel Export
|
|
output = io.BytesIO()
|
|
with pd.ExcelWriter(output, engine='openpyxl') as writer:
|
|
df.to_excel(writer, sheet_name='Lizenzen', index=False)
|
|
|
|
# Formatierung
|
|
worksheet = writer.sheets['Lizenzen']
|
|
for column in worksheet.columns:
|
|
max_length = 0
|
|
column_letter = column[0].column_letter
|
|
for cell in column:
|
|
try:
|
|
if len(str(cell.value)) > max_length:
|
|
max_length = len(str(cell.value))
|
|
except:
|
|
pass
|
|
adjusted_width = min(max_length + 2, 50)
|
|
worksheet.column_dimensions[column_letter].width = adjusted_width
|
|
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
output,
|
|
mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.xlsx'
|
|
)
|
|
|
|
@app.route("/export/audit")
|
|
@login_required
|
|
def export_audit():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Holen der Filter-Parameter
|
|
filter_user = request.args.get('user', '')
|
|
filter_action = request.args.get('action', '')
|
|
filter_entity = request.args.get('entity', '')
|
|
export_format = request.args.get('format', 'excel')
|
|
|
|
# SQL Query mit Filtern
|
|
query = """
|
|
SELECT id, timestamp, username, action, entity_type, entity_id,
|
|
old_values, new_values, ip_address, user_agent, additional_info
|
|
FROM audit_log
|
|
WHERE 1=1
|
|
"""
|
|
params = []
|
|
|
|
if filter_user:
|
|
query += " AND username ILIKE %s"
|
|
params.append(f'%{filter_user}%')
|
|
|
|
if filter_action:
|
|
query += " AND action = %s"
|
|
params.append(filter_action)
|
|
|
|
if filter_entity:
|
|
query += " AND entity_type = %s"
|
|
params.append(filter_entity)
|
|
|
|
query += " ORDER BY timestamp DESC"
|
|
|
|
cur.execute(query, params)
|
|
audit_logs = cur.fetchall()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Daten für Export vorbereiten
|
|
data = []
|
|
for log in audit_logs:
|
|
action_text = {
|
|
'CREATE': 'Erstellt',
|
|
'UPDATE': 'Bearbeitet',
|
|
'DELETE': 'Gelöscht',
|
|
'LOGIN': 'Anmeldung',
|
|
'LOGOUT': 'Abmeldung',
|
|
'AUTO_LOGOUT': 'Auto-Logout',
|
|
'EXPORT': 'Export',
|
|
'GENERATE_KEY': 'Key generiert',
|
|
'CREATE_BATCH': 'Batch erstellt',
|
|
'BACKUP': 'Backup erstellt',
|
|
'LOGIN_2FA_SUCCESS': '2FA-Anmeldung',
|
|
'LOGIN_2FA_BACKUP': '2FA-Backup-Code',
|
|
'LOGIN_2FA_FAILED': '2FA-Fehlgeschlagen',
|
|
'LOGIN_BLOCKED': 'Login-Blockiert',
|
|
'RESTORE': 'Wiederhergestellt',
|
|
'PASSWORD_CHANGE': 'Passwort geändert',
|
|
'2FA_ENABLED': '2FA aktiviert',
|
|
'2FA_DISABLED': '2FA deaktiviert'
|
|
}.get(log[3], log[3])
|
|
|
|
data.append({
|
|
'ID': log[0],
|
|
'Zeitstempel': log[1].strftime('%d.%m.%Y %H:%M:%S'),
|
|
'Benutzer': log[2],
|
|
'Aktion': action_text,
|
|
'Entität': log[4],
|
|
'Entität-ID': log[5] or '',
|
|
'IP-Adresse': log[8] or '',
|
|
'Zusatzinfo': log[10] or ''
|
|
})
|
|
|
|
# DataFrame erstellen
|
|
df = pd.DataFrame(data)
|
|
|
|
# Timestamp für Dateiname
|
|
timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
|
|
filename = f'audit_log_export_{timestamp}'
|
|
|
|
# Audit Log für Export
|
|
log_audit('EXPORT', 'audit_log',
|
|
additional_info=f"{export_format.upper()} Export mit {len(data)} Einträgen")
|
|
|
|
if export_format == 'csv':
|
|
# CSV Export
|
|
output = io.StringIO()
|
|
# UTF-8 BOM für Excel
|
|
output.write('\ufeff')
|
|
df.to_csv(output, index=False, sep=';', encoding='utf-8')
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
io.BytesIO(output.getvalue().encode('utf-8')),
|
|
mimetype='text/csv;charset=utf-8',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.csv'
|
|
)
|
|
else:
|
|
# Excel Export
|
|
output = BytesIO()
|
|
with pd.ExcelWriter(output, engine='openpyxl') as writer:
|
|
df.to_excel(writer, index=False, sheet_name='Audit Log')
|
|
|
|
# Spaltenbreiten anpassen
|
|
worksheet = writer.sheets['Audit Log']
|
|
for idx, col in enumerate(df.columns):
|
|
max_length = max(
|
|
df[col].astype(str).map(len).max(),
|
|
len(col)
|
|
) + 2
|
|
worksheet.column_dimensions[get_column_letter(idx + 1)].width = min(max_length, 50)
|
|
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
output,
|
|
mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.xlsx'
|
|
)
|
|
|
|
@app.route("/export/customers")
|
|
@login_required
|
|
def export_customers():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Check if test data should be included
|
|
include_test = request.args.get('include_test', 'false').lower() == 'true'
|
|
|
|
# Build query based on test data filter
|
|
if include_test:
|
|
# Include all customers
|
|
query = """
|
|
SELECT c.id, c.name, c.email, c.created_at, c.is_test,
|
|
COUNT(l.id) as total_licenses,
|
|
COUNT(CASE WHEN l.is_active = TRUE AND l.valid_until >= CURRENT_DATE THEN 1 END) as active_licenses,
|
|
COUNT(CASE WHEN l.valid_until < CURRENT_DATE THEN 1 END) as expired_licenses
|
|
FROM customers c
|
|
LEFT JOIN licenses l ON c.id = l.customer_id
|
|
GROUP BY c.id, c.name, c.email, c.created_at, c.is_test
|
|
ORDER BY c.id
|
|
"""
|
|
else:
|
|
# Exclude test customers and test licenses
|
|
query = """
|
|
SELECT c.id, c.name, c.email, c.created_at, c.is_test,
|
|
COUNT(CASE WHEN l.is_test = FALSE THEN 1 END) as total_licenses,
|
|
COUNT(CASE WHEN l.is_active = TRUE AND l.valid_until >= CURRENT_DATE AND l.is_test = FALSE THEN 1 END) as active_licenses,
|
|
COUNT(CASE WHEN l.valid_until < CURRENT_DATE AND l.is_test = FALSE THEN 1 END) as expired_licenses
|
|
FROM customers c
|
|
LEFT JOIN licenses l ON c.id = l.customer_id
|
|
WHERE c.is_test = FALSE
|
|
GROUP BY c.id, c.name, c.email, c.created_at, c.is_test
|
|
ORDER BY c.id
|
|
"""
|
|
|
|
cur.execute(query)
|
|
|
|
# Spaltennamen
|
|
columns = ['ID', 'Name', 'E-Mail', 'Erstellt am', 'Testdaten',
|
|
'Lizenzen gesamt', 'Aktive Lizenzen', 'Abgelaufene Lizenzen']
|
|
|
|
# Daten in DataFrame
|
|
data = cur.fetchall()
|
|
df = pd.DataFrame(data, columns=columns)
|
|
|
|
# Datumsformatierung
|
|
df['Erstellt am'] = pd.to_datetime(df['Erstellt am']).dt.strftime('%d.%m.%Y %H:%M')
|
|
|
|
# Testdaten formatting
|
|
df['Testdaten'] = df['Testdaten'].replace({True: 'Ja', False: 'Nein'})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Export Format
|
|
export_format = request.args.get('format', 'excel')
|
|
|
|
# Audit-Log
|
|
log_audit('EXPORT', 'customer',
|
|
additional_info=f"Export aller Kunden als {export_format.upper()}")
|
|
filename = f'kunden_export_{datetime.now(ZoneInfo("Europe/Berlin")).strftime("%Y%m%d_%H%M%S")}'
|
|
|
|
if export_format == 'csv':
|
|
# CSV Export
|
|
output = io.StringIO()
|
|
df.to_csv(output, index=False, encoding='utf-8-sig', sep=';')
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
io.BytesIO(output.getvalue().encode('utf-8-sig')),
|
|
mimetype='text/csv',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.csv'
|
|
)
|
|
else:
|
|
# Excel Export
|
|
output = io.BytesIO()
|
|
with pd.ExcelWriter(output, engine='openpyxl') as writer:
|
|
df.to_excel(writer, sheet_name='Kunden', index=False)
|
|
|
|
# Formatierung
|
|
worksheet = writer.sheets['Kunden']
|
|
for column in worksheet.columns:
|
|
max_length = 0
|
|
column_letter = column[0].column_letter
|
|
for cell in column:
|
|
try:
|
|
if len(str(cell.value)) > max_length:
|
|
max_length = len(str(cell.value))
|
|
except:
|
|
pass
|
|
adjusted_width = min(max_length + 2, 50)
|
|
worksheet.column_dimensions[column_letter].width = adjusted_width
|
|
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
output,
|
|
mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.xlsx'
|
|
)
|
|
|
|
@app.route("/export/sessions")
|
|
@login_required
|
|
def export_sessions():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Holen des Session-Typs (active oder ended)
|
|
session_type = request.args.get('type', 'active')
|
|
export_format = request.args.get('format', 'excel')
|
|
|
|
# Daten je nach Typ abrufen
|
|
if session_type == 'active':
|
|
# Aktive Lizenz-Sessions
|
|
cur.execute("""
|
|
SELECT s.id, l.license_key, c.name as customer_name, s.session_id,
|
|
s.started_at, s.last_heartbeat,
|
|
EXTRACT(EPOCH FROM (NOW() - s.started_at))::INT as duration_seconds,
|
|
s.ip_address, s.user_agent
|
|
FROM sessions s
|
|
JOIN licenses l ON s.license_id = l.id
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE s.is_active = true
|
|
ORDER BY s.last_heartbeat DESC
|
|
""")
|
|
sessions = cur.fetchall()
|
|
|
|
# Daten für Export vorbereiten
|
|
data = []
|
|
for sess in sessions:
|
|
duration = sess[6]
|
|
hours = duration // 3600
|
|
minutes = (duration % 3600) // 60
|
|
seconds = duration % 60
|
|
|
|
data.append({
|
|
'Session-ID': sess[0],
|
|
'Lizenzschlüssel': sess[1],
|
|
'Kunde': sess[2],
|
|
'Session-ID (Tech)': sess[3],
|
|
'Startzeit': sess[4].strftime('%d.%m.%Y %H:%M:%S'),
|
|
'Letzte Aktivität': sess[5].strftime('%d.%m.%Y %H:%M:%S'),
|
|
'Dauer': f"{hours}h {minutes}m {seconds}s",
|
|
'IP-Adresse': sess[7],
|
|
'Browser': sess[8]
|
|
})
|
|
|
|
sheet_name = 'Aktive Sessions'
|
|
filename_prefix = 'aktive_sessions'
|
|
else:
|
|
# Beendete Lizenz-Sessions
|
|
cur.execute("""
|
|
SELECT s.id, l.license_key, c.name as customer_name, s.session_id,
|
|
s.started_at, s.ended_at,
|
|
EXTRACT(EPOCH FROM (s.ended_at - s.started_at))::INT as duration_seconds,
|
|
s.ip_address, s.user_agent
|
|
FROM sessions s
|
|
JOIN licenses l ON s.license_id = l.id
|
|
JOIN customers c ON l.customer_id = c.id
|
|
WHERE s.is_active = false AND s.ended_at IS NOT NULL
|
|
ORDER BY s.ended_at DESC
|
|
LIMIT 1000
|
|
""")
|
|
sessions = cur.fetchall()
|
|
|
|
# Daten für Export vorbereiten
|
|
data = []
|
|
for sess in sessions:
|
|
duration = sess[6] if sess[6] else 0
|
|
hours = duration // 3600
|
|
minutes = (duration % 3600) // 60
|
|
seconds = duration % 60
|
|
|
|
data.append({
|
|
'Session-ID': sess[0],
|
|
'Lizenzschlüssel': sess[1],
|
|
'Kunde': sess[2],
|
|
'Session-ID (Tech)': sess[3],
|
|
'Startzeit': sess[4].strftime('%d.%m.%Y %H:%M:%S'),
|
|
'Endzeit': sess[5].strftime('%d.%m.%Y %H:%M:%S') if sess[5] else '',
|
|
'Dauer': f"{hours}h {minutes}m {seconds}s",
|
|
'IP-Adresse': sess[7],
|
|
'Browser': sess[8]
|
|
})
|
|
|
|
sheet_name = 'Beendete Sessions'
|
|
filename_prefix = 'beendete_sessions'
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# DataFrame erstellen
|
|
df = pd.DataFrame(data)
|
|
|
|
# Timestamp für Dateiname
|
|
timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
|
|
filename = f'{filename_prefix}_export_{timestamp}'
|
|
|
|
# Audit Log für Export
|
|
log_audit('EXPORT', 'sessions',
|
|
additional_info=f"{export_format.upper()} Export von {session_type} Sessions mit {len(data)} Einträgen")
|
|
|
|
if export_format == 'csv':
|
|
# CSV Export
|
|
output = io.StringIO()
|
|
# UTF-8 BOM für Excel
|
|
output.write('\ufeff')
|
|
df.to_csv(output, index=False, sep=';', encoding='utf-8')
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
io.BytesIO(output.getvalue().encode('utf-8')),
|
|
mimetype='text/csv;charset=utf-8',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.csv'
|
|
)
|
|
else:
|
|
# Excel Export
|
|
output = BytesIO()
|
|
with pd.ExcelWriter(output, engine='openpyxl') as writer:
|
|
df.to_excel(writer, index=False, sheet_name=sheet_name)
|
|
|
|
# Spaltenbreiten anpassen
|
|
worksheet = writer.sheets[sheet_name]
|
|
for idx, col in enumerate(df.columns):
|
|
max_length = max(
|
|
df[col].astype(str).map(len).max(),
|
|
len(col)
|
|
) + 2
|
|
worksheet.column_dimensions[get_column_letter(idx + 1)].width = min(max_length, 50)
|
|
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
output,
|
|
mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.xlsx'
|
|
)
|
|
|
|
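# Worked example for the duration formatting in export_sessions() above (illustrative):
# a session of 7384 seconds gives hours = 7384 // 3600 = 2, minutes = (7384 % 3600) // 60 = 3,
# seconds = 7384 % 60 = 4, rendered as "2h 3m 4s".
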
@app.route("/export/resources")
|
|
@login_required
|
|
def export_resources():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Holen der Filter-Parameter
|
|
filter_type = request.args.get('type', '')
|
|
filter_status = request.args.get('status', '')
|
|
search_query = request.args.get('search', '')
|
|
show_test = request.args.get('show_test', 'false').lower() == 'true'
|
|
export_format = request.args.get('format', 'excel')
|
|
|
|
# SQL Query mit Filtern
|
|
query = """
|
|
SELECT r.id, r.resource_type, r.resource_value, r.status, r.allocated_to_license,
|
|
r.created_at, r.status_changed_at,
|
|
l.license_key, c.name as customer_name, c.email as customer_email,
|
|
l.license_type
|
|
FROM resource_pools r
|
|
LEFT JOIN licenses l ON r.allocated_to_license = l.id
|
|
LEFT JOIN customers c ON l.customer_id = c.id
|
|
WHERE 1=1
|
|
"""
|
|
params = []
|
|
|
|
# Filter für Testdaten
|
|
if not show_test:
|
|
query += " AND (r.is_test = false OR r.is_test IS NULL)"
|
|
|
|
# Filter für Ressourcentyp
|
|
if filter_type:
|
|
query += " AND r.resource_type = %s"
|
|
params.append(filter_type)
|
|
|
|
# Filter für Status
|
|
if filter_status:
|
|
query += " AND r.status = %s"
|
|
params.append(filter_status)
|
|
|
|
# Suchfilter
|
|
if search_query:
|
|
query += " AND (r.resource_value ILIKE %s OR l.license_key ILIKE %s OR c.name ILIKE %s)"
|
|
params.extend([f'%{search_query}%', f'%{search_query}%', f'%{search_query}%'])
|
|
|
|
query += " ORDER BY r.id DESC"
|
|
|
|
cur.execute(query, params)
|
|
resources = cur.fetchall()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Daten für Export vorbereiten
|
|
data = []
|
|
for res in resources:
|
|
status_text = {
|
|
'available': 'Verfügbar',
|
|
'allocated': 'Zugewiesen',
|
|
'quarantine': 'Quarantäne'
|
|
}.get(res[3], res[3])
|
|
|
|
type_text = {
|
|
'domain': 'Domain',
|
|
'ipv4': 'IPv4',
|
|
'phone': 'Telefon'
|
|
}.get(res[1], res[1])
|
|
|
|
data.append({
|
|
'ID': res[0],
|
|
'Typ': type_text,
|
|
'Ressource': res[2],
|
|
'Status': status_text,
|
|
'Lizenzschlüssel': res[7] or '',
|
|
'Kunde': res[8] or '',
|
|
'Kunden-Email': res[9] or '',
|
|
'Lizenztyp': res[10] or '',
|
|
'Erstellt am': res[5].strftime('%d.%m.%Y %H:%M:%S') if res[5] else '',
|
|
'Zugewiesen am': res[6].strftime('%d.%m.%Y %H:%M:%S') if res[6] else ''
|
|
})
|
|
|
|
# DataFrame erstellen
|
|
df = pd.DataFrame(data)
|
|
|
|
# Timestamp für Dateiname
|
|
timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
|
|
filename = f'resources_export_{timestamp}'
|
|
|
|
# Audit Log für Export
|
|
log_audit('EXPORT', 'resources',
|
|
additional_info=f"{export_format.upper()} Export mit {len(data)} Ressourcen")
|
|
|
|
if export_format == 'csv':
|
|
# CSV Export
|
|
output = io.StringIO()
|
|
# UTF-8 BOM für Excel
|
|
output.write('\ufeff')
|
|
df.to_csv(output, index=False, sep=';', encoding='utf-8')
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
io.BytesIO(output.getvalue().encode('utf-8')),
|
|
mimetype='text/csv;charset=utf-8',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.csv'
|
|
)
|
|
else:
|
|
# Excel Export
|
|
output = BytesIO()
|
|
with pd.ExcelWriter(output, engine='openpyxl') as writer:
|
|
df.to_excel(writer, index=False, sheet_name='Resources')
|
|
|
|
# Spaltenbreiten anpassen
|
|
worksheet = writer.sheets['Resources']
|
|
for idx, col in enumerate(df.columns):
|
|
max_length = max(
|
|
df[col].astype(str).map(len).max(),
|
|
len(col)
|
|
) + 2
|
|
worksheet.column_dimensions[get_column_letter(idx + 1)].width = min(max_length, 50)
|
|
|
|
output.seek(0)
|
|
|
|
return send_file(
|
|
output,
|
|
mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|
as_attachment=True,
|
|
download_name=f'{filename}.xlsx'
|
|
)
|
|
|
|
@app.route("/audit")
|
|
@login_required
|
|
def audit_log():
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Parameter
|
|
filter_user = request.args.get('user', '').strip()
|
|
filter_action = request.args.get('action', '').strip()
|
|
filter_entity = request.args.get('entity', '').strip()
|
|
page = request.args.get('page', 1, type=int)
|
|
sort = request.args.get('sort', 'timestamp')
|
|
order = request.args.get('order', 'desc')
|
|
per_page = 50
|
|
|
|
# Whitelist für erlaubte Sortierfelder
|
|
allowed_sort_fields = {
|
|
'timestamp': 'timestamp',
|
|
'username': 'username',
|
|
'action': 'action',
|
|
'entity': 'entity_type',
|
|
'ip': 'ip_address'
|
|
}
|
|
|
|
# Validierung
|
|
if sort not in allowed_sort_fields:
|
|
sort = 'timestamp'
|
|
if order not in ['asc', 'desc']:
|
|
order = 'desc'
|
|
|
|
sort_field = allowed_sort_fields[sort]
|
|
|
|
# SQL Query mit optionalen Filtern
|
|
query = """
|
|
SELECT id, timestamp, username, action, entity_type, entity_id,
|
|
old_values, new_values, ip_address, user_agent, additional_info
|
|
FROM audit_log
|
|
WHERE 1=1
|
|
"""
|
|
|
|
params = []
|
|
|
|
# Filter
|
|
if filter_user:
|
|
query += " AND LOWER(username) LIKE LOWER(%s)"
|
|
params.append(f'%{filter_user}%')
|
|
|
|
if filter_action:
|
|
query += " AND action = %s"
|
|
params.append(filter_action)
|
|
|
|
if filter_entity:
|
|
query += " AND entity_type = %s"
|
|
params.append(filter_entity)
|
|
|
|
# Gesamtanzahl für Pagination
|
|
count_query = "SELECT COUNT(*) FROM (" + query + ") as count_table"
|
|
cur.execute(count_query, params)
|
|
total = cur.fetchone()[0]
|
|
|
|
# Pagination
|
|
offset = (page - 1) * per_page
|
|
query += f" ORDER BY {sort_field} {order.upper()} LIMIT %s OFFSET %s"
|
|
params.extend([per_page, offset])
|
|
|
|
cur.execute(query, params)
|
|
logs = cur.fetchall()
|
|
|
|
# JSON-Werte parsen
|
|
parsed_logs = []
|
|
for log in logs:
|
|
parsed_log = list(log)
|
|
# old_values und new_values sind bereits Dictionaries (JSONB)
|
|
# Keine Konvertierung nötig
|
|
parsed_logs.append(parsed_log)
|
|
|
|
# Pagination Info
|
|
total_pages = (total + per_page - 1) // per_page
|
|
|
|
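    # Ceiling division, illustrative: 101 entries with per_page = 50 -> (101 + 49) // 50 = 3 pages.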

    cur.close()
    conn.close()

    return render_template("audit_log.html",
                           logs=parsed_logs,
                           filter_user=filter_user,
                           filter_action=filter_action,
                           filter_entity=filter_entity,
                           page=page,
                           total_pages=total_pages,
                           total=total,
                           sort=sort,
                           order=order,
                           username=session.get('username'))

@app.route("/backups")
|
|
@login_required
|
|
def backups():
|
|
"""Zeigt die Backup-Historie an"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Letztes erfolgreiches Backup für Dashboard
|
|
cur.execute("""
|
|
SELECT created_at, filesize, duration_seconds
|
|
FROM backup_history
|
|
WHERE status = 'success'
|
|
ORDER BY created_at DESC
|
|
LIMIT 1
|
|
""")
|
|
last_backup = cur.fetchone()
|
|
|
|
# Alle Backups abrufen
|
|
cur.execute("""
|
|
SELECT id, filename, filesize, backup_type, status, error_message,
|
|
created_at, created_by, tables_count, records_count,
|
|
duration_seconds, is_encrypted
|
|
FROM backup_history
|
|
ORDER BY created_at DESC
|
|
""")
|
|
backups = cur.fetchall()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return render_template("backups.html",
|
|
backups=backups,
|
|
last_backup=last_backup,
|
|
username=session.get('username'))
|
|
|
|
@app.route("/backup/create", methods=["POST"])
|
|
@login_required
|
|
def create_backup_route():
|
|
"""Erstellt ein manuelles Backup"""
|
|
username = session.get('username')
|
|
success, result = create_backup(backup_type="manual", created_by=username)
|
|
|
|
if success:
|
|
return jsonify({
|
|
'success': True,
|
|
'message': f'Backup erfolgreich erstellt: {result}'
|
|
})
|
|
else:
|
|
return jsonify({
|
|
'success': False,
|
|
'message': f'Backup fehlgeschlagen: {result}'
|
|
}), 500
|
|
|
|
@app.route("/backup/restore/<int:backup_id>", methods=["POST"])
|
|
@login_required
|
|
def restore_backup_route(backup_id):
|
|
"""Stellt ein Backup wieder her"""
|
|
encryption_key = request.form.get('encryption_key')
|
|
|
|
success, message = restore_backup(backup_id, encryption_key)
|
|
|
|
if success:
|
|
return jsonify({
|
|
'success': True,
|
|
'message': message
|
|
})
|
|
else:
|
|
return jsonify({
|
|
'success': False,
|
|
'message': message
|
|
}), 500
|
|
|
|
@app.route("/backup/download/<int:backup_id>")
|
|
@login_required
|
|
def download_backup(backup_id):
|
|
"""Lädt eine Backup-Datei herunter"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
cur.execute("""
|
|
SELECT filename, filepath
|
|
FROM backup_history
|
|
WHERE id = %s
|
|
""", (backup_id,))
|
|
backup_info = cur.fetchone()
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
if not backup_info:
|
|
return "Backup nicht gefunden", 404
|
|
|
|
filename, filepath = backup_info
|
|
filepath = Path(filepath)
|
|
|
|
if not filepath.exists():
|
|
return "Backup-Datei nicht gefunden", 404
|
|
|
|
# Audit-Log
|
|
log_audit('DOWNLOAD', 'backup', backup_id,
|
|
additional_info=f"Backup heruntergeladen: {filename}")
|
|
|
|
return send_file(filepath, as_attachment=True, download_name=filename)
|
|
|
|
@app.route("/backup/delete/<int:backup_id>", methods=["DELETE"])
|
|
@login_required
|
|
def delete_backup(backup_id):
|
|
"""Löscht ein Backup"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
try:
|
|
# Backup-Informationen abrufen
|
|
cur.execute("""
|
|
SELECT filename, filepath
|
|
FROM backup_history
|
|
WHERE id = %s
|
|
""", (backup_id,))
|
|
backup_info = cur.fetchone()
|
|
|
|
if not backup_info:
|
|
return jsonify({
|
|
'success': False,
|
|
'message': 'Backup nicht gefunden'
|
|
}), 404
|
|
|
|
filename, filepath = backup_info
|
|
filepath = Path(filepath)
|
|
|
|
# Datei löschen, wenn sie existiert
|
|
if filepath.exists():
|
|
filepath.unlink()
|
|
|
|
# Aus Datenbank löschen
|
|
cur.execute("""
|
|
DELETE FROM backup_history
|
|
WHERE id = %s
|
|
""", (backup_id,))
|
|
|
|
conn.commit()
|
|
|
|
# Audit-Log
|
|
log_audit('DELETE', 'backup', backup_id,
|
|
additional_info=f"Backup gelöscht: {filename}")
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'message': f'Backup "{filename}" wurde erfolgreich gelöscht'
|
|
})
|
|
|
|
except Exception as e:
|
|
conn.rollback()
|
|
return jsonify({
|
|
'success': False,
|
|
'message': f'Fehler beim Löschen des Backups: {str(e)}'
|
|
}), 500
|
|
finally:
|
|
cur.close()
|
|
conn.close()
|
|
|
|
@app.route("/security/blocked-ips")
|
|
@login_required
|
|
def blocked_ips():
|
|
"""Zeigt alle gesperrten IPs an"""
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
cur.execute("""
|
|
SELECT
|
|
ip_address,
|
|
attempt_count,
|
|
first_attempt,
|
|
last_attempt,
|
|
blocked_until,
|
|
last_username_tried,
|
|
last_error_message
|
|
FROM login_attempts
|
|
WHERE blocked_until IS NOT NULL
|
|
ORDER BY blocked_until DESC
|
|
""")
|
|
|
|
blocked_ips_list = []
|
|
for ip in cur.fetchall():
|
|
blocked_ips_list.append({
|
|
'ip_address': ip[0],
|
|
'attempt_count': ip[1],
|
|
'first_attempt': ip[2].strftime('%d.%m.%Y %H:%M'),
|
|
'last_attempt': ip[3].strftime('%d.%m.%Y %H:%M'),
|
|
'blocked_until': ip[4].strftime('%d.%m.%Y %H:%M'),
|
|
'is_active': ip[4] > datetime.now(ZoneInfo("Europe/Berlin")).replace(tzinfo=None),
|
|
'last_username': ip[5],
|
|
'last_error': ip[6]
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return render_template("blocked_ips.html",
|
|
blocked_ips=blocked_ips_list,
|
|
username=session.get('username'))
|
|
|
|
@app.route("/security/unblock-ip", methods=["POST"])
|
|
@login_required
|
|
def unblock_ip():
|
|
"""Entsperrt eine IP-Adresse"""
|
|
ip_address = request.form.get('ip_address')
|
|
|
|
if ip_address:
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
cur.execute("""
|
|
UPDATE login_attempts
|
|
SET blocked_until = NULL
|
|
WHERE ip_address = %s
|
|
""", (ip_address,))
|
|
|
|
conn.commit()
|
|
cur.close()
|
|
conn.close()
|
|
|
|
# Audit-Log
|
|
log_audit('UNBLOCK_IP', 'security',
|
|
additional_info=f"IP {ip_address} manuell entsperrt")
|
|
|
|
return redirect(url_for('blocked_ips'))
|
|
|
|
@app.route("/security/clear-attempts", methods=["POST"])
|
|
@login_required
|
|
def clear_attempts():
|
|
"""Löscht alle Login-Versuche für eine IP"""
|
|
ip_address = request.form.get('ip_address')
|
|
|
|
if ip_address:
|
|
reset_login_attempts(ip_address)
|
|
|
|
# Audit-Log
|
|
log_audit('CLEAR_ATTEMPTS', 'security',
|
|
additional_info=f"Login-Versuche für IP {ip_address} zurückgesetzt")
|
|
|
|
return redirect(url_for('blocked_ips'))
|
|
|
|
# API Endpoints for License Management
|
|
@app.route("/api/license/<int:license_id>/toggle", methods=["POST"])
|
|
@login_required
|
|
def toggle_license_api(license_id):
|
|
"""Toggle license active status via API"""
|
|
try:
|
|
data = request.get_json()
|
|
is_active = data.get('is_active', False)
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Update license status
|
|
cur.execute("""
|
|
UPDATE licenses
|
|
SET is_active = %s
|
|
WHERE id = %s
|
|
""", (is_active, license_id))
|
|
|
|
conn.commit()
|
|
|
|
# Log the action
|
|
log_audit('UPDATE', 'license', license_id,
|
|
new_values={'is_active': is_active},
|
|
additional_info=f"Lizenz {'aktiviert' if is_active else 'deaktiviert'} via Toggle")
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({'success': True, 'message': 'Status erfolgreich geändert'})
|
|
except Exception as e:
|
|
return jsonify({'success': False, 'message': str(e)}), 500
|
|
|
|
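# Illustrative sketch only (not wired into the app): how an admin client might
# call the toggle endpoint above. The base URL and the session cookie value are
# assumptions; the JSON body just carries the 'is_active' flag that
# toggle_license_api() reads.
def _example_toggle_license(base_url, session_cookie, license_id, active):
    return requests.post(
        f"{base_url}/api/license/{license_id}/toggle",
        json={"is_active": active},
        cookies={"admin_session": session_cookie},
        timeout=10,
    )
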
@app.route("/api/licenses/bulk-activate", methods=["POST"])
|
|
@login_required
|
|
def bulk_activate_licenses():
|
|
"""Activate multiple licenses at once"""
|
|
try:
|
|
data = request.get_json()
|
|
license_ids = data.get('ids', [])
|
|
|
|
if not license_ids:
|
|
return jsonify({'success': False, 'message': 'Keine Lizenzen ausgewählt'}), 400
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Update all selected licenses (nur Live-Daten)
|
|
cur.execute("""
|
|
UPDATE licenses
|
|
SET is_active = TRUE
|
|
WHERE id = ANY(%s) AND is_test = FALSE
|
|
""", (license_ids,))
|
|
|
|
affected_rows = cur.rowcount
|
|
conn.commit()
|
|
|
|
# Log the bulk action
|
|
log_audit('BULK_UPDATE', 'licenses', None,
|
|
new_values={'is_active': True, 'count': affected_rows},
|
|
additional_info=f"{affected_rows} Lizenzen aktiviert")
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({'success': True, 'message': f'{affected_rows} Lizenzen aktiviert'})
|
|
except Exception as e:
|
|
return jsonify({'success': False, 'message': str(e)}), 500
|
|
|
|
@app.route("/api/licenses/bulk-deactivate", methods=["POST"])
|
|
@login_required
|
|
def bulk_deactivate_licenses():
|
|
"""Deactivate multiple licenses at once"""
|
|
try:
|
|
data = request.get_json()
|
|
license_ids = data.get('ids', [])
|
|
|
|
if not license_ids:
|
|
return jsonify({'success': False, 'message': 'Keine Lizenzen ausgewählt'}), 400
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Update all selected licenses (nur Live-Daten)
|
|
cur.execute("""
|
|
UPDATE licenses
|
|
SET is_active = FALSE
|
|
WHERE id = ANY(%s) AND is_test = FALSE
|
|
""", (license_ids,))
|
|
|
|
affected_rows = cur.rowcount
|
|
conn.commit()
|
|
|
|
# Log the bulk action
|
|
log_audit('BULK_UPDATE', 'licenses', None,
|
|
new_values={'is_active': False, 'count': affected_rows},
|
|
additional_info=f"{affected_rows} Lizenzen deaktiviert")
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({'success': True, 'message': f'{affected_rows} Lizenzen deaktiviert'})
|
|
except Exception as e:
|
|
return jsonify({'success': False, 'message': str(e)}), 500
|
|
|
|
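# Illustrative sketch (same assumptions as above: base URL and admin session
# cookie). Both bulk endpoints expect a JSON body of the form {"ids": [1, 2, 3]}
# and, as their queries show, only touch live records (is_test = FALSE).
def _example_bulk_set_active(base_url, session_cookie, license_ids, active):
    action = "bulk-activate" if active else "bulk-deactivate"
    return requests.post(
        f"{base_url}/api/licenses/{action}",
        json={"ids": list(license_ids)},
        cookies={"admin_session": session_cookie},
        timeout=10,
    )
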
@app.route("/api/license/<int:license_id>/devices")
|
|
@login_required
|
|
def get_license_devices(license_id):
|
|
"""Hole alle registrierten Geräte einer Lizenz"""
|
|
try:
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Prüfe ob Lizenz existiert und hole device_limit
|
|
cur.execute("""
|
|
SELECT device_limit FROM licenses WHERE id = %s
|
|
""", (license_id,))
|
|
license_data = cur.fetchone()
|
|
|
|
if not license_data:
|
|
return jsonify({'success': False, 'message': 'Lizenz nicht gefunden'}), 404
|
|
|
|
device_limit = license_data[0]
|
|
|
|
# Hole alle Geräte für diese Lizenz
|
|
cur.execute("""
|
|
SELECT id, hardware_id, device_name, operating_system,
|
|
first_seen, last_seen, is_active, ip_address
|
|
FROM device_registrations
|
|
WHERE license_id = %s
|
|
ORDER BY is_active DESC, last_seen DESC
|
|
""", (license_id,))
|
|
|
|
devices = []
|
|
for row in cur.fetchall():
|
|
devices.append({
|
|
'id': row[0],
|
|
'hardware_id': row[1],
|
|
'device_name': row[2] or 'Unbekanntes Gerät',
|
|
'operating_system': row[3] or 'Unbekannt',
|
|
'first_seen': row[4].strftime('%d.%m.%Y %H:%M') if row[4] else '',
|
|
'last_seen': row[5].strftime('%d.%m.%Y %H:%M') if row[5] else '',
|
|
'is_active': row[6],
|
|
'ip_address': row[7] or '-'
|
|
})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'devices': devices,
|
|
'device_limit': device_limit,
|
|
'active_count': sum(1 for d in devices if d['is_active'])
|
|
})
|
|
|
|
except Exception as e:
|
|
logging.error(f"Fehler beim Abrufen der Geräte: {str(e)}")
|
|
return jsonify({'success': False, 'message': 'Fehler beim Abrufen der Geräte'}), 500
|
|
|
|
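# Note: get_license_devices() answers with JSON of the shape
#   {"success": true, "devices": [...], "device_limit": <int>, "active_count": <int>},
# where each device entry carries exactly the columns selected from
# device_registrations above, formatted for display.
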
@app.route("/api/license/<int:license_id>/register-device", methods=["POST"])
|
|
def register_device(license_id):
|
|
"""Registriere ein neues Gerät für eine Lizenz"""
|
|
try:
|
|
data = request.get_json()
|
|
hardware_id = data.get('hardware_id')
|
|
device_name = data.get('device_name', '')
|
|
operating_system = data.get('operating_system', '')
|
|
|
|
if not hardware_id:
|
|
return jsonify({'success': False, 'message': 'Hardware-ID fehlt'}), 400
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Prüfe ob Lizenz existiert und aktiv ist
|
|
cur.execute("""
|
|
SELECT device_limit, is_active, valid_until
|
|
FROM licenses
|
|
WHERE id = %s
|
|
""", (license_id,))
|
|
license_data = cur.fetchone()
|
|
|
|
if not license_data:
|
|
return jsonify({'success': False, 'message': 'Lizenz nicht gefunden'}), 404
|
|
|
|
device_limit, is_active, valid_until = license_data
|
|
|
|
# Prüfe ob Lizenz aktiv und gültig ist
|
|
if not is_active:
|
|
return jsonify({'success': False, 'message': 'Lizenz ist deaktiviert'}), 403
|
|
|
|
if valid_until < datetime.now(ZoneInfo("Europe/Berlin")).date():
|
|
return jsonify({'success': False, 'message': 'Lizenz ist abgelaufen'}), 403
|
|
|
|
# Prüfe ob Gerät bereits registriert ist
|
|
cur.execute("""
|
|
SELECT id, is_active FROM device_registrations
|
|
WHERE license_id = %s AND hardware_id = %s
|
|
""", (license_id, hardware_id))
|
|
existing_device = cur.fetchone()
|
|
|
|
if existing_device:
|
|
device_id, is_device_active = existing_device
|
|
if is_device_active:
|
|
# Gerät ist bereits aktiv, update last_seen
|
|
cur.execute("""
|
|
UPDATE device_registrations
|
|
SET last_seen = CURRENT_TIMESTAMP,
|
|
ip_address = %s,
|
|
user_agent = %s
|
|
WHERE id = %s
|
|
""", (get_client_ip(), request.headers.get('User-Agent', ''), device_id))
|
|
conn.commit()
|
|
return jsonify({'success': True, 'message': 'Gerät bereits registriert', 'device_id': device_id})
|
|
else:
|
|
# Gerät war deaktiviert, prüfe ob wir es reaktivieren können
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM device_registrations
|
|
WHERE license_id = %s AND is_active = TRUE
|
|
""", (license_id,))
|
|
active_count = cur.fetchone()[0]
|
|
|
|
if active_count >= device_limit:
|
|
return jsonify({'success': False, 'message': f'Gerätelimit erreicht ({device_limit} Geräte)'}), 403
|
|
|
|
# Reaktiviere das Gerät
|
|
cur.execute("""
|
|
UPDATE device_registrations
|
|
SET is_active = TRUE,
|
|
last_seen = CURRENT_TIMESTAMP,
|
|
deactivated_at = NULL,
|
|
deactivated_by = NULL,
|
|
ip_address = %s,
|
|
user_agent = %s
|
|
WHERE id = %s
|
|
""", (get_client_ip(), request.headers.get('User-Agent', ''), device_id))
|
|
conn.commit()
|
|
return jsonify({'success': True, 'message': 'Gerät reaktiviert', 'device_id': device_id})
|
|
|
|
# Neues Gerät - prüfe Gerätelimit
|
|
cur.execute("""
|
|
SELECT COUNT(*) FROM device_registrations
|
|
WHERE license_id = %s AND is_active = TRUE
|
|
""", (license_id,))
|
|
active_count = cur.fetchone()[0]
|
|
|
|
if active_count >= device_limit:
|
|
return jsonify({'success': False, 'message': f'Gerätelimit erreicht ({device_limit} Geräte)'}), 403
|
|
|
|
# Registriere neues Gerät
|
|
cur.execute("""
|
|
INSERT INTO device_registrations
|
|
(license_id, hardware_id, device_name, operating_system, ip_address, user_agent)
|
|
VALUES (%s, %s, %s, %s, %s, %s)
|
|
RETURNING id
|
|
""", (license_id, hardware_id, device_name, operating_system,
|
|
get_client_ip(), request.headers.get('User-Agent', '')))
|
|
device_id = cur.fetchone()[0]
|
|
|
|
conn.commit()
|
|
|
|
# Audit Log
|
|
log_audit('DEVICE_REGISTER', 'device', device_id,
|
|
new_values={'license_id': license_id, 'hardware_id': hardware_id})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({'success': True, 'message': 'Gerät erfolgreich registriert', 'device_id': device_id})
|
|
|
|
except Exception as e:
|
|
logging.error(f"Fehler bei Geräte-Registrierung: {str(e)}")
|
|
return jsonify({'success': False, 'message': 'Fehler bei der Registrierung'}), 500
|
|
|
|
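# Illustrative sketch of a client-side registration call. The base URL is an
# assumption; the endpoint itself is not behind @login_required, presumably so
# that licensed client installations can call it directly. Only 'hardware_id'
# is mandatory, the other fields are optional.
def _example_register_device(base_url, license_id, hardware_id,
                             device_name="", operating_system=""):
    return requests.post(
        f"{base_url}/api/license/{license_id}/register-device",
        json={
            "hardware_id": hardware_id,
            "device_name": device_name,
            "operating_system": operating_system,
        },
        timeout=10,
    )
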
@app.route("/api/license/<int:license_id>/deactivate-device/<int:device_id>", methods=["POST"])
|
|
@login_required
|
|
def deactivate_device(license_id, device_id):
|
|
"""Deaktiviere ein registriertes Gerät"""
|
|
try:
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Prüfe ob das Gerät zu dieser Lizenz gehört
|
|
cur.execute("""
|
|
SELECT id FROM device_registrations
|
|
WHERE id = %s AND license_id = %s AND is_active = TRUE
|
|
""", (device_id, license_id))
|
|
|
|
if not cur.fetchone():
|
|
return jsonify({'success': False, 'message': 'Gerät nicht gefunden oder bereits deaktiviert'}), 404
|
|
|
|
# Deaktiviere das Gerät
|
|
cur.execute("""
|
|
UPDATE device_registrations
|
|
SET is_active = FALSE,
|
|
deactivated_at = CURRENT_TIMESTAMP,
|
|
deactivated_by = %s
|
|
WHERE id = %s
|
|
""", (session['username'], device_id))
|
|
|
|
conn.commit()
|
|
|
|
# Audit Log
|
|
log_audit('DEVICE_DEACTIVATE', 'device', device_id,
|
|
old_values={'is_active': True},
|
|
new_values={'is_active': False})
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({'success': True, 'message': 'Gerät erfolgreich deaktiviert'})
|
|
|
|
except Exception as e:
|
|
logging.error(f"Fehler beim Deaktivieren des Geräts: {str(e)}")
|
|
return jsonify({'success': False, 'message': 'Fehler beim Deaktivieren'}), 500
|
|
|
|
@app.route("/api/licenses/bulk-delete", methods=["POST"])
|
|
@login_required
|
|
def bulk_delete_licenses():
|
|
"""Delete multiple licenses at once"""
|
|
try:
|
|
data = request.get_json()
|
|
license_ids = data.get('ids', [])
|
|
|
|
if not license_ids:
|
|
return jsonify({'success': False, 'message': 'Keine Lizenzen ausgewählt'}), 400
|
|
|
|
conn = get_connection()
|
|
cur = conn.cursor()
|
|
|
|
# Get license info for audit log (nur Live-Daten)
|
|
cur.execute("""
|
|
SELECT license_key
|
|
FROM licenses
|
|
WHERE id = ANY(%s) AND is_test = FALSE
|
|
""", (license_ids,))
|
|
license_keys = [row[0] for row in cur.fetchall()]
|
|
|
|
# Delete all selected licenses (nur Live-Daten)
|
|
cur.execute("""
|
|
DELETE FROM licenses
|
|
WHERE id = ANY(%s) AND is_test = FALSE
|
|
""", (license_ids,))
|
|
|
|
affected_rows = cur.rowcount
|
|
conn.commit()
|
|
|
|
# Log the bulk action
|
|
log_audit('BULK_DELETE', 'licenses', None,
|
|
old_values={'license_keys': license_keys, 'count': affected_rows},
|
|
additional_info=f"{affected_rows} Lizenzen gelöscht")
|
|
|
|
cur.close()
|
|
conn.close()
|
|
|
|
return jsonify({'success': True, 'message': f'{affected_rows} Lizenzen gelöscht'})
|
|
except Exception as e:
|
|
return jsonify({'success': False, 'message': str(e)}), 500
|
|
|
|
# ===================== RESOURCE POOL MANAGEMENT =====================
|
|
|
|
@app.route('/resources')
@login_required
def resources():
    """Resource Pool Hauptübersicht"""
    conn = get_connection()
    cur = conn.cursor()

    # Prüfe ob Testdaten angezeigt werden sollen (gleiche Logik wie bei Kunden)
    show_test = request.args.get('show_test', 'false').lower() == 'true'

    # Statistiken abrufen
    cur.execute("""
        SELECT
            resource_type,
            COUNT(*) FILTER (WHERE status = 'available') as available,
            COUNT(*) FILTER (WHERE status = 'allocated') as allocated,
            COUNT(*) FILTER (WHERE status = 'quarantine') as quarantine,
            COUNT(*) as total
        FROM resource_pools
        WHERE is_test = %s
        GROUP BY resource_type
    """, (show_test,))

    stats = {}
    for row in cur.fetchall():
        stats[row[0]] = {
            'available': row[1],
            'allocated': row[2],
            'quarantine': row[3],
            'total': row[4],
            'available_percent': round((row[1] / row[4] * 100) if row[4] > 0 else 0, 1)
        }

    # Letzte Aktivitäten (gefiltert nach Test/Live)
    cur.execute("""
        SELECT
            rh.action,
            rh.action_by,
            rh.action_at,
            rp.resource_type,
            rp.resource_value,
            rh.details
        FROM resource_history rh
        JOIN resource_pools rp ON rh.resource_id = rp.id
        WHERE rp.is_test = %s
        ORDER BY rh.action_at DESC
        LIMIT 10
    """, (show_test,))
    recent_activities = cur.fetchall()

    # Ressourcen-Liste mit Pagination
    page = request.args.get('page', 1, type=int)
    per_page = 50
    offset = (page - 1) * per_page

    resource_type = request.args.get('type', '')
    status_filter = request.args.get('status', '')
    search = request.args.get('search', '')

    # Sortierung
    sort_by = request.args.get('sort', 'id')
    sort_order = request.args.get('order', 'desc')

    # Base Query
    query = """
        SELECT
            rp.id,
            rp.resource_type,
            rp.resource_value,
            rp.status,
            rp.allocated_to_license,
            l.license_key,
            c.name as customer_name,
            rp.status_changed_at,
            rp.quarantine_reason,
            rp.quarantine_until,
            c.id as customer_id
        FROM resource_pools rp
        LEFT JOIN licenses l ON rp.allocated_to_license = l.id
        LEFT JOIN customers c ON l.customer_id = c.id
        WHERE rp.is_test = %s
    """
    params = [show_test]

    if resource_type:
        query += " AND rp.resource_type = %s"
        params.append(resource_type)

    if status_filter:
        query += " AND rp.status = %s"
        params.append(status_filter)

    if search:
        query += " AND rp.resource_value ILIKE %s"
        params.append(f'%{search}%')

    # Count total
    count_query = f"SELECT COUNT(*) FROM ({query}) as cnt"
    cur.execute(count_query, params)
    total = cur.fetchone()[0]
    total_pages = (total + per_page - 1) // per_page

    # Get paginated results with dynamic sorting
    sort_column_map = {
        'id': 'rp.id',
        'type': 'rp.resource_type',
        'resource': 'rp.resource_value',
        'status': 'rp.status',
        'assigned': 'c.name',
        'changed': 'rp.status_changed_at'
    }

    sort_column = sort_column_map.get(sort_by, 'rp.id')
    sort_direction = 'ASC' if sort_order == 'asc' else 'DESC'

    query += f" ORDER BY {sort_column} {sort_direction} LIMIT %s OFFSET %s"
    params.extend([per_page, offset])

    cur.execute(query, params)
    resources = cur.fetchall()

    cur.close()
    conn.close()

    return render_template('resources.html',
                           stats=stats,
                           resources=resources,
                           recent_activities=recent_activities,
                           page=page,
                           total_pages=total_pages,
                           total=total,
                           resource_type=resource_type,
                           status_filter=status_filter,
                           search=search,
                           show_test=show_test,
                           sort_by=sort_by,
                           sort_order=sort_order,
                           datetime=datetime,
                           timedelta=timedelta)

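# Note on resources() above: the ORDER BY clause is assembled from
# sort_column_map, so only the whitelisted column expressions can ever reach
# the SQL string; the user-supplied 'sort' and 'order' values are mapped,
# never interpolated directly.
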
@app.route('/resources/add', methods=['GET', 'POST'])
@login_required
def add_resources():
    """Ressourcen zum Pool hinzufügen"""
    # Hole show_test Parameter für die Anzeige
    show_test = request.args.get('show_test', 'false').lower() == 'true'

    if request.method == 'POST':
        resource_type = request.form.get('resource_type')
        resources_text = request.form.get('resources_text', '')
        is_test = request.form.get('is_test') == 'on'  # Checkbox für Testdaten

        # Parse resources (one per line)
        resources = [r.strip() for r in resources_text.split('\n') if r.strip()]

        if not resources:
            flash('Keine Ressourcen angegeben', 'error')
            return redirect(url_for('add_resources', show_test=show_test))

        conn = get_connection()
        cur = conn.cursor()

        added = 0
        duplicates = 0

        for resource_value in resources:
            try:
                cur.execute("""
                    INSERT INTO resource_pools (resource_type, resource_value, status_changed_by, is_test)
                    VALUES (%s, %s, %s, %s)
                    ON CONFLICT (resource_type, resource_value) DO NOTHING
                """, (resource_type, resource_value, session['username'], is_test))

                if cur.rowcount > 0:
                    added += 1
                    # Get the inserted ID
                    cur.execute("SELECT id FROM resource_pools WHERE resource_type = %s AND resource_value = %s",
                                (resource_type, resource_value))
                    resource_id = cur.fetchone()[0]

                    # Log in history
                    cur.execute("""
                        INSERT INTO resource_history (resource_id, action, action_by, ip_address)
                        VALUES (%s, 'created', %s, %s)
                    """, (resource_id, session['username'], get_client_ip()))
                else:
                    duplicates += 1

            except Exception as e:
                app.logger.error(f"Error adding resource {resource_value}: {e}")

        conn.commit()
        cur.close()
        conn.close()

        log_audit('CREATE', 'resource_pool', None,
                  new_values={'type': resource_type, 'added': added, 'duplicates': duplicates, 'is_test': is_test},
                  additional_info=f"{added} {'Test-' if is_test else ''}Ressourcen hinzugefügt, {duplicates} Duplikate übersprungen")

        flash(f'{added} {"Test-" if is_test else ""}Ressourcen hinzugefügt, {duplicates} Duplikate übersprungen', 'success')
        return redirect(url_for('resources', show_test=show_test))

    return render_template('add_resources.html', show_test=show_test)

@app.route('/resources/quarantine/<int:resource_id>', methods=['POST'])
@login_required
def quarantine_resource(resource_id):
    """Ressource in Quarantäne setzen"""
    reason = request.form.get('reason', 'review')
    until_date = request.form.get('until_date')
    notes = request.form.get('notes', '')

    conn = get_connection()
    cur = conn.cursor()

    # Get current resource info
    cur.execute("SELECT resource_type, resource_value, status FROM resource_pools WHERE id = %s", (resource_id,))
    resource = cur.fetchone()

    if not resource:
        flash('Ressource nicht gefunden', 'error')
        return redirect(url_for('resources'))

    old_status = resource[2]

    # Update resource
    cur.execute("""
        UPDATE resource_pools
        SET status = 'quarantine',
            quarantine_reason = %s,
            quarantine_until = %s,
            notes = %s,
            status_changed_at = CURRENT_TIMESTAMP,
            status_changed_by = %s
        WHERE id = %s
    """, (reason, until_date if until_date else None, notes, session['username'], resource_id))

    # Log in history
    cur.execute("""
        INSERT INTO resource_history (resource_id, action, action_by, ip_address, details)
        VALUES (%s, 'quarantined', %s, %s, %s)
    """, (resource_id, session['username'], get_client_ip(),
          Json({'reason': reason, 'until': until_date, 'notes': notes, 'old_status': old_status})))

    conn.commit()
    cur.close()
    conn.close()

    log_audit('UPDATE', 'resource', resource_id,
              old_values={'status': old_status},
              new_values={'status': 'quarantine', 'reason': reason},
              additional_info=f"Ressource {resource[0]}: {resource[1]} in Quarantäne")

    flash('Ressource in Quarantäne gesetzt', 'success')

    # Redirect mit allen aktuellen Filtern
    return redirect(url_for('resources',
                            show_test=request.args.get('show_test', request.form.get('show_test', 'false')),
                            type=request.args.get('type', request.form.get('type', '')),
                            status=request.args.get('status', request.form.get('status', '')),
                            search=request.args.get('search', request.form.get('search', ''))))

@app.route('/resources/release', methods=['POST'])
@login_required
def release_resources():
    """Ressourcen aus Quarantäne freigeben"""
    resource_ids = request.form.getlist('resource_ids')

    if not resource_ids:
        flash('Keine Ressourcen ausgewählt', 'error')
        return redirect(url_for('resources'))

    conn = get_connection()
    cur = conn.cursor()

    released = 0
    for resource_id in resource_ids:
        cur.execute("""
            UPDATE resource_pools
            SET status = 'available',
                quarantine_reason = NULL,
                quarantine_until = NULL,
                allocated_to_license = NULL,
                status_changed_at = CURRENT_TIMESTAMP,
                status_changed_by = %s
            WHERE id = %s AND status = 'quarantine'
        """, (session['username'], resource_id))

        if cur.rowcount > 0:
            released += 1
            # Log in history
            cur.execute("""
                INSERT INTO resource_history (resource_id, action, action_by, ip_address)
                VALUES (%s, 'released', %s, %s)
            """, (resource_id, session['username'], get_client_ip()))

    conn.commit()
    cur.close()
    conn.close()

    log_audit('UPDATE', 'resource_pool', None,
              new_values={'released': released},
              additional_info=f"{released} Ressourcen aus Quarantäne freigegeben")

    flash(f'{released} Ressourcen freigegeben', 'success')

    # Redirect mit allen aktuellen Filtern
    return redirect(url_for('resources',
                            show_test=request.args.get('show_test', request.form.get('show_test', 'false')),
                            type=request.args.get('type', request.form.get('type', '')),
                            status=request.args.get('status', request.form.get('status', '')),
                            search=request.args.get('search', request.form.get('search', ''))))

@app.route('/api/resources/allocate', methods=['POST'])
@login_required
def allocate_resources_api():
    """API für Ressourcen-Zuweisung bei Lizenzerstellung"""
    data = request.json
    license_id = data.get('license_id')
    domain_count = data.get('domain_count', 1)
    ipv4_count = data.get('ipv4_count', 1)
    phone_count = data.get('phone_count', 1)

    conn = get_connection()
    cur = conn.cursor()

    try:
        allocated = {'domains': [], 'ipv4s': [], 'phones': []}

        # Allocate domains
        if domain_count > 0:
            cur.execute("""
                SELECT id, resource_value FROM resource_pools
                WHERE resource_type = 'domain' AND status = 'available'
                LIMIT %s FOR UPDATE
            """, (domain_count,))
            domains = cur.fetchall()

            if len(domains) < domain_count:
                raise ValueError(f"Nicht genügend Domains verfügbar (benötigt: {domain_count}, verfügbar: {len(domains)})")

            for domain_id, domain_value in domains:
                # Update resource status
                cur.execute("""
                    UPDATE resource_pools
                    SET status = 'allocated',
                        allocated_to_license = %s,
                        status_changed_at = CURRENT_TIMESTAMP,
                        status_changed_by = %s
                    WHERE id = %s
                """, (license_id, session['username'], domain_id))

                # Create assignment
                cur.execute("""
                    INSERT INTO license_resources (license_id, resource_id, assigned_by)
                    VALUES (%s, %s, %s)
                """, (license_id, domain_id, session['username']))

                # Log history
                cur.execute("""
                    INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
                    VALUES (%s, %s, 'allocated', %s, %s)
                """, (domain_id, license_id, session['username'], get_client_ip()))

                allocated['domains'].append(domain_value)

        # Allocate IPv4s (similar logic)
        if ipv4_count > 0:
            cur.execute("""
                SELECT id, resource_value FROM resource_pools
                WHERE resource_type = 'ipv4' AND status = 'available'
                LIMIT %s FOR UPDATE
            """, (ipv4_count,))
            ipv4s = cur.fetchall()

            if len(ipv4s) < ipv4_count:
                raise ValueError(f"Nicht genügend IPv4-Adressen verfügbar")

            for ipv4_id, ipv4_value in ipv4s:
                cur.execute("""
                    UPDATE resource_pools
                    SET status = 'allocated',
                        allocated_to_license = %s,
                        status_changed_at = CURRENT_TIMESTAMP,
                        status_changed_by = %s
                    WHERE id = %s
                """, (license_id, session['username'], ipv4_id))

                cur.execute("""
                    INSERT INTO license_resources (license_id, resource_id, assigned_by)
                    VALUES (%s, %s, %s)
                """, (license_id, ipv4_id, session['username']))

                cur.execute("""
                    INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
                    VALUES (%s, %s, 'allocated', %s, %s)
                """, (ipv4_id, license_id, session['username'], get_client_ip()))

                allocated['ipv4s'].append(ipv4_value)

        # Allocate phones (similar logic)
        if phone_count > 0:
            cur.execute("""
                SELECT id, resource_value FROM resource_pools
                WHERE resource_type = 'phone' AND status = 'available'
                LIMIT %s FOR UPDATE
            """, (phone_count,))
            phones = cur.fetchall()

            if len(phones) < phone_count:
                raise ValueError(f"Nicht genügend Telefonnummern verfügbar")

            for phone_id, phone_value in phones:
                cur.execute("""
                    UPDATE resource_pools
                    SET status = 'allocated',
                        allocated_to_license = %s,
                        status_changed_at = CURRENT_TIMESTAMP,
                        status_changed_by = %s
                    WHERE id = %s
                """, (license_id, session['username'], phone_id))

                cur.execute("""
                    INSERT INTO license_resources (license_id, resource_id, assigned_by)
                    VALUES (%s, %s, %s)
                """, (license_id, phone_id, session['username']))

                cur.execute("""
                    INSERT INTO resource_history (resource_id, license_id, action, action_by, ip_address)
                    VALUES (%s, %s, 'allocated', %s, %s)
                """, (phone_id, license_id, session['username'], get_client_ip()))

                allocated['phones'].append(phone_value)

        # Update license resource counts
        cur.execute("""
            UPDATE licenses
            SET domain_count = %s,
                ipv4_count = %s,
                phone_count = %s
            WHERE id = %s
        """, (domain_count, ipv4_count, phone_count, license_id))

        conn.commit()
        cur.close()
        conn.close()

        return jsonify({
            'success': True,
            'allocated': allocated
        })

    except Exception as e:
        conn.rollback()
        cur.close()
        conn.close()
        return jsonify({
            'success': False,
            'error': str(e)
        }), 400

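# Illustrative sketch (assumptions: base URL and admin session cookie). The
# allocation endpoint expects the license_id plus the per-type counts read
# above; on success it echoes the allocated domain/IPv4/phone values, on a
# shortage it rolls back and answers with HTTP 400.
def _example_allocate_resources(base_url, session_cookie, license_id,
                                domains=1, ipv4s=1, phones=1):
    return requests.post(
        f"{base_url}/api/resources/allocate",
        json={
            "license_id": license_id,
            "domain_count": domains,
            "ipv4_count": ipv4s,
            "phone_count": phones,
        },
        cookies={"admin_session": session_cookie},
        timeout=10,
    )
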
@app.route('/api/resources/check-availability', methods=['GET'])
@login_required
def check_resource_availability():
    """Prüft verfügbare Ressourcen"""
    resource_type = request.args.get('type', '')
    count = request.args.get('count', 10, type=int)
    show_test = request.args.get('show_test', 'false').lower() == 'true'

    conn = get_connection()
    cur = conn.cursor()

    if resource_type:
        # Spezifische Ressourcen für einen Typ
        cur.execute("""
            SELECT id, resource_value
            FROM resource_pools
            WHERE status = 'available'
              AND resource_type = %s
              AND is_test = %s
            ORDER BY resource_value
            LIMIT %s
        """, (resource_type, show_test, count))

        resources = []
        for row in cur.fetchall():
            resources.append({
                'id': row[0],
                'value': row[1]
            })

        cur.close()
        conn.close()

        return jsonify({
            'available': resources,
            'type': resource_type,
            'count': len(resources)
        })
    else:
        # Zusammenfassung aller Typen
        cur.execute("""
            SELECT
                resource_type,
                COUNT(*) as available
            FROM resource_pools
            WHERE status = 'available'
              AND is_test = %s
            GROUP BY resource_type
        """, (show_test,))

        availability = {}
        for row in cur.fetchall():
            availability[row[0]] = row[1]

        cur.close()
        conn.close()

        return jsonify(availability)

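# Illustrative sketch: without a 'type' parameter the endpoint returns a
# per-type summary such as {"domain": 12, "ipv4": 5}; with 'type' it lists the
# concrete available entries. Base URL and session cookie are assumptions.
def _example_check_availability(base_url, session_cookie, resource_type="", count=10):
    return requests.get(
        f"{base_url}/api/resources/check-availability",
        params={"type": resource_type, "count": count},
        cookies={"admin_session": session_cookie},
        timeout=10,
    )
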
@app.route('/api/global-search', methods=['GET'])
@login_required
def global_search():
    """Global search API endpoint for searching customers and licenses"""
    query = request.args.get('q', '').strip()

    if not query or len(query) < 2:
        return jsonify({'customers': [], 'licenses': []})

    conn = get_connection()
    cur = conn.cursor()

    # Search pattern with wildcards
    search_pattern = f'%{query}%'

    # Search customers
    cur.execute("""
        SELECT id, name, email, company_name
        FROM customers
        WHERE (LOWER(name) LIKE LOWER(%s)
               OR LOWER(email) LIKE LOWER(%s)
               OR LOWER(company_name) LIKE LOWER(%s))
          AND is_test = FALSE
        ORDER BY name
        LIMIT 5
    """, (search_pattern, search_pattern, search_pattern))

    customers = []
    for row in cur.fetchall():
        customers.append({
            'id': row[0],
            'name': row[1],
            'email': row[2],
            'company_name': row[3]
        })

    # Search licenses
    cur.execute("""
        SELECT l.id, l.license_key, c.name as customer_name
        FROM licenses l
        JOIN customers c ON l.customer_id = c.id
        WHERE LOWER(l.license_key) LIKE LOWER(%s)
          AND l.is_test = FALSE
        ORDER BY l.created_at DESC
        LIMIT 5
    """, (search_pattern,))

    licenses = []
    for row in cur.fetchall():
        licenses.append({
            'id': row[0],
            'license_key': row[1],
            'customer_name': row[2]
        })

    cur.close()
    conn.close()

    return jsonify({
        'customers': customers,
        'licenses': licenses
    })

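# Note: global_search() requires at least two characters in 'q' and caps each
# result list at five live records (is_test = FALSE); the match is a
# case-insensitive substring search on name, email, company name and license key.
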
@app.route('/resources/history/<int:resource_id>')
@login_required
def resource_history(resource_id):
    """Zeigt die komplette Historie einer Ressource"""
    conn = get_connection()
    cur = conn.cursor()

    # Get complete resource info using named columns
    cur.execute("""
        SELECT id, resource_type, resource_value, status, allocated_to_license,
               status_changed_at, status_changed_by, quarantine_reason,
               quarantine_until, created_at, notes
        FROM resource_pools
        WHERE id = %s
    """, (resource_id,))
    row = cur.fetchone()

    if not row:
        flash('Ressource nicht gefunden', 'error')
        return redirect(url_for('resources'))

    # Create resource object with named attributes
    resource = {
        'id': row[0],
        'resource_type': row[1],
        'resource_value': row[2],
        'status': row[3],
        'allocated_to_license': row[4],
        'status_changed_at': row[5],
        'status_changed_by': row[6],
        'quarantine_reason': row[7],
        'quarantine_until': row[8],
        'created_at': row[9],
        'notes': row[10]
    }

    # Get license info if allocated
    license_info = None
    if resource['allocated_to_license']:
        cur.execute("SELECT license_key FROM licenses WHERE id = %s",
                    (resource['allocated_to_license'],))
        lic = cur.fetchone()
        if lic:
            license_info = {'license_key': lic[0]}

    # Get history with named columns
    cur.execute("""
        SELECT
            rh.action,
            rh.action_by,
            rh.action_at,
            rh.details,
            rh.license_id,
            rh.ip_address
        FROM resource_history rh
        WHERE rh.resource_id = %s
        ORDER BY rh.action_at DESC
    """, (resource_id,))

    history = []
    for row in cur.fetchall():
        history.append({
            'action': row[0],
            'action_by': row[1],
            'action_at': row[2],
            'details': row[3],
            'license_id': row[4],
            'ip_address': row[5]
        })

    cur.close()
    conn.close()

    # Convert to object-like for template
    class ResourceObj:
        def __init__(self, data):
            for key, value in data.items():
                setattr(self, key, value)

    resource_obj = ResourceObj(resource)
    history_objs = [ResourceObj(h) for h in history]

    return render_template('resource_history.html',
                           resource=resource_obj,
                           license_info=license_info,
                           history=history_objs)

@app.route('/resources/metrics')
@login_required
def resources_metrics():
    """Dashboard für Resource Metrics und Reports"""
    conn = get_connection()
    cur = conn.cursor()

    # Overall stats with fallback values
    cur.execute("""
        SELECT
            COUNT(DISTINCT resource_id) as total_resources,
            COALESCE(AVG(performance_score), 0) as avg_performance,
            COALESCE(SUM(cost), 0) as total_cost,
            COALESCE(SUM(revenue), 0) as total_revenue,
            COALESCE(SUM(issues_count), 0) as total_issues
        FROM resource_metrics
        WHERE metric_date >= CURRENT_DATE - INTERVAL '30 days'
    """)
    row = cur.fetchone()

    # Calculate ROI
    roi = 0
    if row[2] > 0:  # if total_cost > 0
        roi = row[3] / row[2]  # revenue / cost

    stats = {
        'total_resources': row[0] or 0,
        'avg_performance': row[1] or 0,
        'total_cost': row[2] or 0,
        'total_revenue': row[3] or 0,
        'total_issues': row[4] or 0,
        'roi': roi
    }

    # Performance by type
    cur.execute("""
        SELECT
            rp.resource_type,
            COALESCE(AVG(rm.performance_score), 0) as avg_score,
            COUNT(DISTINCT rp.id) as resource_count
        FROM resource_pools rp
        LEFT JOIN resource_metrics rm ON rp.id = rm.resource_id
            AND rm.metric_date >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY rp.resource_type
        ORDER BY rp.resource_type
    """)
    performance_by_type = cur.fetchall()

    # Utilization data
    cur.execute("""
        SELECT
            resource_type,
            COUNT(*) FILTER (WHERE status = 'allocated') as allocated,
            COUNT(*) as total,
            ROUND(COUNT(*) FILTER (WHERE status = 'allocated') * 100.0 / COUNT(*), 1) as allocated_percent
        FROM resource_pools
        GROUP BY resource_type
    """)
    utilization_rows = cur.fetchall()
    utilization_data = [
        {
            'type': row[0].upper(),
            'allocated': row[1],
            'total': row[2],
            'allocated_percent': row[3]
        }
        for row in utilization_rows
    ]

    # Top performing resources
    cur.execute("""
        SELECT
            rp.id,
            rp.resource_type,
            rp.resource_value,
            COALESCE(AVG(rm.performance_score), 0) as avg_score,
            COALESCE(SUM(rm.revenue), 0) as total_revenue,
            COALESCE(SUM(rm.cost), 1) as total_cost,
            CASE
                WHEN COALESCE(SUM(rm.cost), 0) = 0 THEN 0
                ELSE COALESCE(SUM(rm.revenue), 0) / COALESCE(SUM(rm.cost), 1)
            END as roi
        FROM resource_pools rp
        LEFT JOIN resource_metrics rm ON rp.id = rm.resource_id
            AND rm.metric_date >= CURRENT_DATE - INTERVAL '30 days'
        WHERE rp.status != 'quarantine'
        GROUP BY rp.id, rp.resource_type, rp.resource_value
        HAVING AVG(rm.performance_score) IS NOT NULL
        ORDER BY avg_score DESC
        LIMIT 10
    """)
    top_rows = cur.fetchall()
    top_performers = [
        {
            'id': row[0],
            'resource_type': row[1],
            'resource_value': row[2],
            'avg_score': row[3],
            'roi': row[6]
        }
        for row in top_rows
    ]

    # Resources with issues
    cur.execute("""
        SELECT
            rp.id,
            rp.resource_type,
            rp.resource_value,
            rp.status,
            COALESCE(SUM(rm.issues_count), 0) as total_issues
        FROM resource_pools rp
        LEFT JOIN resource_metrics rm ON rp.id = rm.resource_id
            AND rm.metric_date >= CURRENT_DATE - INTERVAL '30 days'
        WHERE rm.issues_count > 0 OR rp.status = 'quarantine'
        GROUP BY rp.id, rp.resource_type, rp.resource_value, rp.status
        HAVING SUM(rm.issues_count) > 0
        ORDER BY total_issues DESC
        LIMIT 10
    """)
    problem_rows = cur.fetchall()
    problem_resources = [
        {
            'id': row[0],
            'resource_type': row[1],
            'resource_value': row[2],
            'status': row[3],
            'total_issues': row[4]
        }
        for row in problem_rows
    ]

    # Daily metrics for trend chart (last 30 days)
    cur.execute("""
        SELECT
            metric_date,
            COALESCE(AVG(performance_score), 0) as avg_performance,
            COALESCE(SUM(issues_count), 0) as total_issues
        FROM resource_metrics
        WHERE metric_date >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY metric_date
        ORDER BY metric_date
    """)
    daily_rows = cur.fetchall()
    daily_metrics = [
        {
            'date': row[0].strftime('%d.%m'),
            'performance': float(row[1]),
            'issues': int(row[2])
        }
        for row in daily_rows
    ]

    cur.close()
    conn.close()

    return render_template('resource_metrics.html',
                           stats=stats,
                           performance_by_type=performance_by_type,
                           utilization_data=utilization_data,
                           top_performers=top_performers,
                           problem_resources=problem_resources,
                           daily_metrics=daily_metrics)

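# Note on the ROI figure in resources_metrics(): roi = total_revenue / total_cost
# over the last 30 days, e.g. revenue 1500 and cost 500 give roi = 3.0; when no
# cost was recorded the value simply stays 0 instead of dividing by zero.
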
@app.route('/resources/report', methods=['GET'])
@login_required
def resources_report():
    """Generiert Ressourcen-Reports oder zeigt Report-Formular"""
    # Prüfe ob Download angefordert wurde
    if request.args.get('download') == 'true':
        report_type = request.args.get('type', 'usage')
        format_type = request.args.get('format', 'excel')
        date_from = request.args.get('from', (datetime.now(ZoneInfo("Europe/Berlin")) - timedelta(days=30)).strftime('%Y-%m-%d'))
        date_to = request.args.get('to', datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y-%m-%d'))

        conn = get_connection()
        cur = conn.cursor()

        if report_type == 'usage':
            # Auslastungsreport
            query = """
                SELECT
                    rp.resource_type,
                    rp.resource_value,
                    rp.status,
                    COUNT(DISTINCT rh.license_id) as unique_licenses,
                    COUNT(rh.id) as total_allocations,
                    MIN(rh.action_at) as first_used,
                    MAX(rh.action_at) as last_used
                FROM resource_pools rp
                LEFT JOIN resource_history rh ON rp.id = rh.resource_id
                    AND rh.action = 'allocated'
                    AND rh.action_at BETWEEN %s AND %s
                GROUP BY rp.id, rp.resource_type, rp.resource_value, rp.status
                ORDER BY rp.resource_type, total_allocations DESC
            """
            cur.execute(query, (date_from, date_to))
            columns = ['Typ', 'Ressource', 'Status', 'Unique Lizenzen', 'Gesamt Zuweisungen', 'Erste Nutzung', 'Letzte Nutzung']

        elif report_type == 'performance':
            # Performance-Report
            query = """
                SELECT
                    rp.resource_type,
                    rp.resource_value,
                    AVG(rm.performance_score) as avg_performance,
                    SUM(rm.usage_count) as total_usage,
                    SUM(rm.revenue) as total_revenue,
                    SUM(rm.cost) as total_cost,
                    SUM(rm.revenue - rm.cost) as profit,
                    SUM(rm.issues_count) as total_issues
                FROM resource_pools rp
                JOIN resource_metrics rm ON rp.id = rm.resource_id
                WHERE rm.metric_date BETWEEN %s AND %s
                GROUP BY rp.id, rp.resource_type, rp.resource_value
                ORDER BY profit DESC
            """
            cur.execute(query, (date_from, date_to))
            columns = ['Typ', 'Ressource', 'Durchschn. Performance', 'Gesamt Nutzung', 'Umsatz', 'Kosten', 'Gewinn', 'Issues']

        elif report_type == 'compliance':
            # Compliance-Report
            query = """
                SELECT
                    rh.action_at,
                    rh.action,
                    rh.action_by,
                    rp.resource_type,
                    rp.resource_value,
                    l.license_key,
                    c.name as customer_name,
                    rh.ip_address
                FROM resource_history rh
                JOIN resource_pools rp ON rh.resource_id = rp.id
                LEFT JOIN licenses l ON rh.license_id = l.id
                LEFT JOIN customers c ON l.customer_id = c.id
                WHERE rh.action_at BETWEEN %s AND %s
                ORDER BY rh.action_at DESC
            """
            cur.execute(query, (date_from, date_to))
            columns = ['Zeit', 'Aktion', 'Von', 'Typ', 'Ressource', 'Lizenz', 'Kunde', 'IP-Adresse']

        else:  # inventory report
            # Inventar-Report
            query = """
                SELECT
                    resource_type,
                    COUNT(*) FILTER (WHERE status = 'available') as available,
                    COUNT(*) FILTER (WHERE status = 'allocated') as allocated,
                    COUNT(*) FILTER (WHERE status = 'quarantine') as quarantine,
                    COUNT(*) as total
                FROM resource_pools
                GROUP BY resource_type
                ORDER BY resource_type
            """
            cur.execute(query)
            columns = ['Typ', 'Verfügbar', 'Zugeteilt', 'Quarantäne', 'Gesamt']

        # Convert to DataFrame
        data = cur.fetchall()
        df = pd.DataFrame(data, columns=columns)

        cur.close()
        conn.close()

        # Generate file
        timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
        filename = f"resource_report_{report_type}_{timestamp}"

        if format_type == 'excel':
            output = io.BytesIO()
            with pd.ExcelWriter(output, engine='openpyxl') as writer:
                df.to_excel(writer, sheet_name='Report', index=False)

                # Auto-adjust columns width
                worksheet = writer.sheets['Report']
                for column in worksheet.columns:
                    max_length = 0
                    column = [cell for cell in column]
                    for cell in column:
                        try:
                            if len(str(cell.value)) > max_length:
                                max_length = len(str(cell.value))
                        except:
                            pass
                    adjusted_width = (max_length + 2)
                    worksheet.column_dimensions[column[0].column_letter].width = adjusted_width

            output.seek(0)

            log_audit('EXPORT', 'resource_report', None,
                      new_values={'type': report_type, 'format': 'excel', 'rows': len(df)},
                      additional_info=f"Resource Report {report_type} exportiert")

            return send_file(output,
                             mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
                             as_attachment=True,
                             download_name=f'{filename}.xlsx')

        else:  # CSV
            output = io.StringIO()
            df.to_csv(output, index=False, sep=';', encoding='utf-8-sig')
            output.seek(0)

            log_audit('EXPORT', 'resource_report', None,
                      new_values={'type': report_type, 'format': 'csv', 'rows': len(df)},
                      additional_info=f"Resource Report {report_type} exportiert")

            return send_file(io.BytesIO(output.getvalue().encode('utf-8-sig')),
                             mimetype='text/csv',
                             as_attachment=True,
                             download_name=f'{filename}.csv')

    # Wenn kein Download, zeige Report-Formular
    return render_template('resource_report.html',
                           datetime=datetime,
                           timedelta=timedelta,
                           username=session.get('username'))

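# Illustrative sketch of triggering a report download with the query parameters
# that resources_report() reads. Base URL, session cookie and the example date
# values are assumptions.
def _example_download_report(base_url, session_cookie, report_type="usage",
                             fmt="excel", date_from="2024-01-01", date_to="2024-01-31"):
    return requests.get(
        f"{base_url}/resources/report",
        params={"download": "true", "type": report_type, "format": fmt,
                "from": date_from, "to": date_to},
        cookies={"admin_session": session_cookie},
        timeout=30,
    )
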
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)