Dieser Commit ist enthalten in:
Claude Project Manager
2025-07-05 17:51:16 +02:00
Commit 0d7d888502
1594 geänderte Dateien mit 122839 neuen und 0 gelöschten Zeilen

Datei anzeigen

@@ -0,0 +1 @@
# Utils module initialization

37
v2_adminpanel/utils/audit.py Normale Datei
Datei anzeigen

@@ -0,0 +1,37 @@
import logging
from flask import session, request
from psycopg2.extras import Json
from db import get_db_connection, get_db_cursor
from utils.network import get_client_ip
logger = logging.getLogger(__name__)
def log_audit(action, entity_type, entity_id=None, old_values=None, new_values=None, additional_info=None):
    """Write an entry to the audit_log table.

    Args:
        action: Short action tag (e.g. 'BACKUP', 'RESTORE').
        entity_type: Type of the affected entity (e.g. 'database').
        entity_id: Optional primary key of the affected entity.
        old_values: Optional dict with values before the change.
        new_values: Optional dict with values after the change.
        additional_info: Optional dict (stored as JSONB) or plain string.

    Errors are logged and rolled back but never raised, so auditing can
    never break the calling operation.
    """
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            try:
                username = session.get('username', 'system')
                # NOTE(review): truthiness of `request` assumes we are inside a
                # Flask request context -- confirm for background/CLI callers.
                ip_address = get_client_ip() if request else None
                user_agent = request.headers.get('User-Agent') if request else None
                # Lazy %-args: the message is only built when INFO is enabled.
                logger.info("Audit log - IP address captured: %s, Action: %s, User: %s",
                            ip_address, action, username)
                # Convert dictionaries to JSONB; `is not None` so an empty dict
                # is stored as {} rather than silently dropped to NULL.
                old_json = Json(old_values) if old_values is not None else None
                new_json = Json(new_values) if new_values is not None else None
                cur.execute("""
                    INSERT INTO audit_log
                    (username, action, entity_type, entity_id, old_values, new_values,
                     ip_address, user_agent, additional_info)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, (username, action, entity_type, entity_id, old_json, new_json,
                      ip_address, user_agent,
                      Json(additional_info) if isinstance(additional_info, dict) else additional_info))
                conn.commit()
            except Exception as e:
                # Best-effort audit: never propagate, but leave a trace.
                logger.error("Audit log error: %s", e)
                conn.rollback()

223
v2_adminpanel/utils/backup.py Normale Datei
Datei anzeigen

@@ -0,0 +1,223 @@
import os
import time
import gzip
import logging
import subprocess
from pathlib import Path
from datetime import datetime
from zoneinfo import ZoneInfo
from cryptography.fernet import Fernet
from db import get_db_connection, get_db_cursor
from config import BACKUP_DIR, DATABASE_CONFIG, EMAIL_ENABLED, BACKUP_ENCRYPTION_KEY
from utils.audit import log_audit
logger = logging.getLogger(__name__)
def get_or_create_encryption_key():
    """Return the Fernet key used to encrypt backups.

    Resolution order:
      1. BACKUP_ENCRYPTION_KEY from configuration (validated first),
      2. the key file persisted inside BACKUP_DIR,
      3. a freshly generated key, which is persisted for later runs.

    Returns:
        bytes: A valid Fernet key.
    """
    key_file = BACKUP_DIR / ".backup_key"
    # Prefer the configured key, but only if it is valid Fernet key material.
    if BACKUP_ENCRYPTION_KEY:
        try:
            # Validate the key (Fernet raises ValueError/TypeError on bad keys)
            Fernet(BACKUP_ENCRYPTION_KEY.encode())
            return BACKUP_ENCRYPTION_KEY.encode()
        except (ValueError, TypeError) as e:
            # Invalid key material: fall through to the file/generated key,
            # but leave a trace instead of swallowing the error silently.
            logger.warning("Configured BACKUP_ENCRYPTION_KEY is not a valid Fernet key: %s", e)
    # No valid key in the environment: reuse the persisted key if present.
    if key_file.exists():
        return key_file.read_bytes()
    # First run: generate a new key and persist it next to the backups.
    key = Fernet.generate_key()
    key_file.write_bytes(key)
    logger.info("New backup encryption key created")
    return key
def create_backup(backup_type="manual", created_by=None):
    """Create an encrypted database backup.

    Pipeline: pg_dump -> gzip -> Fernet encryption -> file in BACKUP_DIR.
    A backup_history row is inserted with status 'in_progress' first, then
    updated to 'success' or 'failed' when the run finishes.

    Args:
        backup_type: Label stored in backup_history (e.g. "manual").
        created_by: Username recorded as creator; defaults to 'system'.

    Returns:
        tuple: (True, filename) on success, (False, error message) on failure.
    """
    start_time = time.time()
    # Timestamped filename in Berlin local time, marked as encrypted.
    timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime("%Y%m%d_%H%M%S")
    filename = f"backup_v2docker_{timestamp}_encrypted.sql.gz.enc"
    filepath = BACKUP_DIR / filename
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            # Create backup entry up front so a crashed run remains visible
            # as 'in_progress' in the history table.
            cur.execute("""
                INSERT INTO backup_history
                (filename, filepath, backup_type, status, created_by, is_encrypted)
                VALUES (%s, %s, %s, %s, %s, %s)
                RETURNING id
            """, (filename, str(filepath), backup_type, 'in_progress',
                  created_by or 'system', True))
            backup_id = cur.fetchone()[0]
            conn.commit()
    try:
        # PostgreSQL dump command; --no-password forces PGPASSWORD usage below.
        dump_command = [
            'pg_dump',
            '-h', DATABASE_CONFIG['host'],
            '-p', DATABASE_CONFIG['port'],
            '-U', DATABASE_CONFIG['user'],
            '-d', DATABASE_CONFIG['dbname'],
            '--no-password',
            '--verbose'
        ]
        # Set PGPASSWORD in a copy of the environment (os.environ stays untouched).
        env = os.environ.copy()
        env['PGPASSWORD'] = DATABASE_CONFIG['password']
        # Execute dump; stdout carries the plain-SQL dump text.
        result = subprocess.run(dump_command, capture_output=True, text=True, env=env)
        if result.returncode != 0:
            raise Exception(f"pg_dump failed: {result.stderr}")
        dump_data = result.stdout.encode('utf-8')
        # Compress before encrypting (encrypted data would not compress).
        compressed_data = gzip.compress(dump_data)
        # Encrypt with the shared backup key (config value or persisted key file).
        key = get_or_create_encryption_key()
        f = Fernet(key)
        encrypted_data = f.encrypt(compressed_data)
        # Save the encrypted archive to disk.
        filepath.write_bytes(encrypted_data)
        # Collect statistics for the history row.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public'")
                tables_count = cur.fetchone()[0]
                # n_live_tup is an estimate from the stats collector, not an exact count.
                cur.execute("SELECT SUM(n_live_tup) FROM pg_stat_user_tables")
                records_count = cur.fetchone()[0] or 0
        duration = time.time() - start_time
        filesize = filepath.stat().st_size
        # Update backup entry to its final 'success' state.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    UPDATE backup_history
                    SET status = %s, filesize = %s, tables_count = %s,
                        records_count = %s, duration_seconds = %s
                    WHERE id = %s
                """, ('success', filesize, tables_count, records_count, duration, backup_id))
                conn.commit()
        # Audit log
        log_audit('BACKUP', 'database', backup_id,
                  additional_info=f"Backup created: (unknown) ({filesize} bytes)")
        # Email notification (if configured)
        send_backup_notification(True, filename, filesize, duration)
        logger.info(f"Backup successfully created: (unknown)")
        return True, filename
    except Exception as e:
        # Record the failure in backup_history before reporting it to the caller.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    UPDATE backup_history
                    SET status = %s, error_message = %s, duration_seconds = %s
                    WHERE id = %s
                """, ('failed', str(e), time.time() - start_time, backup_id))
                conn.commit()
        logger.error(f"Backup failed: {e}")
        send_backup_notification(False, filename, error=str(e))
        return False, str(e)
def restore_backup(backup_id, encryption_key=None):
    """Restore a database backup from the backup_history catalog.

    Args:
        backup_id: Primary key of the backup_history row to restore.
        encryption_key: Optional explicit Fernet key string; falls back to
            the configured/persisted backup key.

    Returns:
        tuple: (True, success message) or (False, error message).

    Raises:
        Exception: If the backup row or the backup file cannot be found.
    """
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            # Look up file location and encryption flag for this backup.
            cur.execute("""
                SELECT filename, filepath, is_encrypted
                FROM backup_history
                WHERE id = %s
            """, (backup_id,))
            backup_info = cur.fetchone()
    if not backup_info:
        raise Exception("Backup not found")
    filename, filepath, is_encrypted = backup_info
    filepath = Path(filepath)
    if not filepath.exists():
        raise Exception("Backup file not found")
    try:
        # Read the (possibly encrypted) archive from disk.
        encrypted_data = filepath.read_bytes()
        if is_encrypted:
            key = encryption_key.encode() if encryption_key else get_or_create_encryption_key()
            try:
                f = Fernet(key)
                compressed_data = f.decrypt(encrypted_data)
            except Exception as exc:
                # Narrowed from a bare `except:` and chained so the real
                # cryptography error stays visible in tracebacks/logs.
                raise Exception("Decryption failed. Wrong password?") from exc
        else:
            compressed_data = encrypted_data
        # Decompress back to the plain SQL dump.
        dump_data = gzip.decompress(compressed_data)
        sql_commands = dump_data.decode('utf-8')
        # Feed the dump into psql; --no-password forces PGPASSWORD usage.
        restore_command = [
            'psql',
            '-h', DATABASE_CONFIG['host'],
            '-p', DATABASE_CONFIG['port'],
            '-U', DATABASE_CONFIG['user'],
            '-d', DATABASE_CONFIG['dbname'],
            '--no-password'
        ]
        env = os.environ.copy()
        env['PGPASSWORD'] = DATABASE_CONFIG['password']
        result = subprocess.run(restore_command, input=sql_commands,
                                capture_output=True, text=True, env=env)
        if result.returncode != 0:
            raise Exception(f"Restore failed: {result.stderr}")
        # Audit log
        log_audit('RESTORE', 'database', backup_id,
                  additional_info=f"Backup restored: (unknown)")
        return True, "Backup successfully restored"
    except Exception as e:
        logger.error(f"Restore failed: {e}")
        return False, str(e)
def send_backup_notification(success, filename, filesize=None, duration=None, error=None):
    """Log a prepared email notification about a backup run (if email is enabled).

    Currently only writes a log line; actual mail delivery is not implemented.
    """
    if not EMAIL_ENABLED:
        return
    # Delivery is stubbed out until a mail server is available.
    # TODO: Implement when email server is configured
    outcome = 'successful' if success else 'failed'
    logger.info(f"Email notification prepared: Backup {outcome}")

203
v2_adminpanel/utils/export.py Normale Datei
Datei anzeigen

@@ -0,0 +1,203 @@
import pandas as pd
from io import BytesIO, StringIO
from datetime import datetime
from zoneinfo import ZoneInfo
from openpyxl.utils import get_column_letter
from flask import send_file
import csv
def create_excel_export(data, columns, filename_prefix="export"):
    """Build an .xlsx download response from tabular data.

    Args:
        data: Iterable of rows (sequences) matching `columns`.
        columns: Column header names.
        filename_prefix: Prefix for the generated filename.

    Returns:
        A Flask send_file response serving the workbook as an attachment.
    """
    df = pd.DataFrame(data, columns=columns)
    # Create Excel file in memory
    output = BytesIO()
    with pd.ExcelWriter(output, engine='openpyxl') as writer:
        df.to_excel(writer, index=False, sheet_name='Data')
        # Auto-adjust column widths. Guard the empty-data case: Series.max()
        # over zero rows is NaN, which would produce an invalid NaN width.
        worksheet = writer.sheets['Data']
        for idx, col in enumerate(df.columns):
            cell_max = 0 if df.empty else int(df[col].astype(str).map(len).max())
            max_length = max(cell_max, len(col)) + 2
            worksheet.column_dimensions[get_column_letter(idx + 1)].width = min(max_length, 50)
    output.seek(0)
    # Timestamped filename in Berlin local time, consistent with other exports.
    timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
    filename = f"{filename_prefix}_{timestamp}.xlsx"
    return send_file(
        output,
        mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        as_attachment=True,
        download_name=filename
    )
def create_csv_export(data, columns, filename_prefix="export"):
    """Build a CSV download response from tabular data.

    The payload is encoded as UTF-8 with a BOM so Excel detects the
    character set correctly.
    """
    buffer = StringIO()
    csv_writer = csv.writer(buffer)
    csv_writer.writerow(columns)    # header row
    csv_writer.writerows(data)      # data rows
    # UTF-8 with BOM for Excel compatibility
    payload = BytesIO(buffer.getvalue().encode('utf-8-sig'))
    # Timestamped filename in Berlin local time.
    stamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
    return send_file(
        payload,
        mimetype='text/csv',
        as_attachment=True,
        download_name=f"{filename_prefix}_{stamp}.csv"
    )
def format_datetime_for_export(dt):
    """Normalize a datetime (or ISO string) to 'YYYY-MM-DD HH:MM:SS' for export.

    Args:
        dt: A datetime, an ISO-8601 string, or a falsy value.

    Returns:
        str: The formatted timestamp; the original string unchanged when it
        is not parseable ISO-8601; '' for falsy input.
    """
    if not dt:
        return ''
    if isinstance(dt, str):
        try:
            dt = datetime.fromisoformat(dt)
        except ValueError:
            # Not ISO-8601: pass the raw string through rather than fail.
            # (Narrowed from a bare `except:` that hid unrelated errors.)
            return dt
    # Strip tzinfo: Excel cannot handle timezone-aware timestamps.
    if hasattr(dt, 'replace') and dt.tzinfo is not None:
        dt = dt.replace(tzinfo=None)
    return dt.strftime('%Y-%m-%d %H:%M:%S')
def prepare_license_export_data(licenses):
    """Map raw license rows to flat export rows.

    Expects positional tuples: id, key, customer name, email, active flag,
    max users, valid-from, valid-until, created-at, device limit,
    current device count, test-license flag.
    """
    rows = []
    for entry in licenses:
        rows.append([
            entry[0],                                   # ID
            entry[1],                                   # Key
            entry[2],                                   # Customer Name
            entry[3],                                   # Email
            'Aktiv' if entry[4] else 'Inaktiv',         # Active
            entry[5],                                   # Max Users
            format_datetime_for_export(entry[6]),       # Valid From
            format_datetime_for_export(entry[7]),       # Valid Until
            format_datetime_for_export(entry[8]),       # Created At
            entry[9],                                   # Device Limit
            entry[10] or 0,                             # Current Devices
            'Fake' if entry[11] else 'Full',            # Is Test License
        ])
    return rows
def prepare_customer_export_data(customers):
    """Map raw customer rows to flat export rows.

    Expects positional tuples: id, name, email, company, address, phone,
    created-at, license count, active license count (counts may be None).
    """
    return [
        [
            c[0],                                   # ID
            c[1],                                   # Name
            c[2],                                   # Email
            c[3],                                   # Company
            c[4],                                   # Address
            c[5],                                   # Phone
            format_datetime_for_export(c[6]),       # Created At
            c[7] or 0,                              # License Count
            c[8] or 0,                              # Active License Count
        ]
        for c in customers
    ]
def prepare_session_export_data(sessions):
    """Map raw session rows to flat export rows.

    Expects positional tuples: id, license key, username, computer name,
    hardware id, login time, last activity, active flag, ip address,
    app version, customer name, email.
    """
    return [
        [
            s[0],                                   # ID
            s[1],                                   # License Key
            s[2],                                   # Username
            s[3],                                   # Computer Name
            s[4],                                   # Hardware ID
            format_datetime_for_export(s[5]),       # Login Time
            format_datetime_for_export(s[6]),       # Last Activity
            'Aktiv' if s[7] else 'Beendet',         # Active
            s[8],                                   # IP Address
            s[9],                                   # App Version
            s[10],                                  # Customer Name
            s[11],                                  # Email
        ]
        for s in sessions
    ]
def prepare_audit_export_data(audit_logs):
    """Map audit-log dict rows to flat export rows (missing values -> '')."""
    rows = []
    for entry in audit_logs:
        old_vals = entry['old_values']
        new_vals = entry['new_values']
        rows.append([
            entry['id'],
            format_datetime_for_export(entry['timestamp']),
            entry['username'],
            entry['action'],
            entry['entity_type'],
            entry['entity_id'] or '',
            entry['ip_address'] or '',
            entry['user_agent'] or '',
            str(old_vals) if old_vals else '',
            str(new_vals) if new_vals else '',
            entry['additional_info'] or '',
        ])
    return rows
def create_batch_export(licenses):
    """Build an .xlsx download response for a batch of generated licenses.

    Args:
        licenses: Iterable of dicts with at least 'license_key'; optional
            keys get sensible defaults (see below).

    Returns:
        A Flask send_file response serving the workbook as an attachment.
    """
    export_data = []
    for license in licenses:
        export_data.append({
            'Lizenzschlüssel': license['license_key'],
            'Kunde': license.get('customer_name', ''),
            'Email': license.get('customer_email', ''),
            'Lizenztyp': license.get('license_type', 'full').upper(),
            'Geräte-Limit': license.get('device_limit', 3),
            'Gültig von': format_datetime_for_export(license.get('valid_from')),
            'Gültig bis': format_datetime_for_export(license.get('valid_until')),
            'Status': 'Aktiv' if license.get('is_active', True) else 'Inaktiv',
            'Fake-Lizenz': 'Ja' if license.get('is_test', False) else 'Nein'
        })
    df = pd.DataFrame(export_data)
    # Create Excel file in memory
    output = BytesIO()
    with pd.ExcelWriter(output, engine='openpyxl') as writer:
        df.to_excel(writer, index=False, sheet_name='Batch-Lizenzen')
        # Auto-adjust column widths. Guard the empty-batch case: Series.max()
        # over zero rows is NaN, which would produce an invalid NaN width.
        worksheet = writer.sheets['Batch-Lizenzen']
        for idx, col in enumerate(df.columns):
            cell_max = 0 if df.empty else int(df[col].astype(str).map(len).max())
            max_length = max(cell_max, len(col)) + 2
            worksheet.column_dimensions[get_column_letter(idx + 1)].width = min(max_length, 50)
    output.seek(0)
    # Timestamped filename in Berlin local time.
    timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m%d_%H%M%S')
    filename = f"batch_licenses_{timestamp}.xlsx"
    return send_file(
        output,
        mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        as_attachment=True,
        download_name=filename
    )

Datei anzeigen

@@ -0,0 +1,50 @@
import re
import secrets
from datetime import datetime
from zoneinfo import ZoneInfo
def generate_license_key(license_type='full'):
    """Generate a license key of the form AF-[FT]-YYYYMM-XXXX-XXXX-XXXX.

    AF      product identifier (Account Factory)
    F / T   full version / test version
    YYYYMM  year and month of generation (Europe/Berlin)
    XXXX    three blocks of four random characters

    The random alphabet omits visually confusable characters (0/O, 1/I/L).
    """
    # Unambiguous alphabet; secrets.choice gives cryptographic randomness.
    alphabet = 'ABCDEFGHJKLMNPQRSTUVWXYZ23456789'
    stamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime('%Y%m')
    version_flag = 'F' if license_type == 'full' else 'T'
    # Three random blocks of four characters each.
    random_blocks = [
        ''.join(secrets.choice(alphabet) for _ in range(4))
        for _ in range(3)
    ]
    return f"AF-{version_flag}-{stamp}-" + '-'.join(random_blocks)
def validate_license_key(key):
    """Check whether `key` matches the license key format.

    Accepted (case-insensitively): AF-[FT]-YYYYMM-XXXX-XXXX-XXXX, where each
    X block is four characters from [A-Z0-9].

    Returns:
        bool: True if the format matches; False otherwise, including for
        empty/None input.
    """
    if not key:
        return False
    # Validate against the uppercased key so lowercase input is accepted.
    key_format = r'^AF-[FT]-\d{6}-[A-Z0-9]{4}-[A-Z0-9]{4}-[A-Z0-9]{4}$'
    return re.match(key_format, key.upper()) is not None

Datei anzeigen

@@ -0,0 +1,23 @@
import logging
from flask import request
logger = logging.getLogger(__name__)
def get_client_ip():
    """Return the client IP, preferring proxy-set headers over remote_addr.

    Order: X-Real-IP (set by nginx), then the first entry of
    X-Forwarded-For, then the socket-level remote_addr.

    NOTE(review): both headers are client-supplied and spoofable unless a
    trusted reverse proxy strips/sets them -- confirm the nginx config.
    """
    # Hoist the header lookups; they are used for logging and selection.
    real_ip = request.headers.get('X-Real-IP')
    forwarded_for = request.headers.get('X-Forwarded-For')
    # Lazy %-args: the debug line is only built when INFO is enabled.
    logger.info("Headers - X-Real-IP: %s, X-Forwarded-For: %s, Remote-Addr: %s",
                real_ip, forwarded_for, request.remote_addr)
    if real_ip:
        return real_ip
    if forwarded_for:
        # X-Forwarded-For can contain a chain of IPs; the first is the client.
        return forwarded_for.split(',')[0].strip()
    # Fallback to the direct connection address.
    return request.remote_addr

Datei anzeigen

@@ -0,0 +1,114 @@
"""
Helper functions for managing partitioned tables
"""
import psycopg2
from datetime import datetime
from dateutil.relativedelta import relativedelta
import logging
logger = logging.getLogger(__name__)
def ensure_partition_exists(conn, table_name, timestamp):
    """
    Ensure a monthly partition exists for the given timestamp, creating it
    if necessary.

    Args:
        conn: Database connection
        table_name: Base table name (e.g., 'license_heartbeats')
        timestamp: datetime or ISO-8601 string the partition must cover

    Returns:
        bool: True if partition exists or was created, False on error
    """
    cursor = None
    try:
        cursor = conn.cursor()
        if isinstance(timestamp, str):
            timestamp = datetime.fromisoformat(timestamp)
        year = timestamp.year
        month = timestamp.month
        # Partitions are named <table>_<YYYY>_<MM>
        partition_name = f"{table_name}_{year}_{month:02d}"
        # Check if the partition already exists
        cursor.execute("""
            SELECT EXISTS (
                SELECT 1
                FROM pg_tables
                WHERE tablename = %s
            )
        """, (partition_name,))
        if cursor.fetchone()[0]:
            return True
        # Month range: [first of this month, first of next month)
        start_date = f"{year}-{month:02d}-01"
        if month == 12:
            end_date = f"{year + 1}-01-01"
        else:
            end_date = f"{year}-{month + 1:02d}-01"
        # Identifiers cannot be bound as parameters; table_name must come from
        # trusted code, never from user input. The date bounds ARE bound as
        # parameters instead of being interpolated into the SQL string.
        cursor.execute(f"""
            CREATE TABLE IF NOT EXISTS {partition_name} PARTITION OF {table_name}
            FOR VALUES FROM (%s) TO (%s)
        """, (start_date, end_date))
        conn.commit()
        logger.info(f"Created partition {partition_name}")
        return True
    except Exception as e:
        logger.error(f"Error ensuring partition exists: {e}")
        # Roll back so the connection is not left in an aborted transaction.
        try:
            conn.rollback()
        except Exception:
            pass
        return False
    finally:
        # Close the cursor on every path (the original leaked it on the
        # early 'already exists' return and on exceptions).
        if cursor is not None:
            cursor.close()
def create_future_partitions(conn, table_name, months_ahead=6):
    """
    Pre-create partitions covering the current month plus the next
    `months_ahead` months.

    Args:
        conn: Database connection
        table_name: Base table name
        months_ahead: Number of future months to cover
    """
    base = datetime.now()
    # Offset 0 covers the current month; 1..months_ahead cover the future.
    for offset in range(months_ahead + 1):
        ensure_partition_exists(conn, table_name, base + relativedelta(months=offset))
def check_table_exists(conn, table_name):
    """
    Check if a table exists in the database.

    Args:
        conn: Database connection
        table_name: Table name to check

    Returns:
        bool: True if table exists, False otherwise (and on query errors)
    """
    cursor = None
    try:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT EXISTS (
                SELECT 1
                FROM information_schema.tables
                WHERE table_name = %s
            )
        """, (table_name,))
        return cursor.fetchone()[0]
    except Exception as e:
        logger.error(f"Error checking if table exists: {e}")
        return False
    finally:
        # Release the cursor even when execute/fetch raises (the original
        # leaked it on the exception path).
        if cursor is not None:
            cursor.close()

Datei anzeigen

@@ -0,0 +1,39 @@
import logging
import requests
import config
def verify_recaptcha(response):
    """Verify a reCAPTCHA v2 response token against Google's siteverify API.

    Returns True when verification succeeds, when no secret key is configured
    (PoC mode), or on network errors (deliberate fail-open); False otherwise.
    """
    secret_key = config.RECAPTCHA_SECRET_KEY
    # Without a configured secret the CAPTCHA check is skipped (PoC behavior).
    if not secret_key:
        logging.warning("RECAPTCHA_SECRET_KEY nicht konfiguriert - CAPTCHA wird übersprungen")
        return True
    try:
        payload = {
            'secret': secret_key,
            'response': response
        }
        # Bounded timeout so a slow Google endpoint cannot hang the request.
        reply = requests.post('https://www.google.com/recaptcha/api/siteverify',
                              data=payload, timeout=5)
        outcome = reply.json()
        # Log failed validations with Google's error codes for debugging.
        if not outcome.get('success'):
            logging.warning(f"reCAPTCHA Validierung fehlgeschlagen: {outcome.get('error-codes', [])}")
        return outcome.get('success', False)
    except requests.exceptions.RequestException as e:
        logging.error(f"reCAPTCHA Verifizierung fehlgeschlagen: {str(e)}")
        # Network failure: fail open so users are not locked out.
        return True
    except Exception as e:
        logging.error(f"Unerwarteter Fehler bei reCAPTCHA: {str(e)}")
        return False