Dateien
Hetzner-Backup/v2_adminpanel/utils/backup.py
2025-07-03 20:38:33 +00:00

500 Zeilen
18 KiB
Python

import os
import time
import gzip
import logging
import subprocess
import json
import shutil
from pathlib import Path
from datetime import datetime
from zoneinfo import ZoneInfo
from cryptography.fernet import Fernet
from db import get_db_connection, get_db_cursor
from config import BACKUP_DIR, DATABASE_CONFIG, EMAIL_ENABLED, BACKUP_ENCRYPTION_KEY
from utils.audit import log_audit
from utils.github_backup import GitHubBackupManager, create_server_backup_impl
logger = logging.getLogger(__name__)
def get_or_create_encryption_key():
    """Return the Fernet key used to encrypt backups, creating one if needed.

    Resolution order:
      1. ``BACKUP_ENCRYPTION_KEY`` from config, if it validates as a Fernet key
      2. the key file persisted under ``BACKUP_DIR/.backup_key``
      3. a freshly generated key, which is persisted to that key file

    Returns:
        bytes: a valid Fernet key.
    """
    key_file = BACKUP_DIR / ".backup_key"
    # Prefer the configured key, but only if it is actually a valid Fernet key.
    if BACKUP_ENCRYPTION_KEY:
        try:
            # Fernet() raises ValueError/TypeError on a malformed key.
            Fernet(BACKUP_ENCRYPTION_KEY.encode())
            return BACKUP_ENCRYPTION_KEY.encode()
        except (ValueError, TypeError):
            # Don't fail hard: fall through to the persisted/generated key,
            # but leave a trace instead of swallowing the problem silently.
            logger.warning("BACKUP_ENCRYPTION_KEY is not a valid Fernet key; ignoring it")
    # If no valid key in ENV, check the persisted key file.
    if key_file.exists():
        return key_file.read_bytes()
    # No key anywhere: generate a new one and persist it for future runs.
    key = Fernet.generate_key()
    key_file.write_bytes(key)
    logger.info("New backup encryption key created")
    return key
def create_backup(backup_type="manual", created_by=None):
    """Create an encrypted database backup.

    Runs ``pg_dump``, gzip-compresses the output, encrypts it with the
    backup Fernet key, and writes it to ``BACKUP_DIR``. Progress and the
    final outcome are tracked in the ``backup_history`` table.

    Args:
        backup_type: Label stored in ``backup_history`` (e.g. "manual",
            "scheduled").
        created_by: User recorded as the creator; defaults to 'system'.

    Returns:
        tuple[bool, str]: ``(True, filename)`` on success,
        ``(False, error_message)`` on failure.
    """
    start_time = time.time()
    timestamp = datetime.now(ZoneInfo("Europe/Berlin")).strftime("%Y%m%d_%H%M%S")
    filename = f"backup_v2docker_{timestamp}_encrypted.sql.gz.enc"
    filepath = BACKUP_DIR / filename
    # Register the run up-front so a crash still leaves an 'in_progress' row.
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            cur.execute("""
                INSERT INTO backup_history
                (filename, filepath, backup_type, status, created_by, is_encrypted)
                VALUES (%s, %s, %s, %s, %s, %s)
                RETURNING id
            """, (filename, str(filepath), backup_type, 'in_progress',
                  created_by or 'system', True))
            backup_id = cur.fetchone()[0]
            conn.commit()
    try:
        # PostgreSQL dump command. str() guards against an int port in config:
        # subprocess.run requires all argv entries to be strings.
        dump_command = [
            'pg_dump',
            '-h', DATABASE_CONFIG['host'],
            '-p', str(DATABASE_CONFIG['port']),
            '-U', DATABASE_CONFIG['user'],
            '-d', DATABASE_CONFIG['dbname'],
            '--no-password',
            '--verbose'
        ]
        # Pass the password via PGPASSWORD so it never appears in argv.
        env = os.environ.copy()
        env['PGPASSWORD'] = DATABASE_CONFIG['password']
        # Execute dump
        result = subprocess.run(dump_command, capture_output=True, text=True, env=env)
        if result.returncode != 0:
            raise Exception(f"pg_dump failed: {result.stderr}")
        dump_data = result.stdout.encode('utf-8')
        # Compress, then encrypt (compressing ciphertext would be pointless).
        compressed_data = gzip.compress(dump_data)
        key = get_or_create_encryption_key()
        f = Fernet(key)
        encrypted_data = f.encrypt(compressed_data)
        # Save
        filepath.write_bytes(encrypted_data)
        # Collect statistics for the history row.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public'")
                tables_count = cur.fetchone()[0]
                # n_live_tup is an estimate, but good enough for reporting.
                cur.execute("SELECT SUM(n_live_tup) FROM pg_stat_user_tables")
                records_count = cur.fetchone()[0] or 0
        duration = time.time() - start_time
        filesize = filepath.stat().st_size
        # Mark the run as successful.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    UPDATE backup_history
                    SET status = %s, filesize = %s, tables_count = %s,
                        records_count = %s, duration_seconds = %s
                    WHERE id = %s
                """, ('success', filesize, tables_count, records_count, duration, backup_id))
                conn.commit()
        # Audit log
        log_audit('BACKUP', 'database', backup_id,
                  additional_info=f"Backup created: {filename} ({filesize} bytes)")
        # Email notification (if configured)
        send_backup_notification(True, filename, filesize, duration)
        logger.info(f"Backup successfully created: {filename}")
        # Apply retention policy - keep only last 5 local backups
        cleanup_old_backups("database", 5)
        return True, filename
    except Exception as e:
        # Record the failure on the history row before reporting it.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    UPDATE backup_history
                    SET status = %s, error_message = %s, duration_seconds = %s
                    WHERE id = %s
                """, ('failed', str(e), time.time() - start_time, backup_id))
                conn.commit()
        logger.error(f"Backup failed: {e}")
        send_backup_notification(False, filename, error=str(e))
        return False, str(e)
def restore_backup(backup_id, encryption_key=None):
    """Restore a database backup identified by its ``backup_history`` id.

    Reads the backup file, decrypts it (when flagged encrypted), gunzips
    the SQL dump, and pipes it into ``psql``.

    Args:
        backup_id: Primary key of the row in ``backup_history``.
        encryption_key: Optional explicit Fernet key (string); falls back
            to the module's configured/persisted key when omitted.

    Returns:
        tuple[bool, str]: ``(True, message)`` on success,
        ``(False, error_message)`` on failure.

    Raises:
        Exception: If the history row or the backup file does not exist.
    """
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            # Get backup info
            cur.execute("""
                SELECT filename, filepath, is_encrypted
                FROM backup_history
                WHERE id = %s
            """, (backup_id,))
            backup_info = cur.fetchone()
    if not backup_info:
        raise Exception("Backup not found")
    filename, filepath, is_encrypted = backup_info
    filepath = Path(filepath)
    if not filepath.exists():
        raise Exception("Backup file not found")
    try:
        # Read file
        encrypted_data = filepath.read_bytes()
        # Decrypt
        if is_encrypted:
            key = encryption_key.encode() if encryption_key else get_or_create_encryption_key()
            try:
                f = Fernet(key)
                compressed_data = f.decrypt(encrypted_data)
            except Exception as exc:
                # InvalidToken/ValueError both mean a wrong or corrupt key;
                # chain the cause so the real error stays visible in logs.
                raise Exception("Decryption failed. Wrong password?") from exc
        else:
            compressed_data = encrypted_data
        # Decompress
        dump_data = gzip.decompress(compressed_data)
        sql_commands = dump_data.decode('utf-8')
        # Restore database. str() guards against an int port in config:
        # subprocess.run requires all argv entries to be strings.
        restore_command = [
            'psql',
            '-h', DATABASE_CONFIG['host'],
            '-p', str(DATABASE_CONFIG['port']),
            '-U', DATABASE_CONFIG['user'],
            '-d', DATABASE_CONFIG['dbname'],
            '--no-password'
        ]
        # Pass the password via PGPASSWORD so it never appears in argv.
        env = os.environ.copy()
        env['PGPASSWORD'] = DATABASE_CONFIG['password']
        result = subprocess.run(restore_command, input=sql_commands,
                                capture_output=True, text=True, env=env)
        if result.returncode != 0:
            raise Exception(f"Restore failed: {result.stderr}")
        # Audit log
        log_audit('RESTORE', 'database', backup_id,
                  additional_info=f"Backup restored: {filename}")
        return True, "Backup successfully restored"
    except Exception as e:
        logger.error(f"Restore failed: {e}")
        return False, str(e)
def send_backup_notification(success, filename, filesize=None, duration=None, error=None):
    """Send email notification (if configured).

    Currently a stub: email delivery is prepared but disabled, so this
    only emits a log line when EMAIL_ENABLED is set.
    """
    if not EMAIL_ENABLED:
        return
    # Email function prepared but disabled
    # TODO: Implement when email server is configured
    outcome = 'successful' if success else 'failed'
    logger.info(f"Email notification prepared: Backup {outcome}")
def _mark_backup_locally_deleted(backup_id):
    """Set ``local_deleted = TRUE`` on one ``backup_history`` row."""
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            cur.execute("""
                UPDATE backup_history
                SET local_deleted = TRUE
                WHERE id = %s
            """, (backup_id,))
            conn.commit()


def cleanup_old_backups(backup_type="database", keep_count=5):
    """Clean up old local backups, keeping only the most recent ones.

    Deletes the backup files beyond the newest ``keep_count`` and flags
    their ``backup_history`` rows as ``local_deleted``. Rows whose file
    is already missing on disk are flagged without a delete attempt.
    Errors are logged, never raised.

    Args:
        backup_type: Which backup category to prune ('database', 'server').
        keep_count: Number of most recent local backups to retain.
    """
    try:
        # Get list of local backups from database, newest first.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    SELECT id, filename, filepath
                    FROM backup_history
                    WHERE backup_type = %s
                    AND status = 'success'
                    AND local_deleted = FALSE
                    AND filepath IS NOT NULL
                    ORDER BY created_at DESC
                """, (backup_type,))
                backups = cur.fetchall()
        if len(backups) <= keep_count:
            logger.info(f"No cleanup needed. Found {len(backups)} {backup_type} backups, keeping {keep_count}")
            return
        # Everything past the newest keep_count rows is eligible for deletion.
        backups_to_delete = backups[keep_count:]
        deleted_count = 0
        for backup_id, filename, filepath in backups_to_delete:
            try:
                if filepath and os.path.exists(filepath):
                    os.unlink(filepath)
                    logger.info(f"Deleted old backup: {filename}")
                    _mark_backup_locally_deleted(backup_id)
                    deleted_count += 1
                else:
                    # File doesn't exist anymore; just reconcile the database.
                    _mark_backup_locally_deleted(backup_id)
            except Exception as e:
                # One failed deletion must not abort the rest of the sweep.
                logger.error(f"Failed to delete backup {filename}: {e}")
        logger.info(f"Backup cleanup completed. Deleted {deleted_count} old {backup_type} backups")
    except Exception as e:
        logger.error(f"Backup cleanup failed: {e}")
def create_backup_with_github(backup_type="manual", created_by=None, push_to_github=True, delete_local=True):
    """Create a database backup and optionally push it to GitHub.

    Wraps :func:`create_backup`; on success the file is moved into the
    ``database-backups`` staging directory and pushed via
    ``GitHubBackupManager``. On push failure the file is moved back so
    the local copy is never lost.

    Args:
        backup_type: Forwarded to :func:`create_backup`.
        created_by: Forwarded to :func:`create_backup`.
        push_to_github: When True, upload the backup to GitHub.
        delete_local: When True and the push succeeded, remove the local file.

    Returns:
        tuple[bool, str]: ``(True, filename)`` on success (possibly with a
        GitHub-failure note appended), ``(False, error)`` when the backup
        itself failed.
    """
    # Create the backup first; nothing to push if this fails.
    success, result = create_backup(backup_type, created_by)
    if not success:
        return success, result
    filename = result
    filepath = BACKUP_DIR / filename
    if push_to_github:
        try:
            # Stage the file in the directory the GitHub repo mirrors.
            db_backup_dir = Path("/opt/v2-Docker/database-backups")
            db_backup_dir.mkdir(exist_ok=True)
            target_path = db_backup_dir / filename
            # Use shutil.move instead of rename to handle cross-device links
            shutil.move(str(filepath), str(target_path))
            # Push to GitHub
            github = GitHubBackupManager()
            git_success, git_result = github.push_backup(target_path, "database")
            if git_success:
                logger.info(f"Backup pushed to GitHub: {filename}")
                # Delete local file if requested
                if delete_local:
                    target_path.unlink()
                    logger.info(f"Local backup deleted: {filename}")
                # Record upload state on the history row.
                with get_db_connection() as conn:
                    with get_db_cursor(conn) as cur:
                        cur.execute("""
                            UPDATE backup_history
                            SET github_uploaded = TRUE,
                                local_deleted = %s,
                                github_path = %s
                            WHERE filename = %s
                        """, (delete_local, f"database-backups/{filename}", filename))
                        conn.commit()
            else:
                logger.error(f"Failed to push to GitHub: {git_result}")
                # Push failed: move the file back so the local copy survives.
                shutil.move(str(target_path), str(filepath))
        except Exception as e:
            # The backup itself succeeded, so still report success,
            # but surface the upload problem in the returned message.
            logger.error(f"GitHub upload error: {str(e)}")
            return True, f"{filename} (GitHub upload failed: {str(e)})"
    return True, filename
def create_container_server_backup_info(created_by="system"):
    """Create a server-info JSON file in a container environment.

    A full server backup is only possible from the host, so inside the
    container this writes a JSON snapshot of what is visible: the mounted
    docker-compose file, non-secret env vars, and recent server backups.
    All probes are best-effort; missing/unreadable files are skipped.

    Args:
        created_by: Recorded in the info file as the initiator.

    Returns:
        tuple[bool, str]: ``(True, filepath)`` on success,
        ``(False, error_message)`` on failure.
    """
    try:
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"server_backup_info_{timestamp}.json"
        filepath = Path("/app/backups") / filename
        # Collect server info available in container
        server_info = {
            "backup_type": "server_info",
            "created_at": datetime.now().isoformat(),
            "created_by": created_by,
            "container_environment": True,
            "message": "Full server backups nur über Host-System möglich. Dies ist eine Info-Datei.",
            "docker_compose": None,
            "env_vars": {},
            "existing_backups": []
        }
        # Try to read docker-compose if mounted (best-effort).
        if os.path.exists("/app/docker-compose.yaml"):
            try:
                with open("/app/docker-compose.yaml", 'r') as f:
                    server_info["docker_compose"] = f.read()
            except OSError:
                logger.debug("Could not read /app/docker-compose.yaml")
        # Try to read env vars, skipping anything that looks like a secret.
        if os.path.exists("/app/.env"):
            try:
                with open("/app/.env", 'r') as f:
                    for line in f:
                        if '=' in line and not any(secret in line.upper() for secret in ['PASSWORD', 'SECRET', 'KEY']):
                            key, value = line.strip().split('=', 1)
                            # Mask long values too, as an extra safety net.
                            server_info["env_vars"][key] = "***" if len(value) > 20 else value
            except (OSError, UnicodeDecodeError):
                logger.debug("Could not read /app/.env")
        # List the 10 most recent existing server backups (best-effort).
        if os.path.exists("/app/server-backups"):
            try:
                server_info["existing_backups"] = sorted(os.listdir("/app/server-backups"))[-10:]
            except OSError:
                logger.debug("Could not list /app/server-backups")
        # Write info file
        with open(filepath, 'w') as f:
            json.dump(server_info, f, indent=2)
        logger.info(f"Container server backup info created: {filepath}")
        return True, str(filepath)
    except Exception as e:
        logger.error(f"Container server backup info failed: {e}")
        return False, str(e)
def create_server_backup(created_by=None, push_to_github=True, delete_local=True):
    """Create a full server backup and track it in ``backup_history``.

    Delegates the actual archiving to ``create_server_backup_impl`` and
    optionally pushes the resulting file to GitHub. Retention keeps the
    five most recent local server backups.

    Args:
        created_by: User recorded as the creator; defaults to 'system'.
        push_to_github: When True, upload the backup to GitHub.
        delete_local: When True and the push succeeded, remove the local file.

    Returns:
        tuple[bool, str]: ``(True, filename)`` on success,
        ``(False, error_message)`` on failure.
    """
    start_time = time.time()
    # Register the run up-front so a crash still leaves an 'in_progress' row.
    with get_db_connection() as conn:
        with get_db_cursor(conn) as cur:
            cur.execute("""
                INSERT INTO backup_history
                (backup_type, status, created_by, is_server_backup)
                VALUES (%s, %s, %s, %s)
                RETURNING id
            """, ('server', 'in_progress', created_by or 'system', True))
            backup_id = cur.fetchone()[0]
            conn.commit()
    try:
        # Create server backup - always use full backup now
        success, result = create_server_backup_impl(created_by)
        if not success:
            raise Exception(result)
        backup_file = result
        filename = os.path.basename(backup_file)
        filesize = os.path.getsize(backup_file)
        # Mark the run as successful.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    UPDATE backup_history
                    SET status = %s, filename = %s, filepath = %s,
                        filesize = %s, duration_seconds = %s
                    WHERE id = %s
                """, ('success', filename, backup_file, filesize,
                      time.time() - start_time, backup_id))
                conn.commit()
        if push_to_github:
            try:
                # Push to GitHub
                github = GitHubBackupManager()
                git_success, git_result = github.push_backup(backup_file, "server")
                if git_success:
                    logger.info(f"Server backup pushed to GitHub: {filename}")
                    # Delete local file if requested
                    if delete_local:
                        os.unlink(backup_file)
                        logger.info(f"Local server backup deleted: {filename}")
                    # Record upload state on the history row.
                    with get_db_connection() as conn:
                        with get_db_cursor(conn) as cur:
                            cur.execute("""
                                UPDATE backup_history
                                SET github_uploaded = TRUE,
                                    local_deleted = %s,
                                    github_path = %s
                                WHERE id = %s
                            """, (delete_local, f"server-backups/{filename}", backup_id))
                            conn.commit()
                else:
                    logger.error(f"Failed to push server backup to GitHub: {git_result}")
            except Exception as e:
                # Upload failure is non-fatal: the backup itself succeeded.
                logger.error(f"GitHub upload error for server backup: {str(e)}")
        # Audit log
        log_audit('BACKUP', 'server', backup_id,
                  additional_info=f"Server backup created: {filename} ({filesize} bytes)")
        # Apply retention policy - keep only last 5 local server backups
        cleanup_old_backups("server", 5)
        return True, filename
    except Exception as e:
        # Record the failure on the history row before reporting it.
        with get_db_connection() as conn:
            with get_db_cursor(conn) as cur:
                cur.execute("""
                    UPDATE backup_history
                    SET status = %s, error_message = %s, duration_seconds = %s
                    WHERE id = %s
                """, ('failed', str(e), time.time() - start_time, backup_id))
                conn.commit()
        logger.error(f"Server backup failed: {e}")
        return False, str(e)