Commit 04585e95b6 by Claude Project Manager, 2025-08-01 23:50:28 +02:00
290 changed files with 64086 additions and 0 deletions

database/__init__.py (regular file, 0 lines added)

database/accounts.db (regular file, binary file not shown)

database/db_manager.py (regular file, 589 lines added)

@@ -0,0 +1,589 @@
"""
Datenbankmanager für den Social Media Account Generator.
"""
import os
import json
import sqlite3
import logging
from datetime import datetime
from typing import Dict, List, Any, Optional, Tuple, Union
from config.paths import PathConfig
logger = logging.getLogger("db_manager")
class DatabaseManager:
"""Klasse zur Verwaltung der Datenbank für Account-Informationen."""
def __init__(self, db_path: str = None):
"""
Initialisiert den DatabaseManager.
Args:
db_path: Pfad zur Datenbank-Datei (falls None, wird PathConfig.MAIN_DB verwendet)
"""
self.db_path = db_path if db_path is not None else PathConfig.MAIN_DB
# Stelle sicher, dass das Datenbankverzeichnis existiert
os.makedirs(os.path.dirname(self.db_path), exist_ok=True)
# Datenbank initialisieren
self.init_db()
def init_db(self) -> None:
"""Initialisiert die Datenbank und erstellt die benötigten Tabellen, wenn sie nicht existieren."""
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
# Schema v2 laden und ausführen
try:
self._init_schema_v2(cursor)
conn.commit() # Commit nach Schema v2 Initialisierung
except Exception as e:
logger.warning(f"Konnte Schema v2 nicht initialisieren: {e}")
# Accounts-Tabelle erstellen
cursor.execute('''
CREATE TABLE IF NOT EXISTS accounts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
platform TEXT NOT NULL,
username TEXT NOT NULL,
password TEXT NOT NULL,
email TEXT,
phone TEXT,
full_name TEXT,
created_at TEXT NOT NULL,
last_login TEXT,
notes TEXT,
cookies TEXT,
status TEXT,
fingerprint_id TEXT,
session_id TEXT,
last_session_update TEXT
)
''')
# Migration für bestehende Datenbanken
try:
cursor.execute("PRAGMA table_info(accounts)")
columns = [column[1] for column in cursor.fetchall()]
if "fingerprint_id" not in columns:
cursor.execute("ALTER TABLE accounts ADD COLUMN fingerprint_id TEXT")
logger.info("Added fingerprint_id column to accounts table")
if "session_id" not in columns:
cursor.execute("ALTER TABLE accounts ADD COLUMN session_id TEXT")
logger.info("Added session_id column to accounts table")
if "last_session_update" not in columns:
cursor.execute("ALTER TABLE accounts ADD COLUMN last_session_update TEXT")
logger.info("Added last_session_update column to accounts table")
except Exception as e:
logger.warning(f"Migration warning: {e}")
# Settings-Tabelle erstellen
cursor.execute('''
CREATE TABLE IF NOT EXISTS settings (
key TEXT PRIMARY KEY,
value TEXT NOT NULL
)
''')
conn.commit()
conn.close()
logger.info("Datenbank initialisiert")
except sqlite3.Error as e:
logger.error(f"Fehler bei der Datenbankinitialisierung: {e}")
def add_account(self, account_data: Dict[str, Any]) -> int:
"""
Fügt einen Account zur Datenbank hinzu.
Args:
account_data: Dictionary mit Account-Daten
Returns:
ID des hinzugefügten Accounts oder -1 im Fehlerfall
"""
try:
# Prüfe, ob erforderliche Felder vorhanden sind
required_fields = ["platform", "username", "password"]
for field in required_fields:
if field not in account_data:
logger.error(f"Fehlendes Pflichtfeld: {field}")
return -1
# Sicherstellen, dass created_at vorhanden ist
if "created_at" not in account_data:
account_data["created_at"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
# SQL-Anweisung vorbereiten
fields = ", ".join(account_data.keys())
placeholders = ", ".join(["?" for _ in account_data])
query = f"INSERT INTO accounts ({fields}) VALUES ({placeholders})"
# Anweisung ausführen
cursor.execute(query, list(account_data.values()))
# ID des hinzugefügten Datensatzes abrufen
account_id = cursor.lastrowid
conn.commit()
conn.close()
logger.info(f"Account hinzugefügt: {account_data['username']} (ID: {account_id})")
return account_id
except sqlite3.Error as e:
logger.error(f"Fehler beim Hinzufügen des Accounts: {e}")
return -1
def get_account(self, account_id: int) -> Optional[Dict[str, Any]]:
"""
Gibt einen Account anhand seiner ID zurück.
Args:
account_id: ID des Accounts
Returns:
Dictionary mit Account-Daten oder None, wenn der Account nicht gefunden wurde
"""
try:
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row # Für dict-like Zugriff auf Zeilen
cursor = conn.cursor()
cursor.execute("SELECT * FROM accounts WHERE id = ?", (account_id,))
row = cursor.fetchone()
conn.close()
if row:
# Konvertiere Row in Dictionary
account = dict(row)
logger.debug(f"Account gefunden: {account['username']} (ID: {account_id})")
return account
else:
logger.warning(f"Account nicht gefunden: ID {account_id}")
return None
except sqlite3.Error as e:
logger.error(f"Fehler beim Abrufen des Accounts: {e}")
return None
def get_all_accounts(self) -> List[Dict[str, Any]]:
"""
Gibt alle Accounts zurück.
Returns:
Liste von Dictionaries mit Account-Daten
"""
try:
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("SELECT * FROM accounts ORDER BY id DESC")
rows = cursor.fetchall()
conn.close()
# Konvertiere Rows in Dictionaries
accounts = [dict(row) for row in rows]
logger.info(f"{len(accounts)} Accounts abgerufen")
return accounts
except sqlite3.Error as e:
logger.error(f"Fehler beim Abrufen aller Accounts: {e}")
return []
def get_accounts_by_platform(self, platform: str) -> List[Dict[str, Any]]:
"""
Gibt alle Accounts einer bestimmten Plattform zurück.
Args:
platform: Plattformname (z.B. "instagram")
Returns:
Liste von Dictionaries mit Account-Daten
"""
try:
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("SELECT * FROM accounts WHERE platform = ? ORDER BY id DESC", (platform.lower(),))
rows = cursor.fetchall()
conn.close()
# Konvertiere Rows in Dictionaries
accounts = [dict(row) for row in rows]
logger.info(f"{len(accounts)} Accounts für Plattform '{platform}' abgerufen")
return accounts
except sqlite3.Error as e:
logger.error(f"Fehler beim Abrufen der Accounts für Plattform '{platform}': {e}")
return []
def update_account(self, account_id: int, update_data: Dict[str, Any]) -> bool:
"""
Aktualisiert einen Account in der Datenbank.
Args:
account_id: ID des zu aktualisierenden Accounts
update_data: Dictionary mit zu aktualisierenden Feldern
Returns:
True bei Erfolg, False im Fehlerfall
"""
if not update_data:
logger.warning("Keine Aktualisierungsdaten bereitgestellt")
return False
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
# SQL-Anweisung vorbereiten
set_clause = ", ".join([f"{field} = ?" for field in update_data.keys()])
values = list(update_data.values())
values.append(account_id)
query = f"UPDATE accounts SET {set_clause} WHERE id = ?"
# Anweisung ausführen
cursor.execute(query, values)
conn.commit()
conn.close()
logger.info(f"Account aktualisiert: ID {account_id}")
return True
except sqlite3.Error as e:
logger.error(f"Fehler beim Aktualisieren des Accounts: {e}")
return False
def delete_account(self, account_id: int) -> bool:
"""
Löscht einen Account aus der Datenbank.
Args:
account_id: ID des zu löschenden Accounts
Returns:
True bei Erfolg, False im Fehlerfall
"""
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute("DELETE FROM accounts WHERE id = ?", (account_id,))
conn.commit()
conn.close()
logger.info(f"Account gelöscht: ID {account_id}")
return True
except sqlite3.Error as e:
logger.error(f"Fehler beim Löschen des Accounts: {e}")
return False
def search_accounts(self, query: str, platform: Optional[str] = None) -> List[Dict[str, Any]]:
"""
Sucht nach Accounts in der Datenbank.
Args:
query: Suchbegriff
platform: Optional, Plattform für die Einschränkung der Suche
Returns:
Liste von Dictionaries mit gefundenen Account-Daten
"""
try:
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
# Suchbegriff für LIKE-Operator vorbereiten
search_term = f"%{query}%"
if platform:
query_sql = """
SELECT * FROM accounts
WHERE (username LIKE ? OR email LIKE ? OR phone LIKE ? OR full_name LIKE ?)
AND platform = ?
ORDER BY id DESC
"""
cursor.execute(query_sql, (search_term, search_term, search_term, search_term, platform.lower()))
else:
query_sql = """
SELECT * FROM accounts
WHERE username LIKE ? OR email LIKE ? OR phone LIKE ? OR full_name LIKE ?
ORDER BY id DESC
"""
cursor.execute(query_sql, (search_term, search_term, search_term, search_term))
rows = cursor.fetchall()
conn.close()
# Konvertiere Rows in Dictionaries
accounts = [dict(row) for row in rows]
logger.info(f"{len(accounts)} Accounts gefunden für Suchbegriff '{query}'")
return accounts
except sqlite3.Error as e:
logger.error(f"Fehler bei der Suche nach Accounts: {e}")
return []
def get_connection(self) -> sqlite3.Connection:
"""
Gibt eine neue Datenbankverbindung zurück.
Returns:
SQLite Connection Objekt
"""
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row
return conn
def get_account_count(self, platform: Optional[str] = None) -> int:
"""
Gibt die Anzahl der Accounts zurück.
Args:
platform: Optional, Plattform für die Einschränkung der Zählung
Returns:
Anzahl der Accounts
"""
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
if platform:
cursor.execute("SELECT COUNT(*) FROM accounts WHERE platform = ?", (platform.lower(),))
else:
cursor.execute("SELECT COUNT(*) FROM accounts")
count = cursor.fetchone()[0]
conn.close()
return count
except sqlite3.Error as e:
logger.error(f"Fehler beim Zählen der Accounts: {e}")
return 0
def get_setting(self, key: str, default: Any = None) -> Any:
"""
Gibt einen Einstellungswert zurück.
Args:
key: Schlüssel der Einstellung
default: Standardwert, falls die Einstellung nicht gefunden wurde
Returns:
Wert der Einstellung oder der Standardwert
"""
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute("SELECT value FROM settings WHERE key = ?", (key,))
row = cursor.fetchone()
conn.close()
if row:
# Versuche, den Wert als JSON zu parsen
try:
return json.loads(row[0])
except json.JSONDecodeError:
# Wenn kein gültiges JSON, gib den Rohwert zurück
return row[0]
else:
return default
except sqlite3.Error as e:
logger.error(f"Fehler beim Abrufen der Einstellung '{key}': {e}")
return default
def set_setting(self, key: str, value: Any) -> bool:
"""
Setzt einen Einstellungswert.
Args:
key: Schlüssel der Einstellung
value: Wert der Einstellung (wird als JSON gespeichert, wenn es kein String ist)
Returns:
True bei Erfolg, False im Fehlerfall
"""
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
# Wert als JSON speichern, wenn es kein String ist
if not isinstance(value, str):
value = json.dumps(value)
# Prüfen, ob die Einstellung bereits existiert
cursor.execute("SELECT COUNT(*) FROM settings WHERE key = ?", (key,))
exists = cursor.fetchone()[0] > 0
if exists:
cursor.execute("UPDATE settings SET value = ? WHERE key = ?", (value, key))
else:
cursor.execute("INSERT INTO settings (key, value) VALUES (?, ?)", (key, value))
conn.commit()
conn.close()
logger.info(f"Einstellung gespeichert: {key}")
return True
except sqlite3.Error as e:
logger.error(f"Fehler beim Speichern der Einstellung '{key}': {e}")
return False
def delete_setting(self, key: str) -> bool:
"""
Löscht eine Einstellung.
Args:
key: Schlüssel der zu löschenden Einstellung
Returns:
True bei Erfolg, False im Fehlerfall
"""
try:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute("DELETE FROM settings WHERE key = ?", (key,))
conn.commit()
conn.close()
logger.info(f"Einstellung gelöscht: {key}")
return True
except sqlite3.Error as e:
logger.error(f"Fehler beim Löschen der Einstellung '{key}': {e}")
return False
def backup_database(self, backup_path: Optional[str] = None) -> bool:
"""
Erstellt ein Backup der Datenbank.
Args:
backup_path: Optional, Pfad für das Backup
Returns:
True bei Erfolg, False im Fehlerfall
"""
if not backup_path:
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_path = f"database/backup/accounts_{timestamp}.db"
# Stelle sicher, dass das Backup-Verzeichnis existiert
os.makedirs(os.path.dirname(backup_path), exist_ok=True)
try:
# SQLite-Backup-API verwenden
conn = sqlite3.connect(self.db_path)
backup_conn = sqlite3.connect(backup_path)
conn.backup(backup_conn)
conn.close()
backup_conn.close()
logger.info(f"Datenbank-Backup erstellt: {backup_path}")
return True
except sqlite3.Error as e:
logger.error(f"Fehler beim Erstellen des Datenbank-Backups: {e}")
return False
def _init_schema_v2(self, cursor) -> None:
"""Initialisiert das Schema v2 mit Session-Tabellen."""
schema_path = PathConfig.SCHEMA_V2
try:
# Versuche schema_v2.sql zu laden
if PathConfig.file_exists(schema_path):
logger.info(f"Lade Schema v2 aus {schema_path}")
with open(schema_path, 'r', encoding='utf-8') as f:
schema_sql = f.read()
# Führe alle SQL-Statements aus
# SQLite unterstützt nur ein Statement pro execute(),
# daher müssen wir die Statements aufteilen
statements = [s.strip() for s in schema_sql.split(';') if s.strip()]
for statement in statements:
if statement: # Ignoriere leere Statements
cursor.execute(statement)
logger.info("Schema v2 erfolgreich aus SQL-Datei geladen")
else:
logger.warning(f"schema_v2.sql nicht gefunden unter {schema_path}")
# Fallback: Erstelle minimal notwendige Tabellen
self._create_minimal_v2_tables(cursor)
except Exception as e:
logger.error(f"Fehler beim Laden von Schema v2: {e}")
# Fallback: Erstelle minimal notwendige Tabellen
self._create_minimal_v2_tables(cursor)
def _create_minimal_v2_tables(self, cursor) -> None:
"""Erstellt minimal notwendige v2 Tabellen als Fallback."""
try:
# Nur die wichtigsten Tabellen für One-Click-Login
cursor.execute('''
CREATE TABLE IF NOT EXISTS browser_sessions (
id TEXT PRIMARY KEY,
fingerprint_id TEXT NOT NULL,
cookies TEXT NOT NULL,
local_storage TEXT,
session_storage TEXT,
account_id TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
last_used TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
health_score REAL DEFAULT 1.0
)
''')
cursor.execute('''
CREATE TABLE IF NOT EXISTS browser_fingerprints (
id TEXT PRIMARY KEY,
canvas_noise_config TEXT NOT NULL,
webrtc_config TEXT NOT NULL,
fonts TEXT NOT NULL,
hardware_config TEXT NOT NULL,
navigator_props TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
''')
logger.info("Minimale v2 Tabellen erstellt")
except sqlite3.Error as e:
logger.error(f"Fehler beim Erstellen der minimalen v2 Tabellen: {e}")

@@ -0,0 +1,19 @@
-- Migration: Add browser storage columns to browser_sessions table
-- This migration adds columns for storing LocalStorage and SessionStorage data
-- Add local_storage column
ALTER TABLE browser_sessions ADD COLUMN local_storage TEXT;
-- Add session_storage column
ALTER TABLE browser_sessions ADD COLUMN session_storage TEXT;
-- Add consent_data column for tracking cookie consent status
ALTER TABLE browser_sessions ADD COLUMN consent_data TEXT;
-- Add storage_updated_at to track when storage was last updated
ALTER TABLE browser_sessions ADD COLUMN storage_updated_at DATETIME;
-- Update existing sessions to have NULL storage (backward compatibility)
UPDATE browser_sessions
SET storage_updated_at = updated_at
WHERE storage_updated_at IS NULL;
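
Note that SQLite's ALTER TABLE ... ADD COLUMN raises an error when the column already exists, so a migration runner typically guards each statement the same way db_manager.py guards the accounts columns. A minimal sketch of applying these columns idempotently (the helper name and database path are assumptions):

# Sketch: add the browser_sessions storage columns only if they are missing.
import sqlite3

def add_column_if_missing(conn: sqlite3.Connection, table: str, column: str, decl: str) -> None:
    # PRAGMA table_info returns one row per column; index 1 is the column name.
    cols = [row[1] for row in conn.execute(f"PRAGMA table_info({table})")]
    if column not in cols:
        conn.execute(f"ALTER TABLE {table} ADD COLUMN {column} {decl}")

with sqlite3.connect("database/accounts.db") as conn:
    for name, decl in [
        ("local_storage", "TEXT"),
        ("session_storage", "TEXT"),
        ("consent_data", "TEXT"),
        ("storage_updated_at", "DATETIME"),
    ]:
        add_column_if_missing(conn, "browser_sessions", name, decl)
    conn.commit()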

@@ -0,0 +1,66 @@
-- Migration: Add fingerprint persistence fields for account-bound fingerprints
-- Date: 2025-01-13
-- Add new columns to browser_fingerprints table for persistent fingerprint support
ALTER TABLE browser_fingerprints ADD COLUMN static_components TEXT; -- JSON: Unchangeable hardware/platform values
ALTER TABLE browser_fingerprints ADD COLUMN rotation_seed TEXT; -- Seed for deterministic noise generation
ALTER TABLE browser_fingerprints ADD COLUMN rotation_policy TEXT DEFAULT 'normal'; -- strict/normal/relaxed
ALTER TABLE browser_fingerprints ADD COLUMN last_major_rotation TIMESTAMP;
ALTER TABLE browser_fingerprints ADD COLUMN trust_score REAL DEFAULT 0.0; -- How established this fingerprint is
ALTER TABLE browser_fingerprints ADD COLUMN evolution_history TEXT; -- JSON: Track gradual changes
ALTER TABLE browser_fingerprints ADD COLUMN account_bound BOOLEAN DEFAULT 0; -- Is this bound to specific account(s)
-- Create table for fingerprint-account associations (many-to-many)
CREATE TABLE IF NOT EXISTS fingerprint_accounts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
fingerprint_id TEXT NOT NULL,
account_id TEXT NOT NULL,
assigned_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
primary_fingerprint BOOLEAN DEFAULT 0,
last_used TIMESTAMP,
success_count INTEGER DEFAULT 0,
failure_count INTEGER DEFAULT 0,
FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id),
FOREIGN KEY (account_id) REFERENCES accounts(id),
UNIQUE(fingerprint_id, account_id)
);
-- Create table for fingerprint rotation history
CREATE TABLE IF NOT EXISTS fingerprint_rotation_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
fingerprint_id TEXT NOT NULL,
rotation_type TEXT NOT NULL, -- 'minor', 'gradual', 'major'
rotated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
previous_values TEXT NOT NULL, -- JSON: What changed
new_values TEXT NOT NULL, -- JSON: New values
trigger_reason TEXT, -- Why rotation happened
FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id)
);
-- Create indexes for performance
CREATE INDEX IF NOT EXISTS idx_fingerprints_account_bound ON browser_fingerprints(account_bound);
CREATE INDEX IF NOT EXISTS idx_fingerprints_trust_score ON browser_fingerprints(trust_score);
CREATE INDEX IF NOT EXISTS idx_fingerprints_rotation_policy ON browser_fingerprints(rotation_policy);
CREATE INDEX IF NOT EXISTS idx_fingerprint_accounts_account ON fingerprint_accounts(account_id);
CREATE INDEX IF NOT EXISTS idx_fingerprint_accounts_fingerprint ON fingerprint_accounts(fingerprint_id);
CREATE INDEX IF NOT EXISTS idx_rotation_history_fingerprint ON fingerprint_rotation_history(fingerprint_id);
CREATE INDEX IF NOT EXISTS idx_rotation_history_timestamp ON fingerprint_rotation_history(rotated_at);
-- Create view for account fingerprint status
CREATE VIEW IF NOT EXISTS v_account_fingerprints AS
SELECT
a.id as account_id,
a.username,
bf.id as fingerprint_id,
bf.trust_score,
bf.rotation_policy,
bf.last_major_rotation,
fa.primary_fingerprint,
fa.last_used,
fa.success_count,
fa.failure_count,
ROUND(CAST(fa.success_count AS REAL) / NULLIF(fa.success_count + fa.failure_count, 0), 2) as success_rate
FROM accounts a
LEFT JOIN fingerprint_accounts fa ON a.id = fa.account_id
LEFT JOIN browser_fingerprints bf ON fa.fingerprint_id = bf.id
WHERE fa.primary_fingerprint = 1 OR fa.fingerprint_id IS NOT NULL;
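
For illustration, a small sketch of how the fingerprint_accounts association table above might be updated after a login attempt (the helper is an assumption, not part of this migration). The success_rate in the v_account_fingerprints view is then derived from these counters, with NULLIF guarding against division by zero:

# Sketch: record one attempt outcome for a fingerprint/account pair.
import sqlite3

def record_attempt(conn: sqlite3.Connection, fingerprint_id: str, account_id: str, success: bool) -> None:
    # UNIQUE(fingerprint_id, account_id) makes this insert a no-op if the pair exists.
    conn.execute(
        "INSERT OR IGNORE INTO fingerprint_accounts (fingerprint_id, account_id) VALUES (?, ?)",
        (fingerprint_id, account_id),
    )
    column = "success_count" if success else "failure_count"
    conn.execute(
        f"UPDATE fingerprint_accounts SET {column} = {column} + 1, last_used = CURRENT_TIMESTAMP "
        "WHERE fingerprint_id = ? AND account_id = ?",
        (fingerprint_id, account_id),
    )
    conn.commit()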

@@ -0,0 +1,18 @@
-- Migration: Add fingerprint support to accounts table
-- This migration adds fingerprint_id column to accounts table
-- Add fingerprint_id column to accounts table if it doesn't exist
ALTER TABLE accounts ADD COLUMN fingerprint_id TEXT;
-- Add session_id column to accounts table if it doesn't exist
ALTER TABLE accounts ADD COLUMN session_id TEXT;
-- Add last_session_update column to track session health
ALTER TABLE accounts ADD COLUMN last_session_update TEXT;
-- Create index for faster lookups
CREATE INDEX IF NOT EXISTS idx_accounts_fingerprint ON accounts(fingerprint_id);
CREATE INDEX IF NOT EXISTS idx_accounts_session ON accounts(session_id);
-- Update existing accounts to have NULL fingerprint_id (will be generated on login)
UPDATE accounts SET fingerprint_id = NULL WHERE fingerprint_id IS NULL;

@@ -0,0 +1,156 @@
-- Migration: Add Method Rotation System
-- Version: 2025-07-24-001
-- Description: Adds complete method rotation infrastructure for tracking and managing
-- registration/login method strategies across all platforms
-- Method strategies table - stores configuration and performance data for each method
CREATE TABLE IF NOT EXISTS method_strategies (
id TEXT PRIMARY KEY,
platform TEXT NOT NULL,
method_name TEXT NOT NULL,
priority INTEGER NOT NULL DEFAULT 5,
success_rate REAL DEFAULT 0.0,
failure_rate REAL DEFAULT 0.0,
last_success TIMESTAMP,
last_failure TIMESTAMP,
cooldown_period INTEGER DEFAULT 0, -- seconds
max_daily_attempts INTEGER DEFAULT 10,
risk_level TEXT DEFAULT 'MEDIUM', -- LOW, MEDIUM, HIGH
is_active BOOLEAN DEFAULT 1,
configuration TEXT, -- JSON configuration for method-specific settings
tags TEXT, -- JSON array for method categorization
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(platform, method_name)
);
-- Rotation sessions table - tracks active rotation sessions
CREATE TABLE IF NOT EXISTS rotation_sessions (
id TEXT PRIMARY KEY,
platform TEXT NOT NULL,
account_id TEXT,
current_method TEXT NOT NULL,
attempted_methods TEXT, -- JSON array of attempted method names
session_start TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_rotation TIMESTAMP,
rotation_count INTEGER DEFAULT 0,
success_count INTEGER DEFAULT 0,
failure_count INTEGER DEFAULT 0,
is_active BOOLEAN DEFAULT 1,
rotation_reason TEXT,
fingerprint_id TEXT,
session_metadata TEXT, -- JSON for additional session data
FOREIGN KEY (account_id) REFERENCES accounts(id),
FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id)
);
-- Rotation events table - detailed event logging for all rotation activities
CREATE TABLE IF NOT EXISTS rotation_events (
id TEXT PRIMARY KEY,
session_id TEXT NOT NULL,
method_name TEXT NOT NULL,
event_type TEXT NOT NULL, -- SUCCESS, FAILURE, ROTATION, COOLDOWN, CONFIG_CHANGE
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
details TEXT, -- JSON event-specific details
error_message TEXT,
performance_metrics TEXT, -- JSON: execution_time, memory_usage, etc.
correlation_id TEXT, -- For linking related events
FOREIGN KEY (session_id) REFERENCES rotation_sessions(id)
);
-- Method performance analytics table - aggregated daily performance data
CREATE TABLE IF NOT EXISTS method_performance_analytics (
id TEXT PRIMARY KEY,
platform TEXT NOT NULL,
method_name TEXT NOT NULL,
date DATE NOT NULL,
total_attempts INTEGER DEFAULT 0,
successful_attempts INTEGER DEFAULT 0,
failed_attempts INTEGER DEFAULT 0,
avg_execution_time REAL DEFAULT 0.0,
avg_success_rate REAL DEFAULT 0.0,
peak_usage_hour INTEGER, -- 0-23 hour when most used
error_categories TEXT, -- JSON: categorized error types and counts
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(platform, method_name, date)
);
-- Method cooldowns table - tracks temporary method restrictions
CREATE TABLE IF NOT EXISTS method_cooldowns (
id TEXT PRIMARY KEY,
platform TEXT NOT NULL,
method_name TEXT NOT NULL,
cooldown_until TIMESTAMP NOT NULL,
reason TEXT NOT NULL,
applied_by TEXT DEFAULT 'system',
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(platform, method_name)
);
-- Platform method states table - stores platform-specific rotation state
CREATE TABLE IF NOT EXISTS platform_method_states (
id TEXT PRIMARY KEY,
platform TEXT NOT NULL,
last_successful_method TEXT,
last_successful_at TIMESTAMP,
preferred_methods TEXT, -- JSON array of method names in preference order
blocked_methods TEXT, -- JSON array of temporarily blocked methods
daily_attempt_counts TEXT, -- JSON: {"email": 3, "phone": 1}
reset_date DATE, -- When daily counts reset
rotation_strategy TEXT DEFAULT 'adaptive', -- sequential, random, adaptive, smart
emergency_mode BOOLEAN DEFAULT 0,
metadata TEXT, -- JSON: additional platform-specific state
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE(platform)
);
-- Indexes for performance optimization
CREATE INDEX IF NOT EXISTS idx_method_strategies_platform ON method_strategies(platform);
CREATE INDEX IF NOT EXISTS idx_method_strategies_active ON method_strategies(platform, is_active);
CREATE INDEX IF NOT EXISTS idx_method_strategies_priority ON method_strategies(platform, priority DESC, success_rate DESC);
CREATE INDEX IF NOT EXISTS idx_rotation_sessions_platform ON rotation_sessions(platform);
CREATE INDEX IF NOT EXISTS idx_rotation_sessions_active ON rotation_sessions(platform, is_active);
CREATE INDEX IF NOT EXISTS idx_rotation_sessions_account ON rotation_sessions(account_id);
CREATE INDEX IF NOT EXISTS idx_rotation_events_session ON rotation_events(session_id);
CREATE INDEX IF NOT EXISTS idx_rotation_events_timestamp ON rotation_events(timestamp);
CREATE INDEX IF NOT EXISTS idx_rotation_events_method ON rotation_events(method_name);
CREATE INDEX IF NOT EXISTS idx_method_performance_platform_date ON method_performance_analytics(platform, date);
CREATE INDEX IF NOT EXISTS idx_method_performance_method ON method_performance_analytics(method_name);
CREATE INDEX IF NOT EXISTS idx_method_cooldowns_platform_method ON method_cooldowns(platform, method_name);
CREATE INDEX IF NOT EXISTS idx_method_cooldowns_until ON method_cooldowns(cooldown_until);
CREATE INDEX IF NOT EXISTS idx_platform_method_states_platform ON platform_method_states(platform);
-- Insert default method strategies for existing platforms
INSERT OR IGNORE INTO method_strategies (id, platform, method_name, priority, max_daily_attempts, cooldown_period, risk_level, configuration, tags) VALUES
-- Instagram methods
('instagram_email', 'instagram', 'email', 8, 20, 300, 'LOW', '{"email_domain": "z5m7q9dk3ah2v1plx6ju.com", "require_phone_verification": false, "auto_verify_email": true}', '["primary", "reliable"]'),
('instagram_phone', 'instagram', 'phone', 6, 10, 600, 'MEDIUM', '{"require_email_backup": true, "phone_verification_timeout": 300}', '["secondary", "verification"]'),
('instagram_social', 'instagram', 'social_login', 4, 5, 1800, 'HIGH', '{"supported_providers": ["facebook"], "fallback_to_email": true}', '["alternative", "high_risk"]'),
-- TikTok methods
('tiktok_email', 'tiktok', 'email', 8, 25, 240, 'LOW', '{"email_domain": "z5m7q9dk3ah2v1plx6ju.com", "require_phone_verification": false}', '["primary", "reliable"]'),
('tiktok_phone', 'tiktok', 'phone', 7, 15, 480, 'MEDIUM', '{"require_email_backup": false, "phone_verification_timeout": 180}', '["secondary", "fast"]'),
-- X (Twitter) methods
('x_email', 'x', 'email', 8, 15, 360, 'LOW', '{"email_domain": "z5m7q9dk3ah2v1plx6ju.com", "require_phone_verification": true}', '["primary", "stable"]'),
('x_phone', 'x', 'phone', 6, 8, 720, 'MEDIUM', '{"require_email_backup": true, "phone_verification_timeout": 300}', '["secondary", "verification"]'),
-- Gmail methods
('gmail_standard', 'gmail', 'standard_registration', 9, 30, 180, 'LOW', '{"recovery_email": false, "recovery_phone": false}', '["primary", "google"]'),
('gmail_recovery', 'gmail', 'recovery_registration', 7, 10, 600, 'MEDIUM', '{"recovery_email": true, "recovery_phone": false}', '["secondary", "secure"]');
-- Insert default platform method states
INSERT OR IGNORE INTO platform_method_states (id, platform, preferred_methods, rotation_strategy, reset_date) VALUES
('state_instagram', 'instagram', '["email", "phone", "social_login"]', 'adaptive', DATE('now')),
('state_tiktok', 'tiktok', '["email", "phone"]', 'adaptive', DATE('now')),
('state_x', 'x', '["email", "phone"]', 'adaptive', DATE('now')),
('state_gmail', 'gmail', '["standard_registration", "recovery_registration"]', 'adaptive', DATE('now'));
-- Migration completed successfully
INSERT OR IGNORE INTO schema_migrations (version, description, applied_at) VALUES
('2025-07-24-001', 'Add Method Rotation System', CURRENT_TIMESTAMP);
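
For illustration, a sketch of how a rotation engine might choose the next method from method_strategies while skipping methods under an active cooldown (the function and selection policy are assumptions, not part of this migration; the ORDER BY mirrors idx_method_strategies_priority):

# Sketch: pick the highest-priority active method that is not cooling down.
import sqlite3
from typing import Optional

def pick_next_method(conn: sqlite3.Connection, platform: str) -> Optional[str]:
    row = conn.execute(
        """
        SELECT ms.method_name
        FROM method_strategies ms
        LEFT JOIN method_cooldowns mc
               ON mc.platform = ms.platform
              AND mc.method_name = ms.method_name
              AND mc.cooldown_until > CURRENT_TIMESTAMP
        WHERE ms.platform = ?
          AND ms.is_active = 1
          AND mc.id IS NULL                 -- no active cooldown row matched
        ORDER BY ms.priority DESC, ms.success_rate DESC
        LIMIT 1
        """,
        (platform,),
    ).fetchone()
    return row[0] if row else None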

@@ -0,0 +1,60 @@
-- Migration: Remove unused fingerprint columns and tables
-- Date: 2025-01-13
-- Description: Removes evolution history, trust score, rotation policy and related unused columns
-- Drop unused table
DROP TABLE IF EXISTS fingerprint_rotation_history;
-- Drop unused view
DROP VIEW IF EXISTS v_account_fingerprints;
-- Create temporary table with desired schema
CREATE TABLE browser_fingerprints_new (
id TEXT PRIMARY KEY,
canvas_noise_config TEXT,
webrtc_config TEXT,
fonts TEXT,
hardware_config TEXT,
navigator_props TEXT,
webgl_vendor TEXT,
webgl_renderer TEXT,
audio_context_config TEXT,
timezone TEXT,
timezone_offset INTEGER,
plugins TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
last_rotated TIMESTAMP,
platform_specific TEXT,
static_components TEXT,
rotation_seed TEXT,
account_bound BOOLEAN DEFAULT FALSE
);
-- Copy data from old table (excluding unused columns)
INSERT INTO browser_fingerprints_new (
id, canvas_noise_config, webrtc_config, fonts,
hardware_config, navigator_props, webgl_vendor,
webgl_renderer, audio_context_config, timezone,
timezone_offset, plugins, created_at, last_rotated,
platform_specific, static_components, rotation_seed,
account_bound
)
SELECT
id, canvas_noise_config, webrtc_config, fonts,
hardware_config, navigator_props, webgl_vendor,
webgl_renderer, audio_context_config, timezone,
timezone_offset, plugins, created_at, last_rotated,
platform_specific, static_components, rotation_seed,
account_bound
FROM browser_fingerprints;
-- Drop old table
DROP TABLE browser_fingerprints;
-- Rename new table to original name
ALTER TABLE browser_fingerprints_new RENAME TO browser_fingerprints;
-- Recreate indexes
CREATE INDEX idx_fingerprints_created ON browser_fingerprints(created_at);
CREATE INDEX idx_fingerprints_rotated ON browser_fingerprints(last_rotated);
CREATE INDEX idx_fingerprints_account_bound ON browser_fingerprints(account_bound);
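
The migration above follows SQLite's usual pattern for dropping columns: create a replacement table, copy the data, drop the old table, and rename. A sketch of running such a rebuild with foreign keys disabled for the duration (file and function names are illustrative):

# Sketch: apply a table-rebuild migration script in one go.
import sqlite3

def run_rebuild_migration(db_path: str, sql_path: str) -> None:
    with open(sql_path, "r", encoding="utf-8") as f:
        migration_sql = f.read()

    conn = sqlite3.connect(db_path)
    try:
        conn.execute("PRAGMA foreign_keys = OFF")   # avoid FK errors while tables are swapped
        conn.executescript(migration_sql)           # executescript runs multiple statements
        conn.execute("PRAGMA foreign_key_check")    # sanity check after the rebuild
        conn.execute("PRAGMA foreign_keys = ON")
        conn.commit()
    finally:
        conn.close()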

database/schema_v2.sql (regular file, 187 lines added)

@@ -0,0 +1,187 @@
-- Clean Architecture Database Schema v2
-- Extends the existing schema with new tables
-- Session Management
CREATE TABLE IF NOT EXISTS browser_sessions (
id TEXT PRIMARY KEY,
fingerprint_id TEXT NOT NULL,
cookies TEXT NOT NULL, -- JSON encrypted
local_storage TEXT, -- JSON encrypted
session_storage TEXT, -- JSON encrypted
proxy_config TEXT, -- JSON
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_used TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
health_score REAL DEFAULT 1.0,
account_id TEXT,
user_agent TEXT,
viewport_width INTEGER DEFAULT 1920,
viewport_height INTEGER DEFAULT 1080,
locale TEXT DEFAULT 'de-DE',
timezone TEXT DEFAULT 'Europe/Berlin',
active BOOLEAN DEFAULT 1,
error_count INTEGER DEFAULT 0,
success_count INTEGER DEFAULT 0,
FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id),
FOREIGN KEY (account_id) REFERENCES accounts(id)
);
-- Fingerprints
CREATE TABLE IF NOT EXISTS browser_fingerprints (
id TEXT PRIMARY KEY,
canvas_noise_config TEXT NOT NULL, -- JSON
webrtc_config TEXT NOT NULL, -- JSON
fonts TEXT NOT NULL, -- JSON array
hardware_config TEXT NOT NULL, -- JSON
navigator_props TEXT NOT NULL, -- JSON
webgl_vendor TEXT,
webgl_renderer TEXT,
audio_context_config TEXT, -- JSON
timezone TEXT,
timezone_offset INTEGER,
plugins TEXT, -- JSON array
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
last_rotated TIMESTAMP,
platform_specific TEXT, -- Platform-specific adjustments
static_components TEXT, -- JSON: Unchangeable hardware/platform values
rotation_seed TEXT, -- Seed for deterministic noise generation
account_bound BOOLEAN DEFAULT 0 -- Is this bound to specific account(s)
);
-- Rate Limiting
CREATE TABLE IF NOT EXISTS rate_limit_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
action_type TEXT NOT NULL,
duration_ms INTEGER NOT NULL,
success BOOLEAN NOT NULL,
response_code INTEGER,
session_id TEXT,
url TEXT,
element_selector TEXT,
error_message TEXT,
retry_count INTEGER DEFAULT 0,
metadata TEXT, -- JSON
FOREIGN KEY (session_id) REFERENCES browser_sessions(id)
);
-- Analytics
CREATE TABLE IF NOT EXISTS account_creation_analytics (
id INTEGER PRIMARY KEY AUTOINCREMENT,
event_id TEXT UNIQUE NOT NULL,
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
account_id TEXT,
session_id TEXT NOT NULL,
fingerprint_id TEXT NOT NULL,
duration_seconds REAL NOT NULL,
success BOOLEAN NOT NULL,
error_type TEXT,
error_message TEXT,
workflow_steps TEXT NOT NULL, -- JSON
metadata TEXT, -- JSON
total_retry_count INTEGER DEFAULT 0,
network_requests INTEGER DEFAULT 0,
screenshots_taken INTEGER DEFAULT 0,
proxy_used BOOLEAN DEFAULT 0,
proxy_type TEXT,
browser_type TEXT DEFAULT 'chromium',
headless BOOLEAN DEFAULT 0,
success_rate REAL,
FOREIGN KEY (account_id) REFERENCES accounts(id),
FOREIGN KEY (session_id) REFERENCES browser_sessions(id),
FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id)
);
-- Error Events
CREATE TABLE IF NOT EXISTS error_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
error_id TEXT UNIQUE NOT NULL,
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
error_type TEXT NOT NULL,
error_message TEXT NOT NULL,
stack_trace TEXT,
context TEXT NOT NULL, -- JSON
recovery_attempted BOOLEAN DEFAULT 0,
recovery_successful BOOLEAN DEFAULT 0,
recovery_attempts TEXT, -- JSON array
severity TEXT DEFAULT 'medium',
platform TEXT,
session_id TEXT,
account_id TEXT,
correlation_id TEXT,
user_impact BOOLEAN DEFAULT 1,
system_impact BOOLEAN DEFAULT 0,
data_loss BOOLEAN DEFAULT 0,
FOREIGN KEY (session_id) REFERENCES browser_sessions(id),
FOREIGN KEY (account_id) REFERENCES accounts(id)
);
-- Rate Limit Policies
CREATE TABLE IF NOT EXISTS rate_limit_policies (
id INTEGER PRIMARY KEY AUTOINCREMENT,
action_type TEXT UNIQUE NOT NULL,
min_delay REAL NOT NULL,
max_delay REAL NOT NULL,
adaptive BOOLEAN DEFAULT 1,
backoff_multiplier REAL DEFAULT 1.5,
max_retries INTEGER DEFAULT 3,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Session Pool Status
CREATE TABLE IF NOT EXISTS session_pool_status (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
total_sessions INTEGER NOT NULL,
active_sessions INTEGER NOT NULL,
healthy_sessions INTEGER NOT NULL,
failed_sessions INTEGER NOT NULL,
avg_health_score REAL,
metadata TEXT -- JSON
);
-- Indexes for performance
CREATE INDEX IF NOT EXISTS idx_sessions_last_used ON browser_sessions(last_used);
CREATE INDEX IF NOT EXISTS idx_sessions_health ON browser_sessions(health_score);
CREATE INDEX IF NOT EXISTS idx_sessions_active ON browser_sessions(active);
CREATE INDEX IF NOT EXISTS idx_rate_limits_timestamp ON rate_limit_events(timestamp);
CREATE INDEX IF NOT EXISTS idx_rate_limits_action ON rate_limit_events(action_type);
CREATE INDEX IF NOT EXISTS idx_analytics_timestamp ON account_creation_analytics(timestamp);
CREATE INDEX IF NOT EXISTS idx_analytics_success ON account_creation_analytics(success);
CREATE INDEX IF NOT EXISTS idx_analytics_platform ON account_creation_analytics(metadata);
CREATE INDEX IF NOT EXISTS idx_errors_timestamp ON error_events(timestamp);
CREATE INDEX IF NOT EXISTS idx_errors_type ON error_events(error_type);
CREATE INDEX IF NOT EXISTS idx_errors_severity ON error_events(severity);
-- Views for common queries
CREATE VIEW IF NOT EXISTS v_session_health AS
SELECT
bs.id,
bs.health_score,
bs.error_count,
bs.success_count,
bs.last_used,
COUNT(aca.id) as total_accounts,
AVG(aca.success_rate) as avg_success_rate
FROM browser_sessions bs
LEFT JOIN account_creation_analytics aca ON bs.id = aca.session_id
GROUP BY bs.id;
CREATE VIEW IF NOT EXISTS v_daily_analytics AS
SELECT
DATE(timestamp) as date,
COUNT(*) as total_attempts,
SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful,
AVG(duration_seconds) as avg_duration,
AVG(total_retry_count) as avg_retries
FROM account_creation_analytics
GROUP BY DATE(timestamp);
CREATE VIEW IF NOT EXISTS v_error_summary AS
SELECT
error_type,
COUNT(*) as error_count,
MIN(timestamp) as first_occurrence,
MAX(timestamp) as last_occurrence,
AVG(CASE WHEN recovery_successful = 1 THEN 1.0 ELSE 0.0 END) as recovery_rate
FROM error_events
GROUP BY error_type;
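
Finally, an illustrative sketch (database path assumed) of reading the reporting views defined above:

# Sketch: print a short report from the v_daily_analytics and v_error_summary views.
import sqlite3

conn = sqlite3.connect("database/accounts.db")
conn.row_factory = sqlite3.Row

for row in conn.execute("SELECT * FROM v_daily_analytics ORDER BY date DESC LIMIT 7"):
    print(row["date"], row["total_attempts"], row["successful"], round(row["avg_duration"] or 0, 1))

for row in conn.execute("SELECT * FROM v_error_summary ORDER BY error_count DESC LIMIT 5"):
    print(row["error_type"], row["error_count"], row["recovery_rate"])

conn.close()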