From a25a26a01a2d397dc0497efc8f3ce9132fbd3ba3 Mon Sep 17 00:00:00 2001 From: Claude Project Manager Date: Sun, 18 Jan 2026 18:15:34 +0100 Subject: [PATCH] Update changes --- .claude/settings.local.json | 9 +- CLAUDE_PROJECT_README.md | 13 +- browser/cookie_consent_handler.py | 57 +- browser/instagram_video_bypass.py | 521 ---- browser/playwright_manager.py | 224 +- check_rotation_system.py | 154 -- config/implementation_switch.py | 35 - config/license_config.json | 11 - .../platform_controllers/base_controller.py | 35 +- .../base_worker_thread.py | 74 +- .../facebook_controller.py | 14 +- .../platform_controllers/gmail_controller.py | 16 +- .../instagram_controller.py | 36 +- .../method_rotation_mixin.py | 17 +- .../platform_controllers/ok_ru_controller.py | 14 +- .../platform_controllers/tiktok_controller.py | 14 +- .../platform_controllers/x_controller.py | 14 +- controllers/session_controller.py | 23 +- database/accounts.db | Bin 409600 -> 417792 bytes debug_video_issue.py | 267 -- docs/overview.md | 2224 +++++++++++++++++ docs/production-architecture.md | 503 ++++ install_requirements.py | 108 - resources/icons/aegissight-logo-dark.svg | 20 + resources/icons/aegissight-logo.svg | 20 + resources/icons/intelsight-dark.svg | 53 - resources/icons/intelsight-logo.svg | 53 - resources/themes/dark.qss | 2 +- resources/themes/light.qss | 2 +- .../facebook/facebook_ui_helper.py | 133 +- .../instagram/instagram_registration.py | 158 +- .../instagram/instagram_ui_helper.py | 144 +- social_networks/tiktok/tiktok_registration.py | 220 +- tests/test_generator_tab_factory.py | 150 -- tests/test_method_rotation.py | 611 ----- themes/theme_config.py | 6 +- utils/human_behavior.py | 192 +- utils/modal_test.py | 195 -- utils/performance_monitor.py | 412 --- utils/process_guard.py | 252 +- utils/profile_export_service.py | 4 +- utils/proxy_rotator.py | 112 +- utils/rate_limit_handler.py | 351 +++ utils/theme_manager.py | 4 +- utils/username_generator.py | 221 +- views/about_dialog.py | 8 +- views/main_window.py | 6 +- 47 files changed, 4756 insertions(+), 2956 deletions(-) delete mode 100644 browser/instagram_video_bypass.py delete mode 100644 check_rotation_system.py delete mode 100644 config/implementation_switch.py delete mode 100644 config/license_config.json delete mode 100644 debug_video_issue.py create mode 100644 docs/overview.md create mode 100644 docs/production-architecture.md delete mode 100644 install_requirements.py create mode 100644 resources/icons/aegissight-logo-dark.svg create mode 100644 resources/icons/aegissight-logo.svg delete mode 100644 resources/icons/intelsight-dark.svg delete mode 100644 resources/icons/intelsight-logo.svg delete mode 100644 tests/test_generator_tab_factory.py delete mode 100644 tests/test_method_rotation.py delete mode 100644 utils/modal_test.py delete mode 100644 utils/performance_monitor.py create mode 100644 utils/rate_limit_handler.py diff --git a/.claude/settings.local.json b/.claude/settings.local.json index f350487..159d51c 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -9,7 +9,14 @@ "Bash(find:*)", "Bash(pkill:*)", "Bash(lsof:*)", - "Bash(ls:*)" + "Bash(ls:*)", + "Bash(Remove-Item -Path \"A:\\\\GiTea\\\\AccountForger\\\\debug_video_issue.py\" -Force)", + "Bash(Remove-Item -Path \"A:\\\\GiTea\\\\AccountForger\\\\utils\\\\modal_test.py\" -Force)", + "Bash(Remove-Item -Path \"A:\\\\GiTea\\\\AccountForger\\\\browser\\\\instagram_video_bypass.py\" -Force)", + "Bash(dir:*)", + "Bash(python -m py_compile:*)", + "WebSearch", + 
"Bash(grep:*)" ], "deny": [], "ask": [], diff --git a/CLAUDE_PROJECT_README.md b/CLAUDE_PROJECT_README.md index 8e0ef3e..ccb3f0d 100644 --- a/CLAUDE_PROJECT_README.md +++ b/CLAUDE_PROJECT_README.md @@ -5,9 +5,9 @@ ## Project Overview - **Path**: `A:\GiTea\AccountForger` -- **Files**: 1394 files -- **Size**: 257.7 MB -- **Last Modified**: 2025-11-27 19:34 +- **Files**: 1574 files +- **Size**: 269.4 MB +- **Last Modified**: 2025-12-20 22:41 ## Technology Stack @@ -78,8 +78,7 @@ controllers/ │ ├── method_rotation_worker_mixin.py │ ├── ok_ru_controller.py │ ├── rotation_error_handler.py -│ ├── safe_imports.py -│ └── mixins +│ └── safe_imports.py database/ │ ├── accounts.db │ ├── account_repository.py @@ -106,7 +105,6 @@ domain/ │ │ ├── platform.py │ │ ├── rate_limit_policy.py │ │ └── __init__.py -│ ├── enums │ ├── repositories/ │ │ ├── analytics_repository.py │ │ ├── fingerprint_repository.py @@ -439,3 +437,6 @@ This project is managed with Claude Project Manager. To work with this project: - README updated on 2025-11-27 19:33:36 - README updated on 2025-11-27 19:34:18 - README updated on 2025-11-27 19:41:04 +- README updated on 2025-12-20 22:29:45 +- README updated on 2025-12-20 22:41:18 +- README updated on 2025-12-31 01:00:22 diff --git a/browser/cookie_consent_handler.py b/browser/cookie_consent_handler.py index 4143e50..586b5a0 100644 --- a/browser/cookie_consent_handler.py +++ b/browser/cookie_consent_handler.py @@ -1,11 +1,14 @@ """ Cookie Consent Handler für Browser-Sessions -Behandelt Cookie-Consent-Seiten bei der Session-Wiederherstellung +Behandelt Cookie-Consent-Seiten bei der Session-Wiederherstellung. +Enthält Anti-Detection-Maßnahmen wie Lese-Pausen. """ import logging -from typing import Optional +import time +import random +from typing import Optional, Any from playwright.sync_api import Page logger = logging.getLogger(__name__) @@ -15,20 +18,25 @@ class CookieConsentHandler: """Behandelt Cookie-Consent-Dialoge verschiedener Plattformen""" @staticmethod - def handle_instagram_consent(page: Page) -> bool: + def handle_instagram_consent(page: Page, human_behavior: Any = None) -> bool: """ - Behandelt Instagram's Cookie-Consent-Seite - + Behandelt Instagram's Cookie-Consent-Seite mit realistischer Lese-Zeit. + + Diese Methode simuliert menschliches Verhalten durch eine Pause + bevor der Cookie-Dialog geklickt wird. Echte Menschen lesen den + Cookie-Text bevor sie auf einen Button klicken. 
+ Args: page: Playwright Page-Objekt - + human_behavior: Optional HumanBehavior-Instanz für realistische Delays + Returns: bool: True wenn Consent behandelt wurde, False sonst """ try: # Warte kurz auf Seitenladung page.wait_for_load_state('networkidle', timeout=5000) - + # Prüfe ob wir auf der Cookie-Consent-Seite sind consent_indicators = [ # Deutsche Texte @@ -57,7 +65,28 @@ class CookieConsentHandler: button = page.locator(button_selector).first if button.is_visible(): logger.info(f"Found consent decline button: {button_selector}") - + + # ANTI-DETECTION: Realistische Lese-Pause bevor Cookie-Dialog geklickt wird + # Simuliert das Lesen der Cookie-Informationen (3-8 Sekunden) + if human_behavior and hasattr(human_behavior, 'anti_detection_delay'): + logger.debug("Cookie-Banner erkannt - simuliere Lesen...") + human_behavior.anti_detection_delay("cookie_reading") + else: + # Fallback ohne HumanBehavior + read_delay = random.uniform(3.0, 8.0) + logger.debug(f"Cookie-Banner Lese-Pause: {read_delay:.1f}s") + time.sleep(read_delay) + + # Gelegentlich etwas scrollen um "mehr zu lesen" (30% Chance) + if random.random() < 0.3: + try: + page.evaluate("window.scrollBy(0, 50)") + time.sleep(random.uniform(0.8, 1.5)) + page.evaluate("window.scrollBy(0, -50)") + time.sleep(random.uniform(0.3, 0.6)) + except: + pass + # Verwende robuste Click-Methoden für Cookie-Consent success = False try: @@ -233,19 +262,21 @@ class CookieConsentHandler: return False @staticmethod - def check_and_handle_consent(page: Page, platform: str = "instagram") -> bool: + def check_and_handle_consent(page: Page, platform: str = "instagram", + human_behavior: Any = None) -> bool: """ - Prüft und behandelt Cookie-Consent für die angegebene Plattform - + Prüft und behandelt Cookie-Consent für die angegebene Plattform. + Args: page: Playwright Page-Objekt platform: Plattform-Name (default: "instagram") - + human_behavior: Optional HumanBehavior-Instanz für realistische Delays + Returns: bool: True wenn Consent behandelt wurde, False sonst """ if platform.lower() == "instagram": - return CookieConsentHandler.handle_instagram_consent(page) + return CookieConsentHandler.handle_instagram_consent(page, human_behavior) else: logger.warning(f"No consent handler implemented for platform: {platform}") return False \ No newline at end of file diff --git a/browser/instagram_video_bypass.py b/browser/instagram_video_bypass.py deleted file mode 100644 index 2e27b53..0000000 --- a/browser/instagram_video_bypass.py +++ /dev/null @@ -1,521 +0,0 @@ -# Instagram Video Bypass - Emergency Deep Level Fixes -""" -Tiefgreifende Instagram Video Bypass Techniken -""" - -import logging -import time -import random -from typing import Any, Dict, Optional - -logger = logging.getLogger("instagram_video_bypass") - -class InstagramVideoBypass: - """Deep-level Instagram video bypass techniques""" - - def __init__(self, page: Any): - self.page = page - - def apply_emergency_bypass(self) -> None: - """Wendet Emergency Deep-Level Bypass an""" - - # 1. Complete Automation Signature Removal - automation_removal_script = """ - () => { - // Remove ALL automation signatures - - // 1. Navigator properties cleanup - delete navigator.__webdriver_script_fn; - delete navigator.__fxdriver_evaluate; - delete navigator.__driver_unwrapped; - delete navigator.__webdriver_unwrapped; - delete navigator.__driver_evaluate; - delete navigator.__selenium_unwrapped; - delete navigator.__fxdriver_unwrapped; - - // 2. 
Window properties cleanup - delete window.navigator.webdriver; - delete window.webdriver; - delete window.chrome.webdriver; - delete window.callPhantom; - delete window._phantom; - delete window.__nightmare; - delete window._selenium; - delete window.calledSelenium; - delete window.$cdc_asdjflasutopfhvcZLmcfl_; - delete window.$chrome_asyncScriptInfo; - delete window.__webdriver_evaluate; - delete window.__selenium_evaluate; - delete window.__webdriver_script_function; - delete window.__webdriver_script_func; - delete window.__webdriver_script_fn; - delete window.__fxdriver_evaluate; - delete window.__driver_unwrapped; - delete window.__webdriver_unwrapped; - delete window.__driver_evaluate; - delete window.__selenium_unwrapped; - delete window.__fxdriver_unwrapped; - - // 3. Document cleanup - delete document.__webdriver_script_fn; - delete document.__selenium_unwrapped; - delete document.__webdriver_unwrapped; - delete document.__driver_evaluate; - delete document.__webdriver_evaluate; - delete document.__fxdriver_evaluate; - delete document.__fxdriver_unwrapped; - delete document.__driver_unwrapped; - - // 4. Chrome object enhancement - if (!window.chrome) { - window.chrome = {}; - } - if (!window.chrome.runtime) { - window.chrome.runtime = { - onConnect: {addListener: function() {}}, - onMessage: {addListener: function() {}}, - connect: function() { return {postMessage: function() {}, onMessage: {addListener: function() {}}} } - }; - } - if (!window.chrome.app) { - window.chrome.app = { - isInstalled: false, - InstallState: {DISABLED: 'disabled', INSTALLED: 'installed', NOT_INSTALLED: 'not_installed'}, - RunningState: {CANNOT_RUN: 'cannot_run', READY_TO_RUN: 'ready_to_run', RUNNING: 'running'} - }; - } - - // 5. Plugin array enhancement - const fakePlugins = [ - {name: 'Chrome PDF Plugin', filename: 'internal-pdf-viewer', description: 'Portable Document Format'}, - {name: 'Chrome PDF Viewer', filename: 'mhjfbmdgcfjbbpaeojofohoefgiehjai', description: 'Portable Document Format'}, - {name: 'Native Client', filename: 'internal-nacl-plugin', description: 'Native Client'} - ]; - - Object.defineProperty(navigator, 'plugins', { - get: () => { - const pluginArray = [...fakePlugins]; - pluginArray.length = fakePlugins.length; - pluginArray.item = function(index) { return this[index] || null; }; - pluginArray.namedItem = function(name) { return this.find(p => p.name === name) || null; }; - pluginArray.refresh = function() {}; - return pluginArray; - }, - configurable: true - }); - } - """ - - # 2. Instagram-specific video API spoofing - instagram_video_api_script = """ - () => { - // Instagram Video API Deep Spoofing - - // 1. MSE (Media Source Extensions) proper support - if (window.MediaSource) { - const originalIsTypeSupported = MediaSource.isTypeSupported; - MediaSource.isTypeSupported = function(type) { - const supportedTypes = [ - 'video/mp4; codecs="avc1.42E01E"', - 'video/mp4; codecs="avc1.4D401F"', - 'video/mp4; codecs="avc1.640028"', - 'video/webm; codecs="vp8"', - 'video/webm; codecs="vp9"', - 'audio/mp4; codecs="mp4a.40.2"', - 'audio/webm; codecs="opus"' - ]; - - if (supportedTypes.includes(type)) { - return true; - } - return originalIsTypeSupported.call(this, type); - }; - } - - // 2. 
Encrypted Media Extensions deep spoofing - if (navigator.requestMediaKeySystemAccess) { - const originalRequestAccess = navigator.requestMediaKeySystemAccess; - navigator.requestMediaKeySystemAccess = function(keySystem, supportedConfigurations) { - if (keySystem === 'com.widevine.alpha') { - return Promise.resolve({ - keySystem: 'com.widevine.alpha', - getConfiguration: () => ({ - initDataTypes: ['cenc', 'keyids', 'webm'], - audioCapabilities: [ - {contentType: 'audio/mp4; codecs="mp4a.40.2"', robustness: 'SW_SECURE_CRYPTO'}, - {contentType: 'audio/webm; codecs="opus"', robustness: 'SW_SECURE_CRYPTO'} - ], - videoCapabilities: [ - {contentType: 'video/mp4; codecs="avc1.42E01E"', robustness: 'SW_SECURE_DECODE'}, - {contentType: 'video/mp4; codecs="avc1.4D401F"', robustness: 'SW_SECURE_DECODE'}, - {contentType: 'video/webm; codecs="vp9"', robustness: 'SW_SECURE_DECODE'} - ], - distinctiveIdentifier: 'optional', - persistentState: 'required', - sessionTypes: ['temporary', 'persistent-license'] - }), - createMediaKeys: () => Promise.resolve({ - createSession: (sessionType) => { - const session = { - sessionId: 'session_' + Math.random().toString(36).substr(2, 9), - expiration: NaN, - closed: Promise.resolve(), - keyStatuses: new Map(), - addEventListener: function() {}, - removeEventListener: function() {}, - generateRequest: function(initDataType, initData) { - setTimeout(() => { - if (this.onmessage) { - this.onmessage({ - type: 'message', - message: new ArrayBuffer(8) - }); - } - }, 100); - return Promise.resolve(); - }, - load: function() { return Promise.resolve(false); }, - update: function(response) { - setTimeout(() => { - if (this.onkeystatuseschange) { - this.onkeystatuseschange(); - } - }, 50); - return Promise.resolve(); - }, - close: function() { return Promise.resolve(); }, - remove: function() { return Promise.resolve(); } - }; - - // Add event target methods - session.dispatchEvent = function() {}; - - return session; - }, - setServerCertificate: () => Promise.resolve(true) - }) - }); - } - return originalRequestAccess.apply(this, arguments); - }; - } - - // 3. Hardware media key handling - if (navigator.mediaSession) { - navigator.mediaSession.setActionHandler = function() {}; - navigator.mediaSession.playbackState = 'playing'; - } else { - navigator.mediaSession = { - metadata: null, - playbackState: 'playing', - setActionHandler: function() {}, - setPositionState: function() {} - }; - } - - // 4. Picture-in-Picture API - if (!document.pictureInPictureEnabled) { - Object.defineProperty(document, 'pictureInPictureEnabled', { - get: () => true, - configurable: true - }); - } - - // 5. Web Audio API enhancement for video - if (window.AudioContext || window.webkitAudioContext) { - const AudioCtx = window.AudioContext || window.webkitAudioContext; - const originalAudioContext = AudioCtx; - - window.AudioContext = function(...args) { - const ctx = new originalAudioContext(...args); - - // Override audio context properties for consistency - Object.defineProperty(ctx, 'baseLatency', { - get: () => 0.01, - configurable: true - }); - - Object.defineProperty(ctx, 'outputLatency', { - get: () => 0.02, - configurable: true - }); - - return ctx; - }; - - // Copy static methods - Object.keys(originalAudioContext).forEach(key => { - window.AudioContext[key] = originalAudioContext[key]; - }); - } - } - """ - - # 3. 
Network request interception for video - network_interception_script = """ - () => { - // Advanced network request interception for Instagram videos - - const originalFetch = window.fetch; - window.fetch = function(input, init) { - const url = typeof input === 'string' ? input : input.url; - - // Instagram video CDN requests - if (url.includes('instagram.com') || url.includes('fbcdn.net') || url.includes('cdninstagram.com')) { - const enhancedInit = { - ...init, - headers: { - ...init?.headers, - 'Accept': '*/*', - 'Accept-Encoding': 'identity;q=1, *;q=0', - 'Accept-Language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7', - 'Cache-Control': 'no-cache', - 'DNT': '1', - 'Origin': 'https://www.instagram.com', - 'Pragma': 'no-cache', - 'Referer': 'https://www.instagram.com/', - 'Sec-Ch-Ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"', - 'Sec-Ch-Ua-Mobile': '?0', - 'Sec-Ch-Ua-Platform': '"Windows"', - 'Sec-Fetch-Dest': 'video', - 'Sec-Fetch-Mode': 'cors', - 'Sec-Fetch-Site': 'cross-site', - 'User-Agent': navigator.userAgent, - 'X-Asbd-Id': '129477', - 'X-Fb-Lsd': document.querySelector('[name="fb_dtsg"]')?.value || '', - 'X-Instagram-Ajax': '1' - } - }; - - // Remove problematic headers that might indicate automation - delete enhancedInit.headers['sec-ch-ua-arch']; - delete enhancedInit.headers['sec-ch-ua-bitness']; - delete enhancedInit.headers['sec-ch-ua-full-version']; - delete enhancedInit.headers['sec-ch-ua-full-version-list']; - delete enhancedInit.headers['sec-ch-ua-model']; - delete enhancedInit.headers['sec-ch-ua-wow64']; - - return originalFetch.call(this, input, enhancedInit); - } - - return originalFetch.apply(this, arguments); - }; - - // XMLHttpRequest interception - const originalXHROpen = XMLHttpRequest.prototype.open; - XMLHttpRequest.prototype.open = function(method, url, async, user, password) { - this._url = url; - return originalXHROpen.apply(this, arguments); - }; - - const originalXHRSend = XMLHttpRequest.prototype.send; - XMLHttpRequest.prototype.send = function(body) { - if (this._url && (this._url.includes('instagram.com') || this._url.includes('fbcdn.net'))) { - // Add video-specific headers - this.setRequestHeader('Accept', '*/*'); - this.setRequestHeader('Accept-Language', 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7'); - this.setRequestHeader('Cache-Control', 'no-cache'); - this.setRequestHeader('Pragma', 'no-cache'); - this.setRequestHeader('X-Requested-With', 'XMLHttpRequest'); - } - return originalXHRSend.apply(this, arguments); - }; - } - """ - - # 4. 
Timing and behavior normalization - timing_script = """ - () => { - // Normalize timing functions to avoid detection - - // Performance timing spoofing - if (window.performance && window.performance.timing) { - const timing = performance.timing; - const now = Date.now(); - - Object.defineProperty(performance.timing, 'navigationStart', { - get: () => now - Math.floor(Math.random() * 1000) - 1000, - configurable: true - }); - - Object.defineProperty(performance.timing, 'loadEventEnd', { - get: () => now - Math.floor(Math.random() * 500), - configurable: true - }); - } - - // Date/Time consistency - const originalDate = Date; - const startTime = originalDate.now(); - - Date.now = function() { - return startTime + (originalDate.now() - startTime); - }; - - // Remove timing inconsistencies that indicate automation - const originalSetTimeout = window.setTimeout; - window.setTimeout = function(fn, delay, ...args) { - // Add slight randomization to timing - const randomDelay = delay + Math.floor(Math.random() * 10) - 5; - return originalSetTimeout.call(this, fn, Math.max(0, randomDelay), ...args); - }; - } - """ - - # Apply all scripts in sequence - scripts = [ - automation_removal_script, - instagram_video_api_script, - network_interception_script, - timing_script - ] - - for i, script in enumerate(scripts): - try: - self.page.add_init_script(script) - logger.info(f"Applied emergency bypass script {i+1}/4") - time.sleep(0.1) # Small delay between scripts - except Exception as e: - logger.error(f"Failed to apply emergency bypass script {i+1}: {e}") - - logger.info("Emergency Instagram video bypass applied") - - def inject_video_session_data(self) -> None: - """Injiziert realistische Video-Session-Daten""" - - session_script = """ - () => { - // Inject realistic video session data - - // 1. Video viewing history - localStorage.setItem('instagram_video_history', JSON.stringify({ - last_viewed: Date.now() - Math.floor(Math.random() * 86400000), - view_count: Math.floor(Math.random() * 50) + 10, - preferences: { - autoplay: true, - quality: 'auto', - captions: false - } - })); - - // 2. Media session state - localStorage.setItem('media_session_state', JSON.stringify({ - hasInteracted: true, - lastInteraction: Date.now() - Math.floor(Math.random() * 3600000), - playbackRate: 1, - volume: 0.8 - })); - - // 3. DRM license cache simulation - sessionStorage.setItem('drm_licenses', JSON.stringify({ - widevine: { - version: '4.10.2449.0', - lastUpdate: Date.now() - Math.floor(Math.random() * 604800000), - status: 'valid' - } - })); - - // 4. 
Instagram session tokens - const csrfToken = document.querySelector('[name="csrfmiddlewaretoken"]')?.value || - document.querySelector('meta[name="csrf-token"]')?.content || - 'missing'; - - if (csrfToken !== 'missing') { - sessionStorage.setItem('csrf_token', csrfToken); - } - } - """ - - try: - self.page.evaluate(session_script) - logger.info("Video session data injected successfully") - except Exception as e: - logger.error(f"Failed to inject video session data: {e}") - - def simulate_user_interaction(self) -> None: - """Simuliert authentische Benutzerinteraktion""" - - try: - # Random mouse movements - for _ in range(3): - x = random.randint(100, 800) - y = random.randint(100, 600) - self.page.mouse.move(x, y) - time.sleep(random.uniform(0.1, 0.3)) - - # Random scroll - self.page.mouse.wheel(0, random.randint(-200, 200)) - time.sleep(random.uniform(0.2, 0.5)) - - # Click somewhere safe (not on video) - self.page.click('body', position={'x': random.randint(50, 100), 'y': random.randint(50, 100)}) - time.sleep(random.uniform(0.3, 0.7)) - - logger.info("User interaction simulation completed") - - except Exception as e: - logger.error(f"Failed to simulate user interaction: {e}") - - def check_video_errors(self) -> Dict[str, Any]: - """Überprüft Video-Fehler und DRM-Status""" - - try: - result = self.page.evaluate(""" - () => { - const errors = []; - const diagnostics = { - drm_support: false, - media_source: false, - codec_support: {}, - video_elements: 0, - error_messages: [] - }; - - // Check for DRM support - if (navigator.requestMediaKeySystemAccess) { - diagnostics.drm_support = true; - } - - // Check Media Source Extensions - if (window.MediaSource) { - diagnostics.media_source = true; - diagnostics.codec_support = { - h264: MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E"'), - vp9: MediaSource.isTypeSupported('video/webm; codecs="vp9"'), - aac: MediaSource.isTypeSupported('audio/mp4; codecs="mp4a.40.2"') - }; - } - - // Count video elements - diagnostics.video_elements = document.querySelectorAll('video').length; - - // Look for error messages - const errorElements = document.querySelectorAll('[class*="error"], [class*="fail"]'); - errorElements.forEach(el => { - if (el.textContent.includes('Video') || el.textContent.includes('video')) { - diagnostics.error_messages.push(el.textContent.trim()); - } - }); - - // Console errors - const consoleErrors = []; - const originalConsoleError = console.error; - console.error = function(...args) { - consoleErrors.push(args.join(' ')); - originalConsoleError.apply(console, arguments); - }; - - return { - diagnostics, - console_errors: consoleErrors, - timestamp: Date.now() - }; - } - """) - - logger.info(f"Video diagnostics: {result}") - return result - - except Exception as e: - logger.error(f"Video error check failed: {e}") - return {} \ No newline at end of file diff --git a/browser/playwright_manager.py b/browser/playwright_manager.py index f7eadbb..555f68d 100644 --- a/browser/playwright_manager.py +++ b/browser/playwright_manager.py @@ -17,6 +17,7 @@ from infrastructure.services.browser_protection_service import BrowserProtection # Konfiguriere Logger logger = logging.getLogger("playwright_manager") + class PlaywrightManager: """ Verwaltet Browser-Sitzungen mit Playwright, einschließlich Stealth-Modus und Proxy-Einstellungen. 
@@ -25,6 +26,9 @@ class PlaywrightManager: # Klassen-Variable: Zählt aktive Browser-Instanzen (Feature 5: Browser-Instanz Schutz) _active_count = 0 + # Klassen-Variable: Referenz auf die zuletzt gestartete Instanz (für Cleanup) + _current_instance: "PlaywrightManager" = None + def __init__(self, headless: bool = False, proxy: Optional[Dict[str, str]] = None, @@ -149,24 +153,30 @@ class PlaywrightManager: # Feature 5: Browser-Instanz Schutz - Nur eine Instanz gleichzeitig if PlaywrightManager._active_count >= 1: - # Safety-Check: Prüfe ob Counter "hängt" (Absturz-Schutz) - # Wenn ProcessGuard NICHT locked ist, aber Counter > 0, dann ist Counter "tot" - from utils.process_guard import get_guard - guard = get_guard() + # Es gibt noch einen alten Browser - prüfe ob er geschlossen werden kann + old_instance = PlaywrightManager._current_instance - if not guard.is_locked(): - # Counter hängt! Process Guard ist frei, aber Counter sagt Browser läuft + if old_instance is not None and old_instance is not self: + # Alte Instanz existiert und ist nicht diese - schließen + logger.warning( + f"⚠️ BROWSER-CLEANUP: Alte Browser-Instanz wird geschlossen " + f"(Counter war {PlaywrightManager._active_count})" + ) + try: + old_instance.close() + except Exception as e: + logger.warning(f"Fehler beim Schließen alter Browser-Instanz: {e}") + + # Counter und Referenz zurücksetzen + PlaywrightManager._active_count = 0 + PlaywrightManager._current_instance = None + else: + # Keine alte Instanz gefunden, aber Counter > 0 - Safety Reset logger.warning( f"⚠️ SAFETY-RESET: _active_count war {PlaywrightManager._active_count}, " - f"aber ProcessGuard ist nicht locked. Counter wird zurückgesetzt." + f"aber keine alte Instanz gefunden. Counter wird zurückgesetzt." ) PlaywrightManager._active_count = 0 - else: - # Guard ist locked UND Counter ist > 0 → echte parallele Instanz - raise RuntimeError( - "Browser bereits aktiv. Nur eine Browser-Instanz gleichzeitig erlaubt. " - "Beenden Sie den aktuellen Prozess." - ) try: self.playwright = sync_playwright().start() @@ -281,8 +291,9 @@ class PlaywrightManager: self.browser.on("disconnected", self._on_browser_disconnected) logger.debug("Browser-Disconnect-Handler registriert") - # Feature 5: Browser-Instanz Counter erhöhen + # Feature 5: Browser-Instanz Counter erhöhen und aktuelle Instanz speichern PlaywrightManager._active_count += 1 + PlaywrightManager._current_instance = self logger.info(f"Browser gestartet (aktive Instanzen: {PlaywrightManager._active_count})") return self.page @@ -524,15 +535,22 @@ class PlaywrightManager: key = f"fill_{selector}" return self._retry_action(key, lambda: self.fill_form_field(selector, value, timeout)) - def click_element(self, selector: str, force: bool = False, timeout: int = 5000) -> bool: + def click_element(self, selector: str, force: bool = False, timeout: int = 5000, + use_bezier_mouse: bool = True) -> bool: """ Klickt auf ein Element mit Anti-Bot-Bypass-Strategien. 
- + + Diese Methode simuliert menschliches Klick-Verhalten durch: + - Bézier-Kurven-Mausbewegungen zum Element + - Natürliches Scrolling (auch bei sichtbaren Elementen) + - Variable Verzögerungen + Args: selector: Selektor für das Element force: Force-Click verwenden timeout: Timeout in Millisekunden - + use_bezier_mouse: Ob Bézier-Mausbewegung vor dem Klick verwendet werden soll + Returns: bool: True bei Erfolg, False bei Fehler """ @@ -541,24 +559,164 @@ class PlaywrightManager: element = self.wait_for_selector(selector, timeout) if not element: return False - - # Scroll zum Element - self.page.evaluate("element => element.scrollIntoView({ behavior: 'smooth', block: 'center' })", element) - time.sleep(random.uniform(0.3, 0.7)) - - # Menschenähnliches Verhalten - leichte Verzögerung vor dem Klick - time.sleep(random.uniform(0.2, 0.5)) - - # Element klicken - element.click(force=force, delay=random.uniform(20, 100)) - + + # Verbessertes Scrolling mit menschlichem Verhalten + self._human_scroll_to_element(element) + + # Bézier-Mausbewegung zum Element (Anti-Detection) + if use_bezier_mouse: + self._move_mouse_to_element_bezier(element) + + # Menschenähnliches Verhalten - variable Verzögerung vor dem Klick + time.sleep(random.uniform(0.15, 0.4)) + + # Element klicken mit variablem Delay + element.click(force=force, delay=random.uniform(30, 120)) + logger.info(f"Element geklickt: {selector}") return True - + except Exception as e: logger.error(f"Fehler beim Klicken auf {selector}: {e}") # Bei Fehlern verwende robuste Click-Strategien return self.robust_click(selector, timeout) + + def _human_scroll_to_element(self, element) -> None: + """ + Scrollt zum Element mit menschlichem Verhalten. + + Simuliert natürliches Scrollverhalten: + - Gelegentlich erst in falsche Richtung scrollen + - Auch bei sichtbaren Elementen leicht scrollen + - Variable Scroll-Geschwindigkeit + + Args: + element: Das Playwright ElementHandle + """ + try: + # Gelegentlich erst in "falsche" Richtung scrollen (15% Chance) + if random.random() < 0.15: + wrong_direction = random.choice(['up', 'down']) + scroll_amount = random.randint(50, 150) + if wrong_direction == 'up': + self.page.evaluate(f"window.scrollBy(0, -{scroll_amount})") + else: + self.page.evaluate(f"window.scrollBy(0, {scroll_amount})") + time.sleep(random.uniform(0.2, 0.5)) + logger.debug(f"Korrektur-Scroll: erst {wrong_direction}") + + # Scroll-Verhalten zufällig wählen + scroll_behavior = random.choice(['smooth', 'smooth', 'auto']) # 66% smooth + scroll_block = random.choice(['center', 'center', 'nearest']) # 66% center + + # Hauptscroll zum Element + scroll_script = f""" + (element) => {{ + const rect = element.getBoundingClientRect(); + const isFullyVisible = rect.top >= 0 && rect.bottom <= window.innerHeight; + + // Auch wenn sichtbar, leicht scrollen für natürliches Verhalten (60% Chance) + if (isFullyVisible && Math.random() < 0.6) {{ + const smallScroll = Math.floor(Math.random() * 60) - 30; + window.scrollBy(0, smallScroll); + }} + + // Zum Element scrollen + element.scrollIntoView({{ behavior: '{scroll_behavior}', block: '{scroll_block}' }}); + }} + """ + self.page.evaluate(scroll_script, element) + + # Variable Wartezeit nach Scroll + time.sleep(random.uniform(0.4, 1.0)) + + except Exception as e: + logger.warning(f"Fehler beim Human-Scroll: {e}") + # Fallback: einfaches Scroll + try: + self.page.evaluate("element => element.scrollIntoView({ behavior: 'smooth', block: 'center' })", element) + time.sleep(random.uniform(0.3, 0.7)) + except: + pass + + 
def _move_mouse_to_element_bezier(self, element) -> None: + """ + Bewegt die Maus mit Bézier-Kurve zum Element. + + Simuliert realistische menschliche Mausbewegungen: + - Kubische Bézier-Kurve mit 2 Kontrollpunkten + - Variable Geschwindigkeit (langsamer am Anfang/Ende) + - Leichtes Zittern für Natürlichkeit + - Gelegentliche Mikro-Pausen + + Args: + element: Das Playwright ElementHandle + """ + try: + # Aktuelle Mausposition oder zufälliger Startpunkt + viewport = self.page.viewport_size + if viewport: + current_x = random.randint(100, viewport['width'] - 100) + current_y = random.randint(100, viewport['height'] - 100) + else: + current_x = random.randint(100, 1820) + current_y = random.randint(100, 980) + + # Zielpunkt: Mitte des Elements mit leichter Variation + box = element.bounding_box() + if not box: + logger.debug("Kein Bounding-Box für Element, überspringe Bézier-Bewegung") + return + + # Zielposition mit leichter Variation (nicht exakt Mitte) + target_x = box['x'] + box['width'] / 2 + random.uniform(-5, 5) + target_y = box['y'] + box['height'] / 2 + random.uniform(-5, 5) + + # Entfernung berechnen + distance = ((target_x - current_x)**2 + (target_y - current_y)**2)**0.5 + + # Anzahl der Schritte basierend auf Entfernung (mehr Schritte = flüssiger) + steps = max(25, int(distance / 8)) + + # Kontrollpunkte für kubische Bézier-Kurve + ctrl_variance = distance / 4 + ctrl1_x = current_x + (target_x - current_x) * random.uniform(0.2, 0.4) + random.uniform(-ctrl_variance, ctrl_variance) + ctrl1_y = current_y + (target_y - current_y) * random.uniform(0.1, 0.3) + random.uniform(-ctrl_variance, ctrl_variance) + ctrl2_x = current_x + (target_x - current_x) * random.uniform(0.6, 0.8) + random.uniform(-ctrl_variance, ctrl_variance) + ctrl2_y = current_y + (target_y - current_y) * random.uniform(0.7, 0.9) + random.uniform(-ctrl_variance, ctrl_variance) + + # Mausbewegung ausführen + for i in range(steps + 1): + t = i / steps + + # Kubische Bézier-Formel: B(t) = (1-t)³P₀ + 3(1-t)²tP₁ + 3(1-t)t²P₂ + t³P₃ + x = (1-t)**3 * current_x + 3*(1-t)**2*t * ctrl1_x + 3*(1-t)*t**2 * ctrl2_x + t**3 * target_x + y = (1-t)**3 * current_y + 3*(1-t)**2*t * ctrl1_y + 3*(1-t)*t**2 * ctrl2_y + t**3 * target_y + + # Leichtes Zittern hinzufügen für Realismus + x += random.uniform(-1, 1) + y += random.uniform(-1, 1) + + # Maus bewegen + self.page.mouse.move(x, y) + + # Variable Geschwindigkeit: langsamer am Anfang/Ende, schneller in der Mitte + if i < steps * 0.15 or i > steps * 0.85: + # Langsamer am Anfang und Ende (Beschleunigung/Abbremsen) + time.sleep(random.uniform(0.008, 0.018)) + else: + # Schneller in der Mitte + time.sleep(random.uniform(0.003, 0.008)) + + # Gelegentliche Mikro-Pause (5% Chance) - simuliert Zögern + if random.random() < 0.05: + time.sleep(random.uniform(0.02, 0.08)) + + logger.debug(f"Bézier-Mausbewegung: ({current_x:.0f},{current_y:.0f}) -> ({target_x:.0f},{target_y:.0f})") + + except Exception as e: + logger.warning(f"Bézier-Mausbewegung fehlgeschlagen: {e}") + # Kein Fallback nötig - Klick funktioniert auch ohne Mausbewegung def robust_click(self, selector: str, timeout: int = 5000) -> bool: """ @@ -1017,6 +1175,10 @@ class PlaywrightManager: else: logger.warning("Browser disconnected aber Counter war bereits 0") + # Aktuelle Instanz-Referenz löschen wenn diese Instanz die aktuelle war + if PlaywrightManager._current_instance is self: + PlaywrightManager._current_instance = None + def close(self): """Schließt den Browser und gibt Ressourcen frei.""" try: @@ -1072,6 +1234,10 @@ class 
PlaywrightManager: else: logger.debug("Counter wurde bereits durch disconnected-Event dekrementiert") + # Aktuelle Instanz-Referenz löschen wenn diese Instanz die aktuelle war + if PlaywrightManager._current_instance is self: + PlaywrightManager._current_instance = None + logger.info("Browser-Sitzung erfolgreich geschlossen") except Exception as e: diff --git a/check_rotation_system.py b/check_rotation_system.py deleted file mode 100644 index fe4cd15..0000000 --- a/check_rotation_system.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python3 -""" -Quick check script to verify method rotation system status. -Run this to ensure everything is working before starting main.py -""" - -import sys -import os -from pathlib import Path - -# Add project root to path -project_root = Path(__file__).parent -sys.path.insert(0, str(project_root)) - -def check_imports(): - """Check if all rotation system imports work""" - print("🔍 Checking imports...") - - try: - from domain.entities.method_rotation import MethodStrategy, RotationSession - print("✅ Domain entities: OK") - except Exception as e: - print(f"❌ Domain entities: {e}") - return False - - try: - from application.use_cases.method_rotation_use_case import MethodRotationUseCase - print("✅ Use cases: OK") - except Exception as e: - print(f"❌ Use cases: {e}") - return False - - try: - from controllers.platform_controllers.method_rotation_mixin import MethodRotationMixin - print("✅ Controller mixin: OK") - except Exception as e: - print(f"❌ Controller mixin: {e}") - return False - - return True - -def check_database(): - """Check database and tables""" - print("\n🗄️ Checking database...") - - db_path = project_root / "database" / "accounts.db" - if not db_path.exists(): - print(f"❌ Database not found: {db_path}") - return False - - print(f"✅ Database found: {db_path}") - - try: - import sqlite3 - conn = sqlite3.connect(str(db_path)) - cursor = conn.cursor() - - # Check for rotation tables - cursor.execute(""" - SELECT name FROM sqlite_master - WHERE type='table' AND ( - name = 'method_strategies' OR - name = 'rotation_sessions' OR - name = 'platform_method_states' - ) - """) - tables = [row[0] for row in cursor.fetchall()] - conn.close() - - if len(tables) >= 3: - print(f"✅ Rotation tables found: {tables}") - return True - else: - print(f"⚠️ Missing rotation tables. 
Found: {tables}") - return False - - except Exception as e: - print(f"❌ Database check failed: {e}") - return False - -def check_config(): - """Check configuration files""" - print("\n⚙️ Checking configuration...") - - config_path = project_root / "config" / "method_rotation_config.json" - if config_path.exists(): - print("✅ Rotation config found") - return True - else: - print("⚠️ Rotation config not found (will use defaults)") - return True # Not critical - -def check_controllers(): - """Check if controllers can be imported""" - print("\n🎮 Checking controllers...") - - try: - from controllers.platform_controllers.base_controller import BasePlatformController - print("✅ Base controller: OK") - - from controllers.platform_controllers.instagram_controller import InstagramController - print("✅ Instagram controller: OK") - - return True - except Exception as e: - print(f"❌ Controller check failed: {e}") - return False - -def main(): - """Main check function""" - print("🔧 Method Rotation System - Status Check") - print("=" * 50) - - checks = [ - ("Imports", check_imports), - ("Database", check_database), - ("Config", check_config), - ("Controllers", check_controllers) - ] - - all_good = True - for name, check_func in checks: - try: - result = check_func() - if not result: - all_good = False - except Exception as e: - print(f"❌ {name} check crashed: {e}") - all_good = False - - print("\n" + "=" * 50) - if all_good: - print("✅ Method rotation system is ready!") - print("🚀 You can safely start main.py") - print("\n💡 Expected behavior:") - print(" - Account creation works as before") - print(" - Additional rotation logs will appear") - print(" - Automatic method switching on failures") - print(" - Graceful fallback if any issues occur") - else: - print("⚠️ Some issues detected, but main.py should still work") - print("🔄 Rotation system will fall back to original behavior") - print("\n🛠️ To fix issues:") - print(" 1. Run: python3 run_migration.py") - print(" 2. Check file permissions") - print(" 3. 
Restart main.py") - - print("\n📝 To test rotation manually:") - print(" - Create an account on any platform") - print(" - Check logs for rotation messages") - print(" - Simulate failures to see rotation in action") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/config/implementation_switch.py b/config/implementation_switch.py deleted file mode 100644 index 5dff498..0000000 --- a/config/implementation_switch.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -Einfacher Switch zwischen alter und neuer Implementation für schnelles Rollback -""" - - -class ImplementationSwitch: - """Einfacher Switch zwischen alter und neuer Implementation""" - - # Direkt aktivieren im Testbetrieb - USE_REFACTORED_CODE = True - - @classmethod - def rollback_to_legacy(cls): - """Schneller Rollback wenn nötig""" - cls.USE_REFACTORED_CODE = False - print("WARNUNG: Rollback zu Legacy-Implementation aktiviert!") - - @classmethod - def use_refactored_code(cls): - """Aktiviert die refaktorierte Implementation""" - cls.USE_REFACTORED_CODE = True - print("INFO: Refaktorierte Implementation aktiviert") - - @classmethod - def is_refactored_active(cls) -> bool: - """Prüft ob refaktorierte Implementation aktiv ist""" - return cls.USE_REFACTORED_CODE - - @classmethod - def get_status(cls) -> str: - """Gibt den aktuellen Status zurück""" - if cls.USE_REFACTORED_CODE: - return "Refaktorierte Implementation (NEU)" - else: - return "Legacy Implementation (ALT)" \ No newline at end of file diff --git a/config/license_config.json b/config/license_config.json deleted file mode 100644 index 73857f6..0000000 --- a/config/license_config.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "key": "", - "status": "inactive", - "hardware_id": "", - "activation_date": null, - "expiry_date": null, - "features": [], - "last_check": null, - "session_ip_mode": "auto", - "ip_fallback": "0.0.0.0" - } \ No newline at end of file diff --git a/controllers/platform_controllers/base_controller.py b/controllers/platform_controllers/base_controller.py index d60fdf2..3cf1d69 100644 --- a/controllers/platform_controllers/base_controller.py +++ b/controllers/platform_controllers/base_controller.py @@ -255,21 +255,7 @@ class BasePlatformController(QObject): # Normale Verarbeitung self.handle_account_created(result) - - def stop_account_creation(self): - """Stoppt die Account-Erstellung""" - if self.worker_thread and self.worker_thread.isRunning(): - self.worker_thread.stop() - generator_tab = self.get_generator_tab() - generator_tab.add_log(f"{self.platform_name}-Account-Erstellung wurde abgebrochen") - generator_tab.set_running(False) - generator_tab.set_progress(0) - - # Forge-Dialog schließen falls vorhanden - if hasattr(self, 'forge_dialog') and self.forge_dialog: - self.forge_dialog.close() - self.forge_dialog = None - + def handle_account_created(self, result): """ Verarbeitet erfolgreich erstellte Accounts. @@ -287,19 +273,14 @@ class BasePlatformController(QObject): Diese Methode kann von Unterklassen überschrieben werden für spezielle Anforderungen. Sie stellt sicher dass: - 1. Der Process Guard freigegeben wird - 2. Der Worker-Thread gestoppt wird - 3. Die UI zurückgesetzt wird - 4. Dialoge geschlossen werden - """ - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - self.logger.info("Guard freigegeben bei Stop (BaseController)") + 1. Der Worker-Thread gestoppt wird (Worker gibt Guard frei mit release()) + 2. 
Die UI zurückgesetzt wird + 3. Dialoge geschlossen werden - # Worker stoppen falls vorhanden + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + User-Abbruch zählt NICHT als Failure. + """ + # Worker stoppen falls vorhanden (Worker.stop() gibt Guard frei) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() generator_tab = self.get_generator_tab() diff --git a/controllers/platform_controllers/base_worker_thread.py b/controllers/platform_controllers/base_worker_thread.py index 8d2575b..df1c042 100644 --- a/controllers/platform_controllers/base_worker_thread.py +++ b/controllers/platform_controllers/base_worker_thread.py @@ -8,19 +8,23 @@ from typing import Dict, Any, Optional from utils.text_similarity import TextSimilarity from domain.value_objects.browser_protection_style import BrowserProtectionStyle, ProtectionLevel import traceback +import logging +import threading + +logger = logging.getLogger(__name__) class BaseAccountCreationWorkerThread(QThread): """Basis-Klasse für alle Platform Worker Threads""" - + # Signals MÜSSEN identisch zu bestehenden sein update_signal = pyqtSignal(str) log_signal = pyqtSignal(str) progress_signal = pyqtSignal(int) finished_signal = pyqtSignal(dict) error_signal = pyqtSignal(str) - - def __init__(self, params: Dict[str, Any], platform_name: str, + + def __init__(self, params: Dict[str, Any], platform_name: str, session_controller: Optional[Any] = None, generator_tab: Optional[Any] = None): super().__init__() @@ -29,10 +33,19 @@ class BaseAccountCreationWorkerThread(QThread): self.session_controller = session_controller self.generator_tab = generator_tab self.running = True - + + # Thread-Safety Lock für Guard-Flags + self._guard_lock = threading.Lock() + + # Flag: Wurde der Worker durch User-Abbruch gestoppt? + self._was_cancelled = False + + # Flag: Wurde Guard bereits vom Worker freigegeben? + self._guard_released = False + # TextSimilarity für robustes Fehler-Matching self.text_similarity = TextSimilarity(default_threshold=0.8) - + # Platform-spezifische Error-Patterns (überschreibbar) self.error_interpretations = self.get_error_interpretations() @@ -209,10 +222,8 @@ class BaseAccountCreationWorkerThread(QThread): self.progress_signal.emit(0) # Reset progress on error finally: - # Feature 5: Process Guard freigeben - from utils.process_guard import get_guard - guard = get_guard() - guard.end(success) + # Process Guard freigeben (thread-safe) + self._release_guard_if_needed(success) def _interpret_error(self, error_message: str) -> str: """Interpretiert Fehler mit Fuzzy-Matching""" @@ -282,24 +293,47 @@ class BaseAccountCreationWorkerThread(QThread): return save_result + def _release_guard_if_needed(self, success: bool = False, is_cancel: bool = False): + """ + Thread-safe Guard-Freigabe. + + Args: + success: War der Prozess erfolgreich? (nur relevant bei end()) + is_cancel: Wird von stop() aufgerufen? 
(erzwingt release()) + """ + with self._guard_lock: + if self._guard_released: + # Bereits freigegeben - nichts zu tun + return + + # Wenn von stop() aufgerufen, IMMER als Cancel markieren + if is_cancel: + self._was_cancelled = True + + from utils.process_guard import get_guard + guard = get_guard() + + if self._was_cancelled: + # User-Abbruch: release() zählt nicht als Failure + guard.release() + logger.info(f"{self.platform_name}: Guard released (User-Abbruch)") + else: + # Normale Beendigung: end() mit Erfolgs-Status + guard.end(success) + logger.info(f"{self.platform_name}: Guard ended (success={success})") + + self._guard_released = True + def stop(self): """ Stoppt den Thread sauber mit Guard-Freigabe. - WICHTIG: Guard wird SOFORT freigegeben, da terminate() den finally-Block überspringt. + User-Abbruch wird mit release() behandelt (zählt NICHT als Failure). """ - import logging - logger = logging.getLogger(__name__) - self.running = False - # Guard SOFORT freigeben bevor terminate() - # Grund: terminate() überspringt den finally-Block in run() - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - logger.info("Guard freigegeben bei Worker-Stop (vor terminate)") + # Guard thread-safe freigeben (is_cancel=True setzt _was_cancelled im Lock) + self._release_guard_if_needed(is_cancel=True) # Jetzt Thread beenden self.terminate() diff --git a/controllers/platform_controllers/facebook_controller.py b/controllers/platform_controllers/facebook_controller.py index f1e6d25..7392878 100644 --- a/controllers/platform_controllers/facebook_controller.py +++ b/controllers/platform_controllers/facebook_controller.py @@ -307,15 +307,13 @@ class FacebookController(BasePlatformController): self.forge_dialog = None def stop_account_creation(self): - """Stoppt die Facebook-Account-Erstellung mit Guard-Freigabe.""" - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - self.logger.info("Guard freigegeben bei Facebook Stop") + """ + Stoppt die Facebook-Account-Erstellung. - # Worker stoppen + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + User-Abbruch zählt NICHT als Failure. + """ + # Worker stoppen (Worker.stop() gibt Guard frei mit release()) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() generator_tab = self.get_generator_tab() diff --git a/controllers/platform_controllers/gmail_controller.py b/controllers/platform_controllers/gmail_controller.py index a7d888e..f48cfdb 100644 --- a/controllers/platform_controllers/gmail_controller.py +++ b/controllers/platform_controllers/gmail_controller.py @@ -198,17 +198,15 @@ class GmailController(BasePlatformController): logger.info(f"[GMAIL] start_account_creation abgeschlossen") def stop_account_creation(self): - """Stoppt die laufende Account-Erstellung mit Guard-Freigabe""" + """ + Stoppt die Gmail-Account-Erstellung. + + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + User-Abbruch zählt NICHT als Failure. 
+ """ logger.info("[GMAIL] Stoppe Account-Erstellung") - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - logger.info("Guard freigegeben bei Gmail Stop") - - # Worker stoppen + # Worker stoppen (Worker.stop() gibt Guard frei mit release()) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() self.worker_thread.wait() diff --git a/controllers/platform_controllers/instagram_controller.py b/controllers/platform_controllers/instagram_controller.py index a2d06af..5c4fccb 100644 --- a/controllers/platform_controllers/instagram_controller.py +++ b/controllers/platform_controllers/instagram_controller.py @@ -198,8 +198,9 @@ class InstagramController(BasePlatformController): is_valid, error_msg = self.validate_inputs(params) if not is_valid: # Guard freigeben da Worker nicht gestartet wird + # release() statt end() - Validierungsfehler ist kein "echter" Failure from utils.process_guard import get_guard - get_guard().end(success=False) + get_guard().release() self.get_generator_tab().show_error(error_msg) return @@ -213,8 +214,10 @@ class InstagramController(BasePlatformController): # Schmiedeanimation-Dialog erstellen und anzeigen parent_widget = generator_tab.window() # Hauptfenster als Parent self.forge_dialog = ForgeAnimationDialog(parent_widget, "Instagram") - self.forge_dialog.cancel_clicked.connect(self.stop_account_creation) - self.forge_dialog.closed.connect(self.stop_account_creation) + + # NUR cancel_clicked verbinden - closed wird durch close() in stop_account_creation + # getriggert und würde sonst zu Doppelaufrufen führen + self.forge_dialog.cancel_clicked.connect(self._on_user_cancel) # Fensterposition vom Hauptfenster holen if parent_widget: @@ -276,28 +279,35 @@ class InstagramController(BasePlatformController): # Kritischer Fehler VOR Worker-Start → Guard freigeben! logger.error(f"Fehler beim Start der Account-Erstellung: {e}", exc_info=True) + # release() - technischer Fehler vor Start ist kein User-Failure from utils.process_guard import get_guard - get_guard().end(success=False) + get_guard().release() # Dialog schließen falls vorhanden if hasattr(self, 'forge_dialog') and self.forge_dialog: self.forge_dialog.close() + self.forge_dialog = None # UI zurücksetzen generator_tab = self.get_generator_tab() generator_tab.set_running(False) generator_tab.show_error(f"Fehler beim Start: {str(e)}") - def stop_account_creation(self): - """Stoppt die Instagram-Account-Erstellung mit Guard-Freigabe.""" - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - self.logger.info("Guard freigegeben bei Instagram Stop") + def _on_user_cancel(self): + """ + Handler für User-Abbruch (Cancel-Button im Dialog). + Ruft stop_account_creation auf und verhindert Doppelaufrufe. + """ + self.stop_account_creation() - # Worker stoppen + def stop_account_creation(self): + """ + Stoppt die Instagram-Account-Erstellung. + + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + Der Controller macht KEINE eigene Guard-Freigabe mehr. 
+ """ + # Worker stoppen (Worker.stop() gibt Guard frei mit release()) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() generator_tab = self.get_generator_tab() diff --git a/controllers/platform_controllers/method_rotation_mixin.py b/controllers/platform_controllers/method_rotation_mixin.py index 2c8fda2..5319e08 100644 --- a/controllers/platform_controllers/method_rotation_mixin.py +++ b/controllers/platform_controllers/method_rotation_mixin.py @@ -267,17 +267,28 @@ class MethodRotationMixin: def _create_rotation_context(self, params: Dict[str, Any]) -> RotationContext: """ Create rotation context from account creation parameters. - + Args: params: Account creation parameters - + Returns: RotationContext for method selection """ + # Handle both BrowserFingerprint objects and dictionaries + fingerprint_data = params.get('fingerprint') + fingerprint_id = None + if fingerprint_data: + if hasattr(fingerprint_data, 'fingerprint_id'): + # BrowserFingerprint object + fingerprint_id = fingerprint_data.fingerprint_id + elif isinstance(fingerprint_data, dict): + # Dictionary + fingerprint_id = fingerprint_data.get('fingerprint_id') + return RotationContext( platform=self.platform_name.lower(), account_id=params.get('account_id'), - fingerprint_id=params.get('fingerprint', {}).get('fingerprint_id'), + fingerprint_id=fingerprint_id, excluded_methods=params.get('_excluded_methods', []), max_risk_level=RiskLevel(params.get('_max_risk_level', 'HIGH')), emergency_mode=params.get('_emergency_mode', False), diff --git a/controllers/platform_controllers/ok_ru_controller.py b/controllers/platform_controllers/ok_ru_controller.py index a574246..2685ba9 100644 --- a/controllers/platform_controllers/ok_ru_controller.py +++ b/controllers/platform_controllers/ok_ru_controller.py @@ -131,15 +131,13 @@ class OkRuController(BasePlatformController): self.forge_dialog.show() def stop_account_creation(self): - """Stoppt die OK.ru-Account-Erstellung mit Guard-Freigabe.""" - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - self.logger.info("Guard freigegeben bei OK.ru Stop") + """ + Stoppt die OK.ru-Account-Erstellung. - # Worker stoppen + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + User-Abbruch zählt NICHT als Failure. + """ + # Worker stoppen (Worker.stop() gibt Guard frei mit release()) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() generator_tab = self.get_generator_tab() diff --git a/controllers/platform_controllers/tiktok_controller.py b/controllers/platform_controllers/tiktok_controller.py index d4fa361..2b05149 100644 --- a/controllers/platform_controllers/tiktok_controller.py +++ b/controllers/platform_controllers/tiktok_controller.py @@ -273,15 +273,13 @@ class TikTokController(BasePlatformController): self.forge_dialog.show() def stop_account_creation(self): - """Stoppt die TikTok-Account-Erstellung mit Guard-Freigabe.""" - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - self.logger.info("Guard freigegeben bei TikTok Stop") + """ + Stoppt die TikTok-Account-Erstellung. - # Worker stoppen + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + User-Abbruch zählt NICHT als Failure. 
+ """ + # Worker stoppen (Worker.stop() gibt Guard frei mit release()) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() generator_tab = self.get_generator_tab() diff --git a/controllers/platform_controllers/x_controller.py b/controllers/platform_controllers/x_controller.py index f5a24ea..273a9e1 100644 --- a/controllers/platform_controllers/x_controller.py +++ b/controllers/platform_controllers/x_controller.py @@ -271,15 +271,13 @@ class XController(BasePlatformController): self.forge_dialog.show() def stop_account_creation(self): - """Stoppt die X-Account-Erstellung mit Guard-Freigabe.""" - # Guard-Freigabe (wichtig: VOR Worker-Stop) - from utils.process_guard import get_guard - guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - self.logger.info("Guard freigegeben bei X Stop") + """ + Stoppt die X-Account-Erstellung. - # Worker stoppen + WICHTIG: Guard-Freigabe erfolgt im Worker.stop() mit release(). + User-Abbruch zählt NICHT als Failure. + """ + # Worker stoppen (Worker.stop() gibt Guard frei mit release()) if self.worker_thread and self.worker_thread.isRunning(): self.worker_thread.stop() generator_tab = self.get_generator_tab() diff --git a/controllers/session_controller.py b/controllers/session_controller.py index 3c7b7e4..cbe118d 100644 --- a/controllers/session_controller.py +++ b/controllers/session_controller.py @@ -118,27 +118,28 @@ class SessionController(QObject): else: error_msg = f"Account mit ID {account_id} nicht gefunden" logger.error(error_msg) - # Feature 5: Guard freigeben da Worker nicht gestartet wird - guard.end(success=False) + # Guard freigeben - kein User-Fehler, daher release() + guard.release() self.login_failed.emit(account_id, error_msg) except Exception as e: logger.error(f"Fehler beim Ein-Klick-Login: {e}") - # Feature 5: Guard freigeben bei Fehler vor Worker-Start - guard.end(success=False) + # Guard freigeben - technischer Fehler vor Start, daher release() + guard.release() self.login_failed.emit(account_id, str(e)) - + def _cancel_login(self, account_id: str): - """Bricht den Login-Prozess ab mit Guard-Freigabe""" + """ + Bricht den Login-Prozess ab. + User-Abbruch zählt NICHT als Failure. + """ logger.info(f"Login für Account {account_id} wurde abgebrochen") - # Guard IMMER freigeben bei Cancel (einfacher + robuster) - # Doppelte Freigabe ist kein Problem (wird von ProcessGuard ignoriert) + # Guard freigeben - User-Abbruch, daher release() from utils.process_guard import get_guard guard = get_guard() - if guard.is_locked(): - guard.end(success=False) - logger.info("Guard freigegeben bei Login-Cancel") + guard.release() # Idempotent - mehrfacher Aufruf ist sicher + logger.info("Guard released bei Login-Cancel") # Dialog schließen if hasattr(self, 'login_dialog') and self.login_dialog: diff --git a/database/accounts.db b/database/accounts.db index 22121747c2d9669eecf5400faf2ce659cfcf805f..a62213d04c0fc033c63936a83a0f1a0be83871f5 100644 GIT binary patch delta 2077 zcmb7FTWnNC7(QoqX1l%YrL}asrR{FPQVN{*T;|+Lr7aXtN+KWygh)BNdk&ORTDApC zL(<(6i$*Y2gx?DS8i|pDB8DtJ7>F+A=lMWm*aCmP5@tW`@BAn&&AO7UO#r_@=j*3GebOG zREZxJW})K$pq>oWlYx3N&;|G*?{e~wK~6J+=AX+ynSYF(dL<7ow&A>>ljjQY+j|n- z);9Z}X?!0GrZ2XU)?!@3J1+2SBD35G<=x{Qwpr#IQ=we}E;P65S*}LO6xhyj{heL? 
zJzZT^U%#E`=<4ZpIz&ZpS=PO+-B_vj)Mf*jbqN1~%a^xJ03%DSzPO!`MOhO>Ns!ct zs5Hn%gQVCRHEr9F1d}?tKy6tSOgR=4EY(pJNr}ajsFSMh4{+gH9gcUB;<|pTZ!^2P zGttu_%5;&kMvSTbgRKK=t(Gl|5)1oUzxW^vVF*!%mFrMh>*|R;kruYI|36uXW`iiR z5-7Vn%C6N#fjXKZSbAI+qM~g%k|bJDEt)cCkp(J?x?st1RiKg98;?jnr1vrrK*fsktp&;fPlV zaUL#rDy2ZnB~)BNnZCMiDw?WGl1}GEX7t&1d`3gWpqdHRR{S_5-^Vg6ID)ER)g4j< zjkRt+cu%2Rc=`kmfPNGCVC*E$1HA+Lptqa_1_s$oy^R87m%Y6+0n(SKJbm?BypM;Q z%gHQAUPh}A0|~%aH*i&upqYaP-Ao2$YliaApswfj|ho7LZJhGWI2vbpg|y(OkmF z&*Y~`hM`ENB5Q^LE#IRc1cJl^jYqM+WRe0CZ$urF%Amj-?sXHhXEU%(=4`kmxJ zWd)j(e&ReT+-_n9h;Fss=QrhH3k%B-STvOk7X4Psn5qVblb= zU?GE~p<+CPsxinpp$rTqnb{?cjcmmN4D#Bhkqr7^afa!;lUOUZw`Q}}OyOp9WiVot zVPKrj@pUR6o8eSPHhmWER_>Uo1#DMYn5-Fsrh9&5R0XlBr)99JFh*~m&d8R_$85k6 e0Q3)==k_20wyW%u6%4c|KS*QQ9wNcEMjZgO3{)ck diff --git a/debug_video_issue.py b/debug_video_issue.py deleted file mode 100644 index 4eb2191..0000000 --- a/debug_video_issue.py +++ /dev/null @@ -1,267 +0,0 @@ -#!/usr/bin/env python3 -""" -Debug Video Issue - Final Diagnostic Script -""" - -import asyncio -import json -import logging -from pathlib import Path -from browser.playwright_manager import PlaywrightManager - -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger("video_debug") - -async def debug_video_issue(): - """Comprehensive video issue debugging""" - - print("🔍 STARTING COMPREHENSIVE VIDEO DEBUG ANALYSIS") - - # Test with fresh manager - manager = PlaywrightManager(headless=False) - - try: - page = manager.start() - - print("📋 STEP 1: Navigating to Instagram...") - success = manager.navigate_to("https://www.instagram.com") - - if not success: - print("❌ Failed to navigate to Instagram") - return - - print("📋 STEP 2: Checking browser capabilities...") - - # Check all video-related capabilities - capabilities = page.evaluate(""" - () => { - const results = { - // Basic video support - video_element: !!document.createElement('video'), - video_can_play_mp4: document.createElement('video').canPlayType('video/mp4'), - video_can_play_webm: document.createElement('video').canPlayType('video/webm'), - - // DRM Support - widevine_support: !!navigator.requestMediaKeySystemAccess, - media_source: !!window.MediaSource, - encrypted_media: !!window.MediaKeys, - - // Chrome APIs - chrome_present: !!window.chrome, - chrome_runtime: !!(window.chrome && window.chrome.runtime), - chrome_app: window.chrome ? 
window.chrome.app : 'missing', - chrome_csi: !!(window.chrome && window.chrome.csi), - chrome_loadtimes: !!(window.chrome && window.chrome.loadTimes), - - // Media Devices - media_devices: !!(navigator.mediaDevices), - enumerate_devices: !!(navigator.mediaDevices && navigator.mediaDevices.enumerateDevices), - get_user_media: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia), - - // Performance API - performance_now: !!performance.now, - performance_timing: !!performance.timing, - - // Automation markers - webdriver_present: !!navigator.webdriver, - automation_markers: { - webdriver_script_fn: !!navigator.__webdriver_script_fn, - webdriver_evaluate: !!window.__webdriver_evaluate, - selenium_unwrapped: !!document.__selenium_unwrapped, - chrome_webdriver: !!(window.chrome && window.chrome.webdriver) - }, - - // User agent analysis - user_agent: navigator.userAgent, - platform: navigator.platform, - vendor: navigator.vendor, - languages: navigator.languages, - - // Screen info - screen_width: screen.width, - screen_height: screen.height, - device_pixel_ratio: devicePixelRatio, - - // Timing - page_load_time: performance.now() - }; - - return results; - } - """) - - print("📊 BROWSER CAPABILITIES:") - for key, value in capabilities.items(): - print(f" {key}: {value}") - - print("\n📋 STEP 3: Testing video element creation...") - - video_test = page.evaluate(""" - () => { - // Create video element and test - const video = document.createElement('video'); - video.style.display = 'none'; - document.body.appendChild(video); - - const results = { - video_created: true, - video_properties: { - autoplay: video.autoplay, - controls: video.controls, - muted: video.muted, - preload: video.preload, - crossOrigin: video.crossOrigin - }, - video_methods: { - canPlayType: typeof video.canPlayType, - play: typeof video.play, - pause: typeof video.pause, - load: typeof video.load - }, - codec_support: { - mp4_h264: video.canPlayType('video/mp4; codecs="avc1.42E01E"'), - mp4_h265: video.canPlayType('video/mp4; codecs="hev1.1.6.L93.B0"'), - webm_vp8: video.canPlayType('video/webm; codecs="vp8"'), - webm_vp9: video.canPlayType('video/webm; codecs="vp9"'), - audio_aac: video.canPlayType('audio/mp4; codecs="mp4a.40.2"'), - audio_opus: video.canPlayType('audio/webm; codecs="opus"') - } - }; - - document.body.removeChild(video); - return results; - } - """) - - print("\n📊 VIDEO ELEMENT TEST:") - for key, value in video_test.items(): - print(f" {key}: {value}") - - print("\n📋 STEP 4: Checking console errors...") - - # Wait a bit for any console errors - await asyncio.sleep(2) - - # Check for specific Instagram video errors - print("\n📋 STEP 5: Looking for Instagram-specific issues...") - - # Try to find any video elements or error messages - video_status = page.evaluate(""" - () => { - const results = { - video_elements_count: document.querySelectorAll('video').length, - error_messages: [], - instagram_classes: { - video_error_present: !!document.querySelector('.x6s0dn4.xatbrnm.x9f619'), - video_containers: document.querySelectorAll('[class*="video"]').length, - error_spans: [] - } - }; - - // Look for error messages - const errorSpans = document.querySelectorAll('span'); - errorSpans.forEach(span => { - const text = span.textContent.trim(); - if (text.includes('Video') || text.includes('video') || text.includes('abgespielt') || text.includes('richtig')) { - results.instagram_classes.error_spans.push({ - text: text, - classes: span.className - }); - } - }); - - return results; - } - """) - - print("\n📊 
INSTAGRAM VIDEO STATUS:") - for key, value in video_status.items(): - print(f" {key}: {value}") - - print("\n📋 STEP 6: Testing DRM capabilities...") - - drm_test = page.evaluate(""" - () => { - return new Promise((resolve) => { - if (!navigator.requestMediaKeySystemAccess) { - resolve({drm_support: false, error: 'No requestMediaKeySystemAccess'}); - return; - } - - navigator.requestMediaKeySystemAccess('com.widevine.alpha', [{ - initDataTypes: ['cenc'], - videoCapabilities: [{contentType: 'video/mp4; codecs="avc1.42E01E"'}] - }]).then(access => { - resolve({ - drm_support: true, - key_system: access.keySystem, - configuration: access.getConfiguration() - }); - }).catch(error => { - resolve({ - drm_support: false, - error: error.message - }); - }); - }); - } - """) - - print("\n📊 DRM TEST RESULTS:") - print(f" {drm_test}") - - print("\n🎯 FINAL DIAGNOSIS:") - print("=" * 50) - - # Analyze results - issues = [] - - if not capabilities.get('video_element'): - issues.append("❌ Video elements not supported") - - if capabilities.get('webdriver_present'): - issues.append("❌ Webdriver detection present") - - if not capabilities.get('widevine_support'): - issues.append("❌ Widevine DRM not supported") - - if video_status.get('instagram_classes', {}).get('video_error_present'): - issues.append("❌ Instagram video error message detected") - - if not drm_test.get('drm_support'): - issues.append(f"❌ DRM test failed: {drm_test.get('error', 'Unknown')}") - - automation_markers = capabilities.get('automation_markers', {}) - detected_markers = [k for k, v in automation_markers.items() if v] - if detected_markers: - issues.append(f"❌ Automation markers detected: {detected_markers}") - - if issues: - print("🚨 CRITICAL ISSUES FOUND:") - for issue in issues: - print(f" {issue}") - else: - print("✅ No obvious technical issues detected") - print("🤔 The problem might be:") - print(" - Account-specific restrictions") - print(" - Geographic blocking") - print(" - Instagram A/B testing") - print(" - Specific video content restrictions") - - print("\n📋 RECOMMENDATION:") - if len(issues) > 3: - print(" 🔄 Technical fixes needed - automation still detectable") - elif len(issues) > 0: - print(" 🔧 Some technical issues remain") - else: - print(" 💡 Technical setup appears correct - likely policy/account issue") - - except Exception as e: - logger.error(f"Debug failed: {e}") - print(f"❌ Debug script failed: {e}") - - finally: - manager.close() - -if __name__ == "__main__": - asyncio.run(debug_video_issue()) \ No newline at end of file diff --git a/docs/overview.md b/docs/overview.md new file mode 100644 index 0000000..0146f73 --- /dev/null +++ b/docs/overview.md @@ -0,0 +1,2224 @@ +# AccountForger Backend - Verifikationsserver Spezifikation + +## 1. Executive Summary + +**AccountForger** ist ein Desktop-Tool zur automatisierten Erstellung von Social-Media-Accounts (Instagram, Facebook, TikTok, X, etc.). Die Browser-Automation läuft lokal beim Kunden, aber die **Verifikation** (Email-Codes, SMS-Codes) wird über einen zentralen Server abgewickelt. 
+ +### Architektur-Übersicht + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ KUNDE │ +│ ┌─────────────────┐ ┌──────────────────┐ │ +│ │ Desktop Client │ │ RUTX11 Router │ │ +│ │ (AccountForger)│ │ (SMS-Empfang) │ │ +│ └────────┬────────┘ └────────┬─────────┘ │ +└───────────┼────────────────────────────────┼────────────────────┘ + │ HTTPS (API) │ HTTPS (Webhook) + ▼ ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ VERIFIKATIONSSERVER (VPS) │ +│ │ +│ ┌─────────────┐ ┌─────────────┐ ┌─────────────────────────┐ │ +│ │ Email │ │ SMS │ │ Client Registry │ │ +│ │ Service │ │ Service │ │ (API-Key → Nummern) │ │ +│ │ IMAP Polling│ │ Webhook │ │ │ │ +│ └─────────────┘ └─────────────┘ └─────────────────────────┘ │ +│ │ +│ PostgreSQL FastAPI │ +└─────────────────────────────────────────────────────────────────┘ +``` + +### Scope-Abgrenzung + +| In Scope (dieses Backend) | Out of Scope | +|---------------------------|--------------| +| Email-Verifikations-API | Admin-Panel/Dashboard | +| SMS-Verifikations-API | Kundenverwaltung (CRUD) | +| RUTX11 Webhook-Empfang | Router-Konfiguration | +| Phone-Rotation-Logik | Abrechnung/Billing | +| API-Key-Validierung | Lizenz-Management | +| Health-Check & Monitoring | Desktop-Client | + +**Wichtig:** Clients, Router und Telefonnummern werden manuell oder über ein separates Admin-System in der Datenbank angelegt. Dieses Backend geht davon aus, dass diese Daten bereits existieren. + +--- + +## 2. Client-Architektur (Kontext) + +Der Desktop-Client folgt einer **Clean Architecture** mit Domain-Driven Design. Relevante Komponenten für die Backend-Integration: + +### 2.1 Hauptkomponenten + +``` +AccountForger/ +├── social_networks/ +│ ├── base_automation.py # Basis-Klasse für alle Plattformen +│ ├── instagram/ +│ │ ├── instagram_automation.py # Hauptlogik +│ │ └── instagram_verification.py # Email-Code-Eingabe +│ ├── facebook/ +│ │ └── facebook_verification.py +│ └── ... (tiktok, x, gmail, ok_ru, vk) +├── controllers/platform_controllers/ +│ └── base_worker_thread.py # QThread für Account-Erstellung +├── utils/ +│ └── email_handler.py # AKTUELL: IMAP-Polling (wird ersetzt) +└── browser/ + └── playwright_manager.py # Browser-Steuerung +``` + +### 2.2 Aktuelle Email-Verifikation (zu ersetzen) + +Der `EmailHandler` (`utils/email_handler.py`) macht aktuell direktes IMAP-Polling: + +```python +# AKTUELLE Implementierung (wird ersetzt) +class EmailHandler: + def get_verification_code(self, target_email, platform, max_attempts=30): + # Verbindet direkt zu IMAP-Server + mail = imaplib.IMAP4_SSL(self.config["imap_server"]) + mail.login(self.config["imap_user"], self.config["imap_pass"]) + # Sucht nach Emails, extrahiert Code per Regex + ... +``` + +**Problem:** IMAP-Credentials liegen im Client. + +**Lösung:** Client fragt Server-API, Server macht IMAP-Polling. + +### 2.3 Aktuelle SMS-Verifikation (nicht implementiert) + +SMS-Verifikation ist ein Placeholder: + +```python +# social_networks/facebook/facebook_verification.py +def handle_sms_verification(self, phone_number: str, timeout: int = 120): + logger.warning("SMS-Verifikation noch nicht implementiert") + return None +``` + +--- + +## 3. 
Backend API-Spezifikation + +### 3.1 OpenAPI-Endpunkte + +```yaml +openapi: 3.0.3 +info: + title: AccountForger Verification API + version: 1.0.0 + description: Backend für Email- und SMS-Verifikation + +servers: + - url: https://verify.example.com/api/v1 + +security: + - ApiKeyAuth: [] + +components: + securitySchemes: + ApiKeyAuth: + type: apiKey + in: header + name: X-API-Key + description: Client-spezifischer API-Key + + schemas: + EmailRequest: + type: object + required: + - platform + properties: + platform: + type: string + enum: [instagram, facebook, tiktok, x, gmail, twitter, vk, ok_ru] + description: Zielplattform für die Registrierung + preferred_domain: + type: string + description: Bevorzugte Email-Domain (optional) + + EmailResponse: + type: object + properties: + request_id: + type: string + format: uuid + email_address: + type: string + format: email + expires_at: + type: string + format: date-time + + SMSRequest: + type: object + required: + - platform + properties: + platform: + type: string + enum: [instagram, facebook, tiktok, x, gmail, twitter, vk, ok_ru] + + SMSResponse: + type: object + properties: + request_id: + type: string + format: uuid + phone_number: + type: string + description: Telefonnummer im Format +49... + expires_at: + type: string + format: date-time + + CodeResponse: + type: object + properties: + status: + type: string + enum: [pending, received, expired, failed] + code: + type: string + nullable: true + description: Verifikationscode (null wenn noch nicht empfangen) + received_at: + type: string + format: date-time + nullable: true + + WebhookPayload: + type: object + required: + - sender + - message + properties: + sender: + type: string + description: Absender-Nummer + message: + type: string + description: SMS-Inhalt + timestamp: + type: string + format: date-time + sim_slot: + type: integer + enum: [1, 2] + + PhoneAvailability: + type: object + properties: + available_count: + type: integer + phones: + type: array + items: + type: object + properties: + phone_number: + type: string + cooldown_until: + type: string + format: date-time + nullable: true + last_platform: + type: string + nullable: true + + Error: + type: object + properties: + error: + type: string + message: + type: string + retry_after: + type: integer + description: Sekunden bis Retry (bei Rate-Limit) + +paths: + /verification/email/request: + post: + summary: Email-Adresse für Verifikation anfordern + operationId: requestEmail + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/EmailRequest' + responses: + '201': + description: Email-Adresse zugewiesen + content: + application/json: + schema: + $ref: '#/components/schemas/EmailResponse' + '401': + description: Ungültiger API-Key + '429': + description: Rate-Limit erreicht + headers: + Retry-After: + schema: + type: integer + '503': + description: Keine Email-Adresse verfügbar + + /verification/email/code/{request_id}: + get: + summary: Email-Verifikationscode abfragen (Polling) + operationId: getEmailCode + parameters: + - name: request_id + in: path + required: true + schema: + type: string + format: uuid + responses: + '200': + description: Status des Verifikationscodes + content: + application/json: + schema: + $ref: '#/components/schemas/CodeResponse' + '404': + description: Request nicht gefunden + '408': + description: Timeout - kein Code empfangen + + /verification/sms/request: + post: + summary: Telefonnummer für SMS-Verifikation anfordern + operationId: requestSMS + 
requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/SMSRequest' + responses: + '201': + description: Telefonnummer zugewiesen + content: + application/json: + schema: + $ref: '#/components/schemas/SMSResponse' + '401': + description: Ungültiger API-Key + '429': + description: Rate-Limit erreicht + '503': + description: Keine Telefonnummer verfügbar + + /verification/sms/code/{request_id}: + get: + summary: SMS-Verifikationscode abfragen (Polling) + operationId: getSMSCode + parameters: + - name: request_id + in: path + required: true + schema: + type: string + format: uuid + responses: + '200': + description: Status des Verifikationscodes + content: + application/json: + schema: + $ref: '#/components/schemas/CodeResponse' + '404': + description: Request nicht gefunden + '408': + description: Timeout - kein Code empfangen + + /webhook/rutx11/sms: + post: + summary: SMS-Webhook vom RUTX11 Router + operationId: receiveSMS + security: + - RouterToken: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/WebhookPayload' + responses: + '200': + description: SMS empfangen und verarbeitet + '401': + description: Ungültiger Router-Token + '422': + description: Ungültiges Payload-Format + + /phone/available: + get: + summary: Verfügbare Telefonnummern abfragen + operationId: getAvailablePhones + responses: + '200': + description: Liste verfügbarer Nummern + content: + application/json: + schema: + $ref: '#/components/schemas/PhoneAvailability' + + /health: + get: + summary: Health-Check + operationId: healthCheck + security: [] + responses: + '200': + description: Service ist gesund + content: + application/json: + schema: + type: object + properties: + status: + type: string + enum: [healthy, degraded] + database: + type: string + redis: + type: string + imap_connections: + type: integer +``` + +### 3.2 PostgreSQL-Schema + +```sql +-- ===================================================== +-- EXTENSION für UUID +-- ===================================================== +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + +-- ===================================================== +-- CLIENTS (Lizenznehmer) +-- Werden MANUELL oder über separates Admin-System angelegt +-- ===================================================== +CREATE TABLE clients ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + license_key VARCHAR(50) UNIQUE NOT NULL, + name VARCHAR(255) NOT NULL, + api_key_hash VARCHAR(255) NOT NULL, -- bcrypt hash + tier VARCHAR(20) DEFAULT 'standard' CHECK (tier IN ('standard', 'premium', 'enterprise')), + is_active BOOLEAN DEFAULT true, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE INDEX idx_clients_api_key_hash ON clients(api_key_hash); +CREATE INDEX idx_clients_license_key ON clients(license_key); + +-- ===================================================== +-- TIER LIMITS (Rate-Limiting pro Tier) +-- ===================================================== +CREATE TABLE tier_limits ( + tier VARCHAR(20) PRIMARY KEY, + email_requests_per_hour INTEGER NOT NULL, + sms_requests_per_hour INTEGER NOT NULL, + max_concurrent_verifications INTEGER NOT NULL +); + +INSERT INTO tier_limits VALUES + ('standard', 50, 20, 5), + ('premium', 200, 100, 20), + ('enterprise', 1000, 500, 100); + +-- ===================================================== +-- ROUTERS (RUTX11 beim Kunden) +-- Werden MANUELL angelegt nach Router-Versand +-- 
===================================================== +CREATE TABLE routers ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + client_id UUID NOT NULL REFERENCES clients(id) ON DELETE CASCADE, + router_token VARCHAR(100) UNIQUE NOT NULL, -- Für Webhook-Auth + model VARCHAR(50) DEFAULT 'RUTX11', + serial_number VARCHAR(100), + webhook_url VARCHAR(500), -- Für Health-Checks + is_online BOOLEAN DEFAULT false, + last_seen_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE INDEX idx_routers_client_id ON routers(client_id); +CREATE INDEX idx_routers_token ON routers(router_token); + +-- ===================================================== +-- PHONE NUMBERS (eSIMs in den Routern) +-- Werden MANUELL angelegt nach eSIM-Aktivierung +-- ===================================================== +CREATE TABLE phone_numbers ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + router_id UUID NOT NULL REFERENCES routers(id) ON DELETE CASCADE, + phone_number VARCHAR(20) UNIQUE NOT NULL, -- Format: +49... + esim_slot INTEGER NOT NULL CHECK (esim_slot IN (1, 2)), + carrier VARCHAR(100), -- z.B. "Telekom", "Vodafone" + is_active BOOLEAN DEFAULT true, + cooldown_until TIMESTAMP WITH TIME ZONE, -- Gesperrt bis + usage_count INTEGER DEFAULT 0, + last_used_at TIMESTAMP WITH TIME ZONE, + last_platform VARCHAR(50), -- Letzte Plattform + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + + UNIQUE(router_id, esim_slot) +); + +CREATE INDEX idx_phone_numbers_router_id ON phone_numbers(router_id); +CREATE INDEX idx_phone_numbers_cooldown ON phone_numbers(cooldown_until) WHERE is_active = true; + +-- ===================================================== +-- EMAIL ACCOUNTS (Catch-All Domains für Server) +-- Werden MANUELL angelegt +-- ===================================================== +CREATE TABLE email_accounts ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + email_address VARCHAR(255) UNIQUE NOT NULL, -- z.B. 
info@domain.com + imap_server VARCHAR(255) NOT NULL, + imap_port INTEGER DEFAULT 993, + password_encrypted BYTEA NOT NULL, -- AES-256 verschlüsselt + domain VARCHAR(255) NOT NULL, -- Catch-All Domain + is_active BOOLEAN DEFAULT true, + last_checked_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE INDEX idx_email_accounts_domain ON email_accounts(domain) WHERE is_active = true; + +-- ===================================================== +-- VERIFICATION REQUESTS (Kernentität) +-- ===================================================== +CREATE TYPE verification_type AS ENUM ('email', 'sms'); +CREATE TYPE verification_status AS ENUM ('pending', 'polling', 'received', 'expired', 'failed'); + +CREATE TABLE verification_requests ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + client_id UUID NOT NULL REFERENCES clients(id), + + -- Request-Details + platform VARCHAR(50) NOT NULL, + type verification_type NOT NULL, + + -- Email-spezifisch + email_address VARCHAR(255), + email_account_id UUID REFERENCES email_accounts(id), + + -- SMS-spezifisch + phone_number_id UUID REFERENCES phone_numbers(id), + + -- Status + status verification_status DEFAULT 'pending', + verification_code VARCHAR(20), + + -- Timestamps + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + expires_at TIMESTAMP WITH TIME ZONE NOT NULL, + code_received_at TIMESTAMP WITH TIME ZONE, + + -- Constraints + CHECK ( + (type = 'email' AND email_address IS NOT NULL) OR + (type = 'sms' AND phone_number_id IS NOT NULL) + ) +); + +CREATE INDEX idx_verification_requests_client ON verification_requests(client_id); +CREATE INDEX idx_verification_requests_status ON verification_requests(status) WHERE status = 'pending'; +CREATE INDEX idx_verification_requests_email ON verification_requests(email_address) WHERE type = 'email'; +CREATE INDEX idx_verification_requests_phone ON verification_requests(phone_number_id) WHERE type = 'sms'; +CREATE INDEX idx_verification_requests_expires ON verification_requests(expires_at) WHERE status IN ('pending', 'polling'); + +-- ===================================================== +-- SMS MESSAGES (Empfangene SMS vom Router) +-- ===================================================== +CREATE TABLE sms_messages ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + router_id UUID NOT NULL REFERENCES routers(id), + phone_number VARCHAR(20) NOT NULL, -- Empfänger-Nummer + sender VARCHAR(50) NOT NULL, -- Absender (z.B. 
"Instagram") + message_body TEXT NOT NULL, + sim_slot INTEGER, + + -- Matching + processed BOOLEAN DEFAULT false, + matched_request_id UUID REFERENCES verification_requests(id), + extracted_code VARCHAR(20), + + -- Timestamps + received_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + processed_at TIMESTAMP WITH TIME ZONE +); + +CREATE INDEX idx_sms_messages_router ON sms_messages(router_id); +CREATE INDEX idx_sms_messages_unprocessed ON sms_messages(received_at) WHERE processed = false; +CREATE INDEX idx_sms_messages_phone ON sms_messages(phone_number, received_at DESC); + +-- ===================================================== +-- RATE LIMIT TRACKING +-- ===================================================== +CREATE TABLE rate_limit_tracking ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + client_id UUID NOT NULL REFERENCES clients(id), + request_type VARCHAR(20) NOT NULL, -- 'email' oder 'sms' + window_start TIMESTAMP WITH TIME ZONE NOT NULL, + request_count INTEGER DEFAULT 1, + + UNIQUE(client_id, request_type, window_start) +); + +CREATE INDEX idx_rate_limit_client ON rate_limit_tracking(client_id, request_type, window_start DESC); + +-- ===================================================== +-- AUDIT LOG (Optional, für Debugging) +-- ===================================================== +CREATE TABLE audit_log ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + client_id UUID REFERENCES clients(id), + action VARCHAR(100) NOT NULL, + details JSONB, + ip_address INET, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE INDEX idx_audit_log_client ON audit_log(client_id, created_at DESC); +``` + +### 3.3 Entity-Relationship-Diagramm + +```mermaid +erDiagram + CLIENTS ||--o{ ROUTERS : "besitzt" + CLIENTS ||--o{ VERIFICATION_REQUESTS : "erstellt" + CLIENTS }o--|| TIER_LIMITS : "hat" + + ROUTERS ||--o{ PHONE_NUMBERS : "enthält" + ROUTERS ||--o{ SMS_MESSAGES : "empfängt" + + PHONE_NUMBERS ||--o{ VERIFICATION_REQUESTS : "wird_verwendet_für" + + EMAIL_ACCOUNTS ||--o{ VERIFICATION_REQUESTS : "wird_verwendet_für" + + VERIFICATION_REQUESTS ||--o| SMS_MESSAGES : "wird_gematcht_mit" + + CLIENTS { + uuid id PK + string license_key UK + string api_key_hash + string tier + boolean is_active + } + + ROUTERS { + uuid id PK + uuid client_id FK + string router_token UK + boolean is_online + } + + PHONE_NUMBERS { + uuid id PK + uuid router_id FK + string phone_number UK + int esim_slot + timestamp cooldown_until + } + + EMAIL_ACCOUNTS { + uuid id PK + string email_address UK + string imap_server + bytea password_encrypted + string domain + } + + VERIFICATION_REQUESTS { + uuid id PK + uuid client_id FK + string platform + enum type + string email_address + uuid phone_number_id FK + enum status + string verification_code + } + + SMS_MESSAGES { + uuid id PK + uuid router_id FK + string sender + string message_body + uuid matched_request_id FK + } +``` + +--- + +## 4. Services - Implementierungsdetails + +### 4.1 EmailService + +**Zweck:** IMAP-Polling nach Verifikations-Emails und Code-Extraktion. 
+ +```python +# app/services/email_service.py + +import asyncio +import aioimaplib +import re +from typing import Optional +from datetime import datetime, timedelta +from email import message_from_bytes +from email.header import decode_header + +class EmailService: + """Service für IMAP-Polling und Email-Code-Extraktion.""" + + # Plattform-spezifische Regex-Patterns für Code-Extraktion + CODE_PATTERNS = { + "instagram": [ + r"(\d{6}) ist dein Instagram-Code", + r"(\d{6}) is your Instagram code", + r"Dein Code ist (\d{6})", + r"Your code is (\d{6})", + r"\b(\d{6})\b" # Fallback: 6-stellige Zahl + ], + "facebook": [ + r"FB-(\d{5})", + r"Bestätigungscode lautet (\d{5})", + r"\b(\d{5})\b" # Fallback: 5-stellige Zahl + ], + "tiktok": [ + r"(\d{6}) ist dein Bestätigungscode", + r"(\d{6}) is your confirmation code", + r"\b(\d{6})\b" + ], + "x": [ + r"(\d{6}) ist dein X Verifizierungscode", + r"(\d{6}) is your X verification code", + r"\b(\d{6})\b" + ], + "default": [ + r"[Cc]ode[:\s]*(\d{4,8})", + r"\b(\d{6})\b" + ] + } + + # Betreff-Keywords pro Plattform + SUBJECT_KEYWORDS = { + "instagram": ["instagram", "bestätigungscode", "verification code"], + "facebook": ["facebook", "fb-", "bestätigungscode"], + "tiktok": ["tiktok", "bestätigungscode", "confirmation"], + "x": ["verification code", "verifizierungscode"], + "default": ["code", "verification", "bestätigung"] + } + + def __init__(self, db_session, encryption_service): + self.db = db_session + self.encryption = encryption_service + self._imap_connections = {} + + async def poll_for_code( + self, + request_id: str, + email_address: str, + platform: str, + timeout_seconds: int = 120 + ) -> Optional[str]: + """ + Pollt IMAP-Server nach Verifikations-Email. + + Args: + request_id: ID der Verifikationsanfrage + email_address: Ziel-Email-Adresse + platform: Plattform (instagram, facebook, etc.) 
+ timeout_seconds: Maximale Wartezeit + + Returns: + Verifikationscode oder None + """ + # Email-Account für Domain ermitteln + domain = email_address.split("@")[1] + email_account = await self._get_email_account_for_domain(domain) + + if not email_account: + raise ValueError(f"Kein Email-Account für Domain {domain} konfiguriert") + + # IMAP-Verbindung herstellen/wiederverwenden + imap = await self._get_imap_connection(email_account) + + start_time = datetime.utcnow() + poll_interval = 2 # Sekunden + + while (datetime.utcnow() - start_time).seconds < timeout_seconds: + # Nach neuen Emails suchen + code = await self._search_and_extract_code( + imap, + email_address, + platform, + since=start_time - timedelta(minutes=5) + ) + + if code: + # Request aktualisieren + await self._update_request_with_code(request_id, code) + return code + + await asyncio.sleep(poll_interval) + + # Timeout + await self._mark_request_expired(request_id) + return None + + async def _search_and_extract_code( + self, + imap: aioimaplib.IMAP4_SSL, + target_email: str, + platform: str, + since: datetime + ) -> Optional[str]: + """Durchsucht Emails nach Verifikationscode.""" + + # INBOX auswählen + await imap.select("INBOX") + + # Suche nach Emails seit timestamp + date_str = since.strftime("%d-%b-%Y") + _, data = await imap.search(f'(SINCE "{date_str}")') + + email_ids = data[0].split() + + # Neueste zuerst + for email_id in reversed(email_ids[-20:]): # Max 20 Emails prüfen + _, msg_data = await imap.fetch(email_id, "(RFC822)") + + if not msg_data or not msg_data[1]: + continue + + msg = message_from_bytes(msg_data[1]) + + # Empfänger prüfen + to_addr = self._extract_email_from_header(msg.get("To", "")) + if to_addr.lower() != target_email.lower(): + continue + + # Betreff prüfen + subject = self._decode_header(msg.get("Subject", "")) + if not self._is_relevant_subject(subject, platform): + continue + + # Body extrahieren und Code suchen + body = self._extract_body(msg) + code = self._extract_code(body, platform) + + if code: + return code + + return None + + def _extract_code(self, text: str, platform: str) -> Optional[str]: + """Extrahiert Verifikationscode aus Text.""" + patterns = self.CODE_PATTERNS.get(platform, []) + self.CODE_PATTERNS["default"] + + for pattern in patterns: + match = re.search(pattern, text, re.IGNORECASE) + if match: + return match.group(1) + + return None + + def _is_relevant_subject(self, subject: str, platform: str) -> bool: + """Prüft ob Betreff zur Plattform passt.""" + keywords = self.SUBJECT_KEYWORDS.get(platform, []) + self.SUBJECT_KEYWORDS["default"] + subject_lower = subject.lower() + return any(kw in subject_lower for kw in keywords) +``` + +### 4.2 SMSService + +**Zweck:** Verarbeitung eingehender SMS via Webhook und Matching mit offenen Requests. 
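+
+Das Webhook-Payload (Schema `WebhookPayload` in 3.1) enthält keine Empfänger-Nummer; sie lässt sich über den Router-Token und den `sim_slot` aus der Tabelle `phone_numbers` (UNIQUE über `router_id, esim_slot`) auflösen. Die folgende Skizze eines Webhook-Endpunkts zeigt, wie der unten definierte Service angebunden werden könnte; die Hilfsfunktion `resolve_phone_number()` ist nur illustrativ benannt:
+
+```python
+# app/api/webhook_routes.py (Skizze, keine verbindliche Implementierung)
+from fastapi import APIRouter, Depends, Header, HTTPException
+
+router = APIRouter()
+
+@router.post("/webhook/rutx11/sms")
+async def receive_sms(
+    payload: WebhookPayload,                     # Pydantic-Modell gemäß OpenAPI-Schema
+    x_router_token: str = Header(..., alias="X-Router-Token"),
+    db = Depends(get_db),
+):
+    sms_service = SMSService(db)
+
+    # Empfänger-Nummer (eSIM) über Router + SIM-Slot ermitteln, da nicht im Payload enthalten
+    phone_number = await resolve_phone_number(db, x_router_token, payload.sim_slot)
+
+    try:
+        result = await sms_service.process_incoming_sms(
+            router_token=x_router_token,
+            sender=payload.sender,
+            message=payload.message,
+            phone_number=phone_number,
+            sim_slot=payload.sim_slot,
+        )
+    except PermissionError:
+        # Ungültiger Router-Token -> 401 laut API-Spezifikation
+        raise HTTPException(status_code=401, detail="Ungültiger Router-Token")
+
+    return result
+```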
+ +```python +# app/services/sms_service.py + +import re +from typing import Optional +from datetime import datetime + +class SMSService: + """Service für SMS-Verarbeitung und Request-Matching.""" + + # Code-Patterns für SMS (kürzer als Email-Patterns) + SMS_CODE_PATTERNS = { + "instagram": [r"(\d{6})"], + "facebook": [r"(\d{5})", r"FB-(\d{5})"], + "tiktok": [r"(\d{6})"], + "x": [r"(\d{6})"], + "default": [r"(\d{4,8})"] + } + + # Absender-Keywords pro Plattform + SENDER_KEYWORDS = { + "instagram": ["instagram", "32665"], + "facebook": ["facebook", "32665", "fb"], + "tiktok": ["tiktok"], + "x": ["twitter", "x.com"], + } + + def __init__(self, db_session): + self.db = db_session + + async def process_incoming_sms( + self, + router_token: str, + sender: str, + message: str, + phone_number: str, + sim_slot: Optional[int] = None + ) -> dict: + """ + Verarbeitet eingehende SMS vom Router-Webhook. + + Args: + router_token: Token zur Router-Authentifizierung + sender: Absender der SMS + message: SMS-Inhalt + phone_number: Empfänger-Nummer (eSIM) + sim_slot: SIM-Slot (1 oder 2) + + Returns: + dict mit Verarbeitungsergebnis + """ + # Router validieren + router = await self._validate_router_token(router_token) + if not router: + raise PermissionError("Ungültiger Router-Token") + + # Router als online markieren + await self._update_router_last_seen(router.id) + + # SMS in DB speichern + sms_record = await self._store_sms( + router_id=router.id, + phone_number=phone_number, + sender=sender, + message=message, + sim_slot=sim_slot + ) + + # Offenen Request finden + request = await self._find_matching_request(phone_number) + + if not request: + # Kein offener Request - SMS trotzdem speichern + return { + "status": "stored", + "message": "SMS gespeichert, kein offener Request", + "sms_id": str(sms_record.id) + } + + # Code extrahieren + code = self._extract_code_from_sms(message, request.platform) + + if code: + # Request aktualisieren + await self._update_request_with_code(request.id, code, sms_record.id) + + return { + "status": "matched", + "request_id": str(request.id), + "code": code + } + + return { + "status": "stored", + "message": "SMS gespeichert, kein Code extrahiert" + } + + async def _find_matching_request(self, phone_number: str): + """ + Findet offenen Request für Telefonnummer. + + Matching-Logik: + 1. Status = 'pending' oder 'polling' + 2. phone_number stimmt überein + 3. Noch nicht abgelaufen + 4. Ältester Request zuerst (FIFO) + """ + query = """ + SELECT vr.* FROM verification_requests vr + JOIN phone_numbers pn ON vr.phone_number_id = pn.id + WHERE pn.phone_number = $1 + AND vr.status IN ('pending', 'polling') + AND vr.expires_at > NOW() + AND vr.type = 'sms' + ORDER BY vr.created_at ASC + LIMIT 1 + """ + return await self.db.fetch_one(query, phone_number) + + def _extract_code_from_sms(self, message: str, platform: str) -> Optional[str]: + """Extrahiert Code aus SMS basierend auf Plattform.""" + patterns = self.SMS_CODE_PATTERNS.get(platform, []) + self.SMS_CODE_PATTERNS["default"] + + for pattern in patterns: + match = re.search(pattern, message) + if match: + return match.group(1) + + return None +``` + +### 4.3 PhoneRotationService + +**Zweck:** Intelligente Auswahl von Telefonnummern mit Cooldown und Platform-Awareness. 
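+
+Im SMS-Request-Endpunkt folgt der Aufruf typischerweise dem Muster "Nummer wählen, Request anlegen, Cooldown setzen"; schlägt ein Schritt fehl, wird die Nummer über `release_phone()` wieder freigegeben. Eine kurze Skizze dazu (Transaktions-Handling bewusst weggelassen, `create_sms_request()` ist nur illustrativ benannt):
+
+```python
+# Skizze: Verwendung des PhoneRotationService im SMS-Request-Flow
+from fastapi import HTTPException
+
+async def assign_phone_for_sms(rotation: "PhoneRotationService", client_id: str, platform: str):
+    phone = await rotation.get_available_phone(client_id, platform)
+    if phone is None:
+        # Keine Nummer verfügbar -> 503 laut Fehlertabelle (Abschnitt 6.1)
+        raise HTTPException(status_code=503, detail="Keine Telefonnummer verfügbar")
+
+    try:
+        # Verification Request anlegen, danach Cooldown + usage_count setzen
+        request = await create_sms_request(client_id, platform, phone["id"])  # illustrativ
+        await rotation.mark_phone_used(phone["id"], platform)
+        return request, phone["phone_number"]
+    except Exception:
+        # Bei Fehlern Nummer sofort wieder freigeben (setzt cooldown_until zurück)
+        await rotation.release_phone(phone["id"])
+        raise
+```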
+ +```python +# app/services/phone_rotation_service.py + +from datetime import datetime, timedelta +from typing import Optional, List +import random + +class PhoneRotationService: + """Service für intelligente Telefonnummern-Rotation.""" + + # Cooldown-Zeiten pro Plattform (in Minuten) + PLATFORM_COOLDOWNS = { + "instagram": 60, # 1 Stunde + "facebook": 45, + "tiktok": 30, + "x": 30, + "default": 30 + } + + def __init__(self, db_session): + self.db = db_session + + async def get_available_phone( + self, + client_id: str, + platform: str + ) -> Optional[dict]: + """ + Wählt optimale Telefonnummer für Client und Plattform. + + Auswahlkriterien (Priorität): + 1. Nummer gehört zum Client (über Router) + 2. Nummer ist aktiv + 3. Cooldown abgelaufen + 4. Wurde nicht kürzlich für dieselbe Plattform verwendet + 5. Geringste Gesamtnutzung (Load-Balancing) + + Returns: + dict mit phone_number und id, oder None + """ + query = """ + SELECT + pn.id, + pn.phone_number, + pn.usage_count, + pn.last_platform, + pn.last_used_at, + pn.cooldown_until + FROM phone_numbers pn + JOIN routers r ON pn.router_id = r.id + WHERE r.client_id = $1 + AND pn.is_active = true + AND r.is_online = true + AND (pn.cooldown_until IS NULL OR pn.cooldown_until < NOW()) + ORDER BY + -- Bevorzuge Nummern die nicht für diese Plattform verwendet wurden + CASE WHEN pn.last_platform = $2 THEN 1 ELSE 0 END ASC, + -- Dann nach Nutzungscount (Load-Balancing) + pn.usage_count ASC, + -- Dann zufällig für Variation + RANDOM() + LIMIT 1 + """ + + phone = await self.db.fetch_one(query, client_id, platform) + + if phone: + return { + "id": str(phone["id"]), + "phone_number": phone["phone_number"] + } + + return None + + async def mark_phone_used( + self, + phone_id: str, + platform: str + ) -> None: + """ + Markiert Nummer als verwendet und setzt Cooldown. + """ + cooldown_minutes = self.PLATFORM_COOLDOWNS.get(platform, 30) + cooldown_until = datetime.utcnow() + timedelta(minutes=cooldown_minutes) + + query = """ + UPDATE phone_numbers + SET + usage_count = usage_count + 1, + last_used_at = NOW(), + last_platform = $2, + cooldown_until = $3 + WHERE id = $1 + """ + + await self.db.execute(query, phone_id, platform, cooldown_until) + + async def get_available_count(self, client_id: str) -> int: + """Gibt Anzahl verfügbarer Nummern zurück.""" + query = """ + SELECT COUNT(*) FROM phone_numbers pn + JOIN routers r ON pn.router_id = r.id + WHERE r.client_id = $1 + AND pn.is_active = true + AND r.is_online = true + AND (pn.cooldown_until IS NULL OR pn.cooldown_until < NOW()) + """ + result = await self.db.fetch_one(query, client_id) + return result["count"] + + async def release_phone(self, phone_id: str) -> None: + """Gibt Nummer vorzeitig frei (z.B. bei Fehler).""" + query = """ + UPDATE phone_numbers + SET cooldown_until = NULL + WHERE id = $1 + """ + await self.db.execute(query, phone_id) +``` + +### 4.4 AuthService + +**Zweck:** API-Key-Validierung und Rate-Limiting. + +```python +# app/services/auth_service.py + +import bcrypt +from datetime import datetime, timedelta +from typing import Optional +from fastapi import HTTPException, Header + +class AuthService: + """Service für Authentifizierung und Rate-Limiting.""" + + def __init__(self, db_session): + self.db = db_session + + async def validate_api_key(self, api_key: str) -> dict: + """ + Validiert API-Key und gibt Client-Info zurück. + + Returns: + dict mit client_id, tier, etc. 
+ + Raises: + HTTPException 401 bei ungültigem Key + """ + if not api_key: + raise HTTPException(status_code=401, detail="API-Key fehlt") + + # Alle aktiven Clients laden + query = """ + SELECT id, license_key, api_key_hash, tier, name + FROM clients + WHERE is_active = true + """ + clients = await self.db.fetch_all(query) + + # Key gegen alle Hashes prüfen + for client in clients: + if bcrypt.checkpw(api_key.encode(), client["api_key_hash"].encode()): + return { + "client_id": str(client["id"]), + "license_key": client["license_key"], + "tier": client["tier"], + "name": client["name"] + } + + raise HTTPException(status_code=401, detail="Ungültiger API-Key") + + async def check_rate_limit( + self, + client_id: str, + tier: str, + request_type: str # 'email' oder 'sms' + ) -> bool: + """ + Prüft und aktualisiert Rate-Limit. + + Returns: + True wenn Request erlaubt, sonst HTTPException 429 + """ + # Tier-Limits laden + limit_query = """ + SELECT email_requests_per_hour, sms_requests_per_hour + FROM tier_limits + WHERE tier = $1 + """ + limits = await self.db.fetch_one(limit_query, tier) + + if not limits: + limits = {"email_requests_per_hour": 50, "sms_requests_per_hour": 20} + + max_requests = ( + limits["email_requests_per_hour"] + if request_type == "email" + else limits["sms_requests_per_hour"] + ) + + # Aktuelle Stunde + window_start = datetime.utcnow().replace(minute=0, second=0, microsecond=0) + + # Anzahl Requests in dieser Stunde + count_query = """ + SELECT COALESCE(SUM(request_count), 0) as total + FROM rate_limit_tracking + WHERE client_id = $1 + AND request_type = $2 + AND window_start >= $3 + """ + result = await self.db.fetch_one(count_query, client_id, request_type, window_start) + current_count = result["total"] + + if current_count >= max_requests: + # Rate-Limit erreicht + retry_after = 3600 - (datetime.utcnow() - window_start).seconds + raise HTTPException( + status_code=429, + detail="Rate-Limit erreicht", + headers={"Retry-After": str(retry_after)} + ) + + # Request zählen + upsert_query = """ + INSERT INTO rate_limit_tracking (client_id, request_type, window_start, request_count) + VALUES ($1, $2, $3, 1) + ON CONFLICT (client_id, request_type, window_start) + DO UPDATE SET request_count = rate_limit_tracking.request_count + 1 + """ + await self.db.execute(upsert_query, client_id, request_type, window_start) + + return True + +# FastAPI Dependency +async def get_current_client( + x_api_key: str = Header(..., alias="X-API-Key"), + db = Depends(get_db) +) -> dict: + """FastAPI Dependency für API-Key-Validierung.""" + auth_service = AuthService(db) + return await auth_service.validate_api_key(x_api_key) +``` + +--- + +## 5. Sequenzdiagramme + +### 5.1 Email-Verifikations-Flow + +```mermaid +sequenceDiagram + participant C as Desktop Client + participant S as Verification Server + participant DB as PostgreSQL + participant IMAP as IMAP Server + participant P as Platform (Instagram) + + C->>S: POST /verification/email/request
{platform: "instagram"} + S->>S: API-Key validieren + S->>DB: Rate-Limit prüfen + S->>DB: Freie Email-Adresse wählen + S->>DB: Verification Request anlegen + S-->>C: 201 {request_id, email_address} + + Note over C: Client registriert Account
mit der Email-Adresse + + C->>P: Account-Registrierung mit Email + P->>IMAP: Verifikations-Email senden + + loop Polling (max 120s) + C->>S: GET /verification/email/code/{request_id} + S->>IMAP: Emails abrufen (IMAP) + + alt Email gefunden + S->>S: Code extrahieren (Regex) + S->>DB: Code speichern + S-->>C: 200 {status: "received", code: "123456"} + else Email nicht gefunden + S-->>C: 200 {status: "pending", code: null} + end + end + + Note over C: Client gibt Code ein + C->>P: Verifikationscode eingeben + P-->>C: Account verifiziert +``` + +### 5.2 SMS-Verifikations-Flow + +```mermaid +sequenceDiagram + participant C as Desktop Client + participant S as Verification Server + participant DB as PostgreSQL + participant R as RUTX11 Router + participant P as Platform (Instagram) + + C->>S: POST /verification/sms/request
{platform: "instagram"} + S->>S: API-Key validieren + S->>DB: Rate-Limit prüfen + S->>DB: PhoneRotation: Freie Nummer wählen + S->>DB: Verification Request anlegen + S->>DB: Cooldown setzen + S-->>C: 201 {request_id, phone_number: "+49..."} + + Note over C: Client registriert Account
mit der Telefonnummer + + C->>P: Account-Registrierung mit Telefon + P->>R: SMS senden an +49... + + Note over R: Router empfängt SMS + R->>S: POST /webhook/rutx11/sms
{sender, message, sim_slot} + S->>S: Router-Token validieren + S->>DB: SMS speichern + S->>DB: Offenen Request finden (phone_number match) + S->>S: Code extrahieren (Regex) + S->>DB: Request mit Code aktualisieren + S-->>R: 200 OK + + loop Polling (parallel) + C->>S: GET /verification/sms/code/{request_id} + alt Code empfangen + S->>DB: Request Status abrufen + S-->>C: 200 {status: "received", code: "123456"} + else Noch kein Code + S-->>C: 200 {status: "pending", code: null} + end + end + + Note over C: Client gibt Code ein + C->>P: Verifikationscode eingeben + P-->>C: Account verifiziert +``` + +### 5.3 Webhook-Processing + +```mermaid +sequenceDiagram + participant R as RUTX11 Router + participant S as Verification Server + participant DB as PostgreSQL + + R->>S: POST /webhook/rutx11/sms + Note over R,S: Header: X-Router-Token: ABC123 + Note over R,S: Body: {sender, message, timestamp, sim_slot} + + S->>DB: SELECT * FROM routers
WHERE router_token = 'ABC123' + + alt Router nicht gefunden + S-->>R: 401 Unauthorized + else Router gefunden + S->>DB: UPDATE routers SET
last_seen_at = NOW(),
is_online = true + + S->>DB: INSERT INTO sms_messages
(router_id, sender, message...) + + S->>DB: SELECT * FROM verification_requests
WHERE phone_number = ...
AND status = 'pending'
AND expires_at > NOW() + + alt Request gefunden + S->>S: Code extrahieren + S->>DB: UPDATE verification_requests
SET code = '123456',
status = 'received' + S->>DB: UPDATE sms_messages
SET matched_request_id = ... + S-->>R: 200 {status: "matched"} + else Kein Request + S-->>R: 200 {status: "stored"} + end + end +``` + +--- + +## 6. Fehlerszenarien & Edge Cases + +### 6.1 Error Responses + +| Szenario | HTTP Status | Response Body | Client-Aktion | +|----------|-------------|---------------|---------------| +| Ungültiger API-Key | 401 | `{"error": "unauthorized", "message": "Ungültiger API-Key"}` | Lizenz prüfen | +| Rate-Limit erreicht | 429 | `{"error": "rate_limit", "message": "...", "retry_after": 1800}` | Warten und Retry | +| Keine Email verfügbar | 503 | `{"error": "no_resource", "message": "Keine Email-Adresse verfügbar"}` | Später versuchen | +| Keine Nummer verfügbar | 503 | `{"error": "no_resource", "message": "Keine Telefonnummer verfügbar"}` | Später versuchen | +| Request nicht gefunden | 404 | `{"error": "not_found", "message": "Request nicht gefunden"}` | Neuen Request starten | +| Code-Timeout | 408 | `{"error": "timeout", "message": "Kein Code innerhalb Timeout empfangen"}` | Retry oder manuell | +| Router offline | 503 | `{"error": "router_offline", "message": "Router nicht erreichbar"}` | Support kontaktieren | +| Ungültiger Router-Token | 401 | `{"error": "unauthorized", "message": "Ungültiger Router-Token"}` | Router-Config prüfen | + +### 6.2 Timeout-Behandlung + +```python +# Email-Verifikation +DEFAULT_EMAIL_TIMEOUT = 120 # Sekunden +MAX_EMAIL_TIMEOUT = 300 + +# SMS-Verifikation +DEFAULT_SMS_TIMEOUT = 180 # SMS kann länger dauern +MAX_SMS_TIMEOUT = 300 + +# Request-Expiration +REQUEST_EXPIRY_BUFFER = 60 # Request läuft 60s nach Timeout ab +``` + +### 6.3 Retry-Logik (Client-seitig) + +```python +# Empfohlene Polling-Intervalle +INITIAL_POLL_INTERVAL = 2 # Sekunden +MAX_POLL_INTERVAL = 5 +BACKOFF_FACTOR = 1.5 + +# Empfohlene Retry-Strategie bei Fehlern +MAX_RETRIES = 3 +RETRY_DELAYS = [5, 15, 30] # Sekunden +``` + +--- + +## 7. Integration mit Desktop-Client + +### 7.1 VerificationClient-Klasse (NEU) + +Diese Datei muss im Client erstellt werden: `utils/verification_client.py` + +```python +# utils/verification_client.py +""" +Client für die Kommunikation mit dem Verifikationsserver. +Ersetzt direktes IMAP-Polling durch API-Calls. +""" + +import requests +import time +import logging +from typing import Optional, Dict, Any +from dataclasses import dataclass +from enum import Enum + +logger = logging.getLogger("verification_client") + + +class VerificationStatus(Enum): + PENDING = "pending" + POLLING = "polling" + RECEIVED = "received" + EXPIRED = "expired" + FAILED = "failed" + + +@dataclass +class VerificationResponse: + """Response von einer Verifikationsanfrage.""" + request_id: str + email_address: Optional[str] = None + phone_number: Optional[str] = None + expires_at: Optional[str] = None + + +@dataclass +class CodeResponse: + """Response vom Code-Polling.""" + status: VerificationStatus + code: Optional[str] = None + received_at: Optional[str] = None + + +class VerificationClientError(Exception): + """Basis-Exception für VerificationClient.""" + pass + + +class RateLimitError(VerificationClientError): + """Rate-Limit erreicht.""" + def __init__(self, retry_after: int): + self.retry_after = retry_after + super().__init__(f"Rate-Limit erreicht. 
Retry nach {retry_after} Sekunden.") + + +class NoResourceError(VerificationClientError): + """Keine Ressource (Email/Telefon) verfügbar.""" + pass + + +class VerificationTimeoutError(VerificationClientError): + """Timeout beim Warten auf Code.""" + pass + + +class VerificationClient: + """ + Client für den Verifikationsserver. + + Verwendung: + client = VerificationClient( + server_url="https://verify.example.com", + api_key="your-api-key" + ) + + # Email-Verifikation + response = client.request_email("instagram") + email = response.email_address + # ... Account registrieren mit email ... + code = client.poll_for_code(response.request_id, "email", timeout=120) + + # SMS-Verifikation + response = client.request_sms("instagram") + phone = response.phone_number + # ... Account registrieren mit phone ... + code = client.poll_for_code(response.request_id, "sms", timeout=180) + """ + + DEFAULT_TIMEOUT = 120 # Sekunden + POLL_INTERVAL = 2 # Sekunden + MAX_POLL_INTERVAL = 5 + + def __init__(self, server_url: str, api_key: str): + """ + Initialisiert den VerificationClient. + + Args: + server_url: Basis-URL des Verifikationsservers (ohne /api/v1) + api_key: API-Key für Authentifizierung + """ + self.server_url = server_url.rstrip("/") + self.api_key = api_key + self.session = requests.Session() + self.session.headers.update({ + "X-API-Key": api_key, + "Content-Type": "application/json", + "User-Agent": "AccountForger/1.0" + }) + + logger.info(f"VerificationClient initialisiert für {server_url}") + + def _make_request( + self, + method: str, + endpoint: str, + json: Dict = None, + timeout: int = 30 + ) -> Dict[str, Any]: + """Führt HTTP-Request aus mit Error-Handling.""" + url = f"{self.server_url}/api/v1{endpoint}" + + try: + response = self.session.request( + method=method, + url=url, + json=json, + timeout=timeout + ) + + # Error-Handling basierend auf Status-Code + if response.status_code == 401: + raise VerificationClientError("Ungültiger API-Key") + + if response.status_code == 429: + retry_after = int(response.headers.get("Retry-After", 60)) + raise RateLimitError(retry_after) + + if response.status_code == 503: + data = response.json() + raise NoResourceError(data.get("message", "Keine Ressource verfügbar")) + + if response.status_code == 408: + raise VerificationTimeoutError("Server-seitiger Timeout") + + if response.status_code == 404: + raise VerificationClientError("Request nicht gefunden") + + response.raise_for_status() + return response.json() + + except requests.exceptions.Timeout: + raise VerificationClientError("Verbindungs-Timeout") + except requests.exceptions.ConnectionError: + raise VerificationClientError("Verbindung zum Server fehlgeschlagen") + + def request_email(self, platform: str, preferred_domain: str = None) -> VerificationResponse: + """ + Fordert eine Email-Adresse für Verifikation an. + + Args: + platform: Zielplattform (instagram, facebook, tiktok, x, etc.) 
+ preferred_domain: Bevorzugte Domain (optional) + + Returns: + VerificationResponse mit request_id und email_address + """ + payload = {"platform": platform} + if preferred_domain: + payload["preferred_domain"] = preferred_domain + + data = self._make_request("POST", "/verification/email/request", json=payload) + + logger.info(f"Email angefordert für {platform}: {data.get('email_address')}") + + return VerificationResponse( + request_id=data["request_id"], + email_address=data["email_address"], + expires_at=data.get("expires_at") + ) + + def request_sms(self, platform: str) -> VerificationResponse: + """ + Fordert eine Telefonnummer für SMS-Verifikation an. + + Args: + platform: Zielplattform + + Returns: + VerificationResponse mit request_id und phone_number + """ + payload = {"platform": platform} + + data = self._make_request("POST", "/verification/sms/request", json=payload) + + logger.info(f"Telefonnummer angefordert für {platform}: {data.get('phone_number')}") + + return VerificationResponse( + request_id=data["request_id"], + phone_number=data["phone_number"], + expires_at=data.get("expires_at") + ) + + def get_code_status(self, request_id: str, verification_type: str) -> CodeResponse: + """ + Fragt Status eines Verifikationscodes ab (einmalig). + + Args: + request_id: ID der Verifikationsanfrage + verification_type: "email" oder "sms" + + Returns: + CodeResponse mit Status und ggf. Code + """ + endpoint = f"/verification/{verification_type}/code/{request_id}" + data = self._make_request("GET", endpoint) + + return CodeResponse( + status=VerificationStatus(data["status"]), + code=data.get("code"), + received_at=data.get("received_at") + ) + + def poll_for_code( + self, + request_id: str, + verification_type: str, + timeout: int = None, + callback: callable = None + ) -> Optional[str]: + """ + Pollt Server bis Code empfangen wurde. + + Args: + request_id: ID der Verifikationsanfrage + verification_type: "email" oder "sms" + timeout: Maximale Wartezeit in Sekunden + callback: Optional - wird bei jedem Poll aufgerufen mit (elapsed_seconds, status) + + Returns: + Verifikationscode oder None bei Timeout + """ + timeout = timeout or self.DEFAULT_TIMEOUT + start_time = time.time() + poll_interval = self.POLL_INTERVAL + + logger.info(f"Starte Polling für {verification_type} Request {request_id}") + + while True: + elapsed = time.time() - start_time + + if elapsed >= timeout: + logger.warning(f"Timeout nach {elapsed:.0f}s für Request {request_id}") + return None + + try: + response = self.get_code_status(request_id, verification_type) + + if callback: + callback(elapsed, response.status) + + if response.status == VerificationStatus.RECEIVED and response.code: + logger.info(f"Code empfangen: {response.code} nach {elapsed:.0f}s") + return response.code + + if response.status in (VerificationStatus.EXPIRED, VerificationStatus.FAILED): + logger.warning(f"Request fehlgeschlagen: {response.status}") + return None + + except VerificationClientError as e: + logger.warning(f"Polling-Fehler: {e}") + # Weitermachen trotz Fehler + + # Warten vor nächstem Poll (mit Backoff) + time.sleep(poll_interval) + poll_interval = min(poll_interval * 1.2, self.MAX_POLL_INTERVAL) + + return None + + def get_available_phones(self) -> Dict[str, Any]: + """ + Fragt verfügbare Telefonnummern ab. + + Returns: + dict mit available_count und phones Liste + """ + return self._make_request("GET", "/phone/available") + + def health_check(self) -> bool: + """ + Prüft Erreichbarkeit des Servers. 
+ + Returns: + True wenn Server erreichbar und gesund + """ + try: + # Health-Endpoint braucht keinen API-Key + response = requests.get( + f"{self.server_url}/api/v1/health", + timeout=5 + ) + data = response.json() + return data.get("status") in ("healthy", "degraded") + except Exception: + return False + + +# Singleton-Pattern für globale Instanz +_verification_client: Optional[VerificationClient] = None + + +def get_verification_client() -> Optional[VerificationClient]: + """Gibt die globale VerificationClient-Instanz zurück.""" + return _verification_client + + +def init_verification_client(server_url: str, api_key: str) -> VerificationClient: + """ + Initialisiert die globale VerificationClient-Instanz. + + Sollte beim App-Start aufgerufen werden. + """ + global _verification_client + _verification_client = VerificationClient(server_url, api_key) + return _verification_client +``` + +### 7.2 Server-Konfigurationsdatei (NEU) + +Erstelle `config/server_config.json`: + +```json +{ + "verification_server": { + "url": "https://verify.example.com", + "api_key": "", + "timeout_email": 120, + "timeout_sms": 180, + "enabled": true + } +} +``` + +### 7.3 Zu ändernde Dateien im Client + +| Datei | Änderung | +|-------|----------| +| `social_networks/base_automation.py` | `verification_client` Parameter hinzufügen | +| `social_networks/instagram/instagram_verification.py` | API-Calls statt direktem EmailHandler | +| `social_networks/facebook/facebook_verification.py` | API-Calls statt direktem EmailHandler | +| `social_networks/tiktok/tiktok_verification.py` | SMS via API | +| `controllers/platform_controllers/base_worker_thread.py` | VerificationClient initialisieren | +| `utils/email_handler.py` | Deprecation-Warning, später entfernen | +| `config/server_config.json` | NEU: Server-Konfiguration | +| `utils/verification_client.py` | NEU: Server-Client | + +### 7.4 Beispiel-Integration für Instagram + +```python +# social_networks/instagram/instagram_verification.py (GEÄNDERT) + +class InstagramVerification: + def __init__(self, automation): + self.automation = automation + # NEU: VerificationClient statt EmailHandler + self.verification_client = automation.verification_client + + def wait_for_email_code(self, email: str, timeout: int = 120) -> Optional[str]: + """Wartet auf Email-Verifikationscode via Server.""" + + if not self.verification_client: + # Fallback auf alten EmailHandler (Übergangsphase) + return self._legacy_wait_for_code(email, timeout) + + try: + # Request wurde bereits beim Account-Erstellen gemacht + # Hier nur noch Polling + request_id = self.automation.current_verification_request_id + + if not request_id: + logger.error("Keine Verification-Request-ID vorhanden") + return None + + code = self.verification_client.poll_for_code( + request_id=request_id, + verification_type="email", + timeout=timeout, + callback=lambda elapsed, status: logger.debug(f"Polling: {elapsed}s, Status: {status}") + ) + + return code + + except Exception as e: + logger.error(f"Fehler beim Warten auf Email-Code: {e}") + return None +``` + +--- + +## 8. 
Testbeispiele + +### 8.1 Curl-Befehle + +```bash +# Health-Check +curl -X GET https://verify.example.com/api/v1/health + +# Email anfordern +curl -X POST https://verify.example.com/api/v1/verification/email/request \ + -H "X-API-Key: your-api-key" \ + -H "Content-Type: application/json" \ + -d '{"platform": "instagram"}' + +# Email-Code abfragen +curl -X GET https://verify.example.com/api/v1/verification/email/code/550e8400-e29b-41d4-a716-446655440000 \ + -H "X-API-Key: your-api-key" + +# SMS anfordern +curl -X POST https://verify.example.com/api/v1/verification/sms/request \ + -H "X-API-Key: your-api-key" \ + -H "Content-Type: application/json" \ + -d '{"platform": "instagram"}' + +# SMS-Code abfragen +curl -X GET https://verify.example.com/api/v1/verification/sms/code/550e8400-e29b-41d4-a716-446655440000 \ + -H "X-API-Key: your-api-key" + +# Verfügbare Telefonnummern +curl -X GET https://verify.example.com/api/v1/phone/available \ + -H "X-API-Key: your-api-key" + +# Webhook simulieren (Router) +curl -X POST https://verify.example.com/api/v1/webhook/rutx11/sms \ + -H "X-Router-Token: router-token-abc" \ + -H "Content-Type: application/json" \ + -d '{ + "sender": "Instagram", + "message": "123456 ist dein Instagram-Bestätigungscode", + "timestamp": "2026-01-17T10:30:00Z", + "sim_slot": 1 + }' +``` + +### 8.2 Python-Integrations-Test + +```python +# tests/test_verification_integration.py + +import pytest +from utils.verification_client import VerificationClient, RateLimitError + +@pytest.fixture +def client(): + return VerificationClient( + server_url="https://verify.example.com", + api_key="test-api-key" + ) + +def test_health_check(client): + """Server sollte erreichbar sein.""" + assert client.health_check() is True + +def test_email_request(client): + """Email-Request sollte funktionieren.""" + response = client.request_email("instagram") + + assert response.request_id is not None + assert response.email_address is not None + assert "@" in response.email_address + +def test_sms_request(client): + """SMS-Request sollte funktionieren.""" + response = client.request_sms("instagram") + + assert response.request_id is not None + assert response.phone_number is not None + assert response.phone_number.startswith("+") + +def test_rate_limit(client): + """Rate-Limit sollte greifen.""" + # Viele Requests in kurzer Zeit + for _ in range(100): + try: + client.request_email("instagram") + except RateLimitError as e: + assert e.retry_after > 0 + return + + pytest.fail("Rate-Limit wurde nicht erreicht") + +def test_full_email_flow(client, mock_imap_server): + """Vollständiger Email-Verifikationsflow.""" + # 1. Email anfordern + response = client.request_email("instagram") + + # 2. Simuliere eingehende Email + mock_imap_server.send_verification_email( + to=response.email_address, + code="123456", + platform="instagram" + ) + + # 3. Code abrufen + code = client.poll_for_code( + request_id=response.request_id, + verification_type="email", + timeout=10 + ) + + assert code == "123456" +``` + +--- + +## 9. Deployment + +### 9.1 Docker Compose + +```yaml +# docker-compose.yml +version: '3.8' + +services: + api: + build: + context: . 
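+      # build context is the project root; "Dockerfile" below refers to the file shown in section 9.2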
+ dockerfile: Dockerfile + ports: + - "8000:8000" + environment: + - DATABASE_URL=postgresql://verify:${DB_PASSWORD}@db:5432/verification + - REDIS_URL=redis://redis:6379 + - SECRET_KEY=${SECRET_KEY} + - ENCRYPTION_KEY=${ENCRYPTION_KEY} + - LOG_LEVEL=INFO + depends_on: + db: + condition: service_healthy + redis: + condition: service_started + restart: unless-stopped + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/api/v1/health"] + interval: 30s + timeout: 10s + retries: 3 + + db: + image: postgres:15-alpine + environment: + - POSTGRES_DB=verification + - POSTGRES_USER=verify + - POSTGRES_PASSWORD=${DB_PASSWORD} + volumes: + - pgdata:/var/lib/postgresql/data + - ./init.sql:/docker-entrypoint-initdb.d/init.sql + healthcheck: + test: ["CMD-SHELL", "pg_isready -U verify -d verification"] + interval: 10s + timeout: 5s + retries: 5 + restart: unless-stopped + + redis: + image: redis:7-alpine + volumes: + - redisdata:/data + restart: unless-stopped + + celery-worker: + build: . + command: celery -A app.worker worker -l info -Q email_polling + environment: + - DATABASE_URL=postgresql://verify:${DB_PASSWORD}@db:5432/verification + - REDIS_URL=redis://redis:6379 + - ENCRYPTION_KEY=${ENCRYPTION_KEY} + depends_on: + - db + - redis + restart: unless-stopped + + celery-beat: + build: . + command: celery -A app.worker beat -l info + environment: + - DATABASE_URL=postgresql://verify:${DB_PASSWORD}@db:5432/verification + - REDIS_URL=redis://redis:6379 + depends_on: + - db + - redis + restart: unless-stopped + +volumes: + pgdata: + redisdata: +``` + +### 9.2 Dockerfile + +```dockerfile +# Dockerfile +FROM python:3.11-slim + +WORKDIR /app + +# System-Dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Python-Dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# App-Code +COPY app/ ./app/ + +# Non-root User +RUN useradd -m appuser && chown -R appuser:appuser /app +USER appuser + +# Health-Check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8000/api/v1/health || exit 1 + +# Start +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] +``` + +### 9.3 Umgebungsvariablen (.env.example) + +```bash +# .env.example + +# Datenbank +DB_PASSWORD=your-secure-password-here + +# Sicherheit +SECRET_KEY=your-secret-key-for-jwt-etc +ENCRYPTION_KEY=your-32-byte-encryption-key + +# Optional: Externe Services +SENTRY_DSN=https://... + +# Logging +LOG_LEVEL=INFO + +# IMAP (falls zentral konfiguriert) +# IMAP_DEFAULT_SERVER=imap.example.com +# IMAP_DEFAULT_PORT=993 +``` + +### 9.4 Sicherheitsanforderungen + +| Anforderung | Implementierung | +|-------------|-----------------| +| HTTPS-Only | Nginx/Traefik Reverse Proxy mit Let's Encrypt | +| API-Key-Hashing | bcrypt mit Cost Factor 12 | +| IMAP-Passwörter | AES-256-GCM verschlüsselt in DB | +| Rate-Limiting | Per-Client basierend auf Tier | +| IP-Whitelist (Webhooks) | Optional: Nur Router-IPs für /webhook/* | +| Audit-Logging | Alle sensitiven Operationen loggen | +| Request-Expiration | Automatische Bereinigung alter Requests | + +--- + +## 10. Implementierungsreihenfolge + +### Phase 1: Basis-Setup (MVP) +1. FastAPI-Projekt aufsetzen +2. PostgreSQL-Schema deployen +3. Health-Check Endpoint +4. Basis-Auth (API-Key-Validierung) + +### Phase 2: Email-Service +1. IMAP-Connection-Pool +2. Email-Polling-Service +3. Code-Extraktion (Regex) +4. 
`/verification/email/*` Endpoints +5. Background-Task für Polling (Celery) + +### Phase 3: SMS-Webhook +1. `/webhook/rutx11/sms` Endpoint +2. Router-Token-Validierung +3. SMS-Speicherung +4. Request-Matching-Logik +5. `/verification/sms/*` Endpoints + +### Phase 4: Phone-Rotation +1. PhoneRotationService +2. Cooldown-Management +3. Platform-Aware Auswahl +4. `/phone/available` Endpoint + +### Phase 5: Rate-Limiting & Monitoring +1. Rate-Limit-Tracking +2. Tier-basierte Limits +3. Prometheus-Metriken +4. Logging-Aggregation +5. Health-Check erweitern + +--- + +## Appendix A: Projektstruktur (Backend) + +``` +accountforger-server/ +├── app/ +│ ├── __init__.py +│ ├── main.py # FastAPI Entry Point +│ ├── config.py # Settings/Config +│ ├── dependencies.py # FastAPI Dependencies +│ │ +│ ├── api/ +│ │ ├── __init__.py +│ │ └── v1/ +│ │ ├── __init__.py +│ │ ├── router.py # API Router +│ │ ├── verification.py # Verifikations-Endpoints +│ │ ├── webhooks.py # RUTX11 Webhook +│ │ ├── phone.py # Phone-Availability +│ │ └── health.py # Health-Check +│ │ +│ ├── services/ +│ │ ├── __init__.py +│ │ ├── email_service.py # IMAP-Polling +│ │ ├── sms_service.py # Webhook-Verarbeitung +│ │ ├── phone_rotation.py # Nummern-Auswahl +│ │ ├── auth_service.py # API-Key-Validierung +│ │ └── encryption.py # AES für IMAP-Passwörter +│ │ +│ ├── models/ +│ │ ├── __init__.py +│ │ ├── client.py # Client Model +│ │ ├── router.py # Router Model +│ │ ├── phone_number.py # PhoneNumber Model +│ │ ├── verification.py # VerificationRequest Model +│ │ └── sms_message.py # SMSMessage Model +│ │ +│ ├── schemas/ +│ │ ├── __init__.py +│ │ ├── verification.py # Pydantic Schemas +│ │ └── webhook.py # Webhook Schemas +│ │ +│ ├── db/ +│ │ ├── __init__.py +│ │ ├── database.py # SQLAlchemy Setup +│ │ └── migrations/ # Alembic Migrations +│ │ +│ └── worker/ +│ ├── __init__.py +│ ├── celery.py # Celery App +│ └── tasks.py # Background Tasks +│ +├── tests/ +│ ├── __init__.py +│ ├── conftest.py +│ ├── test_email_service.py +│ ├── test_sms_service.py +│ └── test_api.py +│ +├── docker-compose.yml +├── Dockerfile +├── requirements.txt +├── alembic.ini +├── .env.example +└── README.md +``` + +--- + +## Appendix B: requirements.txt + +``` +# Web Framework +fastapi==0.109.0 +uvicorn[standard]==0.27.0 +python-multipart==0.0.6 + +# Database +sqlalchemy==2.0.25 +asyncpg==0.29.0 +alembic==1.13.1 + +# Redis & Celery +redis==5.0.1 +celery==5.3.6 + +# IMAP +aioimaplib==1.0.1 + +# Security +bcrypt==4.1.2 +cryptography==41.0.7 +python-jose[cryptography]==3.3.0 + +# HTTP Client (für Health-Checks) +httpx==0.26.0 + +# Validation +pydantic==2.5.3 +pydantic-settings==2.1.0 +email-validator==2.1.0 + +# Utilities +python-dateutil==2.8.2 + +# Testing +pytest==7.4.4 +pytest-asyncio==0.23.3 + +# Monitoring (optional) +prometheus-client==0.19.0 +sentry-sdk[fastapi]==1.39.1 +``` + +--- + +*Dokument erstellt für Backend-Implementierung. 
Version 1.0* diff --git a/docs/production-architecture.md b/docs/production-architecture.md new file mode 100644 index 0000000..0b81b47 --- /dev/null +++ b/docs/production-architecture.md @@ -0,0 +1,503 @@ +# AccountForger - Produktionsarchitektur + +## Konfiguration + +| Aspekt | Entscheidung | +|--------|--------------| +| **Hosting** | Eigener VPS/Root-Server (Docker Compose) | +| **Multi-Tenant** | Ja - mehrere Clients mit eigenen API-Keys | +| **eSIM-Strategie** | Pool mit Rotation (mehrere Nummern pro Kunde) | +| **Router-Standort** | Beim Kunden (dezentral) | +| **eSIM-Verwaltung** | Zentral durch Anbieter | + +--- + +## Business-Modell + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ VERKAUF AN KUNDEN │ +│ │ +│ Paket beinhaltet: │ +│ ├── Software-Lizenz (AccountForger Desktop) │ +│ ├── Vorkonfigurierter RUTX11 Router │ +│ │ ├── Webhook bereits eingerichtet │ +│ │ ├── Auth-Token basierend auf Lizenz │ +│ │ └── eSIMs bereits eingelegt │ +│ └── Telefonnummern im System registriert │ +│ │ +│ Kunde muss nur: │ +│ ├── Router mit Strom + Internet verbinden │ +│ └── Software installieren + Lizenz aktivieren │ +└─────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Übersicht + +Dezentrale Router beim Kunden, zentraler Server für Verifikation. + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ KUNDE A │ +│ ┌─────────────────┐ ┌──────────────────┐ │ +│ │ Desktop Client │ │ RUTX11 Router │ │ +│ │ (Lizenz: A123) │ │ (Token: A123) │ │ +│ └────────┬────────┘ └────────┬─────────┘ │ +└───────────┼────────────────────────────────┼────────────────────┘ + │ │ +┌───────────┼────────────────────────────────┼────────────────────┐ +│ KUNDE B │ +│ ┌─────────────────┐ ┌──────────────────┐ │ +│ │ Desktop Client │ │ RUTX11 Router │ │ +│ │ (Lizenz: B456) │ │ (Token: B456) │ │ +│ └────────┬────────┘ └────────┬─────────┘ │ +└───────────┼────────────────────────────────┼────────────────────┘ + │ │ + │ HTTPS (API) │ HTTPS (Webhook) + │ │ + ▼ ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ ZENTRALER SERVER (VPS) │ +│ │ +│ ┌─────────────┐ ┌─────────────┐ ┌─────────────────────────┐ │ +│ │ Email │ │ SMS │ │ Client/Router │ │ +│ │ Service │ │ Service │ │ Registry │ │ +│ │ │ │ │ │ │ │ +│ │ IMAP Polling│ │ Webhook │ │ Lizenz → Router → Tel. │ │ +│ └─────────────┘ │ Empfänger │ └─────────────────────────┘ │ +│ └─────────────┘ │ +│ │ +│ PostgreSQL FastAPI │ +└─────────────────────────────────────────────────────────────────┘ +``` + +--- + +## 1. 
Server-Komponenten + +### Technologie-Stack +| Komponente | Technologie | +|------------|-------------| +| Framework | FastAPI (Python) | +| Datenbank | PostgreSQL | +| ORM | SQLAlchemy 2.0 | +| Background Jobs | Celery + Redis | +| Cache | Redis | +| Auth | API-Keys (JWT optional) | + +### API-Endpunkte + +| Endpunkt | Methode | Beschreibung | +|----------|---------|--------------| +| `/api/v1/verification/email/request` | POST | Email-Adresse anfordern | +| `/api/v1/verification/email/code/{id}` | GET | Email-Code abfragen | +| `/api/v1/verification/sms/request` | POST | Telefonnummer anfordern | +| `/api/v1/verification/sms/code/{id}` | GET | SMS-Code abfragen | +| `/api/v1/webhook/rutx11/sms` | POST | SMS-Webhook vom Router | +| `/api/v1/phone/available` | GET | Verfügbare Nummern | + +### Datenbank-Schema + +```sql +-- Clients (Lizenznehmer) +CREATE TABLE clients ( + id UUID PRIMARY KEY, + license_key VARCHAR(50) UNIQUE NOT NULL, + name VARCHAR(255), + api_key_hash VARCHAR(255) NOT NULL, + tier VARCHAR(20) DEFAULT 'standard', + is_active BOOLEAN DEFAULT true, + created_at TIMESTAMP DEFAULT NOW() +); + +-- Router (dezentral beim Kunden) +CREATE TABLE routers ( + id UUID PRIMARY KEY, + client_id UUID NOT NULL REFERENCES clients(id), + router_token VARCHAR(100) UNIQUE NOT NULL, -- = Lizenzschlüssel oder abgeleitet + model VARCHAR(50) DEFAULT 'RUTX11', + serial_number VARCHAR(100), + is_online BOOLEAN DEFAULT false, + last_seen_at TIMESTAMP, + created_at TIMESTAMP DEFAULT NOW() +); + +-- Telefonnummern (pro Router/Kunde) +CREATE TABLE phone_numbers ( + id UUID PRIMARY KEY, + router_id UUID NOT NULL REFERENCES routers(id), + phone_number VARCHAR(20) UNIQUE NOT NULL, + esim_slot INTEGER NOT NULL, -- 1 oder 2 + carrier VARCHAR(100), + is_active BOOLEAN DEFAULT true, + cooldown_until TIMESTAMP, + usage_count INTEGER DEFAULT 0, + last_used_at TIMESTAMP +); + +-- Verifikations-Anfragen +CREATE TABLE verification_requests ( + id UUID PRIMARY KEY, + client_id UUID NOT NULL REFERENCES clients(id), + platform VARCHAR(50) NOT NULL, + verification_type VARCHAR(20) NOT NULL, -- 'email' / 'sms' + email_address VARCHAR(255), + phone_number_id UUID REFERENCES phone_numbers(id), + status VARCHAR(20) DEFAULT 'pending', + verification_code VARCHAR(20), + created_at TIMESTAMP DEFAULT NOW(), + expires_at TIMESTAMP +); + +-- Email-Konten (zentral Server-verwaltet) +CREATE TABLE email_accounts ( + id UUID PRIMARY KEY, + email_address VARCHAR(255) UNIQUE NOT NULL, + imap_server VARCHAR(255) NOT NULL, + imap_port INTEGER DEFAULT 993, + password_encrypted BYTEA NOT NULL, + domain VARCHAR(255) NOT NULL, + is_active BOOLEAN DEFAULT true +); + +-- Empfangene SMS +CREATE TABLE sms_messages ( + id UUID PRIMARY KEY, + router_id UUID NOT NULL REFERENCES routers(id), + phone_number VARCHAR(20) NOT NULL, + sender VARCHAR(50), + message_body TEXT NOT NULL, + received_at TIMESTAMP DEFAULT NOW(), + processed BOOLEAN DEFAULT false, + matched_request_id UUID REFERENCES verification_requests(id) +); +``` + +### Entity-Relationship + +``` +clients (1) ──────< routers (1) ──────< phone_numbers (n) + │ │ + │ └──────< sms_messages (n) + │ + └──────< verification_requests (n) +``` + +### Server-Projektstruktur + +``` +accountforger-server/ +├── app/ +│ ├── main.py # FastAPI Entry +│ ├── config.py # Konfiguration +│ ├── api/v1/ +│ │ ├── verification.py # Verifikations-Endpunkte +│ │ ├── webhooks.py # RUTX11 Webhook +│ │ └── auth.py # Authentifizierung +│ ├── services/ +│ │ ├── email_service.py # IMAP-Polling +│ │ ├── sms_service.py # 
SMS-Verarbeitung +│ │ └── phone_rotation.py # eSIM-Rotation +│ └── infrastructure/ +│ ├── database.py +│ └── imap_client.py +├── docker-compose.yml +└── requirements.txt +``` + +--- + +## 2. RUTX11 Integration + +### SMS-Forwarding Konfiguration + +1. **Im RUTX11 WebUI:** + - Services → Mobile Utilities → SMS Gateway → SMS Forwarding + - Neue Regel: HTTP POST an `https://server.domain/api/v1/webhook/rutx11/sms` + +2. **Webhook-Payload vom Router:** +```json +{ + "sender": "+49123456789", + "message": "123456 is your verification code", + "timestamp": "2026-01-17T10:30:00Z", + "sim_slot": 1 +} +``` + +### Dezentrale Router-Architektur + +``` + ┌─────────────────────────────────┐ + │ Verification Server │ + │ │ + │ ┌─────────────────────────┐ │ + │ │ Router Registry │ │ + │ │ │ │ + │ │ - Client ↔ Router Map │ │ + │ │ - Telefonnummern/Client│ │ + │ │ - Health Monitoring │ │ + │ └───────────┬─────────────┘ │ + └──────────────┼─────────────────┘ + │ + ┌────────────────────┼────────────────────┐ + │ │ │ + ▼ ▼ ▼ + ┌────────────────┐ ┌────────────────┐ ┌────────────────┐ + │ Kunde A Router │ │ Kunde B Router │ │ Kunde C Router │ + │ Token: A123 │ │ Token: B456 │ │ Token: C789 │ + │ Tel: +49... │ │ Tel: +49... │ │ Tel: +49... │ + └────────────────┘ └────────────────┘ └────────────────┘ + │ │ │ + └────────────────────┴────────────────────┘ + │ + Webhooks an zentralen Server +``` + +### Router-Vorkonfiguration (vor Versand) + +```bash +# RUTX11 SMS Forwarding Konfiguration +# Services → Mobile Utilities → SMS Gateway → SMS Forwarding + +Webhook URL: https://verify.domain.com/api/v1/webhook/sms +HTTP Method: POST +Headers: X-Router-Token: {LIZENZ_TOKEN} +Payload Format: JSON +``` + +### eSIM-Rotationsstrategie + +| Strategie | Beschreibung | +|-----------|--------------| +| Cooldown | 30 Min. Pause nach Nutzung pro Nummer | +| Round Robin | Abwechselnd durch alle verfügbaren Nummern | +| Platform-Aware | Tracken welche Nummer für welche Platform verwendet wurde | +| Load Balancing | Gleichmäßige Verteilung über alle Router/SIM-Banks | +| Health Check | Automatische Deaktivierung bei Problemen | + +--- + +## 3. Client-Änderungen + +### Was bleibt im Client +- PyQt5 UI +- Playwright Browser-Automation +- Fingerprint-Management +- Lokale SQLite-Datenbank + +### Was entfernt wird +- `utils/email_handler.py` → ersetzt durch API-Calls +- `config/email_config.json` → keine Credentials mehr im Client +- Hardcoded `"123456"` SMS-Placeholder + +### Neuer VerificationClient + +**Datei:** `utils/verification_client.py` + +```python +class VerificationClient: + def __init__(self, server_url: str, api_key: str): + self.server_url = server_url + self.api_key = api_key + + def request_email(self, platform: str) -> dict: + """Fordert Email-Adresse vom Server an.""" + response = requests.post( + f"{self.server_url}/api/v1/verification/email/request", + json={"platform": platform}, + headers={"X-API-Key": self.api_key} + ) + return response.json() # {"request_id": "...", "email_address": "..."} + + def request_sms(self, platform: str) -> dict: + """Fordert Telefonnummer vom Server an.""" + response = requests.post( + f"{self.server_url}/api/v1/verification/sms/request", + json={"platform": platform}, + headers={"X-API-Key": self.api_key} + ) + return response.json() # {"request_id": "...", "phone_number": "..."} + + def poll_for_code(self, request_id: str, type: str, timeout: int = 120) -> str: + """Pollt Server bis Code empfangen wurde.""" + # ... 
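+        # Minimal sketch of the polling loop (assumption – the full client is
+        # specified in section 7 of docs/overview.md; endpoint and field names
+        # below follow that spec and the curl examples):
+        #
+        #   deadline = time.time() + timeout
+        #   while time.time() < deadline:
+        #       data = requests.get(
+        #           f"{self.server_url}/api/v1/verification/{type}/code/{request_id}",
+        #           headers={"X-API-Key": self.api_key},
+        #           timeout=10,
+        #       ).json()
+        #       if data.get("status") == "received" and data.get("code"):
+        #           return data["code"]
+        #       if data.get("status") in ("expired", "failed"):
+        #           return None
+        #       time.sleep(3)  # the overview spec grows the poll interval by x1.2 per iteration
+        #   return None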
+``` + +### Zu ändernde Dateien + +| Datei | Änderung | +|-------|----------| +| `social_networks/base_automation.py` | `verification_client` Parameter hinzufügen | +| `social_networks/*/verification.py` | API-Calls statt direktem IMAP | +| `controllers/platform_controllers/base_worker_thread.py` | VerificationClient initialisieren | +| `config/server_config.json` (NEU) | Server-URL und API-Key | + +--- + +## 4. Multi-Tenant Architektur + +### Client-Verwaltung + +```sql +-- Clients (Tenants) +CREATE TABLE clients ( + id UUID PRIMARY KEY, + name VARCHAR(255) NOT NULL, + license_key VARCHAR(50) UNIQUE NOT NULL, + api_key_hash VARCHAR(255) NOT NULL, + is_active BOOLEAN DEFAULT true, + tier VARCHAR(20) DEFAULT 'standard', -- standard, premium, enterprise + created_at TIMESTAMP DEFAULT NOW() +); + +-- Rate Limits pro Tier +CREATE TABLE tier_limits ( + tier VARCHAR(20) PRIMARY KEY, + email_requests_per_hour INTEGER, + sms_requests_per_hour INTEGER, + max_concurrent_verifications INTEGER +); + +INSERT INTO tier_limits VALUES + ('standard', 50, 20, 5), + ('premium', 200, 100, 20), + ('enterprise', 1000, 500, 100); +``` + +### Tenant-Isolation + +| Aspekt | Implementierung | +|--------|-----------------| +| API-Keys | Eindeutig pro Client | +| Rate Limits | Per-Tenant basierend auf Tier | +| Logging | Client-ID in allen Logs | +| Ressourcen | Shared Pool mit Fair-Use | +| Abrechnung | Usage-Tracking pro Client | + +--- + +## 5. Sicherheit + +### Authentifizierung +- API-Key pro Client-Installation +- Gehashed in Server-DB (bcrypt) +- Gebunden an Lizenzschlüssel + +### Credentials-Management +| Credential | Speicherort | Schutz | +|------------|-------------|--------| +| IMAP-Passwörter | Server DB | AES-256 verschlüsselt | +| API-Keys | Server DB | bcrypt Hash | +| RUTX11 Webhook | Server .env | Umgebungsvariable | +| Client API-Key | Client Config | Verschlüsselt lokal | + +### Netzwerk +- HTTPS-Only (TLS 1.3) +- IP-Whitelist für RUTX11 Webhook +- Rate Limiting (10 Email-Requests/Min, 5 SMS-Requests/Min) + +--- + +## 6. Deployment + +### Docker Compose (empfohlen) + +```yaml +version: '3.8' +services: + api: + build: . + ports: + - "8000:8000" + environment: + - DATABASE_URL=postgresql://user:pass@db:5432/verify + - REDIS_URL=redis://redis:6379 + depends_on: + - db + - redis + + db: + image: postgres:15 + volumes: + - pgdata:/var/lib/postgresql/data + + redis: + image: redis:7-alpine + + celery: + build: . + command: celery -A app.worker worker -l info +``` + +--- + +## 7. Verifikation / Testing + +1. **Server starten:** `docker-compose up` +2. **Email-Test:** + - POST `/api/v1/verification/email/request` mit `{"platform": "instagram"}` + - GET `/api/v1/verification/email/code/{id}` → Code erhalten +3. **SMS-Test:** + - Webhook simulieren: POST an `/api/v1/webhook/rutx11/sms` + - Prüfen ob SMS in DB landet und Request gematcht wird +4. **Client-Integration:** + - `VerificationClient` instanziieren + - Account-Erstellung mit Server-Verifikation testen + +--- + +## 8. Onboarding-Prozess (Neuer Kunde) + +### Admin-Workflow + +``` +1. Neue Lizenz generieren + └── license_key: "AF-2026-XXXXX" + +2. Client in DB anlegen + └── INSERT INTO clients (license_key, name, api_key_hash, tier) + +3. Router vorkonfigurieren + ├── RUTX11 Webhook URL setzen + ├── X-Router-Token Header = license_key + └── eSIMs einlegen + +4. Telefonnummern registrieren + ├── INSERT INTO routers (client_id, router_token) + └── INSERT INTO phone_numbers (router_id, phone_number, esim_slot) + +5. 
Paket versenden + ├── Router (Plug & Play ready) + └── Software-Download Link + Lizenzschlüssel +``` + +### Kunde-Workflow + +``` +1. Router auspacken und anschließen + └── Strom + Ethernet/WLAN + +2. Software installieren + └── AccountForger.exe + +3. Lizenz aktivieren + └── Lizenzschlüssel eingeben → Server validiert + +4. Fertig! + └── SMS-Verifikation funktioniert automatisch +``` + +--- + +## 9. Implementierungsreihenfolge + +1. **Server Basis** - FastAPI Setup, DB-Schema, Email-Service +2. **Router Registry** - Client/Router/Telefonnummer Verwaltung +3. **RUTX11 Webhook** - SMS empfangen, Router-Token validieren, Request matchen +4. **Client Integration** - VerificationClient implementieren +5. **Plattform-Migration** - Alle verification.py Dateien umstellen +6. **Admin-Panel** - Kunden/Router/Nummern verwalten (optional) +7. **Security & Monitoring** - Rate Limiting, Logging, Health Checks diff --git a/install_requirements.py b/install_requirements.py deleted file mode 100644 index 34d3d03..0000000 --- a/install_requirements.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python3 -""" -Install script for AccountForger dependencies. -Handles PyQt5 installation across different platforms. -""" - -import sys -import subprocess -import platform - -def install_package(package): - """Install a package using pip""" - try: - subprocess.check_call([sys.executable, "-m", "pip", "install", package]) - return True - except subprocess.CalledProcessError: - return False - -def install_pyqt5(): - """Install PyQt5 with platform-specific handling""" - print("Installing PyQt5...") - - # Try different PyQt5 variants - packages_to_try = [ - "PyQt5", - "PyQt5-Qt5", - "PyQt5-sip" - ] - - for package in packages_to_try: - print(f"Trying to install {package}...") - if install_package(package): - print(f"✅ Successfully installed {package}") - return True - else: - print(f"❌ Failed to install {package}") - - return False - -def check_pyqt5(): - """Check if PyQt5 is available""" - try: - import PyQt5 - print("✅ PyQt5 is already installed") - return True - except ImportError: - print("❌ PyQt5 not found") - return False - -def main(): - """Main installation function""" - print("AccountForger - Dependency Installer") - print("=" * 40) - - # Check Python version - python_version = sys.version_info - if python_version < (3, 7): - print(f"❌ Python 3.7+ required, found {python_version.major}.{python_version.minor}") - return False - - print(f"✅ Python {python_version.major}.{python_version.minor} detected") - - # Check/install PyQt5 - if not check_pyqt5(): - print("\nInstalling PyQt5...") - if not install_pyqt5(): - print("\n⚠️ PyQt5 installation failed!") - print("Manual installation options:") - print("1. pip install PyQt5") - print("2. conda install pyqt (if using Anaconda)") - print("3. 
Use system package manager (Linux)") - print("\nAccountForger will still work with limited functionality") - return False - - # Install other requirements - other_packages = [ - "requests", - "selenium", - "playwright", - "beautifulsoup4", - "lxml" - ] - - print("\nInstalling other dependencies...") - failed_packages = [] - - for package in other_packages: - print(f"Installing {package}...") - if install_package(package): - print(f"✅ {package} installed") - else: - print(f"❌ {package} failed") - failed_packages.append(package) - - if failed_packages: - print(f"\n⚠️ Some packages failed to install: {failed_packages}") - print("Try installing them manually with:") - for package in failed_packages: - print(f" pip install {package}") - - print("\n🚀 Installation complete!") - print("You can now run: python main.py") - - return True - -if __name__ == "__main__": - success = main() - sys.exit(0 if success else 1) \ No newline at end of file diff --git a/resources/icons/aegissight-logo-dark.svg b/resources/icons/aegissight-logo-dark.svg new file mode 100644 index 0000000..c1237f2 --- /dev/null +++ b/resources/icons/aegissight-logo-dark.svg @@ -0,0 +1,20 @@ + + + + + + + + + AegisSight + + + + + + + + + + + diff --git a/resources/icons/aegissight-logo.svg b/resources/icons/aegissight-logo.svg new file mode 100644 index 0000000..d4769fd --- /dev/null +++ b/resources/icons/aegissight-logo.svg @@ -0,0 +1,20 @@ + + + + + + + + + AegisSight + + + + + + + + + + + diff --git a/resources/icons/intelsight-dark.svg b/resources/icons/intelsight-dark.svg deleted file mode 100644 index c61d83e..0000000 --- a/resources/icons/intelsight-dark.svg +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - IntelSight - - \ No newline at end of file diff --git a/resources/icons/intelsight-logo.svg b/resources/icons/intelsight-logo.svg deleted file mode 100644 index b729e91..0000000 --- a/resources/icons/intelsight-logo.svg +++ /dev/null @@ -1,53 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - IntelSight - - \ No newline at end of file diff --git a/resources/themes/dark.qss b/resources/themes/dark.qss index aa701f2..4b1d7dc 100644 --- a/resources/themes/dark.qss +++ b/resources/themes/dark.qss @@ -1,6 +1,6 @@ /* * AccountForger Dark Mode Theme - * Based on IntelSight Corporate Design System + * Based on AegisSight Corporate Design System * Color Palette from CORPORATE_DESIGN_DARK_MODE.md */ diff --git a/resources/themes/light.qss b/resources/themes/light.qss index 5b7cc5a..4b54699 100644 --- a/resources/themes/light.qss +++ b/resources/themes/light.qss @@ -1,6 +1,6 @@ /* * AccountForger Light Mode Theme - * Based on IntelSight Corporate Design System + * Based on AegisSight Corporate Design System * Adapted from Dark Mode guidelines for Light Mode usage */ diff --git a/social_networks/facebook/facebook_ui_helper.py b/social_networks/facebook/facebook_ui_helper.py index 506341c..a1137fa 100644 --- a/social_networks/facebook/facebook_ui_helper.py +++ b/social_networks/facebook/facebook_ui_helper.py @@ -327,15 +327,140 @@ class FacebookUIHelper: def check_element_exists(self, selector: str, timeout: int = 1000) -> bool: """ Prüft ob ein Element existiert. 
- + Args: selector: CSS-Selektor timeout: Maximale Wartezeit in ms - + Returns: bool: True wenn Element existiert """ if not self._ensure_browser(): return False - - return self.automation.browser.is_element_visible(selector, timeout=timeout) \ No newline at end of file + + return self.automation.browser.is_element_visible(selector, timeout=timeout) + + # ========================================================================== + # ANTI-DETECTION: Tab-Navigation Methoden + # ========================================================================== + + def navigate_to_next_field(self, use_tab: bool = None) -> bool: + """ + Navigiert zum nächsten Feld, entweder per Tab oder Maus-Klick. + + Diese Methode simuliert menschliches Verhalten durch zufällige + Verwendung von Tab-Navigation (wie viele echte Benutzer es tun). + + Args: + use_tab: Explizit Tab verwenden. Wenn None, 50% Chance für Tab. + + Returns: + bool: True bei Erfolg + """ + if not self._ensure_browser(): + return False + + # Entscheide zufällig ob Tab verwendet wird (50% Chance) + if use_tab is None: + use_tab = random.random() < 0.5 + + try: + if use_tab: + logger.debug("Verwende Tab-Navigation zum nächsten Feld") + self.automation.browser.page.keyboard.press("Tab") + + # Variable Wartezeit nach Tab + time.sleep(random.uniform(0.15, 0.35)) + + # Gelegentlich Tab + Shift-Tab (versehentlich zu weit gegangen) + if random.random() < 0.08: + logger.debug("Simuliere Tab-Korrektur (zu weit)") + self.automation.browser.page.keyboard.press("Tab") + time.sleep(random.uniform(0.2, 0.4)) + self.automation.browser.page.keyboard.press("Shift+Tab") + time.sleep(random.uniform(0.15, 0.3)) + + return True + else: + logger.debug("Tab-Navigation nicht verwendet (Maus wird später genutzt)") + return False + + except Exception as e: + logger.error(f"Fehler bei Tab-Navigation: {e}") + return False + + def fill_field_with_tab_navigation(self, selector: str, value: str, + use_tab_navigation: bool = None, + error_rate: float = 0.15) -> bool: + """ + Füllt ein Feld aus mit optionaler Tab-Navigation. + + Diese Methode kombiniert Tab-Navigation mit menschenähnlichem Tippen + für ein realistischeres Verhalten. 
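+        Typos are injected with probability error_rate and corrected via
+        Backspace; occasional longer pauses simulate thinking.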
+ + Args: + selector: CSS-Selektor des Feldes + value: Einzugebender Wert + use_tab_navigation: Ob Tab verwendet werden soll (None = 50% Chance) + error_rate: Wahrscheinlichkeit für Tippfehler (10-20%) + + Returns: + bool: True bei Erfolg + """ + if not self._ensure_browser(): + return False + + try: + # Entscheide Navigationsmethode + if use_tab_navigation is None: + use_tab_navigation = random.random() < 0.5 + + if use_tab_navigation: + # Tab-Navigation verwenden + logger.debug(f"Fülle Feld {selector} mit Tab-Navigation") + self.automation.browser.page.keyboard.press("Tab") + time.sleep(random.uniform(0.15, 0.35)) + else: + # Maus-Klick auf Feld + logger.debug(f"Fülle Feld {selector} mit Maus-Klick") + if not self.automation.browser.click_element(selector): + logger.warning(f"Konnte Feld nicht anklicken: {selector}") + return False + time.sleep(random.uniform(0.2, 0.5)) + + # Anti-Detection Delay vor Eingabe + if hasattr(self.automation, 'human_behavior'): + self.automation.human_behavior.anti_detection_delay("field_focus") + + # Text mit Tippfehlern eingeben + for i, char in enumerate(value): + # Tippfehler simulieren + if random.random() < error_rate: + # Falsches Zeichen + wrong_char = random.choice('abcdefghijklmnopqrstuvwxyz0123456789') + self.automation.browser.page.keyboard.type(wrong_char) + time.sleep(random.uniform(0.1, 0.3)) + + # Korrektur mit Backspace + self.automation.browser.page.keyboard.press("Backspace") + time.sleep(random.uniform(0.08, 0.2)) + + # Korrektes Zeichen + self.automation.browser.page.keyboard.type(char) + + # Variable Verzögerung + delay_ms = random.randint(50, 150) + if char in ' .,!?': + delay_ms *= random.uniform(1.3, 2.0) + time.sleep(delay_ms / 1000) + + # Gelegentlich längere Pause (Denken) + if random.random() < 0.05: + time.sleep(random.uniform(0.3, 0.8)) + + logger.info(f"Feld {selector} erfolgreich ausgefüllt") + return True + + except Exception as e: + logger.error(f"Fehler beim Ausfüllen mit Tab-Navigation: {e}") + return False \ No newline at end of file diff --git a/social_networks/instagram/instagram_registration.py b/social_networks/instagram/instagram_registration.py index 7cb611b..26ebd4a 100644 --- a/social_networks/instagram/instagram_registration.py +++ b/social_networks/instagram/instagram_registration.py @@ -258,87 +258,199 @@ class InstagramRegistration: def _navigate_to_signup_page(self) -> bool: """ Navigiert zur Instagram-Registrierungsseite. 
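+        Logs the URL after navigation, retries once when redirected to the
+        login page and falls back to alternative selectors if the signup
+        form is not visible.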
- + Returns: bool: True bei Erfolg, False bei Fehler """ try: + logger.info(f"Navigiere zur Registrierungsseite: {InstagramSelectors.SIGNUP_URL}") + # Zur Registrierungsseite navigieren self.automation.browser.navigate_to(InstagramSelectors.SIGNUP_URL) - + # Warten, bis die Seite geladen ist + logger.debug("Warte auf Seitenladung...") self.automation.human_behavior.wait_for_page_load() - + + # Aktuelle URL loggen (für Debugging bei Weiterleitungen) + try: + current_url = self.automation.browser.page.url + logger.info(f"Aktuelle URL nach Navigation: {current_url}") + + # Prüfen ob wir umgeleitet wurden + if "login" in current_url.lower() and "signup" not in current_url.lower(): + logger.warning("Wurde zur Login-Seite umgeleitet - versuche erneut zur Registrierung zu navigieren") + self.automation.browser.navigate_to(InstagramSelectors.SIGNUP_URL) + self.automation.human_behavior.wait_for_page_load() + current_url = self.automation.browser.page.url + logger.info(f"URL nach erneutem Navigieren: {current_url}") + except Exception as url_err: + logger.debug(f"Konnte aktuelle URL nicht abrufen: {url_err}") + # SOFORT Cookie-Banner behandeln BEVOR weitere Aktionen (TIMING-FIX) logger.info("Behandle Cookie-Banner SOFORT nach Navigation für korrekte Session-Cookies") cookie_handled = self._handle_cookie_banner() if not cookie_handled: logger.warning("Cookie-Banner konnte nicht behandelt werden - Session könnte beeinträchtigt sein") - + # Kurz warten damit Cookies gesetzt werden können + logger.debug("Warte nach Cookie-Behandlung...") self.automation.human_behavior.random_delay(1.0, 2.0) - + # Screenshot erstellen self.automation._take_screenshot("signup_page") - + # Prüfen, ob Registrierungsformular sichtbar ist + logger.debug(f"Suche nach Registrierungsformular mit Selektor: {InstagramSelectors.EMAIL_PHONE_FIELD}") + if not self.automation.browser.is_element_visible(InstagramSelectors.EMAIL_PHONE_FIELD, timeout=5000): - logger.warning("Registrierungsformular nicht sichtbar") + logger.warning(f"Hauptselektor {InstagramSelectors.EMAIL_PHONE_FIELD} nicht gefunden - versuche Alternativen") + + # Alternative Selektoren versuchen + alt_selectors = [ + InstagramSelectors.ALT_EMAIL_FIELD, + "input[type='email']", + "input[type='text'][aria-label*='mail']", + "input[type='text'][aria-label*='Mail']", + "input[type='text'][placeholder*='mail']", + "input[name='email']", + "//input[contains(@aria-label, 'E-Mail') or contains(@aria-label, 'Email')]" + ] + + for alt_selector in alt_selectors: + logger.debug(f"Versuche alternativen Selektor: {alt_selector}") + if self.automation.browser.is_element_visible(alt_selector, timeout=1000): + logger.info(f"Formular mit alternativem Selektor gefunden: {alt_selector}") + return True + + # Debug: Logge sichtbare Elemente auf der Seite + try: + page_title = self.automation.browser.page.title() + logger.debug(f"Seitentitel: {page_title}") + + # Prüfe auf bekannte Blockade-Elemente + if self.automation.browser.is_element_visible("div[role='dialog']", timeout=1000): + logger.warning("Ein Dialog ist noch sichtbar - könnte Cookie-Dialog oder anderer Modal sein") + + # Screenshot vom aktuellen Zustand + self.automation._take_screenshot("signup_page_blocked") + + # Prüfe auf Fehlermeldungen + if self.automation.browser.is_element_visible("p[class*='error'], div[class*='error']", timeout=1000): + logger.warning("Fehlermeldung auf der Seite erkannt") + + # Prüfe ob wir auf einer anderen Seite sind + current_url = self.automation.browser.page.url + if "challenge" in 
current_url.lower(): + logger.error("CHECKPOINT/CHALLENGE erkannt - Instagram verlangt Verifizierung!") + elif "suspended" in current_url.lower(): + logger.error("Account wurde suspendiert oder IP blockiert!") + + except Exception as debug_err: + logger.debug(f"Debug-Informationen konnten nicht abgerufen werden: {debug_err}") + + logger.warning("Registrierungsformular nicht sichtbar - alle Selektoren fehlgeschlagen") return False - + logger.info("Erfolgreich zur Registrierungsseite navigiert und Cookies akzeptiert") return True - + except Exception as e: - logger.error(f"Fehler beim Navigieren zur Registrierungsseite: {e}") + logger.error(f"Fehler beim Navigieren zur Registrierungsseite: {e}", exc_info=True) return False def _handle_cookie_banner(self) -> bool: """ Behandelt den Cookie-Banner, falls angezeigt. Akzeptiert IMMER Cookies für vollständiges Session-Management bei der Registrierung. - + + ANTI-DETECTION: Wartet 3-8 Sekunden bevor der Cookie-Dialog geklickt wird, + um menschliches Leseverhalten zu simulieren. + Returns: bool: True wenn Banner behandelt wurde oder nicht existiert, False bei Fehler """ # Cookie-Dialog-Erkennung - if self.automation.browser.is_element_visible(InstagramSelectors.COOKIE_DIALOG, timeout=2000): - logger.info("Cookie-Banner erkannt - akzeptiere alle Cookies für Session-Management") - + logger.debug(f"Prüfe auf Cookie-Dialog mit Selektor: {InstagramSelectors.COOKIE_DIALOG}") + + if self.automation.browser.is_element_visible(InstagramSelectors.COOKIE_DIALOG, timeout=3000): + logger.info("Cookie-Banner erkannt - simuliere Lesen bevor geklickt wird") + + # ANTI-DETECTION: Lese-Pause bevor Cookie-Dialog geklickt wird (3-8 Sekunden) + # Echte Menschen lesen den Cookie-Text bevor sie klicken + logger.debug("Starte Anti-Detection Lese-Pause für Cookie-Banner...") + self.automation.human_behavior.anti_detection_delay("cookie_reading") + logger.debug("Anti-Detection Lese-Pause abgeschlossen") + + # Gelegentlich etwas scrollen um "mehr zu lesen" (30% Chance) + if random.random() < 0.3: + try: + logger.debug("Simuliere Scrollen im Cookie-Dialog...") + self.automation.browser.page.evaluate("window.scrollBy(0, 50)") + time.sleep(random.uniform(0.8, 1.5)) + self.automation.browser.page.evaluate("window.scrollBy(0, -50)") + time.sleep(random.uniform(0.3, 0.6)) + except Exception as scroll_err: + logger.debug(f"Cookie-Dialog Scroll übersprungen: {scroll_err}") + + logger.info("Klicke Cookie-Banner - akzeptiere alle Cookies für Session-Management") + # Akzeptieren-Button suchen und klicken (PRIMÄR für Registrierung) accept_success = self.automation.ui_helper.click_button_fuzzy( InstagramSelectors.get_button_texts("accept_cookies"), InstagramSelectors.COOKIE_ACCEPT_BUTTON ) - + if accept_success: logger.info("Cookie-Banner erfolgreich akzeptiert - Session-Cookies werden gespeichert") - self.automation.human_behavior.random_delay(0.5, 1.5) + + # WICHTIG: Nach Cookie-Klick warten bis Seite stabil ist + logger.debug("Warte auf Seitenstabilität nach Cookie-Akzeptierung...") + time.sleep(2.0) # Feste Wartezeit für Seiten-Reload + + # Warte auf Network-Idle + try: + self.automation.browser.page.wait_for_load_state("networkidle", timeout=10000) + logger.debug("Seite ist nach Cookie-Akzeptierung stabil (networkidle)") + except Exception as wait_err: + logger.warning(f"Timeout beim Warten auf networkidle: {wait_err}") + + self.automation.human_behavior.random_delay(1.0, 2.0) return True else: logger.warning("Konnte Cookie-Banner nicht akzeptieren, versuche alternativen 
Akzeptieren-Button") - + # Alternative Akzeptieren-Selektoren versuchen alternative_accept_selectors = [ "//button[contains(text(), 'Alle akzeptieren')]", - "//button[contains(text(), 'Accept All')]", + "//button[contains(text(), 'Alle Cookies erlauben')]", + "//button[contains(text(), 'Accept All')]", "//button[contains(text(), 'Zulassen')]", "//button[contains(text(), 'Allow All')]", "//button[contains(@aria-label, 'Accept')]", "[data-testid='accept-all-button']" ] - + for selector in alternative_accept_selectors: + logger.debug(f"Versuche alternativen Cookie-Selektor: {selector}") if self.automation.browser.is_element_visible(selector, timeout=1000): if self.automation.browser.click_element(selector): - logger.info("Cookie-Banner mit alternativem Selector akzeptiert") - self.automation.human_behavior.random_delay(0.5, 1.5) + logger.info(f"Cookie-Banner mit alternativem Selector akzeptiert: {selector}") + + # Nach Cookie-Klick warten + time.sleep(2.0) + try: + self.automation.browser.page.wait_for_load_state("networkidle", timeout=10000) + except: + pass + + self.automation.human_behavior.random_delay(1.0, 2.0) return True - + logger.error("Konnte Cookie-Banner nicht akzeptieren - Session-Management könnte beeinträchtigt sein") return False else: - logger.debug("Kein Cookie-Banner erkannt") + logger.debug("Kein Cookie-Banner erkannt - fahre fort") return True def _select_registration_method(self, method: str) -> bool: diff --git a/social_networks/instagram/instagram_ui_helper.py b/social_networks/instagram/instagram_ui_helper.py index c1e90fe..0ee9e01 100644 --- a/social_networks/instagram/instagram_ui_helper.py +++ b/social_networks/instagram/instagram_ui_helper.py @@ -5,6 +5,7 @@ Instagram-UI-Helper - Hilfsmethoden für die Interaktion mit der Instagram-UI """ import logging +import random import re import time from typing import Dict, List, Any, Optional, Tuple, Union, Callable @@ -786,38 +787,165 @@ class InstagramUIHelper: def wait_for_page_load(self, timeout: int = 30000, check_interval: int = 500) -> bool: """ Wartet, bis die Seite vollständig geladen ist. - + Args: timeout: Zeitlimit in Millisekunden check_interval: Intervall zwischen den Prüfungen in Millisekunden - + Returns: bool: True wenn die Seite geladen wurde, False bei Zeitüberschreitung """ if not self._ensure_browser(): return False - + try: # Warten auf Netzwerk-Idle self.automation.browser.page.wait_for_load_state("networkidle", timeout=timeout) - + # Zusätzlich auf das Verschwinden der Ladeindikatoren warten start_time = time.time() end_time = start_time + (timeout / 1000) - + while time.time() < end_time: if not self.is_page_loading(): # Noch eine kurze Pause für Animationen time.sleep(0.5) logger.info("Seite vollständig geladen") return True - + # Kurze Pause vor der nächsten Prüfung time.sleep(check_interval / 1000) - + logger.warning("Zeitüberschreitung beim Warten auf das Laden der Seite") return False - + except Exception as e: logger.error(f"Fehler beim Warten auf das Laden der Seite: {e}") + return False + + # ========================================================================== + # ANTI-DETECTION: Tab-Navigation Methoden + # ========================================================================== + + def navigate_to_next_field(self, use_tab: bool = None) -> bool: + """ + Navigiert zum nächsten Feld, entweder per Tab oder Maus-Klick. + + Diese Methode simuliert menschliches Verhalten durch zufällige + Verwendung von Tab-Navigation (wie viele echte Benutzer es tun). 
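+        Occasionally overshoots with an extra Tab and corrects with Shift+Tab.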
+ + Args: + use_tab: Explizit Tab verwenden. Wenn None, 50% Chance für Tab. + + Returns: + bool: True bei Erfolg + """ + if not self._ensure_browser(): + return False + + # Entscheide zufällig ob Tab verwendet wird (50% Chance) + if use_tab is None: + use_tab = random.random() < 0.5 + + try: + if use_tab: + logger.debug("Verwende Tab-Navigation zum nächsten Feld") + self.automation.browser.page.keyboard.press("Tab") + + # Variable Wartezeit nach Tab + time.sleep(random.uniform(0.15, 0.35)) + + # Gelegentlich Tab + Shift-Tab (versehentlich zu weit gegangen) + if random.random() < 0.08: + logger.debug("Simuliere Tab-Korrektur (zu weit)") + self.automation.browser.page.keyboard.press("Tab") + time.sleep(random.uniform(0.2, 0.4)) + self.automation.browser.page.keyboard.press("Shift+Tab") + time.sleep(random.uniform(0.15, 0.3)) + + return True + else: + logger.debug("Tab-Navigation nicht verwendet (Maus wird später genutzt)") + return False + + except Exception as e: + logger.error(f"Fehler bei Tab-Navigation: {e}") + return False + + def fill_field_with_tab_navigation(self, selector: str, value: str, + use_tab_navigation: bool = None, + error_rate: float = 0.15) -> bool: + """ + Füllt ein Feld aus mit optionaler Tab-Navigation. + + Diese Methode kombiniert Tab-Navigation mit menschenähnlichem Tippen + für ein realistischeres Verhalten. + + Args: + selector: CSS-Selektor des Feldes (nur für Maus-Klick verwendet) + value: Einzugebender Wert + use_tab_navigation: Ob Tab verwendet werden soll (None = 50% Chance) + error_rate: Wahrscheinlichkeit für Tippfehler (10-20%) + + Returns: + bool: True bei Erfolg + """ + if not self._ensure_browser(): + return False + + try: + import random + + # Entscheide Navigationsmethode + if use_tab_navigation is None: + use_tab_navigation = random.random() < 0.5 + + if use_tab_navigation: + # Tab-Navigation verwenden + logger.debug(f"Fülle Feld mit Tab-Navigation") + self.automation.browser.page.keyboard.press("Tab") + time.sleep(random.uniform(0.15, 0.35)) + else: + # Maus-Klick auf Feld + logger.debug(f"Fülle Feld {selector} mit Maus-Klick") + if not self.automation.browser.click_element(selector): + logger.warning(f"Konnte Feld nicht anklicken: {selector}") + return False + time.sleep(random.uniform(0.2, 0.5)) + + # Anti-Detection Delay vor Eingabe + if hasattr(self.automation, 'human_behavior') and hasattr(self.automation.human_behavior, 'anti_detection_delay'): + self.automation.human_behavior.anti_detection_delay("field_focus") + + # Text mit Tippfehlern eingeben + for i, char in enumerate(value): + # Tippfehler simulieren + if random.random() < error_rate: + # Falsches Zeichen + wrong_char = random.choice('abcdefghijklmnopqrstuvwxyz0123456789') + self.automation.browser.page.keyboard.type(wrong_char) + time.sleep(random.uniform(0.1, 0.3)) + + # Korrektur mit Backspace + self.automation.browser.page.keyboard.press("Backspace") + time.sleep(random.uniform(0.08, 0.2)) + + # Korrektes Zeichen + self.automation.browser.page.keyboard.type(char) + + # Variable Verzögerung + delay_ms = random.randint(50, 150) + if char in ' .,!?': + delay_ms *= random.uniform(1.3, 2.0) + time.sleep(delay_ms / 1000) + + # Gelegentlich längere Pause (Denken) + if random.random() < 0.05: + time.sleep(random.uniform(0.3, 0.8)) + + logger.info(f"Feld erfolgreich ausgefüllt") + return True + + except Exception as e: + logger.error(f"Fehler beim Ausfüllen mit Tab-Navigation: {e}") return False \ No newline at end of file diff --git a/social_networks/tiktok/tiktok_registration.py 
b/social_networks/tiktok/tiktok_registration.py index 6b3250f..35b7720 100644 --- a/social_networks/tiktok/tiktok_registration.py +++ b/social_networks/tiktok/tiktok_registration.py @@ -451,14 +451,97 @@ class TikTokRegistration: logger.debug("Kein Cookie-Banner erkannt") return True + def _close_video_overlay(self) -> bool: + """ + Schließt Video-Overlays, die Klicks blockieren können. + TikTok zeigt manchmal Promo-Videos, die den Login-Button überlagern. + + Returns: + bool: True wenn Overlay geschlossen wurde oder nicht existiert + """ + try: + # Prüfe ob ein Video-Modal sichtbar ist + video_modal_selectors = [ + "div[data-focus-lock-disabled]", + "div[class*='VideoPlayer']", + "div[class*='video-card']", + "div[class*='DivVideoContainer']" + ] + + for selector in video_modal_selectors: + if self.automation.browser.is_element_visible(selector, timeout=1000): + logger.info(f"Video-Overlay erkannt: {selector}") + + # Versuche verschiedene Methoden zum Schließen + close_selectors = [ + "button[aria-label='Close']", + "button[aria-label='Schließen']", + "div[data-focus-lock-disabled] button:has(svg)", + "[data-e2e='browse-close']", + "button.TUXButton:has-text('×')", + "button:has-text('×')" + ] + + for close_selector in close_selectors: + try: + if self.automation.browser.is_element_visible(close_selector, timeout=500): + self.automation.browser.click_element(close_selector) + logger.info(f"Video-Overlay geschlossen mit: {close_selector}") + self.automation.human_behavior.random_delay(0.5, 1.0) + return True + except: + continue + + # Fallback: ESC-Taste drücken + try: + self.automation.browser.page.keyboard.press("Escape") + logger.info("Video-Overlay mit ESC-Taste geschlossen") + self.automation.human_behavior.random_delay(0.5, 1.0) + return True + except: + pass + + # Fallback: JavaScript zum Entfernen des Video-Elements + try: + js_code = """ + // Video-Elemente pausieren und verstecken + document.querySelectorAll('video').forEach(v => { + v.pause(); + v.style.display = 'none'; + }); + // Modal-Container mit focus-lock entfernen + const modal = document.querySelector('div[data-focus-lock-disabled]'); + if (modal && modal.querySelector('video')) { + modal.style.display = 'none'; + return true; + } + return false; + """ + result = self.automation.browser.page.evaluate(js_code) + if result: + logger.info("Video-Overlay mit JavaScript versteckt") + self.automation.human_behavior.random_delay(0.3, 0.5) + return True + except Exception as e: + logger.debug(f"JavaScript-Entfernung fehlgeschlagen: {e}") + + return True # Kein Overlay gefunden = OK + + except Exception as e: + logger.warning(f"Fehler beim Schließen des Video-Overlays: {e}") + return True # Trotzdem fortfahren + def _click_login_button(self) -> bool: """ Klickt auf den Anmelden-Button auf der Startseite. 
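+        Closes blocking video overlays first (see _close_video_overlay) and
+        falls back to a JavaScript click if the normal click is intercepted.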
- + Returns: bool: True bei Erfolg, False bei Fehler """ try: + # WICHTIG: Erst Video-Overlays schließen, die Klicks blockieren können + self._close_video_overlay() + # Liste aller Login-Button-Selektoren, die wir versuchen wollen login_selectors = [ self.selectors.LOGIN_BUTTON, # button#header-login-button @@ -469,16 +552,34 @@ class TikTokRegistration: "button[aria-label*='Anmelden']", # Aria-Label "button:has(.TUXButton-label:text('Anmelden'))" # Verschachtelte Struktur ] - - # Versuche jeden Selektor + + # Versuche jeden Selektor mit force=True für blockierte Elemente for i, selector in enumerate(login_selectors): logger.debug(f"Versuche Login-Selektor {i+1}: {selector}") if self.automation.browser.is_element_visible(selector, timeout=3000): - result = self.automation.browser.click_element(selector) - if result: - logger.info(f"Anmelden-Button erfolgreich geklickt mit Selektor {i+1}") - self.automation.human_behavior.random_delay(0.5, 1.5) - return True + # Erst normaler Klick + try: + result = self.automation.browser.click_element(selector) + if result: + logger.info(f"Anmelden-Button erfolgreich geklickt mit Selektor {i+1}") + self.automation.human_behavior.random_delay(0.5, 1.5) + return True + except Exception as click_error: + # Bei Blockierung: Force-Click mit JavaScript + logger.debug(f"Normaler Klick blockiert, versuche JavaScript-Klick: {click_error}") + try: + escaped_selector = selector.replace("'", "\\'") + js_click = f""" + const el = document.querySelector('{escaped_selector}'); + if (el) {{ el.click(); return true; }} + return false; + """ + if self.automation.browser.page.evaluate(js_click): + logger.info(f"Anmelden-Button mit JavaScript geklickt (Selektor {i+1})") + self.automation.human_behavior.random_delay(0.5, 1.5) + return True + except: + continue # Versuche es mit Fuzzy-Button-Matching result = self.automation.ui_helper.click_button_fuzzy( @@ -501,35 +602,50 @@ class TikTokRegistration: def _click_register_link(self) -> bool: """ Klickt auf den Registrieren-Link im Login-Dialog. 
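+        Closes video overlays first and uses JavaScript clicks as a fallback
+        when the link is covered by an overlay.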
- + Returns: bool: True bei Erfolg, False bei Fehler """ try: # Warten, bis der Login-Dialog angezeigt wird self.automation.human_behavior.random_delay(2.0, 3.0) - + + # Video-Overlay schließen falls vorhanden (blockiert oft Klicks) + self._close_video_overlay() + # Screenshot für Debugging self.automation._take_screenshot("after_login_button_click") - - # Verschiedene Registrieren-Selektoren versuchen + + # Verschiedene Registrieren-Selektoren versuchen (prioritätsbezogen sortiert) register_selectors = [ - "a:text('Registrieren')", # Direkter Text-Match - "button:text('Registrieren')", # Button-Text - "div:text('Registrieren')", # Div-Text - "span:text('Registrieren')", # Span-Text + # Primäre Selektoren (data-e2e Attribute sind am stabilsten) + "span[data-e2e='bottom-sign-up']", # Offizieller TikTok-Selektor + "[data-e2e='bottom-sign-up']", # Allgemeiner + "[data-e2e*='sign-up']", # Partial match "[data-e2e*='signup']", # Data-Attribute "[data-e2e*='register']", # Data-Attribute + # Dialog-bezogene Selektoren + "div[role='dialog'] a:has-text('Registrieren')", # Link im Dialog + "div[role='dialog'] span:has-text('Registrieren')", # Span im Dialog + "div[role='dialog'] div:has-text('Registrieren')", # Div im Dialog + # Text-basierte Selektoren + "a:text('Registrieren')", # Direkter Text-Match + "button:text('Registrieren')", # Button-Text + "span:text('Registrieren')", # Span-Text + "div:text('Registrieren')", # Div-Text + # Href-basierte Selektoren "a[href*='signup']", # Signup-Link - "//a[contains(text(), 'Registrieren')]", # XPath - "//button[contains(text(), 'Registrieren')]", # XPath Button - "//span[contains(text(), 'Registrieren')]", # XPath Span - "//div[contains(text(), 'Konto erstellen')]", # Alternative Text - "//a[contains(text(), 'Sign up')]", # Englisch - ".signup-link", # CSS-Klasse - ".register-link" # CSS-Klasse + "a[href*='/signup']", # Mit Slash + # XPath als Fallback + "//a[contains(text(), 'Registrieren')]", # XPath + "//span[contains(text(), 'Registrieren')]", # XPath Span + "//div[contains(text(), 'Konto erstellen')]", # Alternative Text + "//a[contains(text(), 'Sign up')]", # Englisch + # CSS-Klassen als letzter Fallback + ".signup-link", # CSS-Klasse + ".register-link" # CSS-Klasse ] - + # Versuche jeden Selektor for i, selector in enumerate(register_selectors): logger.debug(f"Versuche Registrieren-Selektor {i+1}: {selector}") @@ -543,8 +659,37 @@ class TikTokRegistration: except Exception as e: logger.debug(f"Selektor {i+1} fehlgeschlagen: {e}") continue - - # Fallback: Fuzzy-Text-Suche + + # JavaScript-Fallback: Element per JS klicken (umgeht Overlays) + logger.debug("Versuche JavaScript-Klick für Registrieren-Link") + try: + js_selectors = [ + "span[data-e2e='bottom-sign-up']", + "[data-e2e*='sign-up']", + "a[href*='signup']" + ] + for js_sel in js_selectors: + try: + clicked = self.automation.browser.page.evaluate(f''' + () => {{ + const el = document.querySelector("{js_sel}"); + if (el) {{ + el.click(); + return true; + }} + return false; + }} + ''') + if clicked: + logger.info(f"Registrieren-Link per JavaScript geklickt: {js_sel}") + self.automation.human_behavior.random_delay(0.5, 1.5) + return True + except Exception: + continue + except Exception as e: + logger.debug(f"JavaScript-Klick fehlgeschlagen: {e}") + + # Fallback: Fuzzy-Text-Suche mit Playwright Locator try: page_content = self.automation.browser.page.content() if "Registrieren" in page_content or "Sign up" in page_content: @@ -559,18 +704,26 @@ class TikTokRegistration: try: element = 
self.automation.browser.page.locator(text_sel).first if element.is_visible(): - element.click() - logger.info(f"Auf Text geklickt: {text_sel}") - self.automation.human_behavior.random_delay(0.5, 1.5) - return True + # Versuche normalen Klick + try: + element.click(timeout=3000) + logger.info(f"Auf Text geklickt: {text_sel}") + self.automation.human_behavior.random_delay(0.5, 1.5) + return True + except Exception: + # Falls blockiert, force-click + element.click(force=True) + logger.info(f"Auf Text force-geklickt: {text_sel}") + self.automation.human_behavior.random_delay(0.5, 1.5) + return True except Exception: continue except Exception as e: logger.debug(f"Fallback-Text-Suche fehlgeschlagen: {e}") - + logger.error("Konnte keinen Registrieren-Link finden") return False - + except Exception as e: logger.error(f"Fehler beim Klicken auf den Registrieren-Link: {e}") # Debug-Screenshot bei Fehler @@ -931,7 +1084,8 @@ class TikTokRegistration: year_selected = False # Berechne den Index für das Jahr (normalerweise absteigend sortiert) # Annahme: Jahre von aktuellem Jahr bis 1900, also Index = aktuelles_jahr - gewähltes_jahr - current_year = 2025 # oder datetime.now().year + from datetime import datetime + current_year = datetime.now().year year_index = current_year - birthday['year'] year_option_selectors = [ diff --git a/tests/test_generator_tab_factory.py b/tests/test_generator_tab_factory.py deleted file mode 100644 index d8f9c10..0000000 --- a/tests/test_generator_tab_factory.py +++ /dev/null @@ -1,150 +0,0 @@ -""" -Unit-Tests für GeneratorTabFactory. -Validiert die Factory-Implementierung und plattform-spezifische Tab-Erstellung. -""" - -import unittest -import sys -import os -from unittest.mock import MagicMock, patch - -# Füge Projekt-Root zum Path hinzu -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -from PyQt5.QtWidgets import QApplication, QWidget -from views.tabs.generator_tab_factory import GeneratorTabFactory, create_generator_tab -from views.tabs.generator_tab import GeneratorTab - - -class TestGeneratorTabFactory(unittest.TestCase): - """ - Test-Suite für GeneratorTabFactory. 
- """ - - @classmethod - def setUpClass(cls): - """Erstelle QApplication für Qt-Widgets.""" - if not QApplication.instance(): - cls.app = QApplication([]) - else: - cls.app = QApplication.instance() - - def setUp(self): - """Setup vor jedem Test.""" - # Registry zurücksetzen - GeneratorTabFactory.clear_registry() - self.language_manager = MagicMock() - - def test_create_generic_tab_for_unknown_platform(self): - """Test: Factory erstellt generischen Tab für unbekannte Plattform.""" - tab = GeneratorTabFactory.create_tab("unknown_platform", self.language_manager) - - self.assertIsInstance(tab, QWidget) - self.assertIsInstance(tab, GeneratorTab) - - def test_create_facebook_tab(self): - """Test: Factory erstellt FacebookGeneratorTab für Facebook.""" - tab = GeneratorTabFactory.create_tab("facebook", self.language_manager) - - self.assertIsInstance(tab, QWidget) - # Prüfe ob es der Facebook-spezifische Tab ist - # FacebookGeneratorTab hat gender_male, gender_female, gender_custom Attribute - self.assertTrue(hasattr(tab, 'gender_male'), "Facebook-Tab sollte gender_male haben") - self.assertTrue(hasattr(tab, 'gender_female'), "Facebook-Tab sollte gender_female haben") - self.assertTrue(hasattr(tab, 'gender_custom'), "Facebook-Tab sollte gender_custom haben") - - def test_case_insensitive_platform_names(self): - """Test: Plattform-Namen sind case-insensitive.""" - tab1 = GeneratorTabFactory.create_tab("FACEBOOK", self.language_manager) - tab2 = GeneratorTabFactory.create_tab("Facebook", self.language_manager) - tab3 = GeneratorTabFactory.create_tab("facebook", self.language_manager) - - # Alle sollten Facebook-Tabs sein - for tab in [tab1, tab2, tab3]: - self.assertTrue(hasattr(tab, 'gender_male')) - - def test_registry_functionality(self): - """Test: Tab-Registry funktioniert korrekt.""" - # Erstelle Mock-Tab-Klasse - class MockTab(QWidget): - def __init__(self, platform, language_manager): - super().__init__() - self.platform = platform - self.language_manager = language_manager - - # Registriere Mock-Tab - GeneratorTabFactory.register_tab("test_platform", MockTab) - - # Erstelle Tab - tab = GeneratorTabFactory.create_tab("test_platform", self.language_manager) - - self.assertIsInstance(tab, MockTab) - self.assertEqual(tab.platform, "test_platform") - - def test_lazy_loading(self): - """Test: Tabs werden lazy geladen.""" - # Registry sollte initial leer sein - self.assertEqual(len(GeneratorTabFactory._tab_registry), 0) - - # Erstelle Facebook-Tab - tab = GeneratorTabFactory.create_tab("facebook", self.language_manager) - - # Jetzt sollte Facebook in Registry sein - self.assertIn("facebook", GeneratorTabFactory._tab_registry) - - def test_get_supported_platforms(self): - """Test: Liste der unterstützten Plattformen.""" - platforms = GeneratorTabFactory.get_supported_platforms() - - # Sollte bekannte Plattformen enthalten - self.assertIn("facebook", platforms) - self.assertIn("instagram", platforms) - self.assertIn("tiktok", platforms) - self.assertIn("x", platforms) - - def test_is_platform_supported(self): - """Test: Plattform-Support-Prüfung.""" - self.assertTrue(GeneratorTabFactory.is_platform_supported("facebook")) - self.assertTrue(GeneratorTabFactory.is_platform_supported("FACEBOOK")) - self.assertFalse(GeneratorTabFactory.is_platform_supported("unknown")) - - def test_convenience_function(self): - """Test: Convenience-Funktion create_generator_tab.""" - tab = create_generator_tab("facebook", self.language_manager) - - self.assertIsInstance(tab, QWidget) - self.assertTrue(hasattr(tab, 
'gender_male')) - - def test_error_handling_fallback(self): - """Test: Factory fällt auf generischen Tab zurück bei Fehlern.""" - # Simuliere einen Fehler beim Tab-Erstellen - with patch('views.tabs.facebook_generator_tab.FacebookGeneratorTab.__init__', - side_effect=Exception("Test error")): - - tab = GeneratorTabFactory.create_tab("facebook", self.language_manager) - - # Sollte auf generischen Tab zurückfallen - self.assertIsInstance(tab, GeneratorTab) - - def test_signal_compatibility(self): - """Test: Alle Tabs haben die erforderlichen Signale.""" - platforms = ["facebook", "instagram", "tiktok", "x"] - - for platform in platforms: - tab = GeneratorTabFactory.create_tab(platform, self.language_manager) - - # Prüfe erforderliche Signale - self.assertTrue(hasattr(tab, 'start_requested'), - f"{platform}-Tab sollte start_requested Signal haben") - self.assertTrue(hasattr(tab, 'stop_requested'), - f"{platform}-Tab sollte stop_requested Signal haben") - self.assertTrue(hasattr(tab, 'account_created'), - f"{platform}-Tab sollte account_created Signal haben") - - def tearDown(self): - """Cleanup nach jedem Test.""" - GeneratorTabFactory.clear_registry() - - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/test_method_rotation.py b/tests/test_method_rotation.py deleted file mode 100644 index c4860d9..0000000 --- a/tests/test_method_rotation.py +++ /dev/null @@ -1,611 +0,0 @@ -""" -Comprehensive tests for the method rotation system. -Tests all components: entities, repositories, use cases, and integration. -""" - -import unittest -import os -import sys -import tempfile -import sqlite3 -from datetime import datetime, timedelta -from unittest.mock import Mock, patch, MagicMock - -# Add project root to path -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) - -from domain.entities.method_rotation import ( - MethodStrategy, RotationSession, RotationEvent, PlatformMethodState, - RiskLevel, RotationEventType, RotationStrategy -) -from application.use_cases.method_rotation_use_case import MethodRotationUseCase, RotationContext -from infrastructure.repositories.method_strategy_repository import MethodStrategyRepository -from infrastructure.repositories.rotation_session_repository import RotationSessionRepository -from infrastructure.repositories.platform_method_state_repository import PlatformMethodStateRepository - - -class MockDBManager: - """Mock database manager for testing""" - - def __init__(self): - self.db_path = tempfile.mktemp(suffix='.db') - self.connection = None - self._setup_test_database() - - def _setup_test_database(self): - """Create test database with rotation tables""" - conn = sqlite3.connect(self.db_path) - - # Create rotation system tables - with open('database/migrations/add_method_rotation_system.sql', 'r') as f: - sql_script = f.read() - # Remove the INSERT statements for tests - sql_lines = sql_script.split('\n') - create_statements = [line for line in sql_lines if line.strip() and not line.strip().startswith('INSERT')] - clean_sql = '\n'.join(create_statements) - conn.executescript(clean_sql) - - conn.close() - - def get_connection(self): - if not self.connection: - self.connection = sqlite3.connect(self.db_path) - return self.connection - - def execute_query(self, query, params=None): - conn = self.get_connection() - cursor = conn.cursor() - if params: - cursor.execute(query, params) - else: - cursor.execute(query) - conn.commit() - return cursor - - def fetch_one(self, query, params=None): - conn = 
self.get_connection() - cursor = conn.cursor() - if params: - cursor.execute(query, params) - else: - cursor.execute(query) - return cursor.fetchone() - - def fetch_all(self, query, params=None): - conn = self.get_connection() - cursor = conn.cursor() - if params: - cursor.execute(query, params) - else: - cursor.execute(query) - return cursor.fetchall() - - def close(self): - if self.connection: - self.connection.close() - if os.path.exists(self.db_path): - os.unlink(self.db_path) - - -class TestMethodStrategy(unittest.TestCase): - """Test MethodStrategy entity""" - - def test_method_strategy_creation(self): - """Test creating a method strategy""" - strategy = MethodStrategy( - strategy_id="test_id", - platform="instagram", - method_name="email", - priority=8, - risk_level=RiskLevel.LOW - ) - - self.assertEqual(strategy.strategy_id, "test_id") - self.assertEqual(strategy.platform, "instagram") - self.assertEqual(strategy.method_name, "email") - self.assertEqual(strategy.priority, 8) - self.assertEqual(strategy.risk_level, RiskLevel.LOW) - self.assertTrue(strategy.is_active) - - def test_effectiveness_score_calculation(self): - """Test effectiveness score calculation""" - strategy = MethodStrategy( - strategy_id="test_id", - platform="instagram", - method_name="email", - priority=8, - success_rate=0.9, - failure_rate=0.1, - risk_level=RiskLevel.LOW - ) - - score = strategy.effectiveness_score - self.assertGreater(score, 0.8) # High priority, high success rate should score well - - def test_cooldown_functionality(self): - """Test cooldown period functionality""" - strategy = MethodStrategy( - strategy_id="test_id", - platform="instagram", - method_name="email", - cooldown_period=300, - last_failure=datetime.now() - timedelta(seconds=100) - ) - - self.assertTrue(strategy.is_on_cooldown) - self.assertGreater(strategy.cooldown_remaining_seconds, 0) - - # Test expired cooldown - strategy.last_failure = datetime.now() - timedelta(seconds=400) - self.assertFalse(strategy.is_on_cooldown) - - def test_performance_update(self): - """Test performance metrics update""" - strategy = MethodStrategy( - strategy_id="test_id", - platform="instagram", - method_name="email", - success_rate=0.5, - failure_rate=0.5 - ) - - # Update with success - strategy.update_performance(True, 120.0) - self.assertGreater(strategy.success_rate, 0.5) - self.assertLess(strategy.failure_rate, 0.5) - self.assertIsNotNone(strategy.last_success) - - # Update with failure - original_success_rate = strategy.success_rate - strategy.update_performance(False) - self.assertLess(strategy.success_rate, original_success_rate) - self.assertIsNotNone(strategy.last_failure) - - -class TestRotationSession(unittest.TestCase): - """Test RotationSession entity""" - - def test_rotation_session_creation(self): - """Test creating a rotation session""" - session = RotationSession( - session_id="test_session", - platform="instagram", - current_method="email" - ) - - self.assertEqual(session.session_id, "test_session") - self.assertEqual(session.platform, "instagram") - self.assertEqual(session.current_method, "email") - self.assertTrue(session.is_active) - self.assertEqual(session.rotation_count, 0) - - def test_session_metrics(self): - """Test session metrics calculation""" - session = RotationSession( - session_id="test_session", - platform="instagram", - current_method="email" - ) - - # Add some attempts - session.add_attempt("email", True) - session.add_attempt("email", False) - session.add_attempt("phone", True) - - 
self.assertEqual(session.success_count, 2) - self.assertEqual(session.failure_count, 1) - self.assertAlmostEqual(session.success_rate, 2/3, places=2) - - def test_rotation_logic(self): - """Test rotation decision logic""" - session = RotationSession( - session_id="test_session", - platform="instagram", - current_method="email" - ) - - # Add failures to trigger rotation - session.add_attempt("email", False) - session.add_attempt("email", False) - - self.assertTrue(session.should_rotate) - - # Test rotation - session.rotate_to_method("phone", "consecutive_failures") - self.assertEqual(session.current_method, "phone") - self.assertEqual(session.rotation_count, 1) - self.assertEqual(session.rotation_reason, "consecutive_failures") - - -class TestMethodStrategyRepository(unittest.TestCase): - """Test MethodStrategyRepository""" - - def setUp(self): - self.db_manager = MockDBManager() - self.repo = MethodStrategyRepository(self.db_manager) - - def tearDown(self): - self.db_manager.close() - - def test_save_and_find_strategy(self): - """Test saving and finding strategies""" - strategy = MethodStrategy( - strategy_id="test_strategy", - platform="instagram", - method_name="email", - priority=8, - risk_level=RiskLevel.LOW - ) - - # Save strategy - self.repo.save(strategy) - - # Find by ID - found_strategy = self.repo.find_by_id("test_strategy") - self.assertIsNotNone(found_strategy) - self.assertEqual(found_strategy.strategy_id, "test_strategy") - self.assertEqual(found_strategy.platform, "instagram") - self.assertEqual(found_strategy.method_name, "email") - - def test_find_active_by_platform(self): - """Test finding active strategies by platform""" - # Create multiple strategies - strategies = [ - MethodStrategy("s1", "instagram", "email", 8, risk_level=RiskLevel.LOW, success_rate=0.9), - MethodStrategy("s2", "instagram", "phone", 6, risk_level=RiskLevel.MEDIUM, success_rate=0.7), - MethodStrategy("s3", "instagram", "social", 4, risk_level=RiskLevel.HIGH, success_rate=0.3, is_active=False), - MethodStrategy("s4", "tiktok", "email", 8, risk_level=RiskLevel.LOW, success_rate=0.8) - ] - - for strategy in strategies: - self.repo.save(strategy) - - # Find active Instagram strategies - active_strategies = self.repo.find_active_by_platform("instagram") - - self.assertEqual(len(active_strategies), 2) # Only active ones - self.assertEqual(active_strategies[0].method_name, "email") # Highest effectiveness - - def test_get_next_available_method(self): - """Test getting next available method""" - # Create strategies - strategies = [ - MethodStrategy("s1", "instagram", "email", 8, risk_level=RiskLevel.LOW, success_rate=0.9), - MethodStrategy("s2", "instagram", "phone", 6, risk_level=RiskLevel.MEDIUM, success_rate=0.7), - ] - - for strategy in strategies: - self.repo.save(strategy) - - # Get next method excluding email - next_method = self.repo.get_next_available_method("instagram", ["email"]) - self.assertIsNotNone(next_method) - self.assertEqual(next_method.method_name, "phone") - - # Get next method with no exclusions - best_method = self.repo.get_next_available_method("instagram") - self.assertIsNotNone(best_method) - self.assertEqual(best_method.method_name, "email") # Best strategy - - def test_platform_statistics(self): - """Test platform statistics calculation""" - # Create strategies with different metrics - strategies = [ - MethodStrategy("s1", "instagram", "email", 8, risk_level=RiskLevel.LOW, - success_rate=0.9, last_success=datetime.now()), - MethodStrategy("s2", "instagram", "phone", 6, 
risk_level=RiskLevel.MEDIUM, - success_rate=0.6, last_failure=datetime.now()), - ] - - for strategy in strategies: - self.repo.save(strategy) - - stats = self.repo.get_platform_statistics("instagram") - - self.assertEqual(stats['total_methods'], 2) - self.assertEqual(stats['active_methods'], 2) - self.assertGreater(stats['avg_success_rate'], 0) - self.assertEqual(stats['recent_successes_24h'], 1) - - -class TestRotationUseCase(unittest.TestCase): - """Test MethodRotationUseCase""" - - def setUp(self): - self.db_manager = MockDBManager() - self.strategy_repo = MethodStrategyRepository(self.db_manager) - self.session_repo = RotationSessionRepository(self.db_manager) - self.state_repo = PlatformMethodStateRepository(self.db_manager) - - self.use_case = MethodRotationUseCase( - self.strategy_repo, self.session_repo, self.state_repo - ) - - # Setup test data - self._setup_test_strategies() - - def tearDown(self): - self.db_manager.close() - - def _setup_test_strategies(self): - """Setup test strategies""" - strategies = [ - MethodStrategy("instagram_email", "instagram", "email", 8, - risk_level=RiskLevel.LOW, success_rate=0.9), - MethodStrategy("instagram_phone", "instagram", "phone", 6, - risk_level=RiskLevel.MEDIUM, success_rate=0.7), - MethodStrategy("tiktok_email", "tiktok", "email", 8, - risk_level=RiskLevel.LOW, success_rate=0.8), - ] - - for strategy in strategies: - self.strategy_repo.save(strategy) - - def test_start_rotation_session(self): - """Test starting a rotation session""" - context = RotationContext( - platform="instagram", - account_id="test_account" - ) - - session = self.use_case.start_rotation_session(context) - - self.assertIsNotNone(session) - self.assertEqual(session.platform, "instagram") - self.assertEqual(session.current_method, "email") # Best method - self.assertTrue(session.is_active) - - def test_get_optimal_method(self): - """Test getting optimal method""" - context = RotationContext(platform="instagram") - - method = self.use_case.get_optimal_method(context) - - self.assertIsNotNone(method) - self.assertEqual(method.method_name, "email") # Best strategy - - # Test with exclusions - context.excluded_methods = ["email"] - method = self.use_case.get_optimal_method(context) - self.assertEqual(method.method_name, "phone") - - def test_method_rotation(self): - """Test method rotation""" - # Start session - context = RotationContext(platform="instagram") - session = self.use_case.start_rotation_session(context) - - # Record failure to trigger rotation - self.use_case.record_method_result( - session.session_id, "email", False, 0.0, - {'error_type': 'rate_limit', 'message': 'Rate limited'} - ) - - # Check if rotation should occur - should_rotate = self.use_case.should_rotate_method(session.session_id) - - if should_rotate: - # Attempt rotation - next_method = self.use_case.rotate_method(session.session_id, "rate_limit") - self.assertIsNotNone(next_method) - self.assertEqual(next_method.method_name, "phone") - - def test_emergency_mode(self): - """Test emergency mode functionality""" - # Enable emergency mode - self.use_case.enable_emergency_mode("instagram", "test_emergency") - - # Check that platform state reflects emergency mode - state = self.state_repo.find_by_platform("instagram") - self.assertTrue(state.emergency_mode) - - # Disable emergency mode - self.use_case.disable_emergency_mode("instagram") - state = self.state_repo.find_by_platform("instagram") - self.assertFalse(state.emergency_mode) - - def test_performance_tracking(self): - """Test performance tracking 
and metrics""" - context = RotationContext(platform="instagram") - session = self.use_case.start_rotation_session(context) - - # Record success - self.use_case.record_method_result( - session.session_id, "email", True, 120.0 - ) - - # Get recommendations - recommendations = self.use_case.get_platform_method_recommendations("instagram") - - self.assertIn('platform', recommendations) - self.assertIn('recommended_methods', recommendations) - self.assertGreater(len(recommendations['recommended_methods']), 0) - - -class TestIntegration(unittest.TestCase): - """Integration tests for the complete rotation system""" - - def setUp(self): - self.db_manager = MockDBManager() - - def tearDown(self): - self.db_manager.close() - - def test_complete_rotation_workflow(self): - """Test complete rotation workflow from start to finish""" - # Initialize components - strategy_repo = MethodStrategyRepository(self.db_manager) - session_repo = RotationSessionRepository(self.db_manager) - state_repo = PlatformMethodStateRepository(self.db_manager) - use_case = MethodRotationUseCase(strategy_repo, session_repo, state_repo) - - # Setup strategies - strategies = [ - MethodStrategy("instagram_email", "instagram", "email", 8, - risk_level=RiskLevel.LOW, success_rate=0.9, max_daily_attempts=20), - MethodStrategy("instagram_phone", "instagram", "phone", 6, - risk_level=RiskLevel.MEDIUM, success_rate=0.7, max_daily_attempts=10), - ] - - for strategy in strategies: - strategy_repo.save(strategy) - - # 1. Start rotation session - context = RotationContext(platform="instagram", account_id="test_account") - session = use_case.start_rotation_session(context) - - self.assertIsNotNone(session) - self.assertEqual(session.current_method, "email") - - # 2. Simulate failure and rotation - use_case.record_method_result( - session.session_id, "email", False, 0.0, - {'error_type': 'rate_limit', 'message': 'Rate limited'} - ) - - # Check rotation trigger - if use_case.should_rotate_method(session.session_id): - next_method = use_case.rotate_method(session.session_id, "rate_limit") - self.assertEqual(next_method.method_name, "phone") - - # 3. Simulate success with new method - use_case.record_method_result( - session.session_id, "phone", True, 180.0 - ) - - # 4. 
Verify session is completed - session_status = use_case.get_session_status(session.session_id) - self.assertIsNotNone(session_status) - - def test_error_handling_and_fallback(self): - """Test error handling and fallback mechanisms""" - # Test with invalid platform - strategy_repo = MethodStrategyRepository(self.db_manager) - session_repo = RotationSessionRepository(self.db_manager) - state_repo = PlatformMethodStateRepository(self.db_manager) - use_case = MethodRotationUseCase(strategy_repo, session_repo, state_repo) - - # Try to get method for platform with no strategies - context = RotationContext(platform="nonexistent") - method = use_case.get_optimal_method(context) - - self.assertIsNone(method) # Should handle gracefully - - def test_concurrent_sessions(self): - """Test handling multiple concurrent sessions""" - strategy_repo = MethodStrategyRepository(self.db_manager) - session_repo = RotationSessionRepository(self.db_manager) - state_repo = PlatformMethodStateRepository(self.db_manager) - use_case = MethodRotationUseCase(strategy_repo, session_repo, state_repo) - - # Setup strategy - strategy = MethodStrategy("instagram_email", "instagram", "email", 8, - risk_level=RiskLevel.LOW, success_rate=0.9) - strategy_repo.save(strategy) - - # Start multiple sessions - sessions = [] - for i in range(3): - context = RotationContext(platform="instagram", account_id=f"account_{i}") - session = use_case.start_rotation_session(context) - sessions.append(session) - - # Verify all sessions are active and distinct - self.assertEqual(len(sessions), 3) - session_ids = [s.session_id for s in sessions] - self.assertEqual(len(set(session_ids)), 3) # All unique - - -class TestMixinIntegration(unittest.TestCase): - """Test mixin integration with controllers""" - - def test_controller_mixin_integration(self): - """Test that controller mixins work correctly""" - from controllers.platform_controllers.method_rotation_mixin import MethodRotationMixin - - # Create mock controller with mixin - class MockController(MethodRotationMixin): - def __init__(self): - self.platform_name = "instagram" - self.db_manager = MockDBManager() - self.logger = Mock() - self._init_method_rotation_system() - - controller = MockController() - - # Test that rotation system is initialized - self.assertIsNotNone(controller.method_rotation_use_case) - - # Test availability check - self.assertTrue(controller._should_use_rotation_system()) - - # Cleanup - controller.db_manager.close() - - def test_worker_mixin_integration(self): - """Test worker thread mixin integration""" - from controllers.platform_controllers.method_rotation_worker_mixin import MethodRotationWorkerMixin - - # Create mock worker with mixin - class MockWorker(MethodRotationWorkerMixin): - def __init__(self): - self.params = {'registration_method': 'email'} - self.log_signal = Mock() - self.rotation_retry_count = 0 - self.max_rotation_retries = 3 - self.controller_instance = None - - worker = MockWorker() - - # Test initialization - worker._init_rotation_support() - - # Test availability check - available = worker._is_rotation_available() - self.assertFalse(available) # No controller instance - - # Test error classification - error_type = worker._classify_error("Rate limit exceeded") - self.assertEqual(error_type, "rate_limit") - - -if __name__ == '__main__': - # Create test suite - test_suite = unittest.TestSuite() - - # Add test cases - test_cases = [ - TestMethodStrategy, - TestRotationSession, - TestMethodStrategyRepository, - TestRotationUseCase, - TestIntegration, - 
TestMixinIntegration - ] - - for test_case in test_cases: - tests = unittest.TestLoader().loadTestsFromTestCase(test_case) - test_suite.addTests(tests) - - # Run tests - runner = unittest.TextTestRunner(verbosity=2) - result = runner.run(test_suite) - - # Print summary - print(f"\nTest Summary:") - print(f"Tests run: {result.testsRun}") - print(f"Failures: {len(result.failures)}") - print(f"Errors: {len(result.errors)}") - - if result.failures: - print("\nFailures:") - for test, traceback in result.failures: - print(f"- {test}: {traceback}") - - if result.errors: - print("\nErrors:") - for test, traceback in result.errors: - print(f"- {test}: {traceback}") - - # Exit with appropriate code - sys.exit(0 if result.wasSuccessful() else 1) \ No newline at end of file diff --git a/themes/theme_config.py b/themes/theme_config.py index 6771040..7832627 100644 --- a/themes/theme_config.py +++ b/themes/theme_config.py @@ -1,6 +1,6 @@ """ Theme Configuration - Single Source of Truth for all UI Colors and Styles -Based on IntelSight Corporate Design System +Based on AegisSight Corporate Design System """ class ThemeConfig: @@ -94,7 +94,7 @@ class ThemeConfig: 'scrollbar_handle_hover': '#0078A3', # ========== LOGO ========== - 'logo_path': 'intelsight-logo.svg', + 'logo_path': 'aegissight-logo.svg', }, 'dark': { @@ -180,7 +180,7 @@ class ThemeConfig: 'scrollbar_handle_hover': '#00B8E6', # ========== LOGO ========== - 'logo_path': 'intelsight-dark.svg', + 'logo_path': 'aegissight-logo-dark.svg', } } diff --git a/utils/human_behavior.py b/utils/human_behavior.py index ca7c99e..d1633da 100644 --- a/utils/human_behavior.py +++ b/utils/human_behavior.py @@ -24,18 +24,21 @@ class HumanBehavior: self.speed_factor = max(0.1, min(10.0, speed_factor)) # Begrenzung auf 0.1-10.0 self.randomness = max(0.0, min(1.0, randomness)) # Begrenzung auf 0.0-1.0 - # Typische Verzögerungen (in Sekunden) + # Typische Verzögerungen (in Sekunden) - ERHÖHT für Anti-Detection self.delays = { - "typing_per_char": 0.05, # Verzögerung pro Zeichen beim Tippen + "typing_per_char": 0.08, # Verzögerung pro Zeichen beim Tippen (erhöht) "mouse_movement": 0.5, # Verzögerung für Mausbewegung - "click": 0.1, # Verzögerung für Mausklick - "page_load": 2.0, # Verzögerung für das Laden einer Seite - "form_fill": 1.0, # Verzögerung zwischen Formularfeldern - "decision": 1.5, # Verzögerung für Entscheidungen - "scroll": 0.3, # Verzögerung für Scrollbewegungen - "verification": 5.0, # Verzögerung für Verifizierungsprozesse - "image_upload": 3.0, # Verzögerung für Bildupload - "navigation": 1.0 # Verzögerung für Navigation + "click": 0.15, # Verzögerung für Mausklick (erhöht) + "page_load": 8.0, # Verzögerung für das Laden einer Seite (STARK erhöht: 5-15s) + "form_fill": 4.0, # Verzögerung zwischen Formularfeldern (STARK erhöht: 2-8s) + "decision": 3.0, # Verzögerung für Entscheidungen (erhöht) + "scroll": 0.5, # Verzögerung für Scrollbewegungen (erhöht) + "verification": 30.0, # Verzögerung für Verifizierungsprozesse (STARK erhöht: 15-45s) + "image_upload": 5.0, # Verzögerung für Bildupload (erhöht) + "navigation": 2.0, # Verzögerung für Navigation (erhöht) + "cookie_reading": 5.0, # NEU: Cookie-Banner lesen (3-8s) + "field_transition": 5.0, # NEU: Zwischen Formularfeldern (2-8s) + "thinking": 2.0 # NEU: Kurze Denkpause } def sleep(self, delay_type: str, multiplier: float = 1.0) -> None: @@ -76,7 +79,7 @@ class HumanBehavior: def _random_delay(self, min_seconds: float = 1.0, max_seconds: float = 3.0) -> None: """ Führt eine zufällige Wartezeit aus, 
um menschliches Verhalten zu simulieren. - + Args: min_seconds: Minimale Wartezeit in Sekunden max_seconds: Maximale Wartezeit in Sekunden @@ -84,42 +87,96 @@ class HumanBehavior: delay = random.uniform(min_seconds, max_seconds) logger.debug(f"Zufällige Wartezeit: {delay:.2f} Sekunden") time.sleep(delay) - - def type_text(self, text: str, on_char_typed: Optional[Callable[[str], None]] = None, - error_probability: float = 0.05, correction_probability: float = 0.9) -> str: + + def anti_detection_delay(self, action_type: str = "form_fill") -> None: """ - Simuliert menschliches Tippen mit möglichen Tippfehlern und Korrekturen. - + Erzeugt eine realistische Anti-Detection-Verzögerung. + + Diese Methode verwendet längere, zufälligere Wartezeiten um Bot-Erkennung + zu vermeiden. Die Verzögerungen sind bewusst lang um menschliches + Verhalten realistischer zu simulieren. + + Args: + action_type: Art der Aktion: + - "form_fill": Zwischen Formularfeldern (2-8s) + - "page_load": Auf neuen Seiten (5-15s) + - "verification": Vor Code-Eingabe (15-45s) + - "cookie_reading": Cookie-Banner lesen (3-8s) + - "thinking": Kurze Denkpause (1-3s) + """ + delay_ranges = { + "form_fill": (2.0, 8.0), # Zwischen Formularfeldern + "page_load": (5.0, 15.0), # Auf neuen Seiten + "verification": (15.0, 45.0), # Vor Code-Eingabe + "cookie_reading": (3.0, 8.0), # Cookie-Banner lesen + "thinking": (1.0, 3.0), # Kurze Denkpause + "field_focus": (0.5, 1.5), # Vor Feldinteraktion + } + + min_delay, max_delay = delay_ranges.get(action_type, (2.0, 5.0)) + + # Basis-Verzögerung + delay = random.uniform(min_delay, max_delay) + + # Zusätzliche Variation basierend auf randomness + if self.randomness > 0: + variation = 1.0 + (random.random() * 2 - 1) * self.randomness * 0.3 + delay *= variation + + # Speed-Factor anwenden (aber nicht zu stark reduzieren) + delay = delay / max(self.speed_factor, 0.5) + + # Gelegentlich extra lange Pause (simuliert Ablenkung/Nachdenken) + if random.random() < 0.1: + extra_delay = random.uniform(2.0, 5.0) + delay += extra_delay + logger.debug(f"Extra Denkpause: +{extra_delay:.2f}s") + + logger.debug(f"Anti-Detection Delay ({action_type}): {delay:.2f}s") + time.sleep(max(0.5, delay)) # Minimum 0.5s + + def type_text(self, text: str, on_char_typed: Optional[Callable[[str], None]] = None, + error_probability: float = 0.15, correction_probability: float = 0.95) -> str: + """ + Simuliert menschliches Tippen mit realistischen Tippfehlern und Korrekturen. + + Die Fehlerrate wurde auf 15% erhöht (vorher 5%) um realistischeres + menschliches Verhalten zu simulieren. Echte Menschen machen häufig + Tippfehler und korrigieren diese sofort. 
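# Minimal usage sketch (hypothetical wiring, not taken from the patch itself):
# shows how type_text() and the anti_detection_delay() helper above are meant
# to be combined, assuming HumanBehavior accepts speed_factor and randomness
# as in the hunk above. send_char() is a placeholder for the real character
# sink; in the actual flows the callback forwards each character (or "\b"
# for a backspace) to the browser/keyboard layer.
from utils.human_behavior import HumanBehavior

def send_char(ch: str) -> None:
    print(repr(ch), end=" ")  # placeholder for the browser/keyboard layer

behavior = HumanBehavior(speed_factor=1.0, randomness=0.5)
behavior.anti_detection_delay("field_focus")   # short pause before touching the field
typed = behavior.type_text("hello world", on_char_typed=send_char)
# `typed` may still contain an uncorrected typo, so callers should not assume
# it equals the input string while correction_probability < 1.0.
behavior.anti_detection_delay("form_fill")     # 2-8s pause before the next field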
+ Args: text: Zu tippender Text on_char_typed: Optionale Funktion, die für jedes getippte Zeichen aufgerufen wird - error_probability: Wahrscheinlichkeit für Tippfehler (0-1) + error_probability: Wahrscheinlichkeit für Tippfehler (0-1), Standard: 0.15 (15%) correction_probability: Wahrscheinlichkeit, Tippfehler zu korrigieren (0-1) - + Returns: Der tatsächlich getippte Text (mit oder ohne Fehler) """ - # Anpassen der Fehlerwahrscheinlichkeit basierend auf Zufälligkeit - adjusted_error_prob = error_probability * self.randomness - + # Fehlerrate zwischen 10-20% halten für Realismus + base_error_prob = max(0.10, min(0.20, error_probability)) + # Anpassen basierend auf Zufälligkeit (aber nicht unter 10%) + adjusted_error_prob = max(0.10, base_error_prob * (0.8 + self.randomness * 0.4)) + result = "" i = 0 - + while i < len(text): char = text[i] - + # Potentieller Tippfehler if random.random() < adjusted_error_prob: - # Auswahl eines Fehlertyps: - # - Falsches Zeichen (Tastatur-Nachbarn) - # - Ausgelassenes Zeichen - # - Doppeltes Zeichen + # Auswahl eines Fehlertyps: + # - Falsches Zeichen (Tastatur-Nachbarn) - 50% + # - Transposition (Buchstaben vertauschen) - 15% + # - Ausgelassenes Zeichen - 15% + # - Doppeltes Zeichen - 20% error_type = random.choices( - ["wrong", "skip", "double"], - weights=[0.6, 0.2, 0.2], + ["wrong", "transposition", "skip", "double"], + weights=[0.50, 0.15, 0.15, 0.20], k=1 )[0] - + if error_type == "wrong": # Falsches Zeichen tippen (Tastatur-Nachbarn) keyboard_neighbors = self.get_keyboard_neighbors(char) @@ -129,15 +186,18 @@ class HumanBehavior: if on_char_typed: on_char_typed(wrong_char) self.sleep("typing_per_char") - + + # Pause bevor Fehler "bemerkt" wird + time.sleep(random.uniform(0.1, 0.4)) + # Entscheiden, ob der Fehler korrigiert wird if random.random() < correction_probability: # Löschen des falschen Zeichens result = result[:-1] if on_char_typed: on_char_typed("\b") # Backspace - self.sleep("typing_per_char", 1.5) # Längere Pause für Korrektur - + self.sleep("typing_per_char", 1.8) + # Korrektes Zeichen tippen result += char if on_char_typed: @@ -149,35 +209,87 @@ class HumanBehavior: if on_char_typed: on_char_typed(char) self.sleep("typing_per_char") - + + elif error_type == "transposition" and i < len(text) - 1: + # Buchstaben vertauschen (häufiger Tippfehler) + next_char = text[i + 1] + result += next_char + char # Vertauscht + if on_char_typed: + on_char_typed(next_char) + self.sleep("typing_per_char") + on_char_typed(char) + self.sleep("typing_per_char") + + # Korrektur der Transposition + if random.random() < correction_probability: + time.sleep(random.uniform(0.2, 0.5)) # Bemerken des Fehlers + # Beide Zeichen löschen + result = result[:-2] + if on_char_typed: + on_char_typed("\b") + self.sleep("typing_per_char", 1.3) + on_char_typed("\b") + self.sleep("typing_per_char", 1.5) + + # Korrekte Reihenfolge tippen + result += char + next_char + if on_char_typed: + on_char_typed(char) + self.sleep("typing_per_char") + on_char_typed(next_char) + self.sleep("typing_per_char") + + i += 1 # Nächstes Zeichen überspringen (bereits verarbeitet) + elif error_type == "skip": # Zeichen auslassen (nichts tun) + # In 50% der Fälle später bemerken und nachtippen + if random.random() < 0.5 and i < len(text) - 1: + # Nächstes Zeichen normal tippen + pass # Wird übersprungen pass - + elif error_type == "double": # Zeichen doppelt tippen result += char + char if on_char_typed: on_char_typed(char) + self.sleep("typing_per_char", 0.3) # Sehr kurz zwischen Doppel 
on_char_typed(char) self.sleep("typing_per_char") - + + # Pause bevor Fehler bemerkt wird + time.sleep(random.uniform(0.15, 0.35)) + # Entscheiden, ob der Fehler korrigiert wird if random.random() < correction_probability: # Löschen des doppelten Zeichens result = result[:-1] if on_char_typed: on_char_typed("\b") # Backspace - self.sleep("typing_per_char", 1.2) + self.sleep("typing_per_char", 1.3) else: # Normales Tippen ohne Fehler result += char if on_char_typed: on_char_typed(char) - self.sleep("typing_per_char") - + + # Variable Tippgeschwindigkeit basierend auf Zeichen + if char in ' .,!?;:': + # Längere Pause nach Satzzeichen/Leerzeichen + self.sleep("typing_per_char", random.uniform(1.2, 1.8)) + elif char.isupper(): + # Leicht länger für Großbuchstaben (Shift-Taste) + self.sleep("typing_per_char", random.uniform(1.0, 1.3)) + else: + self.sleep("typing_per_char", random.uniform(0.8, 1.2)) + i += 1 - + + # Gelegentliche längere Pause (Nachdenken) + if random.random() < 0.05: + time.sleep(random.uniform(0.3, 0.8)) + return result def get_keyboard_neighbors(self, char: str) -> List[str]: diff --git a/utils/modal_test.py b/utils/modal_test.py deleted file mode 100644 index 69aafcb..0000000 --- a/utils/modal_test.py +++ /dev/null @@ -1,195 +0,0 @@ -""" -Modal System Test - Test-Funktionen für das Modal-System -""" - -import logging -import time -from typing import Optional -from PyQt5.QtWidgets import QApplication, QMainWindow, QPushButton, QVBoxLayout, QWidget -from PyQt5.QtCore import QTimer - -from utils.modal_manager import ModalManager -from views.widgets.progress_modal import ProgressModal -from views.widgets.account_creation_modal import AccountCreationModal -from views.widgets.login_process_modal import LoginProcessModal - -logger = logging.getLogger("modal_test") - - -class ModalTestWindow(QMainWindow): - """Test-Fenster für Modal-System Tests""" - - def __init__(self): - super().__init__() - self.setWindowTitle("AccountForger Modal System Test") - self.setGeometry(100, 100, 600, 400) - - # Modal Manager - self.modal_manager = ModalManager(parent_window=self) - - # Test UI - self.setup_ui() - - def setup_ui(self): - """Erstellt Test-UI""" - central_widget = QWidget() - self.setCentralWidget(central_widget) - - layout = QVBoxLayout(central_widget) - - # Test Buttons - btn_account_creation = QPushButton("Test Account Creation Modal") - btn_account_creation.clicked.connect(self.test_account_creation_modal) - layout.addWidget(btn_account_creation) - - btn_login_process = QPushButton("Test Login Process Modal") - btn_login_process.clicked.connect(self.test_login_process_modal) - layout.addWidget(btn_login_process) - - btn_generic_modal = QPushButton("Test Generic Progress Modal") - btn_generic_modal.clicked.connect(self.test_generic_modal) - layout.addWidget(btn_generic_modal) - - btn_error_modal = QPushButton("Test Error Modal") - btn_error_modal.clicked.connect(self.test_error_modal) - layout.addWidget(btn_error_modal) - - btn_modal_manager = QPushButton("Test Modal Manager") - btn_modal_manager.clicked.connect(self.test_modal_manager) - layout.addWidget(btn_modal_manager) - - def test_account_creation_modal(self): - """Testet Account Creation Modal""" - logger.info("Testing Account Creation Modal") - - modal = AccountCreationModal(parent=self, platform="Instagram") - - # Steps setzen - steps = [ - "Browser wird vorbereitet", - "Formular wird ausgefüllt", - "Account wird erstellt", - "E-Mail wird verifiziert" - ] - modal.set_steps(steps) - - # Modal anzeigen - 
modal.show_platform_specific_process() - - # Simuliere Steps - QTimer.singleShot(1000, lambda: modal.start_step("Browser wird vorbereitet")) - QTimer.singleShot(2000, lambda: modal.complete_step("Browser wird vorbereitet", "Formular wird ausgefüllt")) - QTimer.singleShot(3000, lambda: modal.start_step("Formular wird ausgefüllt")) - QTimer.singleShot(4000, lambda: modal.complete_step("Formular wird ausgefüllt", "Account wird erstellt")) - QTimer.singleShot(5000, lambda: modal.start_step("Account wird erstellt")) - QTimer.singleShot(6000, lambda: modal.complete_step("Account wird erstellt", "E-Mail wird verifiziert")) - QTimer.singleShot(7000, lambda: modal.start_step("E-Mail wird verifiziert")) - QTimer.singleShot(8000, lambda: modal.show_success({"username": "test_user", "platform": "Instagram"})) - - def test_login_process_modal(self): - """Testet Login Process Modal""" - logger.info("Testing Login Process Modal") - - modal = LoginProcessModal(parent=self, platform="TikTok") - - # Session Login testen - modal.show_session_login("test_account", "TikTok") - - # Simuliere Login-Prozess - QTimer.singleShot(1000, lambda: modal.update_login_progress("browser_init", "Browser wird gestartet")) - QTimer.singleShot(2000, lambda: modal.update_login_progress("session_restore", "Session wird wiederhergestellt")) - QTimer.singleShot(3000, lambda: modal.update_login_progress("verification", "Login wird geprüft")) - QTimer.singleShot(4000, lambda: modal.show_session_restored()) - - def test_generic_modal(self): - """Testet Generic Progress Modal""" - logger.info("Testing Generic Progress Modal") - - modal = ProgressModal(parent=self, modal_type="verification") - modal.show_process() - - # Simuliere Updates - QTimer.singleShot(1000, lambda: modal.update_status("Verbindung wird hergestellt...", "Server wird kontaktiert")) - QTimer.singleShot(2000, lambda: modal.update_status("Daten werden verarbeitet...", "Bitte warten")) - QTimer.singleShot(3000, lambda: modal.update_status("✅ Vorgang abgeschlossen!", "Erfolgreich")) - QTimer.singleShot(4000, lambda: modal.hide_process()) - - def test_error_modal(self): - """Testet Error Modal""" - logger.info("Testing Error Modal") - - modal = ProgressModal(parent=self, modal_type="generic") - modal.show_process() - - # Nach kurzer Zeit Fehler anzeigen - QTimer.singleShot(1500, lambda: modal.show_error("Netzwerkfehler aufgetreten", auto_close_seconds=3)) - - def test_modal_manager(self): - """Testet Modal Manager""" - logger.info("Testing Modal Manager") - - # Zeige Account Creation Modal über Manager - self.modal_manager.show_modal( - 'account_creation', - title="🔄 Test Account wird erstellt", - status="Modal Manager Test läuft...", - detail="Über ModalManager aufgerufen" - ) - - # Simuliere Updates über Manager - QTimer.singleShot(1000, lambda: self.modal_manager.update_modal_status( - 'account_creation', - "Browser wird initialisiert...", - "Schritt 1 von 3" - )) - - QTimer.singleShot(2000, lambda: self.modal_manager.update_modal_status( - 'account_creation', - "Formular wird ausgefüllt...", - "Schritt 2 von 3" - )) - - QTimer.singleShot(3000, lambda: self.modal_manager.update_modal_status( - 'account_creation', - "Account wird finalisiert...", - "Schritt 3 von 3" - )) - - QTimer.singleShot(4000, lambda: self.modal_manager.update_modal_status( - 'account_creation', - "✅ Account erfolgreich erstellt!", - "Test abgeschlossen" - )) - - QTimer.singleShot(5000, lambda: self.modal_manager.hide_modal('account_creation')) - - -def run_modal_test(): - """Führt den Modal-Test 
aus""" - import sys - - # QApplication erstellen falls nicht vorhanden - app = QApplication.instance() - if app is None: - app = QApplication(sys.argv) - - # Test-Fenster erstellen - test_window = ModalTestWindow() - test_window.show() - - # App ausführen - if hasattr(app, 'exec'): - return app.exec() - else: - return app.exec_() - - -if __name__ == "__main__": - # Logging konfigurieren - logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' - ) - - # Test ausführen - run_modal_test() \ No newline at end of file diff --git a/utils/performance_monitor.py b/utils/performance_monitor.py deleted file mode 100644 index 7b0013f..0000000 --- a/utils/performance_monitor.py +++ /dev/null @@ -1,412 +0,0 @@ -""" -Performance Monitor - Non-intrusive monitoring for race condition detection -Debug-only monitoring without production performance impact -""" - -import time -import threading -import functools -import traceback -from typing import Dict, Any, Optional, Callable, List -from collections import defaultdict, deque -from datetime import datetime, timedelta -from dataclasses import dataclass, field -import logging -import json -import os - -logger = logging.getLogger(__name__) - - -@dataclass -class OperationMetrics: - """Metriken für eine einzelne Operation""" - operation_name: str - thread_id: int - thread_name: str - start_time: float - end_time: Optional[float] = None - duration: Optional[float] = None - success: bool = True - error_message: Optional[str] = None - metadata: Dict[str, Any] = field(default_factory=dict) - stack_trace: Optional[str] = None - - def complete(self, success: bool = True, error_message: Optional[str] = None): - """Markiert Operation als abgeschlossen""" - self.end_time = time.time() - self.duration = self.end_time - self.start_time - self.success = success - self.error_message = error_message - - def to_dict(self) -> Dict[str, Any]: - """Konvertiert zu Dictionary für Serialisierung""" - return { - 'operation_name': self.operation_name, - 'thread_id': self.thread_id, - 'thread_name': self.thread_name, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'duration': self.duration, - 'success': self.success, - 'error_message': self.error_message, - 'metadata': self.metadata, - 'has_stack_trace': self.stack_trace is not None - } - - -class PerformanceMonitor: - """ - Performance-Monitor mit race condition detection - """ - - def __init__(self, enabled: bool = None, max_history: int = 1000): - # Auto-detect based on debug settings oder environment - if enabled is None: - enabled = ( - os.getenv('DEBUG_RACE_CONDITIONS', '').lower() in ['true', '1', 'yes'] or - os.getenv('PERFORMANCE_MONITORING', '').lower() in ['true', '1', 'yes'] - ) - - self.enabled = enabled - self.max_history = max_history - - # Monitoring data - self._operation_history: deque = deque(maxlen=max_history) - self._active_operations: Dict[str, OperationMetrics] = {} - self._operation_stats: Dict[str, Dict[str, Any]] = defaultdict(lambda: { - 'total_calls': 0, - 'successful_calls': 0, - 'failed_calls': 0, - 'total_duration': 0.0, - 'min_duration': float('inf'), - 'max_duration': 0.0, - 'concurrent_executions': 0, - 'max_concurrent': 0 - }) - - # Thread safety - self._lock = threading.RLock() - - # Race condition detection - self._potential_races: List[Dict[str, Any]] = [] - self._long_operations: List[Dict[str, Any]] = [] - - # Thresholds - self.long_operation_threshold = 2.0 # seconds - self.race_detection_window = 0.1 # seconds - - if 
self.enabled: - logger.info("Performance monitoring enabled") - - def monitor_operation(self, operation_name: str, capture_stack: bool = False): - """ - Decorator für Operation-Monitoring - """ - def decorator(func: Callable) -> Callable: - if not self.enabled: - return func # No overhead when disabled - - @functools.wraps(func) - def wrapper(*args, **kwargs): - return self._execute_monitored( - operation_name or func.__name__, - func, - capture_stack, - *args, - **kwargs - ) - - wrapper.original = func - wrapper.is_monitored = True - return wrapper - - return decorator - - def _execute_monitored(self, operation_name: str, func: Callable, - capture_stack: bool, *args, **kwargs) -> Any: - """Führt eine überwachte Operation aus""" - if not self.enabled: - return func(*args, **kwargs) - - thread_id = threading.current_thread().ident - thread_name = threading.current_thread().name - operation_key = f"{operation_name}_{thread_id}_{time.time()}" - - # Metrics-Objekt erstellen - metrics = OperationMetrics( - operation_name=operation_name, - thread_id=thread_id, - thread_name=thread_name, - start_time=time.time(), - stack_trace=traceback.format_stack() if capture_stack else None - ) - - # Race condition detection - self._detect_potential_race(operation_name, metrics.start_time) - - with self._lock: - # Concurrent execution tracking - concurrent_count = sum( - 1 for op in self._active_operations.values() - if op.operation_name == operation_name - ) - - stats = self._operation_stats[operation_name] - stats['concurrent_executions'] = concurrent_count - stats['max_concurrent'] = max(stats['max_concurrent'], concurrent_count) - - # Operation zu aktiven hinzufügen - self._active_operations[operation_key] = metrics - - try: - # Operation ausführen - result = func(*args, **kwargs) - - # Erfolg markieren - metrics.complete(success=True) - - return result - - except Exception as e: - # Fehler markieren - metrics.complete(success=False, error_message=str(e)) - raise - - finally: - # Cleanup und Statistik-Update - with self._lock: - self._active_operations.pop(operation_key, None) - self._update_statistics(metrics) - self._operation_history.append(metrics) - - # Long operation detection - if metrics.duration and metrics.duration > self.long_operation_threshold: - self._record_long_operation(metrics) - - def _detect_potential_race(self, operation_name: str, start_time: float): - """Erkennt potentielle Race Conditions""" - if not self.enabled: - return - - # Prüfe ob ähnliche Operationen zeitgleich laufen - concurrent_ops = [] - with self._lock: - for op in self._active_operations.values(): - if (op.operation_name == operation_name and - abs(op.start_time - start_time) < self.race_detection_window): - concurrent_ops.append(op) - - if len(concurrent_ops) > 0: - race_info = { - 'operation_name': operation_name, - 'detected_at': start_time, - 'concurrent_threads': [op.thread_id for op in concurrent_ops], - 'time_window': self.race_detection_window, - 'severity': 'high' if len(concurrent_ops) > 2 else 'medium' - } - - self._potential_races.append(race_info) - - logger.warning(f"Potential race condition detected: {operation_name} " - f"running on {len(concurrent_ops)} threads simultaneously") - - def _record_long_operation(self, metrics: OperationMetrics): - """Zeichnet lange Operationen auf""" - long_op_info = { - 'operation_name': metrics.operation_name, - 'duration': metrics.duration, - 'thread_id': metrics.thread_id, - 'start_time': metrics.start_time, - 'success': metrics.success, - 'metadata': metrics.metadata 
- } - - self._long_operations.append(long_op_info) - - logger.warning(f"Long operation detected: {metrics.operation_name} " - f"took {metrics.duration:.3f}s (threshold: {self.long_operation_threshold}s)") - - def _update_statistics(self, metrics: OperationMetrics): - """Aktualisiert Operation-Statistiken""" - stats = self._operation_stats[metrics.operation_name] - - stats['total_calls'] += 1 - if metrics.success: - stats['successful_calls'] += 1 - else: - stats['failed_calls'] += 1 - - if metrics.duration: - stats['total_duration'] += metrics.duration - stats['min_duration'] = min(stats['min_duration'], metrics.duration) - stats['max_duration'] = max(stats['max_duration'], metrics.duration) - - def get_statistics(self) -> Dict[str, Any]: - """Gibt vollständige Monitoring-Statistiken zurück""" - if not self.enabled: - return {'monitoring_enabled': False} - - with self._lock: - # Statistiken aufbereiten - processed_stats = {} - for op_name, stats in self._operation_stats.items(): - processed_stats[op_name] = { - **stats, - 'average_duration': ( - stats['total_duration'] / stats['total_calls'] - if stats['total_calls'] > 0 else 0 - ), - 'success_rate': ( - stats['successful_calls'] / stats['total_calls'] - if stats['total_calls'] > 0 else 0 - ), - 'min_duration': stats['min_duration'] if stats['min_duration'] != float('inf') else 0 - } - - return { - 'monitoring_enabled': True, - 'operation_statistics': processed_stats, - 'race_conditions': { - 'detected_count': len(self._potential_races), - 'recent_races': self._potential_races[-10:], # Last 10 - }, - 'long_operations': { - 'detected_count': len(self._long_operations), - 'threshold': self.long_operation_threshold, - 'recent_long_ops': self._long_operations[-10:], # Last 10 - }, - 'active_operations': len(self._active_operations), - 'history_size': len(self._operation_history), - 'thresholds': { - 'long_operation_threshold': self.long_operation_threshold, - 'race_detection_window': self.race_detection_window - } - } - - def get_race_condition_report(self) -> Dict[str, Any]: - """Gibt detaillierten Race Condition Report zurück""" - if not self.enabled: - return {'monitoring_enabled': False} - - with self._lock: - # Gruppiere Race Conditions nach Operation - races_by_operation = defaultdict(list) - for race in self._potential_races: - races_by_operation[race['operation_name']].append(race) - - # Analysiere Patterns - analysis = {} - for op_name, races in races_by_operation.items(): - high_severity = sum(1 for r in races if r['severity'] == 'high') - analysis[op_name] = { - 'total_races': len(races), - 'high_severity_races': high_severity, - 'affected_threads': len(set( - thread_id for race in races - for thread_id in race['concurrent_threads'] - )), - 'first_detected': min(r['detected_at'] for r in races), - 'last_detected': max(r['detected_at'] for r in races), - 'recommendation': self._get_race_recommendation(op_name, races) - } - - return { - 'monitoring_enabled': True, - 'total_race_conditions': len(self._potential_races), - 'affected_operations': len(races_by_operation), - 'analysis_by_operation': analysis, - 'raw_detections': self._potential_races - } - - def _get_race_recommendation(self, operation_name: str, races: List[Dict]) -> str: - """Gibt Empfehlungen für Race Condition Behebung""" - race_count = len(races) - high_severity_count = sum(1 for r in races if r['severity'] == 'high') - - if high_severity_count > 5: - return f"CRITICAL: {operation_name} has {high_severity_count} high-severity race conditions. 
Implement ThreadSafetyMixin immediately." - elif race_count > 10: - return f"HIGH: {operation_name} frequently encounters race conditions. Consider adding thread synchronization." - elif race_count > 3: - return f"MEDIUM: {operation_name} occasionally has race conditions. Monitor and consider thread safety measures." - else: - return f"LOW: {operation_name} has minimal race condition risk." - - def export_report(self, filename: Optional[str] = None) -> str: - """Exportiert vollständigen Report als JSON""" - if not self.enabled: - return "Monitoring not enabled" - - if filename is None: - timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") - filename = f"performance_report_{timestamp}.json" - - report = { - 'timestamp': datetime.now().isoformat(), - 'statistics': self.get_statistics(), - 'race_condition_report': self.get_race_condition_report(), - 'operation_history': [op.to_dict() for op in list(self._operation_history)[-100:]] # Last 100 - } - - try: - with open(filename, 'w', encoding='utf-8') as f: - json.dump(report, f, indent=2, ensure_ascii=False) - - logger.info(f"Performance report exported to: {filename}") - return filename - - except Exception as e: - logger.error(f"Failed to export performance report: {e}") - return f"Export failed: {e}" - - def reset_statistics(self): - """Setzt alle Statistiken zurück""" - with self._lock: - self._operation_history.clear() - self._operation_stats.clear() - self._potential_races.clear() - self._long_operations.clear() - # Aktive Operationen nicht löschen - könnten noch laufen - - if self.enabled: - logger.info("Performance monitoring statistics reset") - - -# Global Monitor Instance -_global_monitor: Optional[PerformanceMonitor] = None -_monitor_init_lock = threading.RLock() - - -def get_performance_monitor() -> PerformanceMonitor: - """Holt die globale Monitor-Instanz (Singleton)""" - global _global_monitor - - if _global_monitor is None: - with _monitor_init_lock: - if _global_monitor is None: - _global_monitor = PerformanceMonitor() - - return _global_monitor - - -# Convenience Decorators -def monitor_if_enabled(operation_name: str = None, capture_stack: bool = False): - """Convenience decorator für conditional monitoring""" - monitor = get_performance_monitor() - return monitor.monitor_operation(operation_name, capture_stack) - - -def monitor_race_conditions(operation_name: str = None): - """Speziell für Race Condition Detection""" - return monitor_if_enabled(operation_name, capture_stack=True) - - -def monitor_fingerprint_operations(operation_name: str = None): - """Speziell für Fingerprint-Operationen""" - return monitor_if_enabled(f"fingerprint_{operation_name}", capture_stack=False) - - -def monitor_session_operations(operation_name: str = None): - """Speziell für Session-Operationen""" - return monitor_if_enabled(f"session_{operation_name}", capture_stack=False) \ No newline at end of file diff --git a/utils/process_guard.py b/utils/process_guard.py index 8d62686..6a4a67d 100644 --- a/utils/process_guard.py +++ b/utils/process_guard.py @@ -7,6 +7,12 @@ Dieser Guard verhindert: - Mehrere Browser-Instanzen gleichzeitig Clean Code & YAGNI: Nur das Nötigste, keine Über-Engineering. 
+ +WICHTIG - Korrekte Verwendung: +- start() → Prozess beginnt +- end(success=True/False) → Prozess endet normal (zählt für Failure-Tracking) +- release() → Prozess wird abgebrochen (zählt NICHT als Failure) +- Alle Methoden sind idempotent (mehrfacher Aufruf ist sicher) """ import json @@ -14,6 +20,7 @@ import logging from datetime import datetime, timedelta from pathlib import Path from typing import Optional, Tuple +import threading logger = logging.getLogger(__name__) @@ -26,6 +33,9 @@ class ProcessGuard: - Process Lock Management (nur ein Prozess gleichzeitig) - Fehler-Tracking (Zwangspause nach 3 Fehlern) - Persistierung der Pause-Zeit über Neustarts + + Thread-Safety: + - Alle öffentlichen Methoden sind thread-safe durch Lock """ # Konfiguration @@ -35,11 +45,15 @@ class ProcessGuard: def __init__(self): """Initialisiert den Process Guard.""" + # Thread-Safety Lock + self._thread_lock = threading.Lock() + # Process Lock self._is_locked = False self._current_process = None self._current_platform = None self._lock_started_at = None # Timestamp für Auto-Timeout + self._lock_id = None # Eindeutige ID für jeden Lock # Error Tracking self._failure_count = 0 @@ -48,6 +62,9 @@ class ProcessGuard: # Config File self._config_file = Path("config/.process_guard") + # Counter für Lock-IDs + self._lock_counter = 0 + def can_start(self, process_type: str, platform: str) -> Tuple[bool, Optional[str]]: """ Prüft ob ein Prozess gestartet werden darf. @@ -61,98 +78,172 @@ class ProcessGuard: - (True, None) wenn erlaubt - (False, "Fehlermeldung") wenn blockiert """ - # 1. Prüfe Zwangspause - if self._is_paused(): - remaining_min = self._get_pause_remaining_minutes() - error_msg = ( - f"⏸ Zwangspause aktiv\n\n" - f"Nach 3 fehlgeschlagenen Versuchen ist eine Pause erforderlich.\n" - f"Verbleibende Zeit: {remaining_min} Minuten\n\n" - f"Empfehlung:\n" - f"• Proxy-Einstellungen prüfen\n" - f"• Internetverbindung prüfen\n" - f"• Plattform-Status überprüfen" - ) - return False, error_msg + with self._thread_lock: + # 1. Prüfe Zwangspause + if self._is_paused(): + remaining_min = self._get_pause_remaining_minutes() + error_msg = ( + f"⏸ Zwangspause aktiv\n\n" + f"Nach 3 fehlgeschlagenen Versuchen ist eine Pause erforderlich.\n" + f"Verbleibende Zeit: {remaining_min} Minuten\n\n" + f"Empfehlung:\n" + f"• Proxy-Einstellungen prüfen\n" + f"• Internetverbindung prüfen\n" + f"• Plattform-Status überprüfen" + ) + return False, error_msg - # 2. Prüfe Process Lock - if self._is_locked: - error_msg = ( - f"⚠ Prozess läuft bereits\n\n" - f"Aktuell aktiv: {self._current_process} ({self._current_platform})\n\n" - f"Bitte warten Sie bis der aktuelle Vorgang abgeschlossen ist." - ) - return False, error_msg + # 2. Prüfe Process Lock (mit Auto-Timeout-Check) + if self._is_locked_with_timeout_check(): + error_msg = ( + f"⚠ Prozess läuft bereits\n\n" + f"Aktuell aktiv: {self._current_process} ({self._current_platform})\n\n" + f"Bitte warten Sie bis der aktuelle Vorgang abgeschlossen ist." + ) + return False, error_msg - return True, None + return True, None - def start(self, process_type: str, platform: str): + def start(self, process_type: str, platform: str) -> int: """ Startet einen Prozess (setzt den Lock). 
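# Lifecycle sketch for the contract documented above (check, lock, run, then
# either end() or release()). This is an illustration under assumptions, not
# code from the patch: run_registration() is a hypothetical stand-in for the
# real worker call, and the platform/process names are examples only.
from utils.process_guard import ProcessGuard

def run_registration() -> bool:
    return True  # placeholder for the actual registration workflow

guard = ProcessGuard()
guard.reset()  # on app start: restore pause state, clear any stale lock

allowed, message = guard.can_start("registration", "instagram")
if not allowed:
    print(message)  # forced pause active or another process already holds the lock
else:
    lock_id = guard.start("registration", "instagram")  # returns the new lock ID
    try:
        guard.end(success=run_registration())  # normal end: counted for failure tracking
    except KeyboardInterrupt:
        guard.release()        # user abort: lock freed without counting a failure
    except Exception:
        guard.end(success=False)  # real failure: counted, may trigger the forced pause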
Args: process_type: Art des Prozesses platform: Plattform - """ - self._is_locked = True - self._current_process = process_type - self._current_platform = platform - self._lock_started_at = datetime.now() # Timestamp für Auto-Timeout - logger.info(f"Process locked: {process_type} ({platform})") - def end(self, success: bool): + Returns: + int: Lock-ID für diesen Prozess (für spätere Freigabe) + """ + with self._thread_lock: + self._lock_counter += 1 + self._lock_id = self._lock_counter + self._is_locked = True + self._current_process = process_type + self._current_platform = platform + self._lock_started_at = datetime.now() + logger.info(f"Process locked [ID={self._lock_id}]: {process_type} ({platform})") + return self._lock_id + + def end(self, success: bool) -> bool: """ Beendet einen Prozess (gibt den Lock frei). + Diese Methode ist IDEMPOTENT - mehrfacher Aufruf ist sicher. + Der Failure-Counter wird nur erhöht wenn der Lock aktiv war. + Args: success: War der Prozess erfolgreich? + + Returns: + bool: True wenn Lock freigegeben wurde, False wenn kein Lock aktiv war """ - # Lock freigeben - process_info = f"{self._current_process} ({self._current_platform})" - self._is_locked = False - self._current_process = None - self._current_platform = None - self._lock_started_at = None # Timestamp zurücksetzen + with self._thread_lock: + # IDEMPOTENZ: Prüfe ob Lock überhaupt aktiv ist + if not self._is_locked: + logger.debug("end() aufgerufen, aber kein Lock aktiv - ignoriere") + return False - # Fehler-Tracking - if success: - if self._failure_count > 0: - logger.info(f"Fehler-Counter zurückgesetzt nach Erfolg (war: {self._failure_count})") - self._failure_count = 0 - self._save_pause_state() - else: - self._failure_count += 1 - logger.warning(f"Fehlschlag #{self._failure_count} bei {process_info}") + # Lock-Info für Logging speichern + process_info = f"{self._current_process} ({self._current_platform})" + lock_id = self._lock_id - if self._failure_count >= self.MAX_FAILURES: - self._activate_pause() + # Lock freigeben + self._is_locked = False + self._current_process = None + self._current_platform = None + self._lock_started_at = None + self._lock_id = None - logger.info(f"Process unlocked: {process_info} (success={success})") + # Fehler-Tracking (nur wenn Lock aktiv war) + if success: + if self._failure_count > 0: + logger.info(f"Fehler-Counter zurückgesetzt nach Erfolg (war: {self._failure_count})") + self._failure_count = 0 + self._save_pause_state() + else: + self._failure_count += 1 + logger.warning(f"Fehlschlag #{self._failure_count} bei {process_info}") + + if self._failure_count >= self.MAX_FAILURES: + self._activate_pause() + + logger.info(f"Process unlocked [ID={lock_id}]: {process_info} (success={success})") + return True + + def release(self) -> bool: + """ + Gibt den Lock frei OHNE den Failure-Counter zu beeinflussen. + + Verwendung: + - User-Abbruch (Cancel-Button) + - Validierungsfehler VOR Prozessstart + - Cleanup bei App-Schließung + + Diese Methode ist IDEMPOTENT - mehrfacher Aufruf ist sicher. 
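To make the start/end/release lifecycle above concrete, here is a minimal usage sketch. It relies only on methods introduced in this patch plus the module-level get_guard() accessor defined further below; run_registration() is a hypothetical worker function, not part of the codebase.

    from utils.process_guard import get_guard

    guard = get_guard()
    allowed, error_msg = guard.can_start("registration", "instagram")
    if not allowed:
        print(error_msg)                      # forced pause or another process is running
    else:
        lock_id = guard.start("registration", "instagram")   # keep the ID for log correlation
        try:
            success = run_registration()      # hypothetical worker; returns True/False
            guard.end(success=success)        # normal completion, feeds the failure counter
        except KeyboardInterrupt:
            guard.release()                   # user abort: lock freed, no failure counted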
+ + Returns: + bool: True wenn Lock freigegeben wurde, False wenn kein Lock aktiv war + """ + with self._thread_lock: + # IDEMPOTENZ: Prüfe ob Lock überhaupt aktiv ist + if not self._is_locked: + logger.debug("release() aufgerufen, aber kein Lock aktiv - ignoriere") + return False + + # Lock-Info für Logging speichern + process_info = f"{self._current_process} ({self._current_platform})" + lock_id = self._lock_id + + # Lock freigeben (OHNE Failure-Tracking) + self._is_locked = False + self._current_process = None + self._current_platform = None + self._lock_started_at = None + self._lock_id = None + + logger.info(f"Process released [ID={lock_id}]: {process_info} (kein Failure gezählt)") + return True def reset(self): """ Reset beim App-Start. Lädt Pause-State, resettet aber Lock (da Lock nicht über Neustarts persistiert). """ - self._is_locked = False - self._current_process = None - self._current_platform = None - self._lock_started_at = None # Timestamp zurücksetzen - self._load_pause_state() + with self._thread_lock: + self._is_locked = False + self._current_process = None + self._current_platform = None + self._lock_started_at = None + self._lock_id = None + self._load_pause_state() - if self._is_paused(): - remaining = self._get_pause_remaining_minutes() - logger.warning(f"Zwangspause aktiv: noch {remaining} Minuten") + if self._is_paused(): + remaining = self._get_pause_remaining_minutes() + logger.warning(f"Zwangspause aktiv: noch {remaining} Minuten") - logger.info("Process Guard initialisiert") + logger.info("Process Guard initialisiert") def is_locked(self) -> bool: """ Gibt zurück ob aktuell ein Prozess läuft (mit Auto-Timeout-Check). + Thread-safe Methode. + Returns: True wenn ein Prozess aktiv ist """ + with self._thread_lock: + return self._is_locked_with_timeout_check() + + def _is_locked_with_timeout_check(self) -> bool: + """ + Interne Methode: Prüft Lock-Status mit Auto-Timeout. + MUSS innerhalb eines _thread_lock aufgerufen werden! + + Returns: + True wenn Lock aktiv ist + """ if not self._is_locked: return False @@ -162,14 +253,15 @@ class ProcessGuard: if elapsed_minutes > self.LOCK_TIMEOUT_MINUTES: logger.warning( - f"⏰ AUTO-TIMEOUT: Lock nach {int(elapsed_minutes)} Minuten freigegeben. " + f"⏰ AUTO-TIMEOUT: Lock [ID={self._lock_id}] nach {int(elapsed_minutes)} Minuten freigegeben. " f"Prozess: {self._current_process} ({self._current_platform})" ) - # Lock automatisch freigeben + # Lock automatisch freigeben (OHNE Failure-Zählung - Timeout ist kein User-Fehler) self._is_locked = False self._current_process = None self._current_platform = None self._lock_started_at = None + self._lock_id = None return False return True @@ -178,26 +270,44 @@ class ProcessGuard: """ Gibt zurück ob Zwangspause aktiv ist. + Thread-safe Methode. + Returns: True wenn Pause aktiv ist """ - return self._is_paused() + with self._thread_lock: + return self._is_paused() def get_status_message(self) -> Optional[str]: """ Gibt Status-Nachricht zurück wenn blockiert. + Thread-safe Methode. 
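Since the UI may poll these thread-safe accessors from a timer, a small status-polling sketch using only the accessors defined in this patch (the surrounding timer wiring is illustrative and not shown):

    guard = get_guard()
    if guard.is_locked():                     # also auto-releases stale locks after LOCK_TIMEOUT_MINUTES
        print(guard.get_status_message())     # e.g. "'registration' läuft"
    else:
        status = guard.get_status_message()   # None when idle, pause message otherwise
        if status:
            print(status)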
+ Returns: None wenn nicht blockiert, sonst Nachricht """ - if self._is_paused(): - remaining = self._get_pause_remaining_minutes() - return f"Zwangspause aktiv (noch {remaining} Min)" + with self._thread_lock: + if self._is_paused(): + remaining = self._get_pause_remaining_minutes() + return f"Zwangspause aktiv (noch {remaining} Min)" - if self._is_locked: - return f"'{self._current_process}' läuft" + if self._is_locked: + return f"'{self._current_process}' läuft" - return None + return None + + def get_failure_count(self) -> int: + """ + Gibt den aktuellen Failure-Counter zurück. + + Thread-safe Methode. + + Returns: + int: Anzahl der Fehlschläge seit letztem Erfolg + """ + with self._thread_lock: + return self._failure_count # Private Methoden @@ -273,18 +383,24 @@ class ProcessGuard: logger.error(f"Fehler beim Laden des Pause-State: {e}") -# Globale Instanz (YAGNI: Kein komplexes Singleton-Pattern nötig) +# Globale Instanz mit Thread-Safety _guard_instance = None +_guard_instance_lock = threading.Lock() def get_guard() -> ProcessGuard: """ Gibt die globale ProcessGuard-Instanz zurück. + Thread-safe Singleton-Pattern. + Returns: ProcessGuard: Die globale Guard-Instanz """ global _guard_instance if _guard_instance is None: - _guard_instance = ProcessGuard() + with _guard_instance_lock: + # Double-check locking + if _guard_instance is None: + _guard_instance = ProcessGuard() return _guard_instance diff --git a/utils/profile_export_service.py b/utils/profile_export_service.py index dc37645..f4932a3 100644 --- a/utils/profile_export_service.py +++ b/utils/profile_export_service.py @@ -206,8 +206,8 @@ class ProfileExportService: spaceBefore=5*mm ) - # IntelSight Logo versuchen zu laden - logo_path = Path("resources/icons/intelsight-logo.svg") + # AegisSight Logo versuchen zu laden + logo_path = Path("resources/icons/aegissight-logo.svg") if logo_path.exists(): try: # SVG zu reportlab Image (mit svglib falls verfügbar) diff --git a/utils/proxy_rotator.py b/utils/proxy_rotator.py index 40ef930..4287933 100644 --- a/utils/proxy_rotator.py +++ b/utils/proxy_rotator.py @@ -383,19 +383,19 @@ class ProxyRotator: def format_proxy_for_playwright(self, proxy: str) -> Dict[str, str]: """ Formatiert einen Proxy-String für die Verwendung mit Playwright. - + Args: proxy: Proxy-String im Format host:port:username:password - + Returns: Dictionary mit Playwright-Proxy-Konfiguration """ parts = proxy.split(":") - + if len(parts) >= 4: # Format: host:port:username:password host, port, username, password = parts[:4] - + return { "server": f"{host}:{port}", "username": username, @@ -404,10 +404,110 @@ class ProxyRotator: elif len(parts) >= 2: # Format: host:port host, port = parts[:2] - + return { "server": f"{host}:{port}" } else: logger.warning(f"Ungültiges Proxy-Format: {self.mask_proxy_credentials(proxy)}") - return {} \ No newline at end of file + return {} + + # ========================================================================== + # ANTI-DETECTION: Erzwungene Proxy-Rotation für Account-Registrierung + # ========================================================================== + + def force_rotation(self, proxy_type: str = None) -> Optional[Dict[str, str]]: + """ + Erzwingt eine sofortige Proxy-Rotation. + + Diese Methode sollte VOR jeder neuen Account-Registrierung aufgerufen + werden, um sicherzustellen, dass ein frischer Proxy verwendet wird. + Dies verhindert, dass mehrere Accounts von derselben IP erstellt werden. 
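A sketch of how a registration flow might consume this before opening a browser context. It assumes an already configured ProxyRotator instance named rotator, a running Playwright Browser object named browser, and that get_proxy() returns the Playwright-style dict built by format_proxy_for_playwright().

    proxy_config = rotator.force_rotation(proxy_type="ipv4")   # fresh proxy for each registration
    if proxy_config:
        # dict shape: {"server": ..., "username": ..., "password": ...}
        context = browser.new_context(proxy=proxy_config)
    else:
        context = browser.new_context()                        # no proxy available: direct connection
    page = context.new_page()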
+ + Args: + proxy_type: Proxy-Typ ("ipv4", "ipv6", "mobile") oder None für zufällig + + Returns: + Neue Proxy-Konfiguration (Dict) oder None wenn kein Proxy verfügbar + """ + logger.info("ERZWINGE Proxy-Rotation für neue Registrierung") + + # Vorherigen Proxy vergessen + old_proxy = self.current_proxy + self.current_proxy = None + self.last_rotation_time = 0 + + # Neuen Proxy holen + new_proxy = self.get_proxy(proxy_type) + + if new_proxy: + self.current_proxy = new_proxy.get('server', '') + self.last_rotation_time = time.time() + + # Log mit maskierten Credentials + masked_server = self.mask_proxy_credentials(self.current_proxy) + logger.info(f"Proxy rotiert: {masked_server}") + + if old_proxy: + logger.debug(f"Vorheriger Proxy: {self.mask_proxy_credentials(old_proxy)}") + else: + logger.warning("Kein Proxy verfügbar für erzwungene Rotation") + + return new_proxy + + def get_proxy_for_registration(self, proxy_type: str = None, + force_new: bool = True) -> Optional[Dict[str, str]]: + """ + Holt einen Proxy speziell für Account-Registrierung. + + Diese Methode ist ein Wrapper um force_rotation() mit zusätzlicher + Logik für Registrierungen. + + Args: + proxy_type: Gewünschter Proxy-Typ oder None für zufällig + force_new: Ob ein neuer Proxy erzwungen werden soll (Standard: True) + + Returns: + Proxy-Konfiguration für Playwright oder None + """ + if force_new: + proxy_config = self.force_rotation(proxy_type) + else: + proxy_config = self.get_proxy(proxy_type) + + if not proxy_config: + logger.warning("Kein Proxy für Registrierung verfügbar - Registrierung ohne Proxy") + return None + + logger.info(f"Proxy für Registrierung bereit: {self.mask_proxy_credentials(proxy_config.get('server', ''))}") + return proxy_config + + def should_rotate_for_registration(self) -> bool: + """ + Prüft, ob eine Proxy-Rotation vor der nächsten Registrierung empfohlen wird. + + Returns: + True wenn Rotation empfohlen, False sonst + """ + # Immer True - jede Registrierung sollte einen neuen Proxy verwenden + # Dies ist die sicherste Anti-Detection-Strategie + return True + + def get_rotation_stats(self) -> Dict[str, Any]: + """ + Gibt Statistiken über Proxy-Rotationen zurück. + + Returns: + Dictionary mit Rotations-Statistiken + """ + return { + "current_proxy": self.mask_proxy_credentials(self.current_proxy) if self.current_proxy else None, + "last_rotation_time": self.last_rotation_time, + "time_since_last_rotation": time.time() - self.last_rotation_time if self.last_rotation_time > 0 else None, + "rotation_interval": self.config.get("rotation_interval", 300), + "available_proxies": { + "ipv4": len(self.config.get("ipv4", [])), + "ipv6": len(self.config.get("ipv6", [])), + "mobile": len(self.config.get("mobile", [])) + } + } \ No newline at end of file diff --git a/utils/rate_limit_handler.py b/utils/rate_limit_handler.py new file mode 100644 index 0000000..d03401d --- /dev/null +++ b/utils/rate_limit_handler.py @@ -0,0 +1,351 @@ +""" +Rate Limit Handler für HTTP 429 und ähnliche Fehler. + +Dieses Modul implementiert exponentielles Backoff für Rate-Limiting, +um automatisch auf zu viele Anfragen zu reagieren und Sperren zu vermeiden. +""" + +import logging +import time +import random +from typing import Callable, Any, Optional, List + +logger = logging.getLogger("rate_limit_handler") + + +class RateLimitHandler: + """ + Behandelt Rate-Limits mit exponentiellem Backoff. + + Diese Klasse implementiert eine robuste Strategie zum Umgang mit + Rate-Limiting durch soziale Netzwerke. 
Bei Erkennung eines Rate-Limits + wird exponentiell länger gewartet, um Sperren zu vermeiden. + + Beispiel: + handler = RateLimitHandler() + + # Option 1: Manuelles Handling + if rate_limit_detected: + handler.handle_rate_limit() + + # Option 2: Automatisches Retry + result = handler.execute_with_backoff(my_function, arg1, arg2) + """ + + # Bekannte Rate-Limit-Indikatoren + RATE_LIMIT_INDICATORS = [ + # HTTP Status Codes + '429', + 'rate limit', + 'rate_limit', + 'ratelimit', + # Englische Meldungen + 'too many requests', + 'too many attempts', + 'slow down', + 'try again later', + 'temporarily blocked', + 'please wait', + 'request blocked', + # Deutsche Meldungen + 'zu viele anfragen', + 'zu viele versuche', + 'später erneut versuchen', + 'vorübergehend gesperrt', + 'bitte warten', + # Plattform-spezifische Meldungen + 'challenge_required', # Instagram + 'checkpoint_required', # Instagram/Facebook + 'feedback_required', # Instagram + 'spam', # Generisch + 'suspicious activity', # Generisch + 'unusual activity', # Generisch + ] + + def __init__(self, + initial_delay: float = 60.0, + max_delay: float = 600.0, + backoff_multiplier: float = 2.0, + max_retries: int = 5, + jitter_factor: float = 0.2): + """ + Initialisiert den Rate-Limit-Handler. + + Args: + initial_delay: Anfängliche Wartezeit in Sekunden (Standard: 60s = 1 Minute) + max_delay: Maximale Wartezeit in Sekunden (Standard: 600s = 10 Minuten) + backoff_multiplier: Multiplikator für exponentielles Backoff (Standard: 2.0) + max_retries: Maximale Anzahl an Wiederholungsversuchen (Standard: 5) + jitter_factor: Faktor für zufällige Variation (Standard: 0.2 = ±20%) + """ + self.initial_delay = initial_delay + self.max_delay = max_delay + self.backoff_multiplier = backoff_multiplier + self.max_retries = max_retries + self.jitter_factor = jitter_factor + + # Status-Tracking + self.current_retry = 0 + self.last_rate_limit_time = 0 + self.total_rate_limits = 0 + self.consecutive_successes = 0 + + def is_rate_limited(self, response_text: str) -> bool: + """ + Prüft, ob eine Antwort auf ein Rate-Limit hindeutet. + + Args: + response_text: Text der Antwort (z.B. Seiteninhalt, Fehlermeldung) + + Returns: + True wenn Rate-Limit erkannt wurde, sonst False + """ + if not response_text: + return False + + response_lower = response_text.lower() + + for indicator in self.RATE_LIMIT_INDICATORS: + if indicator in response_lower: + logger.warning(f"Rate-Limit Indikator gefunden: '{indicator}'") + return True + + return False + + def calculate_delay(self, retry_count: int = None) -> float: + """ + Berechnet die Backoff-Verzögerung. + + Args: + retry_count: Aktueller Wiederholungsversuch (optional) + + Returns: + Verzögerung in Sekunden + """ + if retry_count is None: + retry_count = self.current_retry + + # Exponentielles Backoff berechnen + delay = self.initial_delay * (self.backoff_multiplier ** retry_count) + + # Jitter hinzufügen (zufällige Variation) + jitter = delay * random.uniform(-self.jitter_factor, self.jitter_factor) + delay = delay + jitter + + # Maximum nicht überschreiten + delay = min(delay, self.max_delay) + + return delay + + def handle_rate_limit(self, retry_count: int = None, + on_waiting: Optional[Callable[[float, int], None]] = None) -> float: + """ + Behandelt ein erkanntes Rate-Limit mit Backoff. 
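With the default parameters above, calculate_delay() produces roughly the following schedule; a quick sanity check (individual values vary by up to ±20% because of jitter). The import path assumes the file location shown in this patch.

    from utils.rate_limit_handler import RateLimitHandler

    handler = RateLimitHandler()                        # 60s start, x2 backoff, 600s cap, ±20% jitter
    for retry in range(5):
        print(retry, round(handler.calculate_delay(retry), 1))
    # nominal progression: 60s, 120s, 240s, 480s, then capped at max_delay=600s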
+ + Args: + retry_count: Aktueller Wiederholungsversuch + on_waiting: Optionaler Callback während des Wartens (delay, retry) + + Returns: + Tatsächlich gewartete Zeit in Sekunden + """ + if retry_count is None: + retry_count = self.current_retry + + delay = self.calculate_delay(retry_count) + + logger.warning( + f"Rate-Limit erkannt! Warte {delay:.1f}s " + f"(Versuch {retry_count + 1}/{self.max_retries})" + ) + + # Callback aufrufen falls vorhanden + if on_waiting: + on_waiting(delay, retry_count + 1) + + # Warten + time.sleep(delay) + + # Status aktualisieren + self.current_retry = retry_count + 1 + self.last_rate_limit_time = time.time() + self.total_rate_limits += 1 + self.consecutive_successes = 0 + + return delay + + def execute_with_backoff(self, func: Callable, *args, + on_retry: Optional[Callable[[int, Exception], None]] = None, + **kwargs) -> Any: + """ + Führt eine Funktion mit automatischem Backoff bei Rate-Limits aus. + + Args: + func: Auszuführende Funktion + *args: Positionsargumente für die Funktion + on_retry: Optionaler Callback bei Retry (retry_count, exception) + **kwargs: Keyword-Argumente für die Funktion + + Returns: + Rückgabewert der Funktion oder None bei Fehler + + Raises: + Exception: Wenn max_retries erreicht oder nicht-Rate-Limit-Fehler + """ + last_exception = None + + for attempt in range(self.max_retries): + try: + result = func(*args, **kwargs) + + # Erfolg - Reset Retry-Zähler + self.current_retry = 0 + self.consecutive_successes += 1 + + # Nach mehreren Erfolgen: Backoff-Zähler langsam reduzieren + if self.consecutive_successes >= 3: + self.total_rate_limits = max(0, self.total_rate_limits - 1) + + return result + + except Exception as e: + last_exception = e + error_str = str(e).lower() + + # Prüfe auf Rate-Limit-Indikatoren + is_rate_limit = any( + indicator in error_str + for indicator in self.RATE_LIMIT_INDICATORS + ) + + if is_rate_limit: + logger.warning(f"Rate-Limit Exception erkannt: {e}") + + if on_retry: + on_retry(attempt, e) + + self.handle_rate_limit(attempt) + else: + # Anderer Fehler - nicht durch Backoff lösbar + logger.error(f"Nicht-Rate-Limit Fehler: {e}") + raise + + # Maximum erreicht + logger.error( + f"Maximale Wiederholungsversuche ({self.max_retries}) erreicht. " + f"Letzter Fehler: {last_exception}" + ) + return None + + def should_slow_down(self) -> bool: + """ + Prüft, ob die Geschwindigkeit reduziert werden sollte. + + Basierend auf der Anzahl der kürzlichen Rate-Limits wird empfohlen, + ob zusätzliche Verzögerungen eingebaut werden sollten. + + Returns: + True wenn Verlangsamung empfohlen, sonst False + """ + # Wenn kürzlich (< 5 min) ein Rate-Limit war + time_since_last = time.time() - self.last_rate_limit_time + if time_since_last < 300 and self.last_rate_limit_time > 0: + return True + + # Wenn viele Rate-Limits insgesamt + if self.total_rate_limits >= 3: + return True + + return False + + def get_recommended_delay(self) -> float: + """ + Gibt eine empfohlene zusätzliche Verzögerung zurück. + + Basierend auf dem aktuellen Status wird eine Verzögerung empfohlen, + die zwischen Aktionen eingefügt werden sollte. 
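A usage sketch for execute_with_backoff(); submit_signup_form() is a hypothetical callable whose exception text contains a rate-limit indicator (e.g. "429" or "too many requests") when the platform throttles.

    from utils.rate_limit_handler import RateLimitHandler

    handler = RateLimitHandler(max_retries=3)

    def submit_signup_form():
        # hypothetical platform action; raises e.g. Exception("HTTP 429: too many requests") when throttled
        return True

    result = handler.execute_with_backoff(
        submit_signup_form,
        on_retry=lambda attempt, exc: print(f"retry {attempt + 1} after rate limit: {exc}"),
    )
    if result is None:
        print("rate limit persisted across all retries - giving up")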
+ + Returns: + Empfohlene Verzögerung in Sekunden + """ + if not self.should_slow_down(): + return 0.0 + + # Basis-Verzögerung basierend auf Anzahl der Rate-Limits + base_delay = 5.0 * self.total_rate_limits + + # Zusätzliche Verzögerung wenn kürzlich Rate-Limit war + time_since_last = time.time() - self.last_rate_limit_time + if time_since_last < 300: + # Je kürzer her, desto länger warten + recency_factor = 1.0 - (time_since_last / 300) + base_delay += 10.0 * recency_factor + + return min(base_delay, 30.0) # Maximum 30 Sekunden + + def reset(self): + """Setzt den Handler auf Anfangszustand zurück.""" + self.current_retry = 0 + self.last_rate_limit_time = 0 + self.total_rate_limits = 0 + self.consecutive_successes = 0 + logger.info("Rate-Limit Handler zurückgesetzt") + + def get_status(self) -> dict: + """ + Gibt den aktuellen Status des Handlers zurück. + + Returns: + Dictionary mit Status-Informationen + """ + return { + "current_retry": self.current_retry, + "total_rate_limits": self.total_rate_limits, + "consecutive_successes": self.consecutive_successes, + "last_rate_limit_time": self.last_rate_limit_time, + "should_slow_down": self.should_slow_down(), + "recommended_delay": self.get_recommended_delay(), + } + + +# Globale Instanz für einfache Verwendung +_default_handler: Optional[RateLimitHandler] = None + + +def get_default_handler() -> RateLimitHandler: + """ + Gibt die globale Standard-Instanz des Rate-Limit-Handlers zurück. + + Returns: + RateLimitHandler-Instanz + """ + global _default_handler + if _default_handler is None: + _default_handler = RateLimitHandler() + return _default_handler + + +def handle_rate_limit(retry_count: int = None) -> float: + """ + Convenience-Funktion für Rate-Limit-Handling mit Standard-Handler. + + Args: + retry_count: Aktueller Wiederholungsversuch + + Returns: + Gewartete Zeit in Sekunden + """ + return get_default_handler().handle_rate_limit(retry_count) + + +def is_rate_limited(response_text: str) -> bool: + """ + Convenience-Funktion für Rate-Limit-Erkennung. + + Args: + response_text: Zu prüfender Text + + Returns: + True wenn Rate-Limit erkannt + """ + return get_default_handler().is_rate_limited(response_text) diff --git a/utils/theme_manager.py b/utils/theme_manager.py index 90581a9..00f6958 100644 --- a/utils/theme_manager.py +++ b/utils/theme_manager.py @@ -191,9 +191,9 @@ class ThemeManager(QObject): return os.path.join(self.base_dir, "resources", "icons", f"{icon_name}.svg") # Logo is theme-specific - if icon_name == "intelsight-logo": + if icon_name == "aegissight-logo": theme = ThemeConfig.get_theme(self.current_theme) - logo_name = theme.get('logo_path', 'intelsight-logo.svg').replace('.svg', '') + logo_name = theme.get('logo_path', 'aegissight-logo.svg').replace('.svg', '') return os.path.join(self.base_dir, "resources", "icons", f"{logo_name}.svg") # For other icons diff --git a/utils/username_generator.py b/utils/username_generator.py index bba87ac..086cce4 100644 --- a/utils/username_generator.py +++ b/utils/username_generator.py @@ -295,21 +295,22 @@ class UsernameGenerator: Generierter Benutzername """ # Verschiedene Muster für zufällige Benutzernamen + # ANTI-DETECTION: Keine verdächtigen Patterns wie "user" + Zahlen patterns = [ - # Adjektiv + Substantiv + # Adjektiv + Substantiv (z.B. 
"happytiger") lambda: random.choice(self.adjectives) + random.choice(self.nouns), - - # Substantiv + Zahlen - lambda: random.choice(self.nouns) + "".join(random.choices(string.digits, k=random.randint(1, 4))), - - # Adjektiv + Substantiv + Zahlen - lambda: random.choice(self.adjectives) + random.choice(self.nouns) + "".join(random.choices(string.digits, k=random.randint(1, 3))), - - # Substantiv + Unterstrich + Substantiv - lambda: random.choice(self.nouns) + ("_" if "_" in policy["allowed_chars"] else "") + random.choice(self.nouns), - - # Benutzer + Zahlen - lambda: "user" + "".join(random.choices(string.digits, k=random.randint(3, 6))) + + # Substantiv + Jahr (z.B. "eagle1995") + lambda: random.choice(self.nouns) + str(random.randint(1985, 2005)), + + # Adjektiv + Substantiv + 2 Ziffern (z.B. "coolwolf42") + lambda: random.choice(self.adjectives) + random.choice(self.nouns) + str(random.randint(10, 99)), + + # Substantiv + Unterstrich + Adjektiv (z.B. "tiger_happy") + lambda: random.choice(self.nouns) + ("_" if "_" in policy["allowed_chars"] else "") + random.choice(self.adjectives), + + # Adjektiv + Substantiv mit Punkt (z.B. "happy.tiger") - falls erlaubt + lambda: random.choice(self.adjectives) + ("." if "." in policy["allowed_chars"] else "") + random.choice(self.nouns), ] # Zufälliges Muster auswählen und Benutzernamen generieren @@ -417,49 +418,221 @@ class UsernameGenerator: policy: Optional[Dict[str, Any]] = None) -> Tuple[bool, str]: """ Überprüft, ob ein Benutzername den Richtlinien entspricht. - + Args: username: Zu überprüfender Benutzername platform: Name der Plattform policy: Optionale Richtlinie (sonst wird die der Plattform verwendet) - + Returns: (Gültigkeit, Fehlermeldung) """ # Richtlinie bestimmen if not policy: policy = self.get_platform_policy(platform) - + # Länge prüfen if len(username) < policy["min_length"]: return False, f"Benutzername ist zu kurz (mindestens {policy['min_length']} Zeichen erforderlich)" - + if len(username) > policy["max_length"]: return False, f"Benutzername ist zu lang (maximal {policy['max_length']} Zeichen erlaubt)" - + # Erlaubte Zeichen prüfen for char in username: if char not in policy["allowed_chars"]: return False, f"Unerlaubtes Zeichen: '{char}'" - + # Anfangszeichen prüfen if username[0] not in policy["allowed_start_chars"]: return False, f"Benutzername darf nicht mit '{username[0]}' beginnen" - + # Endzeichen prüfen if username[-1] not in policy["allowed_end_chars"]: return False, f"Benutzername darf nicht mit '{username[-1]}' enden" - + # Aufeinanderfolgende Sonderzeichen prüfen if not policy["allowed_consecutive_special"]: special_chars = set(policy["allowed_chars"]) - set(string.ascii_letters + string.digits) for i in range(len(username) - 1): if username[i] in special_chars and username[i+1] in special_chars: return False, "Keine aufeinanderfolgenden Sonderzeichen erlaubt" - + # Disallowed words for word in policy["disallowed_words"]: if word.lower() in username.lower(): return False, f"Der Benutzername darf '{word}' nicht enthalten" - - return True, "Benutzername ist gültig" \ No newline at end of file + + # ANTI-DETECTION: Prüfe auf verdächtige Bot-Patterns + if self._has_suspicious_pattern(username): + return False, "Benutzername enthält verdächtiges Bot-Pattern" + + return True, "Benutzername ist gültig" + + # ========================================================================== + # ANTI-DETECTION: Verdächtige Pattern-Erkennung + # ========================================================================== + + def 
_has_suspicious_pattern(self, username: str) -> bool: + """ + Prüft, ob ein Benutzername verdächtige Bot-Patterns enthält. + + Diese Methode erkennt Benutzernamen-Muster, die häufig von Bots + verwendet werden und daher von Plattformen leicht erkannt werden. + + Args: + username: Zu prüfender Benutzername + + Returns: + True wenn verdächtig, False wenn ok + """ + username_lower = username.lower() + + # Liste verdächtiger Patterns (Regex) + suspicious_patterns = [ + # Plattform-spezifische Bot-Prefixe + r'^fb_', # Facebook Bot-Pattern + r'^ig_', # Instagram Bot-Pattern + r'^tw_', # Twitter Bot-Pattern + r'^tt_', # TikTok Bot-Pattern + + # Offensichtliche Bot/Test-Prefixe + r'^bot_', # Offensichtlicher Bot + r'^test_', # Test-Account + r'^temp_', # Temporär + r'^fake_', # Offensichtlich fake + r'^new_', # Neu (suspekt) + r'^auto_', # Automatisierung + + # Notfall/Backup-Patterns (aus altem Code) + r'_emergency_', # Notfall-Pattern + r'_backup_', # Backup-Pattern + r'^emergency_', # Emergency am Anfang + r'^backup_', # Backup am Anfang + + # Generische Bot-Patterns + r'^user\d{4,}$', # user + 4+ Ziffern am Ende (z.B. user12345) + r'^account\d+', # account + Zahlen + r'^profile\d+', # profile + Zahlen + + # Verdächtige Zahlenfolgen + r'\d{8,}', # 8+ aufeinanderfolgende Ziffern + r'^[a-z]{1,2}\d{6,}$', # 1-2 Buchstaben + 6+ Ziffern + + # Timestamp-basierte Patterns + r'\d{10,}', # Unix-Timestamp-ähnlich (10+ Ziffern) + r'_\d{13}_', # Millisekunden-Timestamp in der Mitte + + # Generische Suffixe die auf Bots hindeuten + r'_gen$', # Generator-Suffix + r'_bot$', # Bot-Suffix + r'_auto$', # Auto-Suffix + r'_spam$', # Spam-Suffix + ] + + for pattern in suspicious_patterns: + if re.search(pattern, username_lower): + logger.debug(f"Verdächtiges Pattern gefunden: {pattern} in '{username}'") + return True + + # Zusätzliche Heuristiken + + # Prüfe auf zu viele Unterstriche (>2 ist verdächtig) + if username_lower.count('_') > 2: + logger.debug(f"Zu viele Unterstriche in '{username}'") + return True + + # Prüfe auf repetitive Zeichen (z.B. "aaaa" oder "1111") + for i in range(len(username_lower) - 3): + if username_lower[i] == username_lower[i+1] == username_lower[i+2] == username_lower[i+3]: + logger.debug(f"Repetitive Zeichen in '{username}'") + return True + + return False + + def generate_realistic_username(self, first_name: str = "", last_name: str = "", + platform: str = "default") -> str: + """ + Generiert einen realistischen Benutzernamen ohne verdächtige Patterns. + + Diese Methode ist speziell für Anti-Detection optimiert und generiert + Benutzernamen, die wie echte menschliche Benutzernamen aussehen. + + Args: + first_name: Vorname (optional) + last_name: Nachname (optional) + platform: Zielplattform + + Returns: + Realistischer Benutzername + """ + policy = self.get_platform_policy(platform) + + # Realistische Patterns (wie echte Menschen sie wählen) + realistic_patterns = [] + + if first_name: + first_name_clean = re.sub(r'[^a-z]', '', first_name.lower()) + + # Pattern 1: vorname + Geburtsjahr (z.B. "max1995") + realistic_patterns.append( + lambda fn=first_name_clean: f"{fn}{random.randint(1985, 2005)}" + ) + + # Pattern 2: vorname + Nachname-Initial + 2 Ziffern (z.B. "maxm92") + if last_name: + last_initial = last_name[0].lower() if last_name else '' + realistic_patterns.append( + lambda fn=first_name_clean, li=last_initial: f"{fn}{li}{random.randint(10, 99)}" + ) + + # Pattern 3: vorname.nachname (z.B. 
"max.mustermann") + if last_name: + last_name_clean = re.sub(r'[^a-z]', '', last_name.lower()) + realistic_patterns.append( + lambda fn=first_name_clean, ln=last_name_clean: f"{fn}.{ln}" + ) + + # Pattern 4: vorname_adjektiv (z.B. "max_sunny") + realistic_patterns.append( + lambda fn=first_name_clean: f"{fn}_{random.choice(self.adjectives)}" + ) + + # Pattern 5: adjektiv_vorname_jahr (z.B. "sunny_max_93") + realistic_patterns.append( + lambda fn=first_name_clean: f"{random.choice(self.adjectives)}_{fn}_{random.randint(85, 99)}" + ) + + # Fallback-Patterns ohne Namen + realistic_patterns.extend([ + # adjektiv + tier (z.B. "happytiger") + lambda: f"{random.choice(self.adjectives)}{random.choice(self.nouns)}", + + # adjektiv + tier + 2 Ziffern (z.B. "coolwolf42") + lambda: f"{random.choice(self.adjectives)}{random.choice(self.nouns)}{random.randint(10, 99)}", + + # tier + jahr (z.B. "eagle1995") + lambda: f"{random.choice(self.nouns)}{random.randint(1985, 2005)}", + ]) + + # Versuche bis zu 20 mal einen gültigen, nicht-verdächtigen Namen zu generieren + for _ in range(20): + pattern_func = random.choice(realistic_patterns) + username = pattern_func() + + # Länge anpassen + if len(username) > policy["max_length"]: + username = username[:policy["max_length"]] + if len(username) < policy["min_length"]: + username += str(random.randint(10, 99)) + + # Validieren (inkl. Pattern-Check) + valid, _ = self.validate_username(username, policy=policy) + if valid: + logger.info(f"Realistischer Benutzername generiert: {username}") + return username + + # Absoluter Fallback + fallback = f"{random.choice(self.adjectives)}{random.choice(self.nouns)}{random.randint(10, 99)}" + logger.warning(f"Fallback-Benutzername verwendet: {fallback}") + return fallback \ No newline at end of file diff --git a/views/about_dialog.py b/views/about_dialog.py index d438241..9ceead8 100644 --- a/views/about_dialog.py +++ b/views/about_dialog.py @@ -41,12 +41,12 @@ class AboutDialog(QDialog): # Get the theme-aware logo path if self.theme_manager: # Use theme manager to get correct logo based on current theme - logo_path = self.theme_manager.get_icon_path("intelsight-logo") + logo_path = self.theme_manager.get_icon_path("aegissight-logo") else: # Fallback to light logo if no theme manager current_dir = os.path.dirname(os.path.abspath(__file__)) parent_dir = os.path.dirname(current_dir) - logo_path = os.path.join(parent_dir, "resources", "icons", "intelsight-logo.svg") + logo_path = os.path.join(parent_dir, "resources", "icons", "aegissight-logo.svg") if os.path.exists(logo_path): # Load logo and display it at a smaller size for corner placement @@ -61,7 +61,7 @@ class AboutDialog(QDialog): logo_label.setFixedSize(scaled_pixmap.size()) else: # Fallback if logo not found - logo_label.setText("IntelSight") + logo_label.setText("AegisSight") logo_label.setStyleSheet("font-size: 18px; font-weight: bold;") # Logo in top-left corner @@ -111,7 +111,7 @@ class AboutDialog(QDialog): lines = [ f"

{title}",
             f"{version_text}",
-            "© 2025 IntelSight UG (haftungsbeschränkt)",
+            "© 2025 AegisSight UG (haftungsbeschränkt)",
             f"{support}",
             f"{license_text}
", ] diff --git a/views/main_window.py b/views/main_window.py index 3e6a1e9..5373ac1 100644 --- a/views/main_window.py +++ b/views/main_window.py @@ -156,11 +156,11 @@ class MainWindow(QMainWindow): # Get the correct logo based on current theme if self.theme_manager: - logo_path = self.theme_manager.get_icon_path("intelsight-logo") + logo_path = self.theme_manager.get_icon_path("aegissight-logo") else: # Fallback if no theme manager logo_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), - "resources", "icons", "intelsight-logo.svg") + "resources", "icons", "aegissight-logo.svg") self.logo_widget.setIcon(QIcon(logo_path)) self.logo_widget.setIconSize(QSize(120, 40)) @@ -322,7 +322,7 @@ class MainWindow(QMainWindow): if hasattr(self, 'logo_widget') and self.logo_widget and self.theme_manager: # Get the new logo path from theme manager based on current theme current_theme = self.theme_manager.get_current_theme() - logo_path = self.theme_manager.get_icon_path("intelsight-logo") + logo_path = self.theme_manager.get_icon_path("aegissight-logo") print(f"DEBUG: Updating logo for theme '{current_theme}'") print(f"DEBUG: Logo path: {logo_path}")