Initial commit
Dieser Commit ist enthalten in:
9
.claude/settings.local.json
Normale Datei
9
.claude/settings.local.json
Normale Datei
@ -0,0 +1,9 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(curl:*)",
|
||||
"Bash(nslookup:*)"
|
||||
],
|
||||
"deny": []
|
||||
}
|
||||
}
|
||||
137
.gitignore
vendored
Normale Datei
137
.gitignore
vendored
Normale Datei
@ -0,0 +1,137 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
pip-wheel-metadata/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# pipenv
|
||||
Pipfile.lock
|
||||
|
||||
# PEP 582
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Logs
|
||||
logs/
|
||||
*.log
|
||||
|
||||
# Screenshots (if they're temporary)
|
||||
screenshots/temp/
|
||||
372
CLAUDE_PROJECT_README.md
Normale Datei
372
CLAUDE_PROJECT_README.md
Normale Datei
@ -0,0 +1,372 @@
|
||||
# AccountForger
|
||||
|
||||
*This README was automatically generated by Claude Project Manager*
|
||||
|
||||
## Project Overview
|
||||
|
||||
- **Path**: `A:\GiTea\AccountForger`
|
||||
- **Files**: 891 files
|
||||
- **Size**: 354.0 MB
|
||||
- **Last Modified**: 2025-08-01 20:51
|
||||
|
||||
## Technology Stack
|
||||
|
||||
### Languages
|
||||
- Python
|
||||
|
||||
### Frameworks & Libraries
|
||||
- React
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
check_rotation_system.py
|
||||
CLAUDE_PROJECT_README.md
|
||||
debug_video_issue.py
|
||||
install_requirements.py
|
||||
main.py
|
||||
package.json
|
||||
README.md
|
||||
requirements.txt
|
||||
run_migration.py
|
||||
application/
|
||||
│ ├── __init__.py
|
||||
│ ├── services/
|
||||
│ │ ├── error_handler.py
|
||||
│ │ └── __init__.py
|
||||
│ └── use_cases/
|
||||
│ ├── adaptive_rate_limit_use_case.py
|
||||
│ ├── analyze_failure_rate_use_case.py
|
||||
│ ├── detect_rate_limit_use_case.py
|
||||
│ ├── export_accounts_use_case.py
|
||||
│ ├── generate_account_fingerprint_use_case.py
|
||||
│ ├── generate_reports_use_case.py
|
||||
│ ├── log_account_creation_use_case.py
|
||||
│ ├── method_rotation_use_case.py
|
||||
│ ├── one_click_login_use_case.py
|
||||
│ └── __init__.py
|
||||
browser/
|
||||
│ ├── cookie_consent_handler.py
|
||||
│ ├── fingerprint_protection.py
|
||||
│ ├── instagram_video_bypass.py
|
||||
│ ├── playwright_extensions.py
|
||||
│ ├── playwright_manager.py
|
||||
│ ├── stealth_config.py
|
||||
│ ├── video_stealth_enhancement.py
|
||||
│ └── __init__.py
|
||||
config/
|
||||
│ ├── app_version.json
|
||||
│ ├── browser_config.json
|
||||
│ ├── email_config.json
|
||||
│ ├── facebook_config.json
|
||||
│ ├── implementation_switch.py
|
||||
│ ├── instagram_config.json
|
||||
│ └── license.json
|
||||
controllers/
|
||||
│ ├── account_controller.py
|
||||
│ ├── main_controller.py
|
||||
│ ├── session_controller.py
|
||||
│ ├── settings_controller.py
|
||||
│ └── platform_controllers/
|
||||
│ ├── base_controller.py
|
||||
│ ├── base_worker_thread.py
|
||||
│ ├── gmail_controller.py
|
||||
│ ├── instagram_controller.py
|
||||
│ ├── method_rotation_mixin.py
|
||||
│ ├── method_rotation_worker_mixin.py
|
||||
│ ├── ok_ru_controller.py
|
||||
│ ├── rotation_error_handler.py
|
||||
│ ├── safe_imports.py
|
||||
│ └── tiktok_controller.py
|
||||
database/
|
||||
│ ├── accounts.db
|
||||
│ ├── account_repository.py
|
||||
│ ├── db_manager.py
|
||||
│ ├── schema_v2.sql
|
||||
│ ├── __init__.py
|
||||
│ └── migrations/
|
||||
│ ├── add_browser_storage_columns.sql
|
||||
│ ├── add_fingerprint_persistence.sql
|
||||
│ ├── add_fingerprint_support.sql
|
||||
│ ├── add_method_rotation_system.sql
|
||||
│ └── remove_unused_fingerprint_columns.sql
|
||||
docs/
|
||||
│ └── CLEAN_ARCHITECTURE.md
|
||||
domain/
|
||||
│ ├── exceptions.py
|
||||
│ ├── __init__.py
|
||||
│ ├── entities/
|
||||
│ │ ├── account_creation_event.py
|
||||
│ │ ├── browser_fingerprint.py
|
||||
│ │ ├── error_event.py
|
||||
│ │ ├── method_rotation.py
|
||||
│ │ ├── rate_limit_policy.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── repositories/
|
||||
│ │ ├── analytics_repository.py
|
||||
│ │ ├── fingerprint_repository.py
|
||||
│ │ ├── method_rotation_repository.py
|
||||
│ │ ├── rate_limit_repository.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── services/
|
||||
│ │ ├── analytics_service.py
|
||||
│ │ ├── fingerprint_service.py
|
||||
│ │ ├── rate_limit_service.py
|
||||
│ │ └── __init__.py
|
||||
│ └── value_objects/
|
||||
│ ├── account_creation_params.py
|
||||
│ ├── action_timing.py
|
||||
│ ├── browser_protection_style.py
|
||||
│ ├── error_summary.py
|
||||
│ ├── login_credentials.py
|
||||
│ ├── operation_result.py
|
||||
│ ├── report.py
|
||||
│ └── __init__.py
|
||||
infrastructure/
|
||||
│ ├── __init__.py
|
||||
│ ├── repositories/
|
||||
│ │ ├── account_repository.py
|
||||
│ │ ├── analytics_repository.py
|
||||
│ │ ├── base_repository.py
|
||||
│ │ ├── fingerprint_repository.py
|
||||
│ │ ├── method_strategy_repository.py
|
||||
│ │ ├── platform_method_state_repository.py
|
||||
│ │ ├── rate_limit_repository.py
|
||||
│ │ ├── rotation_session_repository.py
|
||||
│ │ └── __init__.py
|
||||
│ └── services/
|
||||
│ ├── advanced_fingerprint_service.py
|
||||
│ ├── browser_protection_service.py
|
||||
│ ├── fingerprint_cache_service.py
|
||||
│ ├── instagram_rate_limit_service.py
|
||||
│ ├── structured_analytics_service.py
|
||||
│ ├── __init__.py
|
||||
│ └── fingerprint/
|
||||
│ ├── account_fingerprint_service.py
|
||||
│ ├── browser_injection_service.py
|
||||
│ ├── fingerprint_generator_service.py
|
||||
│ ├── fingerprint_persistence_service.py
|
||||
│ ├── fingerprint_profile_service.py
|
||||
│ ├── fingerprint_rotation_service.py
|
||||
│ ├── fingerprint_validation_service.py
|
||||
│ ├── timezone_location_service.py
|
||||
│ └── __init__.py
|
||||
licensing/
|
||||
│ ├── api_client.py
|
||||
│ ├── hardware_fingerprint.py
|
||||
│ ├── license_manager.py
|
||||
│ ├── license_validator.py
|
||||
│ ├── session_manager.py
|
||||
│ └── __init__.py
|
||||
localization/
|
||||
│ ├── language_manager.py
|
||||
│ ├── __init__.py
|
||||
│ └── languages/
|
||||
│ ├── de.json
|
||||
│ ├── en.json
|
||||
│ ├── es.json
|
||||
│ ├── fr.json
|
||||
│ └── ja.json
|
||||
logs/
|
||||
│ ├── instagram_automation.log
|
||||
│ ├── instagram_controller.log
|
||||
│ ├── instagram_login.log
|
||||
│ ├── instagram_registration.log
|
||||
│ ├── instagram_ui_helper.log
|
||||
│ ├── instagram_utils.log
|
||||
│ ├── instagram_verification.log
|
||||
│ ├── instagram_workflow.log
|
||||
│ ├── main.log
|
||||
│ └── screenshots/
|
||||
│ ├── after_account_create_click_1753044575.png
|
||||
│ ├── after_account_create_click_1753044886.png
|
||||
│ ├── after_account_create_click_1753045178.png
|
||||
│ ├── after_account_create_click_1753045715.png
|
||||
│ ├── after_account_create_click_1753045915.png
|
||||
│ ├── after_account_create_click_1753046167.png
|
||||
│ ├── after_account_create_click_1753046976.png
|
||||
│ ├── after_account_create_click_1753047240.png
|
||||
│ ├── after_account_create_click_1753047386.png
|
||||
│ └── after_account_create_click_1753048280.png
|
||||
resources/
|
||||
│ ├── icons/
|
||||
│ │ ├── check-white.svg
|
||||
│ │ ├── check.svg
|
||||
│ │ ├── de.svg
|
||||
│ │ ├── en.svg
|
||||
│ │ ├── es.svg
|
||||
│ │ ├── facebook.svg
|
||||
│ │ ├── fr.svg
|
||||
│ │ ├── gmail.svg
|
||||
│ │ ├── instagram.svg
|
||||
│ │ └── intelsight-logo.svg
|
||||
│ └── themes/
|
||||
│ ├── dark.qss
|
||||
│ └── light.qss
|
||||
screenshots
|
||||
social_networks/
|
||||
│ ├── base_automation.py
|
||||
│ ├── __init__.py
|
||||
│ ├── facebook/
|
||||
│ │ ├── facebook_automation.py
|
||||
│ │ ├── facebook_login.py
|
||||
│ │ ├── facebook_registration.py
|
||||
│ │ ├── facebook_selectors.py
|
||||
│ │ ├── facebook_ui_helper.py
|
||||
│ │ ├── facebook_utils.py
|
||||
│ │ ├── facebook_verification.py
|
||||
│ │ ├── facebook_workflow.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── gmail/
|
||||
│ │ ├── gmail_automation.py
|
||||
│ │ ├── gmail_login.py
|
||||
│ │ ├── gmail_registration.py
|
||||
│ │ ├── gmail_selectors.py
|
||||
│ │ ├── gmail_ui_helper.py
|
||||
│ │ ├── gmail_utils.py
|
||||
│ │ ├── gmail_verification.py
|
||||
│ │ ├── gmail_workflow.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── instagram/
|
||||
│ │ ├── instagram_automation.py
|
||||
│ │ ├── instagram_login.py
|
||||
│ │ ├── instagram_registration.py
|
||||
│ │ ├── instagram_selectors.py
|
||||
│ │ ├── instagram_ui_helper.py
|
||||
│ │ ├── instagram_utils.py
|
||||
│ │ ├── instagram_verification.py
|
||||
│ │ ├── instagram_workflow.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── ok_ru/
|
||||
│ │ ├── ok_ru_automation.py
|
||||
│ │ ├── ok_ru_login.py
|
||||
│ │ ├── ok_ru_registration.py
|
||||
│ │ ├── ok_ru_selectors.py
|
||||
│ │ ├── ok_ru_ui_helper.py
|
||||
│ │ ├── ok_ru_utils.py
|
||||
│ │ ├── ok_ru_verification.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── tiktok/
|
||||
│ │ ├── tiktok_automation.py
|
||||
│ │ ├── tiktok_login.py
|
||||
│ │ ├── tiktok_registration.py
|
||||
│ │ ├── tiktok_registration_backup.py
|
||||
│ │ ├── tiktok_registration_final.py
|
||||
│ │ ├── tiktok_registration_new.py
|
||||
│ │ ├── tiktok_selectors.py
|
||||
│ │ ├── tiktok_ui_helper.py
|
||||
│ │ ├── tiktok_utils.py
|
||||
│ │ └── tiktok_verification.py
|
||||
│ ├── twitter/
|
||||
│ │ ├── twitter_automation.py
|
||||
│ │ ├── twitter_login.py
|
||||
│ │ ├── twitter_registration.py
|
||||
│ │ ├── twitter_selectors.py
|
||||
│ │ ├── twitter_ui_helper.py
|
||||
│ │ ├── twitter_utils.py
|
||||
│ │ ├── twitter_verification.py
|
||||
│ │ ├── twitter_workflow.py
|
||||
│ │ └── __init__.py
|
||||
│ ├── vk/
|
||||
│ │ ├── vk_automation.py
|
||||
│ │ ├── vk_login.py
|
||||
│ │ ├── vk_registration.py
|
||||
│ │ ├── vk_selectors.py
|
||||
│ │ ├── vk_ui_helper.py
|
||||
│ │ ├── vk_utils.py
|
||||
│ │ ├── vk_verification.py
|
||||
│ │ ├── vk_workflow.py
|
||||
│ │ └── __init__.py
|
||||
│ └── x/
|
||||
│ ├── x_automation.py
|
||||
│ ├── x_login.py
|
||||
│ ├── x_registration.py
|
||||
│ ├── x_selectors.py
|
||||
│ ├── x_ui_helper.py
|
||||
│ ├── x_utils.py
|
||||
│ ├── x_verification.py
|
||||
│ ├── x_workflow.py
|
||||
│ └── __init__.py
|
||||
styles/
|
||||
│ ├── modal_styles.py
|
||||
│ └── __init__.py
|
||||
tests/
|
||||
│ └── test_method_rotation.py
|
||||
updates/
|
||||
│ ├── downloader.py
|
||||
│ ├── update_checker.py
|
||||
│ ├── update_v1.1.0.zip
|
||||
│ ├── version.py
|
||||
│ └── __init__.py
|
||||
utils/
|
||||
│ ├── birthday_generator.py
|
||||
│ ├── email_handler.py
|
||||
│ ├── human_behavior.py
|
||||
│ ├── logger.py
|
||||
│ ├── modal_manager.py
|
||||
│ ├── modal_test.py
|
||||
│ ├── password_generator.py
|
||||
│ ├── performance_monitor.py
|
||||
│ ├── proxy_rotator.py
|
||||
│ └── result_decorators.py
|
||||
views/
|
||||
├── about_dialog.py
|
||||
├── main_window.py
|
||||
├── platform_selector.py
|
||||
├── components/
|
||||
│ ├── accounts_overview_view.py
|
||||
│ ├── platform_grid_view.py
|
||||
│ ├── tab_navigation.py
|
||||
│ └── __init__.py
|
||||
├── dialogs/
|
||||
│ ├── account_creation_result_dialog.py
|
||||
│ ├── license_activation_dialog.py
|
||||
│ └── __init__.py
|
||||
├── tabs/
|
||||
│ ├── accounts_tab.py
|
||||
│ ├── generator_tab.py
|
||||
│ ├── generator_tab_modern.py
|
||||
│ └── settings_tab.py
|
||||
└── widgets/
|
||||
├── account_card.py
|
||||
├── account_creation_modal.py
|
||||
├── account_creation_modal_v2.py
|
||||
├── forge_animation_widget.py
|
||||
├── forge_animation_widget_v2.py
|
||||
├── icon_factory.py
|
||||
├── language_dropdown.py
|
||||
├── login_process_modal.py
|
||||
├── modern_message_box.py
|
||||
└── platform_button.py
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `package.json`
|
||||
- `README.md`
|
||||
- `requirements.txt`
|
||||
|
||||
## Claude Integration
|
||||
|
||||
This project is managed with Claude Project Manager. To work with this project:
|
||||
|
||||
1. Open Claude Project Manager
|
||||
2. Click on this project's tile
|
||||
3. Claude will open in the project directory
|
||||
|
||||
## Notes
|
||||
|
||||
*Add your project-specific notes here*
|
||||
|
||||
---
|
||||
|
||||
## Development Log
|
||||
|
||||
- README generated on 2025-07-27 11:07:01
|
||||
- README updated on 2025-07-28 18:14:33
|
||||
- README updated on 2025-07-29 19:24:34
|
||||
- README updated on 2025-07-31 00:00:41
|
||||
- README updated on 2025-08-01 19:02:35
|
||||
- README updated on 2025-08-01 20:50:22
|
||||
- README updated on 2025-08-01 20:51:41
|
||||
- README updated on 2025-08-01 21:06:44
|
||||
130
README.md
Normale Datei
130
README.md
Normale Datei
@ -0,0 +1,130 @@
|
||||
# Social Media Account Generator
|
||||
|
||||
Dieses Repository enthält eine Desktopanwendung zur automatisierten Erstellung und Verwaltung von Social‑Media‑Accounts. Die grafische Oberfläche basiert auf **PyQt5**, die Browser‑Automatisierung erfolgt mit **Playwright**. Der Code ist modular aufgebaut und kann leicht um weitere Plattformen erweitert werden.
|
||||
|
||||
## Installation
|
||||
|
||||
1. Python 3.8 oder neuer installieren.
|
||||
2. Abhängigkeiten mit `pip install -r requirements.txt` einrichten.
|
||||
|
||||
## Anwendung starten
|
||||
|
||||
```bash
|
||||
python main.py
|
||||
```
|
||||
|
||||
Beim ersten Start werden benötigte Ordner wie `logs`, `config` und `resources` automatisch angelegt. Einstellungen können im Ordner `config` angepasst werden.
|
||||
|
||||
## Projektstruktur (Auszug)
|
||||
|
||||
```text
|
||||
.
|
||||
├── main.py
|
||||
├── browser/
|
||||
│ ├── playwright_manager.py
|
||||
│ └── stealth_config.py
|
||||
├── controllers/
|
||||
│ ├── main_controller.py
|
||||
│ ├── account_controller.py
|
||||
│ ├── settings_controller.py
|
||||
│ └── platform_controllers/
|
||||
│ ├── base_controller.py
|
||||
│ ├── instagram_controller.py
|
||||
│ └── tiktok_controller.py
|
||||
├── views/
|
||||
│ ├── main_window.py
|
||||
│ ├── platform_selector.py
|
||||
│ ├── about_dialog.py
|
||||
│ ├── widgets/
|
||||
│ │ └── platform_button.py
|
||||
│ └── tabs/
|
||||
│ ├── generator_tab.py
|
||||
│ ├── accounts_tab.py
|
||||
│ └── settings_tab.py
|
||||
├── social_networks/
|
||||
│ ├── base_automation.py
|
||||
│ ├── instagram/
|
||||
│ │ └── ...
|
||||
│ ├── tiktok/
|
||||
│ │ └── ...
|
||||
│ ├── facebook/
|
||||
│ │ └── ...
|
||||
│ └── twitter/
|
||||
│ └── ...
|
||||
├── localization/
|
||||
│ ├── language_manager.py
|
||||
│ └── languages/
|
||||
│ ├── de.json
|
||||
│ ├── en.json
|
||||
│ ├── es.json
|
||||
│ ├── fr.json
|
||||
│ └── ja.json
|
||||
├── utils/
|
||||
│ ├── logger.py
|
||||
│ ├── password_generator.py
|
||||
│ ├── username_generator.py
|
||||
│ ├── birthday_generator.py
|
||||
│ ├── email_handler.py
|
||||
│ ├── proxy_rotator.py
|
||||
│ ├── human_behavior.py
|
||||
│ ├── text_similarity.py
|
||||
│ └── theme_manager.py
|
||||
├── database/
|
||||
│ ├── db_manager.py
|
||||
│ └── ...
|
||||
├── licensing/
|
||||
│ ├── license_manager.py
|
||||
│ ├── hardware_fingerprint.py
|
||||
│ └── license_validator.py
|
||||
├── updates/
|
||||
│ ├── update_checker.py
|
||||
│ ├── downloader.py
|
||||
│ ├── version.py
|
||||
│ └── ...
|
||||
├── config/
|
||||
│ ├── browser_config.json
|
||||
│ ├── email_config.json
|
||||
│ ├── proxy_config.json
|
||||
│ ├── stealth_config.json
|
||||
│ ├── license_config.json
|
||||
│ ├── instagram_config.json
|
||||
│ ├── facebook_config.json
|
||||
│ ├── twitter_config.json
|
||||
│ ├── tiktok_config.json
|
||||
│ ├── theme.json
|
||||
│ ├── app_version.json
|
||||
│ └── update_config.json
|
||||
├── resources/
|
||||
│ ├── icons/
|
||||
│ │ ├── instagram.svg
|
||||
│ │ ├── facebook.svg
|
||||
│ │ ├── twitter.svg
|
||||
│ │ ├── tiktok.svg
|
||||
│ │ └── vk.svg
|
||||
│ └── themes/
|
||||
│ ├── light.qss
|
||||
│ └── dark.qss
|
||||
├── testcases/
|
||||
│ └── imap_test.py
|
||||
├── requirements.txt
|
||||
└── README.md
|
||||
```
|
||||
|
||||
Weitere Ordner:
|
||||
|
||||
- `logs/` – Protokolldateien und Screenshots
|
||||
- `resources/` – Icons und Theme‑Dateien
|
||||
- `updates/` – heruntergeladene Updates
|
||||
|
||||
## Lokalisierung
|
||||
|
||||
Im Ordner `localization/languages` befinden sich Übersetzungsdateien für Deutsch, Englisch, Spanisch, Französisch und Japanisch. Die aktuelle Sprache kann zur Laufzeit gewechselt werden.
|
||||
|
||||
## Lizenz und Updates
|
||||
|
||||
Die Ordner `licensing` und `updates` enthalten die Logik zur Lizenzprüfung und zum Update‑Management. Versionsinformationen werden in `updates/version.py` verwaltet.
|
||||
|
||||
## Tests
|
||||
|
||||
Im Ordner `testcases` liegt beispielhaft `imap_test.py`, mit dem die IMAP‑Konfiguration getestet werden kann.
|
||||
|
||||
3
application/__init__.py
Normale Datei
3
application/__init__.py
Normale Datei
@ -0,0 +1,3 @@
|
||||
"""
|
||||
Application Layer - Use Cases und Application Services
|
||||
"""
|
||||
0
application/services/__init__.py
Normale Datei
0
application/services/__init__.py
Normale Datei
169
application/services/error_handler.py
Normale Datei
169
application/services/error_handler.py
Normale Datei
@ -0,0 +1,169 @@
|
||||
"""
|
||||
Zentralisiertes Error Handling für AccountForger
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
from dataclasses import dataclass
|
||||
|
||||
from domain.exceptions import (
|
||||
AccountCreationException,
|
||||
RateLimitException,
|
||||
CaptchaRequiredException,
|
||||
ValidationException,
|
||||
ProxyException,
|
||||
NetworkException,
|
||||
AccountForgerException
|
||||
)
|
||||
|
||||
|
||||
@dataclass
class ErrorResult:
    """Structured error result produced by ErrorHandler.

    Carries a user-facing message alongside technical details so that
    UI/controller code can display errors without inspecting exceptions.
    """
    user_message: str  # localized message shown to the end user
    technical_details: str  # raw error text, intended for logs/diagnostics
    recovery_suggestion: Optional[str] = None  # hint on how to recover, if known
    error_type: Optional[str] = None  # machine-readable category, e.g. "rate_limit"
    retry_possible: bool = False  # whether the caller may retry the operation
    retry_after: Optional[int] = None  # suggested wait in seconds before a retry
|
||||
|
||||
|
||||
class ErrorHandler:
    """Centralized error handling for account creation workflows.

    Maps both typed domain exceptions and raw error-message strings to
    structured ``ErrorResult`` objects so that callers never need to
    inspect exception internals themselves.
    """

    def __init__(self, logger: Optional[logging.Logger] = None):
        """
        Args:
            logger: Logger used for error reporting; defaults to this
                module's logger.
        """
        self.logger = logger or logging.getLogger(__name__)

    def handle_account_creation_error(self, error: Exception, context: Dict[str, Any]) -> ErrorResult:
        """Translate an exception from account creation into an ErrorResult.

        Args:
            error: The exception that was raised.
            context: Additional logging context (e.g. platform, session id),
                passed through to the logger via ``extra``.

        Returns:
            Structured result with user message, recovery hint and retry info.
        """
        self.logger.error(f"Account creation failed: {error}", extra=context)

        # Specific exception types are checked before the broader
        # AccountCreationException branch so the generic mapping does not
        # shadow them (presumably they subclass it — confirm in
        # domain.exceptions).
        if isinstance(error, RateLimitException):
            return ErrorResult(
                user_message=error.user_friendly_message,
                technical_details=str(error),
                recovery_suggestion=error.recovery_suggestion,
                error_type="rate_limit",
                retry_possible=True,
                retry_after=error.retry_after
            )

        elif isinstance(error, CaptchaRequiredException):
            # Captchas need a human; retrying automatically will not help.
            return ErrorResult(
                user_message=error.user_friendly_message,
                technical_details=str(error),
                recovery_suggestion=error.recovery_suggestion,
                error_type="captcha",
                retry_possible=False
            )

        elif isinstance(error, ValidationException):
            # Invalid user input: retrying with the same input is pointless.
            return ErrorResult(
                user_message=f"Eingabefehler: {error.message}",
                technical_details=str(error),
                recovery_suggestion="Bitte überprüfen Sie Ihre Eingaben",
                error_type="validation",
                retry_possible=False
            )

        elif isinstance(error, ProxyException):
            return ErrorResult(
                user_message="Proxy-Verbindungsfehler",
                technical_details=str(error),
                recovery_suggestion="Überprüfen Sie Ihre Proxy-Einstellungen",
                error_type="proxy",
                retry_possible=True
            )

        elif isinstance(error, NetworkException):
            # NetworkException may carry its own recovery hint in .details.
            return ErrorResult(
                user_message="Netzwerkverbindungsfehler",
                technical_details=str(error),
                recovery_suggestion=error.details.get("recovery_suggestion", "Überprüfen Sie Ihre Internetverbindung"),
                error_type="network",
                retry_possible=True
            )

        elif isinstance(error, AccountCreationException):
            # Broad domain exception: forward its own message/type verbatim.
            return ErrorResult(
                user_message=error.user_friendly_message,
                technical_details=str(error),
                recovery_suggestion=error.recovery_suggestion,
                error_type=error.error_type,
                retry_possible=True
            )

        # Generic fallback for anything not covered above.
        else:
            return ErrorResult(
                user_message="Ein unerwarteter Fehler ist aufgetreten",
                technical_details=str(error),
                recovery_suggestion="Bitte versuchen Sie es später erneut",
                error_type="unknown",
                retry_possible=True
            )

    def interpret_error_message(self, error_msg: str, platform: str) -> ErrorResult:
        """Interpret a plain error-message string and return a structured result.

        Pattern matching is case-insensitive and covers both English and
        German phrasings.

        Args:
            error_msg: Raw error text reported by an automation step.
            platform: Platform name, used in the user-facing captcha message.

        Returns:
            Structured ErrorResult classifying the message.
        """
        error_lower = error_msg.lower()

        # Rate limit patterns
        if any(pattern in error_lower for pattern in ["rate limit", "too many", "zu viele"]):
            return ErrorResult(
                user_message="Zu viele Versuche - bitte später erneut versuchen",
                technical_details=error_msg,
                recovery_suggestion="Warten Sie 5-10 Minuten vor dem nächsten Versuch",
                error_type="rate_limit",
                retry_possible=True,
                retry_after=300  # 5 minutes
            )

        # Captcha patterns
        elif any(pattern in error_lower for pattern in ["captcha", "verification required", "verifizierung erforderlich"]):
            return ErrorResult(
                user_message=f"{platform} erfordert eine Captcha-Verifizierung",
                technical_details=error_msg,
                recovery_suggestion="Nutzen Sie einen anderen Proxy oder versuchen Sie es später",
                error_type="captcha",
                retry_possible=False
            )

        # Username patterns
        elif any(pattern in error_lower for pattern in ["username", "benutzername", "already taken", "bereits vergeben"]):
            return ErrorResult(
                user_message="Der gewählte Benutzername ist nicht verfügbar",
                technical_details=error_msg,
                recovery_suggestion="Versuchen Sie einen anderen Benutzernamen",
                error_type="username_taken",
                retry_possible=True
            )

        # Password patterns
        elif any(pattern in error_lower for pattern in ["password", "passwort", "weak", "schwach"]):
            return ErrorResult(
                user_message="Das Passwort erfüllt nicht die Anforderungen",
                technical_details=error_msg,
                # Plain string: the original had a needless f-prefix here.
                recovery_suggestion="Verwenden Sie ein stärkeres Passwort mit Groß-/Kleinbuchstaben, Zahlen und Sonderzeichen",
                error_type="weak_password",
                retry_possible=True
            )

        # Network patterns
        elif any(pattern in error_lower for pattern in ["network", "netzwerk", "connection", "verbindung", "timeout"]):
            return ErrorResult(
                user_message="Netzwerkverbindungsfehler",
                technical_details=error_msg,
                recovery_suggestion="Überprüfen Sie Ihre Internetverbindung",
                error_type="network",
                retry_possible=True
            )

        # Default
        else:
            return ErrorResult(
                user_message=f"Fehler bei der Registrierung: {error_msg}",
                technical_details=error_msg,
                recovery_suggestion="Überprüfen Sie Ihre Eingaben und versuchen Sie es erneut",
                error_type="unknown",
                retry_possible=True
            )
|
||||
28
application/use_cases/__init__.py
Normale Datei
28
application/use_cases/__init__.py
Normale Datei
@ -0,0 +1,28 @@
|
||||
"""
|
||||
Application Use Cases - Geschäftslogik-Orchestrierung
|
||||
"""
|
||||
|
||||
# Rate Limiting Use Cases
|
||||
from .adaptive_rate_limit_use_case import AdaptiveRateLimitUseCase
|
||||
from .detect_rate_limit_use_case import DetectRateLimitUseCase
|
||||
|
||||
# Analytics Use Cases
|
||||
from .log_account_creation_use_case import LogAccountCreationUseCase
|
||||
from .analyze_failure_rate_use_case import AnalyzeFailureRateUseCase
|
||||
from .generate_reports_use_case import GenerateReportsUseCase
|
||||
|
||||
# Export Use Cases
|
||||
from .export_accounts_use_case import ExportAccountsUseCase
|
||||
|
||||
# Login Use Cases
|
||||
from .one_click_login_use_case import OneClickLoginUseCase
|
||||
|
||||
__all__ = [
|
||||
'AdaptiveRateLimitUseCase',
|
||||
'DetectRateLimitUseCase',
|
||||
'LogAccountCreationUseCase',
|
||||
'AnalyzeFailureRateUseCase',
|
||||
'GenerateReportsUseCase',
|
||||
'ExportAccountsUseCase',
|
||||
'OneClickLoginUseCase'
|
||||
]
|
||||
221
application/use_cases/adaptive_rate_limit_use_case.py
Normale Datei
221
application/use_cases/adaptive_rate_limit_use_case.py
Normale Datei
@ -0,0 +1,221 @@
|
||||
"""
|
||||
Adaptive Rate Limit Use Case - Passt Geschwindigkeit dynamisch an
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from domain.services.rate_limit_service import IRateLimitService
|
||||
from domain.value_objects.action_timing import ActionTiming, ActionType
|
||||
from domain.entities.rate_limit_policy import RateLimitPolicy
|
||||
|
||||
logger = logging.getLogger("adaptive_rate_limit_use_case")
|
||||
|
||||
|
||||
class AdaptiveRateLimitUseCase:
|
||||
"""
|
||||
Use Case für adaptive Geschwindigkeitsanpassung basierend auf Systemverhalten.
|
||||
Analysiert Response-Zeiten, passt Delays dynamisch an und erkennt Anomalien.
|
||||
"""
|
||||
|
||||
    def __init__(self, rate_limit_service: IRateLimitService):
        """Create the use case around a rate limit domain service.

        Args:
            rate_limit_service: Service that stores policies, records action
                timings and computes delays.
        """
        self.rate_limit_service = rate_limit_service
        # Factor applied to the average duration: timings above
        # avg * threshold count as anomalies. NOTE(review): the original
        # comment said "standard deviations", but the code in _is_anomaly
        # multiplies the mean, not a std deviation.
        self.anomaly_threshold = 2.0
        # Policies are re-tuned at most once per interval per action type.
        self.adaptation_interval = timedelta(minutes=5)
        # Maps ActionType -> datetime of the last adaptation.
        self.last_adaptation = {}
|
||||
|
||||
    def execute(self, action_type: ActionType, context: Optional[Dict[str, Any]] = None) -> float:
        """
        Run the adaptive rate limiting logic for one upcoming action.

        Args:
            action_type: Type of the action about to be performed.
            context: Extra context (e.g. session id, platform) forwarded to
                the delay calculation.

        Returns:
            The computed optimal delay in seconds. Note this is the
            calculated delay, not the time actually waited.
        """
        # Re-tune the policy at most once per adaptation interval.
        if self._should_adapt(action_type):
            self._adapt_policy(action_type)

        # Compute the delay under the (possibly just-updated) policy.
        delay = self.rate_limit_service.calculate_delay(action_type, context)

        # Block here if the service decides a wait is required.
        actual_wait = self.rate_limit_service.wait_if_needed(action_type)

        logger.debug(f"Adaptive delay for {action_type.value}: {delay:.2f}s (waited: {actual_wait:.2f}s)")

        return delay
|
||||
|
||||
    def record_timing(self, timing: ActionTiming) -> None:
        """
        Record an action timing and react if it looks anomalous.

        Args:
            timing: Timing information of the executed action.
        """
        # Persist the timing so future statistics include it.
        self.rate_limit_service.record_action(timing)

        # Compare the new data point against recent statistics.
        if self._is_anomaly(timing):
            logger.warning(f"Anomaly detected for {timing.action_type.value}: "
                           f"duration={timing.duration}s, success={timing.success}")
            self._handle_anomaly(timing)
|
||||
|
||||
    def _should_adapt(self, action_type: ActionType) -> bool:
        """Return True when the policy for *action_type* is due for re-tuning."""
        # datetime.min means "never adapted", so the first call always adapts.
        last = self.last_adaptation.get(action_type, datetime.min)
        return datetime.now() - last > self.adaptation_interval
|
||||
|
||||
    def _adapt_policy(self, action_type: ActionType) -> None:
        """Re-tune the rate limit policy from the last hour of statistics."""
        # Fetch recent statistics as the adaptation baseline.
        stats = self.rate_limit_service.get_statistics(
            action_type,
            timeframe=timedelta(hours=1)
        )

        # Without a success rate there is nothing to adapt on.
        if not stats or 'success_rate' not in stats:
            return

        current_policy = self.rate_limit_service.get_policy(action_type)
        success_rate = stats['success_rate']
        avg_duration = stats.get('avg_duration_ms', 0) / 1000.0  # ms -> s

        # Derive new policy parameters from the metrics.
        new_policy = self._calculate_new_policy(
            current_policy,
            success_rate,
            avg_duration
        )

        # Only write back when something actually changed.
        if new_policy != current_policy:
            self.rate_limit_service.update_policy(action_type, new_policy)
            logger.info(f"Adapted policy for {action_type.value}: "
                        f"min_delay={new_policy.min_delay:.2f}, "
                        f"max_delay={new_policy.max_delay:.2f}")

        # Timestamp the attempt even if unchanged, so a full interval
        # passes before the next adaptation check.
        self.last_adaptation[action_type] = datetime.now()
|
||||
|
||||
    def _calculate_new_policy(self, current: RateLimitPolicy,
                              success_rate: float,
                              avg_duration: float) -> RateLimitPolicy:
        """Compute new policy parameters from recent success rate and timing.

        Delays are widened when the success rate drops, and cautiously
        narrowed when it is very high; values are clamped to fixed bounds
        (min <= 10s, max <= 30s, backoff <= 3.0).

        Args:
            current: Policy currently in effect.
            success_rate: Fraction of successful actions in the window (0..1).
            avg_duration: Average action duration in seconds.

        Returns:
            A new RateLimitPolicy; may equal *current* if no change applies.
        """
        # Start from the current values.
        new_min = current.min_delay
        new_max = current.max_delay
        new_backoff = current.backoff_multiplier

        # Adjust based on success rate.
        if success_rate < 0.7:  # low success rate
            # Increase delays significantly.
            new_min = min(new_min * 1.3, 10.0)
            new_max = min(new_max * 1.3, 30.0)
            new_backoff = min(new_backoff * 1.1, 3.0)
        elif success_rate < 0.85:  # medium success rate
            # Moderate increase.
            new_min = min(new_min * 1.1, 10.0)
            new_max = min(new_max * 1.1, 30.0)
        elif success_rate > 0.95:  # high success rate
            # Cautious decrease, only when actions already finish well
            # under the current minimum delay.
            if avg_duration < current.min_delay * 0.8:
                new_min = max(new_min * 0.9, 0.1)
                new_max = max(new_max * 0.9, new_min * 3)

        # Ensure max stays meaningfully above min.
        new_max = max(new_max, new_min * 2)

        return RateLimitPolicy(
            min_delay=round(new_min, 2),
            max_delay=round(new_max, 2),
            adaptive=current.adaptive,
            backoff_multiplier=round(new_backoff, 2),
            max_retries=current.max_retries
        )
|
||||
|
||||
    def _is_anomaly(self, timing: ActionTiming) -> bool:
        """Return True when *timing* deviates strongly from recent behaviour."""
        # The last hour of statistics is the comparison baseline.
        stats = self.rate_limit_service.get_statistics(
            timing.action_type,
            timeframe=timedelta(hours=1)
        )

        # Without baseline data nothing can be classified as anomalous.
        if not stats or 'avg_duration_ms' not in stats:
            return False

        avg_duration = stats['avg_duration_ms'] / 1000.0  # ms -> s

        # Very slow requests (beyond avg * threshold) are anomalies.
        if timing.duration > avg_duration * self.anomaly_threshold:
            return True

        # A failure amid an otherwise >90% success streak is an anomaly.
        if not timing.success and stats.get('success_rate', 0) > 0.9:
            return True

        return False
|
||||
|
||||
def _handle_anomaly(self, timing: ActionTiming) -> None:
|
||||
"""Behandelt erkannte Anomalien"""
|
||||
# Sofortige Policy-Anpassung bei kritischen Anomalien
|
||||
if not timing.success and timing.error_message:
|
||||
if any(indicator in timing.error_message.lower()
|
||||
for indicator in ['rate limit', 'too many', 'blocked']):
|
||||
# Rate Limit erkannt - sofort anpassen
|
||||
current_policy = self.rate_limit_service.get_policy(timing.action_type)
|
||||
emergency_policy = RateLimitPolicy(
|
||||
min_delay=min(current_policy.min_delay * 2, 10.0),
|
||||
max_delay=min(current_policy.max_delay * 2, 30.0),
|
||||
adaptive=current_policy.adaptive,
|
||||
backoff_multiplier=min(current_policy.backoff_multiplier * 1.5, 3.0),
|
||||
max_retries=current_policy.max_retries
|
||||
)
|
||||
self.rate_limit_service.update_policy(timing.action_type, emergency_policy)
|
||||
logger.warning(f"Emergency policy update for {timing.action_type.value} due to rate limit")
|
||||
|
||||
def get_recommendations(self) -> Dict[str, Any]:
|
||||
"""Gibt Empfehlungen basierend auf aktuellen Metriken"""
|
||||
recommendations = {
|
||||
'actions': [],
|
||||
'warnings': [],
|
||||
'optimizations': []
|
||||
}
|
||||
|
||||
# Analysiere alle Action Types
|
||||
for action_type in ActionType:
|
||||
stats = self.rate_limit_service.get_statistics(
|
||||
action_type,
|
||||
timeframe=timedelta(hours=24)
|
||||
)
|
||||
|
||||
if not stats or stats.get('total_actions', 0) < 10:
|
||||
continue
|
||||
|
||||
success_rate = stats.get('success_rate', 0)
|
||||
avg_retries = stats.get('avg_retry_count', 0)
|
||||
|
||||
# Empfehlungen basierend auf Metriken
|
||||
if success_rate < 0.5:
|
||||
recommendations['warnings'].append(
|
||||
f"{action_type.value}: Sehr niedrige Erfolgsrate ({success_rate:.1%})"
|
||||
)
|
||||
recommendations['actions'].append(
|
||||
f"Erhöhe Delays für {action_type.value} oder prüfe auf Blocking"
|
||||
)
|
||||
|
||||
if avg_retries > 2:
|
||||
recommendations['warnings'].append(
|
||||
f"{action_type.value}: Hohe Retry-Rate ({avg_retries:.1f})"
|
||||
)
|
||||
|
||||
if success_rate > 0.98 and stats.get('avg_duration_ms', 0) < 500:
|
||||
recommendations['optimizations'].append(
|
||||
f"{action_type.value}: Könnte schneller ausgeführt werden"
|
||||
)
|
||||
|
||||
return recommendations
|
||||
352
application/use_cases/analyze_failure_rate_use_case.py
Normale Datei
352
application/use_cases/analyze_failure_rate_use_case.py
Normale Datei
@ -0,0 +1,352 @@
|
||||
"""
|
||||
Analyze Failure Rate Use Case - Analysiert Fehlerquoten und Muster
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
from collections import defaultdict, Counter
|
||||
|
||||
from domain.services.analytics_service import IAnalyticsService
|
||||
from domain.value_objects.error_summary import ErrorSummary
|
||||
from domain.entities.error_event import ErrorType
|
||||
|
||||
logger = logging.getLogger("analyze_failure_rate_use_case")
|
||||
|
||||
|
||||
class AnalyzeFailureRateUseCase:
    """
    Use case for failure analysis.

    Implements time-based failure analysis, error clustering,
    trend detection and recommendations for improvement.
    """

    def __init__(self, analytics_service: IAnalyticsService):
        self.analytics_service = analytics_service
        # Error types that always deserve special attention.
        self.critical_error_types = [
            ErrorType.RATE_LIMIT,
            ErrorType.CAPTCHA,
            ErrorType.AUTHENTICATION
        ]
        # Failure-rate thresholds used for severity grading.
        self.error_thresholds = {
            'critical': 0.5,    # 50% failure rate is critical
            'warning': 0.3,     # 30% failure rate is a warning
            'acceptable': 0.1   # 10% failure rate is acceptable
        }

    def execute(self,
                platform: Optional[str] = None,
                timeframe: timedelta = timedelta(hours=24)) -> Dict[str, Any]:
        """
        Analyze failure rates and patterns.

        Args:
            platform: Specific platform, or None for all platforms
            timeframe: Time window for the analysis

        Returns:
            Analysis result with metrics and recommendations
        """
        # Base metrics from the analytics service.
        success_rate = self.analytics_service.get_success_rate(timeframe, platform)
        failure_rate = 1.0 - success_rate

        # Most frequent errors in the window.
        common_errors = self.analytics_service.get_common_errors(20, timeframe)

        # Aggregate failure patterns (temporal distribution etc.).
        patterns = self.analytics_service.analyze_failure_patterns(timeframe)

        analysis = {
            'timeframe': str(timeframe),
            'platform': platform or 'all',
            'metrics': {
                'overall_failure_rate': failure_rate,
                'overall_success_rate': success_rate,
                'severity': self._calculate_severity(failure_rate)
            },
            'error_breakdown': self._analyze_error_types(common_errors),
            'temporal_patterns': self._analyze_temporal_patterns(patterns),
            'error_clusters': self._cluster_errors(common_errors),
            'critical_issues': self._identify_critical_issues(common_errors, failure_rate),
            'recommendations': self._generate_recommendations(
                failure_rate, common_errors, patterns
            )
        }

        # Log the key findings.
        self._log_insights(analysis)

        return analysis

    def _calculate_severity(self, failure_rate: float) -> str:
        """Grade the failure rate against the configured thresholds."""
        if failure_rate >= self.error_thresholds['critical']:
            return 'critical'
        elif failure_rate >= self.error_thresholds['warning']:
            return 'warning'
        elif failure_rate >= self.error_thresholds['acceptable']:
            return 'moderate'
        else:
            return 'low'

    def _analyze_error_types(self, errors: List[ErrorSummary]) -> List[Dict[str, Any]]:
        """Build a detailed breakdown for the most frequent error types."""
        breakdown = []

        for error in errors[:10]:  # top 10 errors only
            analysis = {
                'error_type': error.error_type,
                'count': error.error_count,
                'frequency_per_hour': error.frequency,
                'recovery_rate': error.recovery_success_rate,
                'severity_score': error.severity_score,
                'impact': {
                    'user_impact': error.total_user_impact,
                    'system_impact': error.total_system_impact,
                    'data_loss': error.data_loss_incidents
                },
                'common_contexts': {
                    'urls': error.most_common_urls[:3],
                    'actions': error.most_common_actions[:3],
                    'steps': error.most_common_steps[:3]
                },
                'trend': self._calculate_error_trend(error)
            }
            breakdown.append(analysis)

        return breakdown

    def _calculate_error_trend(self, error: ErrorSummary) -> str:
        """Classify the trend of an error type.

        Simplified heuristic based purely on the current frequency
        (occurrences per hour) — no historical comparison yet.
        """
        if error.frequency > 10:
            return 'increasing'
        elif error.frequency > 5:
            return 'stable'
        else:
            return 'decreasing'

    def _analyze_temporal_patterns(self, patterns: Dict[str, Any]) -> Dict[str, Any]:
        """Extract temporal error patterns (peak/low hours) from the raw data."""
        temporal = {
            'peak_error_hours': [],
            'low_error_hours': [],
            'daily_pattern': 'unknown',
            'weekly_pattern': 'unknown'
        }

        # TODO: implement with real timeline data.
        if patterns:
            # Rank the hours by error count when a distribution exists.
            if 'hourly_distribution' in patterns:
                hourly = patterns['hourly_distribution']
                sorted_hours = sorted(hourly.items(),
                                      key=lambda x: x[1],
                                      reverse=True)
                temporal['peak_error_hours'] = [h[0] for h in sorted_hours[:3]]
                temporal['low_error_hours'] = [h[0] for h in sorted_hours[-3:]]

        return temporal

    def _cluster_errors(self, errors: List[ErrorSummary]) -> List[Dict[str, Any]]:
        """Group similar error types into clusters by their base name."""
        clusters = []

        # Cluster by the part of the error type before the first underscore.
        type_clusters = defaultdict(list)
        for error in errors:
            base_type = error.error_type.split('_')[0] if '_' in error.error_type else error.error_type
            type_clusters[base_type].append(error)

        # Only groups with more than one member form a meaningful cluster.
        for cluster_name, cluster_errors in type_clusters.items():
            if len(cluster_errors) > 1:
                total_count = sum(e.error_count for e in cluster_errors)
                avg_recovery = sum(e.recovery_success_rate for e in cluster_errors) / len(cluster_errors)

                clusters.append({
                    'cluster_name': cluster_name,
                    'error_count': len(cluster_errors),
                    'total_occurrences': total_count,
                    'avg_recovery_rate': avg_recovery,
                    'members': [e.error_type for e in cluster_errors]
                })

        return sorted(clusters, key=lambda x: x['total_occurrences'], reverse=True)

    def _identify_critical_issues(self,
                                  errors: List[ErrorSummary],
                                  overall_failure_rate: float) -> List[Dict[str, Any]]:
        """Identify issues that require attention right now."""
        critical_issues = []

        # Overall failure rate above the critical threshold.
        if overall_failure_rate >= self.error_thresholds['critical']:
            critical_issues.append({
                'issue': 'high_overall_failure_rate',
                'severity': 'critical',
                'description': f'Fehlerrate von {overall_failure_rate:.1%} überschreitet kritischen Schwellenwert',
                'recommendation': 'Sofortige Untersuchung und Maßnahmen erforderlich'
            })

        # High-frequency occurrences of critical error types.
        for error in errors:
            # Map the stored string back onto the ErrorType enum; unknown
            # strings fall back to ErrorType.UNKNOWN. (Was a bare except
            # before, which also swallowed unrelated errors.)
            try:
                error_type = ErrorType(error.error_type)
            except ValueError:
                error_type = ErrorType.UNKNOWN

            if error_type in self.critical_error_types:
                if error.frequency > 5:  # more than 5 per hour
                    critical_issues.append({
                        'issue': f'high_frequency_{error.error_type}',
                        'severity': 'critical',
                        'description': f'{error.error_type} tritt {error.frequency:.1f} mal pro Stunde auf',
                        'recommendation': self._get_error_specific_recommendation(error_type)
                    })

        # Error types that rarely recover.
        low_recovery = [e for e in errors if e.recovery_success_rate < 0.2]
        if low_recovery:
            critical_issues.append({
                'issue': 'low_recovery_rate',
                'severity': 'warning',
                'description': f'{len(low_recovery)} Fehlertypen haben Recovery-Rate < 20%',
                'recommendation': 'Recovery-Strategien überprüfen und verbessern'
            })

        return critical_issues

    def _get_error_specific_recommendation(self, error_type: ErrorType) -> str:
        """Return a tailored recommendation for a given error type."""
        recommendations = {
            ErrorType.RATE_LIMIT: 'Rate Limiting Parameter erhöhen und Delays anpassen',
            ErrorType.CAPTCHA: 'CAPTCHA-Solving-Service prüfen oder manuelle Intervention',
            ErrorType.AUTHENTICATION: 'Credentials und Session-Management überprüfen',
            ErrorType.NETWORK: 'Netzwerk-Stabilität und Proxy-Konfiguration prüfen',
            ErrorType.TIMEOUT: 'Timeouts erhöhen und Performance optimieren'
        }

        return recommendations.get(error_type, 'Detaillierte Fehleranalyse durchführen')

    def _generate_recommendations(self,
                                  failure_rate: float,
                                  errors: List[ErrorSummary],
                                  patterns: Dict[str, Any]) -> List[str]:
        """Generate concrete, human-readable action recommendations."""
        recommendations = []

        # Base recommendation derived from the overall failure rate.
        severity = self._calculate_severity(failure_rate)
        if severity == 'critical':
            recommendations.append(
                "🚨 KRITISCH: Sofortige Intervention erforderlich - "
                "Pausieren Sie neue Account-Erstellungen bis Issues gelöst sind"
            )
        elif severity == 'warning':
            recommendations.append(
                "⚠️ WARNUNG: Erhöhte Fehlerrate - "
                "Reduzieren Sie Geschwindigkeit und überwachen Sie genau"
            )

        # Specific recommendations derived from the most frequent error.
        if errors:
            top_error = errors[0]
            if top_error.error_type == ErrorType.RATE_LIMIT.value:
                recommendations.append(
                    "📊 Rate Limiting ist Hauptproblem - "
                    "Erhöhen Sie Delays zwischen Aktionen um 50%"
                )
            elif top_error.error_type == ErrorType.CAPTCHA.value:
                recommendations.append(
                    "🔐 CAPTCHA-Challenges häufig - "
                    "Prüfen Sie Fingerprinting und Session-Qualität"
                )

        # Time-based recommendations.
        if patterns and 'peak_hours' in patterns:
            recommendations.append(
                f"⏰ Vermeiden Sie Aktivität während Peak-Zeiten: "
                f"{', '.join(patterns['peak_hours'])}"
            )

        # Recovery-based recommendations.
        low_recovery = [e for e in errors if e.recovery_success_rate < 0.3]
        if len(low_recovery) > 3:
            recommendations.append(
                "🔄 Viele Fehler ohne erfolgreiche Recovery - "
                "Implementieren Sie bessere Retry-Strategien"
            )

        # Platform-specific recommendations, inferred from the URLs
        # attached to each error summary.
        platform_errors = defaultdict(int)
        for error in errors:
            for url in error.most_common_urls:
                if 'instagram' in url.lower():
                    platform_errors['instagram'] += error.error_count
                elif 'tiktok' in url.lower():
                    platform_errors['tiktok'] += error.error_count

        if platform_errors:
            worst_platform = max(platform_errors.items(), key=lambda x: x[1])
            recommendations.append(
                f"📱 {worst_platform[0].title()} hat die meisten Fehler - "
                f"Fokussieren Sie Optimierungen auf diese Plattform"
            )

        return recommendations

    def _log_insights(self, analysis: Dict[str, Any]) -> None:
        """Log the most important findings of an analysis run."""
        severity = analysis['metrics']['severity']
        failure_rate = analysis['metrics']['overall_failure_rate']

        log_message = f"Failure analysis completed: {failure_rate:.1%} failure rate ({severity})"

        if analysis['critical_issues']:
            log_message += f", {len(analysis['critical_issues'])} critical issues found"

        # Elevated severities are logged as warnings so they stand out.
        if severity in ['critical', 'warning']:
            logger.warning(log_message)
        else:
            logger.info(log_message)

        # Surface the top recommendation in the log as well.
        if analysis['recommendations']:
            logger.info(f"Top recommendation: {analysis['recommendations'][0]}")

    def compare_platforms(self,
                          timeframe: timedelta = timedelta(days=7)) -> Dict[str, Any]:
        """Compare failure rates across platforms."""
        comparison = self.analytics_service.get_platform_comparison(timeframe)

        # Enrich with error-specific metrics.
        for platform, stats in comparison.items():
            if isinstance(stats, dict):
                # Fetched for future use; result is currently unused
                # because error summaries cannot be filtered by platform yet.
                platform_errors = self.analytics_service.get_common_errors(10, timeframe)
                # TODO: implement a platform filter in the error summary.

                stats['primary_error_types'] = []
                stats['improvement_potential'] = self._calculate_improvement_potential(stats)

        return comparison

    def _calculate_improvement_potential(self, stats: Dict[str, Any]) -> str:
        """Estimate how much a platform's success rate could be improved."""
        success_rate = stats.get('success_rate', 0)

        if success_rate < 0.5:
            return 'high'
        elif success_rate < 0.7:
            return 'medium'
        elif success_rate < 0.9:
            return 'low'
        else:
            return 'minimal'
|
||||
259
application/use_cases/detect_rate_limit_use_case.py
Normale Datei
259
application/use_cases/detect_rate_limit_use_case.py
Normale Datei
@ -0,0 +1,259 @@
|
||||
"""
|
||||
Detect Rate Limit Use Case - Erkennt Rate Limits und reagiert entsprechend
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
from datetime import datetime
|
||||
|
||||
from domain.services.rate_limit_service import IRateLimitService
|
||||
from domain.value_objects.action_timing import ActionTiming, ActionType
|
||||
from domain.entities.error_event import ErrorEvent, ErrorType, ErrorContext
|
||||
from domain.entities.rate_limit_policy import RateLimitPolicy
|
||||
|
||||
logger = logging.getLogger("detect_rate_limit_use_case")
|
||||
|
||||
|
||||
class DetectRateLimitUseCase:
    """
    Use case for rate-limit detection and reaction.

    Analyzes responses, detects rate limits and applies backoff strategies.
    """

    def __init__(self, rate_limit_service: IRateLimitService):
        self.rate_limit_service = rate_limit_service
        # Marker phrases per platform; matched case-insensitively against
        # the response text.
        self.detection_patterns = {
            'instagram': [
                "Bitte warte einige Minuten",
                "Please wait a few minutes",
                "Try again later",
                "Versuche es später erneut",
                "too many requests",
                "zu viele Anfragen",
                "We're sorry, but something went wrong",
                "temporarily blocked",
                "vorübergehend gesperrt",
                "Wir haben deine Anfrage eingeschränkt"
            ],
            'tiktok': [
                "Too many attempts",
                "Zu viele Versuche",
                "Please slow down",
                "rate limited",
                "Try again in"
            ],
            'general': [
                "429",
                "rate limit",
                "throttled",
                "quota exceeded"
            ]
        }

    def execute(self, response: Any, context: Optional[Dict[str, Any]] = None) -> Tuple[bool, Optional[ErrorEvent]]:
        """
        Analyze a response for rate limiting.

        Args:
            response: HTTP response, page content or error message
            context: Additional context (platform, action_type, etc.)

        Returns:
            Tuple of (is_rate_limited, error_event)
        """
        is_rate_limited = self._detect_rate_limit(response, context)

        if not is_rate_limited:
            return False, None

        # Record the incident and tighten the policy.
        error_event = self._create_error_event(response, context)
        self._handle_rate_limit(error_event, context)

        return True, error_event

    def _detect_rate_limit(self, response: Any, context: Optional[Dict[str, Any]] = None) -> bool:
        """Check whether a response indicates rate limiting."""
        # Base detection delegated to the service.
        if self.rate_limit_service.detect_rate_limit(response):
            return True

        # Platform-specific phrase detection.
        platform = context.get('platform', 'general') if context else 'general'
        patterns = self.detection_patterns.get(platform, []) + self.detection_patterns['general']

        # String-based detection (case-insensitive).
        response_text = self._extract_text(response)
        if response_text:
            response_lower = response_text.lower()
            for pattern in patterns:
                if pattern.lower() in response_lower:
                    logger.info(f"Rate limit detected: '{pattern}' found in response")
                    return True

        # Status-code-based detection.
        status = self._extract_status(response)
        if status in [429, 420, 503]:  # common rate limit codes
            logger.info(f"Rate limit detected: HTTP {status}")
            return True

        # Timing-based heuristic: very fast failures can indicate that the
        # server rejected the request outright.
        if context and 'timing' in context:
            timing = context['timing']
            if isinstance(timing, ActionTiming):
                if not timing.success and timing.duration < 0.5:
                    logger.warning("Possible rate limit: Fast failure detected")
                    return True

        return False

    def _extract_text(self, response: Any) -> Optional[str]:
        """Best-effort extraction of text from various response types.

        Extraction failures are deliberately swallowed — detection then
        simply falls back to ``str(response)``.
        """
        if isinstance(response, str):
            return response
        elif hasattr(response, 'text'):
            try:
                return response.text
            except Exception:
                pass
        elif hasattr(response, 'content'):
            try:
                # Playwright-style pages expose content() as a callable.
                if callable(response.content):
                    return response.content()
                return str(response.content)
            except Exception:
                pass
        elif hasattr(response, 'message'):
            return str(response.message)

        return str(response) if response else None

    def _extract_status(self, response: Any) -> Optional[int]:
        """Extract an HTTP status code from a response, if present."""
        if hasattr(response, 'status'):
            return response.status
        elif hasattr(response, 'status_code'):
            return response.status_code
        elif hasattr(response, 'code'):
            try:
                return int(response.code)
            except (TypeError, ValueError):
                pass
        return None

    def _create_error_event(self, response: Any, context: Optional[Dict[str, Any]] = None) -> ErrorEvent:
        """Build an ErrorEvent describing the detected rate limit."""
        # Extract the text once and reuse it (was extracted twice before).
        response_text = self._extract_text(response)

        error_context = ErrorContext(
            url=context.get('url') if context else None,
            action=context.get('action_type').value if context and 'action_type' in context else None,
            step_name=context.get('step_name') if context else None,
            screenshot_path=context.get('screenshot_path') if context else None,
            additional_data={
                'platform': context.get('platform') if context else None,
                # Truncate to keep stored events small.
                'response_text': response_text[:500] if response_text else None,
                'status_code': self._extract_status(response),
                'timestamp': datetime.now().isoformat()
            }
        )

        return ErrorEvent(
            error_type=ErrorType.RATE_LIMIT,
            error_message="Rate limit detected",
            context=error_context,
            platform=context.get('platform') if context else None,
            session_id=context.get('session_id') if context else None,
            correlation_id=context.get('correlation_id') if context else None
        )

    def _handle_rate_limit(self, error_event: ErrorEvent, context: Optional[Dict[str, Any]] = None) -> None:
        """React to a detected rate limit by tightening the policy."""
        # Prefer an explicit wait time from the response when available.
        wait_time = self._extract_wait_time(error_event.context.additional_data.get('response_text', ''))

        if not wait_time:
            # Fall back to exponential backoff based on the retry count.
            retry_count = context.get('retry_count', 0) if context else 0
            wait_time = self._calculate_backoff(retry_count)

        # NOTE(review): wait_time is only logged here; the actual sleep is
        # expected to happen in the caller — confirm.
        logger.warning(f"Rate limit detected - waiting {wait_time}s before retry")

        # Tighten the policy for future requests of this action type.
        if context and 'action_type' in context:
            action_type = context['action_type']
            current_policy = self.rate_limit_service.get_policy(action_type)

            updated_policy = RateLimitPolicy(
                min_delay=min(current_policy.min_delay * 1.5, 10.0),
                max_delay=min(current_policy.max_delay * 2, 60.0),
                adaptive=current_policy.adaptive,
                backoff_multiplier=min(current_policy.backoff_multiplier * 1.2, 3.0),
                max_retries=current_policy.max_retries
            )

            self.rate_limit_service.update_policy(action_type, updated_policy)

    def _extract_wait_time(self, response_text: str) -> Optional[float]:
        """Try to pull an explicit wait time out of the response text.

        Returns:
            The wait time in seconds (capped at 5 minutes), or None when
            the text contains no recognizable hint.
        """
        if not response_text:
            return None

        import re

        # The text is lower-cased before matching, so the patterns must be
        # lower-case as well. (The German patterns previously used capital
        # letters and could therefore never match.)
        patterns = [
            r'wait (\d+) seconds',
            r'warte (\d+) sekunden',
            r'try again in (\d+)s',
            r'retry after (\d+)',
            r'(\d+) minuten warten',
            r'wait (\d+) minutes'
        ]

        text = response_text.lower()
        for pattern in patterns:
            match = re.search(pattern, text)
            if match:
                value = int(match.group(1))
                # Minute-based patterns deliver minutes; convert to seconds.
                # 'minute' also matches the German 'minuten' prefix.
                if 'minute' in pattern:
                    value *= 60
                return float(min(value, 300))  # cap at 5 minutes

        return None

    def _calculate_backoff(self, retry_count: int) -> float:
        """Compute an exponential backoff delay with jitter."""
        base_wait = 5.0    # 5 seconds base delay
        max_wait = 300.0   # cap at 5 minutes

        # Exponential backoff, doubled per retry, capped.
        wait_time = min(base_wait * (2 ** retry_count), max_wait)

        # Add jitter of up to ±10% so clients don't retry in lockstep.
        import random
        jitter = wait_time * 0.2 * (random.random() - 0.5)

        return wait_time + jitter

    def analyze_patterns(self, platform: str, timeframe_hours: int = 24) -> Dict[str, Any]:
        """Analyze rate-limit patterns for a platform.

        Placeholder: will use an analytics repository to detect recurring
        rate-limit patterns.
        """
        analysis = {
            'platform': platform,
            'timeframe_hours': timeframe_hours,
            'peak_times': [],
            'safe_times': [],
            'recommended_delays': {},
            'incidents': 0
        }

        # TODO: implement with the analytics repository.

        return analysis
|
||||
187
application/use_cases/export_accounts_use_case.py
Normale Datei
187
application/use_cases/export_accounts_use_case.py
Normale Datei
@ -0,0 +1,187 @@
|
||||
"""
|
||||
Export Accounts Use Case - Exportiert Account-Daten in verschiedene Formate
|
||||
"""
|
||||
|
||||
import logging
|
||||
import csv
|
||||
import json
|
||||
from io import StringIO
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
|
||||
logger = logging.getLogger("export_accounts_use_case")
|
||||
|
||||
|
||||
class ExportAccountsUseCase:
    """
    Use case for account export.

    Exports account data in various formats (CSV, JSON).
    """

    def __init__(self, db_manager):
        # Data-access object providing get_all_accounts() and
        # get_accounts_by_platform().
        self.db_manager = db_manager

    def execute(self,
                platform: Optional[str] = None,
                format: str = 'csv',
                include_passwords: bool = True) -> bytes:
        """
        Export account data loaded from the database.

        Args:
            platform: Filter for a specific platform (None = all)
            format: Export format ('csv' or 'json')
            include_passwords: Whether passwords should be included
                (note: passwords are exported in plain text)

        Returns:
            Exported data as bytes

        Raises:
            ValueError: If the format is not supported
        """
        # Load the accounts, optionally filtered by platform.
        if platform and platform.lower() not in ["all", ""]:
            accounts = self.db_manager.get_accounts_by_platform(platform.lower())
        else:
            accounts = self.db_manager.get_all_accounts()

        if not accounts:
            logger.warning(f"Keine Accounts gefunden für Export (platform: {platform})")
            return b""

        result = self._render(accounts, format, include_passwords)

        logger.info(f"Exported {len(accounts)} accounts as {format}")
        return result

    def _render(self,
                accounts: List[Dict[str, Any]],
                format: str,
                include_passwords: bool) -> bytes:
        """Dispatch to the format-specific exporter.

        Shared by execute() and execute_with_accounts() (the dispatch was
        previously duplicated in both).

        Raises:
            ValueError: If the format is not supported
        """
        fmt = format.lower()
        if fmt == 'csv':
            return self._export_csv(accounts, include_passwords)
        if fmt == 'json':
            return self._export_json(accounts, include_passwords)
        raise ValueError(f"Unsupported format: {format}")

    def _export_csv(self, accounts: List[Dict[str, Any]], include_passwords: bool) -> bytes:
        """
        Export accounts as CSV.

        Args:
            accounts: List of account records
            include_passwords: Whether passwords should be included

        Returns:
            CSV data as bytes (UTF-8 with BOM so Excel opens it correctly)
        """
        output = StringIO()

        # Column headers; the password column is optional.
        headers = [
            'Plattform',
            'Benutzername',
            'E-Mail',
            'Handynummer',
            'Name',
            'Geburtstag',
            'Erstellt am'
        ]

        if include_passwords:
            headers.insert(2, 'Passwort')

        writer = csv.DictWriter(output, fieldnames=headers)
        writer.writeheader()

        # One row per account; missing fields become empty strings.
        for account in accounts:
            row = {
                'Plattform': account.get('platform', ''),
                'Benutzername': account.get('username', ''),
                'E-Mail': account.get('email', ''),
                'Handynummer': account.get('phone', ''),
                'Name': account.get('full_name', ''),
                'Geburtstag': account.get('birthday', ''),
                'Erstellt am': account.get('created_at', '')
            }

            if include_passwords:
                row['Passwort'] = account.get('password', '')

            writer.writerow(row)

        return output.getvalue().encode('utf-8-sig')  # UTF-8 with BOM for Excel

    def _export_json(self, accounts: List[Dict[str, Any]], include_passwords: bool) -> bytes:
        """
        Export accounts as JSON.

        Args:
            accounts: List of account records
            include_passwords: Whether passwords should be included

        Returns:
            JSON data as UTF-8 bytes
        """
        export_data = {
            'export_date': datetime.now().isoformat(),
            'account_count': len(accounts),
            'accounts': []
        }

        for account in accounts:
            account_data = {
                'platform': account.get('platform', ''),
                'username': account.get('username', ''),
                'email': account.get('email', ''),
                'phone': account.get('phone', ''),
                'full_name': account.get('full_name', ''),
                'birthday': account.get('birthday', ''),
                'created_at': account.get('created_at', '')
            }

            if include_passwords:
                account_data['password'] = account.get('password', '')

            export_data['accounts'].append(account_data)

        return json.dumps(export_data, ensure_ascii=False, indent=2).encode('utf-8')

    def execute_with_accounts(self,
                              accounts: List[Dict[str, Any]],
                              format: str = 'csv',
                              include_passwords: bool = True) -> bytes:
        """
        Export a caller-supplied list of accounts.

        Args:
            accounts: Accounts to export
            format: Export format ('csv' or 'json')
            include_passwords: Whether passwords should be included

        Returns:
            Exported data as bytes

        Raises:
            ValueError: If the format is not supported
        """
        if not accounts:
            logger.warning("Keine Accounts zum Export übergeben")
            return b""

        result = self._render(accounts, format, include_passwords)

        logger.info(f"Exported {len(accounts)} specific accounts as {format}")
        return result

    def get_export_filename(self, platform: Optional[str], format: str) -> str:
        """
        Suggest a suitable filename for an export.

        Args:
            platform: Platform filter (None yields 'alle')
            format: Export format, used as the file extension

        Returns:
            Suggested filename
        """
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        platform_str = platform.lower() if platform else 'alle'
        return f"accounts_{platform_str}_{timestamp}.{format}"
|
||||
122
application/use_cases/generate_account_fingerprint_use_case.py
Normale Datei
122
application/use_cases/generate_account_fingerprint_use_case.py
Normale Datei
@ -0,0 +1,122 @@
|
||||
"""
|
||||
Generate Account Fingerprint Use Case - Generiert und verwaltet Fingerprints für Accounts
|
||||
"""
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
import json
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
|
||||
from domain.entities.browser_fingerprint import BrowserFingerprint
|
||||
from infrastructure.services.advanced_fingerprint_service import AdvancedFingerprintService
|
||||
|
||||
logger = logging.getLogger("generate_account_fingerprint_use_case")
|
||||
|
||||
|
||||
class GenerateAccountFingerprintUseCase:
    """
    Use case for generating and assigning browser fingerprints to accounts.
    Ensures every account has a unique fingerprint.
    """

    def __init__(self, db_manager, fingerprint_service=None):
        # A custom service can be injected (e.g. for tests); otherwise the
        # default AdvancedFingerprintService is used.
        self.db_manager = db_manager
        self.fingerprint_service = fingerprint_service or AdvancedFingerprintService()

    def execute(self, account_id: int) -> Optional[str]:
        """
        Generate a fingerprint for an account, or return the existing one.

        Args:
            account_id: ID of the account

        Returns:
            Fingerprint ID, or None on error
        """
        # BUGFIX: initialize before the try block — previously, if
        # get_connection() raised, the finally block hit an
        # UnboundLocalError on `conn`, masking the original error.
        conn = None
        try:
            # Check whether the account already has a fingerprint
            conn = self.db_manager.get_connection()
            cursor = conn.cursor()

            cursor.execute(
                "SELECT fingerprint_id FROM accounts WHERE id = ?",
                (account_id,)
            )
            result = cursor.fetchone()

            if result and result[0]:
                logger.info(f"Account {account_id} hat bereits Fingerprint: {result[0]}")
                return result[0]

            # Generate a new fingerprint via the AdvancedFingerprintService
            fingerprint = self.fingerprint_service.create_account_fingerprint(
                account_id=str(account_id),
                profile_type="desktop"
            )

            # Link the fingerprint ID to the account row
            cursor.execute(
                "UPDATE accounts SET fingerprint_id = ? WHERE id = ?",
                (fingerprint.fingerprint_id, account_id)
            )

            conn.commit()

            logger.info(f"Neuer Fingerprint {fingerprint.fingerprint_id} für Account {account_id} generiert und verknüpft")
            return fingerprint.fingerprint_id

        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints für Account {account_id}: {e}")
            return None
        finally:
            if conn:
                conn.close()

    def assign_fingerprints_to_all_accounts(self) -> Dict[str, Any]:
        """
        Assign a new fingerprint to every account that has none.

        Returns:
            Statistics about the assignment run
        """
        stats = {
            "total_accounts": 0,
            "accounts_without_fingerprint": 0,
            "fingerprints_assigned": 0,
            "errors": 0
        }
        # BUGFIX: close the listing connection in a finally block — previously
        # it leaked whenever an exception aborted the loop, and `conn` was
        # unbound if get_connection() itself raised.
        conn = None
        try:
            # Fetch all accounts that do not yet have a fingerprint
            conn = self.db_manager.get_connection()
            cursor = conn.cursor()

            cursor.execute("SELECT COUNT(*) FROM accounts")
            stats["total_accounts"] = cursor.fetchone()[0]

            cursor.execute(
                "SELECT id, username, platform FROM accounts WHERE fingerprint_id IS NULL"
            )
            accounts = cursor.fetchall()
            stats["accounts_without_fingerprint"] = len(accounts)

            for account_id, username, platform in accounts:
                logger.info(f"Generiere Fingerprint für Account {username} ({platform})")

                # execute() opens its own connection and handles its own errors
                fingerprint_id = self.execute(account_id)
                if fingerprint_id:
                    stats["fingerprints_assigned"] += 1
                else:
                    stats["errors"] += 1

            logger.info(f"Fingerprint-Zuweisung abgeschlossen: {stats}")
            return stats

        except Exception as e:
            logger.error(f"Fehler bei der Fingerprint-Zuweisung: {e}")
            stats["errors"] += 1
            return stats
        finally:
            if conn:
                conn.close()
548
application/use_cases/generate_reports_use_case.py
Normale Datei
548
application/use_cases/generate_reports_use_case.py
Normale Datei
@ -0,0 +1,548 @@
|
||||
"""
|
||||
Generate Reports Use Case - Erstellt detaillierte Berichte
|
||||
"""
|
||||
|
||||
import logging
|
||||
import json
|
||||
import csv
|
||||
from io import StringIO
|
||||
from typing import Dict, Any, List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
|
||||
from domain.services.analytics_service import IAnalyticsService
|
||||
from domain.value_objects.report import (
|
||||
Report, ReportType, Metric, PlatformStats,
|
||||
TimeSeriesData, MetricType
|
||||
)
|
||||
|
||||
logger = logging.getLogger("generate_reports_use_case")
|
||||
|
||||
|
||||
class GenerateReportsUseCase:
    """
    Use case for report generation.

    Creates daily/weekly/monthly reports with success statistics,
    performance metrics and error summaries.
    """
||||
def __init__(self, analytics_service: IAnalyticsService):
    """Store the analytics backend and build the report-type dispatch table."""
    self.analytics_service = analytics_service
    # Dispatch table: report type -> builder method.
    builders = (
        (ReportType.DAILY, self._generate_daily_report),
        (ReportType.WEEKLY, self._generate_weekly_report),
        (ReportType.MONTHLY, self._generate_monthly_report),
        (ReportType.CUSTOM, self._generate_custom_report),
        (ReportType.REAL_TIME, self._generate_realtime_report),
    )
    self.report_templates = dict(builders)
def execute(self,
            report_type: ReportType,
            start_date: Optional[datetime] = None,
            end_date: Optional[datetime] = None,
            platforms: Optional[List[str]] = None,
            include_charts: bool = True) -> Report:
    """
    Generate a report.

    Args:
        report_type: Type of report to build
        start_date: Start of the period (optional for predefined types)
        end_date: End of the period (optional for predefined types)
        platforms: Restrict the report to these platforms
        include_charts: Whether time-series data should be included

    Returns:
        The generated report
    """
    # Fill in the timeframe when the caller did not pin both ends.
    if not start_date or not end_date:
        start_date, end_date = self._determine_timeframe(report_type)

    # Dispatch to the matching template; unknown types fall back to custom.
    builder = self.report_templates.get(report_type, self._generate_custom_report)
    report = builder(start_date, end_date, platforms, include_charts)

    logger.info(f"Generated {report_type.value} report: {report.report_id} "
                f"({report.total_accounts_created} accounts, "
                f"{report.overall_success_rate:.1%} success rate)")

    return report
def _determine_timeframe(self, report_type: ReportType) -> tuple[datetime, datetime]:
    """Derive the (start, end) window implied by a report type."""
    end_date = datetime.now()
    windows = {
        ReportType.DAILY: timedelta(days=1),
        ReportType.WEEKLY: timedelta(weeks=1),
        ReportType.MONTHLY: timedelta(days=30),
        ReportType.REAL_TIME: timedelta(hours=1),
    }
    # Unknown/custom types default to a one-week lookback.
    span = windows.get(report_type, timedelta(days=7))
    return end_date - span, end_date
def _generate_daily_report(self,
                           start: datetime,
                           end: datetime,
                           platforms: Optional[List[str]],
                           include_charts: bool) -> Report:
    """Generate the daily report.

    Fetches the base report from the analytics service, then attaches
    day-level insights and recommendations and rebuilds the Report with
    the base metrics copied verbatim.
    NOTE(review): include_charts is not used in this builder — confirm intent.
    """
    # Fetch the base report
    base_report = self.analytics_service.generate_report(
        ReportType.DAILY, start, end, platforms
    )

    # Extend with daily insights
    insights = [
        self._generate_daily_summary(base_report),
        self._generate_peak_time_insight(base_report),
        self._generate_error_trend_insight(base_report)
    ]

    # Add recommendations
    recommendations = self._generate_daily_recommendations(base_report)

    # Build the final report; everything except insights/recommendations
    # is taken unchanged from the base report.
    return Report(
        report_id=base_report.report_id,
        report_type=ReportType.DAILY,
        start_date=start,
        end_date=end,
        generated_at=datetime.now(),
        total_accounts_created=base_report.total_accounts_created,
        total_attempts=base_report.total_attempts,
        overall_success_rate=base_report.overall_success_rate,
        avg_creation_time=base_report.avg_creation_time,
        metrics=base_report.metrics,
        platform_stats=base_report.platform_stats,
        error_summaries=base_report.error_summaries,
        success_rate_timeline=base_report.success_rate_timeline,
        creation_rate_timeline=base_report.creation_rate_timeline,
        error_rate_timeline=base_report.error_rate_timeline,
        insights=insights,
        recommendations=recommendations
    )
def _generate_weekly_report(self,
                            start: datetime,
                            end: datetime,
                            platforms: Optional[List[str]],
                            include_charts: bool) -> Report:
    """Generate the weekly report.

    Same structure as the daily builder, but with week-level trend,
    platform-comparison and success-pattern insights.
    NOTE(review): include_charts is not used in this builder — confirm intent.
    """
    base_report = self.analytics_service.generate_report(
        ReportType.WEEKLY, start, end, platforms
    )

    # Weekly trends
    insights = [
        self._generate_weekly_trend(base_report),
        self._generate_platform_comparison(base_report),
        self._generate_success_pattern_insight(base_report)
    ]

    recommendations = self._generate_weekly_recommendations(base_report)

    # Rebuild with the base metrics copied verbatim.
    return Report(
        report_id=base_report.report_id,
        report_type=ReportType.WEEKLY,
        start_date=start,
        end_date=end,
        generated_at=datetime.now(),
        total_accounts_created=base_report.total_accounts_created,
        total_attempts=base_report.total_attempts,
        overall_success_rate=base_report.overall_success_rate,
        avg_creation_time=base_report.avg_creation_time,
        metrics=base_report.metrics,
        platform_stats=base_report.platform_stats,
        error_summaries=base_report.error_summaries,
        success_rate_timeline=base_report.success_rate_timeline,
        creation_rate_timeline=base_report.creation_rate_timeline,
        error_rate_timeline=base_report.error_rate_timeline,
        insights=insights,
        recommendations=recommendations
    )
def _generate_monthly_report(self,
                             start: datetime,
                             end: datetime,
                             platforms: Optional[List[str]],
                             include_charts: bool) -> Report:
    """Generate the monthly report.

    Same structure as the daily builder, with month-level summary,
    growth and efficiency insights plus strategic recommendations.
    NOTE(review): include_charts is not used in this builder — confirm intent.
    """
    base_report = self.analytics_service.generate_report(
        ReportType.MONTHLY, start, end, platforms
    )

    # Monthly summary
    insights = [
        self._generate_monthly_summary(base_report),
        self._generate_growth_analysis(base_report),
        self._generate_efficiency_insight(base_report)
    ]

    recommendations = self._generate_strategic_recommendations(base_report)

    # Rebuild with the base metrics copied verbatim.
    return Report(
        report_id=base_report.report_id,
        report_type=ReportType.MONTHLY,
        start_date=start,
        end_date=end,
        generated_at=datetime.now(),
        total_accounts_created=base_report.total_accounts_created,
        total_attempts=base_report.total_attempts,
        overall_success_rate=base_report.overall_success_rate,
        avg_creation_time=base_report.avg_creation_time,
        metrics=base_report.metrics,
        platform_stats=base_report.platform_stats,
        error_summaries=base_report.error_summaries,
        success_rate_timeline=base_report.success_rate_timeline,
        creation_rate_timeline=base_report.creation_rate_timeline,
        error_rate_timeline=base_report.error_rate_timeline,
        insights=insights,
        recommendations=recommendations
    )
def _generate_custom_report(self,
                            start: datetime,
                            end: datetime,
                            platforms: Optional[List[str]],
                            include_charts: bool) -> Report:
    """Build a custom report by delegating straight to the analytics service."""
    report = self.analytics_service.generate_report(
        ReportType.CUSTOM, start, end, platforms
    )
    return report
def _generate_realtime_report(self,
                              start: datetime,
                              end: datetime,
                              platforms: Optional[List[str]],
                              include_charts: bool) -> Report:
    """Generate a real-time report from the live metrics snapshot.

    Converts the analytics service's real-time metrics dict into Report
    metrics; platform stats and error summaries are not available live.
    NOTE(review): platforms and include_charts are unused here — confirm intent.
    """
    # Fetch the current metrics snapshot
    realtime_metrics = self.analytics_service.get_real_time_metrics()

    # Convert to the Report metric format; missing keys default to 0.
    metrics = [
        Metric(
            name="active_sessions",
            value=realtime_metrics.get('active_sessions', 0),
            unit="count",
            trend=0.0
        ),
        Metric(
            name="accounts_last_hour",
            value=realtime_metrics.get('accounts_last_hour', 0),
            unit="count",
            trend=realtime_metrics.get('hourly_trend', 0.0)
        ),
        Metric(
            name="current_success_rate",
            value=realtime_metrics.get('success_rate_last_hour', 0.0),
            unit="percentage",
            trend=realtime_metrics.get('success_trend', 0.0)
        )
    ]

    return Report(
        report_id=str(uuid.uuid4()),
        report_type=ReportType.REAL_TIME,
        start_date=start,
        end_date=end,
        generated_at=datetime.now(),
        total_accounts_created=realtime_metrics.get('accounts_last_hour', 0),
        total_attempts=realtime_metrics.get('attempts_last_hour', 0),
        overall_success_rate=realtime_metrics.get('success_rate_last_hour', 0.0),
        avg_creation_time=realtime_metrics.get('avg_creation_time', 0.0),
        metrics=metrics,
        platform_stats=[],
        error_summaries=[],
        insights=[
            f"Aktuell {realtime_metrics.get('active_sessions', 0)} aktive Sessions",
            f"Erfolgsrate letzte Stunde: {realtime_metrics.get('success_rate_last_hour', 0):.1%}"
        ],
        recommendations=[]
    )
def _generate_daily_summary(self, report: Report) -> str:
|
||||
"""Generiert tägliche Zusammenfassung"""
|
||||
if report.overall_success_rate >= 0.9:
|
||||
performance = "ausgezeichnet"
|
||||
elif report.overall_success_rate >= 0.7:
|
||||
performance = "gut"
|
||||
elif report.overall_success_rate >= 0.5:
|
||||
performance = "durchschnittlich"
|
||||
else:
|
||||
performance = "verbesserungswürdig"
|
||||
|
||||
return (f"Tagesleistung war {performance} mit "
|
||||
f"{report.total_accounts_created} erstellten Accounts "
|
||||
f"({report.overall_success_rate:.1%} Erfolgsrate)")
|
||||
|
||||
def _generate_peak_time_insight(self, report: Report) -> str:
|
||||
"""Generiert Insight über Peak-Zeiten"""
|
||||
if report.creation_rate_timeline:
|
||||
peak_hour = max(zip(report.creation_rate_timeline.timestamps,
|
||||
report.creation_rate_timeline.values),
|
||||
key=lambda x: x[1])
|
||||
return f"Höchste Aktivität um {peak_hour[0].strftime('%H:%M')} Uhr"
|
||||
return "Keine ausgeprägten Peak-Zeiten erkennbar"
|
||||
|
||||
def _generate_error_trend_insight(self, report: Report) -> str:
|
||||
"""Generiert Insight über Fehler-Trends"""
|
||||
if report.error_rate_timeline:
|
||||
trend = report.error_rate_timeline.get_trend()
|
||||
if trend > 10:
|
||||
return "⚠️ Fehlerrate steigt - Intervention empfohlen"
|
||||
elif trend < -10:
|
||||
return "✅ Fehlerrate sinkt - positive Entwicklung"
|
||||
else:
|
||||
return "Fehlerrate stabil"
|
||||
return "Keine Fehler-Trend-Daten verfügbar"
|
||||
|
||||
def _generate_daily_recommendations(self, report: Report) -> List[str]:
|
||||
"""Generiert tägliche Empfehlungen"""
|
||||
recommendations = []
|
||||
|
||||
if report.overall_success_rate < 0.7:
|
||||
recommendations.append(
|
||||
"Erfolgsrate unter 70% - prüfen Sie Rate Limits und Proxy-Qualität"
|
||||
)
|
||||
|
||||
if report.avg_creation_time > 120:
|
||||
recommendations.append(
|
||||
"Durchschnittliche Erstellungszeit über 2 Minuten - "
|
||||
"Performance-Optimierung empfohlen"
|
||||
)
|
||||
|
||||
# Platform-spezifische Empfehlungen
|
||||
for platform_stat in report.platform_stats:
|
||||
if platform_stat.success_rate < 0.5:
|
||||
recommendations.append(
|
||||
f"{platform_stat.platform}: Niedrige Erfolgsrate - "
|
||||
f"spezifische Anpassungen erforderlich"
|
||||
)
|
||||
|
||||
if not recommendations:
|
||||
recommendations.append("Keine dringenden Maßnahmen erforderlich")
|
||||
|
||||
return recommendations
|
||||
|
||||
def _generate_weekly_trend(self, report: Report) -> str:
|
||||
"""Generiert wöchentlichen Trend"""
|
||||
trend_direction = "stabil"
|
||||
if report.success_rate_timeline:
|
||||
trend = report.success_rate_timeline.get_trend()
|
||||
if trend > 5:
|
||||
trend_direction = "steigend"
|
||||
elif trend < -5:
|
||||
trend_direction = "fallend"
|
||||
|
||||
return f"Wöchentlicher Trend: {trend_direction} ({report.accounts_per_day:.1f} Accounts/Tag)"
|
||||
|
||||
def _generate_platform_comparison(self, report: Report) -> str:
|
||||
"""Vergleicht Platform-Performance"""
|
||||
if not report.platform_stats:
|
||||
return "Keine Platform-Daten verfügbar"
|
||||
|
||||
best_platform = max(report.platform_stats, key=lambda p: p.success_rate)
|
||||
worst_platform = min(report.platform_stats, key=lambda p: p.success_rate)
|
||||
|
||||
return (f"Beste Performance: {best_platform.platform} ({best_platform.success_rate:.1%}), "
|
||||
f"Schlechteste: {worst_platform.platform} ({worst_platform.success_rate:.1%})")
|
||||
|
||||
def _generate_success_pattern_insight(self, report: Report) -> str:
    """Report whether the success-rate metric shows an upward movement."""
    metric = report.get_metric(MetricType.SUCCESS_RATE)
    if metric and metric.trend > 0:
        return f"Erfolgsrate verbessert sich um {metric.trend:.1f}%"
    return "Erfolgsrate zeigt keine klare Tendenz"
def _generate_weekly_recommendations(self, report: Report) -> List[str]:
|
||||
"""Generiert wöchentliche Empfehlungen"""
|
||||
recommendations = []
|
||||
|
||||
# Trend-basierte Empfehlungen
|
||||
if report.success_rate_timeline:
|
||||
trend = report.success_rate_timeline.get_trend()
|
||||
if trend < -10:
|
||||
recommendations.append(
|
||||
"Negativer Trend erkannt - analysieren Sie Änderungen der letzten Woche"
|
||||
)
|
||||
|
||||
# Effizienz-Empfehlungen
|
||||
if report.total_attempts > report.total_accounts_created * 2:
|
||||
recommendations.append(
|
||||
"Hohe Retry-Rate - verbessern Sie Fehlerbehandlung"
|
||||
)
|
||||
|
||||
return recommendations
|
||||
|
||||
def _generate_monthly_summary(self, report: Report) -> str:
|
||||
"""Generiert monatliche Zusammenfassung"""
|
||||
total_value = report.total_accounts_created
|
||||
daily_avg = report.accounts_per_day
|
||||
|
||||
return (f"Monat: {total_value} Accounts erstellt "
|
||||
f"(Ø {daily_avg:.1f}/Tag, {report.overall_success_rate:.1%} Erfolg)")
|
||||
|
||||
def _generate_growth_analysis(self, report: Report) -> str:
|
||||
"""Analysiert Wachstum"""
|
||||
# Vereinfacht - würde historische Daten vergleichen
|
||||
return "Wachstumsanalyse: Vergleich mit Vormonat ausstehend"
|
||||
|
||||
def _generate_efficiency_insight(self, report: Report) -> str:
|
||||
"""Analysiert Effizienz"""
|
||||
efficiency = report.total_accounts_created / report.total_attempts if report.total_attempts > 0 else 0
|
||||
return f"Effizienz: {efficiency:.1%} der Versuche erfolgreich"
|
||||
|
||||
def _generate_strategic_recommendations(self, report: Report) -> List[str]:
|
||||
"""Generiert strategische Empfehlungen"""
|
||||
return [
|
||||
"Monatliche Review der Error-Patterns durchführen",
|
||||
"Proxy-Pool-Qualität evaluieren",
|
||||
"Fingerprint-Rotation-Strategie anpassen"
|
||||
]
|
||||
|
||||
def export_report(self,
|
||||
report: Report,
|
||||
format: str = 'json',
|
||||
include_sensitive: bool = False) -> bytes:
|
||||
"""
|
||||
Exportiert Report in verschiedenen Formaten.
|
||||
|
||||
Args:
|
||||
report: Zu exportierender Report
|
||||
format: Export-Format ('json', 'csv', 'html')
|
||||
include_sensitive: Ob sensitive Daten inkludiert werden sollen
|
||||
|
||||
Returns:
|
||||
Report als Bytes
|
||||
"""
|
||||
if format == 'json':
|
||||
return self._export_json(report, include_sensitive)
|
||||
elif format == 'csv':
|
||||
return self._export_csv(report)
|
||||
elif format == 'html':
|
||||
return self._export_html(report)
|
||||
else:
|
||||
raise ValueError(f"Unsupported format: {format}")
|
||||
|
||||
def _export_json(self, report: Report, include_sensitive: bool) -> bytes:
|
||||
"""Exportiert als JSON"""
|
||||
data = report.to_dict()
|
||||
|
||||
# Entferne sensitive Daten wenn gewünscht
|
||||
if not include_sensitive:
|
||||
# Entferne Account-spezifische Daten
|
||||
for platform_stat in data.get('platform_stats', []):
|
||||
if 'account_details' in platform_stat:
|
||||
del platform_stat['account_details']
|
||||
|
||||
return json.dumps(data, indent=2).encode('utf-8')
|
||||
|
||||
def _export_csv(self, report: Report) -> bytes:
|
||||
"""Exportiert als CSV"""
|
||||
output = StringIO()
|
||||
writer = csv.writer(output)
|
||||
|
||||
# Header
|
||||
writer.writerow(['Metric', 'Value', 'Unit', 'Trend'])
|
||||
|
||||
# Metrics
|
||||
for metric in report.metrics:
|
||||
writer.writerow([metric.name, metric.value, metric.unit, metric.trend])
|
||||
|
||||
# Platform Stats
|
||||
writer.writerow([])
|
||||
writer.writerow(['Platform', 'Attempts', 'Success', 'Success Rate', 'Avg Duration'])
|
||||
|
||||
for stat in report.platform_stats:
|
||||
writer.writerow([
|
||||
stat.platform,
|
||||
stat.total_attempts,
|
||||
stat.successful_accounts,
|
||||
f"{stat.success_rate:.1%}",
|
||||
f"{stat.avg_duration_seconds:.1f}s"
|
||||
])
|
||||
|
||||
return output.getvalue().encode('utf-8')
|
||||
|
||||
def _export_html(self, report: Report) -> bytes:
    """Render the report as a standalone HTML page, returned as UTF-8 bytes.

    Builds a summary section, a platform table, and insight /
    recommendation lists. Success rates above 0.7 get the green
    'success' CSS class, otherwise the orange 'warning' class.
    NOTE(review): insights/recommendations are interpolated without HTML
    escaping — fine for internally generated strings, confirm no user input.
    """
    # Static header with inline CSS and the summary block.
    # Literal CSS braces are doubled because this is an f-string.
    html = f"""
    <html>
    <head>
    <title>Report {report.report_id}</title>
    <style>
    body {{ font-family: Arial, sans-serif; margin: 20px; }}
    h1 {{ color: #333; }}
    .metric {{ margin: 10px 0; }}
    .success {{ color: green; }}
    .warning {{ color: orange; }}
    .error {{ color: red; }}
    table {{ border-collapse: collapse; width: 100%; }}
    th, td {{ border: 1px solid #ddd; padding: 8px; text-align: left; }}
    th {{ background-color: #f2f2f2; }}
    </style>
    </head>
    <body>
    <h1>{report.report_type.value.title()} Report</h1>
    <p>Period: {report.start_date.strftime('%Y-%m-%d')} to {report.end_date.strftime('%Y-%m-%d')}</p>

    <h2>Summary</h2>
    <div class="metric">Total Accounts: <strong>{report.total_accounts_created}</strong></div>
    <div class="metric">Success Rate: <strong class="{'success' if report.overall_success_rate > 0.7 else 'warning'}">{report.overall_success_rate:.1%}</strong></div>
    <div class="metric">Average Creation Time: <strong>{report.avg_creation_time:.1f}s</strong></div>

    <h2>Platform Statistics</h2>
    <table>
    <tr>
    <th>Platform</th>
    <th>Attempts</th>
    <th>Success</th>
    <th>Success Rate</th>
    </tr>
    """

    # One table row per platform
    for stat in report.platform_stats:
        html += f"""
    <tr>
    <td>{stat.platform}</td>
    <td>{stat.total_attempts}</td>
    <td>{stat.successful_accounts}</td>
    <td class="{'success' if stat.success_rate > 0.7 else 'warning'}">{stat.success_rate:.1%}</td>
    </tr>
    """

    # Close the table and open the insights list
    html += """
    </table>

    <h2>Insights</h2>
    <ul>
    """

    for insight in report.insights:
        html += f"<li>{insight}</li>"

    # Recommendations list
    html += """
    </ul>

    <h2>Recommendations</h2>
    <ul>
    """

    for rec in report.recommendations:
        html += f"<li>{rec}</li>"

    # Close the document
    html += """
    </ul>
    </body>
    </html>
    """

    return html.encode('utf-8')
335
application/use_cases/log_account_creation_use_case.py
Normale Datei
335
application/use_cases/log_account_creation_use_case.py
Normale Datei
@ -0,0 +1,335 @@
|
||||
"""
|
||||
Log Account Creation Use Case - Strukturiertes Logging für Account-Erstellung
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import Dict, Any, Optional, List
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
|
||||
from domain.services.analytics_service import IAnalyticsService
|
||||
from domain.entities.account_creation_event import (
|
||||
AccountCreationEvent, AccountData, WorkflowStep,
|
||||
WorkflowStepStatus, ErrorDetails
|
||||
)
|
||||
from domain.value_objects.action_timing import ActionType
|
||||
|
||||
logger = logging.getLogger("log_account_creation_use_case")
|
||||
|
||||
|
||||
class LogAccountCreationUseCase:
    """
    Use case for structured logging of account creations.

    Tracks detailed steps, performance metrics and error context.
    """
def __init__(self, analytics_service: IAnalyticsService):
|
||||
self.analytics_service = analytics_service
|
||||
self.active_events = {} # event_id -> AccountCreationEvent
|
||||
|
||||
def start_tracking(self,
                   platform: str,
                   session_id: str,
                   fingerprint_id: str,
                   context: Optional[Dict[str, Any]] = None) -> str:
    """
    Begin tracking a new account creation.

    Args:
        platform: Target platform (only used in the log message here)
        session_id: Session ID
        fingerprint_id: Fingerprint ID
        context: Extra context (proxy/browser settings)

    Returns:
        Event ID to use for subsequent tracking calls
    """
    # Normalize the optional context so the defaults below apply uniformly.
    ctx = context or {}
    event = AccountCreationEvent(
        event_id=str(uuid.uuid4()),
        timestamp=datetime.now(),
        session_id=session_id,
        fingerprint_id=fingerprint_id,
        proxy_used=ctx.get('proxy_used', False),
        proxy_type=ctx.get('proxy_type'),
        browser_type=ctx.get('browser_type', 'chromium'),
        headless=ctx.get('headless', False)
    )

    # Register for step tracking until finish_tracking() is called.
    self.active_events[event.event_id] = event

    logger.info(f"Started tracking account creation {event.event_id} for {platform}")

    return event.event_id
def track_step(self,
               event_id: str,
               step_name: str,
               metadata: Optional[Dict[str, Any]] = None) -> None:
    """
    Begin tracking one workflow step.

    Args:
        event_id: Event ID
        step_name: Name of the step
        metadata: Extra step metadata
    """
    event = self.active_events.get(event_id)
    if not event:
        logger.error(f"No active event found for {event_id}")
        return

    # Record the step as in-progress; complete_step() finalizes it later.
    event.add_step(WorkflowStep(
        step_name=step_name,
        start_time=datetime.now(),
        status=WorkflowStepStatus.IN_PROGRESS,
        metadata=metadata or {}
    ))
    logger.debug(f"Started step '{step_name}' for event {event_id}")
def complete_step(self,
                  event_id: str,
                  step_name: str,
                  success: bool = True,
                  error_message: Optional[str] = None,
                  retry_count: int = 0) -> None:
    """
    Mark a workflow step as finished.

    Args:
        event_id: Event ID
        step_name: Name of the step
        success: Whether the step succeeded
        error_message: Error message on failure
        retry_count: Number of retries performed
    """
    event = self.active_events.get(event_id)
    if not event:
        logger.error(f"No active event found for {event_id}")
        return

    step = event.get_step(step_name)
    if not step:
        logger.error(f"Step '{step_name}' not found in event {event_id}")
        return

    # Finalize the step record
    step.end_time = datetime.now()
    step.status = WorkflowStepStatus.COMPLETED if success else WorkflowStepStatus.FAILED
    step.retry_count = retry_count
    step.error_message = error_message

    # Roll step-level counters up into the event totals.
    event.network_requests += step.metadata.get('network_requests', 0)
    event.screenshots_taken += step.metadata.get('screenshots', 0)

    logger.debug(f"Completed step '{step_name}' for event {event_id} "
                 f"(success: {success}, duration: {step.duration})")
def set_account_data(self,
                     event_id: str,
                     username: str,
                     password: str,
                     email: str,
                     additional_data: Optional[Dict[str, Any]] = None) -> None:
    """
    Attach account data for a successfully created account.

    Args:
        event_id: Event ID
        username: Username
        password: Password
        email: E-mail address
        additional_data: Extra fields (platform, phone, birthday, ...)
    """
    event = self.active_events.get(event_id)
    if not event:
        logger.error(f"No active event found for {event_id}")
        return

    # Normalize the optional extras so the .get() defaults apply uniformly;
    # the same dict is also stored as the account metadata (as before).
    extra = additional_data or {}
    event.account_data = AccountData(
        platform=extra.get('platform', 'unknown'),
        username=username,
        password=password,
        email=email,
        phone=extra.get('phone'),
        full_name=extra.get('full_name'),
        birthday=extra.get('birthday'),
        verification_status=extra.get('verification_status', 'unverified'),
        metadata=extra
    )

    logger.info(f"Set account data for {username} in event {event_id}")
def log_error(self,
              event_id: str,
              error_type: str,
              error_message: str,
              stack_trace: Optional[str] = None,
              screenshot_path: Optional[str] = None,
              context: Optional[Dict[str, Any]] = None) -> None:
    """
    Log an error that occurred during account creation.

    Overwrites any previously recorded error details on the event.

    Args:
        event_id: Event ID
        error_type: Type of the error
        error_message: Error message
        stack_trace: Stack trace, if captured
        screenshot_path: Path to the error screenshot, if taken
        context: Additional error context
    """
    event = self.active_events.get(event_id)
    if not event:
        logger.error(f"No active event found for {event_id}")
        return

    # Attach structured error details to the in-flight event.
    event.error_details = ErrorDetails(
        error_type=error_type,
        error_message=error_message,
        stack_trace=stack_trace,
        screenshot_path=screenshot_path,
        context=context or {}
    )

    logger.error(f"Logged error for event {event_id}: {error_type} - {error_message}")
def finish_tracking(self,
                    event_id: str,
                    success: bool,
                    final_status: Optional[Dict[str, Any]] = None) -> None:
    """
    Stop tracking, persist the event, and log a summary.

    Args:
        event_id: Event ID
        success: Whether the account creation succeeded
        final_status: Final status/metadata merged into the account data
    """
    event = self.active_events.get(event_id)
    if not event:
        logger.error(f"No active event found for {event_id}")
        return

    # Finalize the event
    event.success = success
    event.calculate_duration()

    # Merge final metadata into the account data, when both exist.
    if final_status:
        if event.account_data:
            event.account_data.metadata.update(final_status)

    # Persist via the analytics backend, then drop it from the registry.
    self.analytics_service.log_event(event)
    self.active_events.pop(event_id)

    # Emit a human-readable one-line summary
    self._log_summary(event)
def _log_summary(self, event: AccountCreationEvent) -> None:
    """Log a one-line summary of a finished event."""
    parts = [f"Account creation {event.event_id} "]

    if event.success:
        parts.append("SUCCEEDED")
        if event.account_data:
            parts.append(f" - {event.account_data.username} on {event.account_data.platform}")
    else:
        parts.append("FAILED")
        if event.error_details:
            parts.append(f" - {event.error_details.error_type}: {event.error_details.error_message}")

    # Timing details are only available once calculate_duration() has run.
    if event.duration:
        parts.append(f" (duration: {event.duration.total_seconds():.1f}s")
        parts.append(f", steps: {len(event.steps_completed)}")
        parts.append(f", retries: {event.total_retry_count})")

    logger.info("".join(parts))
def track_performance_metric(self,
                             event_id: str,
                             metric_name: str,
                             value: float,
                             tags: Optional[Dict[str, str]] = None) -> None:
    """Track a performance metric via the analytics service.

    Args:
        event_id: Event ID attached to the metric as a tag.
        metric_name: Name of the metric.
        value: Metric value.
        tags: Optional additional tags; the caller's dict is never mutated.
    """
    # Copy instead of aliasing: the original `tags or {}` wrote
    # 'event_id' straight into the caller's dictionary.
    metric_tags = dict(tags) if tags else {}
    metric_tags['event_id'] = event_id

    self.analytics_service.track_performance(metric_name, value, metric_tags)
|
||||
|
||||
def get_active_events(self) -> List[Dict[str, Any]]:
    """Return a status snapshot for every event still being tracked."""
    snapshot = []

    for evt_id, evt in self.active_events.items():
        # First step still marked as in-progress, if any.
        running_step = next(
            (s.step_name for s in evt.steps_completed
             if s.status == WorkflowStepStatus.IN_PROGRESS),
            None,
        )
        completed = sum(
            1 for s in evt.steps_completed
            if s.status == WorkflowStepStatus.COMPLETED
        )
        snapshot.append({
            'event_id': evt_id,
            'started_at': evt.timestamp.isoformat(),
            'duration_seconds': (datetime.now() - evt.timestamp).total_seconds(),
            'current_step': running_step,
            'steps_completed': completed,
            'platform': evt.account_data.platform if evt.account_data else 'unknown',
        })

    return snapshot
|
||||
|
||||
def cleanup_stale_events(self, timeout_minutes: int = 30) -> int:
    """Clean up events that have been running for too long.

    Each stale event gets a 'timeout' error logged and is finished as
    a failure with final status ``{'reason': 'timeout'}``.

    Args:
        timeout_minutes: Age threshold in minutes.

    Returns:
        Number of events that were cleaned up.
    """
    timeout = timedelta(minutes=timeout_minutes)
    # Evaluate "now" once so all events are judged against the same instant
    # (the original called datetime.now() per iteration).
    now = datetime.now()

    # Collect first, then finish: finish_tracking mutates active_events.
    stale_events = [
        event_id for event_id, event in self.active_events.items()
        if now - event.timestamp > timeout
    ]

    for event_id in stale_events:
        # Mark the event as timed out ...
        self.log_error(
            event_id,
            'timeout',
            f'Event timed out after {timeout_minutes} minutes',
            context={'timeout_minutes': timeout_minutes}
        )
        # ... then close it out as a failure.
        self.finish_tracking(event_id, success=False,
                             final_status={'reason': 'timeout'})

    if stale_events:
        logger.warning(f"Cleaned up {len(stale_events)} stale events")

    return len(stale_events)
|
||||
362
application/use_cases/method_rotation_use_case.py
Normale Datei
362
application/use_cases/method_rotation_use_case.py
Normale Datei
@ -0,0 +1,362 @@
|
||||
"""
|
||||
Use cases for method rotation system.
|
||||
Implements business logic for method selection, rotation, and performance tracking.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional, Dict, Any
|
||||
from dataclasses import dataclass
|
||||
|
||||
from domain.entities.method_rotation import (
|
||||
MethodStrategy, RotationSession, RotationEvent, PlatformMethodState,
|
||||
RotationEventType, RotationStrategy, RiskLevel
|
||||
)
|
||||
from domain.repositories.method_rotation_repository import (
|
||||
IMethodStrategyRepository, IRotationSessionRepository,
|
||||
IPlatformMethodStateRepository
|
||||
)
|
||||
|
||||
|
||||
@dataclass
class RotationContext:
    """Context information for rotation decisions.

    Mutable-container fields default to ``None`` and are normalized to
    fresh empty containers in ``__post_init__`` (avoids the shared
    mutable-default pitfall).
    """
    # Target platform identifier (e.g. "instagram").
    platform: str
    # Optional account the rotation is performed for.
    account_id: Optional[str] = None
    # Optional browser fingerprint to keep consistent across attempts.
    fingerprint_id: Optional[str] = None
    # Methods that must not be selected; None is normalized to [].
    excluded_methods: Optional[List[str]] = None
    # Upper bound on acceptable method risk.
    max_risk_level: RiskLevel = RiskLevel.HIGH
    # When True, only the safest (emergency) methods are considered.
    emergency_mode: bool = False
    # Free-form metadata copied into new sessions; None is normalized to {}.
    session_metadata: Optional[Dict[str, Any]] = None

    def __post_init__(self):
        # Replace None sentinels with per-instance containers.
        if self.excluded_methods is None:
            self.excluded_methods = []
        if self.session_metadata is None:
            self.session_metadata = {}
|
||||
|
||||
|
||||
class MethodRotationUseCase:
    """
    Core use case for method rotation operations.
    Handles method selection, rotation logic, and performance tracking.

    Collaborators (injected):
        strategy_repo: per-platform method strategies and their metrics.
        session_repo: active/archived rotation sessions.
        state_repo: per-platform rotation state (daily counters, blocks,
            emergency flag, preferred order).
    """

    def __init__(self,
                 strategy_repo: IMethodStrategyRepository,
                 session_repo: IRotationSessionRepository,
                 state_repo: IPlatformMethodStateRepository):
        self.strategy_repo = strategy_repo
        self.session_repo = session_repo
        self.state_repo = state_repo

    def start_rotation_session(self, context: RotationContext) -> RotationSession:
        """
        Start a new rotation session and select the optimal initial method.

        Raises:
            ValueError: If no method is currently available for the platform.
        """
        # Check for existing active session
        existing_session = self.session_repo.find_active_session(
            context.platform, context.account_id
        )

        if existing_session:
            # Archive the old session (marked unsuccessful) and start fresh
            self.session_repo.archive_session(existing_session.session_id, False)

        # Get optimal method for initial attempt
        optimal_method = self.get_optimal_method(context)

        if not optimal_method:
            raise ValueError(f"No available methods for platform {context.platform}")

        # Create new session
        session = RotationSession(
            session_id=f"session_{uuid.uuid4().hex}",
            platform=context.platform,
            account_id=context.account_id,
            current_method=optimal_method.method_name,
            fingerprint_id=context.fingerprint_id,
            # copy() so later session mutations don't leak into the context
            session_metadata=context.session_metadata.copy()
        )

        # Update platform state (count this attempt against the daily quota)
        platform_state = self.state_repo.get_or_create_state(context.platform)
        platform_state.increment_daily_attempts(optimal_method.method_name)
        self.state_repo.save(platform_state)

        # Save session
        self.session_repo.save(session)

        return session

    def get_optimal_method(self, context: RotationContext) -> Optional[MethodStrategy]:
        """
        Get the optimal method based on current conditions and strategy.

        Returns None when nothing is available under the current constraints.
        """
        platform_state = self.state_repo.get_or_create_state(context.platform)

        # In emergency mode, use only the safest methods
        if context.emergency_mode or platform_state.emergency_mode:
            return self._get_emergency_method(context)

        # Use platform-specific rotation strategy
        if platform_state.rotation_strategy == RotationStrategy.ADAPTIVE:
            return self._get_adaptive_method(context, platform_state)
        elif platform_state.rotation_strategy == RotationStrategy.SEQUENTIAL:
            return self._get_sequential_method(context, platform_state)
        elif platform_state.rotation_strategy == RotationStrategy.RANDOM:
            return self._get_random_method(context, platform_state)
        else:
            # Default / SMART and any future strategies
            return self._get_smart_method(context, platform_state)

    def rotate_method(self, session_id: str, reason: str = "failure") -> Optional[MethodStrategy]:
        """
        Rotate to the next best available method for an active session.

        Returns the newly selected strategy, or None when the session is
        gone/inactive or no untried method remains (session then archived).
        """
        session = self.session_repo.find_by_id(session_id)
        if not session or not session.is_active:
            return None

        # Create context for finding next method; every already-attempted
        # method is excluded so rotation never repeats a method.
        context = RotationContext(
            platform=session.platform,
            account_id=session.account_id,
            fingerprint_id=session.fingerprint_id,
            excluded_methods=session.attempted_methods.copy()
        )

        # Find next method
        next_method = self.get_optimal_method(context)

        if not next_method:
            # No more methods available
            self.session_repo.archive_session(session_id, False)
            return None

        # Update session
        session.rotate_to_method(next_method.method_name, reason)
        self.session_repo.save(session)

        # Update platform state
        platform_state = self.state_repo.get_or_create_state(session.platform)
        platform_state.increment_daily_attempts(next_method.method_name)
        self.state_repo.save(platform_state)

        return next_method

    def record_method_result(self, session_id: str, method_name: str,
                             success: bool, execution_time: float = 0.0,
                             error_details: Optional[Dict] = None) -> None:
        """
        Record the result of a method execution and update metrics.

        On success the session is archived as successful; on failure the
        failure handler may block the method or trigger emergency mode.
        """
        session = self.session_repo.find_by_id(session_id)
        if not session:
            # Unknown session: nothing to record.
            return

        # Update session metrics
        error_message = error_details.get('message') if error_details else None
        self.session_repo.update_session_metrics(
            session_id, success, method_name, error_message
        )

        # Update method strategy performance
        strategy = self.strategy_repo.find_by_platform_and_method(
            session.platform, method_name
        )
        if strategy:
            self.strategy_repo.update_performance_metrics(
                strategy.strategy_id, success, execution_time
            )

        # Update platform state
        if success:
            self.state_repo.record_method_success(session.platform, method_name)
            # Archive successful session
            self.session_repo.archive_session(session_id, True)
        else:
            # Handle failure - might trigger automatic rotation
            self._handle_method_failure(session, method_name, error_details or {})

    def should_rotate_method(self, session_id: str) -> bool:
        """
        Determine if method rotation should occur based on current session state.
        """
        session = self.session_repo.find_by_id(session_id)
        if not session or not session.is_active:
            return False

        return session.should_rotate

    def get_session_status(self, session_id: str) -> Optional[Dict[str, Any]]:
        """
        Get detailed status information for a rotation session.

        Returns None when the session does not exist.
        """
        session = self.session_repo.find_by_id(session_id)
        if not session:
            return None

        current_strategy = self.strategy_repo.find_by_platform_and_method(
            session.platform, session.current_method
        )

        return {
            'session_id': session.session_id,
            'platform': session.platform,
            'is_active': session.is_active,
            'current_method': session.current_method,
            'attempted_methods': session.attempted_methods,
            'rotation_count': session.rotation_count,
            'success_count': session.success_count,
            'failure_count': session.failure_count,
            'success_rate': session.success_rate,
            'session_duration_minutes': session.session_duration.total_seconds() / 60,
            'current_strategy_effectiveness': current_strategy.effectiveness_score if current_strategy else 0.0,
            'should_rotate': session.should_rotate
        }

    def get_platform_method_recommendations(self, platform: str) -> Dict[str, Any]:
        """
        Get method recommendations and insights for a platform.
        """
        strategies = self.strategy_repo.find_active_by_platform(platform)
        platform_stats = self.strategy_repo.get_platform_statistics(platform)
        session_stats = self.session_repo.get_session_statistics(platform, days=30)
        platform_state = self.state_repo.find_by_platform(platform)

        recommendations = []

        for strategy in strategies[:3]:  # Top 3 methods
            recommendations.append({
                'method_name': strategy.method_name,
                'effectiveness_score': strategy.effectiveness_score,
                'success_rate': strategy.success_rate,
                'risk_level': strategy.risk_level.value,
                'is_on_cooldown': strategy.is_on_cooldown,
                # Remaining quota; falls back to the full daily budget when
                # no platform state exists yet.
                'daily_attempts_remaining': strategy.max_daily_attempts - platform_state.daily_attempt_counts.get(strategy.method_name, 0) if platform_state else strategy.max_daily_attempts
            })

        return {
            'platform': platform,
            'recommended_methods': recommendations,
            'platform_statistics': platform_stats,
            'session_statistics': session_stats,
            'emergency_mode': platform_state.emergency_mode if platform_state else False,
            'rotation_strategy': platform_state.rotation_strategy.value if platform_state else 'adaptive'
        }

    def enable_emergency_mode(self, platform: str, reason: str = "system_override") -> None:
        """Enable emergency mode for a platform and archive all active sessions."""
        self.state_repo.set_emergency_mode(platform, True)

        # Archive all active sessions for safety
        active_sessions = self.session_repo.find_active_sessions_by_platform(platform)
        for session in active_sessions:
            session.session_metadata['emergency_archived'] = True
            session.session_metadata['emergency_reason'] = reason
            self.session_repo.archive_session(session.session_id, False)

    def disable_emergency_mode(self, platform: str) -> None:
        """Disable emergency mode for a platform"""
        self.state_repo.set_emergency_mode(platform, False)

    def _get_adaptive_method(self, context: RotationContext,
                             platform_state: PlatformMethodState) -> Optional[MethodStrategy]:
        """Get method using adaptive strategy based on recent performance"""
        # Prefer last successful method if it's available
        if (platform_state.last_successful_method and
                platform_state.last_successful_method not in context.excluded_methods):

            strategy = self.strategy_repo.find_by_platform_and_method(
                context.platform, platform_state.last_successful_method
            )

            if (strategy and strategy.is_active and
                    not strategy.is_on_cooldown and
                    platform_state.is_method_available(strategy.method_name, strategy.max_daily_attempts)):
                return strategy

        # Fall back to best available method
        return self.strategy_repo.get_next_available_method(
            context.platform, context.excluded_methods, context.max_risk_level.value
        )

    def _get_sequential_method(self, context: RotationContext,
                               platform_state: PlatformMethodState) -> Optional[MethodStrategy]:
        """Get method using sequential strategy (first usable in preferred order)"""
        for method_name in platform_state.preferred_methods:
            if method_name in context.excluded_methods:
                continue

            strategy = self.strategy_repo.find_by_platform_and_method(
                context.platform, method_name
            )

            if (strategy and strategy.is_active and
                    not strategy.is_on_cooldown and
                    platform_state.is_method_available(method_name, strategy.max_daily_attempts)):
                return strategy

        return None

    def _get_random_method(self, context: RotationContext,
                           platform_state: PlatformMethodState) -> Optional[MethodStrategy]:
        """Get method using random strategy (uniform choice among usable methods)"""
        import random

        available_strategies = []
        for method_name in platform_state.preferred_methods:
            if method_name in context.excluded_methods:
                continue

            strategy = self.strategy_repo.find_by_platform_and_method(
                context.platform, method_name
            )

            if (strategy and strategy.is_active and
                    not strategy.is_on_cooldown and
                    platform_state.is_method_available(method_name, strategy.max_daily_attempts)):
                available_strategies.append(strategy)

        return random.choice(available_strategies) if available_strategies else None

    def _get_smart_method(self, context: RotationContext,
                          platform_state: PlatformMethodState) -> Optional[MethodStrategy]:
        """Get method using AI-driven smart strategy"""
        # For now, smart strategy is the same as adaptive
        # This can be enhanced with ML models in the future
        return self._get_adaptive_method(context, platform_state)

    def _get_emergency_method(self, context: RotationContext) -> Optional[MethodStrategy]:
        """Get the safest available method for emergency mode"""
        # NOTE(review): unlike the other selectors this does not check
        # is_method_available/daily quotas — presumably intentional for
        # emergencies; confirm.
        emergency_strategies = self.strategy_repo.get_emergency_methods(context.platform)

        for strategy in emergency_strategies:
            if (strategy.method_name not in context.excluded_methods and
                    not strategy.is_on_cooldown):
                return strategy

        return None

    def _handle_method_failure(self, session: RotationSession, method_name: str,
                               error_details: Dict) -> None:
        """Handle method failure and determine if action is needed"""
        # Check if this is a recurring failure pattern
        if error_details.get('error_type') == 'rate_limit':
            # Temporarily block the method
            self.state_repo.block_method(
                session.platform, method_name,
                f"Rate limited: {error_details.get('message', 'Unknown')}"
            )

        elif error_details.get('error_type') == 'account_suspended':
            # This might indicate method detection, block temporarily
            self.state_repo.block_method(
                session.platform, method_name,
                f"Possible detection: {error_details.get('message', 'Unknown')}"
            )

        # Check if we need to enable emergency mode
        platform_stats = self.strategy_repo.get_platform_statistics(session.platform)
        if platform_stats.get('recent_failures_24h', 0) > 10:
            self.enable_emergency_mode(session.platform, "high_failure_rate")
|
||||
81
application/use_cases/one_click_login_use_case.py
Normale Datei
81
application/use_cases/one_click_login_use_case.py
Normale Datei
@ -0,0 +1,81 @@
|
||||
"""
|
||||
One-Click Login Use Case - Ermöglicht Login mit gespeicherter Session
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, Optional, Tuple
|
||||
from datetime import datetime
|
||||
|
||||
from domain.value_objects.login_credentials import LoginCredentials
|
||||
from infrastructure.repositories.fingerprint_repository import FingerprintRepository
|
||||
from infrastructure.repositories.account_repository import AccountRepository
|
||||
|
||||
logger = logging.getLogger("one_click_login_use_case")
|
||||
|
||||
|
||||
class OneClickLoginUseCase:
    """Use case for one-click login with a stored session.

    Session-based login is currently disabled; this use case always
    instructs the caller to perform a normal login, optionally preloading
    the account's credentials from the repository.
    """

    def __init__(self,
                 fingerprint_repository: FingerprintRepository = None,
                 account_repository: AccountRepository = None):
        self.fingerprint_repository = fingerprint_repository
        self.account_repository = account_repository

    def _load_account_data(self, account_id: str) -> Optional[Dict[str, Any]]:
        """Fetch credential data for the account, or None when unavailable."""
        if not self.account_repository:
            return None
        try:
            record = self.account_repository.get_by_id(int(account_id))
        except Exception as e:
            logger.error(f"Fehler beim Laden der Account-Daten: {e}")
            return None
        if not record:
            return None
        return {
            'username': record.get('username'),
            'password': record.get('password'),
            'platform': record.get('platform'),
            'fingerprint_id': record.get('fingerprint_id')
        }

    def execute(self, account_id: str, platform: str) -> Dict[str, Any]:
        """One-click login is disabled - always instructs a normal login.

        Args:
            account_id: ID of the account.
            platform: Platform name (instagram, facebook, etc.).

        Returns:
            Dict instructing the caller to perform a normal login.
        """
        try:
            # Session login is disabled - always fall back to normal login.
            logger.info(f"Session-Login deaktiviert für Account {account_id} - verwende normalen Login")

            return {
                'success': False,               # no session login possible
                'can_perform_login': True,      # normal login is possible
                'account_data': self._load_account_data(account_id),
                'message': 'Session-Login deaktiviert - normaler Login erforderlich',
                'requires_manual_login': False
            }
        except Exception as e:
            logger.error(f"Fehler beim One-Click-Login: {e}")
            return {
                'success': False,
                'can_perform_login': True,
                'account_data': None,
                'message': f'Fehler beim Login: {str(e)}',
                'requires_manual_login': False
            }

    def check_session_status(self, account_id: str) -> Dict[str, Any]:
        """Session status check is disabled (session feature removed)."""
        return {'state': 'unknown', 'message': 'Session-Status deaktiviert'}
|
||||
0
browser/__init__.py
Normale Datei
0
browser/__init__.py
Normale Datei
251
browser/cookie_consent_handler.py
Normale Datei
251
browser/cookie_consent_handler.py
Normale Datei
@ -0,0 +1,251 @@
|
||||
"""
|
||||
Cookie Consent Handler für Browser-Sessions
|
||||
|
||||
Behandelt Cookie-Consent-Seiten bei der Session-Wiederherstellung
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
from playwright.sync_api import Page
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CookieConsentHandler:
    """Handles cookie-consent dialogs of various platforms."""

    @staticmethod
    def handle_instagram_consent(page: Page) -> bool:
        """Handle Instagram's cookie-consent page.

        Prefers declining optional cookies; falls back to accepting all
        cookies when only that option can be found.

        Args:
            page: Playwright Page object.

        Returns:
            bool: True if consent was handled, False otherwise.
        """
        try:
            # Wait briefly for the page to settle.
            page.wait_for_load_state('networkidle', timeout=5000)

            # Indicators that we are on the cookie-consent page.
            consent_indicators = [
                # German texts
                "text=/.*cookies erlauben.*/i",
                "text=/.*optionale cookies ablehnen.*/i",
                "button:has-text('Optionale Cookies ablehnen')",
                "button:has-text('Nur erforderliche Cookies erlauben')",
                # English texts
                "button:has-text('Decline optional cookies')",
                "button:has-text('Only allow essential cookies')",
                # Generic selectors
                "[aria-label*='cookie']",
                "text=/.*verwendung von cookies.*/i"
            ]

            # Try to click "decline optional cookies" first (privacy-friendly).
            decline_buttons = [
                "button:has-text('Optionale Cookies ablehnen')",
                "button:has-text('Nur erforderliche Cookies erlauben')",
                "button:has-text('Decline optional cookies')",
                "button:has-text('Only allow essential cookies')"
            ]

            for button_selector in decline_buttons:
                try:
                    button = page.locator(button_selector).first
                    if button.is_visible():
                        logger.info(f"Found consent decline button: {button_selector}")

                        # Robust click strategies for the consent button.
                        success = False
                        try:
                            # Strategy 1: standard click
                            button.click(timeout=5000)
                            success = True
                        except Exception as click_error:
                            logger.warning(f"Standard click fehlgeschlagen: {click_error}")

                            # Strategy 2: force click
                            try:
                                button.click(force=True, timeout=5000)
                                success = True
                            except Exception as force_error:
                                logger.warning(f"Force click fehlgeschlagen: {force_error}")

                                # Strategy 3: JavaScript click. The selector is
                                # passed as an argument instead of being
                                # interpolated into the script: the original
                                # f-string embedded selectors containing single
                                # quotes, producing invalid JavaScript.
                                # NOTE(review): Playwright-only pseudo-classes
                                # like :has-text() are not valid CSS, so
                                # querySelector only works for plain CSS
                                # selectors here.
                                try:
                                    js_result = page.evaluate(
                                        """
                                        (selector) => {
                                            const button = document.querySelector(selector);
                                            if (button) {
                                                button.click();
                                                return true;
                                            }
                                            return false;
                                        }
                                        """,
                                        button_selector,
                                    )
                                    if js_result:
                                        success = True
                                        logger.info("JavaScript click erfolgreich für Cookie-Consent")
                                except Exception as js_error:
                                    logger.warning(f"JavaScript click fehlgeschlagen: {js_error}")

                        if success:
                            logger.info("Clicked decline optional cookies button")

                            # Wait for navigation.
                            page.wait_for_load_state('networkidle', timeout=5000)

                            # Persist consent in localStorage.
                            page.evaluate("""
                                () => {
                                    // Instagram Consent Storage für "nur erforderliche Cookies"
                                    localStorage.setItem('ig_cb', '2'); // 2 = nur erforderliche Cookies
                                    localStorage.setItem('ig_consent_timestamp', Date.now().toString());

                                    // Meta Consent
                                    localStorage.setItem('consent_status', 'essential_only');
                                    localStorage.setItem('cookie_consent', JSON.stringify({
                                        necessary: true,
                                        analytics: false,
                                        marketing: false,
                                        timestamp: Date.now()
                                    }));
                                }
                            """)

                            return True
                        else:
                            logger.error(f"Alle Click-Strategien für Cookie-Consent Button fehlgeschlagen: {button_selector}")
                            continue
                except Exception as e:
                    logger.debug(f"Consent check failed for {button_selector}: {e}")
                    continue

            # Fallback: check whether a consent page is shown at all.
            for indicator in consent_indicators:
                try:
                    if page.locator(indicator).first.is_visible():
                        logger.warning("Cookie consent page detected but couldn't find decline button")

                        # Last resort: accept all cookies.
                        accept_buttons = [
                            "button:has-text('Alle Cookies erlauben')",
                            "button:has-text('Allow all cookies')",
                            "button:has-text('Accept all')",
                            # Instagram-specific selector based on div role
                            "div[role='button']:has-text('Alle Cookies erlauben')",
                            # Fallback with partial text
                            "[role='button']:has-text('Cookies erlauben')",
                            # XPath as a final fallback
                            "xpath=//div[@role='button' and contains(text(),'Alle Cookies erlauben')]"
                        ]

                        for accept_button in accept_buttons:
                            try:
                                button = page.locator(accept_button).first
                                if button.is_visible():
                                    logger.info(f"Fallback: Accepting all cookies with {accept_button}")

                                    # Robust click strategies.
                                    success = False
                                    try:
                                        # Strategy 1: standard click
                                        button.click(timeout=5000)
                                        success = True
                                    except Exception as click_error:
                                        logger.warning(f"Standard click fehlgeschlagen für Accept: {click_error}")

                                        # Strategy 2: force click
                                        try:
                                            button.click(force=True, timeout=5000)
                                            success = True
                                        except Exception as force_error:
                                            logger.warning(f"Force click fehlgeschlagen für Accept: {force_error}")

                                            # Strategy 3: JavaScript click for div[role='button']
                                            try:
                                                js_result = page.evaluate("""
                                                    (selector) => {
                                                        const elements = document.querySelectorAll(selector);
                                                        for (const elem of elements) {
                                                            if (elem && elem.textContent && elem.textContent.includes('Cookies erlauben')) {
                                                                elem.click();
                                                                return true;
                                                            }
                                                        }
                                                        // Fallback: Suche nach role='button' mit Text
                                                        const roleButtons = document.querySelectorAll('[role="button"]');
                                                        for (const btn of roleButtons) {
                                                            if (btn && btn.textContent && btn.textContent.includes('Cookies erlauben')) {
                                                                btn.click();
                                                                return true;
                                                            }
                                                        }
                                                        return false;
                                                    }
                                                """, "[role='button']")

                                                if js_result:
                                                    success = True
                                                    logger.info("JavaScript click erfolgreich für Cookie Accept Button")
                                            except Exception as js_error:
                                                logger.warning(f"JavaScript click fehlgeschlagen für Accept: {js_error}")

                                    if success:
                                        page.wait_for_load_state('networkidle', timeout=5000)

                                        # Persist "all cookies" consent in localStorage.
                                        page.evaluate("""
                                            () => {
                                                // Instagram Consent Storage für "alle Cookies"
                                                localStorage.setItem('ig_cb', '1'); // 1 = alle Cookies akzeptiert
                                                localStorage.setItem('ig_consent_timestamp', Date.now().toString());

                                                // Meta Consent
                                                localStorage.setItem('consent_status', 'all_accepted');
                                                localStorage.setItem('cookie_consent', JSON.stringify({
                                                    necessary: true,
                                                    analytics: true,
                                                    marketing: true,
                                                    timestamp: Date.now()
                                                }));
                                            }
                                        """)

                                        return True
                            except Exception as e:
                                logger.error(f"Fehler bei Accept-Button {accept_button}: {e}")
                                continue

                        return False
                # Narrowed from a bare `except:`, which would also swallow
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    continue

            logger.debug("No cookie consent page detected")
            return False

        except Exception as e:
            logger.error(f"Error handling cookie consent: {e}")
            return False

    @staticmethod
    def check_and_handle_consent(page: Page, platform: str = "instagram") -> bool:
        """Check and handle cookie consent for the given platform.

        Args:
            page: Playwright Page object.
            platform: Platform name (default: "instagram").

        Returns:
            bool: True if consent was handled, False otherwise.
        """
        if platform.lower() == "instagram":
            return CookieConsentHandler.handle_instagram_consent(page)
        else:
            logger.warning(f"No consent handler implemented for platform: {platform}")
            return False
|
||||
1119
browser/fingerprint_protection.py
Normale Datei
1119
browser/fingerprint_protection.py
Normale Datei
Datei-Diff unterdrückt, da er zu groß ist
Diff laden
521
browser/instagram_video_bypass.py
Normale Datei
521
browser/instagram_video_bypass.py
Normale Datei
@ -0,0 +1,521 @@
|
||||
# Instagram Video Bypass - Emergency Deep Level Fixes
|
||||
"""
|
||||
Tiefgreifende Instagram Video Bypass Techniken
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import random
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
logger = logging.getLogger("instagram_video_bypass")
|
||||
|
||||
class InstagramVideoBypass:
|
||||
"""Deep-level Instagram video bypass techniques"""
|
||||
|
||||
def __init__(self, page: Any):
|
||||
self.page = page
|
||||
|
||||
def apply_emergency_bypass(self) -> None:
    """Apply the emergency deep-level bypass.

    Registers four JavaScript init scripts on ``self.page`` (they run in every
    new document before page scripts):

    1. removal of known automation/webdriver signatures plus a fake
       ``window.chrome`` object and a fake ``navigator.plugins`` array,
    2. Instagram-specific video API spoofing (MSE codec support, a fake
       Widevine EME implementation, mediaSession, Picture-in-Picture,
       Web Audio latency values),
    3. fetch/XMLHttpRequest interception that injects browser-like headers
       on Instagram/Facebook CDN requests,
    4. timing normalization (performance.timing getters, jittered
       ``setTimeout``).

    Failures of individual scripts are logged and do not abort the rest.
    """

    # 1. Complete Automation Signature Removal
    # NOTE(review): `delete window.chrome.webdriver` below throws a TypeError
    # when `window.chrome` is undefined (the fallback chrome object is only
    # created later in section 4 of the same script) — confirm this ordering
    # is intended.
    automation_removal_script = """
    () => {
        // Remove ALL automation signatures

        // 1. Navigator properties cleanup
        delete navigator.__webdriver_script_fn;
        delete navigator.__fxdriver_evaluate;
        delete navigator.__driver_unwrapped;
        delete navigator.__webdriver_unwrapped;
        delete navigator.__driver_evaluate;
        delete navigator.__selenium_unwrapped;
        delete navigator.__fxdriver_unwrapped;

        // 2. Window properties cleanup
        delete window.navigator.webdriver;
        delete window.webdriver;
        delete window.chrome.webdriver;
        delete window.callPhantom;
        delete window._phantom;
        delete window.__nightmare;
        delete window._selenium;
        delete window.calledSelenium;
        delete window.$cdc_asdjflasutopfhvcZLmcfl_;
        delete window.$chrome_asyncScriptInfo;
        delete window.__webdriver_evaluate;
        delete window.__selenium_evaluate;
        delete window.__webdriver_script_function;
        delete window.__webdriver_script_func;
        delete window.__webdriver_script_fn;
        delete window.__fxdriver_evaluate;
        delete window.__driver_unwrapped;
        delete window.__webdriver_unwrapped;
        delete window.__driver_evaluate;
        delete window.__selenium_unwrapped;
        delete window.__fxdriver_unwrapped;

        // 3. Document cleanup
        delete document.__webdriver_script_fn;
        delete document.__selenium_unwrapped;
        delete document.__webdriver_unwrapped;
        delete document.__driver_evaluate;
        delete document.__webdriver_evaluate;
        delete document.__fxdriver_evaluate;
        delete document.__fxdriver_unwrapped;
        delete document.__driver_unwrapped;

        // 4. Chrome object enhancement
        if (!window.chrome) {
            window.chrome = {};
        }
        if (!window.chrome.runtime) {
            window.chrome.runtime = {
                onConnect: {addListener: function() {}},
                onMessage: {addListener: function() {}},
                connect: function() { return {postMessage: function() {}, onMessage: {addListener: function() {}}} }
            };
        }
        if (!window.chrome.app) {
            window.chrome.app = {
                isInstalled: false,
                InstallState: {DISABLED: 'disabled', INSTALLED: 'installed', NOT_INSTALLED: 'not_installed'},
                RunningState: {CANNOT_RUN: 'cannot_run', READY_TO_RUN: 'ready_to_run', RUNNING: 'running'}
            };
        }

        // 5. Plugin array enhancement
        const fakePlugins = [
            {name: 'Chrome PDF Plugin', filename: 'internal-pdf-viewer', description: 'Portable Document Format'},
            {name: 'Chrome PDF Viewer', filename: 'mhjfbmdgcfjbbpaeojofohoefgiehjai', description: 'Portable Document Format'},
            {name: 'Native Client', filename: 'internal-nacl-plugin', description: 'Native Client'}
        ];

        Object.defineProperty(navigator, 'plugins', {
            get: () => {
                const pluginArray = [...fakePlugins];
                pluginArray.length = fakePlugins.length;
                pluginArray.item = function(index) { return this[index] || null; };
                pluginArray.namedItem = function(name) { return this.find(p => p.name === name) || null; };
                pluginArray.refresh = function() {};
                return pluginArray;
            },
            configurable: true
        });
    }
    """

    # 2. Instagram-specific video API spoofing
    instagram_video_api_script = """
    () => {
        // Instagram Video API Deep Spoofing

        // 1. MSE (Media Source Extensions) proper support
        if (window.MediaSource) {
            const originalIsTypeSupported = MediaSource.isTypeSupported;
            MediaSource.isTypeSupported = function(type) {
                const supportedTypes = [
                    'video/mp4; codecs="avc1.42E01E"',
                    'video/mp4; codecs="avc1.4D401F"',
                    'video/mp4; codecs="avc1.640028"',
                    'video/webm; codecs="vp8"',
                    'video/webm; codecs="vp9"',
                    'audio/mp4; codecs="mp4a.40.2"',
                    'audio/webm; codecs="opus"'
                ];

                if (supportedTypes.includes(type)) {
                    return true;
                }
                return originalIsTypeSupported.call(this, type);
            };
        }

        // 2. Encrypted Media Extensions deep spoofing
        if (navigator.requestMediaKeySystemAccess) {
            const originalRequestAccess = navigator.requestMediaKeySystemAccess;
            navigator.requestMediaKeySystemAccess = function(keySystem, supportedConfigurations) {
                if (keySystem === 'com.widevine.alpha') {
                    return Promise.resolve({
                        keySystem: 'com.widevine.alpha',
                        getConfiguration: () => ({
                            initDataTypes: ['cenc', 'keyids', 'webm'],
                            audioCapabilities: [
                                {contentType: 'audio/mp4; codecs="mp4a.40.2"', robustness: 'SW_SECURE_CRYPTO'},
                                {contentType: 'audio/webm; codecs="opus"', robustness: 'SW_SECURE_CRYPTO'}
                            ],
                            videoCapabilities: [
                                {contentType: 'video/mp4; codecs="avc1.42E01E"', robustness: 'SW_SECURE_DECODE'},
                                {contentType: 'video/mp4; codecs="avc1.4D401F"', robustness: 'SW_SECURE_DECODE'},
                                {contentType: 'video/webm; codecs="vp9"', robustness: 'SW_SECURE_DECODE'}
                            ],
                            distinctiveIdentifier: 'optional',
                            persistentState: 'required',
                            sessionTypes: ['temporary', 'persistent-license']
                        }),
                        createMediaKeys: () => Promise.resolve({
                            createSession: (sessionType) => {
                                const session = {
                                    sessionId: 'session_' + Math.random().toString(36).substr(2, 9),
                                    expiration: NaN,
                                    closed: Promise.resolve(),
                                    keyStatuses: new Map(),
                                    addEventListener: function() {},
                                    removeEventListener: function() {},
                                    generateRequest: function(initDataType, initData) {
                                        setTimeout(() => {
                                            if (this.onmessage) {
                                                this.onmessage({
                                                    type: 'message',
                                                    message: new ArrayBuffer(8)
                                                });
                                            }
                                        }, 100);
                                        return Promise.resolve();
                                    },
                                    load: function() { return Promise.resolve(false); },
                                    update: function(response) {
                                        setTimeout(() => {
                                            if (this.onkeystatuseschange) {
                                                this.onkeystatuseschange();
                                            }
                                        }, 50);
                                        return Promise.resolve();
                                    },
                                    close: function() { return Promise.resolve(); },
                                    remove: function() { return Promise.resolve(); }
                                };

                                // Add event target methods
                                session.dispatchEvent = function() {};

                                return session;
                            },
                            setServerCertificate: () => Promise.resolve(true)
                        })
                    });
                }
                return originalRequestAccess.apply(this, arguments);
            };
        }

        // 3. Hardware media key handling
        if (navigator.mediaSession) {
            navigator.mediaSession.setActionHandler = function() {};
            navigator.mediaSession.playbackState = 'playing';
        } else {
            navigator.mediaSession = {
                metadata: null,
                playbackState: 'playing',
                setActionHandler: function() {},
                setPositionState: function() {}
            };
        }

        // 4. Picture-in-Picture API
        if (!document.pictureInPictureEnabled) {
            Object.defineProperty(document, 'pictureInPictureEnabled', {
                get: () => true,
                configurable: true
            });
        }

        // 5. Web Audio API enhancement for video
        if (window.AudioContext || window.webkitAudioContext) {
            const AudioCtx = window.AudioContext || window.webkitAudioContext;
            const originalAudioContext = AudioCtx;

            window.AudioContext = function(...args) {
                const ctx = new originalAudioContext(...args);

                // Override audio context properties for consistency
                Object.defineProperty(ctx, 'baseLatency', {
                    get: () => 0.01,
                    configurable: true
                });

                Object.defineProperty(ctx, 'outputLatency', {
                    get: () => 0.02,
                    configurable: true
                });

                return ctx;
            };

            // Copy static methods
            Object.keys(originalAudioContext).forEach(key => {
                window.AudioContext[key] = originalAudioContext[key];
            });
        }
    }
    """

    # 3. Network request interception for video
    network_interception_script = """
    () => {
        // Advanced network request interception for Instagram videos

        const originalFetch = window.fetch;
        window.fetch = function(input, init) {
            const url = typeof input === 'string' ? input : input.url;

            // Instagram video CDN requests
            if (url.includes('instagram.com') || url.includes('fbcdn.net') || url.includes('cdninstagram.com')) {
                const enhancedInit = {
                    ...init,
                    headers: {
                        ...init?.headers,
                        'Accept': '*/*',
                        'Accept-Encoding': 'identity;q=1, *;q=0',
                        'Accept-Language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
                        'Cache-Control': 'no-cache',
                        'DNT': '1',
                        'Origin': 'https://www.instagram.com',
                        'Pragma': 'no-cache',
                        'Referer': 'https://www.instagram.com/',
                        'Sec-Ch-Ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
                        'Sec-Ch-Ua-Mobile': '?0',
                        'Sec-Ch-Ua-Platform': '"Windows"',
                        'Sec-Fetch-Dest': 'video',
                        'Sec-Fetch-Mode': 'cors',
                        'Sec-Fetch-Site': 'cross-site',
                        'User-Agent': navigator.userAgent,
                        'X-Asbd-Id': '129477',
                        'X-Fb-Lsd': document.querySelector('[name="fb_dtsg"]')?.value || '',
                        'X-Instagram-Ajax': '1'
                    }
                };

                // Remove problematic headers that might indicate automation
                delete enhancedInit.headers['sec-ch-ua-arch'];
                delete enhancedInit.headers['sec-ch-ua-bitness'];
                delete enhancedInit.headers['sec-ch-ua-full-version'];
                delete enhancedInit.headers['sec-ch-ua-full-version-list'];
                delete enhancedInit.headers['sec-ch-ua-model'];
                delete enhancedInit.headers['sec-ch-ua-wow64'];

                return originalFetch.call(this, input, enhancedInit);
            }

            return originalFetch.apply(this, arguments);
        };

        // XMLHttpRequest interception
        const originalXHROpen = XMLHttpRequest.prototype.open;
        XMLHttpRequest.prototype.open = function(method, url, async, user, password) {
            this._url = url;
            return originalXHROpen.apply(this, arguments);
        };

        const originalXHRSend = XMLHttpRequest.prototype.send;
        XMLHttpRequest.prototype.send = function(body) {
            if (this._url && (this._url.includes('instagram.com') || this._url.includes('fbcdn.net'))) {
                // Add video-specific headers
                this.setRequestHeader('Accept', '*/*');
                this.setRequestHeader('Accept-Language', 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7');
                this.setRequestHeader('Cache-Control', 'no-cache');
                this.setRequestHeader('Pragma', 'no-cache');
                this.setRequestHeader('X-Requested-With', 'XMLHttpRequest');
            }
            return originalXHRSend.apply(this, arguments);
        };
    }
    """

    # 4. Timing and behavior normalization
    timing_script = """
    () => {
        // Normalize timing functions to avoid detection

        // Performance timing spoofing
        if (window.performance && window.performance.timing) {
            const timing = performance.timing;
            const now = Date.now();

            Object.defineProperty(performance.timing, 'navigationStart', {
                get: () => now - Math.floor(Math.random() * 1000) - 1000,
                configurable: true
            });

            Object.defineProperty(performance.timing, 'loadEventEnd', {
                get: () => now - Math.floor(Math.random() * 500),
                configurable: true
            });
        }

        // Date/Time consistency
        const originalDate = Date;
        const startTime = originalDate.now();

        Date.now = function() {
            return startTime + (originalDate.now() - startTime);
        };

        // Remove timing inconsistencies that indicate automation
        const originalSetTimeout = window.setTimeout;
        window.setTimeout = function(fn, delay, ...args) {
            // Add slight randomization to timing
            const randomDelay = delay + Math.floor(Math.random() * 10) - 5;
            return originalSetTimeout.call(this, fn, Math.max(0, randomDelay), ...args);
        };
    }
    """

    # Apply all scripts in sequence; each failure is logged but does not
    # prevent the remaining scripts from being registered.
    scripts = [
        automation_removal_script,
        instagram_video_api_script,
        network_interception_script,
        timing_script
    ]

    for i, script in enumerate(scripts):
        try:
            self.page.add_init_script(script)
            logger.info(f"Applied emergency bypass script {i+1}/4")
            time.sleep(0.1)  # Small delay between scripts
        except Exception as e:
            logger.error(f"Failed to apply emergency bypass script {i+1}: {e}")

    logger.info("Emergency Instagram video bypass applied")
|
||||
def inject_video_session_data(self) -> None:
    """Inject realistic video session data into the current page.

    Evaluates a script that seeds localStorage (viewing history, media
    session state), sessionStorage (a simulated Widevine DRM license cache),
    and copies a CSRF token from the DOM into sessionStorage if one is
    present. Unlike ``apply_emergency_bypass`` this runs against the
    already-loaded document (``page.evaluate``), not as an init script.
    Errors are logged, not raised.
    """

    session_script = """
    () => {
        // Inject realistic video session data

        // 1. Video viewing history
        localStorage.setItem('instagram_video_history', JSON.stringify({
            last_viewed: Date.now() - Math.floor(Math.random() * 86400000),
            view_count: Math.floor(Math.random() * 50) + 10,
            preferences: {
                autoplay: true,
                quality: 'auto',
                captions: false
            }
        }));

        // 2. Media session state
        localStorage.setItem('media_session_state', JSON.stringify({
            hasInteracted: true,
            lastInteraction: Date.now() - Math.floor(Math.random() * 3600000),
            playbackRate: 1,
            volume: 0.8
        }));

        // 3. DRM license cache simulation
        sessionStorage.setItem('drm_licenses', JSON.stringify({
            widevine: {
                version: '4.10.2449.0',
                lastUpdate: Date.now() - Math.floor(Math.random() * 604800000),
                status: 'valid'
            }
        }));

        // 4. Instagram session tokens
        const csrfToken = document.querySelector('[name="csrfmiddlewaretoken"]')?.value ||
                          document.querySelector('meta[name="csrf-token"]')?.content ||
                          'missing';

        if (csrfToken !== 'missing') {
            sessionStorage.setItem('csrf_token', csrfToken);
        }
    }
    """

    try:
        self.page.evaluate(session_script)
        logger.info("Video session data injected successfully")
    except Exception as e:
        logger.error(f"Failed to inject video session data: {e}")
def simulate_user_interaction(self) -> None:
    """Simulate authentic user interaction on the page.

    Performs three randomized mouse movements, one random scroll step, and a
    click in a safe corner of the body (away from any video element), with
    small randomized pauses in between. Any failure is logged, never raised.
    """
    try:
        # A few randomized cursor movements across the viewport.
        for _ in range(3):
            self.page.mouse.move(random.randint(100, 800), random.randint(100, 600))
            time.sleep(random.uniform(0.1, 0.3))

        # One random scroll step, up or down.
        self.page.mouse.wheel(0, random.randint(-200, 200))
        time.sleep(random.uniform(0.2, 0.5))

        # Click a harmless spot near the top-left of the body.
        safe_spot = {'x': random.randint(50, 100), 'y': random.randint(50, 100)}
        self.page.click('body', position=safe_spot)
        time.sleep(random.uniform(0.3, 0.7))

        logger.info("User interaction simulation completed")
    except Exception as e:
        logger.error(f"Failed to simulate user interaction: {e}")
||||
def check_video_errors(self) -> Dict[str, Any]:
    """Check for video errors and DRM status on the current page.

    Returns:
        Dict[str, Any]: ``{"diagnostics": {...}, "console_errors": [...],
        "timestamp": ...}`` on success, or an empty dict on failure.
        ``diagnostics`` reports DRM/MSE availability, per-codec support,
        the number of ``<video>`` elements, and visible error texts.

    NOTE(review): the console.error hook is installed immediately before the
    script returns, so ``console_errors`` is always an empty list at return
    time and the hook's captures are never read back — confirm whether a
    later collection step was intended.
    """

    try:
        result = self.page.evaluate("""
        () => {
            const errors = [];
            const diagnostics = {
                drm_support: false,
                media_source: false,
                codec_support: {},
                video_elements: 0,
                error_messages: []
            };

            // Check for DRM support
            if (navigator.requestMediaKeySystemAccess) {
                diagnostics.drm_support = true;
            }

            // Check Media Source Extensions
            if (window.MediaSource) {
                diagnostics.media_source = true;
                diagnostics.codec_support = {
                    h264: MediaSource.isTypeSupported('video/mp4; codecs="avc1.42E01E"'),
                    vp9: MediaSource.isTypeSupported('video/webm; codecs="vp9"'),
                    aac: MediaSource.isTypeSupported('audio/mp4; codecs="mp4a.40.2"')
                };
            }

            // Count video elements
            diagnostics.video_elements = document.querySelectorAll('video').length;

            // Look for error messages
            const errorElements = document.querySelectorAll('[class*="error"], [class*="fail"]');
            errorElements.forEach(el => {
                if (el.textContent.includes('Video') || el.textContent.includes('video')) {
                    diagnostics.error_messages.push(el.textContent.trim());
                }
            });

            // Console errors
            const consoleErrors = [];
            const originalConsoleError = console.error;
            console.error = function(...args) {
                consoleErrors.push(args.join(' '));
                originalConsoleError.apply(console, arguments);
            };

            return {
                diagnostics,
                console_errors: consoleErrors,
                timestamp: Date.now()
            };
        }
        """)

        logger.info(f"Video diagnostics: {result}")
        return result

    except Exception as e:
        logger.error(f"Video error check failed: {e}")
        return {}
||||
127
browser/playwright_extensions.py
Normale Datei
127
browser/playwright_extensions.py
Normale Datei
@ -0,0 +1,127 @@
|
||||
# browser/playwright_extensions.py
|
||||
|
||||
"""
|
||||
Erweiterungen für den PlaywrightManager - Fügt zusätzliche Funktionalität hinzu
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
from browser.fingerprint_protection import FingerprintProtection
|
||||
|
||||
logger = logging.getLogger("playwright_extensions")
|
||||
|
||||
class PlaywrightExtensions:
    """
    Extension class for the PlaywrightManager.
    Provides additional functionality without modifying the main class.
    """

    def __init__(self, playwright_manager):
        """
        Initialize the extension class.

        Args:
            playwright_manager: A PlaywrightManager instance
        """
        self.playwright_manager = playwright_manager
        # Created lazily by enable_enhanced_fingerprint_protection()
        self.fingerprint_protection = None
        self.enhanced_stealth_enabled = False

    def enable_enhanced_fingerprint_protection(self, config: Optional[Dict[str, Any]] = None) -> bool:
        """
        Enable the enhanced fingerprint protection.

        Args:
            config: Optional configuration overrides for the fingerprint protection

        Returns:
            bool: True on success, False on error
        """
        try:
            # Ensure the browser has been started first
            if not hasattr(self.playwright_manager, 'context') or self.playwright_manager.context is None:
                logger.warning("Browser muss zuerst gestartet werden, bevor der Fingerprint-Schutz aktiviert werden kann")
                return False

            # Use the base stealth configuration from the PlaywrightManager
            stealth_config = getattr(self.playwright_manager, 'stealth_config', {})

            # Merge in the custom configuration, if provided.
            # NOTE(review): update() mutates the manager's stealth_config dict
            # in place, so overrides persist on the manager — confirm intended.
            if config:
                stealth_config.update(config)

            # Initialize the fingerprint protection
            self.fingerprint_protection = FingerprintProtection(
                context=self.playwright_manager.context,
                stealth_config=stealth_config
            )

            # Apply the protection measures to the context
            self.fingerprint_protection.apply_to_context()

            # Update status
            self.enhanced_stealth_enabled = True

            logger.info("Erweiterter Fingerprint-Schutz aktiviert")
            return True

        except Exception as e:
            logger.error(f"Fehler beim Aktivieren des erweiterten Fingerprint-Schutzes: {e}")
            return False

    def rotate_fingerprint(self, noise_level: Optional[float] = None) -> bool:
        """
        Rotate the browser fingerprint.

        Args:
            noise_level: Optional new noise level (0.0-1.0)

        Returns:
            bool: True on success, False on error (or if protection is inactive)
        """
        if not self.enhanced_stealth_enabled or self.fingerprint_protection is None:
            logger.warning("Erweiterter Fingerprint-Schutz ist nicht aktiviert")
            return False

        return self.fingerprint_protection.rotate_fingerprint(noise_level)

    def get_fingerprint_status(self) -> Dict[str, Any]:
        """
        Return the current status of the fingerprint protection.

        Returns:
            Dict[str, Any]: Status of the fingerprint protection; a
            ``{"active": False, ...}`` placeholder when it is not enabled.
        """
        if not self.enhanced_stealth_enabled or self.fingerprint_protection is None:
            return {"active": False, "message": "Erweiterter Fingerprint-Schutz ist nicht aktiviert"}

        return self.fingerprint_protection.get_fingerprint_status()

    def hook_into_playwright_manager(self) -> None:
        """
        Attach the extension methods to the PlaywrightManager.

        Monkey-patches the manager: exposes the three extension methods on it
        and wraps its start() so that, after a successful start, the
        fingerprint protection (if enabled) is re-applied to the new context.
        """
        if not self.playwright_manager:
            logger.error("Kein PlaywrightManager zum Anhängen der Erweiterungen")
            return

        # Keep a reference to the original start method
        original_start = self.playwright_manager.start

        # Override start() so fingerprint protection is applied automatically
        def enhanced_start(*args, **kwargs):
            result = original_start(*args, **kwargs)

            # If start succeeded and enhanced protection is enabled,
            # apply the fingerprint protection to the new context
            if result and self.enhanced_stealth_enabled and self.fingerprint_protection:
                self.fingerprint_protection.set_context(self.playwright_manager.context)
                self.fingerprint_protection.apply_to_context()

            return result

        # Dynamically add the methods to the PlaywrightManager
        self.playwright_manager.enable_enhanced_fingerprint_protection = self.enable_enhanced_fingerprint_protection
        self.playwright_manager.rotate_fingerprint = self.rotate_fingerprint
        self.playwright_manager.get_fingerprint_status = self.get_fingerprint_status
        self.playwright_manager.start = enhanced_start
||||
906
browser/playwright_manager.py
Normale Datei
906
browser/playwright_manager.py
Normale Datei
@ -0,0 +1,906 @@
|
||||
"""
|
||||
Playwright Manager - Hauptklasse für die Browser-Steuerung mit Anti-Bot-Erkennung
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
import random
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Dict, Optional, List, Any, Tuple
|
||||
from playwright.sync_api import sync_playwright, Browser, Page, BrowserContext, ElementHandle
|
||||
|
||||
from domain.value_objects.browser_protection_style import BrowserProtectionStyle
|
||||
from infrastructure.services.browser_protection_service import BrowserProtectionService
|
||||
|
||||
# Konfiguriere Logger
|
||||
logger = logging.getLogger("playwright_manager")
|
||||
|
||||
class PlaywrightManager:
|
||||
"""
|
||||
Verwaltet Browser-Sitzungen mit Playwright, einschließlich Stealth-Modus und Proxy-Einstellungen.
|
||||
"""
|
||||
|
||||
def __init__(self,
             headless: bool = False,
             proxy: Optional[Dict[str, str]] = None,
             browser_type: str = "chromium",
             user_agent: Optional[str] = None,
             screenshots_dir: str = "screenshots",
             slowmo: int = 0,
             window_position: Optional[Tuple[int, int]] = None):
    """
    Initialize the PlaywrightManager.

    Args:
        headless: Whether to run the browser in headless mode
        proxy: Proxy configuration (e.g. {'server': 'http://myproxy.com:3128', 'username': 'user', 'password': 'pass'})
        browser_type: Which browser type to use ("chromium", "firefox", or "webkit")
        user_agent: Custom user agent string
        screenshots_dir: Directory for screenshots (created if missing)
        slowmo: Delay between actions in milliseconds (useful for debugging)
        window_position: Optional window position as a tuple (x, y)
    """
    self.headless = headless
    self.proxy = proxy
    self.browser_type = browser_type
    self.user_agent = user_agent
    self.screenshots_dir = screenshots_dir
    self.slowmo = slowmo
    self.window_position = window_position

    # Make sure the screenshots directory exists
    os.makedirs(self.screenshots_dir, exist_ok=True)

    # Playwright instances — all created lazily in start()
    self.playwright = None
    self.browser = None
    self.context = None
    self.page = None

    # Retry counters, keyed by action string (used by the retry logic,
    # e.g. fill_form_field builds keys like "fill_<selector>")
    self.retry_counter = {}

    # Load the stealth configuration (file-based with built-in defaults)
    self.stealth_config = self._load_stealth_config()

    # Browser protection service
    self.protection_service = BrowserProtectionService()
    self.protection_applied = False
    self.protection_style = None
||||
def _load_stealth_config(self) -> Dict[str, Any]:
    """Load stealth settings from ``config/stealth_config.json``.

    Falls back to built-in defaults when the file is missing or unreadable.

    Returns:
        Dict[str, Any]: The stealth configuration.
    """
    try:
        config_path = Path(__file__).parent.parent / "config" / "stealth_config.json"
        if config_path.exists():
            with open(config_path, 'r', encoding='utf-8') as f:
                return json.load(f)
    except Exception as e:
        logger.warning(f"Konnte Stealth-Konfiguration nicht laden: {e}")

    # Built-in defaults used when no config file could be loaded.
    return {
        "vendor": "Google Inc.",
        "platform": "Win32",
        "webdriver": False,
        "accept_language": "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
        "timezone_id": "Europe/Berlin",
        "fingerprint_noise": True,
        "device_scale_factor": 1.0,
    }
||||
def start(self) -> Page:
    """
    Start the Playwright session and return the browser page.

    Idempotent: if a page already exists it is returned unchanged. On any
    startup error the partially created resources are released via close()
    and the exception is re-raised.

    Returns:
        Page: The browser page
    """
    if self.page is not None:
        return self.page

    try:
        self.playwright = sync_playwright().start()

        # Select the browser type (default: chromium)
        if self.browser_type == "firefox":
            browser_instance = self.playwright.firefox
        elif self.browser_type == "webkit":
            browser_instance = self.playwright.webkit
        else:
            browser_instance = self.playwright.chromium

        # Browser start options
        browser_args = []

        if self.browser_type == "chromium":
            # Chromium-specific arguments against bot detection
            browser_args.extend([
                '--disable-blink-features=AutomationControlled',
                '--disable-features=IsolateOrigins,site-per-process',
                '--disable-site-isolation-trials',
            ])

        # Browser launch options
        launch_options = {
            "headless": self.headless,
            "args": browser_args,
            "slow_mo": self.slowmo
        }

        # Set the window position if given. NOTE: launch_options["args"]
        # references the same list object as browser_args, so extending it
        # here still affects the launch below.
        if self.window_position and not self.headless:
            x, y = self.window_position
            browser_args.extend([
                f'--window-position={x},{y}'
            ])

        # Launch the browser
        self.browser = browser_instance.launch(**launch_options)

        # Context options for stealth mode
        context_options = {
            "viewport": {"width": 1920, "height": 1080},
            "device_scale_factor": self.stealth_config.get("device_scale_factor", 1.0),
            "locale": "de-DE",
            "timezone_id": self.stealth_config.get("timezone_id", "Europe/Berlin"),
            "accept_downloads": True,
        }

        # Set the user agent
        if self.user_agent:
            context_options["user_agent"] = self.user_agent

        # Proxy settings, if present
        if self.proxy:
            context_options["proxy"] = self.proxy

        # Create the browser context
        self.context = self.browser.new_context(**context_options)

        # JavaScript fingerprinting protection (init scripts on the context)
        self._apply_stealth_scripts()

        # Create a new page
        self.page = self.context.new_page()

        # Event listener for console logs
        self.page.on("console", lambda msg: logger.debug(f"BROWSER CONSOLE: {msg.text}"))

        return self.page

    except Exception as e:
        logger.error(f"Fehler beim Starten des Browsers: {e}")
        self.close()
        raise
||||
def _apply_stealth_scripts(self):
    """Apply JavaScript init scripts that counter browser fingerprinting.

    Registers them on ``self.context`` so they run in every new document:
    webdriver-property override, navigator property spoofing (values taken
    from ``self.stealth_config``), a minimal ``window.chrome.runtime`` shim,
    a permissions.query patch, and — if ``fingerprint_noise`` is enabled —
    per-call noise on canvas ``toDataURL``.
    """
    # These scripts override properties that are used for bot detection.
    scripts = [
        # Override the webdriver property
        """
        () => {
            Object.defineProperty(navigator, 'webdriver', {
                get: () => false,
            });
        }
        """,

        # Override navigator properties (platform/vendor come from the
        # stealth config; note this is an f-string, hence the doubled braces)
        f"""
        () => {{
            const newProto = navigator.__proto__;
            delete newProto.webdriver;
            navigator.__proto__ = newProto;

            Object.defineProperty(navigator, 'platform', {{
                get: () => '{self.stealth_config.get("platform", "Win32")}'
            }});

            Object.defineProperty(navigator, 'languages', {{
                get: () => ['de-DE', 'de', 'en-US', 'en']
            }});

            Object.defineProperty(navigator, 'vendor', {{
                get: () => '{self.stealth_config.get("vendor", "Google Inc.")}'
            }});
        }}
        """,

        # Add chrome objects that are present in regular browsers
        """
        () => {
            // Fügt chrome.runtime hinzu, falls nicht vorhanden
            if (!window.chrome) {
                window.chrome = {};
            }
            if (!window.chrome.runtime) {
                window.chrome.runtime = {};
                window.chrome.runtime.sendMessage = function() {};
            }
        }
        """,

        # Plugin fingerprinting (permissions.query patch for notifications)
        """
        () => {
            const originalQuery = window.navigator.permissions.query;
            window.navigator.permissions.query = (parameters) => (
                parameters.name === 'notifications' ?
                Promise.resolve({ state: Notification.permission }) :
                originalQuery(parameters)
            );
        }
        """
    ]

    # If fingerprint noise is enabled, add random variations
    if self.stealth_config.get("fingerprint_noise", True):
        scripts.append("""
        () => {
            // Canvas-Fingerprinting leicht verändern
            const originalToDataURL = HTMLCanvasElement.prototype.toDataURL;
            HTMLCanvasElement.prototype.toDataURL = function(type) {
                const result = originalToDataURL.apply(this, arguments);

                if (this.width > 16 && this.height > 16) {
                    // Kleines Rauschen in Pixels einfügen
                    const context = this.getContext('2d');
                    const imageData = context.getImageData(0, 0, 2, 2);
                    const pixelArray = imageData.data;

                    // Ändere einen zufälligen Pixel leicht
                    const randomPixel = Math.floor(Math.random() * pixelArray.length / 4) * 4;
                    pixelArray[randomPixel] = (pixelArray[randomPixel] + Math.floor(Math.random() * 10)) % 256;

                    context.putImageData(imageData, 0, 0);
                }

                return result;
            };
        }
        """)

    # Apply the scripts to the browser context
    for script in scripts:
        self.context.add_init_script(script)
||||
def navigate_to(self, url: str, wait_until: str = "networkidle", timeout: int = 30000) -> bool:
    """
    Navigate to the given URL and wait until the page has loaded.

    Args:
        url: Target URL
        wait_until: When navigation counts as finished ("load", "domcontentloaded", "networkidle")
        timeout: Timeout in milliseconds

    Returns:
        bool: True if navigation succeeded, False otherwise (a screenshot
        is taken on failure)
    """
    # Boot the browser lazily on first use.
    if self.page is None:
        self.start()

    try:
        logger.info(f"Navigiere zu: {url}")
        self.page.goto(url, wait_until=wait_until, timeout=timeout)
    except Exception as e:
        logger.error(f"Fehler bei der Navigation zu {url}: {e}")
        self.take_screenshot(f"navigation_error_{int(time.time())}")
        return False

    return True
||||
def wait_for_selector(self, selector: str, timeout: int = 30000) -> Optional[ElementHandle]:
    """
    Wait for an element matching the given selector.

    Args:
        selector: CSS or XPath selector
        timeout: Timeout in milliseconds

    Returns:
        Optional[ElementHandle]: The element, or None if it was not found.

    Raises:
        ValueError: If the browser has not been started yet.
    """
    if self.page is None:
        raise ValueError("Browser nicht gestartet. Rufe zuerst start() auf.")

    try:
        return self.page.wait_for_selector(selector, timeout=timeout)
    except Exception as e:
        logger.warning(f"Element nicht gefunden: {selector} - {e}")
        return None
||||
def fill_form_field(self, selector: str, value: str, timeout: int = 5000) -> bool:
    """
    Fill a form field with human-like typing behaviour.

    Focuses the field, clears any existing content, then types the value
    character by character with randomized delays. On failure the call is
    retried via the bounded retry helper.

    Args:
        selector: Selector for the field
        value: Value to type into the field
        timeout: Timeout in milliseconds

    Returns:
        bool: True on success, False on failure
    """
    try:
        # Wait for the element
        element = self.wait_for_selector(selector, timeout)
        if not element:
            return False

        # Focus the element
        element.focus()
        time.sleep(random.uniform(0.1, 0.3))

        # Clear any pre-existing text (optional)
        current_value = element.evaluate("el => el.value")
        if current_value:
            element.fill("")
            time.sleep(random.uniform(0.1, 0.2))

        # Type the text character by character with human-like delays
        for char in value:
            element.type(char, delay=random.uniform(20, 100))
            time.sleep(random.uniform(0.01, 0.05))

        # NOTE(review): this logs the typed value verbatim — if the field
        # can be a password, consider redacting it.
        logger.info(f"Feld {selector} gefüllt mit: {value}")
        return True

    except Exception as e:
        logger.error(f"Fehler beim Ausfüllen von {selector}: {e}")
        # Recursion depth is bounded by the per-key counter in _retry_action.
        key = f"fill_{selector}"
        return self._retry_action(key, lambda: self.fill_form_field(selector, value, timeout))
|
||||
|
||||
def click_element(self, selector: str, force: bool = False, timeout: int = 5000) -> bool:
    """
    Click an element, with anti-bot bypass strategies as fallback.

    Scrolls the element into view, waits a randomized human-like delay,
    then clicks. If anything fails, control is handed to robust_click(),
    which tries several alternative click strategies.

    Args:
        selector: Selector for the element
        force: Use a force click (bypasses actionability checks)
        timeout: Timeout in milliseconds

    Returns:
        bool: True on success, False on failure
    """
    try:
        # Wait for the element
        element = self.wait_for_selector(selector, timeout)
        if not element:
            return False

        # Scroll to the element
        self.page.evaluate("element => element.scrollIntoView({ behavior: 'smooth', block: 'center' })", element)
        time.sleep(random.uniform(0.3, 0.7))

        # Human-like behaviour - slight delay before the click
        time.sleep(random.uniform(0.2, 0.5))

        # Click the element with a randomized press duration
        element.click(force=force, delay=random.uniform(20, 100))

        logger.info(f"Element geklickt: {selector}")
        return True

    except Exception as e:
        logger.error(f"Fehler beim Klicken auf {selector}: {e}")
        # On failure fall back to the robust multi-strategy click
        return self.robust_click(selector, timeout)
|
||||
|
||||
def robust_click(self, selector: str, timeout: int = 5000) -> bool:
    """
    Robust click helper that tries several anti-bot bypass strategies in
    order until one succeeds. Built specifically for Instagram's click
    interceptors.

    Args:
        selector: Selector for the element.
        timeout: Timeout in milliseconds.

    Returns:
        bool: True as soon as any strategy succeeds, False if all fail.
    """
    logger.info(f"Verwende robuste Click-Strategien für: {selector}")

    # Ordered from least to most invasive: plain click, force click,
    # synthetic JS events, overlay removal, keyboard activation and
    # finally a raw coordinate click.
    attempts = (
        lambda: self._strategy_standard_click(selector, timeout),
        lambda: self._strategy_force_click(selector, timeout),
        lambda: self._strategy_javascript_click(selector),
        lambda: self._strategy_remove_overlays_click(selector, timeout),
        lambda: self._strategy_focus_and_enter(selector),
        lambda: self._strategy_coordinate_click(selector),
    )

    for number, attempt in enumerate(attempts, start=1):
        try:
            logger.debug(f"Versuche Click-Strategie {number} für {selector}")
            if attempt():
                logger.info(f"Click erfolgreich mit Strategie {number} für {selector}")
                return True
        except Exception as exc:
            # A failing strategy is expected; just move on to the next one.
            logger.debug(f"Strategie {number} fehlgeschlagen: {exc}")

    logger.error(f"Alle Click-Strategien fehlgeschlagen für {selector}")
    return False
|
||||
|
||||
def _strategy_standard_click(self, selector: str, timeout: int) -> bool:
    """Strategy 1: plain Playwright click on the resolved element."""
    target = self.wait_for_selector(selector, timeout)
    if target is None:
        return False
    target.click()
    return True
|
||||
|
||||
def _strategy_force_click(self, selector: str, timeout: int) -> bool:
    """Strategy 2: force click to bypass event-blocking overlays."""
    target = self.wait_for_selector(selector, timeout)
    if target is None:
        return False
    target.click(force=True)
    return True
|
||||
|
||||
def _strategy_javascript_click(self, selector: str) -> bool:
    """
    Strategy 3: dispatch synthetic click events via JavaScript to bypass
    overlays.

    Fixed: the selector used to be interpolated into the script with an
    f-string, so selectors containing quotes or backslashes broke the
    script (and allowed script injection). It is now passed to
    ``page.evaluate`` as a proper argument.

    Args:
        selector: Selector for the element.

    Returns:
        bool: True if the element was found and the events dispatched,
        False otherwise.
    """
    script = """
    (sel) => {
        const element = document.querySelector(sel);
        if (!element) return false;

        // Create and dispatch a full MouseEvent click
        const event = new MouseEvent('click', {
            bubbles: true,
            cancelable: true,
            view: window,
            detail: 1,
            button: 0,
            buttons: 1
        });

        element.dispatchEvent(event);

        // Additionally: focus plus a generic click Event
        element.focus();

        const clickEvent = new Event('click', {
            bubbles: true,
            cancelable: true
        });
        element.dispatchEvent(clickEvent);

        return true;
    }
    """

    # The selector travels as an evaluate argument, never as script text.
    return self.page.evaluate(script, selector)
|
||||
|
||||
def _strategy_remove_overlays_click(self, selector: str, timeout: int) -> bool:
    """Strategy 4: strip click-intercepting overlays, then click normally."""
    # Drop overlays that swallow click events.
    self._remove_click_interceptors()

    # Give the DOM a moment to apply the mutations.
    time.sleep(0.2)

    # Now perform a plain click.
    target = self.wait_for_selector(selector, timeout)
    if target is None:
        return False
    target.click()
    return True
|
||||
|
||||
def _strategy_focus_and_enter(self, selector: str) -> bool:
    """
    Strategy 5: focus the element and activate it with the Enter key.

    Fixed: the selector used to be interpolated into the script with an
    f-string, so selectors containing quotes or backslashes broke the
    script (and allowed script injection). It is now passed to
    ``page.evaluate`` as a proper argument.

    Args:
        selector: Selector for the element.

    Returns:
        bool: True if the element was found and the key events
        dispatched, False otherwise.
    """
    script = """
    (sel) => {
        const element = document.querySelector(sel);
        if (!element) return false;

        // Focus the element and bring it into view
        element.focus();
        element.scrollIntoView({ block: 'center' });

        // Dispatch an Enter keydown event
        const enterEvent = new KeyboardEvent('keydown', {
            key: 'Enter',
            code: 'Enter',
            keyCode: 13,
            which: 13,
            bubbles: true,
            cancelable: true
        });

        element.dispatchEvent(enterEvent);

        // Matching keyup event
        const keyupEvent = new KeyboardEvent('keyup', {
            key: 'Enter',
            code: 'Enter',
            keyCode: 13,
            which: 13,
            bubbles: true,
            cancelable: true
        });

        element.dispatchEvent(keyupEvent);

        return true;
    }
    """

    # The selector travels as an evaluate argument, never as script text.
    return self.page.evaluate(script, selector)
|
||||
|
||||
def _strategy_coordinate_click(self, selector: str) -> bool:
    """Strategy 6: click the page at the element's centre coordinates."""
    try:
        locator = self.page.locator(selector).first
        if not locator.is_visible():
            return False

        # Resolve the element's bounding box; falsy means detached/hidden.
        box = locator.bounding_box()
        if not box:
            return False

        # Aim at the centre of the element.
        centre_x = box['x'] + box['width'] / 2
        centre_y = box['y'] + box['height'] / 2
        self.page.mouse.click(centre_x, centre_y)
        return True

    except Exception:
        # Any failure simply means this strategy did not work.
        return False
|
||||
|
||||
def _remove_click_interceptors(self) -> None:
    """
    Remove invisible overlays that intercept click events.

    Runs a script in the page that hides elements matching known
    Instagram interceptor selectors plus any empty, transparent
    absolutely/fixed-positioned elements with a high z-index, and logs
    how many were neutralised. Built specifically for Instagram's
    anti-bot measures.
    """
    # The script returns the number of neutralised interceptor elements.
    script = """
    (function() {
        console.log('AccountForger: Entferne Click-Interceptors...');

        // Liste typischer Instagram Click-Interceptor Klassen
        const interceptorSelectors = [
            // Instagram's bekannte Interceptor-Klassen
            '.x1lliihq.x1plvlek.xryxfnj',
            '.x1n2onr6.xzkaem6',
            'span[dir="auto"]',

            // Allgemeine Interceptor-Eigenschaften
            '[style*="pointer-events: all"]',
            '[style*="position: absolute"]',
            '[style*="z-index"]'
        ];

        let removedCount = 0;

        // Entferne Interceptor-Elemente
        interceptorSelectors.forEach(selector => {
            try {
                const elements = document.querySelectorAll(selector);
                elements.forEach(el => {
                    const style = window.getComputedStyle(el);

                    // Prüfe ob Element ein Click-Interceptor ist
                    const isInterceptor = (
                        style.pointerEvents === 'all' ||
                        (style.position === 'absolute' && parseInt(style.zIndex) > 1000) ||
                        (el.offsetWidth > 0 && el.offsetHeight > 0 &&
                         el.textContent.trim() === '' &&
                         style.backgroundColor === 'rgba(0, 0, 0, 0)')
                    );

                    if (isInterceptor) {
                        // Deaktiviere Pointer-Events
                        el.style.pointerEvents = 'none';
                        el.style.display = 'none';
                        el.style.visibility = 'hidden';
                        removedCount++;
                    }
                });
            } catch (e) {
                console.warn('Fehler beim Entfernen von Interceptors:', e);
            }
        });

        // Zusätzlich: Entferne alle unsichtbaren absolute Elemente die über anderen liegen
        const allElements = document.querySelectorAll('*');
        allElements.forEach(el => {
            const style = window.getComputedStyle(el);

            if (style.position === 'absolute' || style.position === 'fixed') {
                const rect = el.getBoundingClientRect();

                // Prüfe ob Element unsichtbar aber vorhanden ist
                if (rect.width > 0 && rect.height > 0 &&
                    style.opacity !== '0' &&
                    style.visibility !== 'hidden' &&
                    el.textContent.trim() === '' &&
                    parseInt(style.zIndex) > 10) {

                    el.style.pointerEvents = 'none';
                    removedCount++;
                }
            }
        });

        console.log(`AccountForger: ${removedCount} Click-Interceptors entfernt`);

        // Markiere dass Interceptors entfernt wurden
        window.__accountforge_interceptors_removed = true;

        return removedCount;
    })();
    """

    try:
        removed_count = self.page.evaluate(script)
        if removed_count > 0:
            logger.info(f"Click-Interceptors entfernt: {removed_count}")
        else:
            logger.debug("Keine Click-Interceptors gefunden")

    except Exception as e:
        # Best-effort: failing to strip overlays must not abort the caller.
        logger.error(f"Fehler beim Entfernen von Click-Interceptors: {e}")
|
||||
|
||||
def select_option(self, selector: str, value: str, timeout: int = 5000) -> bool:
    """
    Choose an option in a dropdown menu.

    Args:
        selector: Selector for the dropdown.
        value: Value or visible label of the option.
        timeout: Timeout in milliseconds.

    Returns:
        bool: True on success, False on failure.
    """
    try:
        # Make sure the dropdown is present before selecting.
        if self.wait_for_selector(selector, timeout) is None:
            return False

        self.page.select_option(selector, value=value)

        logger.info(f"Option '{value}' ausgewählt in {selector}")
        return True

    except Exception as exc:
        logger.error(f"Fehler bei der Auswahl von '{value}' in {selector}: {exc}")
        # Hand the call over to the bounded retry helper.
        retry_key = f"select_{selector}"
        return self._retry_action(retry_key, lambda: self.select_option(selector, value, timeout))
|
||||
|
||||
def is_element_visible(self, selector: str, timeout: int = 5000) -> bool:
    """
    Check whether an element is (or becomes) visible.

    Args:
        selector: Selector for the element
        timeout: Timeout in milliseconds

    Returns:
        bool: True if the element is visible within the timeout,
        False otherwise.
    """
    try:
        element = self.page.wait_for_selector(selector, timeout=timeout, state="visible")
        return element is not None
    except Exception:
        # Fixed: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt. A timeout or selector error
        # simply means "not visible".
        return False
|
||||
|
||||
def take_screenshot(self, name: Optional[str] = None) -> str:
    """
    Create a full-page screenshot of the current page.

    Args:
        name: Base name for the screenshot file (without extension);
            a Unix timestamp is always appended to keep filenames unique.

    Returns:
        str: Path of the created screenshot file.

    Raises:
        ValueError: If the browser has not been started yet.
    """
    if self.page is None:
        raise ValueError("Browser nicht gestartet. Rufe zuerst start() auf.")

    # Timestamp keeps successive screenshots from overwriting each other.
    timestamp = int(time.time())
    filename = f"{name}_{timestamp}.png" if name else f"screenshot_{timestamp}.png"
    path = os.path.join(self.screenshots_dir, filename)

    self.page.screenshot(path=path, full_page=True)
    logger.info(f"Screenshot erstellt: {path}")
    return path
|
||||
|
||||
def _retry_action(self, key: str, action_func, max_retries: int = 3) -> bool:
    """
    Retry an action after a failure, with a per-key attempt budget.

    Args:
        key: Unique key identifying the action
        action_func: Zero-argument callable to execute
        max_retries: Maximum number of retry attempts

    Returns:
        bool: Result of the action, or False once the budget for this
        key is exhausted.
    """
    if key not in self.retry_counter:
        self.retry_counter[key] = 0

    self.retry_counter[key] += 1

    if self.retry_counter[key] <= max_retries:
        logger.info(f"Wiederhole Aktion {key} (Versuch {self.retry_counter[key]} von {max_retries})")
        # Small randomized back-off before retrying.
        time.sleep(random.uniform(0.5, 1.0))
        result = action_func()
        if result:
            # Fixed: reset the counter on success; previously it was only
            # reset after exhaustion, so later failures for the same key
            # silently got a shrunken retry budget.
            self.retry_counter[key] = 0
        return result
    else:
        logger.warning(f"Maximale Anzahl von Wiederholungen für {key} erreicht")
        # Reset so a future call for the same key starts fresh.
        self.retry_counter[key] = 0
        return False
|
||||
|
||||
def apply_protection(self, protection_style: Optional[BrowserProtectionStyle] = None) -> None:
    """
    Apply browser protection to prevent accidental user interaction.

    Args:
        protection_style: Configuration of the protection style; a
            default-constructed BrowserProtectionStyle is used if None.

    Raises:
        ValueError: If the browser has not been started yet.
    """
    if self.page is None:
        raise ValueError("Browser nicht gestartet. Rufe zuerst start() auf.")

    if protection_style is None:
        protection_style = BrowserProtectionStyle()

    # Remember the style so listeners can re-apply it after navigation.
    self.protection_style = protection_style

    # Apply the protection to the current page.
    self.protection_service.protect_browser(self.page, protection_style)
    self.protection_applied = True

    # Register handlers that re-apply protection on page changes.
    self._setup_protection_listeners()

    logger.info(f"Browser-Schutz angewendet mit Level: {protection_style.level.value}")
|
||||
|
||||
def _setup_protection_listeners(self) -> None:
    """Install event listeners that re-apply protection after page changes."""
    if self.page is None:
        return

    # Re-apply protection after a navigation (page change).
    def on_navigation():
        if self.protection_applied and self.protection_style:
            # Wait briefly until the new page is loaded.
            self.page.wait_for_load_state("domcontentloaded")
            # Re-apply the protection.
            self.protection_service.protect_browser(self.page, self.protection_style)
            logger.debug("Browser-Schutz nach Navigation neu angewendet")

    # Only react to navigations of the main frame.
    self.page.on("framenavigated", lambda frame: on_navigation() if frame == self.page.main_frame else None)

    # Additionally: re-apply the shield when DOM mutations remove it.
    # NOTE(review): the script relies on eval() of a snippet stored in
    # window.__accountforge_protection — presumably set by the protection
    # service; confirm against BrowserProtectionService.
    self.context.add_init_script("""
    // Überwache DOM-Änderungen und wende Schutz neu an wenn nötig
    const observer = new MutationObserver(() => {
        const shield = document.getElementById('accountforge-shield');
        if (!shield && window.__accountforge_protection) {
            // Schutz wurde entfernt, wende neu an
            setTimeout(() => {
                if (!document.getElementById('accountforge-shield')) {
                    eval(window.__accountforge_protection);
                }
            }, 100);
        }
    });
    observer.observe(document.body, { childList: true, subtree: true });
    """)
|
||||
|
||||
def remove_protection(self) -> None:
    """Deactivate the browser protection, if any is currently active."""
    # Nothing to do without a page or without active protection.
    if self.page is not None and self.protection_applied:
        self.protection_service.remove_protection(self.page)
        self.protection_applied = False
        self.protection_style = None
        logger.info("Browser-Schutz entfernt")
|
||||
|
||||
def close(self):
    """
    Close the browser session and release all Playwright resources.

    Closes page, context, browser and the Playwright driver in that
    order; every step is individually guarded so one failure does not
    prevent the remaining resources from being released.
    """
    try:
        # Remove protection before shutting down.
        if self.protection_applied:
            self.remove_protection()

        # Close page first, then context, then browser, then Playwright.
        if self.page:
            try:
                self.page.close()
            except Exception as e:
                logger.warning(f"Fehler beim Schließen der Page: {e}")
            self.page = None

        if self.context:
            try:
                self.context.close()
            except Exception as e:
                logger.warning(f"Fehler beim Schließen des Context: {e}")
            self.context = None

        if self.browser:
            try:
                self.browser.close()
            except Exception as e:
                logger.warning(f"Fehler beim Schließen des Browsers: {e}")
            self.browser = None

        # Stop Playwright, retrying once after a short grace period.
        if self.playwright:
            try:
                self.playwright.stop()
            except Exception as e:
                logger.warning(f"Fehler beim Stoppen von Playwright: {e}")
                try:
                    # Fixed: dropped the redundant function-local
                    # ``import time`` — the module-level import is already
                    # in scope (it is used elsewhere in this class).
                    time.sleep(0.5)  # brief wait before the forced retry
                    self.playwright.stop()
                except Exception as e2:
                    logger.error(f"Force stop fehlgeschlagen: {e2}")
            self.playwright = None

        logger.info("Browser-Sitzung erfolgreich geschlossen")

    except Exception as e:
        logger.error(f"Fehler beim Schließen des Browsers: {e}")
        # Still null out the references so the object is not left half-open.
        self.page = None
        self.context = None
        self.browser = None
        self.playwright = None
|
||||
|
||||
def __enter__(self):
    """Context-manager entry: start the browser session and return self."""
    self.start()
    return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context-manager exit: always close the browser session."""
    self.close()
|
||||
|
||||
|
||||
# Example usage when the module is executed directly
if __name__ == "__main__":
    # Configure logging
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    # Example proxy configuration (without credentials)
    # NOTE(review): proxy_config is defined but never passed to
    # PlaywrightManager below — confirm whether it should be.
    proxy_config = {
        "server": "http://example-proxy.com:8080"
    }

    # Start the browser and navigate to a page
    with PlaywrightManager(headless=False) as manager:
        manager.navigate_to("https://www.instagram.com")
        time.sleep(5)  # short pause so the page stays visible
|
||||
216
browser/stealth_config.py
Normale Datei
216
browser/stealth_config.py
Normale Datei
@ -0,0 +1,216 @@
|
||||
"""
|
||||
Stealth-Konfiguration für Playwright - Anti-Bot-Erkennung
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import platform
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List
|
||||
|
||||
# Konfiguriere Logger
|
||||
logger = logging.getLogger("stealth_config")
|
||||
|
||||
class StealthConfig:
    """
    Configures anti-bot-detection settings for Playwright.

    Generates and manages the various fingerprint settings (user agent,
    platform, languages, hardware hints) and persists them as JSON in a
    configuration directory.
    """

    # Default desktop user agents (recent Chrome builds)
    CHROME_DESKTOP_AGENTS = [
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36"
    ]

    # Default mobile user agents
    MOBILE_AGENTS = [
        "Mozilla/5.0 (iPhone; CPU iPhone OS 16_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.0 Mobile/15E148 Safari/604.1",
        "Mozilla/5.0 (Linux; Android 13; SM-G991B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Mobile Safari/537.36",
        "Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) CriOS/135.0.0.0 Mobile/15E148 Safari/604.1"
    ]

    # Platform name -> navigator.platform value
    PLATFORMS = {
        "windows": "Win32",
        "macos": "MacIntel",
        "linux": "Linux x86_64",
        "android": "Linux armv8l",
        "ios": "iPhone"
    }

    # Browser Accept-Language header candidates
    LANGUAGES = [
        "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
        "de-DE,de;q=0.9,en;q=0.8",
        "de;q=0.9,en-US;q=0.8,en;q=0.7",
        "en-US,en;q=0.9,de;q=0.8"
    ]

    # Time zone used for Germany
    TIMEZONE_ID = "Europe/Berlin"

    def __init__(self, config_dir: str = None):
        """
        Initialise the stealth configuration.

        Args:
            config_dir: Directory for configuration files; defaults to a
                ``config`` directory next to this package.
        """
        self.config_dir = config_dir or os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "config")
        os.makedirs(self.config_dir, exist_ok=True)

        self.config_path = os.path.join(self.config_dir, "stealth_config.json")

        # Load custom user agents, if present
        self.user_agents = self._load_user_agents()

        # Load the persisted configuration or create a new one
        self.config = self._load_or_create_config()

    def _load_user_agents(self) -> Dict[str, List[str]]:
        """Load custom user agents from user_agents.json, falling back to the built-in defaults."""
        user_agents_path = os.path.join(self.config_dir, "user_agents.json")

        if os.path.exists(user_agents_path):
            try:
                with open(user_agents_path, 'r', encoding='utf-8') as f:
                    agents = json.load(f)

                # Only accept the file when both device categories exist
                if isinstance(agents, dict) and "desktop" in agents and "mobile" in agents:
                    return agents
            except Exception as e:
                logger.warning(f"Fehler beim Laden von user_agents.json: {e}")

        # Fall back to the default lists
        return {
            "desktop": self.CHROME_DESKTOP_AGENTS,
            "mobile": self.MOBILE_AGENTS
        }

    def _load_or_create_config(self) -> Dict[str, Any]:
        """Load the configuration, or generate and persist a new one if none exists."""
        if os.path.exists(self.config_path):
            try:
                with open(self.config_path, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                logger.info("Stealth-Konfiguration geladen")
                return config
            except Exception as e:
                logger.warning(f"Konnte Stealth-Konfiguration nicht laden: {e}")

        # Create a new configuration
        config = self.generate_config()
        self.save_config(config)
        return config

    def generate_config(self, device_type: str = "desktop") -> Dict[str, Any]:
        """
        Generate a new stealth configuration.

        Args:
            device_type: "desktop" or "mobile"

        Returns:
            Dict[str, Any]: The generated configuration
        """
        # Pick a platform and a user agent
        if device_type == "mobile":
            # NOTE(review): platform and user agent are drawn independently,
            # so an Android UA can be paired with an iPhone platform value —
            # confirm whether this mismatch is acceptable.
            platform_name = random.choice(["android", "ios"])
            user_agent = random.choice(self.user_agents["mobile"])
        else:
            # Choose a platform that matches the host system
            system = platform.system().lower()
            if system == "darwin":
                platform_name = "macos"
            elif system == "windows":
                platform_name = "windows"
            else:
                platform_name = "linux"

            user_agent = random.choice(self.user_agents["desktop"])

        platform_value = self.PLATFORMS.get(platform_name, "Win32")

        # Assemble the remaining fingerprint values
        config = {
            "user_agent": user_agent,
            "platform": platform_value,
            "vendor": "Google Inc." if "Chrome" in user_agent else "Apple Computer, Inc.",
            "accept_language": random.choice(self.LANGUAGES),
            "timezone_id": self.TIMEZONE_ID,
            # Non-default scale factor only occasionally, to look natural
            "device_scale_factor": random.choice([1.0, 1.25, 1.5, 2.0]) if random.random() < 0.3 else 1.0,
            "color_depth": random.choice([24, 30, 48]),
            "hardware_concurrency": random.choice([2, 4, 8, 12, 16]),
            "device_memory": random.choice([2, 4, 8, 16]),
            "webdriver": False,
            "fingerprint_noise": True,
            "device_type": device_type
        }

        return config

    def save_config(self, config: Dict[str, Any]) -> None:
        """
        Persist the configuration to disk.

        Args:
            config: The configuration to save
        """
        try:
            with open(self.config_path, 'w', encoding='utf-8') as f:
                json.dump(config, f, indent=2)
            logger.info(f"Stealth-Konfiguration gespeichert in: {self.config_path}")
        except Exception as e:
            logger.error(f"Fehler beim Speichern der Stealth-Konfiguration: {e}")

    def get_config(self) -> Dict[str, Any]:
        """Return the currently active configuration."""
        return self.config

    def rotate_config(self, device_type: str = None) -> Dict[str, Any]:
        """
        Generate a new configuration and persist it.

        Args:
            device_type: "desktop" or "mobile", or None to keep the
                current device type

        Returns:
            Dict[str, Any]: The new configuration
        """
        if device_type is None:
            device_type = self.config.get("device_type", "desktop")

        self.config = self.generate_config(device_type)
        self.save_config(self.config)
        return self.config

    def get_user_agent(self) -> str:
        """Return the user agent from the current configuration."""
        return self.config.get("user_agent", self.CHROME_DESKTOP_AGENTS[0])
|
||||
|
||||
|
||||
# Example usage when the module is executed directly
if __name__ == "__main__":
    # Configure logging
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    # Demonstrate configuration creation and rotation
    stealth = StealthConfig()

    print("Aktuelle Konfiguration:")
    print(json.dumps(stealth.get_config(), indent=2))

    print("\nNeue Desktop-Konfiguration:")
    desktop_config = stealth.rotate_config("desktop")
    print(json.dumps(desktop_config, indent=2))

    print("\nNeue Mobile-Konfiguration:")
    mobile_config = stealth.rotate_config("mobile")
    print(json.dumps(mobile_config, indent=2))
|
||||
318
browser/video_stealth_enhancement.py
Normale Datei
318
browser/video_stealth_enhancement.py
Normale Datei
@ -0,0 +1,318 @@
|
||||
# Video stealth enhancement module
"""
Extended video-specific stealth measures for Instagram DRM protection.
"""

import logging
from typing import Any, Dict, Optional

# Module logger
logger = logging.getLogger("video_stealth_enhancement")
|
||||
|
||||
class VideoStealthEnhancement:
|
||||
"""Video-spezifische Anti-Detection und DRM-Schutz"""
|
||||
|
||||
def __init__(self, context: Any):
|
||||
self.context = context
|
||||
|
||||
def apply_video_stealth(self) -> None:
|
||||
"""Wendet erweiterte Video-Stealth-Maßnahmen an"""
|
||||
|
||||
# 1. DRM und Widevine Capability Spoofing
|
||||
drm_script = """
|
||||
() => {
|
||||
// Enhanced Widevine DRM Support
|
||||
if (!navigator.requestMediaKeySystemAccess) {
|
||||
navigator.requestMediaKeySystemAccess = function(keySystem, supportedConfigurations) {
|
||||
if (keySystem === 'com.widevine.alpha') {
|
||||
return Promise.resolve({
|
||||
keySystem: 'com.widevine.alpha',
|
||||
getConfiguration: () => ({
|
||||
initDataTypes: ['cenc'],
|
||||
audioCapabilities: [{contentType: 'audio/mp4; codecs="mp4a.40.2"'}],
|
||||
videoCapabilities: [{contentType: 'video/mp4; codecs="avc1.42E01E"'}],
|
||||
distinctiveIdentifier: 'optional',
|
||||
persistentState: 'optional'
|
||||
}),
|
||||
createMediaKeys: () => Promise.resolve({
|
||||
createSession: () => ({
|
||||
addEventListener: () => {},
|
||||
generateRequest: () => Promise.resolve(),
|
||||
update: () => Promise.resolve(),
|
||||
close: () => Promise.resolve()
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
return Promise.reject(new Error('KeySystem not supported'));
|
||||
};
|
||||
}
|
||||
|
||||
// EME (Encrypted Media Extensions) Support
|
||||
Object.defineProperty(HTMLMediaElement.prototype, 'canPlayType', {
|
||||
value: function(type) {
|
||||
const supportedTypes = {
|
||||
'video/mp4; codecs="avc1.42E01E"': 'probably',
|
||||
'video/mp4; codecs="avc1.4D4015"': 'probably',
|
||||
'video/webm; codecs="vp8"': 'probably',
|
||||
'video/webm; codecs="vp9"': 'probably',
|
||||
'audio/mp4; codecs="mp4a.40.2"': 'probably',
|
||||
'audio/webm; codecs="opus"': 'probably'
|
||||
};
|
||||
return supportedTypes[type] || 'maybe';
|
||||
}
|
||||
});
|
||||
|
||||
// Enhanced Media Capabilities
|
||||
if (!navigator.mediaCapabilities) {
|
||||
navigator.mediaCapabilities = {
|
||||
decodingInfo: function(config) {
|
||||
return Promise.resolve({
|
||||
supported: true,
|
||||
smooth: true,
|
||||
powerEfficient: true
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# 2. Video Element Enhancement
|
||||
video_element_script = """
|
||||
() => {
|
||||
// Enhanced Video Element Support
|
||||
const originalCreateElement = document.createElement;
|
||||
document.createElement = function(tagName) {
|
||||
const element = originalCreateElement.call(this, tagName);
|
||||
|
||||
if (tagName.toLowerCase() === 'video') {
|
||||
// Override video properties for better compatibility
|
||||
Object.defineProperty(element, 'webkitDisplayingFullscreen', {
|
||||
get: () => false,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(element, 'webkitSupportsFullscreen', {
|
||||
get: () => true,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(element, 'webkitDecodedFrameCount', {
|
||||
get: () => Math.floor(Math.random() * 1000) + 100,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(element, 'webkitDroppedFrameCount', {
|
||||
get: () => Math.floor(Math.random() * 10),
|
||||
configurable: true
|
||||
});
|
||||
|
||||
// Enhanced autoplay support
|
||||
Object.defineProperty(element, 'autoplay', {
|
||||
get: function() { return this._autoplay || false; },
|
||||
set: function(value) { this._autoplay = value; },
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
|
||||
return element;
|
||||
};
|
||||
|
||||
// User Activation API (required for autoplay)
|
||||
if (!navigator.userActivation) {
|
||||
Object.defineProperty(navigator, 'userActivation', {
|
||||
get: () => ({
|
||||
hasBeenActive: true,
|
||||
isActive: true
|
||||
}),
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# 3. Enhanced Media Devices
|
||||
media_devices_script = """
|
||||
() => {
|
||||
// Enhanced MediaDevices for Instagram
|
||||
if (navigator.mediaDevices) {
|
||||
const originalEnumerateDevices = navigator.mediaDevices.enumerateDevices;
|
||||
navigator.mediaDevices.enumerateDevices = function() {
|
||||
return Promise.resolve([
|
||||
{
|
||||
deviceId: 'default',
|
||||
kind: 'audioinput',
|
||||
label: 'Default - Mikrofon (Realtek High Definition Audio)',
|
||||
groupId: 'group_audio_input'
|
||||
},
|
||||
{
|
||||
deviceId: 'communications',
|
||||
kind: 'audioinput',
|
||||
label: 'Kommunikation - Mikrofon (Realtek High Definition Audio)',
|
||||
groupId: 'group_audio_input'
|
||||
},
|
||||
{
|
||||
deviceId: 'default',
|
||||
kind: 'audiooutput',
|
||||
label: 'Standard - Lautsprecher (Realtek High Definition Audio)',
|
||||
groupId: 'group_audio_output'
|
||||
},
|
||||
{
|
||||
deviceId: 'communications',
|
||||
kind: 'audiooutput',
|
||||
label: 'Kommunikation - Lautsprecher (Realtek High Definition Audio)',
|
||||
groupId: 'group_audio_output'
|
||||
},
|
||||
{
|
||||
deviceId: 'video_device_1',
|
||||
kind: 'videoinput',
|
||||
label: 'HD-Webcam (USB)',
|
||||
groupId: 'group_video_input'
|
||||
}
|
||||
]);
|
||||
};
|
||||
|
||||
// Enhanced getUserMedia support
|
||||
if (!navigator.mediaDevices.getUserMedia) {
|
||||
navigator.mediaDevices.getUserMedia = function(constraints) {
|
||||
return Promise.resolve({
|
||||
getTracks: () => [],
|
||||
getAudioTracks: () => [],
|
||||
getVideoTracks: () => [],
|
||||
addTrack: () => {},
|
||||
removeTrack: () => {},
|
||||
addEventListener: () => {},
|
||||
removeEventListener: () => {}
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# 4. Instagram-spezifische Video Fixes
|
||||
instagram_video_script = """
|
||||
() => {
|
||||
// Instagram-specific video enhancements
|
||||
|
||||
// Simulate proper video loading behavior
|
||||
const originalFetch = window.fetch;
|
||||
window.fetch = function(input, init) {
|
||||
const url = typeof input === 'string' ? input : input.url;
|
||||
|
||||
// Enhance video CDN requests with proper headers
|
||||
if (url.includes('instagram.com') && (url.includes('.mp4') || url.includes('video'))) {
|
||||
const enhancedInit = {
|
||||
...init,
|
||||
headers: {
|
||||
...init?.headers,
|
||||
'Accept': 'video/webm,video/ogg,video/*;q=0.9,application/ogg;q=0.7,audio/*;q=0.6,*/*;q=0.5',
|
||||
'Accept-Encoding': 'identity;q=1, *;q=0',
|
||||
'Accept-Language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Pragma': 'no-cache',
|
||||
'Range': 'bytes=0-',
|
||||
'Sec-Fetch-Dest': 'video',
|
||||
'Sec-Fetch-Mode': 'no-cors',
|
||||
'Sec-Fetch-Site': 'cross-site'
|
||||
}
|
||||
};
|
||||
return originalFetch.call(this, input, enhancedInit);
|
||||
}
|
||||
|
||||
return originalFetch.apply(this, arguments);
|
||||
};
|
||||
|
||||
// Override video error handling
|
||||
const originalAddEventListener = HTMLVideoElement.prototype.addEventListener;
|
||||
HTMLVideoElement.prototype.addEventListener = function(type, listener, options) {
|
||||
if (type === 'error' || type === 'abort') {
|
||||
// Wrap error listener to prevent video error displays
|
||||
const wrappedListener = function(event) {
|
||||
console.debug('AccountForger: Video event intercepted:', type);
|
||||
// Prevent error propagation for DRM-related issues
|
||||
if (event.target && event.target.error && event.target.error.code === 3) {
|
||||
event.stopPropagation();
|
||||
event.preventDefault();
|
||||
return false;
|
||||
}
|
||||
return listener.call(this, event);
|
||||
};
|
||||
return originalAddEventListener.call(this, type, wrappedListener, options);
|
||||
}
|
||||
return originalAddEventListener.call(this, type, listener, options);
|
||||
};
|
||||
|
||||
// Simulate proper video metrics
|
||||
Object.defineProperty(HTMLVideoElement.prototype, 'buffered', {
|
||||
get: function() {
|
||||
return {
|
||||
length: 1,
|
||||
start: () => 0,
|
||||
end: () => this.duration || 30
|
||||
};
|
||||
},
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
"""
|
||||
|
||||
# Alle Skripte anwenden
|
||||
scripts = [drm_script, video_element_script, media_devices_script, instagram_video_script]
|
||||
|
||||
for script in scripts:
|
||||
try:
|
||||
self.context.add_init_script(script)
|
||||
logger.debug("Video stealth script applied successfully")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to apply video stealth script: {e}")
|
||||
|
||||
logger.info("Video stealth enhancement applied - DRM and Instagram compatibility enabled")
|
||||
|
||||
def validate_video_capabilities(self, page: Any) -> Dict[str, bool]:
    """Validate the browser's video playback capabilities.

    Runs a JavaScript probe inside the given page and reports which
    video-related features are detectable there.

    Args:
        page: Browser page object exposing ``evaluate`` that executes a
            JS function and returns its result as a Python dict
            # assumes a Playwright-style page API — TODO confirm

    Returns:
        Mapping of capability name to a boolean flag; empty dict if the
        probe raises.
    """
    try:
        # Probe runs entirely in the page context; each flag is flipped
        # to True only when the corresponding browser API is present.
        result = page.evaluate("""
        () => {
            const results = {
                widevine_support: false,
                media_devices: false,
                video_codecs: false,
                user_activation: false,
                autoplay_policy: false
            };

            // Check Widevine support
            if (navigator.requestMediaKeySystemAccess) {
                results.widevine_support = true;
            }

            // Check MediaDevices
            if (navigator.mediaDevices && navigator.mediaDevices.enumerateDevices) {
                results.media_devices = true;
            }

            // Check Video Codecs
            const video = document.createElement('video');
            if (video.canPlayType('video/mp4; codecs="avc1.42E01E"') === 'probably') {
                results.video_codecs = true;
            }

            // Check User Activation
            if (navigator.userActivation && navigator.userActivation.hasBeenActive) {
                results.user_activation = true;
            }

            // Check Autoplay Policy
            results.autoplay_policy = true; // Always report as supported

            return results;
        }
        """)

        logger.info(f"Video capabilities validation: {result}")
        return result

    except Exception as e:
        # Best-effort probe: callers treat an empty dict as "unknown".
        logger.error(f"Video capabilities validation failed: {e}")
        return {}
|
||||
154
check_rotation_system.py
Normale Datei
154
check_rotation_system.py
Normale Datei
@ -0,0 +1,154 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Quick check script to verify method rotation system status.
|
||||
Run this to ensure everything is working before starting main.py
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add project root to path
|
||||
project_root = Path(__file__).parent
|
||||
sys.path.insert(0, str(project_root))
|
||||
|
||||
def check_imports():
    """Confirm that every rotation-system import resolves.

    Returns True when all three layers (domain, application, controller)
    import cleanly; prints a diagnostic and returns False otherwise.
    """
    print("🔍 Checking imports...")

    # Each layer is probed individually so the failure message pinpoints
    # exactly which one is broken.
    try:
        from domain.entities.method_rotation import MethodStrategy, RotationSession
    except Exception as exc:
        print(f"❌ Domain entities: {exc}")
        return False
    print("✅ Domain entities: OK")

    try:
        from application.use_cases.method_rotation_use_case import MethodRotationUseCase
    except Exception as exc:
        print(f"❌ Use cases: {exc}")
        return False
    print("✅ Use cases: OK")

    try:
        from controllers.platform_controllers.method_rotation_mixin import MethodRotationMixin
    except Exception as exc:
        print(f"❌ Controller mixin: {exc}")
        return False
    print("✅ Controller mixin: OK")

    return True
|
||||
|
||||
def check_database():
    """Verify the accounts database exists and contains the rotation tables.

    Returns True only when the SQLite file is present and all three
    rotation tables can be found in ``sqlite_master``.
    """
    print("\n🗄️ Checking database...")

    db_path = project_root / "database" / "accounts.db"
    if not db_path.exists():
        print(f"❌ Database not found: {db_path}")
        return False

    print(f"✅ Database found: {db_path}")

    try:
        import sqlite3

        connection = sqlite3.connect(str(db_path))
        cursor = connection.cursor()

        # Look for the three tables created by the rotation migration.
        cursor.execute(
            "SELECT name FROM sqlite_master "
            "WHERE type='table' AND ("
            "name = 'method_strategies' OR "
            "name = 'rotation_sessions' OR "
            "name = 'platform_method_states')"
        )
        found = [row[0] for row in cursor.fetchall()]
        connection.close()

        if len(found) >= 3:
            print(f"✅ Rotation tables found: {found}")
            return True

        print(f"⚠️ Missing rotation tables. Found: {found}")
        return False

    except Exception as exc:
        print(f"❌ Database check failed: {exc}")
        return False
|
||||
|
||||
def check_config():
    """Check whether the optional rotation configuration file is present.

    Always returns True: a missing file is not critical because the
    rotation system falls back to built-in defaults.
    """
    print("\n⚙️ Checking configuration...")

    config_path = project_root / "config" / "method_rotation_config.json"
    if config_path.exists():
        print("✅ Rotation config found")
    else:
        print("⚠️ Rotation config not found (will use defaults)")
    return True
|
||||
|
||||
def check_controllers():
    """Verify that the platform controllers can be imported.

    Returns True when both the base and the Instagram controller import
    without error, False (with a diagnostic) otherwise.
    """
    print("\n🎮 Checking controllers...")

    try:
        from controllers.platform_controllers.base_controller import BasePlatformController
        print("✅ Base controller: OK")

        from controllers.platform_controllers.instagram_controller import InstagramController
        print("✅ Instagram controller: OK")
    except Exception as exc:
        print(f"❌ Controller check failed: {exc}")
        return False

    return True
|
||||
|
||||
def main():
    """Run every status check and print a human-readable summary."""
    print("🔧 Method Rotation System - Status Check")
    print("=" * 50)

    probes = (
        ("Imports", check_imports),
        ("Database", check_database),
        ("Config", check_config),
        ("Controllers", check_controllers),
    )

    # A crashing check is reported but must not abort the remaining ones.
    all_good = True
    for label, probe in probes:
        try:
            if not probe():
                all_good = False
        except Exception as exc:
            print(f"❌ {label} check crashed: {exc}")
            all_good = False

    print("\n" + "=" * 50)
    summary = (
        "✅ Method rotation system is ready!",
        "🚀 You can safely start main.py",
        "\n💡 Expected behavior:",
        " - Account creation works as before",
        " - Additional rotation logs will appear",
        " - Automatic method switching on failures",
        " - Graceful fallback if any issues occur",
    ) if all_good else (
        "⚠️ Some issues detected, but main.py should still work",
        "🔄 Rotation system will fall back to original behavior",
        "\n🛠️ To fix issues:",
        " 1. Run: python3 run_migration.py",
        " 2. Check file permissions",
        " 3. Restart main.py",
    )
    for line in summary:
        print(line)

    for line in (
        "\n📝 To test rotation manually:",
        " - Create an account on any platform",
        " - Check logs for rotation messages",
        " - Simulate failures to see rotation in action",
    ):
        print(line)


if __name__ == "__main__":
    main()
|
||||
1
config/.hardware_id
Normale Datei
1
config/.hardware_id
Normale Datei
@ -0,0 +1 @@
|
||||
c4e65c9dfbe7593949576a1742d6c347a48307267ff2b73294b8c48409404639
|
||||
1
config/.machine_id
Normale Datei
1
config/.machine_id
Normale Datei
@ -0,0 +1 @@
|
||||
ae30d891-0b45-408e-8f47-75fada7cb094
|
||||
1
config/.session_data
Normale Datei
1
config/.session_data
Normale Datei
@ -0,0 +1 @@
|
||||
{"session_token": "b2f9c1be-e3d6-4eba-91c3-d4aa49bcbdb3", "license_key": "AF-F-202506-WY2J-ZZB9-7LZD", "activation_id": null, "timestamp": "2025-07-02T21:46:20.739060"}
|
||||
0
config/__init__.py
Normale Datei
0
config/__init__.py
Normale Datei
7
config/app_version.json
Normale Datei
7
config/app_version.json
Normale Datei
@ -0,0 +1,7 @@
|
||||
{
|
||||
"current_version": "1.0.0",
|
||||
"last_check": "2025-07-19T01:32:14.527285",
|
||||
"channel": "stable",
|
||||
"auto_check": true,
|
||||
"auto_download": false
|
||||
}
|
||||
0
config/browser_config.json
Normale Datei
0
config/browser_config.json
Normale Datei
6
config/email_config.json
Normale Datei
6
config/email_config.json
Normale Datei
@ -0,0 +1,6 @@
|
||||
{
|
||||
"imap_server": "imap.ionos.de",
|
||||
"imap_port": 993,
|
||||
"imap_user": "info@z5m7q9dk3ah2v1plx6ju.com",
|
||||
"imap_pass": "cz&ie.O9$!:!tYY@"
|
||||
}
|
||||
0
config/facebook_config.json
Normale Datei
0
config/facebook_config.json
Normale Datei
35
config/implementation_switch.py
Normale Datei
35
config/implementation_switch.py
Normale Datei
@ -0,0 +1,35 @@
|
||||
"""
|
||||
Einfacher Switch zwischen alter und neuer Implementation für schnelles Rollback
|
||||
"""
|
||||
|
||||
|
||||
class ImplementationSwitch:
    """Simple switch between the legacy and the refactored implementation.

    Allows a quick runtime rollback if the refactored code misbehaves.
    """

    # Enabled directly during the test phase.
    USE_REFACTORED_CODE = True

    @classmethod
    def rollback_to_legacy(cls):
        """Quick rollback to the legacy implementation if needed."""
        cls.USE_REFACTORED_CODE = False
        print("WARNUNG: Rollback zu Legacy-Implementation aktiviert!")

    @classmethod
    def use_refactored_code(cls):
        """Activate the refactored implementation."""
        cls.USE_REFACTORED_CODE = True
        print("INFO: Refaktorierte Implementation aktiviert")

    @classmethod
    def is_refactored_active(cls) -> bool:
        """Return True when the refactored implementation is active."""
        return cls.USE_REFACTORED_CODE

    @classmethod
    def get_status(cls) -> str:
        """Return a human-readable description of the current mode."""
        return (
            "Refaktorierte Implementation (NEU)"
            if cls.USE_REFACTORED_CODE
            else "Legacy Implementation (ALT)"
        )
|
||||
0
config/instagram_config.json
Normale Datei
0
config/instagram_config.json
Normale Datei
16
config/license.json
Normale Datei
16
config/license.json
Normale Datei
@ -0,0 +1,16 @@
|
||||
{
|
||||
"key": "AF-F-202506-WY2J-ZZB9-7LZD",
|
||||
"activation_date": "2025-07-01T23:44:57.221243",
|
||||
"expiry_date": "",
|
||||
"status": "active",
|
||||
"status_text": "Lizenz erfolgreich aktiviert",
|
||||
"features": [
|
||||
"account_creation",
|
||||
"basic_export"
|
||||
],
|
||||
"last_online_check": "2025-07-01T23:44:57.221243",
|
||||
"signature": "",
|
||||
"activation_id": null,
|
||||
"max_activations": 0,
|
||||
"max_users": 0
|
||||
}
|
||||
11
config/license_config.json
Normale Datei
11
config/license_config.json
Normale Datei
@ -0,0 +1,11 @@
|
||||
{
|
||||
"key": "",
|
||||
"status": "inactive",
|
||||
"hardware_id": "",
|
||||
"activation_date": null,
|
||||
"expiry_date": null,
|
||||
"features": [],
|
||||
"last_check": null,
|
||||
"session_ip_mode": "auto",
|
||||
"ip_fallback": "0.0.0.0"
|
||||
}
|
||||
296
config/method_rotation_config.json
Normale Datei
296
config/method_rotation_config.json
Normale Datei
@ -0,0 +1,296 @@
|
||||
{
|
||||
"platforms": {
|
||||
"instagram": {
|
||||
"methods": {
|
||||
"stealth_basic": {
|
||||
"priority": 8,
|
||||
"max_daily_attempts": 20,
|
||||
"cooldown_period": 300,
|
||||
"risk_level": "LOW",
|
||||
"configuration": {
|
||||
"enhanced_stealth": false,
|
||||
"user_agent_rotation": false,
|
||||
"fingerprint_complexity": "basic",
|
||||
"canvas_noise": false,
|
||||
"webrtc_protection": "basic"
|
||||
},
|
||||
"tags": ["basic", "fast", "low_detection"],
|
||||
"success_threshold": 0.7
|
||||
},
|
||||
"stealth_enhanced": {
|
||||
"priority": 7,
|
||||
"max_daily_attempts": 15,
|
||||
"cooldown_period": 400,
|
||||
"risk_level": "MEDIUM",
|
||||
"configuration": {
|
||||
"enhanced_stealth": true,
|
||||
"user_agent_rotation": true,
|
||||
"fingerprint_complexity": "enhanced",
|
||||
"canvas_noise": true,
|
||||
"webrtc_protection": "enhanced",
|
||||
"viewport_randomization": true,
|
||||
"screen_resolution_spoof": true
|
||||
},
|
||||
"tags": ["enhanced", "reliable", "medium_stealth"],
|
||||
"success_threshold": 0.6
|
||||
},
|
||||
"stealth_maximum": {
|
||||
"priority": 6,
|
||||
"max_daily_attempts": 10,
|
||||
"cooldown_period": 600,
|
||||
"risk_level": "HIGH",
|
||||
"configuration": {
|
||||
"enhanced_stealth": true,
|
||||
"user_agent_rotation": true,
|
||||
"fingerprint_complexity": "maximum",
|
||||
"canvas_noise": true,
|
||||
"webrtc_protection": "maximum",
|
||||
"viewport_randomization": true,
|
||||
"navigator_spoof": true,
|
||||
"timing_randomization": true,
|
||||
"memory_spoof": true,
|
||||
"hardware_spoof": true
|
||||
},
|
||||
"tags": ["maximum", "complex", "high_stealth"],
|
||||
"success_threshold": 0.5
|
||||
}
|
||||
},
|
||||
"rotation_policy": {
|
||||
"max_failures_before_rotation": 1,
|
||||
"rotation_cooldown": 30,
|
||||
"prefer_high_success_rate": true,
|
||||
"avoid_recently_failed": true,
|
||||
"smart_fallback": true,
|
||||
"emergency_mode_trigger_failures": 8,
|
||||
"instant_rotation_errors": ["browser_level_error", "css", "javascript", "parsing"]
|
||||
},
|
||||
"emergency_methods": ["email"],
|
||||
"daily_reset_hour": 0
|
||||
},
|
||||
"tiktok": {
|
||||
"methods": {
|
||||
"email": {
|
||||
"priority": 8,
|
||||
"max_daily_attempts": 25,
|
||||
"cooldown_period": 240,
|
||||
"risk_level": "LOW",
|
||||
"configuration": {
|
||||
"email_domain": "z5m7q9dk3ah2v1plx6ju.com",
|
||||
"require_phone_verification": false,
|
||||
"auto_verify_email": true,
|
||||
"email_verification_timeout": 180
|
||||
},
|
||||
"tags": ["primary", "reliable", "tiktok"],
|
||||
"success_threshold": 0.8
|
||||
},
|
||||
"phone": {
|
||||
"priority": 7,
|
||||
"max_daily_attempts": 15,
|
||||
"cooldown_period": 480,
|
||||
"risk_level": "MEDIUM",
|
||||
"configuration": {
|
||||
"require_email_backup": false,
|
||||
"phone_verification_timeout": 180,
|
||||
"country_codes": ["+1", "+44"],
|
||||
"fast_verification": true
|
||||
},
|
||||
"tags": ["secondary", "fast", "mobile"],
|
||||
"success_threshold": 0.6
|
||||
}
|
||||
},
|
||||
"rotation_policy": {
|
||||
"max_failures_before_rotation": 2,
|
||||
"rotation_cooldown": 45,
|
||||
"prefer_high_success_rate": true,
|
||||
"avoid_recently_failed": true,
|
||||
"smart_fallback": true,
|
||||
"emergency_mode_trigger_failures": 15
|
||||
},
|
||||
"emergency_methods": ["email"],
|
||||
"daily_reset_hour": 0
|
||||
},
|
||||
"x": {
|
||||
"methods": {
|
||||
"email": {
|
||||
"priority": 8,
|
||||
"max_daily_attempts": 15,
|
||||
"cooldown_period": 360,
|
||||
"risk_level": "LOW",
|
||||
"configuration": {
|
||||
"email_domain": "z5m7q9dk3ah2v1plx6ju.com",
|
||||
"require_phone_verification": true,
|
||||
"auto_verify_email": true,
|
||||
"email_verification_timeout": 300
|
||||
},
|
||||
"tags": ["primary", "stable", "twitter"],
|
||||
"success_threshold": 0.6
|
||||
},
|
||||
"phone": {
|
||||
"priority": 6,
|
||||
"max_daily_attempts": 8,
|
||||
"cooldown_period": 720,
|
||||
"risk_level": "MEDIUM",
|
||||
"configuration": {
|
||||
"require_email_backup": true,
|
||||
"phone_verification_timeout": 300,
|
||||
"country_codes": ["+1", "+44", "+49"],
|
||||
"strict_verification": true
|
||||
},
|
||||
"tags": ["secondary", "verification", "strict"],
|
||||
"success_threshold": 0.4
|
||||
}
|
||||
},
|
||||
"rotation_policy": {
|
||||
"max_failures_before_rotation": 1,
|
||||
"rotation_cooldown": 90,
|
||||
"prefer_high_success_rate": true,
|
||||
"avoid_recently_failed": true,
|
||||
"smart_fallback": true,
|
||||
"emergency_mode_trigger_failures": 8
|
||||
},
|
||||
"emergency_methods": ["email"],
|
||||
"daily_reset_hour": 0
|
||||
},
|
||||
"gmail": {
|
||||
"methods": {
|
||||
"standard_registration": {
|
||||
"priority": 9,
|
||||
"max_daily_attempts": 30,
|
||||
"cooldown_period": 180,
|
||||
"risk_level": "LOW",
|
||||
"configuration": {
|
||||
"recovery_email": false,
|
||||
"recovery_phone": false,
|
||||
"skip_phone_verification": true,
|
||||
"use_simple_captcha": true
|
||||
},
|
||||
"tags": ["primary", "google", "standard"],
|
||||
"success_threshold": 0.9
|
||||
},
|
||||
"recovery_registration": {
|
||||
"priority": 7,
|
||||
"max_daily_attempts": 10,
|
||||
"cooldown_period": 600,
|
||||
"risk_level": "MEDIUM",
|
||||
"configuration": {
|
||||
"recovery_email": true,
|
||||
"recovery_phone": false,
|
||||
"backup_recovery_method": true,
|
||||
"enhanced_security": true
|
||||
},
|
||||
"tags": ["secondary", "secure", "recovery"],
|
||||
"success_threshold": 0.7
|
||||
}
|
||||
},
|
||||
"rotation_policy": {
|
||||
"max_failures_before_rotation": 3,
|
||||
"rotation_cooldown": 30,
|
||||
"prefer_high_success_rate": true,
|
||||
"avoid_recently_failed": false,
|
||||
"smart_fallback": true,
|
||||
"emergency_mode_trigger_failures": 20
|
||||
},
|
||||
"emergency_methods": ["standard_registration"],
|
||||
"daily_reset_hour": 0
|
||||
}
|
||||
},
|
||||
"global_settings": {
|
||||
"rotation_strategies": {
|
||||
"adaptive": {
|
||||
"description": "Learn from success patterns and adapt method selection",
|
||||
"weight_success_rate": 0.4,
|
||||
"weight_priority": 0.3,
|
||||
"weight_recent_performance": 0.3
|
||||
},
|
||||
"sequential": {
|
||||
"description": "Try methods in order of priority",
|
||||
"strict_order": true,
|
||||
"skip_on_cooldown": true
|
||||
},
|
||||
"random": {
|
||||
"description": "Random method selection from available options",
|
||||
"weighted_by_priority": true,
|
||||
"exclude_high_risk": false
|
||||
},
|
||||
"smart": {
|
||||
"description": "AI-driven method selection with machine learning",
|
||||
"use_ml_predictions": true,
|
||||
"consider_time_patterns": true,
|
||||
"adapt_to_platform_changes": true
|
||||
}
|
||||
},
|
||||
"performance_tracking": {
|
||||
"success_rate_window_hours": 24,
|
||||
"min_attempts_for_reliability": 5,
|
||||
"performance_decay_days": 7,
|
||||
"auto_adjust_priorities": true
|
||||
},
|
||||
"emergency_mode": {
|
||||
"auto_enable_threshold": 0.2,
|
||||
"auto_disable_threshold": 0.6,
|
||||
"max_duration_hours": 24,
|
||||
"notification_enabled": true
|
||||
},
|
||||
"analytics": {
|
||||
"track_execution_time": true,
|
||||
"track_error_patterns": true,
|
||||
"generate_daily_reports": true,
|
||||
"retention_days": 90
|
||||
},
|
||||
"fallback_behavior": {
|
||||
"on_rotation_failure": "use_original_method",
|
||||
"on_all_methods_exhausted": "enable_emergency_mode",
|
||||
"max_rotation_attempts_per_session": 3,
|
||||
"fallback_to_legacy_on_error": true
|
||||
}
|
||||
},
|
||||
"method_definitions": {
|
||||
"stealth_basic": {
|
||||
"description": "Basic stealth mode with minimal anti-detection",
|
||||
"required_services": [],
|
||||
"typical_success_rate": 0.7,
|
||||
"average_completion_time": 120
|
||||
},
|
||||
"stealth_enhanced": {
|
||||
"description": "Enhanced stealth with fingerprint obfuscation",
|
||||
"required_services": ["fingerprint_service"],
|
||||
"typical_success_rate": 0.6,
|
||||
"average_completion_time": 150
|
||||
},
|
||||
"stealth_maximum": {
|
||||
"description": "Maximum stealth with full anti-detection suite",
|
||||
"required_services": ["fingerprint_service"],
|
||||
"typical_success_rate": 0.5,
|
||||
"average_completion_time": 180
|
||||
},
|
||||
"standard_registration": {
|
||||
"description": "Standard Google account registration",
|
||||
"required_services": [],
|
||||
"typical_success_rate": 0.9,
|
||||
"average_completion_time": 150
|
||||
},
|
||||
"recovery_registration": {
|
||||
"description": "Google account registration with recovery options",
|
||||
"required_services": ["recovery_service"],
|
||||
"typical_success_rate": 0.7,
|
||||
"average_completion_time": 200
|
||||
}
|
||||
},
|
||||
"risk_levels": {
|
||||
"LOW": {
|
||||
"description": "Stable methods with high success rates",
|
||||
"max_concurrent_attempts": 10,
|
||||
"recommended_cooldown": 300
|
||||
},
|
||||
"MEDIUM": {
|
||||
"description": "Moderately reliable methods",
|
||||
"max_concurrent_attempts": 5,
|
||||
"recommended_cooldown": 600
|
||||
},
|
||||
"HIGH": {
|
||||
"description": "Experimental or unreliable methods",
|
||||
"max_concurrent_attempts": 2,
|
||||
"recommended_cooldown": 1800
|
||||
}
|
||||
}
|
||||
}
|
||||
70
config/paths.py
Normale Datei
70
config/paths.py
Normale Datei
@ -0,0 +1,70 @@
|
||||
"""
|
||||
Path Configuration - Zentrale Pfadverwaltung für Clean Architecture
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class PathConfig:
    """Central path configuration for the clean-architecture layout.

    All paths are derived from BASE_DIR at class-definition time.
    """

    # Project base directory. PyInstaller one-file bundles extract to
    # sys._MEIPASS; otherwise we are two levels above this module.
    BASE_DIR = (
        sys._MEIPASS
        if hasattr(sys, '_MEIPASS')
        else os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    )

    # Top-level directories.
    DATABASE_DIR = os.path.join(BASE_DIR, "database")
    CONFIG_DIR = os.path.join(BASE_DIR, "config")
    RESOURCES_DIR = os.path.join(BASE_DIR, "resources")
    LOGS_DIR = os.path.join(BASE_DIR, "logs")

    # Database files.
    MAIN_DB = os.path.join(DATABASE_DIR, "accounts.db")
    SCHEMA_V1 = os.path.join(DATABASE_DIR, "schema.sql")
    SCHEMA_V2 = os.path.join(DATABASE_DIR, "schema_v2.sql")

    # Resource directories.
    ICONS_DIR = os.path.join(RESOURCES_DIR, "icons")
    THEMES_DIR = os.path.join(RESOURCES_DIR, "themes")

    # Log directories.
    SCREENSHOTS_DIR = os.path.join(LOGS_DIR, "screenshots")

    @classmethod
    def ensure_directories(cls):
        """Create every required directory if it does not exist yet."""
        for directory in (
            cls.DATABASE_DIR,
            cls.CONFIG_DIR,
            cls.RESOURCES_DIR,
            cls.LOGS_DIR,
            cls.ICONS_DIR,
            cls.THEMES_DIR,
            cls.SCREENSHOTS_DIR,
        ):
            os.makedirs(directory, exist_ok=True)

    @classmethod
    def get_icon_path(cls, icon_name: str) -> str:
        """Return the full path of an SVG icon.

        Args:
            icon_name: Icon base name (without the ``.svg`` extension).

        Returns:
            Absolute path to the icon file.
        """
        return os.path.join(cls.ICONS_DIR, f"{icon_name}.svg")

    @classmethod
    def file_exists(cls, path: str) -> bool:
        """Return True if *path* exists and is a regular file."""
        # isfile() already implies existence, so no separate exists() call.
        return os.path.isfile(path)
|
||||
210
config/platform_config.py
Normale Datei
210
config/platform_config.py
Normale Datei
@ -0,0 +1,210 @@
|
||||
"""
|
||||
Platform-spezifische Konfiguration
|
||||
"""
|
||||
from typing import Dict, List, Any
|
||||
|
||||
|
||||
# Per-platform registration settings: device mix, age limits, allowed
# registration channels, rate limits, error-message translations (German,
# matched by English substring keys), and UI theming.
PLATFORM_CONFIG: Dict[str, Dict[str, Any]] = {
    # Instagram: email-only signup, no phone verification required.
    'instagram': {
        'mobile_probability': 0.7,
        'min_age': 13,
        'max_age': 99,
        'supported_registration': ['email'],
        'default_email_domain': 'z5m7q9dk3ah2v1plx6ju.com',
        'requires_phone_verification': False,
        'session_expiry_days': 30,
        'rate_limits': {
            'registrations_per_hour': 3,
            'registrations_per_day': 10,
            'cooldown_minutes': 15
        },
        # Keys are substrings expected in platform error output; values are
        # the German messages shown to the user.
        'error_patterns': {
            'already taken': 'Dieser Benutzername ist bereits vergeben',
            'weak password': 'Das Passwort ist zu schwach',
            'rate limit': 'Zu viele Versuche - bitte später erneut versuchen',
            'network error': 'Netzwerkfehler - bitte Internetverbindung prüfen',
            'captcha': 'Captcha-Verifizierung erforderlich',
            'verification': 'Es gab ein Problem mit der Verifizierung des Accounts',
            'proxy': 'Problem mit der Proxy-Verbindung',
            'timeout': 'Zeitüberschreitung bei der Verbindung',
            'username': 'Der gewählte Benutzername ist bereits vergeben oder nicht zulässig',
            'password': 'Das Passwort erfüllt nicht die Anforderungen von Instagram',
            'email': 'Die E-Mail-Adresse konnte nicht verwendet werden',
            'phone': 'Die Telefonnummer konnte nicht für die Registrierung verwendet werden'
        },
        'ui_config': {
            'primary_color': '#E4405F',
            'secondary_color': '#C13584',
            'icon': 'instagram.svg'
        }
    },
    # TikTok: mostly mobile traffic, phone verification required.
    'tiktok': {
        'mobile_probability': 0.9,
        'min_age': 13,
        'max_age': 99,
        'supported_registration': ['email', 'phone'],
        'default_email_domain': 'z5m7q9dk3ah2v1plx6ju.com',
        'requires_phone_verification': True,
        'session_expiry_days': 14,
        'rate_limits': {
            'registrations_per_hour': 2,
            'registrations_per_day': 5,
            'cooldown_minutes': 30
        },
        'error_patterns': {
            'captcha': 'TikTok hat einen Captcha-Test angefordert',
            'verification': 'Es gab ein Problem mit der Verifizierung des Accounts',
            'proxy': 'Problem mit der Proxy-Verbindung',
            'timeout': 'Zeitüberschreitung bei der Verbindung',
            'username': 'Der gewählte Benutzername ist bereits vergeben oder nicht zulässig',
            'password': 'Das Passwort erfüllt nicht die Anforderungen von TikTok',
            'email': 'Die E-Mail-Adresse konnte nicht verwendet werden',
            'phone': 'Die Telefonnummer konnte nicht für die Registrierung verwendet werden',
            'phone number required': 'Telefonnummer erforderlich',
            'invalid code': 'Ungültiger Verifizierungscode',
            'age': 'Das eingegebene Alter erfüllt nicht die Anforderungen von TikTok',
            'too_many_attempts': 'Zu viele Registrierungsversuche',
            'rate limit': 'Zu viele Versuche - bitte später erneut versuchen',
            'already taken': 'Der gewählte Benutzername ist bereits vergeben',
            'weak password': 'Das Passwort ist zu schwach',
            'network error': 'Netzwerkfehler - bitte Internetverbindung prüfen'
        },
        'ui_config': {
            'primary_color': '#000000',
            'secondary_color': '#FE2C55',
            'icon': 'tiktok.svg'
        }
    },
    # Facebook: longest session lifetime; error patterns not filled in yet.
    'facebook': {
        'mobile_probability': 0.5,
        'min_age': 13,
        'max_age': 99,
        'supported_registration': ['email', 'phone'],
        'default_email_domain': 'z5m7q9dk3ah2v1plx6ju.com',
        'requires_phone_verification': False,
        'session_expiry_days': 90,
        'rate_limits': {
            'registrations_per_hour': 2,
            'registrations_per_day': 8,
            'cooldown_minutes': 20
        },
        'error_patterns': {
            # Add Facebook-specific errors here
        },
        'ui_config': {
            'primary_color': '#1877F2',
            'secondary_color': '#42B883',
            'icon': 'facebook.svg'
        }
    },
    # OK.ru: higher minimum age (16) and mandatory phone verification.
    'ok': {
        'mobile_probability': 0.3,
        'min_age': 16,
        'max_age': 99,
        'supported_registration': ['email', 'phone'],
        'default_email_domain': 'z5m7q9dk3ah2v1plx6ju.com',
        'requires_phone_verification': True,
        'session_expiry_days': 60,
        'rate_limits': {
            'registrations_per_hour': 2,
            'registrations_per_day': 6,
            'cooldown_minutes': 25
        },
        'error_patterns': {
            'already taken': 'Dieser Benutzername ist bereits vergeben',
            'weak password': 'Das Passwort ist zu schwach',
            'rate limit': 'Zu viele Versuche - bitte später erneut versuchen',
            'network error': 'Netzwerkfehler - bitte Internetverbindung prüfen',
            'captcha': 'Captcha-Verifizierung erforderlich',
            'verification': 'Es gab ein Problem mit der Verifizierung des Accounts',
            'proxy': 'Problem mit der Proxy-Verbindung',
            'timeout': 'Zeitüberschreitung bei der Verbindung',
            'phone required': 'Telefonnummer erforderlich für OK.ru',
            'invalid phone': 'Ungültige Telefonnummer',
            'blocked region': 'Registrierung aus dieser Region nicht möglich'
        },
        'ui_config': {
            'primary_color': '#FF6600',
            'secondary_color': '#FF8533',
            'icon': 'ok.svg'
        }
    },
    # Gmail: phone-only registration, strictest rate limits.
    'gmail': {
        'mobile_probability': 0.4,
        'min_age': 13,
        'max_age': 99,
        'supported_registration': ['phone'],
        'default_email_domain': 'gmail.com',
        'requires_phone_verification': True,
        'session_expiry_days': 365,
        'rate_limits': {
            'registrations_per_hour': 1,
            'registrations_per_day': 3,
            'cooldown_minutes': 60
        },
        'error_patterns': {
            'phone required': 'Telefonnummer erforderlich für Gmail',
            'phone already used': 'Diese Telefonnummer wurde bereits verwendet',
            'invalid phone': 'Ungültige Telefonnummer',
            'verification failed': 'SMS-Verifizierung fehlgeschlagen',
            'account suspended': 'Account-Erstellung gesperrt',
            'rate limit': 'Zu viele Versuche - bitte später erneut versuchen',
            'captcha': 'Captcha-Verifizierung erforderlich',
            'username taken': 'Gewünschter Benutzername nicht verfügbar',
            'weak password': 'Passwort erfüllt nicht die Sicherheitsanforderungen',
            'network error': 'Netzwerkfehler - bitte Internetverbindung prüfen'
        },
        'ui_config': {
            'primary_color': '#EA4335',
            'secondary_color': '#4285F4',
            'icon': 'gmail.svg'
        }
    }
}
|
||||
|
||||
|
||||
def get_platform_config(platform: str) -> Dict[str, Any]:
    """Return the configuration dict for *platform* (case-insensitive).

    Unknown platforms receive a conservative, freshly-built default
    configuration so callers never have to handle a missing entry.
    """
    key = platform.lower()
    try:
        return PLATFORM_CONFIG[key]
    except KeyError:
        # Platform not configured: hand back the generic fallback.
        # A new dict is built per call so callers cannot mutate shared state.
        return {
            'mobile_probability': 0.5,
            'min_age': 13,
            'max_age': 99,
            'supported_registration': ['email'],
            'default_email_domain': 'z5m7q9dk3ah2v1plx6ju.com',
            'requires_phone_verification': False,
            'session_expiry_days': 30,
            'rate_limits': {
                'registrations_per_hour': 3,
                'registrations_per_day': 10,
                'cooldown_minutes': 15
            },
            'error_patterns': {},
            'ui_config': {
                'primary_color': '#000000',
                'secondary_color': '#666666',
                'icon': 'default.svg'
            }
        }
|
||||
|
||||
|
||||
def get_error_patterns(platform: str) -> Dict[str, str]:
    """Return the error-message patterns configured for *platform*.

    Falls back to an empty mapping when the platform config defines none.
    """
    return get_platform_config(platform).get('error_patterns', {})
|
||||
|
||||
|
||||
def get_rate_limits(platform: str) -> Dict[str, int]:
    """Return the registration rate limits configured for *platform*.

    Falls back to an empty mapping when the platform config defines none.
    """
    return get_platform_config(platform).get('rate_limits', {})
|
||||
|
||||
|
||||
def is_registration_method_supported(platform: str, method: str) -> bool:
    """Return True when *method* (e.g. ``'email'``, ``'phone'``) is a
    supported registration method for *platform*.

    Platforms without an explicit list are assumed to support email only.
    """
    supported_methods = get_platform_config(platform).get(
        'supported_registration', ['email']
    )
    return method in supported_methods
|
||||
15
config/proxy_config.json
Normale Datei
15
config/proxy_config.json
Normale Datei
@ -0,0 +1,15 @@
|
||||
{
|
||||
"ipv4": [
|
||||
"85.254.81.222:44444:14a38ed2efe94:04ed25fb1b"
|
||||
],
|
||||
"ipv6": [
|
||||
"92.119.89.251:30015:14a4622431481:a488401704"
|
||||
],
|
||||
"mobile": [
|
||||
"de1.4g.iproyal.com:7296:1rtSh0G:XswBCIqi1joy5dX"
|
||||
],
|
||||
"mobile_api": {
|
||||
"marsproxies": "9zKXWpMEA1",
|
||||
"iproyal": ""
|
||||
}
|
||||
}
|
||||
26
config/stealth_config.json
Normale Datei
26
config/stealth_config.json
Normale Datei
@ -0,0 +1,26 @@
|
||||
{
|
||||
"user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
|
||||
"platform": "Win32",
|
||||
"vendor": "Google Inc.",
|
||||
"accept_language": "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
|
||||
"timezone_id": "Europe/Berlin",
|
||||
"device_scale_factor": 1.0,
|
||||
"color_depth": 24,
|
||||
"hardware_concurrency": 8,
|
||||
"device_memory": 8,
|
||||
"webdriver": false,
|
||||
"fingerprint_noise": true,
|
||||
"device_type": "desktop",
|
||||
"browser_protection": {
|
||||
"enabled": true,
|
||||
"level": "medium",
|
||||
"show_border": true,
|
||||
"show_badge": true,
|
||||
"blur_effect": false,
|
||||
"opacity": 0.1,
|
||||
"badge_text": "🔒 Account wird erstellt - Bitte nicht eingreifen",
|
||||
"badge_position": "top-right",
|
||||
"border_color": "rgba(255, 0, 0, 0.5)",
|
||||
"dialog_always_on_top": true
|
||||
}
|
||||
}
|
||||
46
config/theme.json
Normale Datei
46
config/theme.json
Normale Datei
@ -0,0 +1,46 @@
|
||||
# Path: config/theme.json
|
||||
|
||||
{
|
||||
"dark": {
|
||||
"name": "Dark",
|
||||
"palette": {
|
||||
"Window": "#1E1E1E",
|
||||
"WindowText": "#FFFFFF",
|
||||
"Base": "#2D2D30",
|
||||
"AlternateBase": "#252526",
|
||||
"ToolTipBase": "#2D2D30",
|
||||
"ToolTipText": "#FFFFFF",
|
||||
"Text": "#FFFFFF",
|
||||
"Button": "#0E639C",
|
||||
"ButtonText": "#FFFFFF",
|
||||
"BrightText": "#FF0000",
|
||||
"Link": "#3794FF",
|
||||
"Highlight": "#264F78",
|
||||
"HighlightedText": "#FFFFFF"
|
||||
},
|
||||
"icons": {
|
||||
"path_suffix": "dark"
|
||||
}
|
||||
},
|
||||
"light": {
|
||||
"name": "Light",
|
||||
"palette": {
|
||||
"Window": "#FFFFFF",
|
||||
"WindowText": "#1E1E1E",
|
||||
"Base": "#F5F7FF",
|
||||
"AlternateBase": "#E8EBFF",
|
||||
"ToolTipBase": "#232D53",
|
||||
"ToolTipText": "#FFFFFF",
|
||||
"Text": "#1E1E1E",
|
||||
"Button": "#0099CC",
|
||||
"ButtonText": "#FFFFFF",
|
||||
"BrightText": "#F44336",
|
||||
"Link": "#0099CC",
|
||||
"Highlight": "#E8EBFF",
|
||||
"HighlightedText": "#232D53"
|
||||
},
|
||||
"icons": {
|
||||
"path_suffix": "light"
|
||||
}
|
||||
}
|
||||
}
|
||||
62
config/tiktok_config.json
Normale Datei
62
config/tiktok_config.json
Normale Datei
@ -0,0 +1,62 @@
|
||||
{
|
||||
"automation": {
|
||||
"delays": {
|
||||
"page_load": 3.0,
|
||||
"typing_delay": 0.1,
|
||||
"click_delay": 0.5,
|
||||
"form_submission": 2.0
|
||||
},
|
||||
"retries": {
|
||||
"max_attempts": 3,
|
||||
"delay_between_attempts": 2.0
|
||||
},
|
||||
"timeouts": {
|
||||
"element_wait": 10.0,
|
||||
"page_load": 30.0,
|
||||
"verification_wait": 300.0
|
||||
}
|
||||
},
|
||||
"urls": {
|
||||
"base_url": "https://www.tiktok.com",
|
||||
"signup_url": "https://www.tiktok.com/signup",
|
||||
"login_url": "https://www.tiktok.com/login",
|
||||
"email_signup": "https://www.tiktok.com/signup/phone-or-email/email"
|
||||
},
|
||||
"selectors": {
|
||||
"priority_order": [
|
||||
"data-e2e",
|
||||
"placeholder",
|
||||
"type",
|
||||
"class",
|
||||
"id"
|
||||
]
|
||||
},
|
||||
"registration": {
|
||||
"required_fields": [
|
||||
"email",
|
||||
"password",
|
||||
"birthday_month",
|
||||
"birthday_day",
|
||||
"birthday_year"
|
||||
],
|
||||
"optional_fields": [
|
||||
"username",
|
||||
"newsletter_subscription"
|
||||
],
|
||||
"verification": {
|
||||
"code_length": 6,
|
||||
"max_attempts": 3,
|
||||
"resend_delay": 60
|
||||
}
|
||||
},
|
||||
"error_handling": {
|
||||
"max_retries": 3,
|
||||
"retry_delay": 5.0,
|
||||
"screenshot_on_error": true
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"capture_screenshots": true,
|
||||
"detailed_errors": true
|
||||
}
|
||||
}
|
||||
0
config/twitter_config.json
Normale Datei
0
config/twitter_config.json
Normale Datei
9
config/update_config.json
Normale Datei
9
config/update_config.json
Normale Datei
@ -0,0 +1,9 @@
|
||||
{
|
||||
"last_check": "2025-04-01 12:00:00",
|
||||
"check_interval": 86400,
|
||||
"auto_check": true,
|
||||
"auto_download": false,
|
||||
"update_channel": "stable",
|
||||
"download_path": "updates",
|
||||
"downloaded_updates": []
|
||||
}
|
||||
31
config/user_agents.json
Normale Datei
31
config/user_agents.json
Normale Datei
@ -0,0 +1,31 @@
|
||||
{
|
||||
"desktop": [
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:123.0) Gecko/20100101 Firefox/123.0",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Safari/605.1.15",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:124.0) Gecko/20100101 Firefox/124.0",
|
||||
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
|
||||
"Mozilla/5.0 (X11; Linux x86_64; rv:124.0) Gecko/20100101 Firefox/124.0",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36 Edg/135.0.0.0",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
|
||||
],
|
||||
"mobile": [
|
||||
"Mozilla/5.0 (iPhone; CPU iPhone OS 18_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.0 Mobile/15E148 Safari/604.1",
|
||||
"Mozilla/5.0 (iPhone; CPU iPhone OS 17_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Mobile/15E148 Safari/604.1",
|
||||
"Mozilla/5.0 (iPhone; CPU iPhone OS 17_4_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Mobile/15E148 Safari/604.1",
|
||||
"Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) CriOS/135.0.0.0 Mobile/15E148 Safari/604.1",
|
||||
"Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) CriOS/134.0.0.0 Mobile/15E148 Safari/604.1",
|
||||
"Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/123.0 Mobile/15E148 Safari/605.1.15",
|
||||
"Mozilla/5.0 (Linux; Android 14; SM-S9180) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 14; SM-G991B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 14; Pixel 8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 14; Pixel 7 Pro) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 13; SAMSUNG SM-A536B) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/24.0 Chrome/133.0.0.0 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 13; SAMSUNG SM-A546B) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/24.0 Chrome/133.0.0.0 Mobile Safari/537.36"
|
||||
]
|
||||
}
|
||||
207
controllers/account_controller.py
Normale Datei
207
controllers/account_controller.py
Normale Datei
@ -0,0 +1,207 @@
|
||||
"""
|
||||
Controller für die Verwaltung von Accounts.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import csv
|
||||
from datetime import datetime
|
||||
from PyQt5.QtWidgets import QFileDialog, QMessageBox
|
||||
from PyQt5.QtCore import QObject
|
||||
from application.use_cases.export_accounts_use_case import ExportAccountsUseCase
|
||||
|
||||
logger = logging.getLogger("account_controller")
|
||||
|
||||
class AccountController(QObject):
    """Controller that manages accounts: persist, load, export and delete.

    All dialogs are parented to ``parent_view`` (set via
    :meth:`set_parent_view`); when no parent view is set, message boxes
    are skipped or shown unparented.
    """

    def __init__(self, db_manager):
        """Create the controller.

        Args:
            db_manager: database facade used for all account persistence.
        """
        super().__init__()
        self.db_manager = db_manager
        # Parent widget for modal dialogs; None until set_parent_view() is called.
        self.parent_view = None
        self.export_use_case = ExportAccountsUseCase(db_manager)

        # Fingerprint generator imported lazily inside __init__.
        # NOTE(review): presumably deferred to avoid a circular import — confirm.
        from application.use_cases.generate_account_fingerprint_use_case import GenerateAccountFingerprintUseCase
        self.fingerprint_generator = GenerateAccountFingerprintUseCase(db_manager)

    def set_parent_view(self, view):
        """Set the parent view used as owner for dialogs."""
        self.parent_view = view

    def on_account_created(self, platform: str, account_data: dict):
        """Handle a newly created account.

        Persists the account, generates a fingerprint for it, and shows a
        confirmation dialog (only when a parent view is set).

        Args:
            platform: platform name; stored lower-cased.
            account_data: raw account fields (username, password, email,
                phone, full_name); missing fields default to "".
        """
        account = {
            "platform": platform.lower(),
            "username": account_data.get("username", ""),
            "password": account_data.get("password", ""),
            "email": account_data.get("email", ""),
            "phone": account_data.get("phone", ""),
            "full_name": account_data.get("full_name", ""),
            "created_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }

        # Persist the account in the database.
        account_id = self.db_manager.add_account(account)
        logger.info(f"Account in Datenbank gespeichert: {account['username']} (ID: {account_id})")

        # Generate a fingerprint for the new account (only for valid IDs).
        if account_id and account_id > 0:
            logger.info(f"Generiere Fingerprint für neuen Account {account_id}")
            fingerprint_id = self.fingerprint_generator.execute(account_id)
            if fingerprint_id:
                logger.info(f"Fingerprint {fingerprint_id} wurde Account {account_id} zugewiesen")
            else:
                logger.warning(f"Konnte keinen Fingerprint für Account {account_id} generieren")

        # Show a success message with the credentials.
        if self.parent_view:
            QMessageBox.information(
                self.parent_view,
                "Erfolg",
                f"Account erfolgreich erstellt!\n\nBenutzername: {account['username']}\nPasswort: {account['password']}\nE-Mail/Telefon: {account['email'] or account['phone']}"
            )

    def load_accounts(self, platform=None):
        """Load accounts from the database.

        Args:
            platform: optional platform filter (case-insensitive).

        Returns:
            A list of account dicts; an empty list on error (an error
            dialog is shown when a parent view is set).
        """
        try:
            if platform and hasattr(self.db_manager, "get_accounts_by_platform"):
                accounts = self.db_manager.get_accounts_by_platform(platform.lower())
            else:
                accounts = self.db_manager.get_all_accounts()
                # Fallback: filter in memory when the DB lacks a per-platform query.
                if platform:
                    accounts = [acc for acc in accounts if acc.get("platform", "").lower() == platform.lower()]

            return accounts
        except Exception as e:
            logger.error(f"Fehler beim Laden der Accounts: {e}")
            if self.parent_view:
                QMessageBox.critical(
                    self.parent_view,
                    "Fehler",
                    f"Fehler beim Laden der Accounts:\n{str(e)}"
                )
            return []

    def export_accounts(self, platform=None, accounts_to_export=None):
        """Export accounts to a CSV or JSON file chosen by the user.

        Args:
            platform: optional platform filter for the default export.
            accounts_to_export: optional explicit list of accounts; when
                given, it takes precedence over the platform filter.
        """
        parent = self.parent_view or None

        # Dialog asking for export format and options.
        from PyQt5.QtWidgets import QDialog, QVBoxLayout, QRadioButton, QPushButton, QCheckBox, QDialogButtonBox

        dialog = QDialog(parent)
        dialog.setWindowTitle("Export-Optionen")
        dialog.setMinimumWidth(300)

        layout = QVBoxLayout(dialog)

        # Format selection (CSV is the default).
        csv_radio = QRadioButton("CSV Format (Excel-kompatibel)")
        csv_radio.setChecked(True)
        json_radio = QRadioButton("JSON Format")

        layout.addWidget(csv_radio)
        layout.addWidget(json_radio)

        # Whether to include plain-text passwords in the export.
        include_passwords = QCheckBox("Passwörter einschließen")
        include_passwords.setChecked(True)
        layout.addWidget(include_passwords)

        # OK / Cancel buttons.
        buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        buttons.accepted.connect(dialog.accept)
        buttons.rejected.connect(dialog.reject)
        layout.addWidget(buttons)

        if dialog.exec_() != QDialog.Accepted:
            return

        # Derive format and file-dialog filter from the selection.
        # NOTE(review): `format` shadows the builtin of the same name.
        format = 'csv' if csv_radio.isChecked() else 'json'
        file_extension = '*.csv' if format == 'csv' else '*.json'
        file_filter = f"{format.upper()}-Dateien ({file_extension});;Alle Dateien (*)"

        # Suggest a file name based on platform and format.
        suggested_filename = self.export_use_case.get_export_filename(platform, format)

        file_path, _ = QFileDialog.getSaveFileName(
            parent,
            "Konten exportieren",
            suggested_filename,
            file_filter
        )

        if not file_path:
            return

        try:
            # Run the export through the use-case layer.
            if accounts_to_export:
                # Specific accounts were handed in explicitly.
                export_data = self.export_use_case.execute_with_accounts(
                    accounts=accounts_to_export,
                    format=format,
                    include_passwords=include_passwords.isChecked()
                )
            else:
                # Default export based on the platform filter.
                export_data = self.export_use_case.execute(
                    platform=platform,
                    format=format,
                    include_passwords=include_passwords.isChecked()
                )

            if not export_data:
                QMessageBox.warning(
                    parent,
                    "Keine Daten",
                    "Es wurden keine Accounts zum Exportieren gefunden."
                )
                return

            # Write the serialized bytes to disk.
            with open(file_path, "wb") as f:
                f.write(export_data)

            logger.info(f"Accounts erfolgreich nach {file_path} exportiert")

            if parent:
                QMessageBox.information(
                    parent,
                    "Export erfolgreich",
                    f"Konten wurden erfolgreich nach {file_path} exportiert."
                )

        except Exception as e:
            logger.error(f"Fehler beim Exportieren der Accounts: {e}")
            if parent:
                QMessageBox.critical(
                    parent,
                    "Export fehlgeschlagen",
                    f"Fehler beim Exportieren der Konten:\n{str(e)}"
                )

    def delete_account(self, account_id):
        """Delete an account from the database.

        Args:
            account_id: primary key of the account to remove.

        Returns:
            True on success, False otherwise (an error dialog is shown
            when a parent view is set).
        """
        try:
            success = self.db_manager.delete_account(account_id)

            if not success:
                if self.parent_view:
                    QMessageBox.critical(
                        self.parent_view,
                        "Fehler",
                        f"Konto mit ID {account_id} konnte nicht gelöscht werden."
                    )

            return success
        except Exception as e:
            logger.error(f"Fehler beim Löschen des Accounts: {e}")
            if self.parent_view:
                QMessageBox.critical(
                    self.parent_view,
                    "Fehler",
                    f"Fehler beim Löschen des Kontos:\n{str(e)}"
                )
            return False
|
||||
402
controllers/main_controller.py
Normale Datei
402
controllers/main_controller.py
Normale Datei
@ -0,0 +1,402 @@
|
||||
"""
|
||||
Hauptcontroller für die Social Media Account Generator Anwendung.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from PyQt5.QtWidgets import QMessageBox, QApplication
|
||||
|
||||
from views.main_window import MainWindow
|
||||
from views.dialogs.license_activation_dialog import LicenseActivationDialog
|
||||
from controllers.platform_controllers.instagram_controller import InstagramController
|
||||
from controllers.platform_controllers.tiktok_controller import TikTokController
|
||||
from controllers.account_controller import AccountController
|
||||
from controllers.settings_controller import SettingsController
|
||||
from controllers.session_controller import SessionController
|
||||
|
||||
from database.db_manager import DatabaseManager
|
||||
from utils.proxy_rotator import ProxyRotator
|
||||
from utils.email_handler import EmailHandler
|
||||
from utils.theme_manager import ThemeManager
|
||||
from localization.language_manager import LanguageManager
|
||||
from licensing.license_manager import LicenseManager
|
||||
from updates.update_checker import UpdateChecker
|
||||
|
||||
logger = logging.getLogger("main")
|
||||
|
||||
class MainController:
|
||||
"""Hauptcontroller, der die Anwendung koordiniert."""
|
||||
|
||||
def __init__(self, app):
|
||||
# QApplication Referenz speichern
|
||||
self.app = app
|
||||
|
||||
# Theme Manager initialisieren
|
||||
self.theme_manager = ThemeManager(app)
|
||||
|
||||
# Language Manager initialisieren
|
||||
self.language_manager = LanguageManager(app)
|
||||
|
||||
# Lizenz Manager als erstes initialisieren
|
||||
self.license_manager = LicenseManager()
|
||||
|
||||
# Lizenz prüfen bevor andere Komponenten geladen werden
|
||||
if not self._check_and_activate_license():
|
||||
logger.error("Keine gültige Lizenz - Anwendung wird beendet")
|
||||
sys.exit(1)
|
||||
|
||||
# Modelle initialisieren
|
||||
self.db_manager = DatabaseManager()
|
||||
self.proxy_rotator = ProxyRotator()
|
||||
self.email_handler = EmailHandler()
|
||||
self.update_checker = UpdateChecker(self.license_manager.api_client)
|
||||
|
||||
# Haupt-View erstellen
|
||||
self.view = MainWindow(self.theme_manager, self.language_manager, self.db_manager)
|
||||
|
||||
# Untercontroller erstellen
|
||||
self.account_controller = AccountController(self.db_manager)
|
||||
self.account_controller.set_parent_view(self.view)
|
||||
self.settings_controller = SettingsController(
|
||||
self.proxy_rotator,
|
||||
self.email_handler,
|
||||
self.license_manager
|
||||
)
|
||||
self.session_controller = SessionController(self.db_manager)
|
||||
|
||||
# Plattform-Controller initialisieren
|
||||
self.platform_controllers = {}
|
||||
|
||||
# Instagram Controller hinzufügen
|
||||
instagram_controller = InstagramController(
|
||||
self.db_manager,
|
||||
self.proxy_rotator,
|
||||
self.email_handler,
|
||||
self.language_manager
|
||||
)
|
||||
# Signal für Rückkehr zur Hauptseite verbinden
|
||||
instagram_controller.return_to_main_requested = lambda: self.show_platform_selector_and_reset()
|
||||
# SessionController referenz hinzufügen
|
||||
instagram_controller.session_controller = self.session_controller
|
||||
self.platform_controllers["instagram"] = instagram_controller
|
||||
|
||||
# TikTok Controller hinzufügen
|
||||
tiktok_controller = TikTokController(
|
||||
self.db_manager,
|
||||
self.proxy_rotator,
|
||||
self.email_handler,
|
||||
self.language_manager
|
||||
)
|
||||
# Signal für Rückkehr zur Hauptseite verbinden
|
||||
tiktok_controller.return_to_main_requested = lambda: self.show_platform_selector_and_reset()
|
||||
# SessionController referenz hinzufügen
|
||||
tiktok_controller.session_controller = self.session_controller
|
||||
self.platform_controllers["tiktok"] = tiktok_controller
|
||||
|
||||
# X (Twitter) Controller hinzufügen
|
||||
from controllers.platform_controllers.x_controller import XController
|
||||
x_controller = XController(
|
||||
self.db_manager,
|
||||
self.proxy_rotator,
|
||||
self.email_handler,
|
||||
self.language_manager
|
||||
)
|
||||
# Signal für Rückkehr zur Hauptseite verbinden
|
||||
x_controller.return_to_main_requested = lambda: self.show_platform_selector_and_reset()
|
||||
# SessionController referenz hinzufügen
|
||||
x_controller.session_controller = self.session_controller
|
||||
self.platform_controllers["x"] = x_controller
|
||||
|
||||
# Gmail Controller hinzufügen
|
||||
from controllers.platform_controllers.gmail_controller import GmailController
|
||||
gmail_controller = GmailController(
|
||||
self.db_manager,
|
||||
self.proxy_rotator,
|
||||
self.email_handler,
|
||||
self.language_manager
|
||||
)
|
||||
# Signal für Rückkehr zur Hauptseite verbinden
|
||||
gmail_controller.return_to_main_requested = lambda: self.show_platform_selector_and_reset()
|
||||
# SessionController referenz hinzufügen
|
||||
gmail_controller.session_controller = self.session_controller
|
||||
self.platform_controllers["gmail"] = gmail_controller
|
||||
|
||||
# Hier können in Zukunft weitere Controller hinzugefügt werden:
|
||||
# self.platform_controllers["facebook"] = FacebookController(...)
|
||||
|
||||
# Signals verbinden
|
||||
self.connect_signals()
|
||||
|
||||
# Platform Selector Signal-Verbindungen
|
||||
if hasattr(self.view.platform_selector, 'export_requested'):
|
||||
self.view.platform_selector.export_requested.connect(
|
||||
lambda accounts: self.account_controller.export_accounts(None, accounts)
|
||||
)
|
||||
|
||||
if hasattr(self.view.platform_selector, 'login_requested'):
|
||||
self.view.platform_selector.login_requested.connect(
|
||||
self.session_controller.perform_one_click_login
|
||||
)
|
||||
|
||||
# Session-Status-Update Signal entfernt (Session-Funktionalität deaktiviert)
|
||||
|
||||
# Login-Result Signals verbinden
|
||||
self.session_controller.login_successful.connect(self._on_login_successful)
|
||||
self.session_controller.login_failed.connect(self._on_login_failed)
|
||||
|
||||
# Session starten
|
||||
self._start_license_session()
|
||||
|
||||
# Auf Updates prüfen
|
||||
self.check_for_updates()
|
||||
|
||||
# Hauptfenster anzeigen
|
||||
self.view.show()
|
||||
|
||||
def _on_login_successful(self, account_id: str, session_data: dict):
|
||||
"""Behandelt erfolgreiches Login"""
|
||||
# GEÄNDERT: Kein Popup mehr - User sieht Erfolg direkt im Browser
|
||||
try:
|
||||
account = self.db_manager.get_account(int(account_id))
|
||||
username = account.get('username', 'Unknown') if account else 'Unknown'
|
||||
platform = account.get('platform', 'Unknown') if account else 'Unknown'
|
||||
|
||||
logger.info(f"Login erfolgreich für Account {account_id} ({username}) - Browser bleibt offen")
|
||||
# Popup entfernt - User hat direktes Feedback über Browser-Status
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error showing success message: {e}")
|
||||
|
||||
def _on_login_failed(self, account_id: str, error_message: str):
|
||||
"""Behandelt fehlgeschlagenes Login"""
|
||||
from PyQt5.QtWidgets import QMessageBox
|
||||
|
||||
# Account-Details für die Nachricht holen
|
||||
try:
|
||||
account = self.db_manager.get_account(int(account_id))
|
||||
username = account.get('username', 'Unknown') if account else 'Unknown'
|
||||
|
||||
msg = QMessageBox(self.view)
|
||||
msg.setIcon(QMessageBox.Warning)
|
||||
msg.setWindowTitle("Login Fehlgeschlagen")
|
||||
msg.setText(f"Ein-Klick-Login fehlgeschlagen!")
|
||||
msg.setInformativeText(f"Account: {username}\nFehler: {error_message}")
|
||||
msg.setStandardButtons(QMessageBox.Ok)
|
||||
msg.exec_()
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error showing failure message: {e}")
|
||||
|
||||
def connect_signals(self):
|
||||
"""Verbindet alle Signale mit den entsprechenden Slots."""
|
||||
# Plattformauswahl-Signal verbinden
|
||||
self.view.platform_selected.connect(self.on_platform_selected)
|
||||
|
||||
# Zurück-Button verbinden
|
||||
self.view.back_to_selector_requested.connect(self.show_platform_selector)
|
||||
|
||||
# Theme-Toggle verbinden
|
||||
self.view.theme_toggled.connect(self.on_theme_toggled)
|
||||
|
||||
def on_platform_selected(self, platform: str):
|
||||
"""Wird aufgerufen, wenn eine Plattform ausgewählt wird."""
|
||||
logger.info(f"Plattform ausgewählt: {platform}")
|
||||
|
||||
# Aktuelle Plattform setzen
|
||||
self.current_platform = platform.lower()
|
||||
|
||||
# Prüfen, ob die Plattform unterstützt wird
|
||||
if self.current_platform not in self.platform_controllers:
|
||||
logger.error(f"Plattform '{platform}' wird nicht unterstützt")
|
||||
QMessageBox.critical(
|
||||
self.view,
|
||||
"Nicht unterstützt",
|
||||
f"Die Plattform '{platform}' ist noch nicht implementiert."
|
||||
)
|
||||
return
|
||||
|
||||
# Plattformspezifischen Controller abrufen
|
||||
platform_controller = self.platform_controllers.get(self.current_platform)
|
||||
|
||||
# Plattform-View initialisieren
|
||||
self.view.init_platform_ui(platform, platform_controller)
|
||||
|
||||
# Tab-Hooks verbinden
|
||||
self.connect_tab_hooks(platform_controller)
|
||||
|
||||
# Plattformspezifische Ansicht anzeigen
|
||||
self.view.show_platform_ui()
|
||||
|
||||
def on_theme_toggled(self):
|
||||
"""Wird aufgerufen, wenn das Theme gewechselt wird."""
|
||||
if self.theme_manager:
|
||||
theme_name = self.theme_manager.get_current_theme()
|
||||
logger.info(f"Theme gewechselt zu: {theme_name}")
|
||||
|
||||
# Hier kann zusätzliche Logik für Theme-Wechsel hinzugefügt werden
|
||||
# z.B. UI-Elemente aktualisieren, die nicht automatisch aktualisiert werden
|
||||
|
||||
def connect_tab_hooks(self, platform_controller):
|
||||
"""Verbindet die Tab-Hooks mit dem Plattform-Controller."""
|
||||
# Generator-Tab-Hooks
|
||||
# HINWEIS: account_created Signal ist nicht mehr verbunden, da Accounts
|
||||
# jetzt über SessionController mit Clean Architecture gespeichert werden
|
||||
if hasattr(platform_controller, "get_generator_tab"):
|
||||
generator_tab = platform_controller.get_generator_tab()
|
||||
# generator_tab.account_created.connect(self.account_controller.on_account_created) # Deaktiviert
|
||||
|
||||
|
||||
# Einstellungen-Tab-Hooks
|
||||
if hasattr(platform_controller, "get_settings_tab"):
|
||||
settings_tab = platform_controller.get_settings_tab()
|
||||
settings_tab.proxy_settings_saved.connect(self.settings_controller.save_proxy_settings)
|
||||
settings_tab.proxy_tested.connect(self.settings_controller.test_proxy)
|
||||
settings_tab.email_settings_saved.connect(self.settings_controller.save_email_settings)
|
||||
settings_tab.email_tested.connect(self.settings_controller.test_email)
|
||||
settings_tab.license_activated.connect(self.settings_controller.activate_license)
|
||||
|
||||
def show_platform_selector(self):
|
||||
"""Zeigt den Plattform-Selektor an."""
|
||||
logger.info("Zurück zur Plattformauswahl")
|
||||
self.view.show_platform_selector()
|
||||
if hasattr(self.view, "platform_selector"):
|
||||
self.view.platform_selector.load_accounts()
|
||||
|
||||
def show_platform_selector_and_reset(self):
|
||||
"""Zeigt den Plattform-Selektor an und setzt die Eingabefelder zurück."""
|
||||
logger.info("Zurück zur Plattformauswahl mit Reset der Eingabefelder")
|
||||
|
||||
# Eingabefelder des aktuellen Platform-Controllers zurücksetzen
|
||||
if hasattr(self, 'current_platform') and self.current_platform in self.platform_controllers:
|
||||
controller = self.platform_controllers[self.current_platform]
|
||||
if hasattr(controller, '_generator_tab') and controller._generator_tab:
|
||||
# Tab auf None setzen, damit beim nächsten Öffnen ein neuer erstellt wird
|
||||
controller._generator_tab = None
|
||||
|
||||
# Zur Plattformauswahl zurückkehren
|
||||
self.show_platform_selector()
|
||||
|
||||
def _check_and_activate_license(self):
|
||||
"""
|
||||
Prüft die Lizenz und zeigt den Aktivierungsdialog wenn nötig.
|
||||
|
||||
Returns:
|
||||
True wenn Lizenz gültig, False wenn Benutzer abbricht
|
||||
"""
|
||||
# Versuche Session fortzusetzen
|
||||
if self.license_manager.resume_session():
|
||||
logger.info("Bestehende Session fortgesetzt")
|
||||
return True
|
||||
|
||||
# Prüfe ob Lizenz vorhanden ist
|
||||
if self.license_manager.is_licensed():
|
||||
# Starte neue Session
|
||||
if self.license_manager.start_session():
|
||||
logger.info("Neue Session gestartet")
|
||||
return True
|
||||
else:
|
||||
logger.error("Session konnte nicht gestartet werden")
|
||||
# Zeige Fehlermeldung statt Aktivierungsdialog
|
||||
# Hole detaillierte Fehlermeldung
|
||||
session_result = self.license_manager.session_manager.start_session(
|
||||
self.license_manager.license_data["key"],
|
||||
self.license_manager.license_data.get("activation_id")
|
||||
)
|
||||
error_msg = session_result.get("error", "Unbekannter Fehler")
|
||||
|
||||
QMessageBox.critical(
|
||||
None,
|
||||
"Session-Fehler",
|
||||
f"Die Lizenz ist gültig, aber es konnte keine Session gestartet werden.\n\n"
|
||||
f"Grund: {error_msg}\n\n"
|
||||
"Mögliche Lösungen:\n"
|
||||
"- Schließen Sie andere laufende Instanzen\n"
|
||||
"- Warten Sie einen Moment und versuchen Sie es erneut\n"
|
||||
"- Kontaktieren Sie den Support",
|
||||
QMessageBox.Ok
|
||||
)
|
||||
return False
|
||||
|
||||
# Keine gültige Lizenz - zeige Aktivierungsdialog
|
||||
logger.info("Keine gültige Lizenz gefunden - zeige Aktivierungsdialog")
|
||||
|
||||
dialog = LicenseActivationDialog(self.license_manager)
|
||||
dialog.activation_successful.connect(self._on_license_activated)
|
||||
|
||||
result = dialog.exec_()
|
||||
return result == dialog.Accepted
|
||||
|
||||
def _on_license_activated(self):
|
||||
"""Wird aufgerufen wenn Lizenz erfolgreich aktiviert wurde."""
|
||||
logger.info("Lizenz wurde erfolgreich aktiviert")
|
||||
|
||||
def _start_license_session(self):
|
||||
"""Startet die Lizenz-Session für die laufende Anwendung."""
|
||||
if not self.license_manager.session_manager.is_session_active():
|
||||
if self.license_manager.is_licensed():
|
||||
self.license_manager.start_session()
|
||||
|
||||
def check_license(self):
|
||||
"""Überprüft den Lizenzstatus (für UI Updates)."""
|
||||
is_licensed = self.license_manager.is_licensed()
|
||||
license_info = self.license_manager.get_license_info()
|
||||
status_text = self.license_manager.get_status_text()
|
||||
|
||||
# UI kann hier aktualisiert werden basierend auf Lizenzstatus
|
||||
logger.info(f"Lizenzstatus: {status_text}")
|
||||
|
||||
return is_licensed
|
||||
|
||||
def check_for_updates(self):
    """Check for application updates and offer to download them.

    Any failure during the check is logged and swallowed so a broken
    update server never blocks application startup.
    """
    try:
        # Include the license key in the check when one is available.
        key = self.license_manager.get_license_info().get("key")
        info = self.update_checker.check_for_updates(license_key=key)

        if not info["has_update"]:
            return

        answer = QMessageBox.question(
            self.view,
            "Update verfügbar",
            f"Eine neue Version ist verfügbar: {info['latest_version']}\n"
            f"(Aktuelle Version: {info['current_version']})\n\n"
            f"Release-Datum: {info['release_date']}\n"
            f"Release-Notes:\n{info['release_notes']}\n\n"
            "Möchten Sie das Update jetzt herunterladen?",
            QMessageBox.Yes | QMessageBox.No,
            QMessageBox.Yes
        )

        if answer == QMessageBox.Yes:
            self.download_update(info)
    except Exception as e:
        logger.error(f"Fehler bei der Update-Prüfung: {e}")
def download_update(self, update_info):
    """Download the given update and report the outcome to the user.

    Args:
        update_info: Mapping with at least ``download_url`` and
            ``latest_version`` (as produced by check_for_updates).
    """
    try:
        outcome = self.update_checker.download_update(
            update_info["download_url"],
            update_info["latest_version"]
        )

        if outcome["success"]:
            QMessageBox.information(
                self.view,
                "Download erfolgreich",
                f"Update wurde heruntergeladen: {outcome['file_path']}\n\n"
                "Bitte schließen Sie die Anwendung und führen Sie das Update aus."
            )
        else:
            QMessageBox.warning(
                self.view,
                "Download fehlgeschlagen",
                f"Fehler beim Herunterladen des Updates:\n{outcome['error']}"
            )
    except Exception as e:
        logger.error(f"Fehler beim Herunterladen des Updates: {e}")
        QMessageBox.critical(
            self.view,
            "Fehler",
            f"Fehler beim Herunterladen des Updates:\n{str(e)}"
        )
0
controllers/platform_controllers/__init__.py
Normale Datei
0
controllers/platform_controllers/__init__.py
Normale Datei
265
controllers/platform_controllers/base_controller.py
Normale Datei
265
controllers/platform_controllers/base_controller.py
Normale Datei
@ -0,0 +1,265 @@
|
||||
"""
|
||||
Basis-Controller für Plattform-spezifische Funktionalität.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from PyQt5.QtCore import QObject
|
||||
from typing import Dict, Any, Optional, Tuple
|
||||
import random
|
||||
|
||||
from views.tabs.generator_tab import GeneratorTab
|
||||
from views.tabs.accounts_tab import AccountsTab
|
||||
# SettingsTab import entfernt - wird nicht mehr verwendet
|
||||
|
||||
class BasePlatformController(QObject):
    """Base controller class for platform-specific logic.

    Concrete platform controllers subclass this and override
    ``init_platform``, ``create_generator_tab``, ``create_accounts_tab``,
    ``start_account_creation`` and ``handle_account_created``.  Shared
    behaviour (lazy tab creation, worker-thread signal wiring, forge-dialog
    lifecycle and input validation) lives here.
    """

    # Class-level constants.
    # Per-platform probability that a generated browser profile is
    # "mobile"; unknown platforms fall back to 0.5
    # (see _determine_profile_type).
    MOBILE_PROBABILITY = {
        'instagram': 0.7,
        'tiktok': 0.9,
        'facebook': 0.5
    }

    # Accepted age range for account creation (used by validate_inputs).
    MIN_AGE = 13
    MAX_AGE = 99
    DEFAULT_EMAIL_DOMAIN = "z5m7q9dk3ah2v1plx6ju.com"

    def __init__(self, platform_name, db_manager, proxy_rotator, email_handler, language_manager=None):
        """Store model references and run the platform-specific setup hook.

        Args:
            platform_name: Name of the platform (e.g. "instagram").
            db_manager: Database manager model.
            proxy_rotator: Proxy rotation model.
            email_handler: E-mail handler model.
            language_manager: Optional language manager for translations.
        """
        super().__init__()
        self.platform_name = platform_name
        self.logger = logging.getLogger(f"{platform_name.lower()}_controller")

        # Models
        self.db_manager = db_manager
        self.proxy_rotator = proxy_rotator
        self.email_handler = email_handler
        self.language_manager = language_manager

        # Tabs are created lazily (see get_generator_tab / get_accounts_tab).
        self._generator_tab = None
        self._accounts_tab = None

        # Worker thread
        self.worker_thread = None

        # Optional: session controller (clean architecture)
        self.session_controller = None

        # Optional: forge dialog
        self.forge_dialog = None

        # Platform-specific initialisation hook for subclasses.
        self.init_platform()

    def set_tabs(self, generator_tab, accounts_tab):
        """Set the tab references.

        Args:
            generator_tab: Generator tab.
            accounts_tab: Accounts tab.
        """
        self._generator_tab = generator_tab
        self._accounts_tab = accounts_tab

    def init_platform(self):
        """Initialise platform-specific components.

        Intended to be overridden by subclasses; the base implementation
        does nothing.
        """
        pass

    def get_generator_tab(self):
        """Return the generator tab, creating it on first access."""
        if not self._generator_tab:
            self._generator_tab = self.create_generator_tab()
        return self._generator_tab

    def get_accounts_tab(self):
        """Return the accounts tab, creating it on first access."""
        if not self._accounts_tab:
            self._accounts_tab = self.create_accounts_tab()
        return self._accounts_tab

    def create_generator_tab(self):
        """Create the generator tab.

        Subclasses may override this to wire platform-specific signals.
        """
        return GeneratorTab(self.platform_name, self.language_manager)

    def create_accounts_tab(self):
        """Create the accounts tab.

        Subclasses may override this to customise the tab.
        """
        return AccountsTab(self.platform_name, self.db_manager, self.language_manager)

    def start_account_creation(self, params):
        """Start the account creation.

        Must be overridden by subclasses; the base implementation only logs.

        Args:
            params: Parameters for the account creation.
        """
        self.logger.info(f"Account-Erstellung für {self.platform_name} gestartet")
        # Implemented in subclasses.

    def validate_inputs(self, inputs):
        """Validate the inputs for the account creation.

        Args:
            inputs: Mapping with the account-creation inputs; on success the
                parsed integer age is written back as ``inputs["age"]``.

        Returns:
            (bool, str): (is valid, error message if not valid)
        """
        # Basic validations
        if not inputs.get("full_name"):
            return False, "Bitte geben Sie einen vollständigen Namen ein."

        # Age must be present
        age_text = inputs.get("age_text", "")
        if not age_text:
            return False, "Bitte geben Sie ein Alter ein."

        # Age must be an integer
        try:
            age = int(age_text)
            inputs["age"] = age  # propagate the parsed age to the parameters
        except ValueError:
            return False, "Das Alter muss eine ganze Zahl sein."

        # Age range check — use the class constants instead of the previous
        # hard-coded 13/99 so MIN_AGE/MAX_AGE stay the single source of truth.
        if age < self.MIN_AGE or age > self.MAX_AGE:
            return False, f"Das Alter muss zwischen {self.MIN_AGE} und {self.MAX_AGE} liegen."

        # Phone number required when registering via phone
        if inputs.get("registration_method") == "phone" and not inputs.get("phone_number"):
            return False, "Telefonnummer erforderlich für Registrierung via Telefon."

        return True, ""

    def _determine_profile_type(self) -> str:
        """Pick 'mobile' or 'desktop' based on the platform probability."""
        mobile_prob = self.MOBILE_PROBABILITY.get(self.platform_name.lower(), 0.5)
        return 'mobile' if random.random() < mobile_prob else 'desktop'

    def _generate_fingerprint_for_platform(self) -> Optional[Any]:
        """Generate a browser fingerprint; returns None on any failure."""
        try:
            if not hasattr(self, 'session_controller') or not self.session_controller:
                return None

            profile_type = self._determine_profile_type()

            # Only possible when the session controller exposes the service.
            if hasattr(self.session_controller, 'fingerprint_service'):
                return self.session_controller.fingerprint_service.generate_fingerprint(
                    profile_type=profile_type,
                    platform=self.platform_name.lower()
                )
        except Exception as e:
            self.logger.warning(f"Fingerprint-Generierung fehlgeschlagen: {e}")

        return None

    def _setup_ui_for_creation(self):
        """Prepare the generator-tab UI for a fresh account-creation run."""
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

    def _connect_worker_signals(self):
        """Connect worker-thread signals to the UI elements."""
        if not self.worker_thread:
            return

        generator_tab = self.get_generator_tab()

        # Forge-dialog signals
        if self.forge_dialog:
            self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
            self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
            self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Generator-tab signals (backup channel)
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        # Error and finished handling
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(self._handle_finished)

    def _show_forge_dialog(self):
        """Show the forge animation dialog (best effort; failures only warn)."""
        try:
            from views.widgets.forge_animation_widget import ForgeAnimationDialog

            generator_tab = self.get_generator_tab()
            parent_widget = generator_tab.window()

            self.forge_dialog = ForgeAnimationDialog(parent_widget)
            self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
            self.forge_dialog.closed.connect(self.stop_account_creation)

            self.forge_dialog.start_animation()
            self.forge_dialog.show()
        except Exception as e:
            self.logger.warning(f"Konnte Forge-Dialog nicht anzeigen: {e}")

    def _close_forge_dialog(self):
        """Close and drop the forge dialog if one is open.

        Shared helper: the same close-and-reset sequence was previously
        duplicated in _handle_error, _handle_finished and
        stop_account_creation.
        """
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

    def _handle_error(self, error_msg: str):
        """Common error handling for worker-thread failures."""
        # Close the forge dialog
        self._close_forge_dialog()

        # Show the error in the generator tab
        generator_tab = self.get_generator_tab()
        generator_tab.show_error(error_msg)
        generator_tab.set_running(False)

    def _handle_finished(self, result: dict):
        """Common handling when the worker thread finishes successfully."""
        # Close the forge dialog
        self._close_forge_dialog()

        # Regular processing
        self.handle_account_created(result)

    def stop_account_creation(self):
        """Stop a running account creation."""
        if self.worker_thread and self.worker_thread.isRunning():
            self.worker_thread.stop()
            generator_tab = self.get_generator_tab()
            generator_tab.add_log(f"{self.platform_name}-Account-Erstellung wurde abgebrochen")
            generator_tab.set_running(False)
            generator_tab.set_progress(0)

        # Close the forge dialog if present
        self._close_forge_dialog()

    def handle_account_created(self, result):
        """Process a successfully created account.

        Should be overridden by subclasses for platform-specific logic.
        """
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)

        # Default implementation — may be extended by subclasses.
        self.logger.info(f"Account erfolgreich erstellt für {self.platform_name}")
217
controllers/platform_controllers/base_worker_thread.py
Normale Datei
217
controllers/platform_controllers/base_worker_thread.py
Normale Datei
@ -0,0 +1,217 @@
|
||||
"""
|
||||
Basis-Klasse für alle Platform Worker Threads zur Eliminierung von Code-Duplikation
|
||||
"""
|
||||
from abc import ABC, abstractmethod
|
||||
from PyQt5.QtCore import QThread, pyqtSignal
|
||||
from typing import Dict, Any, Optional
|
||||
from utils.text_similarity import TextSimilarity
|
||||
from domain.value_objects.browser_protection_style import BrowserProtectionStyle, ProtectionLevel
|
||||
import traceback
|
||||
|
||||
|
||||
class BaseAccountCreationWorkerThread(QThread):
    """Base class for all platform worker threads.

    Eliminates code duplication between the per-platform account-creation
    threads: subclasses only supply the automation class
    (``get_automation_class``) and their error-message interpretations
    (``get_error_interpretations``).

    NOTE(review): ``@abstractmethod`` is not actually enforced here because
    the class derives from ``QThread`` rather than ``abc.ABC`` (mixing both
    would require resolving a metaclass conflict) — a subclass missing an
    override only fails when the method is first called. Confirm this is
    intended.
    """

    # Signals MUST stay identical to the pre-existing per-platform threads.
    update_signal = pyqtSignal(str)      # status-line updates
    log_signal = pyqtSignal(str)         # log messages forwarded to the UI
    progress_signal = pyqtSignal(int)    # progress in percent (0-100)
    finished_signal = pyqtSignal(dict)   # final result dict on success
    error_signal = pyqtSignal(str)       # interpreted error message

    def __init__(self, params: Dict[str, Any], platform_name: str,
                 session_controller: Optional[Any] = None,
                 generator_tab: Optional[Any] = None):
        """Store the creation parameters and prepare error matching.

        Args:
            params: Parameters for the account creation (name, age, ...).
            platform_name: Display name of the platform (e.g. "Gmail").
            session_controller: Optional session controller used to persist
                the created account (clean-architecture path).
            generator_tab: Optional generator tab used as a fallback channel
                to forward created accounts to the UI.
        """
        super().__init__()
        self.params = params
        self.platform_name = platform_name
        self.session_controller = session_controller
        self.generator_tab = generator_tab
        self.running = True

        # TextSimilarity for robust fuzzy error matching
        self.text_similarity = TextSimilarity(default_threshold=0.8)

        # Platform-specific error patterns (overridable via
        # get_error_interpretations)
        self.error_interpretations = self.get_error_interpretations()

    @abstractmethod
    def get_automation_class(self):
        """Must be implemented by subclasses; returns the automation class."""
        pass

    @abstractmethod
    def get_error_interpretations(self) -> Dict[str, str]:
        """Platform-specific error interpretations (pattern -> message)."""
        pass

    def run(self):
        """Shared account-creation logic — IDENTICAL to the original flow.

        Emits status/log/progress signals throughout; on success emits
        ``finished_signal`` with the result dict, on failure emits
        ``error_signal`` with an interpreted message.
        """
        try:
            self.update_signal.emit("Status: Initialisierung...")
            self.log_signal.emit(f"{self.platform_name}-Account-Erstellung gestartet...")
            self.progress_signal.emit(10)

            # Load the automation class dynamically
            AutomationClass = self.get_automation_class()

            # IMPORTANT: exactly the same parameters as in the original.
            # Check whether the automation class supports the newer
            # parameters before passing them.
            automation_params = {
                # Support both parameter names for backwards compatibility
                "headless": self.params.get("headless", not self.params.get("show_browser", False)),
                "proxy_type": self.params.get("proxy_type", "NoProxy")
            }

            # Add optional parameters only when the constructor accepts them
            # (introspected via inspect.signature).
            import inspect
            init_signature = inspect.signature(AutomationClass.__init__)
            param_names = list(init_signature.parameters.keys())

            if "fingerprint" in param_names:
                automation_params["fingerprint"] = self.params.get("fingerprint")
            if "imap_handler" in param_names:
                automation_params["imap_handler"] = self.params.get("imap_handler")
            if "phone_service" in param_names:
                automation_params["phone_service"] = self.params.get("phone_service")
            if "use_proxy" in param_names:
                automation_params["use_proxy"] = self.params.get("use_proxy", False)
            if "save_screenshots" in param_names:
                automation_params["save_screenshots"] = True
            if "debug" in param_names:
                automation_params["debug"] = self.params.get("debug", False)
            if "email_domain" in param_names:
                automation_params["email_domain"] = self.params.get("email_domain", "z5m7q9dk3ah2v1plx6ju.com")
            if "window_position" in param_names:
                automation_params["window_position"] = self.params.get("window_position")

            automation = AutomationClass(**automation_params)

            # Set the callback for customer-friendly log messages
            automation.set_customer_log_callback(lambda msg: self.log_signal.emit(msg))

            self.update_signal.emit(f"{self.platform_name}-Automation initialisiert")
            self.progress_signal.emit(20)

            # Browser protection is now applied directly in
            # base_automation.py after the browser has started.

            # Register the account
            self.log_signal.emit(f"Registriere Account für: {self.params['full_name']}")

            # Register the account with all original parameters.
            # Build a clean parameter dict for register_account.
            register_params = {
                "full_name": self.params["full_name"],
                "age": self.params["age"],
                "registration_method": self.params.get("registration_method", "email"),
                "email_domain": self.params.get("email_domain", "z5m7q9dk3ah2v1plx6ju.com")
            }

            # Add optional parameters when present
            if "phone_number" in self.params:
                register_params["phone_number"] = self.params["phone_number"]

            # Handle additional params separately
            if "additional_params" in self.params:
                register_params.update(self.params["additional_params"])

            result = automation.register_account(**register_params)

            if result["success"]:
                # Ensure the data structure is compatible
                if "account_data" not in result:
                    # If account_data is missing, build it from the
                    # top-level fields
                    result["account_data"] = {
                        "username": result.get("username", ""),
                        "password": result.get("password", ""),
                        "email": result.get("email", ""),
                        "phone": result.get("phone", "")
                    }

                result["fingerprint"] = self.params.get("fingerprint")
                self.log_signal.emit("Account erfolgreich erstellt!")
                self.finished_signal.emit(result)
                self.progress_signal.emit(100)

                # Persist the session when a sink is available
                self._save_session_if_available(result)
            else:
                error_msg = result.get("error", "Unbekannter Fehler")
                interpreted_error = self._interpret_error(error_msg)
                self.log_signal.emit(f"Fehler bei der Account-Erstellung: {interpreted_error}")
                self.error_signal.emit(interpreted_error)
                self.progress_signal.emit(0)  # Reset progress on error

        except Exception as e:
            error_msg = str(e)
            self.log_signal.emit(f"Schwerwiegender Fehler: {error_msg}")
            self.log_signal.emit(traceback.format_exc())

            interpreted_error = self._interpret_error(error_msg)
            self.error_signal.emit(interpreted_error)
            self.progress_signal.emit(0)  # Reset progress on error

    def _interpret_error(self, error_message: str) -> str:
        """Translate a raw error message into a user-facing one.

        Each known pattern is matched either as a substring or via fuzzy
        similarity (threshold 0.8) against the lower-cased message; the
        first match wins. Falls back to a generic registration error.
        """
        error_lower = error_message.lower()

        for pattern, interpretation in self.error_interpretations.items():
            if pattern in error_lower or self.text_similarity.is_similar(pattern, error_lower, threshold=0.8):
                return interpretation

        return f"Fehler bei der Registrierung: {error_message}"


    def _save_session_if_available(self, result: Dict[str, Any]):
        """Persist the created account/session if a sink is available.

        Preference order: the session controller (clean-architecture path),
        otherwise the generator tab's ``account_created`` signal. All
        failures are reported as warnings via ``log_signal`` and swallowed.
        """
        # Save the session through the SessionController when available
        if hasattr(self, 'session_controller') and self.session_controller:
            try:
                # Use the SessionController directly (clean architecture)
                if hasattr(self.session_controller, 'create_and_save_account'):
                    # Extract the account data from the correct path
                    if "account_data" in result:
                        account_data = result["account_data"]
                    else:
                        account_data = {
                            'username': result.get("username"),
                            'password': result.get("password"),
                            'email': result.get("email"),
                            'phone': result.get("phone")
                        }

                    save_result = self.session_controller.create_and_save_account(
                        platform=self.platform_name,
                        account_data=account_data
                    )

                    if save_result.get('success'):
                        self.log_signal.emit(f"Session erfolgreich gespeichert")
                    else:
                        self.log_signal.emit(f"Warnung: Session konnte nicht gespeichert werden")

            except Exception as e:
                self.log_signal.emit(f"Warnung: Session konnte nicht gespeichert werden: {e}")

        # Alternatively: send a signal to the generator tab
        elif hasattr(self, 'generator_tab') and self.generator_tab:
            try:
                if hasattr(self.generator_tab, 'account_created'):
                    # Extract the account data from the correct path
                    if "account_data" in result:
                        account_data = result["account_data"]
                    else:
                        account_data = {
                            'username': result.get("username"),
                            'password': result.get("password"),
                            'email': result.get("email"),
                            'phone': result.get("phone")
                        }
                    self.generator_tab.account_created.emit(self.platform_name, account_data)
            except Exception as e:
                self.log_signal.emit(f"Warnung: Konnte Account-Daten nicht an UI senden: {e}")

    def stop(self):
        """Stop the thread.

        NOTE(review): QThread.terminate() kills the thread without cleanup,
        and the ``running`` flag set here is never checked inside run() —
        confirm whether cooperative shutdown was intended instead.
        """
        self.running = False
        self.terminate()
244
controllers/platform_controllers/gmail_controller.py
Normale Datei
244
controllers/platform_controllers/gmail_controller.py
Normale Datei
@ -0,0 +1,244 @@
|
||||
"""
|
||||
Controller für Gmail/Google Account-spezifische Funktionalität
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
|
||||
from controllers.platform_controllers.base_controller import BasePlatformController
|
||||
from controllers.platform_controllers.base_worker_thread import BaseAccountCreationWorkerThread
|
||||
from social_networks.gmail.gmail_automation import GmailAutomation
|
||||
from utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger("gmail_controller")
|
||||
|
||||
class GmailWorkerThread(BaseAccountCreationWorkerThread):
    """Worker thread for Gmail account creation."""

    def __init__(self, params, session_controller=None, generator_tab=None):
        """Forward the parameters to the base thread with platform "Gmail".

        The verbose logging traces the constructor arguments for debugging.
        """
        logger.info(f"[GMAIL WORKER] __init__ aufgerufen")
        logger.info(f"[GMAIL WORKER] params: {params}")
        logger.info(f"[GMAIL WORKER] session_controller: {session_controller}")
        logger.info(f"[GMAIL WORKER] generator_tab: {generator_tab}")
        super().__init__(params, "Gmail", session_controller, generator_tab)
        logger.info(f"[GMAIL WORKER] Initialisierung abgeschlossen")

    def get_automation_class(self):
        """Return the Gmail automation class used by the base run() loop."""
        logger.info(f"[GMAIL WORKER] get_automation_class aufgerufen")
        logger.info(f"[GMAIL WORKER] Gebe zurück: {GmailAutomation}")
        return GmailAutomation

    def get_error_interpretations(self) -> Dict[str, str]:
        """Gmail-specific error interpretations.

        Keys are substrings matched (exactly or fuzzily) against the
        lower-cased error message in the base class's _interpret_error.
        """
        return {
            "captcha": "Google hat ein Captcha angefordert. Bitte versuchen Sie es später erneut.",
            "phone": "Eine Telefonnummer ist zur Verifizierung erforderlich.",
            "age": "Sie müssen mindestens 13 Jahre alt sein.",
            "taken": "Diese E-Mail-Adresse ist bereits vergeben."
        }
class GmailController(BasePlatformController):
    """Controller for Gmail/Google-account functionality."""

    def __init__(self, db_manager, proxy_rotator, email_handler, language_manager, theme_manager=None):
        """Initialise the controller.

        NOTE(review): ``theme_manager`` is accepted for call-site
        compatibility but is neither stored nor used here — confirm.
        """
        super().__init__("gmail", db_manager, proxy_rotator, email_handler, language_manager)
        logger.info("Gmail Controller initialisiert")

    def get_worker_thread_class(self):
        """Return the worker-thread class for Gmail."""
        return GmailWorkerThread

    def get_platform_display_name(self) -> str:
        """Return the display name of the platform."""
        return "Gmail"

    def validate_account_data(self, account_data: Dict[str, Any]) -> Dict[str, Any]:
        """Validate the account data for Gmail.

        Returns:
            Dict with ``valid`` (bool) and ``errors`` (list of messages).
        """
        errors = []

        # Required fields
        if not account_data.get("first_name"):
            errors.append("Vorname ist erforderlich")

        if not account_data.get("last_name"):
            errors.append("Nachname ist erforderlich")

        # Check birthday (user must be at least 13 years old)
        if account_data.get("birthday"):
            from datetime import datetime
            try:
                birth_date = datetime.strptime(account_data["birthday"], "%Y-%m-%d")
                age = (datetime.now() - birth_date).days / 365.25
                if age < 13:
                    errors.append("Sie müssen mindestens 13 Jahre alt sein")
            except:
                # NOTE(review): bare except also swallows SystemExit /
                # KeyboardInterrupt — consider narrowing to ValueError.
                errors.append("Ungültiges Geburtsdatum")

        if errors:
            return {
                "valid": False,
                "errors": errors
            }

        return {
            "valid": True,
            "errors": []
        }

    def create_generator_tab(self):
        """Create the generator tab and connect its signals."""
        from views.tabs.generator_tab import GeneratorTab
        generator_tab = GeneratorTab(self.platform_name, self.language_manager)

        # Connect signals
        generator_tab.start_requested.connect(self.start_account_creation)
        generator_tab.stop_requested.connect(self.stop_account_creation)

        return generator_tab

    def get_default_settings(self) -> Dict[str, Any]:
        """Return the default settings for Gmail.

        NOTE(review): relies on ``get_default_settings`` existing on the
        base class; BasePlatformController as visible here does not define
        it — verify against the full class hierarchy.
        """
        settings = super().get_default_settings()
        settings.update({
            "require_phone": False,  # optional, but often required by Google
            "require_email": False,  # Gmail creates the e-mail address itself
            "min_age": 13,
            "supported_languages": ["de", "en", "es", "fr", "it", "pt", "ru"],
            "default_language": "de",
            "captcha_warning": True  # Gmail frequently shows captchas
        })
        return settings

    def start_account_creation(self, params):
        """Start the Gmail account creation in a worker thread."""
        logger.info(f"[GMAIL] start_account_creation aufgerufen")
        logger.info(f"[GMAIL] Parameter: {params}")
        logger.info(f"[GMAIL] Controller-Typ: {type(self)}")

        # Validate inputs
        is_valid, error_msg = self.validate_inputs(params)
        if not is_valid:
            self.get_generator_tab().show_error(error_msg)
            return

        # Update the UI
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

        # Create and show the forge-animation dialog
        from views.widgets.forge_animation_widget import ForgeAnimationDialog
        parent_widget = generator_tab.window()
        self.forge_dialog = ForgeAnimationDialog(parent_widget, "Gmail")
        self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
        self.forge_dialog.closed.connect(self.stop_account_creation)

        # Take the window position from the main window so the browser can
        # be placed relative to it
        if parent_widget:
            window_pos = parent_widget.pos()
            params["window_position"] = (window_pos.x(), window_pos.y())

        # Generate a browser fingerprint (best effort — errors only logged)
        try:
            from infrastructure.services.fingerprint.fingerprint_generator_service import FingerprintGeneratorService
            from domain.entities.browser_fingerprint import BrowserFingerprint
            import uuid

            fingerprint_service = FingerprintGeneratorService()
            fingerprint_data = fingerprint_service.generate_fingerprint()

            fingerprint = BrowserFingerprint.from_dict(fingerprint_data)
            fingerprint.fingerprint_id = str(uuid.uuid4())
            fingerprint.account_bound = True
            fingerprint.rotation_seed = str(uuid.uuid4())

            params["fingerprint"] = fingerprint.to_dict()
            logger.info(f"Fingerprint für Gmail Account-Erstellung generiert: {fingerprint.fingerprint_id}")
        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints: {e}")

        # Start the worker thread
        session_controller = getattr(self, 'session_controller', None)
        generator_tab_ref = generator_tab if hasattr(generator_tab, 'store_created_account') else None

        logger.info(f"[GMAIL] Erstelle Worker Thread...")
        logger.info(f"[GMAIL] session_controller: {session_controller}")
        logger.info(f"[GMAIL] generator_tab_ref: {generator_tab_ref}")

        self.worker_thread = GmailWorkerThread(
            params,
            session_controller=session_controller,
            generator_tab=generator_tab_ref
        )

        logger.info(f"[GMAIL] Worker Thread erstellt: {self.worker_thread}")

        # Connect signals to the forge dialog
        self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
        self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(lambda result: self._handle_finished(result.get("success", False), result))
        self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Also mirror log/progress to the generator tab
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        logger.info(f"[GMAIL] Starte Worker Thread...")
        self.worker_thread.start()
        logger.info(f"[GMAIL] Worker Thread gestartet!")

        # Show the dialog
        logger.info(f"[GMAIL] Zeige Forge Dialog...")
        self.forge_dialog.start_animation()
        self.forge_dialog.show()
        logger.info(f"[GMAIL] start_account_creation abgeschlossen")

    def stop_account_creation(self):
        """Stop the running account creation."""
        logger.info("[GMAIL] Stoppe Account-Erstellung")

        if self.worker_thread and self.worker_thread.isRunning():
            self.worker_thread.stop()
            self.worker_thread.wait()

        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()

        # Reset the UI
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)

    def _handle_error(self, error_msg):
        """Handle errors during the account creation."""
        logger.error(f"[GMAIL] Fehler: {error_msg}")

        # Close the dialog
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()

        # Update the UI
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)
        generator_tab.show_error(f"Fehler: {error_msg}")

    def _handle_finished(self, success, result_data):
        """Handle the end of the account creation.

        NOTE(review): the signature differs from the base class's
        ``_handle_finished(result)``; this only works because the signal is
        connected through a lambda in start_account_creation — confirm this
        divergence is intentional.
        """
        logger.info(f"[GMAIL] Account-Erstellung beendet. Erfolg: {success}")

        # Close the dialog
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()

        # Update the UI
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)

        if success:
            generator_tab.show_success("Gmail Account erfolgreich erstellt!")
        else:
            error_msg = result_data.get('error', 'Unbekannter Fehler')
            generator_tab.show_error(f"Fehler: {error_msg}")
415
controllers/platform_controllers/instagram_controller.py
Normale Datei
415
controllers/platform_controllers/instagram_controller.py
Normale Datei
@ -0,0 +1,415 @@
|
||||
"""
|
||||
Controller für Instagram-spezifische Funktionalität.
|
||||
Mit TextSimilarity-Integration für robusteres UI-Element-Matching.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import random
|
||||
from PyQt5.QtCore import QThread, pyqtSignal, QObject
|
||||
from typing import Dict, Any
|
||||
|
||||
from controllers.platform_controllers.base_controller import BasePlatformController
|
||||
from controllers.platform_controllers.base_worker_thread import BaseAccountCreationWorkerThread
|
||||
from views.tabs.generator_tab import GeneratorTab
|
||||
from views.tabs.accounts_tab import AccountsTab
|
||||
from views.tabs.settings_tab import SettingsTab
|
||||
from views.widgets.forge_animation_widget import ForgeAnimationDialog
|
||||
|
||||
from social_networks.instagram.instagram_automation import InstagramAutomation
|
||||
from utils.text_similarity import TextSimilarity
|
||||
from utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger("instagram_controller")
|
||||
|
||||
# Legacy WorkerThread als Backup beibehalten
|
||||
class LegacyInstagramWorkerThread(QThread):
    """Legacy thread for Instagram account creation (kept as a backup).

    Runs the registration flow off the UI thread and reports status text,
    log lines, progress and the final result back via Qt signals.
    """

    # Qt signals consumed by the controller / forge dialog.
    update_signal = pyqtSignal(str)      # short status text for the dialog
    log_signal = pyqtSignal(str)         # verbose log messages
    progress_signal = pyqtSignal(int)    # progress percentage (0-100)
    finished_signal = pyqtSignal(dict)   # result dict emitted on success
    error_signal = pyqtSignal(str)       # user-friendly message emitted on failure

    def __init__(self, params):
        """Store creation parameters and set up fuzzy error matching.

        Args:
            params: Account-creation parameters (full_name, age, headless,
                use_proxy, proxy_type, debug, email_domain, additional_params).
        """
        super().__init__()
        self.params = params
        # NOTE(review): `running` is cleared by stop() but never checked in
        # run(); stopping relies solely on QThread.terminate().
        self.running = True

        # Fuzzy text matcher used for robust error matching.
        self.text_similarity = TextSimilarity(default_threshold=0.7)

        # Generic error terms (German + English) used for fuzzy error matching.
        self.error_patterns = [
            "Fehler", "Error", "Fehlgeschlagen", "Failed", "Problem", "Issue",
            "Nicht möglich", "Not possible", "Bitte versuchen Sie es erneut",
            "Please try again", "Konnte nicht", "Could not", "Timeout"
        ]

    def run(self):
        """Execute the account creation and emit progress/result signals."""
        try:
            self.log_signal.emit("Instagram-Account-Erstellung gestartet...")
            self.progress_signal.emit(10)

            # Initialize the Instagram automation with the requested options.
            automation = InstagramAutomation(
                headless=self.params.get("headless", False),
                use_proxy=self.params.get("use_proxy", False),
                proxy_type=self.params.get("proxy_type"),
                save_screenshots=True,
                debug=self.params.get("debug", False),
                email_domain=self.params.get("email_domain", "z5m7q9dk3ah2v1plx6ju.com")
            )

            self.update_signal.emit("Browser wird vorbereitet...")
            self.progress_signal.emit(20)

            # Register the account.
            self.log_signal.emit(f"Registriere Account für: {self.params['full_name']}")

            # Registration always goes through e-mail — no phone number.
            result = automation.register_account(
                full_name=self.params["full_name"],
                age=self.params["age"],
                registration_method="email",  # always e-mail registration
                phone_number=None,  # no phone number
                **self.params.get("additional_params", {})
            )

            self.progress_signal.emit(100)

            if result["success"]:
                self.log_signal.emit("Account erfolgreich erstellt!")
                self.finished_signal.emit(result)
            else:
                # Robust error handling via TextSimilarity.
                error_msg = result.get("error", "Unbekannter Fehler")

                # Try to translate the raw error into a user-friendlier message.
                user_friendly_error = self._interpret_error(error_msg)

                self.log_signal.emit(f"Fehler bei der Account-Erstellung: {user_friendly_error}")
                self.error_signal.emit(user_friendly_error)

        except Exception as e:
            logger.error(f"Fehler im Worker-Thread: {e}")
            self.log_signal.emit(f"Schwerwiegender Fehler: {str(e)}")
            self.error_signal.emit(str(e))
            self.progress_signal.emit(0)

    def _interpret_error(self, error_msg: str) -> str:
        """Interpret an error message and return a user-friendlier version.

        Uses TextSimilarity for more robust error matching.

        Args:
            error_msg: The original error message.

        Returns:
            str: A user-friendly error message, or the original as fallback.
        """
        # Known error patterns and their interpretations.
        error_interpretations = {
            "captcha": "Instagram hat einen Captcha-Test angefordert. Versuchen Sie es später erneut oder nutzen Sie einen anderen Proxy.",
            "verification": "Es gab ein Problem mit der Verifizierung des Accounts. Bitte prüfen Sie die E-Mail-Einstellungen.",
            "proxy": "Problem mit der Proxy-Verbindung. Bitte prüfen Sie Ihre Proxy-Einstellungen.",
            "timeout": "Zeitüberschreitung bei der Verbindung. Bitte überprüfen Sie Ihre Internetverbindung.",
            "username": "Der gewählte Benutzername ist bereits vergeben oder nicht zulässig.",
            "password": "Das Passwort erfüllt nicht die Anforderungen von Instagram.",
            "email": "Die E-Mail-Adresse konnte nicht verwendet werden. Bitte nutzen Sie eine andere E-Mail-Domain.",
            "phone": "Die Telefonnummer konnte nicht für die Registrierung verwendet werden."
        }

        # Try to categorize the error.
        # NOTE(review): if error_msg is merely *similar* to one of the generic
        # terms in self.error_patterns, the first interpretation in dict order
        # is returned regardless of which `pattern` key actually matched —
        # confirm this first-match-wins behavior is intended.
        for pattern, interpretation in error_interpretations.items():
            for error_term in self.error_patterns:
                if (pattern in error_msg.lower() or
                        self.text_similarity.is_similar(error_term, error_msg, threshold=0.7)):
                    return interpretation

        # Fallback: return the original error message unchanged.
        return error_msg

    def stop(self):
        """Stop the thread (forcefully, via QThread.terminate())."""
        self.running = False
        # NOTE(review): terminate() kills the thread without cleanup; any
        # browser session started in run() may be left open.
        self.terminate()
|
||||
|
||||
# New implementation based on the shared BaseWorkerThread machinery.
class InstagramWorkerThread(BaseAccountCreationWorkerThread):
    """Refactored Instagram worker-thread implementation.

    Delegates the generic account-creation flow to
    BaseAccountCreationWorkerThread and only supplies the Instagram-specific
    automation class and error-message mapping.
    """

    def __init__(self, params, session_controller=None, generator_tab=None):
        """Forward parameters to the base class with platform name "Instagram".

        Args:
            params: Account-creation parameters.
            session_controller: Optional controller used to persist accounts.
            generator_tab: Optional tab reference used by the base thread.
        """
        super().__init__(params, "Instagram", session_controller, generator_tab)

    def get_automation_class(self):
        """Return the automation class used to drive Instagram registration."""
        # Imported lazily to avoid paying the import cost before it is needed.
        from social_networks.instagram.instagram_automation import InstagramAutomation
        return InstagramAutomation

    def get_error_interpretations(self) -> Dict[str, str]:
        """Map raw error substrings to user-facing (German) messages."""
        return {
            "already taken": "Dieser Benutzername ist bereits vergeben",
            "weak password": "Das Passwort ist zu schwach",
            "rate limit": "Zu viele Versuche - bitte später erneut versuchen",
            "network error": "Netzwerkfehler - bitte Internetverbindung prüfen",
            "captcha": "Captcha-Verifizierung erforderlich",
            "verification": "Es gab ein Problem mit der Verifizierung des Accounts",
            "proxy": "Problem mit der Proxy-Verbindung",
            "timeout": "Zeitüberschreitung bei der Verbindung",
            "username": "Der gewählte Benutzername ist bereits vergeben oder nicht zulässig",
            "password": "Das Passwort erfüllt nicht die Anforderungen von Instagram",
            "email": "Die E-Mail-Adresse konnte nicht verwendet werden",
            "phone": "Die Telefonnummer konnte nicht für die Registrierung verwendet werden"
        }
|
||||
|
||||
class InstagramController(BasePlatformController):
    """Controller for Instagram-specific functionality.

    Owns the generator tab, the forge-animation dialog and the worker thread
    that performs the actual account creation.
    """

    def __init__(self, db_manager, proxy_rotator, email_handler, language_manager=None):
        """Initialize the controller for the "Instagram" platform.

        Args:
            db_manager: Database access object (passed through to the base).
            proxy_rotator: Proxy rotation service.
            email_handler: E-mail verification service.
            language_manager: Optional translation/language manager.
        """
        super().__init__("Instagram", db_manager, proxy_rotator, email_handler, language_manager)
        self.worker_thread = None

        # Fuzzy text matcher for robust UI-element matching.
        self.text_similarity = TextSimilarity(default_threshold=0.75)

    def create_generator_tab(self):
        """Create the Instagram generator tab and wire its start/stop signals."""
        generator_tab = GeneratorTab(self.platform_name, self.language_manager)

        # Instagram-specific adjustments:
        # override this method if platform-specific tweaks are needed.

        # Connect signals.
        generator_tab.start_requested.connect(self.start_account_creation)
        generator_tab.stop_requested.connect(self.stop_account_creation)

        return generator_tab

    def start_account_creation(self, params):
        """Start Instagram account creation in a background worker thread.

        Validates the inputs, prepares the UI and the forge dialog, generates
        a browser fingerprint, then launches InstagramWorkerThread.

        Args:
            params: Account-creation parameters from the generator tab.
        """
        super().start_account_creation(params)

        # Validate inputs before touching the UI or starting a thread.
        is_valid, error_msg = self.validate_inputs(params)
        if not is_valid:
            self.get_generator_tab().show_error(error_msg)
            return

        # Put the tab into "running" state.
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

        # Create and show the forge-animation dialog.
        parent_widget = generator_tab.window()  # main window as parent
        self.forge_dialog = ForgeAnimationDialog(parent_widget, "Instagram")
        # NOTE(review): both cancel_clicked and closed trigger
        # stop_account_creation, so closing the dialog aborts the run.
        self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
        self.forge_dialog.closed.connect(self.stop_account_creation)

        # Pass the main-window position along so the browser can be placed.
        if parent_widget:
            window_pos = parent_widget.pos()
            params["window_position"] = (window_pos.x(), window_pos.y())

        # Generate a browser fingerprint BEFORE account creation so the new
        # account is bound to a stable, reproducible fingerprint.
        try:
            from infrastructure.services.fingerprint.fingerprint_generator_service import FingerprintGeneratorService
            from domain.entities.browser_fingerprint import BrowserFingerprint
            import uuid

            fingerprint_service = FingerprintGeneratorService()

            # Generate a fresh fingerprint for this account.
            fingerprint_data = fingerprint_service.generate_fingerprint()

            # Build the BrowserFingerprint entity with all required data.
            fingerprint = BrowserFingerprint.from_dict(fingerprint_data)
            fingerprint.fingerprint_id = str(uuid.uuid4())
            fingerprint.account_bound = True
            fingerprint.rotation_seed = str(uuid.uuid4())

            # Serialize for hand-off to the worker thread.
            params["fingerprint"] = fingerprint.to_dict()

            logger.info(f"Fingerprint für neue Account-Erstellung generiert: {fingerprint.fingerprint_id}")
        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints: {e}")
            # Continue without a fingerprint — one is generated later.

        # Launch the worker thread with optional collaborators.
        session_controller = getattr(self, 'session_controller', None)
        generator_tab_ref = generator_tab if hasattr(generator_tab, 'store_created_account') else None

        self.worker_thread = InstagramWorkerThread(
            params,
            session_controller=session_controller,
            generator_tab=generator_tab_ref
        )
        # Forward worker updates to the forge dialog.
        self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
        self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(self._handle_finished)
        self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Mirror log/progress into the generator tab as a backup view.
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        self.worker_thread.start()

        # Show the dialog and start its animation.
        self.forge_dialog.start_animation()
        self.forge_dialog.show()

    def stop_account_creation(self):
        """Abort a running Instagram account creation and reset the UI."""
        if self.worker_thread and self.worker_thread.isRunning():
            self.worker_thread.stop()
            generator_tab = self.get_generator_tab()
            generator_tab.add_log("Account-Erstellung wurde abgebrochen")
            generator_tab.set_running(False)
            generator_tab.set_progress(0)

        # Close the forge dialog if present.
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

    def handle_account_created(self, result):
        """Persist a successfully created account (Clean Architecture path).

        Saves the account via the SessionController when available and shows
        a success/error dialog; otherwise falls back to the legacy signal.

        Args:
            result: Worker result dict containing "account_data".
        """
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)

        # Extract the account data from the result.
        account_data = result.get("account_data", {})

        # Save account and session via SessionController (Clean Architecture).
        if hasattr(self, 'session_controller') and self.session_controller:
            try:
                # NOTE(review): session_data is extracted but never passed to
                # create_and_save_account — confirm whether it is needed.
                session_data = result.get("session_data", {})
                save_result = self.session_controller.create_and_save_account(
                    platform=self.platform_name,
                    account_data=account_data
                )

                if save_result.get('success'):
                    logger.info(f"Account und Session erfolgreich gespeichert")

                    # Show the success message (exactly once!).
                    account_info = save_result.get('account_data', {})
                    from PyQt5.QtWidgets import QMessageBox
                    QMessageBox.information(
                        generator_tab,
                        "Erfolg",
                        f"Account erfolgreich erstellt!\n\n"
                        f"Benutzername: {account_info.get('username', '')}\n"
                        f"Passwort: {account_info.get('password', '')}\n"
                        f"E-Mail/Telefon: {account_info.get('email') or account_info.get('phone', '')}"
                    )

                    # Signal the main window to return to the main page.
                    if hasattr(self, 'return_to_main_requested') and callable(self.return_to_main_requested):
                        self.return_to_main_requested()
                else:
                    error_msg = save_result.get('message', 'Unbekannter Fehler')
                    logger.error(f"Fehler beim Speichern: {error_msg}")
                    from views.widgets.modern_message_box import show_error
                    show_error(
                        generator_tab,
                        "Fehler beim Speichern",
                        f"Beim Speichern des Accounts ist ein Fehler aufgetreten:\n\n{error_msg}"
                    )
            except Exception as e:
                logger.error(f"Fehler beim Speichern des Accounts: {e}")
                from views.widgets.modern_message_box import show_critical
                show_critical(
                    generator_tab,
                    "Unerwarteter Fehler",
                    f"Ein unerwarteter Fehler ist beim Speichern des Accounts aufgetreten:\n\n{str(e)}"
                )
        else:
            # Fallback: legacy path when no SessionController is available.
            logger.warning("SessionController nicht verfügbar, verwende alte Methode")
            generator_tab.account_created.emit(self.platform_name, account_data)
            if hasattr(self, 'return_to_main_requested') and callable(self.return_to_main_requested):
                self.return_to_main_requested()

    # save_account_to_db was removed — accounts are now persisted through the
    # SessionController.

    def validate_inputs(self, inputs):
        """Validate account-creation inputs.

        Applies the base-class checks first, then Instagram-specific rules
        (minimum age, e-mail-domain blacklist with fuzzy matching).

        Args:
            inputs: Dict of user-supplied creation parameters.

        Returns:
            tuple[bool, str]: (is_valid, error message — empty when valid).
        """
        # Run the shared validations from BasePlatformController first.
        valid, error_msg = super().validate_inputs(inputs)
        if not valid:
            return valid, error_msg

        # Instagram-specific validations.
        age = inputs.get("age", 0)
        if age < 13:  # lowered from 14 to 13
            return False, "Das Alter muss mindestens 13 sein (Instagram-Anforderung)."

        # E-mail-domain validation (registration is always via e-mail).
        email_domain = inputs.get("email_domain", "")
        # Blacklist of known problematic throwaway-mail domains.
        blacklisted_domains = ["temp-mail.org", "guerrillamail.com", "maildrop.cc"]

        # Fuzzy-compare against the blacklist to also catch close variants.
        for domain in blacklisted_domains:
            if self.text_similarity.is_similar(email_domain, domain, threshold=0.8):
                return False, f"Die E-Mail-Domain '{email_domain}' kann problematisch für die Instagram-Registrierung sein. Bitte verwenden Sie eine andere Domain."

        return True, ""

    def _handle_error(self, error_msg: str):
        """Handle an error emitted by the worker thread."""
        # Close the forge dialog.
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Show the error and reset the running state.
        generator_tab = self.get_generator_tab()
        generator_tab.show_error(error_msg)
        generator_tab.set_running(False)

    def _handle_finished(self, result: dict):
        """Handle successful completion of the worker thread."""
        # Close the forge dialog.
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Continue with the normal post-creation processing.
        self.handle_account_created(result)

    def get_form_field_label(self, field_type: str) -> str:
        """Return a (German) label for a form field, given its type.

        Args:
            field_type: Field type key, e.g. "username" or "email".

        Returns:
            str: Label text; unknown types fall back to the capitalized key.
        """
        # Mapping from field types to labels.
        field_labels = {
            "full_name": "Vollständiger Name",
            "username": "Benutzername",
            "password": "Passwort",
            "email": "E-Mail-Adresse",
            "phone": "Telefonnummer",
            "age": "Alter",
            "birthday": "Geburtsdatum"
        }

        return field_labels.get(field_type, field_type.capitalize())
|
||||
317
controllers/platform_controllers/method_rotation_mixin.py
Normale Datei
317
controllers/platform_controllers/method_rotation_mixin.py
Normale Datei
@ -0,0 +1,317 @@
|
||||
"""
|
||||
Method rotation mixin for platform controllers.
|
||||
Provides method rotation functionality without breaking existing inheritance hierarchy.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from application.use_cases.method_rotation_use_case import MethodRotationUseCase, RotationContext
|
||||
from infrastructure.repositories.method_strategy_repository import MethodStrategyRepository
|
||||
from infrastructure.repositories.rotation_session_repository import RotationSessionRepository
|
||||
from infrastructure.repositories.platform_method_state_repository import PlatformMethodStateRepository
|
||||
from domain.entities.method_rotation import MethodStrategy, RotationSession, RiskLevel
|
||||
|
||||
|
||||
class MethodRotationMixin:
    """
    Mixin class that adds method rotation capabilities to platform controllers.
    Can be mixed into existing controller classes without breaking inheritance.

    Host-class requirements (not provided by the mixin itself):
    ``self.db_manager``, ``self.platform_name``, ``self.logger`` and — for the
    e-mail strategy branch — ``self.DEFAULT_EMAIL_DOMAIN``.
    """

    def _init_method_rotation_system(self):
        """Initialize the method rotation system components.

        Sets ``self.method_rotation_use_case`` to None on any failure so
        callers can detect that rotation is unavailable and fall back.
        """
        try:
            # Bail out early if no database manager is available.
            if not hasattr(self, 'db_manager') or self.db_manager is None:
                self.method_rotation_use_case = None
                return

            # Initialize repositories.
            self.method_strategy_repo = MethodStrategyRepository(self.db_manager)
            self.rotation_session_repo = RotationSessionRepository(self.db_manager)
            self.platform_state_repo = PlatformMethodStateRepository(self.db_manager)

            # Initialize the use case on top of the repositories.
            self.method_rotation_use_case = MethodRotationUseCase(
                strategy_repo=self.method_strategy_repo,
                session_repo=self.rotation_session_repo,
                state_repo=self.platform_state_repo
            )

            # Initialize per-run rotation state.
            self.current_rotation_session = None
            self.current_rotation_context = None

            self.logger.info(f"Method rotation system initialized for {self.platform_name}")

        except Exception as e:
            self.logger.error(f"Failed to initialize method rotation system: {e}")
            # Set to None so we can detect failures and fallback.
            self.method_rotation_use_case = None

    def _apply_method_strategy(self, params: Dict[str, Any], strategy: MethodStrategy) -> Dict[str, Any]:
        """
        Apply method strategy configuration to account creation parameters.

        Args:
            params: Original account creation parameters
            strategy: Selected method strategy

        Returns:
            Updated parameters with method-specific configuration
        """
        # Work on a copy — the caller's dict is never mutated.
        updated_params = params.copy()

        # Apply method selection.
        updated_params['registration_method'] = strategy.method_name

        # Apply method-specific configuration.
        config = strategy.configuration

        if strategy.method_name.startswith('stealth_'):
            # Instagram anti-bot strategy methods.
            updated_params['stealth_method'] = strategy.method_name
            updated_params['enhanced_stealth'] = config.get('enhanced_stealth', False)
            updated_params['user_agent_rotation'] = config.get('user_agent_rotation', False)
            updated_params['fingerprint_complexity'] = config.get('fingerprint_complexity', 'basic')
            updated_params['canvas_noise'] = config.get('canvas_noise', False)
            updated_params['webrtc_protection'] = config.get('webrtc_protection', 'basic')
            updated_params['viewport_randomization'] = config.get('viewport_randomization', False)
            updated_params['navigator_spoof'] = config.get('navigator_spoof', False)
            updated_params['timing_randomization'] = config.get('timing_randomization', False)
            updated_params['screen_resolution_spoof'] = config.get('screen_resolution_spoof', False)
            updated_params['memory_spoof'] = config.get('memory_spoof', False)
            updated_params['hardware_spoof'] = config.get('hardware_spoof', False)

        elif strategy.method_name == 'email':
            # Email method configuration (legacy).
            # NOTE(review): relies on self.DEFAULT_EMAIL_DOMAIN from the host
            # controller class — confirm every mixing class defines it.
            updated_params['email_domain'] = config.get('email_domain', self.DEFAULT_EMAIL_DOMAIN)
            updated_params['require_phone_verification'] = config.get('require_phone_verification', False)
            updated_params['auto_verify_email'] = config.get('auto_verify_email', True)

        elif strategy.method_name == 'phone':
            # Phone method configuration (legacy).
            updated_params['registration_method'] = 'phone'
            updated_params['require_email_backup'] = config.get('require_email_backup', True)
            updated_params['phone_verification_timeout'] = config.get('phone_verification_timeout', 300)

        elif strategy.method_name == 'social_login':
            # Social login configuration (legacy).
            updated_params['registration_method'] = 'social'
            updated_params['social_providers'] = config.get('supported_providers', ['facebook'])
            updated_params['fallback_to_email'] = config.get('fallback_to_email', True)

        elif strategy.method_name in ['standard_registration', 'recovery_registration']:
            # Gmail-specific methods.
            updated_params['registration_method'] = strategy.method_name
            updated_params['recovery_email'] = config.get('recovery_email', False)
            updated_params['recovery_phone'] = config.get('recovery_phone', False)

        # Add strategy metadata (underscore-prefixed = internal bookkeeping).
        updated_params['_method_strategy'] = {
            'strategy_id': strategy.strategy_id,
            'method_name': strategy.method_name,
            'priority': strategy.priority,
            'risk_level': strategy.risk_level.value,
            'effectiveness_score': strategy.effectiveness_score
        }

        return updated_params

    def handle_method_failure(self, error_details: Dict[str, Any]) -> bool:
        """
        Handle method failure and attempt rotation to next best method.

        Args:
            error_details: Details about the failure

        Returns:
            True if rotation succeeded and retry should be attempted, False otherwise
        """
        if not self.method_rotation_use_case or not self.current_rotation_session:
            return False

        try:
            # Record the failure so the use case can track effectiveness.
            self.method_rotation_use_case.record_method_result(
                session_id=self.current_rotation_session.session_id,
                method_name=self.current_rotation_session.current_method,
                success=False,
                error_details=error_details
            )

            # Check if rotation should occur.
            if self.method_rotation_use_case.should_rotate_method(self.current_rotation_session.session_id):

                # Attempt rotation.
                next_method = self.method_rotation_use_case.rotate_method(
                    session_id=self.current_rotation_session.session_id,
                    reason=f"Method failure: {error_details.get('error_type', 'unknown')}"
                )

                if next_method:
                    self.logger.info(f"Rotating from {self.current_rotation_session.current_method} to {next_method.method_name}")

                    # Re-fetch the session so current_method reflects the rotation.
                    self.current_rotation_session = self.rotation_session_repo.find_by_id(
                        self.current_rotation_session.session_id
                    )

                    return True
                else:
                    self.logger.warning("No alternative methods available for rotation")

            else:
                self.logger.info("Rotation not triggered - continuing with current method")

        except Exception as e:
            self.logger.error(f"Method rotation failed: {e}")

        return False

    def handle_method_success(self, result: Dict[str, Any]) -> None:
        """
        Handle successful method execution.

        Args:
            result: Result details from successful execution
        """
        if not self.method_rotation_use_case or not self.current_rotation_session:
            return

        try:
            execution_time = result.get('execution_time', 0.0)

            # Record the success.
            self.method_rotation_use_case.record_method_result(
                session_id=self.current_rotation_session.session_id,
                method_name=self.current_rotation_session.current_method,
                success=True,
                execution_time=execution_time
            )

            self.logger.info(f"Method {self.current_rotation_session.current_method} succeeded in {execution_time:.2f}s")

        except Exception as e:
            self.logger.error(f"Failed to record method success: {e}")

    def get_rotation_status(self) -> Optional[Dict[str, Any]]:
        """
        Get current rotation session status.

        Returns:
            Dictionary with rotation status information or None if no active session
        """
        if not self.method_rotation_use_case or not self.current_rotation_session:
            return None

        try:
            return self.method_rotation_use_case.get_session_status(
                self.current_rotation_session.session_id
            )
        except Exception as e:
            self.logger.error(f"Failed to get rotation status: {e}")
            return None

    def get_platform_method_recommendations(self) -> Dict[str, Any]:
        """
        Get method recommendations and insights for the current platform.

        Returns:
            Dictionary with recommendations and platform insights
        """
        if not self.method_rotation_use_case:
            return {}

        try:
            return self.method_rotation_use_case.get_platform_method_recommendations(
                self.platform_name.lower()
            )
        except Exception as e:
            self.logger.error(f"Failed to get method recommendations: {e}")
            return {}

    def enable_emergency_mode(self, reason: str = "manual_override") -> None:
        """
        Enable emergency mode for the platform.

        Args:
            reason: Reason for enabling emergency mode
        """
        if not self.method_rotation_use_case:
            return

        try:
            self.method_rotation_use_case.enable_emergency_mode(
                self.platform_name.lower(), reason
            )
            self.logger.warning(f"Emergency mode enabled for {self.platform_name}: {reason}")
        except Exception as e:
            self.logger.error(f"Failed to enable emergency mode: {e}")

    def disable_emergency_mode(self) -> None:
        """Disable emergency mode for the platform."""
        if not self.method_rotation_use_case:
            return

        try:
            self.method_rotation_use_case.disable_emergency_mode(
                self.platform_name.lower()
            )
            self.logger.info(f"Emergency mode disabled for {self.platform_name}")
        except Exception as e:
            self.logger.error(f"Failed to disable emergency mode: {e}")

    def _create_rotation_context(self, params: Dict[str, Any]) -> RotationContext:
        """
        Create rotation context from account creation parameters.

        Args:
            params: Account creation parameters

        Returns:
            RotationContext for method selection
        """
        return RotationContext(
            platform=self.platform_name.lower(),
            account_id=params.get('account_id'),
            fingerprint_id=params.get('fingerprint', {}).get('fingerprint_id'),
            excluded_methods=params.get('_excluded_methods', []),
            max_risk_level=RiskLevel(params.get('_max_risk_level', 'HIGH')),
            emergency_mode=params.get('_emergency_mode', False),
            session_metadata={
                # Underscore-prefixed keys are internal and excluded here.
                'user_inputs': {k: v for k, v in params.items() if not k.startswith('_')},
                'creation_started_at': datetime.now().isoformat(),
                'controller_type': self.__class__.__name__
            }
        )

    def _should_use_rotation_system(self) -> bool:
        """
        Check if rotation system should be used.

        Returns:
            True if rotation system is available and should be used
        """
        return (
            self.method_rotation_use_case is not None and
            hasattr(self, 'db_manager') and
            self.db_manager is not None
        )

    def cleanup_rotation_session(self) -> None:
        """Clean up current rotation session."""
        if self.current_rotation_session:
            try:
                # Archive still-active sessions (marked as unsuccessful).
                if self.current_rotation_session.is_active:
                    self.rotation_session_repo.archive_session(
                        self.current_rotation_session.session_id,
                        False
                    )
            except Exception as e:
                self.logger.error(f"Failed to cleanup rotation session: {e}")
            finally:
                # Always drop references, even if archiving failed.
                self.current_rotation_session = None
                self.current_rotation_context = None
|
||||
288
controllers/platform_controllers/method_rotation_worker_mixin.py
Normale Datei
288
controllers/platform_controllers/method_rotation_worker_mixin.py
Normale Datei
@ -0,0 +1,288 @@
|
||||
"""
|
||||
Worker thread mixin for method rotation integration.
|
||||
Adds rotation support to base worker threads without breaking existing functionality.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
|
||||
# Import rotation components (with fallback for missing imports).
try:
    from controllers.platform_controllers.method_rotation_mixin import MethodRotationMixin
    ROTATION_AVAILABLE = True
except ImportError:
    # Rotation system not installed/importable: expose a no-op stub so
    # classes mixing in MethodRotationMixin still import cleanly, and flag
    # the absence via ROTATION_AVAILABLE.
    ROTATION_AVAILABLE = False

    class MethodRotationMixin:
        pass
||||
|
||||
|
||||
class MethodRotationWorkerMixin:
|
||||
"""
|
||||
Mixin for worker threads to add method rotation support.
|
||||
Handles rotation-aware error handling and retry logic.
|
||||
"""
|
||||
|
||||
def _init_rotation_support(self, controller_instance: Optional[Any] = None):
    """Prepare rotation bookkeeping for this worker thread.

    Args:
        controller_instance: Controller exposing the rotation API
            (handle_method_failure / get_rotation_status), or None.
    """
    # Retry bookkeeping: allow up to three rotation-triggered retries.
    self.rotation_retry_count = 0
    self.max_rotation_retries = 3

    # Keep a back-reference to the controller and pick up rotation hints
    # that were injected into the creation parameters.
    self.controller_instance = controller_instance
    hints = self.params
    self.rotation_session_id = hints.get('_rotation_session_id')
    self.strategy_id = hints.get('_strategy_id')
||||
|
||||
def _handle_registration_failure(self, result: Dict[str, Any]) -> bool:
|
||||
"""
|
||||
Handle registration failure with rotation support.
|
||||
|
||||
Args:
|
||||
result: Result from failed registration attempt
|
||||
|
||||
Returns:
|
||||
True if rotation was attempted and should retry, False otherwise
|
||||
"""
|
||||
if not self._is_rotation_available():
|
||||
return False
|
||||
|
||||
# Check if we've exceeded retry limit
|
||||
if self.rotation_retry_count >= self.max_rotation_retries:
|
||||
self.log_signal.emit("Maximum rotation retries reached, stopping")
|
||||
return False
|
||||
|
||||
error_details = {
|
||||
'error_type': self._classify_error(result.get('error', '')),
|
||||
'message': result.get('error', 'Unknown error'),
|
||||
'timestamp': datetime.now().isoformat(),
|
||||
'attempt_number': self.rotation_retry_count + 1
|
||||
}
|
||||
|
||||
# Attempt rotation through controller
|
||||
try:
|
||||
rotation_success = self.controller_instance.handle_method_failure(error_details)
|
||||
|
||||
if rotation_success:
|
||||
self.rotation_retry_count += 1
|
||||
|
||||
# Get updated session to get new method
|
||||
rotation_status = self.controller_instance.get_rotation_status()
|
||||
if rotation_status:
|
||||
new_method = rotation_status.get('current_method')
|
||||
self.log_signal.emit(f"Rotating to method: {new_method} (attempt {self.rotation_retry_count})")
|
||||
|
||||
# Update params with new method
|
||||
self._update_params_for_rotation(rotation_status)
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
self.log_signal.emit(f"Rotation failed: {e}")
|
||||
|
||||
return False
|
||||
|
||||
def _handle_registration_success(self, result: Dict[str, Any]):
|
||||
"""
|
||||
Handle successful registration with rotation tracking.
|
||||
|
||||
Args:
|
||||
result: Result from successful registration
|
||||
"""
|
||||
if not self._is_rotation_available():
|
||||
return
|
||||
|
||||
try:
|
||||
# Record success through controller
|
||||
success_details = {
|
||||
'execution_time': result.get('execution_time', 0.0),
|
||||
'timestamp': datetime.now().isoformat(),
|
||||
'method_used': self.params.get('registration_method', 'unknown'),
|
||||
'retry_count': self.rotation_retry_count
|
||||
}
|
||||
|
||||
self.controller_instance.handle_method_success(success_details)
|
||||
|
||||
if self.rotation_retry_count > 0:
|
||||
self.log_signal.emit(f"Success after {self.rotation_retry_count} rotation(s)")
|
||||
|
||||
except Exception as e:
|
||||
self.log_signal.emit(f"Failed to record rotation success: {e}")
|
||||
|
||||
def _update_params_for_rotation(self, rotation_status: Dict[str, Any]):
|
||||
"""
|
||||
Update worker parameters based on rotation status.
|
||||
|
||||
Args:
|
||||
rotation_status: Current rotation session status
|
||||
"""
|
||||
new_method = rotation_status.get('current_method')
|
||||
if new_method:
|
||||
# Apply method-specific parameter updates
|
||||
if new_method.startswith('stealth_'):
|
||||
self.params['stealth_method'] = new_method
|
||||
|
||||
if new_method == 'stealth_basic':
|
||||
self.params['enhanced_stealth'] = False
|
||||
self.params['user_agent_rotation'] = False
|
||||
self.log_signal.emit("Switched to basic stealth mode")
|
||||
|
||||
elif new_method == 'stealth_enhanced':
|
||||
self.params['enhanced_stealth'] = True
|
||||
self.params['user_agent_rotation'] = True
|
||||
self.params['canvas_noise'] = True
|
||||
self.log_signal.emit("Switched to enhanced stealth mode")
|
||||
|
||||
elif new_method == 'stealth_maximum':
|
||||
self.params['enhanced_stealth'] = True
|
||||
self.params['user_agent_rotation'] = True
|
||||
self.params['canvas_noise'] = True
|
||||
self.params['navigator_spoof'] = True
|
||||
self.params['viewport_randomization'] = True
|
||||
self.params['memory_spoof'] = True
|
||||
self.params['hardware_spoof'] = True
|
||||
self.log_signal.emit("Switched to maximum stealth mode")
|
||||
|
||||
elif new_method == 'phone':
|
||||
self.params['require_phone_verification'] = True
|
||||
self.log_signal.emit("Switched to phone registration method")
|
||||
elif new_method == 'email':
|
||||
self.params['require_phone_verification'] = False
|
||||
self.log_signal.emit("Switched to email registration method")
|
||||
elif new_method == 'social_login':
|
||||
self.params['use_social_login'] = True
|
||||
self.log_signal.emit("Switched to social login method")
|
||||
|
||||
def _classify_error(self, error_message: str) -> str:
|
||||
"""
|
||||
Classify error type for rotation decision making.
|
||||
|
||||
Args:
|
||||
error_message: Error message from failed attempt
|
||||
|
||||
Returns:
|
||||
Error classification string
|
||||
"""
|
||||
error_lower = error_message.lower()
|
||||
|
||||
# Browser-level and CSS parsing errors (high priority for rotation)
|
||||
if any(term in error_lower for term in [
|
||||
'css', 'javascript', 'parsing', '--font-family', '--gradient',
|
||||
'stylesheet', 'rendering', 'dom', 'browser', 'navigation'
|
||||
]):
|
||||
return 'browser_level_error'
|
||||
elif any(term in error_lower for term in ['rate limit', 'zu viele', 'too many']):
|
||||
return 'rate_limit'
|
||||
elif any(term in error_lower for term in ['suspended', 'gesperrt', 'blocked']):
|
||||
return 'account_suspended'
|
||||
elif any(term in error_lower for term in ['timeout', 'zeitüberschreitung']):
|
||||
return 'timeout'
|
||||
elif any(term in error_lower for term in ['captcha', 'verification']):
|
||||
return 'verification_required'
|
||||
elif any(term in error_lower for term in ['network', 'connection', 'verbindung']):
|
||||
return 'network_error'
|
||||
else:
|
||||
return 'unknown'
|
||||
|
||||
def _is_rotation_available(self) -> bool:
|
||||
"""
|
||||
Check if rotation support is available.
|
||||
|
||||
Returns:
|
||||
True if rotation is available and configured
|
||||
"""
|
||||
return (
|
||||
ROTATION_AVAILABLE and
|
||||
self.controller_instance is not None and
|
||||
hasattr(self.controller_instance, 'handle_method_failure') and
|
||||
hasattr(self.controller_instance, '_should_use_rotation_system') and
|
||||
self.controller_instance._should_use_rotation_system()
|
||||
)
|
||||
|
||||
def _enhanced_register_account(self, automation, register_params: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Enhanced account registration with rotation support.
|
||||
|
||||
Args:
|
||||
automation: Platform automation instance
|
||||
register_params: Registration parameters
|
||||
|
||||
Returns:
|
||||
Registration result with rotation tracking
|
||||
"""
|
||||
start_time = datetime.now()
|
||||
|
||||
try:
|
||||
# Attempt registration
|
||||
result = automation.register_account(**register_params)
|
||||
|
||||
# Calculate execution time
|
||||
execution_time = (datetime.now() - start_time).total_seconds()
|
||||
result['execution_time'] = execution_time
|
||||
|
||||
if result.get("success"):
|
||||
# Handle success
|
||||
self._handle_registration_success(result)
|
||||
return result
|
||||
else:
|
||||
# Handle failure with potential rotation
|
||||
if self._handle_registration_failure(result):
|
||||
# Rotation was attempted, retry with new method
|
||||
self.log_signal.emit("Retrying with rotated method...")
|
||||
|
||||
# Recursive call with updated params (limited by retry count)
|
||||
updated_register_params = register_params.copy()
|
||||
updated_register_params.update({
|
||||
'registration_method': self.params.get('registration_method'),
|
||||
'require_phone_verification': self.params.get('require_phone_verification', False)
|
||||
})
|
||||
|
||||
return self._enhanced_register_account(automation, updated_register_params)
|
||||
else:
|
||||
# No rotation available or retry limit reached
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
# Handle exceptions
|
||||
error_result = {
|
||||
'success': False,
|
||||
'error': str(e),
|
||||
'execution_time': (datetime.now() - start_time).total_seconds()
|
||||
}
|
||||
|
||||
# Try rotation on exception as well
|
||||
if self._handle_registration_failure(error_result):
|
||||
self.log_signal.emit("Retrying after exception with rotated method...")
|
||||
return self._enhanced_register_account(automation, register_params)
|
||||
else:
|
||||
return error_result
|
||||
|
||||
def _log_rotation_status(self):
|
||||
"""Log current rotation status for debugging"""
|
||||
if self._is_rotation_available():
|
||||
try:
|
||||
status = self.controller_instance.get_rotation_status()
|
||||
if status:
|
||||
self.log_signal.emit(f"Rotation Status - Method: {status.get('current_method')}, "
|
||||
f"Attempts: {status.get('rotation_count', 0)}, "
|
||||
f"Success Rate: {status.get('success_rate', 0.0):.2f}")
|
||||
except Exception as e:
|
||||
self.log_signal.emit(f"Could not get rotation status: {e}")
|
||||
|
||||
def cleanup_rotation(self):
|
||||
"""Clean up rotation resources"""
|
||||
if self._is_rotation_available():
|
||||
try:
|
||||
self.controller_instance.cleanup_rotation_session()
|
||||
except Exception as e:
|
||||
self.log_signal.emit(f"Rotation cleanup failed: {e}")
|
||||
|
||||
def stop(self):
|
||||
"""Enhanced stop method with rotation cleanup"""
|
||||
self.running = False
|
||||
self.cleanup_rotation()
|
||||
|
||||
# Call original stop if it exists
|
||||
if hasattr(super(), 'stop'):
|
||||
super().stop()
|
||||
193
controllers/platform_controllers/ok_ru_controller.py
Normale Datei
193
controllers/platform_controllers/ok_ru_controller.py
Normale Datei
@ -0,0 +1,193 @@
|
||||
"""
|
||||
Controller für OK.ru (Odnoklassniki)-spezifische Funktionalität.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from PyQt5.QtCore import QThread, pyqtSignal
|
||||
|
||||
from controllers.platform_controllers.base_controller import BasePlatformController
|
||||
from controllers.platform_controllers.base_worker_thread import BaseAccountCreationWorkerThread
|
||||
from views.tabs.generator_tab import GeneratorTab
|
||||
from views.widgets.forge_animation_widget import ForgeAnimationDialog
|
||||
|
||||
from social_networks.ok_ru.ok_ru_automation import OkRuAutomation
|
||||
from utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger("ok_ru_controller")
|
||||
|
||||
class OkRuWorkerThread(BaseAccountCreationWorkerThread):
    """Worker thread that performs OK.ru account creation."""

    def __init__(self, params, session_controller=None, generator_tab=None):
        super().__init__(params, "OK.ru", session_controller, generator_tab)

    def get_automation_class(self):
        """Return the automation class used for OK.ru."""
        return OkRuAutomation

    def get_error_interpretations(self):
        """Return the OK.ru-specific error interpretations (German UI strings)."""
        interpretations = {
            "phone": "Diese Telefonnummer wird bereits verwendet.",
            "captcha": "Bitte lösen Sie das Captcha.",
            "age": "Sie müssen mindestens 14 Jahre alt sein.",
            "blocked": "Zu viele Versuche. Bitte versuchen Sie es später erneut.",
        }
        return interpretations
|
||||
|
||||
class OkRuController(BasePlatformController):
    """Controller for OK.ru (Odnoklassniki)-specific functionality."""

    def __init__(self, db_manager, proxy_rotator, email_handler, language_manager=None):
        super().__init__("OK.ru", db_manager, proxy_rotator, email_handler, language_manager)
        self.worker_thread = None  # active OkRuWorkerThread, if any
        self.platform_icon = "ok_ru.png"  # Platform-specific icon for OK.ru

    def create_generator_tab(self):
        """Create the generator tab for OK.ru."""
        generator_tab = GeneratorTab(self.platform_name, self.language_manager)

        # OK.ru uses phone-based registration only.
        # No special configuration needed since GeneratorTab ships with all fields by default.

        return generator_tab

    def start_account_creation(self, params):
        """Start the OK.ru account creation: validate, show the forge dialog,
        generate a fingerprint, then launch the worker thread."""
        logger.info(f"Starte OK.ru Account-Erstellung mit Parametern: {params}")

        # Validate inputs; abort early with a UI error on failure.
        is_valid, error_msg = self.validate_inputs(params)
        if not is_valid:
            self.get_generator_tab().show_error(error_msg)
            return

        # Update UI state for a running creation.
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

        # Create and wire the forge animation dialog; both cancel and close
        # stop the account creation.
        parent_widget = generator_tab.window()
        self.forge_dialog = ForgeAnimationDialog(parent_widget, "OK.ru")
        self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
        self.forge_dialog.closed.connect(self.stop_account_creation)

        # Pass the main-window position to the worker so the automation
        # browser can be placed relative to it.
        if parent_widget:
            window_pos = parent_widget.pos()
            params["window_position"] = (window_pos.x(), window_pos.y())

        # Generate the browser fingerprint BEFORE account creation.
        try:
            from infrastructure.services.fingerprint.fingerprint_generator_service import FingerprintGeneratorService
            from domain.entities.browser_fingerprint import BrowserFingerprint
            import uuid

            fingerprint_service = FingerprintGeneratorService()

            # Generate a fresh fingerprint for this account.
            fingerprint_data = fingerprint_service.generate_fingerprint()

            # Build a BrowserFingerprint entity with all required data.
            fingerprint = BrowserFingerprint.from_dict(fingerprint_data)
            fingerprint.fingerprint_id = str(uuid.uuid4())
            fingerprint.account_bound = True
            fingerprint.rotation_seed = str(uuid.uuid4())

            # Convert to a plain dict so it can travel inside params.
            params["fingerprint"] = fingerprint.to_dict()

            logger.info(f"Fingerprint für neue Account-Erstellung generiert: {fingerprint.fingerprint_id}")
        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints: {e}")
            # Continue without a fingerprint — it will be generated later.

        # Start the worker thread.
        session_controller = getattr(self, 'session_controller', None)
        generator_tab_ref = generator_tab if hasattr(generator_tab, 'store_created_account') else None

        self.worker_thread = OkRuWorkerThread(
            params,
            session_controller=session_controller,
            generator_tab=generator_tab_ref
        )

        # Forward worker updates to the forge dialog.
        self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
        self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(self._handle_finished)
        self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Also mirror log/progress into the generator tab as a backup view.
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        self.worker_thread.start()

        # Show the dialog and start the animation.
        self.forge_dialog.start_animation()
        self.forge_dialog.show()

    def stop_account_creation(self):
        """Stop the OK.ru account creation and reset the UI."""
        if self.worker_thread and self.worker_thread.isRunning():
            self.worker_thread.stop()
            generator_tab = self.get_generator_tab()
            generator_tab.add_log("Account-Erstellung wurde abgebrochen")
            generator_tab.set_running(False)
            generator_tab.set_progress(0)

        # Close the forge dialog if present.
        # NOTE(review): source formatting was ambiguous here; this close is
        # placed at method level (run regardless of worker state), matching
        # the pattern in _handle_error/_handle_finished — confirm intent.
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

    def validate_inputs(self, inputs):
        """
        Validate the inputs for account creation.

        Returns a (is_valid, error_message) tuple; error_message is a
        user-facing German string.
        """
        # Run the shared validations from BasePlatformController first.
        valid, error_msg = super().validate_inputs(inputs)
        if not valid:
            return valid, error_msg

        # OK.ru-specific validation: minimum age of 14.
        age = inputs.get("age", 0)
        if age < 14:
            return False, "Das Alter muss mindestens 14 sein (OK.ru-Anforderung)."

        # Phone number validation for OK.ru — disabled for now during testing.
        # TODO: add a phone number field to the UI, then re-enable.
        # phone_number = inputs.get("phone_number", "")
        # if not phone_number:
        #     return False, "Telefonnummer ist erforderlich für OK.ru-Registrierung."
        #
        # # Simple phone number validation
        # if len(phone_number) < 10:
        #     return False, "Telefonnummer muss mindestens 10 Ziffern haben."

        return True, ""

    def _handle_error(self, error_msg: str):
        """Handle errors during account creation."""
        # Close the forge dialog first so the error is not hidden behind it.
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Show the error and reset the running state.
        generator_tab = self.get_generator_tab()
        generator_tab.show_error(error_msg)
        generator_tab.set_running(False)

    def _handle_finished(self, result: dict):
        """Handle completion of the account creation."""
        # Close the forge dialog.
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Delegate normal result processing to the base controller hook.
        self.handle_account_created(result)
|
||||
443
controllers/platform_controllers/rotation_error_handler.py
Normale Datei
443
controllers/platform_controllers/rotation_error_handler.py
Normale Datei
@ -0,0 +1,443 @@
|
||||
"""
|
||||
Comprehensive error handling and fallback mechanisms for method rotation system.
|
||||
Provides robust error recovery and graceful degradation strategies.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Any, List, Optional, Callable
|
||||
from enum import Enum
|
||||
|
||||
from domain.entities.method_rotation import MethodStrategy, RotationSession, RiskLevel
|
||||
from application.use_cases.method_rotation_use_case import MethodRotationUseCase
|
||||
|
||||
|
||||
class ErrorSeverity(Enum):
    """Error severity levels that drive rotation decisions."""
    LOW = "low"            # Minor issues, continue with current method
    MEDIUM = "medium"      # Moderate issues, consider rotation
    HIGH = "high"          # Serious issues, rotate immediately
    CRITICAL = "critical"  # Critical failure, enable emergency mode
|
||||
|
||||
|
||||
class RotationErrorHandler:
|
||||
"""
|
||||
Handles errors and provides fallback mechanisms for the rotation system.
|
||||
Implements intelligent error classification and recovery strategies.
|
||||
"""
|
||||
|
||||
def __init__(self, method_rotation_use_case: MethodRotationUseCase):
|
||||
self.method_rotation_use_case = method_rotation_use_case
|
||||
self.logger = logging.getLogger(self.__class__.__name__)
|
||||
|
||||
# Error classification patterns
|
||||
self.error_patterns = self._init_error_patterns()
|
||||
|
||||
# Fallback strategies
|
||||
self.fallback_strategies = self._init_fallback_strategies()
|
||||
|
||||
# Emergency mode settings
|
||||
self.emergency_thresholds = {
|
||||
'failure_rate_threshold': 0.8,
|
||||
'consecutive_failures_threshold': 5,
|
||||
'time_window_minutes': 30
|
||||
}
|
||||
|
||||
def handle_rotation_error(self, platform: str, session_id: str,
|
||||
error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Handle rotation system errors with intelligent recovery.
|
||||
|
||||
Args:
|
||||
platform: Platform name
|
||||
session_id: Current rotation session ID
|
||||
error_details: Error information
|
||||
|
||||
Returns:
|
||||
Recovery action result
|
||||
"""
|
||||
try:
|
||||
# Classify error severity
|
||||
severity = self._classify_error_severity(error_details)
|
||||
|
||||
# Log error with classification
|
||||
self.logger.warning(f"Rotation error on {platform}: {error_details.get('message', 'Unknown')} (Severity: {severity.value})")
|
||||
|
||||
# Choose recovery strategy based on severity
|
||||
if severity == ErrorSeverity.CRITICAL:
|
||||
return self._handle_critical_error(platform, session_id, error_details)
|
||||
elif severity == ErrorSeverity.HIGH:
|
||||
return self._handle_high_severity_error(platform, session_id, error_details)
|
||||
elif severity == ErrorSeverity.MEDIUM:
|
||||
return self._handle_medium_severity_error(platform, session_id, error_details)
|
||||
else:
|
||||
return self._handle_low_severity_error(platform, session_id, error_details)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error in rotation error handler: {e}")
|
||||
return self._fallback_to_original_behavior(platform, error_details)
|
||||
|
||||
def handle_system_failure(self, platform: str, failure_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Handle complete rotation system failures with graceful degradation.
|
||||
|
||||
Args:
|
||||
platform: Platform name
|
||||
failure_details: System failure information
|
||||
|
||||
Returns:
|
||||
Fallback strategy result
|
||||
"""
|
||||
self.logger.error(f"Rotation system failure on {platform}: {failure_details}")
|
||||
|
||||
try:
|
||||
# Attempt to gracefully shut down rotation for this platform
|
||||
self._disable_rotation_for_platform(platform, "system_failure")
|
||||
|
||||
# Enable emergency mode with safest methods only
|
||||
self.method_rotation_use_case.enable_emergency_mode(
|
||||
platform, f"System failure: {failure_details.get('message', 'Unknown')}"
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'emergency_mode_enabled',
|
||||
'fallback_method': 'email',
|
||||
'message': 'System failure handled, emergency mode activated'
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to handle system failure: {e}")
|
||||
return self._fallback_to_original_behavior(platform, failure_details)
|
||||
|
||||
def check_and_handle_emergency_conditions(self, platform: str) -> bool:
|
||||
"""
|
||||
Check if emergency conditions are met and handle accordingly.
|
||||
|
||||
Args:
|
||||
platform: Platform to check
|
||||
|
||||
Returns:
|
||||
True if emergency mode was triggered
|
||||
"""
|
||||
try:
|
||||
# Get platform statistics
|
||||
stats = self.method_rotation_use_case.strategy_repo.get_platform_statistics(platform)
|
||||
|
||||
# Check failure rate threshold
|
||||
recent_failures = stats.get('recent_failures_24h', 0)
|
||||
recent_successes = stats.get('recent_successes_24h', 0)
|
||||
total_recent = recent_failures + recent_successes
|
||||
|
||||
if total_recent > 0:
|
||||
failure_rate = recent_failures / total_recent
|
||||
|
||||
if failure_rate >= self.emergency_thresholds['failure_rate_threshold']:
|
||||
self.logger.warning(f"High failure rate detected on {platform}: {failure_rate:.2f}")
|
||||
self.method_rotation_use_case.enable_emergency_mode(
|
||||
platform, f"High failure rate: {failure_rate:.2f}"
|
||||
)
|
||||
return True
|
||||
|
||||
# Check consecutive failures
|
||||
session_stats = self.method_rotation_use_case.session_repo.get_session_statistics(platform, days=1)
|
||||
failed_sessions = session_stats.get('failed_sessions', 0)
|
||||
|
||||
if failed_sessions >= self.emergency_thresholds['consecutive_failures_threshold']:
|
||||
self.logger.warning(f"High consecutive failures on {platform}: {failed_sessions}")
|
||||
self.method_rotation_use_case.enable_emergency_mode(
|
||||
platform, f"Consecutive failures: {failed_sessions}"
|
||||
)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to check emergency conditions: {e}")
|
||||
return False
|
||||
|
||||
def recover_from_method_exhaustion(self, platform: str, session_id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Handle the case when all available methods have been exhausted.
|
||||
|
||||
Args:
|
||||
platform: Platform name
|
||||
session_id: Current session ID
|
||||
|
||||
Returns:
|
||||
Recovery strategy result
|
||||
"""
|
||||
self.logger.warning(f"Method exhaustion on {platform}, implementing recovery strategy")
|
||||
|
||||
try:
|
||||
# Enable emergency mode
|
||||
self.method_rotation_use_case.enable_emergency_mode(
|
||||
platform, "method_exhaustion"
|
||||
)
|
||||
|
||||
# Reset method cooldowns for emergency use
|
||||
self._reset_method_cooldowns(platform)
|
||||
|
||||
# Use safest method available
|
||||
emergency_methods = self.method_rotation_use_case.strategy_repo.get_emergency_methods(platform)
|
||||
|
||||
if emergency_methods:
|
||||
safest_method = emergency_methods[0]
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'emergency_recovery',
|
||||
'method': safest_method.method_name,
|
||||
'message': f'Recovered using emergency method: {safest_method.method_name}'
|
||||
}
|
||||
else:
|
||||
# No emergency methods available, fall back to original behavior
|
||||
return self._fallback_to_original_behavior(platform, {
|
||||
'error': 'method_exhaustion',
|
||||
'message': 'No emergency methods available'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to recover from method exhaustion: {e}")
|
||||
return self._fallback_to_original_behavior(platform, {'error': str(e)})
|
||||
|
||||
def _classify_error_severity(self, error_details: Dict[str, Any]) -> ErrorSeverity:
|
||||
"""Classify error severity based on error patterns"""
|
||||
error_message = error_details.get('message', '').lower()
|
||||
error_type = error_details.get('error_type', '').lower()
|
||||
|
||||
# Critical errors
|
||||
critical_patterns = [
|
||||
'system failure', 'database error', 'connection refused',
|
||||
'authentication failed', 'service unavailable'
|
||||
]
|
||||
|
||||
if any(pattern in error_message or pattern in error_type for pattern in critical_patterns):
|
||||
return ErrorSeverity.CRITICAL
|
||||
|
||||
# High severity errors
|
||||
high_patterns = [
|
||||
'account suspended', 'rate limit exceeded', 'quota exceeded',
|
||||
'blocked', 'banned', 'captcha failed multiple times'
|
||||
]
|
||||
|
||||
if any(pattern in error_message or pattern in error_type for pattern in high_patterns):
|
||||
return ErrorSeverity.HIGH
|
||||
|
||||
# Medium severity errors
|
||||
medium_patterns = [
|
||||
'timeout', 'verification failed', 'invalid credentials',
|
||||
'network error', 'temporary failure'
|
||||
]
|
||||
|
||||
if any(pattern in error_message or pattern in error_type for pattern in medium_patterns):
|
||||
return ErrorSeverity.MEDIUM
|
||||
|
||||
# Default to low severity
|
||||
return ErrorSeverity.LOW
|
||||
|
||||
def _handle_critical_error(self, platform: str, session_id: str,
|
||||
error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle critical errors with immediate emergency mode activation"""
|
||||
self.logger.error(f"Critical error on {platform}: {error_details}")
|
||||
|
||||
# Enable emergency mode immediately
|
||||
self.method_rotation_use_case.enable_emergency_mode(
|
||||
platform, f"Critical error: {error_details.get('message', 'Unknown')}"
|
||||
)
|
||||
|
||||
# Archive current session
|
||||
self.method_rotation_use_case.session_repo.archive_session(session_id, False)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'emergency_mode',
|
||||
'severity': 'critical',
|
||||
'message': 'Critical error handled, emergency mode enabled'
|
||||
}
|
||||
|
||||
def _handle_high_severity_error(self, platform: str, session_id: str,
|
||||
error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle high severity errors with method blocking and rotation"""
|
||||
error_type = error_details.get('error_type', 'unknown')
|
||||
|
||||
# Block the current method temporarily
|
||||
session = self.method_rotation_use_case.session_repo.find_by_id(session_id)
|
||||
if session:
|
||||
current_method = session.current_method
|
||||
|
||||
# Block method for extended period
|
||||
self.method_rotation_use_case.state_repo.block_method(
|
||||
platform, current_method, f"High severity error: {error_type}"
|
||||
)
|
||||
|
||||
# Attempt rotation to different method
|
||||
next_method = self.method_rotation_use_case.rotate_method(
|
||||
session_id, f"high_severity_error_{error_type}"
|
||||
)
|
||||
|
||||
if next_method:
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'method_rotation',
|
||||
'blocked_method': current_method,
|
||||
'new_method': next_method.method_name,
|
||||
'message': f'Rotated from {current_method} to {next_method.method_name}'
|
||||
}
|
||||
|
||||
# Check if emergency mode should be triggered
|
||||
if self.check_and_handle_emergency_conditions(platform):
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'emergency_mode_triggered',
|
||||
'message': 'Emergency conditions detected, emergency mode enabled'
|
||||
}
|
||||
|
||||
return {
|
||||
'success': False,
|
||||
'action': 'rotation_failed',
|
||||
'message': 'Could not rotate to alternative method'
|
||||
}
|
||||
|
||||
def _handle_medium_severity_error(self, platform: str, session_id: str,
|
||||
error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle medium severity errors with conditional rotation"""
|
||||
# Attempt rotation if failure count is high
|
||||
session = self.method_rotation_use_case.session_repo.find_by_id(session_id)
|
||||
|
||||
if session and session.failure_count >= 2:
|
||||
next_method = self.method_rotation_use_case.rotate_method(
|
||||
session_id, f"medium_severity_error_{error_details.get('error_type', 'unknown')}"
|
||||
)
|
||||
|
||||
if next_method:
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'conditional_rotation',
|
||||
'new_method': next_method.method_name,
|
||||
'message': f'Rotated to {next_method.method_name} after {session.failure_count} failures'
|
||||
}
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'continue_current_method',
|
||||
'message': 'Continuing with current method, failure count below threshold'
|
||||
}
|
||||
|
||||
def _handle_low_severity_error(self, platform: str, session_id: str,
|
||||
error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle low severity errors with minimal intervention"""
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'continue_current_method',
|
||||
'message': 'Low severity error, continuing with current method'
|
||||
}
|
||||
|
||||
def _fallback_to_original_behavior(self, platform: str,
|
||||
error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Fallback to original behavior when rotation system fails completely"""
|
||||
self.logger.warning(f"Falling back to original behavior for {platform}")
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'action': 'fallback_to_original',
|
||||
'method': 'email', # Default method
|
||||
'message': 'Rotation system disabled, using original behavior',
|
||||
'fallback_reason': error_details.get('message', 'Unknown error')
|
||||
}
|
||||
|
||||
def _disable_rotation_for_platform(self, platform: str, reason: str) -> None:
|
||||
"""Temporarily disable rotation for a specific platform"""
|
||||
try:
|
||||
# Block all methods except the safest one
|
||||
strategies = self.method_rotation_use_case.strategy_repo.find_active_by_platform(platform)
|
||||
|
||||
for strategy in strategies[1:]: # Keep the first (safest) method active
|
||||
self.method_rotation_use_case.strategy_repo.disable_method(
|
||||
platform, strategy.method_name, f"Platform disabled: {reason}"
|
||||
)
|
||||
|
||||
self.logger.info(f"Rotation disabled for {platform}: {reason}")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to disable rotation for {platform}: {e}")
|
||||
|
||||
def _reset_method_cooldowns(self, platform: str) -> None:
|
||||
"""Reset all method cooldowns for emergency recovery"""
|
||||
try:
|
||||
strategies = self.method_rotation_use_case.strategy_repo.find_by_platform(platform)
|
||||
|
||||
for strategy in strategies:
|
||||
strategy.last_failure = None
|
||||
strategy.cooldown_period = 0
|
||||
self.method_rotation_use_case.strategy_repo.save(strategy)
|
||||
|
||||
self.logger.info(f"Method cooldowns reset for {platform}")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to reset cooldowns for {platform}: {e}")
|
||||
|
||||
def _init_error_patterns(self) -> Dict[str, List[str]]:
|
||||
"""Initialize error classification patterns"""
|
||||
return {
|
||||
'rate_limit': [
|
||||
'rate limit', 'too many requests', 'quota exceeded',
|
||||
'zu viele anfragen', 'rate limiting', 'throttled'
|
||||
],
|
||||
'account_suspended': [
|
||||
'suspended', 'banned', 'blocked', 'gesperrt',
|
||||
'account disabled', 'violation', 'restricted'
|
||||
],
|
||||
'network_error': [
|
||||
'network error', 'connection failed', 'timeout',
|
||||
'netzwerkfehler', 'verbindung fehlgeschlagen', 'dns error'
|
||||
],
|
||||
'verification_failed': [
|
||||
'verification failed', 'captcha', 'human verification',
|
||||
'verifizierung fehlgeschlagen', 'bot detected'
|
||||
],
|
||||
'system_error': [
|
||||
'internal server error', 'service unavailable', 'maintenance',
|
||||
'server fehler', 'wartung', 'system down'
|
||||
]
|
||||
}
|
||||
|
||||
def _init_fallback_strategies(self) -> Dict[str, Callable]:
|
||||
"""Initialize fallback strategy functions"""
|
||||
return {
|
||||
'rate_limit': self._handle_rate_limit_fallback,
|
||||
'account_suspended': self._handle_suspension_fallback,
|
||||
'network_error': self._handle_network_fallback,
|
||||
'verification_failed': self._handle_verification_fallback,
|
||||
'system_error': self._handle_system_error_fallback
|
||||
}
|
||||
|
||||
def _handle_rate_limit_fallback(self, platform: str, error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle rate limiting with extended cooldowns"""
|
||||
# Extend cooldown periods for all methods
|
||||
strategies = self.method_rotation_use_case.strategy_repo.find_by_platform(platform)
|
||||
|
||||
for strategy in strategies:
|
||||
strategy.cooldown_period = max(strategy.cooldown_period * 2, 1800) # At least 30 minutes
|
||||
self.method_rotation_use_case.strategy_repo.save(strategy)
|
||||
|
||||
return {'action': 'extended_cooldown', 'cooldown_minutes': 30}
|
||||
|
||||
def _handle_suspension_fallback(self, platform: str, error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle account suspension with method blocking"""
|
||||
# Enable emergency mode with only safest methods
|
||||
self.method_rotation_use_case.enable_emergency_mode(platform, "account_suspension")
|
||||
return {'action': 'emergency_mode', 'reason': 'account_suspension'}
|
||||
|
||||
def _handle_network_fallback(self, platform: str, error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle network errors with retry strategy"""
|
||||
return {'action': 'retry_with_delay', 'delay_seconds': 60}
|
||||
|
||||
def _handle_verification_fallback(self, platform: str, error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle verification failures with method rotation"""
|
||||
return {'action': 'rotate_method', 'reason': 'verification_failed'}
|
||||
|
||||
def _handle_system_error_fallback(self, platform: str, error_details: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Handle system errors with graceful degradation"""
|
||||
self._disable_rotation_for_platform(platform, "system_error")
|
||||
return {'action': 'disable_rotation', 'reason': 'system_error'}
|
||||
46
controllers/platform_controllers/safe_imports.py
Normale Datei
46
controllers/platform_controllers/safe_imports.py
Normale Datei
@ -0,0 +1,46 @@
|
||||
"""
|
||||
Safe imports for platform controllers.
|
||||
Provides fallback when PyQt5 is not available during testing.
|
||||
"""
|
||||
|
||||
try:
|
||||
from PyQt5.QtCore import QObject, QThread, pyqtSignal
|
||||
PYQT5_AVAILABLE = True
|
||||
except ImportError:
|
||||
# Fallback for testing without PyQt5
|
||||
class QObject:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
class QThread(QObject):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.running = True
|
||||
|
||||
def start(self):
|
||||
pass
|
||||
|
||||
def stop(self):
|
||||
self.running = False
|
||||
|
||||
def isRunning(self):
|
||||
return self.running
|
||||
|
||||
def quit(self):
|
||||
self.running = False
|
||||
|
||||
def wait(self):
|
||||
pass
|
||||
|
||||
def pyqtSignal(*args, **kwargs):
|
||||
"""Mock pyqtSignal for testing"""
|
||||
class MockSignal:
|
||||
def connect(self, func):
|
||||
pass
|
||||
def emit(self, *args):
|
||||
pass
|
||||
return MockSignal()
|
||||
|
||||
PYQT5_AVAILABLE = False
|
||||
|
||||
__all__ = ['QObject', 'QThread', 'pyqtSignal', 'PYQT5_AVAILABLE']
|
||||
419
controllers/platform_controllers/tiktok_controller.py
Normale Datei
419
controllers/platform_controllers/tiktok_controller.py
Normale Datei
@ -0,0 +1,419 @@
|
||||
"""
|
||||
Controller für TikTok-spezifische Funktionalität.
|
||||
Mit TextSimilarity-Integration für robusteres UI-Element-Matching.
|
||||
"""
|
||||
|
||||
import time
|
||||
import random
|
||||
from PyQt5.QtCore import QThread, pyqtSignal, QObject
|
||||
from typing import Dict, Any
|
||||
|
||||
from controllers.platform_controllers.base_controller import BasePlatformController
|
||||
from controllers.platform_controllers.base_worker_thread import BaseAccountCreationWorkerThread
|
||||
from views.tabs.generator_tab import GeneratorTab
|
||||
from views.tabs.accounts_tab import AccountsTab
|
||||
from views.tabs.settings_tab import SettingsTab
|
||||
from views.widgets.forge_animation_widget import ForgeAnimationDialog
|
||||
|
||||
from social_networks.tiktok.tiktok_automation import TikTokAutomation
|
||||
from utils.text_similarity import TextSimilarity
|
||||
from utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger("tiktok_controller")
|
||||
|
||||
# Legacy WorkerThread als Backup beibehalten
|
||||
class LegacyTikTokWorkerThread(QThread):
    """Legacy thread for TikTok account creation (kept as backup)."""

    # Qt signals emitted towards the UI layer.
    update_signal = pyqtSignal(str)      # short status text
    log_signal = pyqtSignal(str)         # one log line for the log view
    progress_signal = pyqtSignal(int)    # progress percentage (0-100)
    finished_signal = pyqtSignal(dict)   # full result dict on success
    error_signal = pyqtSignal(str)       # user-friendly error message

    def __init__(self, params):
        """Store the creation parameters and prepare fuzzy error matching.

        Args:
            params: Dict of account-creation settings; this thread reads
                full_name, age, headless, use_proxy, proxy_type, debug,
                email_domain and additional_params.
        """
        super().__init__()
        self.params = params
        self.running = True

        # TextSimilarity for robust (fuzzy) error matching
        self.text_similarity = TextSimilarity(default_threshold=0.7)

        # Generic error terms (German and English) used for fuzzy matching
        self.error_patterns = [
            "Fehler", "Error", "Fehlgeschlagen", "Failed", "Problem", "Issue",
            "Nicht möglich", "Not possible", "Bitte versuchen Sie es erneut",
            "Please try again", "Konnte nicht", "Could not", "Timeout"
        ]

    def run(self):
        """Execute the account creation (thread entry point)."""
        try:
            self.log_signal.emit("TikTok-Account-Erstellung gestartet...")
            self.progress_signal.emit(10)

            # Initialize the TikTok automation
            automation = TikTokAutomation(
                headless=self.params.get("headless", False),
                use_proxy=self.params.get("use_proxy", False),
                proxy_type=self.params.get("proxy_type"),
                save_screenshots=True,
                debug=self.params.get("debug", False),
                email_domain=self.params.get("email_domain", "z5m7q9dk3ah2v1plx6ju.com")
            )

            self.update_signal.emit("TikTok-Automation initialisiert")
            self.progress_signal.emit(20)

            # Register the account
            self.log_signal.emit(f"Registriere Account für: {self.params['full_name']}")

            # Register account - always via e-mail, never via phone number
            result = automation.register_account(
                full_name=self.params["full_name"],
                age=self.params["age"],
                registration_method="email",  # always e-mail registration
                phone_number=None,  # no phone number
                **self.params.get("additional_params", {})
            )

            self.progress_signal.emit(100)

            if result["success"]:
                self.log_signal.emit("Account erfolgreich erstellt!")
                self.finished_signal.emit(result)
            else:
                # Robust error handling with TextSimilarity
                error_msg = result.get("error", "Unbekannter Fehler")

                # Try to present the error in a more user-friendly form
                user_friendly_error = self._interpret_error(error_msg)

                self.log_signal.emit(f"Fehler bei der Account-Erstellung: {user_friendly_error}")
                self.error_signal.emit(user_friendly_error)

        except Exception as e:
            logger.error(f"Fehler im Worker-Thread: {e}")
            self.log_signal.emit(f"Schwerwiegender Fehler: {str(e)}")
            self.error_signal.emit(str(e))
            self.progress_signal.emit(0)

    def _interpret_error(self, error_msg: str) -> str:
        """
        Interpret an error message and return a more user-friendly version.
        Uses TextSimilarity for robust error matching.

        Args:
            error_msg: The original error message

        Returns:
            str: User-friendly error message (the original message if no
            known pattern matched)
        """
        # Known error patterns and their interpretations
        error_interpretations = {
            "captcha": "TikTok hat einen Captcha-Test angefordert. Versuchen Sie es später erneut oder nutzen Sie einen anderen Proxy.",
            "verification": "Es gab ein Problem mit der Verifizierung des Accounts. Bitte prüfen Sie die E-Mail-Einstellungen.",
            "proxy": "Problem mit der Proxy-Verbindung. Bitte prüfen Sie Ihre Proxy-Einstellungen.",
            "timeout": "Zeitüberschreitung bei der Verbindung. Bitte überprüfen Sie Ihre Internetverbindung.",
            "username": "Der gewählte Benutzername ist bereits vergeben oder nicht zulässig.",
            "password": "Das Passwort erfüllt nicht die Anforderungen von TikTok.",
            "email": "Die E-Mail-Adresse konnte nicht verwendet werden. Bitte nutzen Sie eine andere E-Mail-Domain.",
            "phone": "Die Telefonnummer konnte nicht für die Registrierung verwendet werden.",
            "age": "Das eingegebene Alter erfüllt nicht die Anforderungen von TikTok.",
            "too_many_attempts": "Zu viele Registrierungsversuche. Bitte warten Sie und versuchen Sie es später erneut."
        }

        # Try to categorize the error.
        # NOTE(review): the fuzzy branch compares the *generic* terms in
        # self.error_patterns ("Fehler", "Error", ...) against error_msg.
        # If one of them is similar, the interpretation of whichever
        # category the outer loop is currently on (usually the first,
        # "captcha") is returned even though that category itself did not
        # match — confirm this is intended.
        for pattern, interpretation in error_interpretations.items():
            for error_term in self.error_patterns:
                if (pattern in error_msg.lower() or
                    self.text_similarity.is_similar(error_term, error_msg, threshold=0.7)):
                    return interpretation

        # Fallback: return the original error message
        return error_msg

    def stop(self):
        """Stop the thread.

        NOTE(review): terminate() aborts the thread without cleanup; Qt
        recommends cooperative shutdown (quit()/wait()) — confirm the
        hard kill is intentional for this legacy path.
        """
        self.running = False
        self.terminate()
||||
|
||||
|
||||
# Neue Implementation mit BaseWorkerThread
|
||||
class TikTokWorkerThread(BaseAccountCreationWorkerThread):
    """Refactored TikTok worker thread implementation.

    The generic creation workflow lives in BaseAccountCreationWorkerThread;
    this subclass only supplies the TikTok automation class and the
    TikTok-specific error interpretations.
    """

    def __init__(self, params, session_controller=None, generator_tab=None):
        # "TikTok" is the platform name forwarded to the base worker.
        super().__init__(params, "TikTok", session_controller, generator_tab)

    def get_automation_class(self):
        # Local import mirrors the module-level one; returns the class
        # itself (not an instance) for the base worker to construct.
        from social_networks.tiktok.tiktok_automation import TikTokAutomation
        return TikTokAutomation

    def get_error_interpretations(self) -> Dict[str, str]:
        # Maps lowercase substrings of raw error messages to the German
        # user-facing texts shown in the UI.
        return {
            "captcha": "TikTok hat einen Captcha-Test angefordert. Versuchen Sie es später erneut oder nutzen Sie einen anderen Proxy.",
            "verification": "Es gab ein Problem mit der Verifizierung des Accounts. Bitte prüfen Sie die E-Mail-Einstellungen.",
            "proxy": "Problem mit der Proxy-Verbindung. Bitte prüfen Sie Ihre Proxy-Einstellungen.",
            "timeout": "Zeitüberschreitung bei der Verbindung. Bitte überprüfen Sie Ihre Internetverbindung.",
            "username": "Der gewählte Benutzername ist bereits vergeben oder nicht zulässig.",
            "password": "Das Passwort erfüllt nicht die Anforderungen von TikTok.",
            "email": "Die E-Mail-Adresse konnte nicht verwendet werden. Bitte nutzen Sie eine andere E-Mail-Domain.",
            "phone": "Die Telefonnummer konnte nicht für die Registrierung verwendet werden.",
            "phone number required": "Telefonnummer erforderlich",
            "invalid code": "Ungültiger Verifizierungscode",
            "age": "Das eingegebene Alter erfüllt nicht die Anforderungen von TikTok.",
            "too_many_attempts": "Zu viele Registrierungsversuche. Bitte warten Sie und versuchen Sie es später erneut.",
            "rate limit": "Zu viele Versuche - bitte später erneut versuchen",
            "already taken": "Der gewählte Benutzername ist bereits vergeben",
            "weak password": "Das Passwort ist zu schwach",
            "network error": "Netzwerkfehler - bitte Internetverbindung prüfen"
        }
||||
|
||||
class TikTokController(BasePlatformController):
    """Controller for TikTok-specific functionality."""

    def __init__(self, db_manager, proxy_rotator, email_handler, language_manager=None):
        super().__init__("TikTok", db_manager, proxy_rotator, email_handler, language_manager)
        # Active worker thread for the current account creation, if any.
        self.worker_thread = None

        # TextSimilarity for robust UI element matching
        self.text_similarity = TextSimilarity(default_threshold=0.75)

    def create_generator_tab(self):
        """Create and wire up the TikTok generator tab."""
        generator_tab = GeneratorTab(self.platform_name, self.language_manager)

        # TikTok-specific adjustments:
        # override this method if specific adjustments are needed.

        # Connect UI signals to the controller
        generator_tab.start_requested.connect(self.start_account_creation)
        generator_tab.stop_requested.connect(self.stop_account_creation)

        return generator_tab

    def start_account_creation(self, params):
        """Start the TikTok account creation.

        Validates the inputs, prepares the UI (progress, forge animation
        dialog), pre-generates a browser fingerprint and launches the
        worker thread.
        """
        super().start_account_creation(params)

        # Validate inputs before doing anything visible
        is_valid, error_msg = self.validate_inputs(params)
        if not is_valid:
            self.get_generator_tab().show_error(error_msg)
            return

        # Reset the UI to a fresh "running" state
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

        # Create and show the forge animation dialog
        parent_widget = generator_tab.window()  # main window as parent
        self.forge_dialog = ForgeAnimationDialog(parent_widget, "TikTok")
        self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
        self.forge_dialog.closed.connect(self.stop_account_creation)

        # Pass the main window position down to the automation
        if parent_widget:
            window_pos = parent_widget.pos()
            params["window_position"] = (window_pos.x(), window_pos.y())

        # Generate the fingerprint BEFORE account creation
        try:
            from infrastructure.services.fingerprint.fingerprint_generator_service import FingerprintGeneratorService
            from domain.entities.browser_fingerprint import BrowserFingerprint
            import uuid

            fingerprint_service = FingerprintGeneratorService()

            # Generate a fresh fingerprint for this account
            fingerprint = fingerprint_service.generate_fingerprint()

            # This is already a BrowserFingerprint object, not a dict!
            fingerprint.fingerprint_id = str(uuid.uuid4())
            fingerprint.account_bound = True
            fingerprint.rotation_seed = str(uuid.uuid4())

            # Convert to a dictionary for transfer to the worker
            params["fingerprint"] = fingerprint.to_dict()

            logger.info(f"Fingerprint für neue Account-Erstellung generiert: {fingerprint.fingerprint_id}")
        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints: {e}")
            # Continue without a fingerprint - it will be generated later

        # Start the worker thread with optional collaborators
        session_controller = getattr(self, 'session_controller', None)
        generator_tab_ref = generator_tab if hasattr(generator_tab, 'store_created_account') else None

        self.worker_thread = TikTokWorkerThread(
            params,
            session_controller=session_controller,
            generator_tab=generator_tab_ref
        )
        # Forward updates to the forge dialog
        self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
        self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(self._handle_finished)
        self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Also mirror log/progress into the generator tab as a backup
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        self.worker_thread.start()

        # Show the dialog and start the animation
        self.forge_dialog.start_animation()
        self.forge_dialog.show()

    def stop_account_creation(self):
        """Stop the TikTok account creation and reset the UI."""
        if self.worker_thread and self.worker_thread.isRunning():
            self.worker_thread.stop()
            generator_tab = self.get_generator_tab()
            generator_tab.add_log("Account-Erstellung wurde abgebrochen")
            generator_tab.set_running(False)
            generator_tab.set_progress(0)

        # Close the forge dialog if present
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

    def handle_account_created(self, result):
        """Process successfully created accounts via the SessionController.

        Saves the account (Clean Architecture path) and shows the result
        to the user; falls back to the legacy signal-based path when no
        SessionController is available.
        """
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)

        # Extract the account data from the worker result
        account_data = result.get("account_data", {})

        # Save account and session through the SessionController
        if hasattr(self, 'session_controller') and self.session_controller:
            try:
                # NOTE(review): session_data is extracted but not passed to
                # create_and_save_account - confirm whether it is needed.
                session_data = result.get("session_data", {})
                save_result = self.session_controller.create_and_save_account(
                    platform=self.platform_name,
                    account_data=account_data
                )

                if save_result.get('success'):
                    logger.info(f"Account und Session erfolgreich gespeichert")

                    # Show the success message (only once!)
                    account_info = save_result.get('account_data', {})
                    from PyQt5.QtWidgets import QMessageBox
                    QMessageBox.information(
                        generator_tab,
                        "Erfolg",
                        f"Account erfolgreich erstellt!\n\n"
                        f"Benutzername: {account_info.get('username', '')}\n"
                        f"Passwort: {account_info.get('password', '')}\n"
                        f"E-Mail/Telefon: {account_info.get('email') or account_info.get('phone', '')}"
                    )

                    # Signal the main window to return to the main page
                    if hasattr(self, 'return_to_main_requested') and callable(self.return_to_main_requested):
                        self.return_to_main_requested()
                else:
                    error_msg = save_result.get('message', 'Unbekannter Fehler')
                    logger.error(f"Fehler beim Speichern: {error_msg}")
                    from views.widgets.modern_message_box import show_error
                    show_error(
                        generator_tab,
                        "Fehler beim Speichern",
                        f"Beim Speichern des Accounts ist ein Fehler aufgetreten:\n\n{error_msg}"
                    )
            except Exception as e:
                logger.error(f"Fehler beim Speichern des Accounts: {e}")
                from views.widgets.modern_message_box import show_critical
                show_critical(
                    generator_tab,
                    "Unerwarteter Fehler",
                    f"Ein unerwarteter Fehler ist beim Speichern des Accounts aufgetreten:\n\n{str(e)}"
                )
        else:
            # Fallback: legacy path when no SessionController is available
            logger.warning("SessionController nicht verfügbar, verwende alte Methode")
            generator_tab.account_created.emit(self.platform_name, account_data)
            if hasattr(self, 'return_to_main_requested') and callable(self.return_to_main_requested):
                self.return_to_main_requested()

    # save_account_to_db was removed - accounts are now saved through the SessionController

    def validate_inputs(self, inputs):
        """Validate the inputs for account creation.

        Uses TextSimilarity for more robust validation of the e-mail
        domain against a blacklist of known-problematic providers.

        Returns:
            tuple[bool, str]: (is_valid, error message - empty if valid)
        """
        # Run the base validations from BasePlatformController first
        valid, error_msg = super().validate_inputs(inputs)
        if not valid:
            return valid, error_msg

        # TikTok-specific validations
        age = inputs.get("age", 0)
        if age < 13:
            return False, "Das Alter muss mindestens 13 sein (TikTok-Anforderung)."

        # E-mail domain validation (registration is always via e-mail)
        email_domain = inputs.get("email_domain", "")
        # Blacklist of known problematic domains
        blacklisted_domains = ["temp-mail.org", "guerrillamail.com", "maildrop.cc"]

        # Check similarity against the blacklist with TextSimilarity
        for domain in blacklisted_domains:
            if self.text_similarity.is_similar(email_domain, domain, threshold=0.8):
                return False, f"Die E-Mail-Domain '{email_domain}' kann problematisch für die TikTok-Registrierung sein. Bitte verwenden Sie eine andere Domain."

        return True, ""

    def get_form_field_label(self, field_type: str) -> str:
        """
        Return a label text for a form field based on the field type.

        Args:
            field_type: Type of the form field

        Returns:
            str: Label text for the form field (capitalized field type
            when no mapping exists)
        """
        # Mapping from field types to German labels
        field_labels = {
            "full_name": "Vollständiger Name",
            "username": "Benutzername",
            "password": "Passwort",
            "email": "E-Mail-Adresse",
            "phone": "Telefonnummer",
            "age": "Alter",
            "birthday": "Geburtsdatum"
        }

        return field_labels.get(field_type, field_type.capitalize())

    def _handle_error(self, error_msg: str):
        """Handle errors during account creation: close the dialog and show the error."""
        # Close the forge dialog
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Show the error in the generator tab
        generator_tab = self.get_generator_tab()
        generator_tab.show_error(error_msg)
        generator_tab.set_running(False)

    def _handle_finished(self, result: dict):
        """Handle the end of the account creation: close the dialog, then process the result."""
        # Close the forge dialog
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Normal result processing
        self.handle_account_created(result)
||||
159
controllers/platform_controllers/vk_controller.py
Normale Datei
159
controllers/platform_controllers/vk_controller.py
Normale Datei
@ -0,0 +1,159 @@
|
||||
"""
|
||||
Controller für VK-spezifische Funktionalität
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
|
||||
from controllers.platform_controllers.base_controller import BasePlatformController
|
||||
from controllers.platform_controllers.base_worker_thread import BaseAccountCreationWorkerThread
|
||||
from social_networks.vk.vk_automation import VKAutomation
|
||||
from utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger("vk_controller")
|
||||
|
||||
class VKWorkerThread(BaseAccountCreationWorkerThread):
    """Worker thread for VK account creation.

    The generic workflow lives in BaseAccountCreationWorkerThread; this
    subclass only supplies the VK automation class and VK-specific error
    interpretations.
    """

    def __init__(self, params, session_controller=None, generator_tab=None):
        # "VK" is the platform name forwarded to the base worker.
        super().__init__(params, "VK", session_controller, generator_tab)

    def get_automation_class(self):
        """Return the VK automation class (not an instance)."""
        return VKAutomation

    def get_error_interpretations(self) -> Dict[str, str]:
        """VK-specific error interpretations.

        Maps lowercase substrings of raw error messages to the German
        user-facing texts shown in the UI.
        """
        return {
            "phone": "Diese Telefonnummer wird bereits verwendet oder ist ungültig.",
            "code": "Der Verifizierungscode ist ungültig.",
            "blocked": "Zu viele Versuche. Bitte versuchen Sie es später erneut.",
            "captcha": "Bitte lösen Sie das Captcha."
        }
||||
|
||||
|
||||
class VKController(BasePlatformController):
    """Controller for VK functionality."""

    def __init__(self, db_manager, proxy_rotator, email_handler, language_manager, theme_manager=None):
        super().__init__("vk", db_manager, proxy_rotator, email_handler, language_manager)
        # Active worker thread for the current account creation, if any
        # (added for consistency with TikTokController, which initializes
        # this attribute in __init__).
        self.worker_thread = None
        logger.info("VK Controller initialisiert")

    def get_worker_thread_class(self):
        """Return the worker thread class for VK."""
        return VKWorkerThread

    def get_platform_display_name(self) -> str:
        """Return the display name of the platform."""
        return "VK"

    def validate_account_data(self, account_data: Dict[str, Any]) -> Dict[str, Any]:
        """Validate the account data for VK.

        Returns:
            Dict with 'valid' (bool) and 'errors' (list of German
            error messages, empty when valid).
        """
        errors = []

        # Required fields
        if not account_data.get("first_name"):
            errors.append("Vorname ist erforderlich")

        if not account_data.get("last_name"):
            errors.append("Nachname ist erforderlich")

        if not account_data.get("phone"):
            errors.append("Telefonnummer ist für VK erforderlich")

        return {
            "valid": not errors,
            "errors": errors
        }

    def get_default_settings(self) -> Dict[str, Any]:
        """Return the default settings for VK (base settings plus VK overrides)."""
        settings = super().get_default_settings()
        settings.update({
            "require_phone": True,
            "require_email": False,
            "default_country_code": "+7",  # Russia
            "supported_languages": ["ru", "en", "de"],
            "default_language": "ru"
        })
        return settings

    def start_account_creation(self, params):
        """Start the VK account creation.

        Validates the inputs, prepares the UI (progress, forge animation
        dialog), pre-generates a browser fingerprint and launches the
        worker thread.
        """
        logger.info(f"Starte VK Account-Erstellung mit Parametern: {params}")

        # Validate inputs before doing anything visible
        is_valid, error_msg = self.validate_inputs(params)
        if not is_valid:
            self.get_generator_tab().show_error(error_msg)
            return

        # Reset the UI to a fresh "running" state
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

        # Create and show the forge animation dialog
        from views.widgets.forge_animation_widget import ForgeAnimationDialog
        parent_widget = generator_tab.window()
        self.forge_dialog = ForgeAnimationDialog(parent_widget, "VK")
        self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
        self.forge_dialog.closed.connect(self.stop_account_creation)

        # Pass the main window position down to the automation
        if parent_widget:
            window_pos = parent_widget.pos()
            params["window_position"] = (window_pos.x(), window_pos.y())

        # Generate the fingerprint before account creation
        try:
            from infrastructure.services.fingerprint.fingerprint_generator_service import FingerprintGeneratorService
            import uuid

            fingerprint_service = FingerprintGeneratorService()
            # BUGFIX: generate_fingerprint() already returns a
            # BrowserFingerprint entity (see TikTokController, which notes
            # "Das ist bereits ein BrowserFingerprint-Objekt, kein Dict!").
            # Previously the return value was passed through
            # BrowserFingerprint.from_dict(), which fails on a non-dict and
            # silently skipped the fingerprint via the except below.
            # TODO(review): confirm the service's return type.
            fingerprint = fingerprint_service.generate_fingerprint()
            fingerprint.fingerprint_id = str(uuid.uuid4())
            fingerprint.account_bound = True
            fingerprint.rotation_seed = str(uuid.uuid4())

            params["fingerprint"] = fingerprint.to_dict()
            logger.info(f"Fingerprint für VK Account-Erstellung generiert: {fingerprint.fingerprint_id}")
        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints: {e}")

        # Start the worker thread with optional collaborators
        session_controller = getattr(self, 'session_controller', None)
        generator_tab_ref = generator_tab if hasattr(generator_tab, 'store_created_account') else None

        self.worker_thread = VKWorkerThread(
            params,
            session_controller=session_controller,
            generator_tab=generator_tab_ref
        )

        # Forward updates to the forge dialog
        self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
        self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(self._handle_finished)
        self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Also mirror log/progress into the generator tab
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        self.worker_thread.start()

        # Show the dialog and start the animation
        self.forge_dialog.start_animation()
        self.forge_dialog.show()
||||
417
controllers/platform_controllers/x_controller.py
Normale Datei
417
controllers/platform_controllers/x_controller.py
Normale Datei
@ -0,0 +1,417 @@
|
||||
"""
|
||||
Controller für X (Twitter)-spezifische Funktionalität.
|
||||
Mit TextSimilarity-Integration für robusteres UI-Element-Matching.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import random
|
||||
from PyQt5.QtCore import QThread, pyqtSignal, QObject
|
||||
from typing import Dict, Any, Tuple
|
||||
|
||||
from controllers.platform_controllers.base_controller import BasePlatformController
|
||||
from controllers.platform_controllers.base_worker_thread import BaseAccountCreationWorkerThread
|
||||
from views.tabs.generator_tab import GeneratorTab
|
||||
from views.tabs.accounts_tab import AccountsTab
|
||||
from views.tabs.settings_tab import SettingsTab
|
||||
from views.widgets.forge_animation_widget import ForgeAnimationDialog
|
||||
|
||||
from social_networks.x.x_automation import XAutomation
|
||||
from utils.text_similarity import TextSimilarity
|
||||
from utils.logger import setup_logger
|
||||
|
||||
logger = setup_logger("x_controller")
|
||||
|
||||
# Legacy WorkerThread als Backup beibehalten
|
||||
class LegacyXWorkerThread(QThread):
|
||||
"""Legacy Thread für die X-Account-Erstellung (Backup)."""
|
||||
|
||||
# Signale
|
||||
update_signal = pyqtSignal(str)
|
||||
log_signal = pyqtSignal(str)
|
||||
progress_signal = pyqtSignal(int)
|
||||
finished_signal = pyqtSignal(dict)
|
||||
error_signal = pyqtSignal(str)
|
||||
|
||||
def __init__(self, params):
    """Store the creation parameters and prepare fuzzy error matching.

    Args:
        params: Dict of account-creation settings; the thread reads
            full_name, age, headless, use_proxy, proxy_type, debug,
            email_domain and additional_params.
    """
    super().__init__()
    self.params = params
    self.running = True

    # TextSimilarity for robust (fuzzy) error matching
    self.text_similarity = TextSimilarity(default_threshold=0.7)

    # Generic error terms (German and English) used for fuzzy matching
    self.error_patterns = [
        "Fehler", "Error", "Fehlgeschlagen", "Failed", "Problem", "Issue",
        "Nicht möglich", "Not possible", "Bitte versuchen Sie es erneut",
        "Please try again", "Konnte nicht", "Could not", "Timeout"
    ]
||||
|
||||
def run(self):
|
||||
"""Führt die Account-Erstellung aus."""
|
||||
try:
|
||||
self.log_signal.emit("X-Account-Erstellung gestartet...")
|
||||
self.progress_signal.emit(10)
|
||||
|
||||
# X-Automation initialisieren
|
||||
automation = XAutomation(
|
||||
headless=self.params.get("headless", False),
|
||||
use_proxy=self.params.get("use_proxy", False),
|
||||
proxy_type=self.params.get("proxy_type"),
|
||||
save_screenshots=True,
|
||||
debug=self.params.get("debug", False),
|
||||
email_domain=self.params.get("email_domain", "z5m7q9dk3ah2v1plx6ju.com")
|
||||
)
|
||||
|
||||
self.update_signal.emit("X-Automation initialisiert")
|
||||
self.progress_signal.emit(20)
|
||||
|
||||
# Account registrieren
|
||||
self.log_signal.emit(f"Registriere Account für: {self.params['full_name']}")
|
||||
|
||||
# Account registrieren - immer mit Email
|
||||
result = automation.register_account(
|
||||
full_name=self.params["full_name"],
|
||||
age=self.params["age"],
|
||||
registration_method="email", # Immer Email-Registrierung
|
||||
phone_number=None, # Keine Telefonnummer
|
||||
**self.params.get("additional_params", {})
|
||||
)
|
||||
|
||||
self.progress_signal.emit(100)
|
||||
|
||||
if result["success"]:
|
||||
self.log_signal.emit("Account erfolgreich erstellt!")
|
||||
self.finished_signal.emit(result)
|
||||
else:
|
||||
# Robuste Fehlerbehandlung mit TextSimilarity
|
||||
error_msg = result.get("error", "Unbekannter Fehler")
|
||||
|
||||
# Versuche, Fehler nutzerfreundlicher zu interpretieren
|
||||
user_friendly_error = self._interpret_error(error_msg)
|
||||
|
||||
self.log_signal.emit(f"Fehler bei der Account-Erstellung: {user_friendly_error}")
|
||||
self.error_signal.emit(user_friendly_error)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler im Worker-Thread: {e}")
|
||||
self.log_signal.emit(f"Schwerwiegender Fehler: {str(e)}")
|
||||
self.error_signal.emit(str(e))
|
||||
self.progress_signal.emit(0)
|
||||
|
||||
def _interpret_error(self, error_msg: str) -> str:
|
||||
"""
|
||||
Interpretiert Fehlermeldungen und gibt eine benutzerfreundlichere Version zurück.
|
||||
Verwendet TextSimilarity für robusteres Fehler-Matching.
|
||||
|
||||
Args:
|
||||
error_msg: Die ursprüngliche Fehlermeldung
|
||||
|
||||
Returns:
|
||||
str: Benutzerfreundliche Fehlermeldung
|
||||
"""
|
||||
# Bekannte Fehlermuster und deren Interpretationen
|
||||
error_interpretations = {
|
||||
"captcha": "X hat einen Captcha-Test angefordert. Versuchen Sie es später erneut oder nutzen Sie einen anderen Proxy.",
|
||||
"verification": "Es gab ein Problem mit der Verifizierung des Accounts. Bitte prüfen Sie die E-Mail-Einstellungen.",
|
||||
"proxy": "Problem mit der Proxy-Verbindung. Bitte prüfen Sie Ihre Proxy-Einstellungen.",
|
||||
"timeout": "Zeitüberschreitung bei der Verbindung. Bitte überprüfen Sie Ihre Internetverbindung.",
|
||||
"username": "Der gewählte Benutzername ist bereits vergeben oder nicht zulässig.",
|
||||
"password": "Das Passwort erfüllt nicht die Anforderungen von X.",
|
||||
"email": "Die E-Mail-Adresse konnte nicht verwendet werden. Bitte nutzen Sie eine andere E-Mail-Domain.",
|
||||
"phone": "Die Telefonnummer konnte nicht für die Registrierung verwendet werden.",
|
||||
"rate limit": "Zu viele Anfragen. Bitte warten Sie einige Minuten und versuchen Sie es erneut.",
|
||||
"suspended": "Account wurde gesperrt. Möglicherweise wurden Sicherheitsrichtlinien verletzt."
|
||||
}
|
||||
|
||||
# Versuche, den Fehler zu kategorisieren
|
||||
for pattern, interpretation in error_interpretations.items():
|
||||
for error_term in self.error_patterns:
|
||||
if (pattern in error_msg.lower() or
|
||||
self.text_similarity.is_similar(error_term, error_msg, threshold=0.7)):
|
||||
return interpretation
|
||||
|
||||
# Fallback: Originale Fehlermeldung zurückgeben
|
||||
return error_msg
|
||||
|
||||
def stop(self):
|
||||
"""Stoppt den Thread."""
|
||||
self.running = False
|
||||
self.terminate()
|
||||
|
||||
|
||||
# Neue Implementation mit BaseWorkerThread
|
||||
# New implementation built on the shared BaseWorkerThread
class XWorkerThread(BaseAccountCreationWorkerThread):
    """Refactored worker thread for X account creation.

    All orchestration lives in BaseAccountCreationWorkerThread; this
    subclass only supplies the platform automation class and the mapping
    of known error keywords to user-facing messages.
    """

    def __init__(self, params, session_controller=None, generator_tab=None):
        """Forward the parameters to the base worker, tagged as platform 'X'."""
        super().__init__(params, "X", session_controller, generator_tab)

    def get_automation_class(self):
        """Return the X automation class (imported lazily to avoid cycles)."""
        from social_networks.x.x_automation import XAutomation
        return XAutomation

    def get_error_interpretations(self) -> Dict[str, str]:
        """Map error keywords to user-friendly (German) messages.

        Order matters: the base worker matches keywords in insertion order.
        """
        interpretations = {
            "already taken": "Dieser Benutzername ist bereits vergeben",
            "weak password": "Das Passwort ist zu schwach",
            "rate limit": "Zu viele Versuche - bitte später erneut versuchen",
            "network error": "Netzwerkfehler - bitte Internetverbindung prüfen",
            "captcha": "Captcha-Verifizierung erforderlich",
            "verification": "Es gab ein Problem mit der Verifizierung des Accounts",
            "proxy": "Problem mit der Proxy-Verbindung",
            "timeout": "Zeitüberschreitung bei der Verbindung",
            "username": "Der gewählte Benutzername ist bereits vergeben oder nicht zulässig",
            "password": "Das Passwort erfüllt nicht die Anforderungen von X",
            "email": "Die E-Mail-Adresse konnte nicht verwendet werden",
            "suspended": "Account wurde gesperrt",
        }
        return interpretations
|
||||
|
||||
class XController(BasePlatformController):
    """Controller for X (Twitter)-specific functionality.

    Wires the generator tab to an XWorkerThread that drives the browser
    automation, shows a forge-animation dialog while the account is being
    created, and persists successful accounts via the SessionController.
    """

    def __init__(self, db_manager, proxy_rotator, email_handler, language_manager=None):
        """Initialize the controller with the shared service objects.

        Args:
            db_manager: Database access object.
            proxy_rotator: Proxy management service.
            email_handler: Email service used for verification mails.
            language_manager: Optional translation manager for the UI.
        """
        super().__init__("X", db_manager, proxy_rotator, email_handler, language_manager)
        # Currently running worker thread; None while idle.
        self.worker_thread = None

        # TextSimilarity for robust UI element matching
        self.text_similarity = TextSimilarity(default_threshold=0.75)

    def create_generator_tab(self):
        """Create the X generator tab and connect its start/stop signals.

        Returns:
            GeneratorTab: the tab wired to this controller.
        """
        generator_tab = GeneratorTab(self.platform_name, self.language_manager)

        # X-specific adjustments:
        # override this method if platform-specific tweaks are needed.

        # Connect signals
        generator_tab.start_requested.connect(self.start_account_creation)
        generator_tab.stop_requested.connect(self.stop_account_creation)

        return generator_tab

    def start_account_creation(self, params):
        """Start the X account creation.

        Validates the inputs, prepares the UI and forge dialog, generates a
        browser fingerprint up front, then launches the worker thread.

        Args:
            params: Dict of creation parameters coming from the generator tab.
        """
        super().start_account_creation(params)

        # Validate inputs
        is_valid, error_msg = self.validate_inputs(params)
        if not is_valid:
            self.get_generator_tab().show_error(error_msg)
            return

        # Update the UI
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(True)
        generator_tab.clear_log()
        generator_tab.set_progress(0)

        # Create and show the forge animation dialog
        parent_widget = generator_tab.window()  # main window as parent
        self.forge_dialog = ForgeAnimationDialog(parent_widget, "X")
        self.forge_dialog.cancel_clicked.connect(self.stop_account_creation)
        self.forge_dialog.closed.connect(self.stop_account_creation)

        # Fetch the window position from the main window so the automation
        # browser can be placed relative to it.
        if parent_widget:
            window_pos = parent_widget.pos()
            params["window_position"] = (window_pos.x(), window_pos.y())

        # Generate the fingerprint BEFORE account creation
        try:
            from infrastructure.services.fingerprint.fingerprint_generator_service import FingerprintGeneratorService
            from domain.entities.browser_fingerprint import BrowserFingerprint
            import uuid

            fingerprint_service = FingerprintGeneratorService()

            # Generate a fresh fingerprint for this account
            fingerprint_data = fingerprint_service.generate_fingerprint()

            # Build the BrowserFingerprint entity with all required data
            fingerprint = BrowserFingerprint.from_dict(fingerprint_data)
            fingerprint.fingerprint_id = str(uuid.uuid4())
            fingerprint.account_bound = True
            fingerprint.rotation_seed = str(uuid.uuid4())

            # Convert to a plain dictionary for transport to the worker
            params["fingerprint"] = fingerprint.to_dict()

            logger.info(f"Fingerprint für neue Account-Erstellung generiert: {fingerprint.fingerprint_id}")
        except Exception as e:
            logger.error(f"Fehler beim Generieren des Fingerprints: {e}")
            # Continue without a fingerprint - it will be generated later

        # Start the worker thread with optional collaborators
        session_controller = getattr(self, 'session_controller', None)
        generator_tab_ref = generator_tab if hasattr(generator_tab, 'store_created_account') else None

        self.worker_thread = XWorkerThread(
            params,
            session_controller=session_controller,
            generator_tab=generator_tab_ref
        )
        # Forward updates to the forge dialog
        self.worker_thread.update_signal.connect(self.forge_dialog.set_status)
        self.worker_thread.log_signal.connect(self.forge_dialog.add_log)
        self.worker_thread.error_signal.connect(self._handle_error)
        self.worker_thread.finished_signal.connect(self._handle_finished)
        self.worker_thread.progress_signal.connect(self.forge_dialog.set_progress)

        # Also mirror log/progress into the generator tab as a backup
        self.worker_thread.log_signal.connect(lambda msg: generator_tab.add_log(msg))
        self.worker_thread.progress_signal.connect(lambda value: generator_tab.set_progress(value))

        self.worker_thread.start()

        # Show the dialog and start the animation
        self.forge_dialog.start_animation()
        self.forge_dialog.show()

    def stop_account_creation(self):
        """Stop the X account creation and reset the UI."""
        if self.worker_thread and self.worker_thread.isRunning():
            self.worker_thread.stop()
            generator_tab = self.get_generator_tab()
            generator_tab.add_log("Account-Erstellung wurde abgebrochen")
            generator_tab.set_running(False)
            generator_tab.set_progress(0)

        # Close the forge dialog if present
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

    def handle_account_created(self, result):
        """Process a successfully created account (clean-architecture path).

        Saves the account via the SessionController when available, shows a
        success/error dialog, and falls back to the legacy signal-based flow
        when no SessionController is attached.

        Args:
            result: Result dict from the worker thread; expected to contain
                "account_data" (and possibly "session_data").
        """
        generator_tab = self.get_generator_tab()
        generator_tab.set_running(False)

        # Pull the account data out of the result
        account_data = result.get("account_data", {})

        # Save account and session via SessionController (clean architecture)
        if hasattr(self, 'session_controller') and self.session_controller:
            try:
                # NOTE(review): session_data is read but never used below -
                # session persistence appears to be disabled.
                session_data = result.get("session_data", {})
                save_result = self.session_controller.create_and_save_account(
                    platform=self.platform_name,
                    account_data=account_data
                )

                if save_result.get('success'):
                    logger.info(f"Account und Session erfolgreich gespeichert")

                    # Show the success message (only once!)
                    account_info = save_result.get('account_data', {})
                    from PyQt5.QtWidgets import QMessageBox
                    QMessageBox.information(
                        generator_tab,
                        "Erfolg",
                        f"Account erfolgreich erstellt!\n\n"
                        f"Benutzername: {account_info.get('username', '')}\n"
                        f"Passwort: {account_info.get('password', '')}\n"
                        f"E-Mail/Telefon: {account_info.get('email') or account_info.get('phone', '')}"
                    )

                    # Signal the main window to return to the main page
                    if hasattr(self, 'return_to_main_requested') and callable(self.return_to_main_requested):
                        self.return_to_main_requested()
                else:
                    error_msg = save_result.get('message', 'Unbekannter Fehler')
                    logger.error(f"Fehler beim Speichern: {error_msg}")
                    from views.widgets.modern_message_box import show_error
                    show_error(
                        generator_tab,
                        "Fehler beim Speichern",
                        f"Beim Speichern des Accounts ist ein Fehler aufgetreten:\n\n{error_msg}"
                    )
            except Exception as e:
                logger.error(f"Fehler beim Speichern des Accounts: {e}")
                from views.widgets.modern_message_box import show_critical
                show_critical(
                    generator_tab,
                    "Unerwarteter Fehler",
                    f"Ein unerwarteter Fehler ist beim Speichern des Accounts aufgetreten:\n\n{str(e)}"
                )
        else:
            # Fallback: legacy path when no SessionController is available
            logger.warning("SessionController nicht verfügbar, verwende alte Methode")
            generator_tab.account_created.emit(self.platform_name, account_data)
            if hasattr(self, 'return_to_main_requested') and callable(self.return_to_main_requested):
                self.return_to_main_requested()

    # save_account_to_db was removed - accounts are now stored via the
    # SessionController.

    def validate_inputs(self, inputs):
        """
        Validate the inputs for account creation.

        Uses TextSimilarity for a fuzzy check of the email domain against a
        blacklist of known-problematic throwaway-mail providers.

        Args:
            inputs: Dict of creation parameters.

        Returns:
            tuple[bool, str]: (True, "") when valid, otherwise
            (False, error message).
        """
        # Run the base validations from BasePlatformController first
        valid, error_msg = super().validate_inputs(inputs)
        if not valid:
            return valid, error_msg

        # X-specific validations
        age = inputs.get("age", 0)
        if age < 13:
            return False, "Das Alter muss mindestens 13 sein (X-Anforderung)."

        # Email domain validation (registration is always via email)
        email_domain = inputs.get("email_domain", "")
        # Blacklist of known problematic domains
        blacklisted_domains = ["temp-mail.org", "guerrillamail.com", "maildrop.cc"]

        # Fuzzy-compare the configured domain against the blacklist
        for domain in blacklisted_domains:
            if self.text_similarity.is_similar(email_domain, domain, threshold=0.8):
                return False, f"Die E-Mail-Domain '{email_domain}' kann problematisch für die X-Registrierung sein. Bitte verwenden Sie eine andere Domain."

        return True, ""

    def _handle_error(self, error_msg: str):
        """Handle an error during account creation: close the dialog and
        surface the message in the generator tab.

        Args:
            error_msg: User-facing error text from the worker thread.
        """
        # Close the forge dialog
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Show the error
        generator_tab = self.get_generator_tab()
        generator_tab.show_error(error_msg)
        generator_tab.set_running(False)

    def _handle_finished(self, result: dict):
        """Handle the end of account creation: close the dialog, then run
        the normal success processing.

        Args:
            result: Result dict from the worker thread.
        """
        # Close the forge dialog
        if hasattr(self, 'forge_dialog') and self.forge_dialog:
            self.forge_dialog.close()
            self.forge_dialog = None

        # Normal processing
        self.handle_account_created(result)

    def get_form_field_label(self, field_type: str) -> str:
        """
        Return a (German) label text for a form field based on its type.

        Args:
            field_type: Type of the form field.

        Returns:
            str: Label text for the field; unknown types fall back to the
            capitalized type name.
        """
        # Mapping of field types to labels
        field_labels = {
            "full_name": "Vollständiger Name",
            "username": "Benutzername",
            "password": "Passwort",
            "email": "E-Mail-Adresse",
            "phone": "Telefonnummer",
            "age": "Alter",
            "birthday": "Geburtsdatum"
        }

        return field_labels.get(field_type, field_type.capitalize())
|
||||
321
controllers/session_controller.py
Normale Datei
321
controllers/session_controller.py
Normale Datei
@ -0,0 +1,321 @@
|
||||
"""
|
||||
Session Controller - Verwaltet Browser-Sessions und Ein-Klick-Login
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, Optional, List
|
||||
from PyQt5.QtCore import QObject, pyqtSignal, QTimer
|
||||
from PyQt5.QtWidgets import QMessageBox
|
||||
|
||||
from application.use_cases.one_click_login_use_case import OneClickLoginUseCase
|
||||
from infrastructure.repositories.fingerprint_repository import FingerprintRepository
|
||||
from infrastructure.repositories.account_repository import AccountRepository
|
||||
|
||||
logger = logging.getLogger("session_controller")
|
||||
|
||||
|
||||
class SessionController(QObject):
    """Controller for one-click login (session persistence disabled).

    Ensures the account has a browser fingerprint, shows a login dialog,
    and performs a normal credential login in a background worker thread.
    """

    # Signals
    login_started = pyqtSignal(str)  # account_id
    login_successful = pyqtSignal(str, dict)  # account_id, login_data
    login_failed = pyqtSignal(str, str)  # account_id, error_message

    def __init__(self, db_manager):
        """
        Args:
            db_manager: Database manager providing db_path and add_account().
        """
        super().__init__()
        self.db_manager = db_manager

        # Initialize repositories
        self.fingerprint_repository = FingerprintRepository(db_manager.db_path)
        self.account_repository = AccountRepository(db_manager.db_path)

        # Import the fingerprint generator use case (lazy, avoids cycles)
        from application.use_cases.generate_account_fingerprint_use_case import GenerateAccountFingerprintUseCase
        self.fingerprint_generator = GenerateAccountFingerprintUseCase(db_manager)

        # Initialize use cases
        self.one_click_login_use_case = OneClickLoginUseCase(
            self.fingerprint_repository,
            self.account_repository
        )

    def perform_one_click_login(self, account_data: Dict[str, Any]):
        """
        Perform a one-click login for an account.

        Args:
            account_data: Dict with account data incl. id, platform,
                username, etc.
        """
        account_id = str(account_data.get("id", ""))
        platform = account_data.get("platform", "")
        username = account_data.get("username", "")
        logger.info(f"Ein-Klick-Login für Account {username} (ID: {account_id}) auf {platform}")
        self.login_started.emit(account_id)

        try:
            # Make sure the account has a fingerprint
            fingerprint_id = account_data.get("fingerprint_id")
            if not fingerprint_id:
                logger.info(f"Generiere Fingerprint für Account {account_id}")
                fingerprint_id = self.fingerprint_generator.execute(int(account_id))
                if not fingerprint_id:
                    raise Exception("Konnte keinen Fingerprint generieren")

            # Session-based login is disabled - always do a normal login
            logger.info(f"Starte normalen Login für Account {account_id} (Session-Login deaktiviert)")

            # Show the login dialog before the login starts
            from views.widgets.forge_animation_widget import ForgeAnimationDialog
            from PyQt5.QtWidgets import QApplication

            # Find the application main window to use as dialog parent
            main_window = None
            for widget in QApplication.topLevelWidgets():
                if widget.objectName() == "AccountForgerMainWindow":
                    main_window = widget
                    break

            self.login_dialog = ForgeAnimationDialog(main_window, platform, is_login=True)
            self.login_dialog.cancel_clicked.connect(lambda: self._cancel_login(account_id))
            self.login_dialog.closed.connect(lambda: self._cancel_login(account_id))

            # Show the dialog
            self.login_dialog.start_animation()
            self.login_dialog.show()

            # Load the account data directly from the DB (authoritative copy)
            account = self.account_repository.get_by_id(int(account_id))
            if account:
                account_login_data = {
                    'username': account.get('username'),
                    'password': account.get('password'),
                    'platform': account.get('platform'),
                    'fingerprint_id': account.get('fingerprint_id')
                }

                # Pass the main-window position along for browser placement
                if main_window:
                    window_pos = main_window.pos()
                    account_login_data['window_position'] = (window_pos.x(), window_pos.y())

                self._perform_normal_login(account_id, account_login_data)
            else:
                error_msg = f"Account mit ID {account_id} nicht gefunden"
                logger.error(error_msg)
                self.login_failed.emit(account_id, error_msg)

        except Exception as e:
            logger.error(f"Fehler beim Ein-Klick-Login: {e}")
            self.login_failed.emit(account_id, str(e))

    def _cancel_login(self, account_id: str):
        """Abort the login process and close the dialog.

        Args:
            account_id: ID of the account whose login is cancelled.
        """
        logger.info(f"Login für Account {account_id} wurde abgebrochen")
        if hasattr(self, 'login_dialog') and self.login_dialog:
            self.login_dialog.close()
            self.login_dialog = None
        # TODO: stop the login worker if one is running

    def create_and_save_account(self, platform: str, account_data: Dict[str, Any]):
        """
        Create and persist a new account (no session persistence).

        Args:
            platform: Platform name.
            account_data: Account information (username, password, etc.).

        Returns:
            Dict with success, account_id, account_data and message; on
            failure: success=False, error and message.
        """
        try:
            # Persist the account in the database
            from datetime import datetime
            account_record = {
                "platform": platform.lower(),
                "username": account_data.get("username", ""),
                "password": account_data.get("password", ""),
                "email": account_data.get("email", ""),
                "phone": account_data.get("phone", ""),
                "full_name": account_data.get("full_name", ""),
                "created_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }

            account_id = self.db_manager.add_account(account_record)
            logger.info(f"Account in Datenbank gespeichert: {account_record['username']} (ID: {account_id})")

            # Generate a fingerprint for the account
            if account_id and account_id > 0:
                logger.info(f"Generiere Fingerprint für neuen Account {account_id}")
                fingerprint_id = self.fingerprint_generator.execute(account_id)
                if fingerprint_id:
                    logger.info(f"Fingerprint {fingerprint_id} wurde Account {account_id} zugewiesen")

            return {
                'success': True,
                'account_id': account_id,
                'account_data': account_record,
                'message': 'Account erfolgreich erstellt'
            }

        except Exception as e:
            logger.error(f"Fehler beim Erstellen des Accounts: {e}")
            return {
                'success': False,
                'error': str(e),
                'message': f'Fehler beim Erstellen des Accounts: {str(e)}'
            }

    def _perform_normal_login(self, account_id: str, account_data: Dict[str, Any], automation=None):
        """
        Perform a normal credential login (used since sessions are disabled).

        Spawns a LoginWorkerThread that builds the platform automation,
        calls login_account(), and reports back via this controller's
        signals. The browser is intentionally left open on success.

        Args:
            account_id: Account ID.
            account_data: Account data incl. username, password, platform,
                fingerprint.
            automation: Optional pre-built automation instance to reuse.
        """
        from PyQt5.QtCore import QThread, pyqtSignal
        from social_networks.instagram.instagram_automation import InstagramAutomation
        from social_networks.tiktok.tiktok_automation import TikTokAutomation

        class LoginWorkerThread(QThread):
            """Worker thread for the login process."""
            login_completed = pyqtSignal(str, dict)  # account_id, result
            login_failed = pyqtSignal(str, str)  # account_id, error
            status_update = pyqtSignal(str)  # status message
            log_update = pyqtSignal(str)  # log message

            def __init__(self, account_id, account_data, session_controller, automation=None):
                super().__init__()
                self.account_id = account_id
                self.account_data = account_data
                self.session_controller = session_controller
                # Reuse the provided automation or build a new one in run()
                self.automation = automation

            def run(self):
                try:
                    # Use the provided automation or create a new one
                    if not self.automation:
                        # Load the fingerprint if one is assigned
                        fingerprint_dict = None
                        if self.account_data.get('fingerprint_id'):
                            fingerprint_obj = self.session_controller.fingerprint_repository.find_by_id(
                                self.account_data['fingerprint_id']
                            )
                            if fingerprint_obj:
                                fingerprint_dict = fingerprint_obj.to_dict()

                        # Choose the automation based on the platform
                        platform = self.account_data.get('platform', '').lower()
                        if platform == 'instagram':
                            self.automation = InstagramAutomation(
                                headless=False,
                                fingerprint=fingerprint_dict,
                                window_position=self.account_data.get('window_position')
                            )
                            # Wire UI callbacks
                            self.automation.status_update_callback = lambda msg: self.status_update.emit(msg)
                            self.automation.log_update_callback = lambda msg: self.log_update.emit(msg)
                        elif platform == 'tiktok':
                            self.automation = TikTokAutomation(
                                headless=False,
                                fingerprint=fingerprint_dict,
                                window_position=self.account_data.get('window_position')
                            )
                            # Wire UI callbacks
                            self.automation.status_update_callback = lambda msg: self.status_update.emit(msg)
                            self.automation.log_update_callback = lambda msg: self.log_update.emit(msg)
                        elif platform == 'x':
                            from social_networks.x.x_automation import XAutomation
                            self.automation = XAutomation(
                                headless=False,
                                fingerprint=fingerprint_dict,
                                window_position=self.account_data.get('window_position')
                            )
                            # Wire UI callbacks
                            self.automation.status_update_callback = lambda msg: self.status_update.emit(msg)
                            self.automation.log_update_callback = lambda msg: self.log_update.emit(msg)
                        else:
                            self.login_failed.emit(self.account_id, f"Plattform {platform} nicht unterstützt")
                            return

                    # NOTE(review): redundant re-read of platform - already
                    # computed above when no automation was supplied.
                    platform = self.account_data.get('platform', '').lower()

                    # Send status updates
                    self.status_update.emit(f"Starte Login für {platform.title()}")
                    self.log_update.emit(f"Öffne {platform.title()}-Webseite...")

                    # Perform the login
                    result = self.automation.login_account(
                        username_or_email=self.account_data.get('username'),
                        password=self.account_data.get('password'),
                        account_id=self.account_id
                    )

                    if result['success']:
                        # Session persistence removed entirely - just report success
                        logger.info(f"Login erfolgreich für Account {self.account_id} - Session-Speicherung deaktiviert")
                        self.login_completed.emit(self.account_id, result)
                    else:
                        self.login_failed.emit(self.account_id, result.get('error', 'Login fehlgeschlagen'))

                except Exception as e:
                    logger.error(f"Fehler beim normalen Login: {e}")
                    self.login_failed.emit(self.account_id, str(e))

            def cleanup(self):
                """Do NOT close the browser - the user keeps control."""
                logger.info(f"Browser für Account {self.account_id} bleibt offen (User-Kontrolle)")
                # CHANGED: the browser is no longer closed automatically
                # try:
                #     if self.automation and hasattr(self.automation, 'browser'):
                #         if hasattr(self.automation.browser, 'close'):
                #             self.automation.browser.close()
                #             logger.info(f"Browser für Account {self.account_id} geschlossen")
                # except Exception as e:
                #     logger.error(f"Fehler beim Schließen des Browsers: {e}")

        # Create and start the worker thread
        self.login_worker = LoginWorkerThread(account_id, account_data, self, automation)

        # Connect to the dialog if one is visible
        if hasattr(self, 'login_dialog') and self.login_dialog:
            self.login_worker.status_update.connect(self.login_dialog.set_status)
            self.login_worker.log_update.connect(self.login_dialog.add_log)

        # Browser is NOT closed automatically - the user keeps control
        def on_login_completed(aid, result):
            self.login_successful.emit(aid, result)
            # Close the dialog on success
            if hasattr(self, 'login_dialog') and self.login_dialog:
                self.login_dialog.close()
                self.login_dialog = None
            # CHANGED: the browser is no longer closed automatically
            logger.info(f"Login erfolgreich für Account {aid} - Browser bleibt offen")
            # if hasattr(self.login_worker, 'cleanup'):
            #     QTimer.singleShot(1000, self.login_worker.cleanup)  # wait 1s, then cleanup

        def on_login_failed(aid, error):
            self.login_failed.emit(aid, error)
            # Close the dialog on failure
            if hasattr(self, 'login_dialog') and self.login_dialog:
                self.login_dialog.close()
                self.login_dialog = None
            # Close the browser on failure as well
            if hasattr(self.login_worker, 'cleanup'):
                QTimer.singleShot(1000, self.login_worker.cleanup)

        self.login_worker.login_completed.connect(on_login_completed)
        self.login_worker.login_failed.connect(on_login_failed)
        self.login_worker.start()

    def _show_manual_login_required(self, account_id: str, platform: str, reason: str):
        """Show a dialog telling the user a manual login is required.

        Args:
            account_id: Affected account ID (currently unused in the text).
            platform: Platform name shown in the message.
            reason: Human-readable reason why one-click login failed.
        """
        QMessageBox.information(
            None,
            "Manueller Login erforderlich",
            f"Ein-Klick-Login für {platform} ist nicht möglich.\n\n"
            f"Grund: {reason}\n\n"
            "Bitte melden Sie sich manuell an, um eine neue Session zu erstellen."
        )
|
||||
294
controllers/settings_controller.py
Normale Datei
294
controllers/settings_controller.py
Normale Datei
@ -0,0 +1,294 @@
|
||||
"""
|
||||
Controller für die Verwaltung von Einstellungen.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import random
|
||||
from PyQt5.QtWidgets import QMessageBox
|
||||
from PyQt5.QtCore import QObject
|
||||
|
||||
logger = logging.getLogger("settings_controller")
|
||||
|
||||
class SettingsController(QObject):
|
||||
"""Controller für die Verwaltung von Einstellungen."""
|
||||
|
||||
def __init__(self, proxy_rotator, email_handler, license_manager):
    """Store the service handles used by the settings views.

    Args:
        proxy_rotator: Proxy configuration/testing service.
        email_handler: Email (IMAP) configuration service.
        license_manager: License management service.
    """
    super().__init__()
    self.license_manager = license_manager
    self.email_handler = email_handler
    self.proxy_rotator = proxy_rotator
    # Parent widget for modal dialogs; set later via set_parent_view().
    self.parent_view = None
|
||||
|
||||
def set_parent_view(self, view):
    """Remember the widget to use as parent for message-box dialogs."""
    self.parent_view = view
|
||||
|
||||
def load_proxy_settings(self):
    """Read the proxy configuration and map it to the view's field names.

    Returns:
        dict: Keys ipv4_proxies, ipv6_proxies, mobile_proxies, mobile_api;
        an empty dict if loading fails.
    """
    try:
        config = self.proxy_rotator.get_config() or {}
        # Translate the rotator's keys into the names the settings tab uses.
        return {
            "ipv4_proxies": config.get("ipv4", []),
            "ipv6_proxies": config.get("ipv6", []),
            "mobile_proxies": config.get("mobile", []),
            "mobile_api": config.get("mobile_api", {}),
        }
    except Exception as e:
        logger.error(f"Fehler beim Laden der Proxy-Einstellungen: {e}")
        return {}
|
||||
|
||||
def save_proxy_settings(self, settings):
    """Persist the proxy settings.

    Each proxy list may arrive either as a list or as raw multi-line
    textarea content; strings are split into stripped, non-empty lines.

    Args:
        settings: Dict with ipv4_proxies, ipv6_proxies, mobile_proxies
            (list or newline-separated str) and mobile_api (dict).

    Returns:
        bool: True on success, False when saving failed.
    """
    def _as_list(value):
        # Normalize textarea content into a clean list of proxy entries;
        # lists pass through unchanged.
        if isinstance(value, str):
            return [line.strip() for line in value.splitlines() if line.strip()]
        return value

    try:
        self.proxy_rotator.update_config({
            "ipv4": _as_list(settings.get("ipv4_proxies", [])),
            "ipv6": _as_list(settings.get("ipv6_proxies", [])),
            "mobile": _as_list(settings.get("mobile_proxies", [])),
            "mobile_api": settings.get("mobile_api", {}),
        })

        logger.info("Proxy-Einstellungen gespeichert")

        if self.parent_view:
            QMessageBox.information(
                self.parent_view,
                "Erfolg",
                "Proxy-Einstellungen wurden gespeichert."
            )

        return True
    except Exception as e:
        logger.error(f"Fehler beim Speichern der Proxy-Einstellungen: {e}")

        if self.parent_view:
            QMessageBox.critical(
                self.parent_view,
                "Fehler",
                f"Proxy-Einstellungen konnten nicht gespeichert werden:\n{str(e)}"
            )

        return False
|
||||
|
||||
def test_proxy(self, proxy_type):
    """Test a proxy of the selected type and report the result to the user.

    Args:
        proxy_type: Proxy category (e.g. "ipv4", "ipv6", "mobile").

    Returns:
        True if the test succeeded, False otherwise.
    """
    try:
        # Check whether any proxies of this type are configured at all.
        proxies = self.proxy_rotator.get_proxies_by_type(proxy_type)
        if not proxies:
            if self.parent_view:
                QMessageBox.warning(
                    self.parent_view,
                    "Keine Proxies",
                    f"Keine {proxy_type.upper()}-Proxies konfiguriert.\nBitte fügen Sie Proxies in den Einstellungen hinzu."
                )
            return False

        # The rotator selects the proxy itself; the previous
        # `random.choice(proxies)` result was never used (dead code).
        result = self.proxy_rotator.test_proxy(proxy_type)

        if result["success"]:
            if self.parent_view:
                QMessageBox.information(
                    self.parent_view,
                    "Proxy-Test erfolgreich",
                    f"IP: {result['ip']}\nLand: {result['country'] or 'Unbekannt'}\nAntwortzeit: {result['response_time']:.2f}s"
                )
            return True
        else:
            if self.parent_view:
                QMessageBox.warning(
                    self.parent_view,
                    "Proxy-Test fehlgeschlagen",
                    f"Fehler: {result['error']}"
                )
            return False

    except Exception as e:
        logger.error(f"Fehler beim Testen des Proxy: {e}")

        if self.parent_view:
            QMessageBox.critical(
                self.parent_view,
                "Fehler",
                f"Fehler beim Testen des Proxy:\n{str(e)}"
            )

        return False
def load_email_settings(self):
    """Load the stored e-mail (IMAP) settings.

    Returns:
        Dict with imap_server/imap_port/imap_user/imap_pass;
        an empty dict on error.
    """
    try:
        cfg = self.email_handler.get_config() or {}
        defaults = {
            "imap_server": "",
            "imap_port": 993,
            "imap_user": "",
            "imap_pass": "",
        }
        # Fill each known key from the handler config, falling back to
        # the defaults above for anything that is missing.
        return {key: cfg.get(key, fallback) for key, fallback in defaults.items()}
    except Exception as e:
        logger.error(f"Fehler beim Laden der E-Mail-Einstellungen: {e}")
        return {}
def save_email_settings(self, settings):
    """Persist the e-mail settings via the e-mail handler.

    Args:
        settings: Dict of e-mail settings passed straight to the handler.

    Returns:
        True on success, False on failure.
    """
    try:
        self.email_handler.update_config(settings)
        logger.info("E-Mail-Einstellungen gespeichert")

        if self.parent_view:
            QMessageBox.information(
                self.parent_view, "Erfolg",
                "E-Mail-Einstellungen wurden gespeichert.")
        return True
    except Exception as e:
        logger.error(f"Fehler beim Speichern der E-Mail-Einstellungen: {e}")

        if self.parent_view:
            QMessageBox.critical(
                self.parent_view, "Fehler",
                f"E-Mail-Einstellungen konnten nicht gespeichert werden:\n{str(e)}")
        return False
def test_email(self, settings=None):
    """Test the e-mail (IMAP) connection.

    Args:
        settings: Optional dict with imap_user/imap_pass/imap_server/
            imap_port; when given, applied to the handler before testing.

    Returns:
        True if the connection test succeeded, False otherwise.
    """
    try:
        if settings:
            # Apply the supplied settings before running the test.
            self.email_handler.update_credentials(
                settings.get("imap_user", ""),
                settings.get("imap_pass", "")
            )
            self.email_handler.update_server(
                settings.get("imap_server", ""),
                settings.get("imap_port", 993)
            )

        result = self.email_handler.test_connection()

        # Failure path first (guard clause), success path below.
        if not result["success"]:
            if self.parent_view:
                QMessageBox.warning(
                    self.parent_view,
                    "E-Mail-Test fehlgeschlagen",
                    f"Fehler: {result['error']}"
                )
            return False

        if self.parent_view:
            QMessageBox.information(
                self.parent_view,
                "E-Mail-Test erfolgreich",
                f"Verbindung zu {result['server']}:{result['port']} hergestellt.\nGefundene Postfächer: {result['mailbox_count']}"
            )
        return True

    except Exception as e:
        logger.error(f"Fehler beim Testen der E-Mail-Verbindung: {e}")

        if self.parent_view:
            QMessageBox.critical(
                self.parent_view,
                "Fehler",
                f"Fehler beim Testen der E-Mail-Verbindung:\n{str(e)}"
            )
        return False
def load_license_info(self):
    """Return the current license information, or an empty dict on error."""
    try:
        return self.license_manager.get_license_info()
    except Exception as e:
        logger.error(f"Fehler beim Laden der Lizenzinformationen: {e}")
        return {}
def activate_license(self, license_key):
    """Activate a license key and report the outcome to the user.

    Args:
        license_key: The license key to activate.

    Returns:
        Tuple (success: bool, message: str) from the license manager,
        or (False, error text) when activation raised.
    """
    try:
        success, message = self.license_manager.activate_license(license_key)

        # One hoisted parent_view check; only the dialog flavor differs.
        if self.parent_view:
            if success:
                QMessageBox.information(
                    self.parent_view, "Lizenz aktiviert", message)
            else:
                QMessageBox.warning(
                    self.parent_view, "Lizenzaktivierung fehlgeschlagen", message)

        return success, message
    except Exception as e:
        logger.error(f"Fehler bei der Lizenzaktivierung: {e}")

        if self.parent_view:
            QMessageBox.critical(
                self.parent_view,
                "Fehler",
                f"Fehler bei der Lizenzaktivierung:\n{str(e)}"
            )

        return False, str(e)
def check_license(self):
    """Check whether a valid license is present.

    Shows a warning dialog when no valid license is found and a parent
    view is available.

    Returns:
        The license manager's licensed flag; False when the check raised.
    """
    try:
        licensed = self.license_manager.is_licensed()

        if not licensed and self.parent_view:
            # Fetch the human-readable status only for the warning dialog.
            info = self.license_manager.get_license_info()
            status = info.get("status_text", "Inaktiv")
            QMessageBox.warning(
                self.parent_view,
                "Keine gültige Lizenz",
                f"Status: {status}\n\nBitte aktivieren Sie eine Lizenz, um die Software zu nutzen."
            )

        return licensed
    except Exception as e:
        logger.error(f"Fehler bei der Lizenzprüfung: {e}")
        return False
0
database/__init__.py
Normale Datei
0
database/__init__.py
Normale Datei
0
database/account_repository.py
Normale Datei
0
database/account_repository.py
Normale Datei
BIN
database/accounts.db
Normale Datei
BIN
database/accounts.db
Normale Datei
Binäre Datei nicht angezeigt.
589
database/db_manager.py
Normale Datei
589
database/db_manager.py
Normale Datei
@ -0,0 +1,589 @@
|
||||
"""
|
||||
Datenbankmanager für den Social Media Account Generator.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import sqlite3
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Any, Optional, Tuple, Union
|
||||
|
||||
from config.paths import PathConfig
|
||||
|
||||
logger = logging.getLogger("db_manager")
|
||||
|
||||
class DatabaseManager:
|
||||
"""Klasse zur Verwaltung der Datenbank für Account-Informationen."""
|
||||
|
||||
def __init__(self, db_path: str = None):
    """Initialize the DatabaseManager.

    Args:
        db_path: Path to the database file; if None, PathConfig.MAIN_DB
            is used.
    """
    self.db_path = db_path if db_path is not None else PathConfig.MAIN_DB

    # Make sure the database directory exists before connecting.
    os.makedirs(os.path.dirname(self.db_path), exist_ok=True)

    # Create tables / run migrations immediately.
    self.init_db()
def init_db(self) -> None:
    """Initialize the database and create the required tables if missing.

    Runs the optional schema-v2 initialization, creates the legacy
    `accounts` and `settings` tables, and adds columns introduced later
    to pre-existing databases. Errors are logged, not raised.
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            # Schema v2 (session/fingerprint tables) is best-effort: a
            # failure here must not prevent the legacy tables below.
            try:
                self._init_schema_v2(cursor)
                conn.commit()  # commit after schema v2 initialization
            except Exception as e:
                logger.warning(f"Konnte Schema v2 nicht initialisieren: {e}")

            # Accounts table
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS accounts (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    platform TEXT NOT NULL,
                    username TEXT NOT NULL,
                    password TEXT NOT NULL,
                    email TEXT,
                    phone TEXT,
                    full_name TEXT,
                    created_at TEXT NOT NULL,
                    last_login TEXT,
                    notes TEXT,
                    cookies TEXT,
                    status TEXT,
                    fingerprint_id TEXT,
                    session_id TEXT,
                    last_session_update TEXT
                )
            ''')

            # Migration for existing databases: add columns that were
            # introduced after the table was first created.
            try:
                cursor.execute("PRAGMA table_info(accounts)")
                columns = [column[1] for column in cursor.fetchall()]

                for column_name in ("fingerprint_id", "session_id", "last_session_update"):
                    if column_name not in columns:
                        cursor.execute(f"ALTER TABLE accounts ADD COLUMN {column_name} TEXT")
                        logger.info(f"Added {column_name} column to accounts table")
            except Exception as e:
                logger.warning(f"Migration warning: {e}")

            # Key/value settings table
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS settings (
                    key TEXT PRIMARY KEY,
                    value TEXT NOT NULL
                )
            ''')

            conn.commit()
        finally:
            # Previously the connection leaked when any statement above
            # raised; always release it.
            conn.close()

        logger.info("Datenbank initialisiert")
    except sqlite3.Error as e:
        logger.error(f"Fehler bei der Datenbankinitialisierung: {e}")
def add_account(self, account_data: Dict[str, Any]) -> int:
    """Add an account to the database.

    Args:
        account_data: Dict of column name -> value; must contain
            "platform", "username" and "password".

    Returns:
        ID of the inserted account, or -1 on error.
    """
    try:
        # Check that the required fields are present.
        required_fields = ["platform", "username", "password"]
        for field in required_fields:
            if field not in account_data:
                logger.error(f"Fehlendes Pflichtfeld: {field}")
                return -1

        # Column names are interpolated into the SQL below, so reject
        # anything that is not a plain identifier (SQL-injection guard).
        for field in account_data:
            if not field.isidentifier():
                logger.error(f"Fehlendes Pflichtfeld: {field}")
                return -1

        # Make sure created_at is set.
        if "created_at" not in account_data:
            account_data["created_at"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            # Build the INSERT from the (validated) column names.
            fields = ", ".join(account_data.keys())
            placeholders = ", ".join(["?" for _ in account_data])
            query = f"INSERT INTO accounts ({fields}) VALUES ({placeholders})"

            cursor.execute(query, list(account_data.values()))
            account_id = cursor.lastrowid

            conn.commit()
        finally:
            conn.close()  # release even when execute/commit raised

        logger.info(f"Account hinzugefügt: {account_data['username']} (ID: {account_id})")
        return account_id
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Hinzufügen des Accounts: {e}")
        return -1
def get_account(self, account_id: int) -> Optional[Dict[str, Any]]:
    """Return an account by its ID.

    Args:
        account_id: ID of the account.

    Returns:
        Dict of account data, or None when not found or on error.
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            conn.row_factory = sqlite3.Row  # dict-like row access
            cursor = conn.cursor()

            cursor.execute("SELECT * FROM accounts WHERE id = ?", (account_id,))
            row = cursor.fetchone()
        finally:
            conn.close()  # previously leaked when the query raised

        if row:
            account = dict(row)
            logger.debug(f"Account gefunden: {account['username']} (ID: {account_id})")
            return account
        else:
            logger.warning(f"Account nicht gefunden: ID {account_id}")
            return None

    except sqlite3.Error as e:
        logger.error(f"Fehler beim Abrufen des Accounts: {e}")
        return None
def get_all_accounts(self) -> List[Dict[str, Any]]:
    """Return all accounts, newest first.

    Returns:
        List of account dicts (empty list on error).
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            conn.row_factory = sqlite3.Row
            cursor = conn.cursor()

            cursor.execute("SELECT * FROM accounts ORDER BY id DESC")
            rows = cursor.fetchall()
        finally:
            conn.close()  # previously leaked when the query raised

        accounts = [dict(row) for row in rows]
        logger.info(f"{len(accounts)} Accounts abgerufen")
        return accounts
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Abrufen aller Accounts: {e}")
        return []
def get_accounts_by_platform(self, platform: str) -> List[Dict[str, Any]]:
    """Return all accounts for a given platform, newest first.

    Args:
        platform: Platform name (e.g. "instagram"); matched lowercased.

    Returns:
        List of account dicts (empty list on error).
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            conn.row_factory = sqlite3.Row
            cursor = conn.cursor()

            cursor.execute(
                "SELECT * FROM accounts WHERE platform = ? ORDER BY id DESC",
                (platform.lower(),),
            )
            rows = cursor.fetchall()
        finally:
            conn.close()  # previously leaked when the query raised

        accounts = [dict(row) for row in rows]
        logger.info(f"{len(accounts)} Accounts für Plattform '{platform}' abgerufen")
        return accounts
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Abrufen der Accounts für Plattform '{platform}': {e}")
        return []
def update_account(self, account_id: int, update_data: Dict[str, Any]) -> bool:
    """Update an account in the database.

    Args:
        account_id: ID of the account to update.
        update_data: Dict of column name -> new value.

    Returns:
        True on success, False on error or empty update_data.
    """
    if not update_data:
        logger.warning("Keine Aktualisierungsdaten bereitgestellt")
        return False

    # Column names are interpolated into the SET clause, so reject
    # anything that is not a plain identifier (SQL-injection guard).
    for field in update_data:
        if not field.isidentifier():
            logger.warning("Keine Aktualisierungsdaten bereitgestellt")
            return False

    try:
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            # Build "col1 = ?, col2 = ?" from the validated field names.
            set_clause = ", ".join([f"{field} = ?" for field in update_data.keys()])
            values = list(update_data.values())
            values.append(account_id)

            query = f"UPDATE accounts SET {set_clause} WHERE id = ?"
            cursor.execute(query, values)

            conn.commit()
        finally:
            conn.close()  # previously leaked when execute/commit raised

        logger.info(f"Account aktualisiert: ID {account_id}")
        return True
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Aktualisieren des Accounts: {e}")
        return False
def delete_account(self, account_id: int) -> bool:
    """Delete an account from the database.

    Args:
        account_id: ID of the account to delete.

    Returns:
        True on success (including when no row matched), False on error.
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute("DELETE FROM accounts WHERE id = ?", (account_id,))
            conn.commit()
        finally:
            conn.close()  # previously leaked when execute/commit raised

        logger.info(f"Account gelöscht: ID {account_id}")
        return True
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Löschen des Accounts: {e}")
        return False
def search_accounts(self, query: str, platform: Optional[str] = None) -> List[Dict[str, Any]]:
    """Search accounts by username, email, phone or full name.

    Args:
        query: Search term (substring match).
        platform: Optional platform to restrict the search to.

    Returns:
        List of matching account dicts, newest first (empty on error).
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            conn.row_factory = sqlite3.Row
            cursor = conn.cursor()

            # Substring match for the LIKE operator.
            search_term = f"%{query}%"
            params = [search_term, search_term, search_term, search_term]

            query_sql = (
                "SELECT * FROM accounts "
                "WHERE (username LIKE ? OR email LIKE ? OR phone LIKE ? OR full_name LIKE ?)"
            )
            if platform:
                query_sql += " AND platform = ?"
                params.append(platform.lower())
            query_sql += " ORDER BY id DESC"

            cursor.execute(query_sql, params)
            rows = cursor.fetchall()
        finally:
            conn.close()  # previously leaked when the query raised

        accounts = [dict(row) for row in rows]
        logger.info(f"{len(accounts)} Accounts gefunden für Suchbegriff '{query}'")
        return accounts
    except sqlite3.Error as e:
        logger.error(f"Fehler bei der Suche nach Accounts: {e}")
        return []
def get_connection(self) -> sqlite3.Connection:
    """Return a new database connection.

    The caller owns the connection and is responsible for closing it.

    Returns:
        SQLite connection with row_factory set to sqlite3.Row so rows
        support dict-like access.
    """
    conn = sqlite3.connect(self.db_path)
    conn.row_factory = sqlite3.Row
    return conn
def get_account_count(self, platform: Optional[str] = None) -> int:
    """Return the number of accounts.

    Args:
        platform: Optional platform to restrict the count to.

    Returns:
        Number of accounts, or 0 on error.
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            if platform:
                cursor.execute(
                    "SELECT COUNT(*) FROM accounts WHERE platform = ?",
                    (platform.lower(),),
                )
            else:
                cursor.execute("SELECT COUNT(*) FROM accounts")

            count = cursor.fetchone()[0]
        finally:
            conn.close()  # previously leaked when the query raised

        return count
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Zählen der Accounts: {e}")
        return 0
def get_setting(self, key: str, default: Any = None) -> Any:
    """Return a setting value.

    Args:
        key: Setting key.
        default: Value to return when the key is missing or on error.

    Returns:
        The stored value, JSON-decoded when possible (set_setting stores
        non-strings as JSON), the raw string otherwise, or `default`.
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute("SELECT value FROM settings WHERE key = ?", (key,))
            row = cursor.fetchone()
        finally:
            conn.close()  # previously leaked when the query raised

        if row:
            # Try to parse the value as JSON; fall back to the raw text.
            try:
                return json.loads(row[0])
            except json.JSONDecodeError:
                return row[0]
        else:
            return default

    except sqlite3.Error as e:
        logger.error(f"Fehler beim Abrufen der Einstellung '{key}': {e}")
        return default
def set_setting(self, key: str, value: Any) -> bool:
    """Store a setting value.

    Args:
        key: Setting key.
        value: Setting value; serialized as JSON when it is not a string.

    Returns:
        True on success, False on error.
    """
    try:
        # Store non-strings as JSON so get_setting can round-trip them.
        if not isinstance(value, str):
            value = json.dumps(value)

        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            # Atomic upsert; replaces the prior SELECT-then-INSERT/UPDATE
            # sequence, which raced between the check and the write.
            # `key` is the table's PRIMARY KEY, so OR REPLACE is exact.
            cursor.execute(
                "INSERT OR REPLACE INTO settings (key, value) VALUES (?, ?)",
                (key, value),
            )

            conn.commit()
        finally:
            conn.close()  # previously leaked when execute/commit raised

        logger.info(f"Einstellung gespeichert: {key}")
        return True
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Speichern der Einstellung '{key}': {e}")
        return False
def delete_setting(self, key: str) -> bool:
    """Delete a setting.

    Args:
        key: Key of the setting to delete.

    Returns:
        True on success (including when no row matched), False on error.
    """
    try:
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute("DELETE FROM settings WHERE key = ?", (key,))
            conn.commit()
        finally:
            conn.close()  # previously leaked when execute/commit raised

        logger.info(f"Einstellung gelöscht: {key}")
        return True
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Löschen der Einstellung '{key}': {e}")
        return False
def backup_database(self, backup_path: Optional[str] = None) -> bool:
    """Create a backup of the database.

    Args:
        backup_path: Optional target path; defaults to a timestamped file
            under database/backup/.

    Returns:
        True on success, False on error.
    """
    if not backup_path:
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_path = f"database/backup/accounts_{timestamp}.db"

    # Make sure the backup directory exists.
    os.makedirs(os.path.dirname(backup_path), exist_ok=True)

    try:
        # Use the SQLite online-backup API so the copy is consistent
        # even if other connections are writing.
        conn = sqlite3.connect(self.db_path)
        try:
            backup_conn = sqlite3.connect(backup_path)
            try:
                conn.backup(backup_conn)
            finally:
                backup_conn.close()
        finally:
            conn.close()  # both connections previously leaked on error

        logger.info(f"Datenbank-Backup erstellt: {backup_path}")
        return True
    except sqlite3.Error as e:
        logger.error(f"Fehler beim Erstellen des Datenbank-Backups: {e}")
        return False
def _init_schema_v2(self, cursor) -> None:
    """Initialize schema v2 (session tables) from schema_v2.sql.

    Falls back to _create_minimal_v2_tables() when the SQL file is
    missing or any statement fails. The caller owns the cursor's
    connection and is responsible for committing.
    """
    schema_path = PathConfig.SCHEMA_V2

    try:
        # Try to load schema_v2.sql from the configured path.
        if PathConfig.file_exists(schema_path):
            logger.info(f"Lade Schema v2 aus {schema_path}")
            with open(schema_path, 'r', encoding='utf-8') as f:
                schema_sql = f.read()

            # Execute all SQL statements. SQLite's execute() accepts only
            # one statement per call, so the script is split on ';'.
            # NOTE(review): a ';' inside a string literal or trigger body
            # would break this naive split — confirm schema_v2.sql has none.
            statements = [s.strip() for s in schema_sql.split(';') if s.strip()]

            for statement in statements:
                if statement:  # ignore empty statements
                    cursor.execute(statement)

            logger.info("Schema v2 erfolgreich aus SQL-Datei geladen")
        else:
            logger.warning(f"schema_v2.sql nicht gefunden unter {schema_path}")
            # Fallback: create the minimally required tables.
            self._create_minimal_v2_tables(cursor)

    except Exception as e:
        logger.error(f"Fehler beim Laden von Schema v2: {e}")
        # Fallback: create the minimally required tables.
        self._create_minimal_v2_tables(cursor)
def _create_minimal_v2_tables(self, cursor) -> None:
    """Create the minimally required v2 tables as a fallback.

    Only the tables needed for one-click login are created; the full
    schema_v2.sql may define more. Errors are logged, not raised.
    """
    try:
        # Saved browser sessions (cookies + web storage per fingerprint).
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS browser_sessions (
                id TEXT PRIMARY KEY,
                fingerprint_id TEXT NOT NULL,
                cookies TEXT NOT NULL,
                local_storage TEXT,
                session_storage TEXT,
                account_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_used TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                health_score REAL DEFAULT 1.0
            )
        ''')

        # Browser fingerprint definitions (stored as JSON text columns).
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS browser_fingerprints (
                id TEXT PRIMARY KEY,
                canvas_noise_config TEXT NOT NULL,
                webrtc_config TEXT NOT NULL,
                fonts TEXT NOT NULL,
                hardware_config TEXT NOT NULL,
                navigator_props TEXT NOT NULL,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        logger.info("Minimale v2 Tabellen erstellt")

    except sqlite3.Error as e:
        logger.error(f"Fehler beim Erstellen der minimalen v2 Tabellen: {e}")
19
database/migrations/add_browser_storage_columns.sql
Normale Datei
19
database/migrations/add_browser_storage_columns.sql
Normale Datei
@ -0,0 +1,19 @@
|
||||
-- Migration: Add browser storage columns to browser_sessions table
-- This migration adds columns for storing LocalStorage and SessionStorage data.
-- NOTE: SQLite's ALTER TABLE ADD COLUMN fails if the column already exists,
-- so this migration is not idempotent and must run at most once.

-- Add local_storage column (JSON-serialized LocalStorage snapshot)
ALTER TABLE browser_sessions ADD COLUMN local_storage TEXT;

-- Add session_storage column (JSON-serialized SessionStorage snapshot)
ALTER TABLE browser_sessions ADD COLUMN session_storage TEXT;

-- Add consent_data column for tracking cookie consent status
ALTER TABLE browser_sessions ADD COLUMN consent_data TEXT;

-- Add storage_updated_at to track when storage was last updated
ALTER TABLE browser_sessions ADD COLUMN storage_updated_at DATETIME;

-- Update existing sessions to have NULL storage (backward compatibility)
-- NOTE(review): this reads an `updated_at` column, but the fallback
-- browser_sessions table created in db_manager.py has no such column
-- (only created_at/last_used) -- confirm schema_v2.sql defines updated_at,
-- otherwise this statement fails.
UPDATE browser_sessions
SET storage_updated_at = updated_at
WHERE storage_updated_at IS NULL;
66
database/migrations/add_fingerprint_persistence.sql
Normale Datei
66
database/migrations/add_fingerprint_persistence.sql
Normale Datei
@ -0,0 +1,66 @@
|
||||
-- Migration: Add fingerprint persistence fields for account-bound fingerprints
-- Date: 2025-01-13
-- NOTE: the ALTER TABLE statements are not idempotent (SQLite fails when the
-- column already exists); run this migration at most once per database.

-- Add new columns to browser_fingerprints table for persistent fingerprint support
ALTER TABLE browser_fingerprints ADD COLUMN static_components TEXT; -- JSON: Unchangeable hardware/platform values
ALTER TABLE browser_fingerprints ADD COLUMN rotation_seed TEXT; -- Seed for deterministic noise generation
ALTER TABLE browser_fingerprints ADD COLUMN rotation_policy TEXT DEFAULT 'normal'; -- strict/normal/relaxed
ALTER TABLE browser_fingerprints ADD COLUMN last_major_rotation TIMESTAMP;
ALTER TABLE browser_fingerprints ADD COLUMN trust_score REAL DEFAULT 0.0; -- How established this fingerprint is
ALTER TABLE browser_fingerprints ADD COLUMN evolution_history TEXT; -- JSON: Track gradual changes
ALTER TABLE browser_fingerprints ADD COLUMN account_bound BOOLEAN DEFAULT 0; -- Is this bound to specific account(s)

-- Create table for fingerprint-account associations (many-to-many)
CREATE TABLE IF NOT EXISTS fingerprint_accounts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    fingerprint_id TEXT NOT NULL,
    account_id TEXT NOT NULL,
    assigned_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    primary_fingerprint BOOLEAN DEFAULT 0,
    last_used TIMESTAMP,
    success_count INTEGER DEFAULT 0,
    failure_count INTEGER DEFAULT 0,
    FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id),
    FOREIGN KEY (account_id) REFERENCES accounts(id),
    UNIQUE(fingerprint_id, account_id)
);

-- Create table for fingerprint rotation history (audit trail of changes)
CREATE TABLE IF NOT EXISTS fingerprint_rotation_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    fingerprint_id TEXT NOT NULL,
    rotation_type TEXT NOT NULL, -- 'minor', 'gradual', 'major'
    rotated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    previous_values TEXT NOT NULL, -- JSON: What changed
    new_values TEXT NOT NULL, -- JSON: New values
    trigger_reason TEXT, -- Why rotation happened
    FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id)
);

-- Create indexes for performance
CREATE INDEX IF NOT EXISTS idx_fingerprints_account_bound ON browser_fingerprints(account_bound);
CREATE INDEX IF NOT EXISTS idx_fingerprints_trust_score ON browser_fingerprints(trust_score);
CREATE INDEX IF NOT EXISTS idx_fingerprints_rotation_policy ON browser_fingerprints(rotation_policy);
CREATE INDEX IF NOT EXISTS idx_fingerprint_accounts_account ON fingerprint_accounts(account_id);
CREATE INDEX IF NOT EXISTS idx_fingerprint_accounts_fingerprint ON fingerprint_accounts(fingerprint_id);
CREATE INDEX IF NOT EXISTS idx_rotation_history_fingerprint ON fingerprint_rotation_history(fingerprint_id);
CREATE INDEX IF NOT EXISTS idx_rotation_history_timestamp ON fingerprint_rotation_history(rotated_at);

-- Create view for account fingerprint status.
-- NOTE(review): the WHERE clause keeps only rows with a fingerprint
-- association, which effectively turns the LEFT JOINs into inner joins
-- (accounts without any fingerprint are excluded) -- confirm intended.
CREATE VIEW IF NOT EXISTS v_account_fingerprints AS
SELECT
    a.id as account_id,
    a.username,
    bf.id as fingerprint_id,
    bf.trust_score,
    bf.rotation_policy,
    bf.last_major_rotation,
    fa.primary_fingerprint,
    fa.last_used,
    fa.success_count,
    fa.failure_count,
    -- NULLIF guards against division by zero when there are no attempts
    ROUND(CAST(fa.success_count AS REAL) / NULLIF(fa.success_count + fa.failure_count, 0), 2) as success_rate
FROM accounts a
LEFT JOIN fingerprint_accounts fa ON a.id = fa.account_id
LEFT JOIN browser_fingerprints bf ON fa.fingerprint_id = bf.id
WHERE fa.primary_fingerprint = 1 OR fa.fingerprint_id IS NOT NULL;
18
database/migrations/add_fingerprint_support.sql
Normale Datei
18
database/migrations/add_fingerprint_support.sql
Normale Datei
@ -0,0 +1,18 @@
|
||||
-- Migration: Add fingerprint support to accounts table
-- This migration adds fingerprint/session tracking columns to accounts.
-- NOTE: ALTER TABLE ADD COLUMN fails if the column already exists (SQLite
-- has no "IF NOT EXISTS" for columns), so run this at most once;
-- db_manager.init_db() performs the same migration defensively in Python.

-- Add fingerprint_id column to accounts table if it doesn't exist
ALTER TABLE accounts ADD COLUMN fingerprint_id TEXT;

-- Add session_id column to accounts table if it doesn't exist
ALTER TABLE accounts ADD COLUMN session_id TEXT;

-- Add last_session_update column to track session health
ALTER TABLE accounts ADD COLUMN last_session_update TEXT;

-- Create index for faster lookups
CREATE INDEX IF NOT EXISTS idx_accounts_fingerprint ON accounts(fingerprint_id);
CREATE INDEX IF NOT EXISTS idx_accounts_session ON accounts(session_id);

-- Update existing accounts to have NULL fingerprint_id (will be generated on login)
-- NOTE(review): setting NULL where the value is already NULL is a no-op;
-- kept for documentation purposes only.
UPDATE accounts SET fingerprint_id = NULL WHERE fingerprint_id IS NULL;
156
database/migrations/add_method_rotation_system.sql
Normale Datei
156
database/migrations/add_method_rotation_system.sql
Normale Datei
@ -0,0 +1,156 @@
|
||||
-- Migration: Add Method Rotation System
-- Version: 2025-07-24-001
-- Description: Adds complete method rotation infrastructure for tracking and managing
--              registration/login method strategies across all platforms.
-- FIX: the seed row for 'gmail_recovery' originally ended with
--   '["secondary", "secure"]);
-- i.e. the closing single quote of the tags JSON literal was missing — a hard
-- SQL syntax error that aborted the entire migration. Corrected below.

-- Method strategies table - stores configuration and performance data for each method
CREATE TABLE IF NOT EXISTS method_strategies (
    id TEXT PRIMARY KEY,
    platform TEXT NOT NULL,
    method_name TEXT NOT NULL,
    priority INTEGER NOT NULL DEFAULT 5,
    success_rate REAL DEFAULT 0.0,
    failure_rate REAL DEFAULT 0.0,
    last_success TIMESTAMP,
    last_failure TIMESTAMP,
    cooldown_period INTEGER DEFAULT 0, -- seconds
    max_daily_attempts INTEGER DEFAULT 10,
    risk_level TEXT DEFAULT 'MEDIUM', -- LOW, MEDIUM, HIGH
    is_active BOOLEAN DEFAULT 1,
    configuration TEXT, -- JSON configuration for method-specific settings
    tags TEXT, -- JSON array for method categorization
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(platform, method_name)
);

-- Rotation sessions table - tracks active rotation sessions
CREATE TABLE IF NOT EXISTS rotation_sessions (
    id TEXT PRIMARY KEY,
    platform TEXT NOT NULL,
    account_id TEXT,
    current_method TEXT NOT NULL,
    attempted_methods TEXT, -- JSON array of attempted method names
    session_start TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_rotation TIMESTAMP,
    rotation_count INTEGER DEFAULT 0,
    success_count INTEGER DEFAULT 0,
    failure_count INTEGER DEFAULT 0,
    is_active BOOLEAN DEFAULT 1,
    rotation_reason TEXT,
    fingerprint_id TEXT,
    session_metadata TEXT, -- JSON for additional session data
    FOREIGN KEY (account_id) REFERENCES accounts(id),
    FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id)
);

-- Rotation events table - detailed event logging for all rotation activities
CREATE TABLE IF NOT EXISTS rotation_events (
    id TEXT PRIMARY KEY,
    session_id TEXT NOT NULL,
    method_name TEXT NOT NULL,
    event_type TEXT NOT NULL, -- SUCCESS, FAILURE, ROTATION, COOLDOWN, CONFIG_CHANGE
    timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    details TEXT, -- JSON event-specific details
    error_message TEXT,
    performance_metrics TEXT, -- JSON: execution_time, memory_usage, etc.
    correlation_id TEXT, -- For linking related events
    FOREIGN KEY (session_id) REFERENCES rotation_sessions(id)
);

-- Method performance analytics table - aggregated daily performance data
CREATE TABLE IF NOT EXISTS method_performance_analytics (
    id TEXT PRIMARY KEY,
    platform TEXT NOT NULL,
    method_name TEXT NOT NULL,
    date DATE NOT NULL,
    total_attempts INTEGER DEFAULT 0,
    successful_attempts INTEGER DEFAULT 0,
    failed_attempts INTEGER DEFAULT 0,
    avg_execution_time REAL DEFAULT 0.0,
    avg_success_rate REAL DEFAULT 0.0,
    peak_usage_hour INTEGER, -- 0-23 hour when most used
    error_categories TEXT, -- JSON: categorized error types and counts
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(platform, method_name, date)
);

-- Method cooldowns table - tracks temporary method restrictions
CREATE TABLE IF NOT EXISTS method_cooldowns (
    id TEXT PRIMARY KEY,
    platform TEXT NOT NULL,
    method_name TEXT NOT NULL,
    cooldown_until TIMESTAMP NOT NULL,
    reason TEXT NOT NULL,
    applied_by TEXT DEFAULT 'system',
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(platform, method_name)
);

-- Platform method states table - stores platform-specific rotation state
CREATE TABLE IF NOT EXISTS platform_method_states (
    id TEXT PRIMARY KEY,
    platform TEXT NOT NULL,
    last_successful_method TEXT,
    last_successful_at TIMESTAMP,
    preferred_methods TEXT, -- JSON array of method names in preference order
    blocked_methods TEXT, -- JSON array of temporarily blocked methods
    daily_attempt_counts TEXT, -- JSON: {"email": 3, "phone": 1}
    reset_date DATE, -- When daily counts reset
    rotation_strategy TEXT DEFAULT 'adaptive', -- sequential, random, adaptive, smart
    emergency_mode BOOLEAN DEFAULT 0,
    metadata TEXT, -- JSON: additional platform-specific state
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(platform)
);

-- Indexes for performance optimization
CREATE INDEX IF NOT EXISTS idx_method_strategies_platform ON method_strategies(platform);
CREATE INDEX IF NOT EXISTS idx_method_strategies_active ON method_strategies(platform, is_active);
CREATE INDEX IF NOT EXISTS idx_method_strategies_priority ON method_strategies(platform, priority DESC, success_rate DESC);

CREATE INDEX IF NOT EXISTS idx_rotation_sessions_platform ON rotation_sessions(platform);
CREATE INDEX IF NOT EXISTS idx_rotation_sessions_active ON rotation_sessions(platform, is_active);
CREATE INDEX IF NOT EXISTS idx_rotation_sessions_account ON rotation_sessions(account_id);

CREATE INDEX IF NOT EXISTS idx_rotation_events_session ON rotation_events(session_id);
CREATE INDEX IF NOT EXISTS idx_rotation_events_timestamp ON rotation_events(timestamp);
CREATE INDEX IF NOT EXISTS idx_rotation_events_method ON rotation_events(method_name);

CREATE INDEX IF NOT EXISTS idx_method_performance_platform_date ON method_performance_analytics(platform, date);
CREATE INDEX IF NOT EXISTS idx_method_performance_method ON method_performance_analytics(method_name);

CREATE INDEX IF NOT EXISTS idx_method_cooldowns_platform_method ON method_cooldowns(platform, method_name);
CREATE INDEX IF NOT EXISTS idx_method_cooldowns_until ON method_cooldowns(cooldown_until);

CREATE INDEX IF NOT EXISTS idx_platform_method_states_platform ON platform_method_states(platform);

-- Insert default method strategies for existing platforms
INSERT OR IGNORE INTO method_strategies (id, platform, method_name, priority, max_daily_attempts, cooldown_period, risk_level, configuration, tags) VALUES
-- Instagram methods
('instagram_email', 'instagram', 'email', 8, 20, 300, 'LOW', '{"email_domain": "z5m7q9dk3ah2v1plx6ju.com", "require_phone_verification": false, "auto_verify_email": true}', '["primary", "reliable"]'),
('instagram_phone', 'instagram', 'phone', 6, 10, 600, 'MEDIUM', '{"require_email_backup": true, "phone_verification_timeout": 300}', '["secondary", "verification"]'),
('instagram_social', 'instagram', 'social_login', 4, 5, 1800, 'HIGH', '{"supported_providers": ["facebook"], "fallback_to_email": true}', '["alternative", "high_risk"]'),

-- TikTok methods
('tiktok_email', 'tiktok', 'email', 8, 25, 240, 'LOW', '{"email_domain": "z5m7q9dk3ah2v1plx6ju.com", "require_phone_verification": false}', '["primary", "reliable"]'),
('tiktok_phone', 'tiktok', 'phone', 7, 15, 480, 'MEDIUM', '{"require_email_backup": false, "phone_verification_timeout": 180}', '["secondary", "fast"]'),

-- X (Twitter) methods
('x_email', 'x', 'email', 8, 15, 360, 'LOW', '{"email_domain": "z5m7q9dk3ah2v1plx6ju.com", "require_phone_verification": true}', '["primary", "stable"]'),
('x_phone', 'x', 'phone', 6, 8, 720, 'MEDIUM', '{"require_email_backup": true, "phone_verification_timeout": 300}', '["secondary", "verification"]'),

-- Gmail methods
('gmail_standard', 'gmail', 'standard_registration', 9, 30, 180, 'LOW', '{"recovery_email": false, "recovery_phone": false}', '["primary", "google"]'),
('gmail_recovery', 'gmail', 'recovery_registration', 7, 10, 600, 'MEDIUM', '{"recovery_email": true, "recovery_phone": false}', '["secondary", "secure"]');

-- Insert default platform method states
INSERT OR IGNORE INTO platform_method_states (id, platform, preferred_methods, rotation_strategy, reset_date) VALUES
('state_instagram', 'instagram', '["email", "phone", "social_login"]', 'adaptive', DATE('now')),
('state_tiktok', 'tiktok', '["email", "phone"]', 'adaptive', DATE('now')),
('state_x', 'x', '["email", "phone"]', 'adaptive', DATE('now')),
('state_gmail', 'gmail', '["standard_registration", "recovery_registration"]', 'adaptive', DATE('now'));

-- Record the migration. NOTE(review): schema_migrations is not created in this
-- file — this INSERT assumes the migration runner creates it; verify.
INSERT OR IGNORE INTO schema_migrations (version, description, applied_at) VALUES
('2025-07-24-001', 'Add Method Rotation System', CURRENT_TIMESTAMP);
|
||||
60
database/migrations/remove_unused_fingerprint_columns.sql
Normale Datei
60
database/migrations/remove_unused_fingerprint_columns.sql
Normale Datei
@ -0,0 +1,60 @@
|
||||
-- Migration: Remove unused fingerprint columns and tables
-- Date: 2025-01-13
-- Description: Removes evolution history, trust score, rotation policy and
--              related unused columns by rebuilding browser_fingerprints
--              (SQLite cannot DROP COLUMN on older versions, hence the
--              create-copy-drop-rename dance below).
-- NOTE(review): the rebuild is not wrapped in BEGIN/COMMIT here and does not
-- toggle PRAGMA foreign_keys — presumably the migration runner provides the
-- transaction; confirm, otherwise a failure mid-way leaves a half-migrated DB.

-- Drop unused table
DROP TABLE IF EXISTS fingerprint_rotation_history;

-- Drop unused view (it referenced the columns being removed)
DROP VIEW IF EXISTS v_account_fingerprints;

-- Step 1: create a temporary table with the desired (trimmed) schema
CREATE TABLE browser_fingerprints_new (
    id TEXT PRIMARY KEY,
    canvas_noise_config TEXT,
    webrtc_config TEXT,
    fonts TEXT,
    hardware_config TEXT,
    navigator_props TEXT,
    webgl_vendor TEXT,
    webgl_renderer TEXT,
    audio_context_config TEXT,
    timezone TEXT,
    timezone_offset INTEGER,
    plugins TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    last_rotated TIMESTAMP,
    platform_specific TEXT,
    static_components TEXT,
    rotation_seed TEXT,
    account_bound BOOLEAN DEFAULT FALSE
);

-- Step 2: copy data from the old table (excluding the unused columns)
INSERT INTO browser_fingerprints_new (
    id, canvas_noise_config, webrtc_config, fonts,
    hardware_config, navigator_props, webgl_vendor,
    webgl_renderer, audio_context_config, timezone,
    timezone_offset, plugins, created_at, last_rotated,
    platform_specific, static_components, rotation_seed,
    account_bound
)
SELECT
    id, canvas_noise_config, webrtc_config, fonts,
    hardware_config, navigator_props, webgl_vendor,
    webgl_renderer, audio_context_config, timezone,
    timezone_offset, plugins, created_at, last_rotated,
    platform_specific, static_components, rotation_seed,
    account_bound
FROM browser_fingerprints;

-- Step 3: drop the old table
DROP TABLE browser_fingerprints;

-- Step 4: rename the new table to the original name
ALTER TABLE browser_fingerprints_new RENAME TO browser_fingerprints;

-- Step 5: recreate the indexes (dropped together with the old table)
CREATE INDEX idx_fingerprints_created ON browser_fingerprints(created_at);
CREATE INDEX idx_fingerprints_rotated ON browser_fingerprints(last_rotated);
CREATE INDEX idx_fingerprints_account_bound ON browser_fingerprints(account_bound);
||||
187
database/schema_v2.sql
Normale Datei
187
database/schema_v2.sql
Normale Datei
@ -0,0 +1,187 @@
|
||||
-- Clean Architecture Database Schema v2
-- Extends the existing schema with new tables for session management,
-- fingerprints, rate limiting, analytics, and error tracking.

-- Session Management: one row per persisted browser session.
-- Cookie/storage payloads are stored encrypted as JSON strings.
CREATE TABLE IF NOT EXISTS browser_sessions (
    id TEXT PRIMARY KEY,
    fingerprint_id TEXT NOT NULL,
    cookies TEXT NOT NULL, -- JSON encrypted
    local_storage TEXT, -- JSON encrypted
    session_storage TEXT, -- JSON encrypted
    proxy_config TEXT, -- JSON
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    health_score REAL DEFAULT 1.0, -- 1.0 = fully healthy; degraded by errors
    account_id TEXT,
    user_agent TEXT,
    viewport_width INTEGER DEFAULT 1920,
    viewport_height INTEGER DEFAULT 1080,
    locale TEXT DEFAULT 'de-DE',
    timezone TEXT DEFAULT 'Europe/Berlin',
    active BOOLEAN DEFAULT 1,
    error_count INTEGER DEFAULT 0,
    success_count INTEGER DEFAULT 0,
    FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id),
    FOREIGN KEY (account_id) REFERENCES accounts(id)
);

-- Fingerprints: the spoofed browser identity injected into each session.
CREATE TABLE IF NOT EXISTS browser_fingerprints (
    id TEXT PRIMARY KEY,
    canvas_noise_config TEXT NOT NULL, -- JSON
    webrtc_config TEXT NOT NULL, -- JSON
    fonts TEXT NOT NULL, -- JSON array
    hardware_config TEXT NOT NULL, -- JSON
    navigator_props TEXT NOT NULL, -- JSON
    webgl_vendor TEXT,
    webgl_renderer TEXT,
    audio_context_config TEXT, -- JSON
    timezone TEXT,
    timezone_offset INTEGER,
    plugins TEXT, -- JSON array
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_rotated TIMESTAMP,
    platform_specific TEXT, -- platform-specific adjustments
    static_components TEXT, -- JSON: unchangeable hardware/platform values
    rotation_seed TEXT, -- seed for deterministic noise generation
    account_bound BOOLEAN DEFAULT 0 -- is this bound to specific account(s)
);

-- Rate Limiting: one row per throttled/delayed action.
CREATE TABLE IF NOT EXISTS rate_limit_events (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    action_type TEXT NOT NULL,
    duration_ms INTEGER NOT NULL,
    success BOOLEAN NOT NULL,
    response_code INTEGER,
    session_id TEXT,
    url TEXT,
    element_selector TEXT,
    error_message TEXT,
    retry_count INTEGER DEFAULT 0,
    metadata TEXT, -- JSON
    FOREIGN KEY (session_id) REFERENCES browser_sessions(id)
);

-- Analytics: one row per account-creation attempt.
CREATE TABLE IF NOT EXISTS account_creation_analytics (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    event_id TEXT UNIQUE NOT NULL,
    timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    account_id TEXT,
    session_id TEXT NOT NULL,
    fingerprint_id TEXT NOT NULL,
    duration_seconds REAL NOT NULL,
    success BOOLEAN NOT NULL,
    error_type TEXT,
    error_message TEXT,
    workflow_steps TEXT NOT NULL, -- JSON
    metadata TEXT, -- JSON
    total_retry_count INTEGER DEFAULT 0,
    network_requests INTEGER DEFAULT 0,
    screenshots_taken INTEGER DEFAULT 0,
    proxy_used BOOLEAN DEFAULT 0,
    proxy_type TEXT,
    browser_type TEXT DEFAULT 'chromium',
    headless BOOLEAN DEFAULT 0,
    success_rate REAL,
    FOREIGN KEY (account_id) REFERENCES accounts(id),
    FOREIGN KEY (session_id) REFERENCES browser_sessions(id),
    FOREIGN KEY (fingerprint_id) REFERENCES browser_fingerprints(id)
);

-- Error Events: structured error log with recovery tracking.
CREATE TABLE IF NOT EXISTS error_events (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    error_id TEXT UNIQUE NOT NULL,
    timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    error_type TEXT NOT NULL,
    error_message TEXT NOT NULL,
    stack_trace TEXT,
    context TEXT NOT NULL, -- JSON
    recovery_attempted BOOLEAN DEFAULT 0,
    recovery_successful BOOLEAN DEFAULT 0,
    recovery_attempts TEXT, -- JSON array
    severity TEXT DEFAULT 'medium',
    platform TEXT,
    session_id TEXT,
    account_id TEXT,
    correlation_id TEXT,
    user_impact BOOLEAN DEFAULT 1,
    system_impact BOOLEAN DEFAULT 0,
    data_loss BOOLEAN DEFAULT 0,
    FOREIGN KEY (session_id) REFERENCES browser_sessions(id),
    FOREIGN KEY (account_id) REFERENCES accounts(id)
);

-- Rate Limit Policies: per-action delay/backoff configuration.
CREATE TABLE IF NOT EXISTS rate_limit_policies (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    action_type TEXT UNIQUE NOT NULL,
    min_delay REAL NOT NULL,
    max_delay REAL NOT NULL,
    adaptive BOOLEAN DEFAULT 1,
    backoff_multiplier REAL DEFAULT 1.5,
    max_retries INTEGER DEFAULT 3,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Session Pool Status: periodic snapshots of pool health.
CREATE TABLE IF NOT EXISTS session_pool_status (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    total_sessions INTEGER NOT NULL,
    active_sessions INTEGER NOT NULL,
    healthy_sessions INTEGER NOT NULL,
    failed_sessions INTEGER NOT NULL,
    avg_health_score REAL,
    metadata TEXT -- JSON
);

-- Indexes for performance
CREATE INDEX IF NOT EXISTS idx_sessions_last_used ON browser_sessions(last_used);
CREATE INDEX IF NOT EXISTS idx_sessions_health ON browser_sessions(health_score);
CREATE INDEX IF NOT EXISTS idx_sessions_active ON browser_sessions(active);
CREATE INDEX IF NOT EXISTS idx_rate_limits_timestamp ON rate_limit_events(timestamp);
CREATE INDEX IF NOT EXISTS idx_rate_limits_action ON rate_limit_events(action_type);
CREATE INDEX IF NOT EXISTS idx_analytics_timestamp ON account_creation_analytics(timestamp);
CREATE INDEX IF NOT EXISTS idx_analytics_success ON account_creation_analytics(success);
-- NOTE(review): this index is named "_platform" but indexes the metadata JSON
-- column (account_creation_analytics has no platform column). An index over a
-- whole JSON blob is of little use — confirm intent and fix name or target.
CREATE INDEX IF NOT EXISTS idx_analytics_platform ON account_creation_analytics(metadata);
CREATE INDEX IF NOT EXISTS idx_errors_timestamp ON error_events(timestamp);
CREATE INDEX IF NOT EXISTS idx_errors_type ON error_events(error_type);
CREATE INDEX IF NOT EXISTS idx_errors_severity ON error_events(severity);

-- Views for common queries

-- Per-session health rollup including downstream account-creation outcomes.
CREATE VIEW IF NOT EXISTS v_session_health AS
SELECT
    bs.id,
    bs.health_score,
    bs.error_count,
    bs.success_count,
    bs.last_used,
    COUNT(aca.id) as total_accounts,
    AVG(aca.success_rate) as avg_success_rate
FROM browser_sessions bs
LEFT JOIN account_creation_analytics aca ON bs.id = aca.session_id
GROUP BY bs.id;

-- Daily attempt/success/duration aggregates.
CREATE VIEW IF NOT EXISTS v_daily_analytics AS
SELECT
    DATE(timestamp) as date,
    COUNT(*) as total_attempts,
    SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful,
    AVG(duration_seconds) as avg_duration,
    AVG(total_retry_count) as avg_retries
FROM account_creation_analytics
GROUP BY DATE(timestamp);

-- Error frequency and recovery rate per error type.
CREATE VIEW IF NOT EXISTS v_error_summary AS
SELECT
    error_type,
    COUNT(*) as error_count,
    MIN(timestamp) as first_occurrence,
    MAX(timestamp) as last_occurrence,
    AVG(CASE WHEN recovery_successful = 1 THEN 1.0 ELSE 0.0 END) as recovery_rate
FROM error_events
GROUP BY error_type;
||||
267
debug_video_issue.py
Normale Datei
267
debug_video_issue.py
Normale Datei
@ -0,0 +1,267 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Debug Video Issue - Final Diagnostic Script
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from browser.playwright_manager import PlaywrightManager
|
||||
|
||||
# DEBUG level so every Playwright/network step is visible during the diagnosis run.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("video_debug")
|
||||
|
||||
async def debug_video_issue() -> None:
    """Comprehensive video issue debugging.

    Drives a fresh, non-headless PlaywrightManager to instagram.com and runs a
    series of in-page JavaScript probes: codec/DRM support, Chrome API surface,
    automation markers, and Instagram-rendered error messages. Findings are
    printed to stdout; nothing is returned. The browser is always closed in the
    ``finally`` block.
    """

    print("🔍 STARTING COMPREHENSIVE VIDEO DEBUG ANALYSIS")

    # Test with a fresh manager; headless=False so the run can be watched live.
    manager = PlaywrightManager(headless=False)

    try:
        # NOTE(review): start()/navigate_to() are called without await, so
        # PlaywrightManager presumably wraps the *sync* Playwright API even
        # though this function is async — confirm against the manager.
        page = manager.start()

        print("📋 STEP 1: Navigating to Instagram...")
        success = manager.navigate_to("https://www.instagram.com")

        if not success:
            print("❌ Failed to navigate to Instagram")
            return

        print("📋 STEP 2: Checking browser capabilities...")

        # Check all video-related capabilities in one in-page evaluation.
        capabilities = page.evaluate("""
        () => {
        const results = {
        // Basic video support
        video_element: !!document.createElement('video'),
        video_can_play_mp4: document.createElement('video').canPlayType('video/mp4'),
        video_can_play_webm: document.createElement('video').canPlayType('video/webm'),

        // DRM Support
        widevine_support: !!navigator.requestMediaKeySystemAccess,
        media_source: !!window.MediaSource,
        encrypted_media: !!window.MediaKeys,

        // Chrome APIs
        chrome_present: !!window.chrome,
        chrome_runtime: !!(window.chrome && window.chrome.runtime),
        chrome_app: window.chrome ? window.chrome.app : 'missing',
        chrome_csi: !!(window.chrome && window.chrome.csi),
        chrome_loadtimes: !!(window.chrome && window.chrome.loadTimes),

        // Media Devices
        media_devices: !!(navigator.mediaDevices),
        enumerate_devices: !!(navigator.mediaDevices && navigator.mediaDevices.enumerateDevices),
        get_user_media: !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia),

        // Performance API
        performance_now: !!performance.now,
        performance_timing: !!performance.timing,

        // Automation markers
        webdriver_present: !!navigator.webdriver,
        automation_markers: {
        webdriver_script_fn: !!navigator.__webdriver_script_fn,
        webdriver_evaluate: !!window.__webdriver_evaluate,
        selenium_unwrapped: !!document.__selenium_unwrapped,
        chrome_webdriver: !!(window.chrome && window.chrome.webdriver)
        },

        // User agent analysis
        user_agent: navigator.userAgent,
        platform: navigator.platform,
        vendor: navigator.vendor,
        languages: navigator.languages,

        // Screen info
        screen_width: screen.width,
        screen_height: screen.height,
        device_pixel_ratio: devicePixelRatio,

        // Timing
        page_load_time: performance.now()
        };

        return results;
        }
        """)

        print("📊 BROWSER CAPABILITIES:")
        for key, value in capabilities.items():
            print(f" {key}: {value}")

        print("\n📋 STEP 3: Testing video element creation...")

        # Probe a hidden <video> element: properties, methods, codec support.
        video_test = page.evaluate("""
        () => {
        // Create video element and test
        const video = document.createElement('video');
        video.style.display = 'none';
        document.body.appendChild(video);

        const results = {
        video_created: true,
        video_properties: {
        autoplay: video.autoplay,
        controls: video.controls,
        muted: video.muted,
        preload: video.preload,
        crossOrigin: video.crossOrigin
        },
        video_methods: {
        canPlayType: typeof video.canPlayType,
        play: typeof video.play,
        pause: typeof video.pause,
        load: typeof video.load
        },
        codec_support: {
        mp4_h264: video.canPlayType('video/mp4; codecs="avc1.42E01E"'),
        mp4_h265: video.canPlayType('video/mp4; codecs="hev1.1.6.L93.B0"'),
        webm_vp8: video.canPlayType('video/webm; codecs="vp8"'),
        webm_vp9: video.canPlayType('video/webm; codecs="vp9"'),
        audio_aac: video.canPlayType('audio/mp4; codecs="mp4a.40.2"'),
        audio_opus: video.canPlayType('audio/webm; codecs="opus"')
        }
        };

        document.body.removeChild(video);
        return results;
        }
        """)

        print("\n📊 VIDEO ELEMENT TEST:")
        for key, value in video_test.items():
            print(f" {key}: {value}")

        print("\n📋 STEP 4: Checking console errors...")

        # Wait a bit for any console errors
        await asyncio.sleep(2)

        # Check for specific Instagram video errors
        print("\n📋 STEP 5: Looking for Instagram-specific issues...")

        # Try to find any video elements or error messages. The class selector
        # and the German keywords ('abgespielt', 'richtig') target Instagram's
        # localized "video could not be played" error banner.
        video_status = page.evaluate("""
        () => {
        const results = {
        video_elements_count: document.querySelectorAll('video').length,
        error_messages: [],
        instagram_classes: {
        video_error_present: !!document.querySelector('.x6s0dn4.xatbrnm.x9f619'),
        video_containers: document.querySelectorAll('[class*="video"]').length,
        error_spans: []
        }
        };

        // Look for error messages
        const errorSpans = document.querySelectorAll('span');
        errorSpans.forEach(span => {
        const text = span.textContent.trim();
        if (text.includes('Video') || text.includes('video') || text.includes('abgespielt') || text.includes('richtig')) {
        results.instagram_classes.error_spans.push({
        text: text,
        classes: span.className
        });
        }
        });

        return results;
        }
        """)

        print("\n📊 INSTAGRAM VIDEO STATUS:")
        for key, value in video_status.items():
            print(f" {key}: {value}")

        print("\n📋 STEP 6: Testing DRM capabilities...")

        # Playwright awaits the returned Promise, so drm_test arrives as a dict.
        drm_test = page.evaluate("""
        () => {
        return new Promise((resolve) => {
        if (!navigator.requestMediaKeySystemAccess) {
        resolve({drm_support: false, error: 'No requestMediaKeySystemAccess'});
        return;
        }

        navigator.requestMediaKeySystemAccess('com.widevine.alpha', [{
        initDataTypes: ['cenc'],
        videoCapabilities: [{contentType: 'video/mp4; codecs="avc1.42E01E"'}]
        }]).then(access => {
        resolve({
        drm_support: true,
        key_system: access.keySystem,
        configuration: access.getConfiguration()
        });
        }).catch(error => {
        resolve({
        drm_support: false,
        error: error.message
        });
        });
        });
        }
        """)

        print("\n📊 DRM TEST RESULTS:")
        print(f" {drm_test}")

        print("\n🎯 FINAL DIAGNOSIS:")
        print("=" * 50)

        # Analyze results: collect every red flag found by the probes above.
        issues = []

        if not capabilities.get('video_element'):
            issues.append("❌ Video elements not supported")

        if capabilities.get('webdriver_present'):
            issues.append("❌ Webdriver detection present")

        if not capabilities.get('widevine_support'):
            issues.append("❌ Widevine DRM not supported")

        if video_status.get('instagram_classes', {}).get('video_error_present'):
            issues.append("❌ Instagram video error message detected")

        if not drm_test.get('drm_support'):
            issues.append(f"❌ DRM test failed: {drm_test.get('error', 'Unknown')}")

        automation_markers = capabilities.get('automation_markers', {})
        detected_markers = [k for k, v in automation_markers.items() if v]
        if detected_markers:
            issues.append(f"❌ Automation markers detected: {detected_markers}")

        if issues:
            print("🚨 CRITICAL ISSUES FOUND:")
            for issue in issues:
                print(f" {issue}")
        else:
            print("✅ No obvious technical issues detected")
            print("🤔 The problem might be:")
            print(" - Account-specific restrictions")
            print(" - Geographic blocking")
            print(" - Instagram A/B testing")
            print(" - Specific video content restrictions")

        print("\n📋 RECOMMENDATION:")
        # Rough triage by issue count: many issues => fingerprinting problem,
        # none => likely a policy/account-level restriction.
        if len(issues) > 3:
            print(" 🔄 Technical fixes needed - automation still detectable")
        elif len(issues) > 0:
            print(" 🔧 Some technical issues remain")
        else:
            print(" 💡 Technical setup appears correct - likely policy/account issue")

    except Exception as e:
        # Broad catch is deliberate: this is a terminal diagnostic script and
        # any failure should be reported rather than crash with a traceback.
        logger.error(f"Debug failed: {e}")
        print(f"❌ Debug script failed: {e}")

    finally:
        manager.close()

if __name__ == "__main__":
    asyncio.run(debug_video_issue())
|
||||
342
docs/CLEAN_ARCHITECTURE.md
Normale Datei
342
docs/CLEAN_ARCHITECTURE.md
Normale Datei
@ -0,0 +1,342 @@
|
||||
# Clean Architecture Design - AccountForger
|
||||
|
||||
## Übersicht
|
||||
|
||||
Diese Dokumentation beschreibt die saubere Architektur für das AccountForger-System mit Fokus auf das Fingerprint-Management und die Login-Funktionalität mit gespeicherten Fingerprints.
|
||||
|
||||
## Architektur-Schichten
|
||||
|
||||
### 1. Domain Layer (Innerster Kreis)
|
||||
**Keine Abhängigkeiten nach außen!**
|
||||
|
||||
```
|
||||
domain/
|
||||
├── entities/
|
||||
│ ├── account.py # Account Entity
|
||||
│ ├── browser_fingerprint.py # Fingerprint Entity
|
||||
│ └── browser_session.py # Session Entity
|
||||
├── value_objects/
|
||||
│ ├── fingerprint_id.py # Eindeutige Fingerprint-ID
|
||||
│ ├── account_id.py # Eindeutige Account-ID
|
||||
│ └── session_data.py # Session-Daten (Cookies, Storage)
|
||||
└── repositories/ # Interfaces (Abstrakte Klassen)
|
||||
├── fingerprint_repository.py
|
||||
├── account_repository.py
|
||||
└── session_repository.py
|
||||
```
|
||||
|
||||
### 2. Application Layer
|
||||
**Orchestriert Use Cases, kennt Domain**
|
||||
|
||||
```
|
||||
application/
|
||||
├── use_cases/
|
||||
│ ├── create_account/
|
||||
│ │ ├── create_account_use_case.py
|
||||
│ │ ├── create_account_dto.py
|
||||
│ │ └── create_account_presenter.py
|
||||
│ ├── login_account/
|
||||
│ │ ├── login_with_fingerprint_use_case.py
|
||||
│ │ ├── login_dto.py
|
||||
│ │ └── login_presenter.py
|
||||
│ └── manage_fingerprint/
|
||||
│ ├── generate_fingerprint_use_case.py
|
||||
│ ├── save_fingerprint_use_case.py
|
||||
│ └── load_fingerprint_use_case.py
|
||||
└── services/
|
||||
├── fingerprint_manager.py # Orchestriert Fingerprint-Operationen
|
||||
└── session_manager.py # Verwaltet Browser-Sessions
|
||||
```
|
||||
|
||||
### 3. Infrastructure Layer
|
||||
**Implementiert Interfaces aus Domain**
|
||||
|
||||
```
|
||||
infrastructure/
|
||||
├── persistence/
|
||||
│ ├── sqlite/
|
||||
│ │ ├── sqlite_fingerprint_repository.py
|
||||
│ │ ├── sqlite_account_repository.py
|
||||
│ │ └── sqlite_session_repository.py
|
||||
│ └── migrations/
|
||||
│ └── fingerprint_schema.sql
|
||||
├── browser/
|
||||
│ ├── playwright_adapter.py # Adapter für Playwright
|
||||
│ ├── fingerprint_injector.py # Injiziert Fingerprints in Browser
|
||||
│ └── protection_service.py # Browser-Schutz
|
||||
└── external/
|
||||
├── proxy_service.py
|
||||
└── email_service.py
|
||||
```
|
||||
|
||||
### 4. Presentation Layer
|
||||
**UI und Controller**
|
||||
|
||||
```
|
||||
presentation/
|
||||
├── controllers/
|
||||
│ ├── account_controller.py
|
||||
│ └── fingerprint_controller.py
|
||||
└── views/
|
||||
├── account_view.py
|
||||
└── login_view.py
|
||||
```
|
||||
|
||||
## Fingerprint-System Design
|
||||
|
||||
### Fingerprint Entity (Kern-Domain)
|
||||
```python
|
||||
# domain/entities/browser_fingerprint.py
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
import uuid
|
||||
|
||||
@dataclass(frozen=True) # Immutable!
|
||||
class FingerprintId:
|
||||
value: str
|
||||
|
||||
@classmethod
|
||||
def generate(cls) -> 'FingerprintId':
|
||||
return cls(str(uuid.uuid4()))
|
||||
|
||||
@dataclass
|
||||
class BrowserFingerprint:
|
||||
"""Immutable Fingerprint Entity - Kern der Domain"""
|
||||
id: FingerprintId
|
||||
canvas_seed: int
|
||||
webgl_vendor: str
|
||||
webgl_renderer: str
|
||||
audio_context_params: dict
|
||||
navigator_properties: dict
|
||||
hardware_config: dict
|
||||
timezone: str
|
||||
fonts: list[str]
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
"""Serialisierung für Persistierung"""
|
||||
return {
|
||||
'id': self.id.value,
|
||||
'canvas_seed': self.canvas_seed,
|
||||
# ... weitere Felder
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict) -> 'BrowserFingerprint':
|
||||
"""Deserialisierung aus Persistierung"""
|
||||
return cls(
|
||||
id=FingerprintId(data['id']),
|
||||
canvas_seed=data['canvas_seed'],
|
||||
# ... weitere Felder
|
||||
)
|
||||
```
|
||||
|
||||
### Fingerprint-Account-Session Verknüpfung
|
||||
```python
|
||||
# domain/entities/account.py
|
||||
@dataclass
|
||||
class Account:
|
||||
id: AccountId
|
||||
username: str
|
||||
platform: str
|
||||
fingerprint_id: FingerprintId # Verknüpfung!
|
||||
created_at: datetime
|
||||
|
||||
# domain/entities/browser_session.py
|
||||
@dataclass
|
||||
class BrowserSession:
|
||||
id: SessionId
|
||||
account_id: AccountId
|
||||
fingerprint_id: FingerprintId # Gleicher Fingerprint!
|
||||
cookies: str # Encrypted
|
||||
local_storage: str # Encrypted
|
||||
session_storage: str # Encrypted
|
||||
last_used: datetime
|
||||
is_valid: bool
|
||||
```
|
||||
|
||||
### Use Case: Login mit gespeichertem Fingerprint
|
||||
```python
|
||||
# application/use_cases/login_account/login_with_fingerprint_use_case.py
|
||||
class LoginWithFingerprintUseCase:
|
||||
def __init__(self,
|
||||
account_repo: IAccountRepository,
|
||||
fingerprint_repo: IFingerprintRepository,
|
||||
session_repo: ISessionRepository,
|
||||
browser_service: IBrowserService):
|
||||
self.account_repo = account_repo
|
||||
self.fingerprint_repo = fingerprint_repo
|
||||
self.session_repo = session_repo
|
||||
self.browser_service = browser_service
|
||||
|
||||
def execute(self, account_id: str) -> LoginResult:
|
||||
# 1. Account laden
|
||||
account = self.account_repo.find_by_id(AccountId(account_id))
|
||||
if not account:
|
||||
return LoginResult.failure("Account nicht gefunden")
|
||||
|
||||
# 2. Fingerprint laden
|
||||
fingerprint = self.fingerprint_repo.find_by_id(account.fingerprint_id)
|
||||
if not fingerprint:
|
||||
return LoginResult.failure("Fingerprint nicht gefunden")
|
||||
|
||||
# 3. Session laden
|
||||
session = self.session_repo.find_by_account_id(account.id)
|
||||
if not session or not session.is_valid:
|
||||
return LoginResult.failure("Keine gültige Session")
|
||||
|
||||
# 4. Browser mit Fingerprint starten
|
||||
browser = self.browser_service.create_with_fingerprint(fingerprint)
|
||||
|
||||
# 5. Session wiederherstellen
|
||||
browser.restore_session(session)
|
||||
|
||||
# 6. Login verifizieren
|
||||
if browser.verify_login(account.platform):
|
||||
return LoginResult.success(browser)
|
||||
else:
|
||||
return LoginResult.failure("Login fehlgeschlagen")
|
||||
```
|
||||
|
||||
### Repository Pattern (Clean!)
|
||||
```python
|
||||
# domain/repositories/fingerprint_repository.py
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
class IFingerprintRepository(ABC):
|
||||
@abstractmethod
|
||||
def save(self, fingerprint: BrowserFingerprint) -> None:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def find_by_id(self, id: FingerprintId) -> Optional[BrowserFingerprint]:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def find_by_account_id(self, account_id: AccountId) -> Optional[BrowserFingerprint]:
|
||||
pass
|
||||
|
||||
# infrastructure/persistence/sqlite/sqlite_fingerprint_repository.py
|
||||
class SqliteFingerprintRepository(IFingerprintRepository):
|
||||
def save(self, fingerprint: BrowserFingerprint) -> None:
|
||||
# SQL Implementation
|
||||
query = "INSERT OR REPLACE INTO fingerprints ..."
|
||||
# Nur primitive Typen in DB!
|
||||
data = fingerprint.to_dict()
|
||||
self.db.execute(query, data)
|
||||
|
||||
def find_by_id(self, id: FingerprintId) -> Optional[BrowserFingerprint]:
|
||||
query = "SELECT * FROM fingerprints WHERE id = ?"
|
||||
row = self.db.fetchone(query, [id.value])
|
||||
return BrowserFingerprint.from_dict(row) if row else None
|
||||
```
|
||||
|
||||
### Dependency Injection Container
|
||||
```python
|
||||
# infrastructure/container.py
|
||||
class Container:
|
||||
def __init__(self):
|
||||
# Repositories
|
||||
self._fingerprint_repo = SqliteFingerprintRepository()
|
||||
self._account_repo = SqliteAccountRepository()
|
||||
self._session_repo = SqliteSessionRepository()
|
||||
|
||||
# Services
|
||||
self._browser_service = PlaywrightBrowserService()
|
||||
|
||||
# Use Cases
|
||||
self._login_use_case = LoginWithFingerprintUseCase(
|
||||
self._account_repo,
|
||||
self._fingerprint_repo,
|
||||
self._session_repo,
|
||||
self._browser_service
|
||||
)
|
||||
|
||||
@property
|
||||
def login_use_case(self) -> LoginWithFingerprintUseCase:
|
||||
return self._login_use_case
|
||||
```
|
||||
|
||||
## Datenbank-Schema
|
||||
|
||||
```sql
|
||||
-- Fingerprints Tabelle
|
||||
CREATE TABLE fingerprints (
|
||||
id TEXT PRIMARY KEY,
|
||||
canvas_seed INTEGER NOT NULL,
|
||||
webgl_vendor TEXT NOT NULL,
|
||||
webgl_renderer TEXT NOT NULL,
|
||||
audio_context_params TEXT NOT NULL, -- JSON
|
||||
navigator_properties TEXT NOT NULL, -- JSON
|
||||
hardware_config TEXT NOT NULL, -- JSON
|
||||
timezone TEXT NOT NULL,
|
||||
fonts TEXT NOT NULL, -- JSON Array
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Accounts Tabelle
|
||||
CREATE TABLE accounts (
|
||||
id TEXT PRIMARY KEY,
|
||||
username TEXT NOT NULL,
|
||||
platform TEXT NOT NULL,
|
||||
fingerprint_id TEXT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (fingerprint_id) REFERENCES fingerprints(id)
|
||||
);
|
||||
|
||||
-- Sessions Tabelle
|
||||
CREATE TABLE browser_sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
account_id TEXT NOT NULL,
|
||||
fingerprint_id TEXT NOT NULL,
|
||||
cookies TEXT NOT NULL, -- Encrypted
|
||||
local_storage TEXT, -- Encrypted
|
||||
session_storage TEXT, -- Encrypted
|
||||
last_used TIMESTAMP,
|
||||
is_valid BOOLEAN DEFAULT 1,
|
||||
FOREIGN KEY (account_id) REFERENCES accounts(id),
|
||||
FOREIGN KEY (fingerprint_id) REFERENCES fingerprints(id)
|
||||
);
|
||||
|
||||
-- Index für Performance
|
||||
CREATE INDEX idx_accounts_fingerprint ON accounts(fingerprint_id);
|
||||
CREATE INDEX idx_sessions_account ON browser_sessions(account_id);
|
||||
```
|
||||
|
||||
## Vorteile dieser Architektur
|
||||
|
||||
1. **Testbarkeit**: Jede Schicht ist isoliert testbar
|
||||
2. **Flexibilität**: Repositories können ausgetauscht werden (SQLite → PostgreSQL)
|
||||
3. **Klarheit**: Klare Verantwortlichkeiten pro Schicht
|
||||
4. **Wartbarkeit**: Änderungen sind lokal begrenzt
|
||||
5. **Fingerprint-Konsistenz**: Ein Account = Ein Fingerprint = Konsistente Sessions
|
||||
|
||||
## Login-Flow mit Fingerprint
|
||||
|
||||
1. User wählt Account aus Liste
|
||||
2. System lädt Account mit verknüpftem Fingerprint
|
||||
3. Browser wird mit exakt diesem Fingerprint gestartet
|
||||
4. Gespeicherte Session (Cookies, Storage) wird geladen
|
||||
5. Browser navigiert zur Plattform
|
||||
6. Session ist wiederhergestellt = User ist eingeloggt
|
||||
|
||||
## Beispiel-Verwendung
|
||||
|
||||
```python
|
||||
# In der Presentation Layer
|
||||
container = Container()
|
||||
|
||||
# Login mit gespeichertem Fingerprint
|
||||
result = container.login_use_case.execute(account_id="abc-123")
|
||||
|
||||
if result.success:
|
||||
browser = result.browser
|
||||
# User ist jetzt eingeloggt mit dem gleichen Fingerprint
|
||||
else:
|
||||
print(f"Login fehlgeschlagen: {result.error}")
|
||||
```
|
||||
|
||||
Diese Architektur stellt sicher, dass:
|
||||
- Fingerprints konsistent bleiben
|
||||
- Sessions zuverlässig wiederhergestellt werden
|
||||
- Der Code wartbar und erweiterbar bleibt
|
||||
- Keine zirkulären Abhängigkeiten entstehen
|
||||
3
domain/__init__.py
Normale Datei
3
domain/__init__.py
Normale Datei
@ -0,0 +1,3 @@
|
||||
"""
|
||||
Domain Layer - Enthält die Geschäftslogik und Kernkonzepte der Anwendung
|
||||
"""
|
||||
15
domain/entities/__init__.py
Normale Datei
15
domain/entities/__init__.py
Normale Datei
@ -0,0 +1,15 @@
|
||||
"""
|
||||
Domain Entities - Geschäftsobjekte mit Identität
|
||||
"""
|
||||
|
||||
from .rate_limit_policy import RateLimitPolicy
|
||||
from .browser_fingerprint import BrowserFingerprint
|
||||
from .account_creation_event import AccountCreationEvent
|
||||
from .error_event import ErrorEvent
|
||||
|
||||
__all__ = [
|
||||
'RateLimitPolicy',
|
||||
'BrowserFingerprint',
|
||||
'AccountCreationEvent',
|
||||
'ErrorEvent'
|
||||
]
|
||||
174
domain/entities/account_creation_event.py
Normale Datei
174
domain/entities/account_creation_event.py
Normale Datei
@ -0,0 +1,174 @@
|
||||
"""
|
||||
Account Creation Event Entity - Event für jede Account-Erstellung
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Dict, Any, Optional
|
||||
from enum import Enum
|
||||
import uuid
|
||||
|
||||
|
||||
class WorkflowStepStatus(Enum):
    """Lifecycle status of a single workflow step."""

    PENDING = "pending"
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    FAILED = "failed"
    SKIPPED = "skipped"
|
||||
|
||||
|
||||
@dataclass
class WorkflowStep:
    """One step of the account-creation workflow, with timing and outcome."""

    step_name: str
    start_time: datetime
    end_time: Optional[datetime] = None
    status: WorkflowStepStatus = WorkflowStepStatus.PENDING
    retry_count: int = 0
    error_message: Optional[str] = None
    metadata: Dict[str, Any] = field(default_factory=dict)

    @property
    def duration(self) -> Optional[timedelta]:
        """Elapsed time of the step, or None while start/end is missing."""
        if self.start_time is None or self.end_time is None:
            return None
        return self.end_time - self.start_time

    @property
    def success(self) -> bool:
        """True once the step finished with COMPLETED status."""
        return self.status == WorkflowStepStatus.COMPLETED

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the step into a plain dictionary."""
        elapsed = self.duration
        return {
            'step_name': self.step_name,
            'start_time': self.start_time.isoformat(),
            'end_time': self.end_time.isoformat() if self.end_time else None,
            'status': self.status.value,
            'retry_count': self.retry_count,
            'error_message': self.error_message,
            'metadata': self.metadata,
            'duration_seconds': elapsed.total_seconds() if elapsed else None
        }
|
||||
|
||||
|
||||
@dataclass
class AccountData:
    """Credentials and profile data of a created account."""

    platform: str
    username: str
    password: str
    email: str
    phone: Optional[str] = None
    full_name: Optional[str] = None
    birthday: Optional[str] = None
    profile_image: Optional[str] = None
    bio: Optional[str] = None
    verification_status: str = "unverified"
    metadata: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
class ErrorDetails:
    """Details of an error that occurred during account creation."""

    error_type: str
    error_message: str
    stack_trace: Optional[str] = None
    screenshot_path: Optional[str] = None
    recovery_attempted: bool = False
    recovery_successful: bool = False
    context: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
class AccountCreationEvent:
    """Audit event recorded for every account-creation attempt."""

    event_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    timestamp: datetime = field(default_factory=datetime.now)
    account_data: Optional[AccountData] = None
    session_id: str = ""
    fingerprint_id: str = ""
    duration: Optional[timedelta] = None
    success: bool = False
    error_details: Optional[ErrorDetails] = None
    steps_completed: List[WorkflowStep] = field(default_factory=list)

    # Performance metrics
    total_retry_count: int = 0
    network_requests: int = 0
    screenshots_taken: int = 0

    # Context information
    proxy_used: bool = False
    proxy_type: Optional[str] = None
    browser_type: str = "chromium"
    headless: bool = False

    def add_step(self, step: WorkflowStep):
        """Record a workflow step and accumulate its retries."""
        self.steps_completed.append(step)
        if step.retry_count > 0:
            self.total_retry_count += step.retry_count

    def get_step(self, step_name: str) -> Optional[WorkflowStep]:
        """Look up a recorded step by name; None if absent."""
        return next(
            (s for s in self.steps_completed if s.step_name == step_name),
            None,
        )

    def calculate_duration(self):
        """Derive the total duration from the first and last recorded step."""
        if not self.steps_completed:
            return
        earliest = min(self.steps_completed, key=lambda s: s.start_time)
        latest = max(
            self.steps_completed, key=lambda s: s.end_time or s.start_time
        )
        if latest.end_time:
            self.duration = latest.end_time - earliest.start_time

    def get_success_rate(self) -> float:
        """Fraction of recorded steps that completed successfully."""
        if not self.steps_completed:
            return 0.0
        succeeded = sum(1 for s in self.steps_completed if s.success)
        return succeeded / len(self.steps_completed)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the event for persistence.

        Note: the account password is not part of the serialized form.
        """
        account = None
        if self.account_data is not None:
            account = {
                'platform': self.account_data.platform,
                'username': self.account_data.username,
                'email': self.account_data.email,
                'phone': self.account_data.phone,
                'full_name': self.account_data.full_name,
                'birthday': self.account_data.birthday,
                'verification_status': self.account_data.verification_status,
                'metadata': self.account_data.metadata
            }
        error = None
        if self.error_details is not None:
            error = {
                'error_type': self.error_details.error_type,
                'error_message': self.error_details.error_message,
                'recovery_attempted': self.error_details.recovery_attempted,
                'recovery_successful': self.error_details.recovery_successful,
                'context': self.error_details.context
            }
        return {
            'event_id': self.event_id,
            'timestamp': self.timestamp.isoformat(),
            'account_data': account,
            'session_id': self.session_id,
            'fingerprint_id': self.fingerprint_id,
            'duration_seconds': self.duration.total_seconds() if self.duration else None,
            'success': self.success,
            'error_details': error,
            'steps_completed': [s.to_dict() for s in self.steps_completed],
            'total_retry_count': self.total_retry_count,
            'network_requests': self.network_requests,
            'screenshots_taken': self.screenshots_taken,
            'proxy_used': self.proxy_used,
            'proxy_type': self.proxy_type,
            'browser_type': self.browser_type,
            'headless': self.headless,
            'success_rate': self.get_success_rate()
        }
|
||||
276
domain/entities/browser_fingerprint.py
Normale Datei
276
domain/entities/browser_fingerprint.py
Normale Datei
@ -0,0 +1,276 @@
|
||||
"""
|
||||
Browser Fingerprint Entity - Repräsentiert einen kompletten Browser-Fingerprint
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any, Optional
|
||||
from enum import Enum
|
||||
import uuid
|
||||
|
||||
|
||||
|
||||
|
||||
@dataclass
class StaticComponents:
    """Fingerprint components that stay fixed once generated."""

    device_type: str = "desktop"  # desktop/mobile/tablet
    os_family: str = "windows"  # windows/macos/linux/android/ios
    browser_family: str = "chromium"  # chromium/firefox/safari
    gpu_vendor: str = "Intel Inc."
    gpu_model: str = "Intel Iris OpenGL Engine"
    cpu_architecture: str = "x86_64"
    base_fonts: List[str] = field(default_factory=list)
    base_resolution: tuple = (1920, 1080)
    base_timezone: str = "Europe/Berlin"

    def to_dict(self) -> Dict[str, Any]:
        """Serialize all static components into a plain dictionary."""
        return dict(
            device_type=self.device_type,
            os_family=self.os_family,
            browser_family=self.browser_family,
            gpu_vendor=self.gpu_vendor,
            gpu_model=self.gpu_model,
            cpu_architecture=self.cpu_architecture,
            base_fonts=self.base_fonts,
            base_resolution=self.base_resolution,
            base_timezone=self.base_timezone,
        )
|
||||
|
||||
|
||||
|
||||
|
||||
@dataclass
class CanvasNoise:
    """Canvas-fingerprinting protection configuration (noise injection)."""

    noise_level: float = 0.02
    seed: int = 42
    algorithm: str = "gaussian"
|
||||
|
||||
|
||||
@dataclass
class WebRTCConfig:
    """WebRTC configuration used to prevent local-IP leaks."""

    enabled: bool = True
    ice_servers: List[str] = field(default_factory=list)
    local_ip_mask: str = "10.0.0.x"
    disable_webrtc: bool = False
|
||||
|
||||
|
||||
@dataclass
class HardwareConfig:
    """Hardware properties reported by the fingerprinted browser."""

    hardware_concurrency: int = 4
    device_memory: int = 8
    max_touch_points: int = 0
    screen_resolution: tuple = (1920, 1080)
    color_depth: int = 24
    pixel_ratio: float = 1.0
|
||||
|
||||
|
||||
@dataclass
class NavigatorProperties:
    """Navigator object properties exposed as part of the fingerprint."""

    platform: str = "Win32"
    vendor: str = "Google Inc."
    vendor_sub: str = ""
    product: str = "Gecko"
    product_sub: str = "20030107"
    app_name: str = "Netscape"
    app_version: str = "5.0"
    user_agent: str = ""
    language: str = "de-DE"
    languages: List[str] = field(default_factory=lambda: ["de-DE", "de", "en-US", "en"])
    online: bool = True
    do_not_track: str = "1"
|
||||
|
||||
|
||||
@dataclass
class BrowserFingerprint:
    """A complete browser fingerprint.

    Bundles canvas-noise, WebRTC, hardware, navigator, WebGL, audio and
    timezone parameters plus account-binding metadata, and round-trips
    through to_dict()/from_dict() for persistence.
    """

    fingerprint_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    canvas_noise: CanvasNoise = field(default_factory=CanvasNoise)
    webrtc_config: WebRTCConfig = field(default_factory=WebRTCConfig)
    font_list: List[str] = field(default_factory=list)
    hardware_config: HardwareConfig = field(default_factory=HardwareConfig)
    navigator_props: NavigatorProperties = field(default_factory=NavigatorProperties)
    created_at: datetime = field(default_factory=datetime.now)
    # Fix: was annotated as plain `datetime` while defaulting to None.
    last_rotated: Optional[datetime] = None

    # WebGL parameters
    webgl_vendor: str = "Intel Inc."
    webgl_renderer: str = "Intel Iris OpenGL Engine"

    # Audio context
    audio_context_base_latency: float = 0.00
    audio_context_output_latency: float = 0.00
    audio_context_sample_rate: int = 48000

    # Timezone
    timezone: str = "Europe/Berlin"
    timezone_offset: int = -60  # UTC+1

    # Plugins
    plugins: List[Dict[str, str]] = field(default_factory=list)

    # New fields for account-bound persistence
    static_components: Optional[StaticComponents] = None
    rotation_seed: Optional[str] = None
    account_bound: bool = False
    platform_specific_config: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the fingerprint to a plain dictionary.

        Nested config objects are flattened into sub-dicts so the result
        is JSON-serializable (apart from tuples, which JSON encodes as
        lists and from_dict converts back).
        """
        return {
            'fingerprint_id': self.fingerprint_id,
            'canvas_noise': {
                'noise_level': self.canvas_noise.noise_level,
                'seed': self.canvas_noise.seed,
                'algorithm': self.canvas_noise.algorithm
            },
            'webrtc_config': {
                'enabled': self.webrtc_config.enabled,
                'ice_servers': self.webrtc_config.ice_servers,
                'local_ip_mask': self.webrtc_config.local_ip_mask,
                'disable_webrtc': self.webrtc_config.disable_webrtc
            },
            'font_list': self.font_list,
            'hardware_config': {
                'hardware_concurrency': self.hardware_config.hardware_concurrency,
                'device_memory': self.hardware_config.device_memory,
                'max_touch_points': self.hardware_config.max_touch_points,
                'screen_resolution': self.hardware_config.screen_resolution,
                'color_depth': self.hardware_config.color_depth,
                'pixel_ratio': self.hardware_config.pixel_ratio
            },
            'navigator_props': {
                'platform': self.navigator_props.platform,
                'vendor': self.navigator_props.vendor,
                'vendor_sub': self.navigator_props.vendor_sub,
                'product': self.navigator_props.product,
                'product_sub': self.navigator_props.product_sub,
                'app_name': self.navigator_props.app_name,
                'app_version': self.navigator_props.app_version,
                'user_agent': self.navigator_props.user_agent,
                'language': self.navigator_props.language,
                'languages': self.navigator_props.languages,
                'online': self.navigator_props.online,
                'do_not_track': self.navigator_props.do_not_track
            },
            'webgl_vendor': self.webgl_vendor,
            'webgl_renderer': self.webgl_renderer,
            'audio_context': {
                'base_latency': self.audio_context_base_latency,
                'output_latency': self.audio_context_output_latency,
                'sample_rate': self.audio_context_sample_rate
            },
            'timezone': self.timezone,
            'timezone_offset': self.timezone_offset,
            'plugins': self.plugins,
            'created_at': self.created_at.isoformat(),
            'last_rotated': self.last_rotated.isoformat() if self.last_rotated else None,
            'static_components': self.static_components.to_dict() if self.static_components else None,
            'rotation_seed': self.rotation_seed,
            'account_bound': self.account_bound,
            'platform_specific_config': self.platform_specific_config
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'BrowserFingerprint':
        """Create a BrowserFingerprint from a dictionary.

        Missing keys fall back to the same defaults as the dataclass
        fields, so partially-populated persisted rows still load.
        """
        fingerprint = cls()

        # Basic fields
        fingerprint.fingerprint_id = data.get('fingerprint_id', str(uuid.uuid4()))
        fingerprint.webgl_vendor = data.get('webgl_vendor', "Intel Inc.")
        fingerprint.webgl_renderer = data.get('webgl_renderer', "Intel Iris OpenGL Engine")
        fingerprint.timezone = data.get('timezone', "Europe/Berlin")
        fingerprint.timezone_offset = data.get('timezone_offset', -60)
        fingerprint.plugins = data.get('plugins', [])

        # Canvas noise
        if 'canvas_noise' in data:
            cn = data['canvas_noise']
            fingerprint.canvas_noise = CanvasNoise(
                noise_level=cn.get('noise_level', 0.02),
                seed=cn.get('seed', 42),
                algorithm=cn.get('algorithm', 'gaussian')
            )

        # WebRTC config
        if 'webrtc_config' in data:
            wc = data['webrtc_config']
            fingerprint.webrtc_config = WebRTCConfig(
                enabled=wc.get('enabled', True),
                ice_servers=wc.get('ice_servers', []),
                local_ip_mask=wc.get('local_ip_mask', "10.0.0.x"),
                disable_webrtc=wc.get('disable_webrtc', False)
            )

        # Hardware config (resolution persisted as list -> restore tuple)
        if 'hardware_config' in data:
            hc = data['hardware_config']
            fingerprint.hardware_config = HardwareConfig(
                hardware_concurrency=hc.get('hardware_concurrency', 4),
                device_memory=hc.get('device_memory', 8),
                max_touch_points=hc.get('max_touch_points', 0),
                screen_resolution=tuple(hc.get('screen_resolution', [1920, 1080])),
                color_depth=hc.get('color_depth', 24),
                pixel_ratio=hc.get('pixel_ratio', 1.0)
            )

        # Navigator properties (local renamed from `np` to avoid the
        # conventional numpy alias)
        if 'navigator_props' in data:
            nav = data['navigator_props']
            fingerprint.navigator_props = NavigatorProperties(
                platform=nav.get('platform', "Win32"),
                vendor=nav.get('vendor', "Google Inc."),
                vendor_sub=nav.get('vendor_sub', ""),
                product=nav.get('product', "Gecko"),
                product_sub=nav.get('product_sub', "20030107"),
                app_name=nav.get('app_name', "Netscape"),
                app_version=nav.get('app_version', "5.0"),
                user_agent=nav.get('user_agent', ""),
                language=nav.get('language', "de-DE"),
                languages=nav.get('languages', ["de-DE", "de", "en-US", "en"]),
                online=nav.get('online', True),
                do_not_track=nav.get('do_not_track', "1")
            )

        # Audio context
        if 'audio_context' in data:
            ac = data['audio_context']
            fingerprint.audio_context_base_latency = ac.get('base_latency', 0.00)
            fingerprint.audio_context_output_latency = ac.get('output_latency', 0.00)
            fingerprint.audio_context_sample_rate = ac.get('sample_rate', 48000)

        # Font list
        fingerprint.font_list = data.get('font_list', [])

        # Dates (stored as ISO-8601 strings)
        if 'created_at' in data:
            fingerprint.created_at = datetime.fromisoformat(data['created_at'])
        if 'last_rotated' in data and data['last_rotated']:
            fingerprint.last_rotated = datetime.fromisoformat(data['last_rotated'])

        # New persistence fields
        if 'static_components' in data and data['static_components']:
            sc = data['static_components']
            fingerprint.static_components = StaticComponents(
                device_type=sc.get('device_type', 'desktop'),
                os_family=sc.get('os_family', 'windows'),
                browser_family=sc.get('browser_family', 'chromium'),
                gpu_vendor=sc.get('gpu_vendor', 'Intel Inc.'),
                gpu_model=sc.get('gpu_model', 'Intel Iris OpenGL Engine'),
                cpu_architecture=sc.get('cpu_architecture', 'x86_64'),
                base_fonts=sc.get('base_fonts', []),
                base_resolution=tuple(sc.get('base_resolution', [1920, 1080])),
                base_timezone=sc.get('base_timezone', 'Europe/Berlin')
            )

        fingerprint.rotation_seed = data.get('rotation_seed')
        fingerprint.account_bound = data.get('account_bound', False)
        fingerprint.platform_specific_config = data.get('platform_specific_config', {})

        return fingerprint
|
||||
150
domain/entities/error_event.py
Normale Datei
150
domain/entities/error_event.py
Normale Datei
@ -0,0 +1,150 @@
|
||||
"""
|
||||
Error Event Entity - Detailliertes Fehler-Event
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional, List
|
||||
from enum import Enum
|
||||
import uuid
|
||||
|
||||
|
||||
class ErrorType(Enum):
    """Categories of errors that can occur during automation."""

    RATE_LIMIT = "rate_limit"
    CAPTCHA = "captcha"
    NETWORK = "network"
    VALIDATION = "validation"
    BROWSER = "browser"
    PROXY = "proxy"
    EMAIL = "email"
    TIMEOUT = "timeout"
    AUTHENTICATION = "authentication"
    UNKNOWN = "unknown"
|
||||
|
||||
|
||||
class ErrorSeverity(Enum):
    """Severity classification of an error."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"
|
||||
|
||||
|
||||
@dataclass
class ErrorContext:
    """Contextual information captured alongside an error."""

    url: Optional[str] = None
    action: Optional[str] = None
    step_name: Optional[str] = None
    user_input: Optional[Dict[str, Any]] = None
    browser_state: Optional[Dict[str, Any]] = None
    network_state: Optional[Dict[str, Any]] = None
    screenshot_path: Optional[str] = None
    html_snapshot: Optional[str] = None
    additional_data: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
class RecoveryAttempt:
    """Record of a single attempt to recover from an error."""

    strategy: str
    timestamp: datetime
    successful: bool
    error_message: Optional[str] = None
    duration_seconds: float = 0.0
|
||||
|
||||
|
||||
@dataclass
class ErrorEvent:
    """Detailed error event with recovery history and impact flags."""

    error_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    timestamp: datetime = field(default_factory=datetime.now)
    error_type: ErrorType = ErrorType.UNKNOWN
    error_message: str = ""
    stack_trace: Optional[str] = None
    context: ErrorContext = field(default_factory=ErrorContext)
    recovery_attempted: bool = False
    recovery_successful: bool = False
    recovery_attempts: List[RecoveryAttempt] = field(default_factory=list)

    # Error metadata
    severity: ErrorSeverity = ErrorSeverity.MEDIUM
    platform: Optional[str] = None
    session_id: Optional[str] = None
    account_id: Optional[str] = None
    correlation_id: Optional[str] = None

    # Impact metrics
    user_impact: bool = True
    system_impact: bool = False
    data_loss: bool = False

    def add_recovery_attempt(self, attempt: RecoveryAttempt):
        """Record a recovery attempt and update the aggregate flags."""
        self.recovery_attempts.append(attempt)
        self.recovery_attempted = True
        if attempt.successful:
            self.recovery_successful = True

    def get_recovery_success_rate(self) -> float:
        """Fraction of recovery attempts that succeeded (0.0 if none)."""
        if not self.recovery_attempts:
            return 0.0
        wins = sum(1 for a in self.recovery_attempts if a.successful)
        return wins / len(self.recovery_attempts)

    def is_critical(self) -> bool:
        """True for CRITICAL severity or any data loss."""
        return self.data_loss or self.severity == ErrorSeverity.CRITICAL

    def should_retry(self) -> bool:
        """Whether another retry is worthwhile for this error."""
        if self.recovery_successful:
            return False
        retryable = (
            ErrorType.NETWORK,
            ErrorType.TIMEOUT,
            ErrorType.RATE_LIMIT,
            ErrorType.PROXY,
        )
        return self.error_type in retryable and len(self.recovery_attempts) < 3

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the event to a plain dictionary."""
        attempts = [
            {
                'strategy': a.strategy,
                'timestamp': a.timestamp.isoformat(),
                'successful': a.successful,
                'error_message': a.error_message,
                'duration_seconds': a.duration_seconds
            }
            for a in self.recovery_attempts
        ]
        return {
            'error_id': self.error_id,
            'timestamp': self.timestamp.isoformat(),
            'error_type': self.error_type.value,
            'error_message': self.error_message,
            'stack_trace': self.stack_trace,
            'context': {
                'url': self.context.url,
                'action': self.context.action,
                'step_name': self.context.step_name,
                'screenshot_path': self.context.screenshot_path,
                'additional_data': self.context.additional_data
            },
            'recovery_attempted': self.recovery_attempted,
            'recovery_successful': self.recovery_successful,
            'recovery_attempts': attempts,
            'severity': self.severity.value,
            'platform': self.platform,
            'session_id': self.session_id,
            'account_id': self.account_id,
            'correlation_id': self.correlation_id,
            'user_impact': self.user_impact,
            'system_impact': self.system_impact,
            'data_loss': self.data_loss,
            'recovery_success_rate': self.get_recovery_success_rate()
        }
|
||||
435
domain/entities/method_rotation.py
Normale Datei
435
domain/entities/method_rotation.py
Normale Datei
@ -0,0 +1,435 @@
|
||||
"""
|
||||
Domain entities for method rotation system.
|
||||
These entities represent the core business logic and rules for method rotation.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
from typing import Dict, List, Optional, Any
|
||||
import uuid
|
||||
import json
|
||||
|
||||
|
||||
class RiskLevel(Enum):
    """Risk classification for a method strategy."""

    LOW = "LOW"
    MEDIUM = "MEDIUM"
    HIGH = "HIGH"
|
||||
|
||||
|
||||
class RotationEventType(Enum):
    """Kinds of events emitted by the rotation system."""

    SUCCESS = "SUCCESS"              # a method attempt succeeded
    FAILURE = "FAILURE"              # a method attempt failed
    ROTATION = "ROTATION"            # switched from one method to another
    COOLDOWN = "COOLDOWN"            # a method entered its cooldown window
    CONFIG_CHANGE = "CONFIG_CHANGE"  # strategy configuration was changed
    EMERGENCY_MODE = "EMERGENCY_MODE"  # platform switched to emergency mode
|
||||
|
||||
|
||||
class RotationStrategy(Enum):
    """How the next method is picked for a platform."""

    SEQUENTIAL = "sequential"  # walk methods in configured order
    RANDOM = "random"          # pick a method at random
    ADAPTIVE = "adaptive"      # weight choices by observed success patterns
    SMART = "smart"            # AI-driven selection
|
||||
|
||||
|
||||
@dataclass
class MethodStrategy:
    """
    A registration/login method strategy for one platform.

    Couples static configuration (priority, risk, cooldown, daily limits)
    with rolling performance metrics that drive method selection.
    """
    strategy_id: str
    platform: str
    method_name: str
    priority: int = 5  # 1-10, higher values are preferred
    success_rate: float = 0.0
    failure_rate: float = 0.0
    last_success: Optional[datetime] = None
    last_failure: Optional[datetime] = None
    cooldown_period: int = 0  # seconds to back off after a failure
    max_daily_attempts: int = 10
    risk_level: RiskLevel = RiskLevel.MEDIUM
    is_active: bool = True
    configuration: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    created_at: datetime = field(default_factory=datetime.now)
    updated_at: datetime = field(default_factory=datetime.now)

    def __post_init__(self):
        """Generate a fallback ID and clamp numeric fields into valid ranges."""
        if not self.strategy_id:
            self.strategy_id = f"{self.platform}_{self.method_name}_{uuid.uuid4().hex[:8]}"

        # Clamp priority into 1..10 and both rates into 0.0..1.0.
        self.priority = min(10, max(1, self.priority))
        self.success_rate = min(1.0, max(0.0, self.success_rate))
        self.failure_rate = min(1.0, max(0.0, self.failure_rate))

    @property
    def is_on_cooldown(self) -> bool:
        """True while the post-failure cooldown window is still open."""
        if self.last_failure is None or self.cooldown_period == 0:
            return False
        return datetime.now() < self.last_failure + timedelta(seconds=self.cooldown_period)

    @property
    def cooldown_remaining_seconds(self) -> int:
        """Seconds left in the cooldown window; 0 when not on cooldown."""
        if not self.is_on_cooldown:
            return 0
        until = self.last_failure + timedelta(seconds=self.cooldown_period)
        return max(0, int((until - datetime.now()).total_seconds()))

    @property
    def effectiveness_score(self) -> float:
        """
        Composite 0..1 ranking score for method selection.

        Starts from priority, rewards success rate, penalizes a high failure
        rate, risk level, and an open cooldown. Inactive methods score 0.
        """
        if not self.is_active:
            return 0.0

        score = self.priority / 10.0
        if self.success_rate > 0:
            score *= (1 + self.success_rate)
        if self.failure_rate > 0.5:
            score *= (1 - self.failure_rate * 0.5)

        # Unknown risk levels fall back to the MEDIUM penalty.
        penalties = {RiskLevel.LOW: 0.0, RiskLevel.MEDIUM: 0.1, RiskLevel.HIGH: 0.3}
        score *= (1 - penalties.get(self.risk_level, 0.1))

        if self.is_on_cooldown:
            score *= 0.1

        return max(0.0, min(1.0, score))

    def update_performance(self, success: bool, execution_time: float = 0.0):
        """
        Fold one execution result into the rolling metrics.

        Both rates use an exponential moving average with alpha=0.2.
        NOTE(review): execution_time is accepted for interface compatibility
        but is not currently used here.
        """
        self.updated_at = datetime.now()
        if success:
            self.last_success = datetime.now()
        else:
            self.last_failure = datetime.now()

        hit = 1.0 if success else 0.0
        self.success_rate = 0.8 * self.success_rate + 0.2 * hit
        self.failure_rate = 0.8 * self.failure_rate + 0.2 * (1.0 - hit)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (datetimes as ISO-8601 strings)."""
        return {
            'strategy_id': self.strategy_id,
            'platform': self.platform,
            'method_name': self.method_name,
            'priority': self.priority,
            'success_rate': self.success_rate,
            'failure_rate': self.failure_rate,
            'last_success': self.last_success.isoformat() if self.last_success else None,
            'last_failure': self.last_failure.isoformat() if self.last_failure else None,
            'cooldown_period': self.cooldown_period,
            'max_daily_attempts': self.max_daily_attempts,
            'risk_level': self.risk_level.value,
            'is_active': self.is_active,
            'configuration': self.configuration,
            'tags': self.tags,
            'created_at': self.created_at.isoformat(),
            'updated_at': self.updated_at.isoformat()
        }
|
||||
|
||||
|
||||
@dataclass
class RotationSession:
    """
    An active rotation session for account creation/login.

    Tracks the current method, which methods were tried, and per-session
    success/failure counters plus a per-attempt audit trail in
    ``session_metadata['attempts']``.
    """
    session_id: str
    platform: str
    account_id: Optional[str] = None
    current_method: str = ""
    attempted_methods: List[str] = field(default_factory=list)
    session_start: datetime = field(default_factory=datetime.now)
    last_rotation: Optional[datetime] = None
    rotation_count: int = 0
    success_count: int = 0
    failure_count: int = 0
    is_active: bool = True
    rotation_reason: Optional[str] = None
    fingerprint_id: Optional[str] = None
    session_metadata: Dict[str, Any] = field(default_factory=dict)

    def __post_init__(self):
        """Assign a generated session ID when none was provided."""
        if not self.session_id:
            self.session_id = f"session_{uuid.uuid4().hex}"

    @property
    def session_duration(self) -> timedelta:
        """Elapsed wall-clock time since the session started."""
        return datetime.now() - self.session_start

    @property
    def success_rate(self) -> float:
        """Fraction of successful attempts; 0.0 when nothing was attempted."""
        total_attempts = self.success_count + self.failure_count
        if total_attempts == 0:
            return 0.0
        return self.success_count / total_attempts

    @property
    def should_rotate(self) -> bool:
        """
        Whether the session should switch to a different method.

        Rules:
        - Never rotate before the first attempt (a fresh session has nothing
          to rotate away from -- previously this returned True because the
          0.0 success rate of an empty session tripped the second rule).
        - Rotate after 2 consecutive failures with no success.
        - Rotate while the success rate is poor (< 0.3) and fewer than
          3 methods have been tried, i.e. alternatives likely remain.
        """
        if self.success_count + self.failure_count == 0:
            return False

        # Rotate after 2 consecutive failures
        if self.failure_count >= 2 and self.success_count == 0:
            return True

        # Rotate if failure rate is high and we have alternatives
        if len(self.attempted_methods) < 3 and self.success_rate < 0.3:
            return True

        return False

    def add_attempt(self, method_name: str, success: bool, error_message: Optional[str] = None):
        """
        Record one method attempt: update counters, the current method,
        and append a timestamped entry to the metadata audit trail.
        """
        if method_name not in self.attempted_methods:
            self.attempted_methods.append(method_name)

        self.current_method = method_name

        if success:
            self.success_count += 1
        else:
            self.failure_count += 1

        attempt_data = {
            'method': method_name,
            'success': success,
            'timestamp': datetime.now().isoformat(),
            'error': error_message
        }
        # Lazily create the audit list so empty sessions serialize small.
        if 'attempts' not in self.session_metadata:
            self.session_metadata['attempts'] = []
        self.session_metadata['attempts'].append(attempt_data)

    def rotate_to_method(self, new_method: str, reason: str):
        """Switch the session to ``new_method``, recording when and why."""
        self.current_method = new_method
        self.last_rotation = datetime.now()
        self.rotation_count += 1
        self.rotation_reason = reason

    def complete_session(self, success: bool):
        """Deactivate the session and stamp the final outcome into metadata."""
        self.is_active = False
        self.session_metadata['completed_at'] = datetime.now().isoformat()
        self.session_metadata['final_success'] = success

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (datetimes as ISO-8601 strings)."""
        return {
            'session_id': self.session_id,
            'platform': self.platform,
            'account_id': self.account_id,
            'current_method': self.current_method,
            'attempted_methods': self.attempted_methods,
            'session_start': self.session_start.isoformat(),
            'last_rotation': self.last_rotation.isoformat() if self.last_rotation else None,
            'rotation_count': self.rotation_count,
            'success_count': self.success_count,
            'failure_count': self.failure_count,
            'is_active': self.is_active,
            'rotation_reason': self.rotation_reason,
            'fingerprint_id': self.fingerprint_id,
            'session_metadata': self.session_metadata
        }
|
||||
|
||||
|
||||
@dataclass
class RotationEvent:
    """
    One event in the rotation system.

    Persisted for detailed logging and later analytics.
    """
    event_id: str
    session_id: str
    method_name: str
    event_type: RotationEventType
    timestamp: datetime = field(default_factory=datetime.now)
    details: Dict[str, Any] = field(default_factory=dict)
    error_message: Optional[str] = None
    performance_metrics: Dict[str, float] = field(default_factory=dict)
    correlation_id: Optional[str] = None

    def __post_init__(self):
        """Assign a generated event ID when none was provided."""
        if not self.event_id:
            self.event_id = f"event_{uuid.uuid4().hex}"

    @classmethod
    def create_success_event(cls, session_id: str, method_name: str,
                             execution_time: float = 0.0, **kwargs) -> 'RotationEvent':
        """Build a SUCCESS event; extra kwargs land in ``details``."""
        return cls(
            event_id=f"success_{uuid.uuid4().hex[:8]}",
            session_id=session_id,
            method_name=method_name,
            event_type=RotationEventType.SUCCESS,
            performance_metrics={'execution_time': execution_time},
            details=kwargs
        )

    @classmethod
    def create_failure_event(cls, session_id: str, method_name: str,
                             error_message: str, **kwargs) -> 'RotationEvent':
        """Build a FAILURE event carrying the error message."""
        return cls(
            event_id=f"failure_{uuid.uuid4().hex[:8]}",
            session_id=session_id,
            method_name=method_name,
            event_type=RotationEventType.FAILURE,
            error_message=error_message,
            details=kwargs
        )

    @classmethod
    def create_rotation_event(cls, session_id: str, from_method: str,
                              to_method: str, reason: str, **kwargs) -> 'RotationEvent':
        """Build a ROTATION event describing the method switch and its reason."""
        return cls(
            event_id=f"rotation_{uuid.uuid4().hex[:8]}",
            session_id=session_id,
            method_name=to_method,
            event_type=RotationEventType.ROTATION,
            details={
                'from_method': from_method,
                'to_method': to_method,
                'reason': reason,
                **kwargs
            }
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (timestamp as ISO-8601, enum as value)."""
        return {
            'event_id': self.event_id,
            'session_id': self.session_id,
            'method_name': self.method_name,
            'event_type': self.event_type.value,
            'timestamp': self.timestamp.isoformat(),
            'details': self.details,
            'error_message': self.error_message,
            'performance_metrics': self.performance_metrics,
            'correlation_id': self.correlation_id
        }
|
||||
|
||||
|
||||
@dataclass
class PlatformMethodState:
    """
    Per-platform rotation state: method preferences, blocks, and
    daily attempt limits.
    """
    platform: str
    last_successful_method: Optional[str] = None
    last_successful_at: Optional[datetime] = None
    preferred_methods: List[str] = field(default_factory=list)
    blocked_methods: List[str] = field(default_factory=list)
    daily_attempt_counts: Dict[str, int] = field(default_factory=dict)
    reset_date: datetime = field(default_factory=lambda: datetime.now().replace(hour=0, minute=0, second=0, microsecond=0))
    rotation_strategy: RotationStrategy = RotationStrategy.ADAPTIVE
    emergency_mode: bool = False
    metadata: Dict[str, Any] = field(default_factory=dict)
    updated_at: datetime = field(default_factory=datetime.now)

    def is_method_available(self, method_name: str, max_daily_attempts: int) -> bool:
        """A method is usable when it is not blocked and under its daily cap."""
        if method_name in self.blocked_methods:
            return False
        return self.daily_attempt_counts.get(method_name, 0) < max_daily_attempts

    def increment_daily_attempts(self, method_name: str):
        """Bump the daily counter, rolling all counters over at midnight."""
        midnight = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        if midnight > self.reset_date:
            # New day: start all counters from scratch.
            self.daily_attempt_counts = {}
            self.reset_date = midnight

        self.daily_attempt_counts[method_name] = self.daily_attempt_counts.get(method_name, 0) + 1
        self.updated_at = datetime.now()

    def record_success(self, method_name: str):
        """Note a success and promote the method to the front of the preferred list."""
        self.last_successful_method = method_name
        self.last_successful_at = datetime.now()
        self.updated_at = datetime.now()

        if method_name in self.preferred_methods:
            self.preferred_methods.remove(method_name)
        self.preferred_methods.insert(0, method_name)

    def block_method(self, method_name: str, reason: str):
        """Put a method on the block list, recording why and when in metadata."""
        if method_name not in self.blocked_methods:
            self.blocked_methods.append(method_name)

        self.metadata[f'block_reason_{method_name}'] = reason
        self.metadata[f'blocked_at_{method_name}'] = datetime.now().isoformat()
        self.updated_at = datetime.now()

    def unblock_method(self, method_name: str):
        """Remove a method from the block list and drop its block metadata."""
        if method_name in self.blocked_methods:
            self.blocked_methods.remove(method_name)

        self.metadata.pop(f'block_reason_{method_name}', None)
        self.metadata.pop(f'blocked_at_{method_name}', None)
        self.updated_at = datetime.now()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (datetimes as ISO-8601, enum as value)."""
        return {
            'platform': self.platform,
            'last_successful_method': self.last_successful_method,
            'last_successful_at': self.last_successful_at.isoformat() if self.last_successful_at else None,
            'preferred_methods': self.preferred_methods,
            'blocked_methods': self.blocked_methods,
            'daily_attempt_counts': self.daily_attempt_counts,
            'reset_date': self.reset_date.isoformat(),
            'rotation_strategy': self.rotation_strategy.value,
            'emergency_mode': self.emergency_mode,
            'metadata': self.metadata,
            'updated_at': self.updated_at.isoformat()
        }
|
||||
36
domain/entities/rate_limit_policy.py
Normale Datei
36
domain/entities/rate_limit_policy.py
Normale Datei
@ -0,0 +1,36 @@
|
||||
"""
|
||||
Rate Limit Policy Entity - Definiert Geschwindigkeitsregeln für verschiedene Aktionen
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
class RateLimitPolicy:
    """
    Rate-limiting policy for an action.

    Defines the delay window, an optional exponential backoff, and the
    retry budget. Parameters are validated on construction.
    """

    min_delay: float          # lower bound in seconds, >= 0
    max_delay: float          # upper bound in seconds, >= min_delay
    adaptive: bool = True     # False disables backoff entirely
    backoff_multiplier: float = 1.5  # growth factor per attempt, >= 1.0
    max_retries: int = 3      # >= 0

    def __post_init__(self):
        """Reject inconsistent parameter combinations early."""
        if self.min_delay < 0:
            raise ValueError("min_delay muss >= 0 sein")
        if self.max_delay < self.min_delay:
            raise ValueError("max_delay muss >= min_delay sein")
        if self.backoff_multiplier < 1.0:
            raise ValueError("backoff_multiplier muss >= 1.0 sein")
        if self.max_retries < 0:
            raise ValueError("max_retries muss >= 0 sein")

    def calculate_backoff_delay(self, attempt: int) -> float:
        """
        Delay (seconds) before retry number ``attempt`` (0-based).

        Non-adaptive policies always wait ``min_delay``; adaptive ones grow
        exponentially from ``min_delay`` and are capped at ``max_delay``.
        """
        if not self.adaptive:
            return self.min_delay
        grown = self.min_delay * (self.backoff_multiplier ** attempt)
        return min(grown, self.max_delay)
|
||||
96
domain/exceptions.py
Normale Datei
96
domain/exceptions.py
Normale Datei
@ -0,0 +1,96 @@
|
||||
"""
|
||||
Domain-spezifische Exceptions für AccountForger
|
||||
"""
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
|
||||
class AccountForgerException(Exception):
    """Root of the AccountForger exception hierarchy."""

    def __init__(self, message: str, details: Optional[Dict[str, Any]] = None):
        """Store the message plus an optional structured details dict."""
        super().__init__(message)
        self.message = message
        # Normalize None to an empty dict so callers can index freely.
        self.details = details or {}
|
||||
|
||||
|
||||
class AccountCreationException(AccountForgerException):
    """Raised when account creation fails."""

    def __init__(self, message: str, platform: Optional[str] = None,
                 error_type: Optional[str] = None, recovery_suggestion: Optional[str] = None):
        """Record platform, error category, and an optional recovery hint."""
        super().__init__(message, {
            "platform": platform,
            "error_type": error_type,
            "recovery_suggestion": recovery_suggestion
        })
        self.platform = platform
        self.error_type = error_type
        self.recovery_suggestion = recovery_suggestion

    @property
    def user_friendly_message(self) -> str:
        """Message for end users, with the recovery hint appended when present."""
        if not self.recovery_suggestion:
            return self.message
        return f"{self.message}\n\nLösungsvorschlag: {self.recovery_suggestion}"
|
||||
|
||||
|
||||
class FingerprintException(AccountForgerException):
    """Raised for fingerprint-related failures."""
|
||||
|
||||
|
||||
class SessionException(AccountForgerException):
    """Raised for session-related failures."""
|
||||
|
||||
|
||||
class RateLimitException(AccountCreationException):
    """Raised when a platform's rate limit has been hit."""

    def __init__(self, platform: str, retry_after: Optional[int] = None):
        """Build a rate-limit error; ``retry_after`` is the suggested wait in seconds."""
        if retry_after:
            recovery = f"Bitte warten Sie {retry_after} Sekunden"
        else:
            recovery = "Bitte warten Sie einige Minuten"
        super().__init__(
            message=f"Zu viele Anfragen an {platform}",
            platform=platform,
            error_type="rate_limit",
            recovery_suggestion=recovery
        )
        self.retry_after = retry_after
|
||||
|
||||
|
||||
class CaptchaRequiredException(AccountCreationException):
    """Raised when the platform demands captcha verification."""

    def __init__(self, platform: str):
        super().__init__(
            message=f"{platform} erfordert Captcha-Verifizierung",
            platform=platform,
            error_type="captcha",
            recovery_suggestion="Versuchen Sie es später erneut oder nutzen Sie einen anderen Proxy"
        )
|
||||
|
||||
|
||||
class ValidationException(AccountForgerException):
    """Raised when input validation fails for a specific field."""

    def __init__(self, field: str, message: str):
        """Keep the offending field name for programmatic handling."""
        super().__init__(f"Validierungsfehler bei {field}: {message}")
        self.field = field
|
||||
|
||||
|
||||
class ProxyException(AccountForgerException):
    """Raised for proxy-related failures."""
|
||||
|
||||
|
||||
class NetworkException(AccountForgerException):
    """Raised for network-level failures."""

    def __init__(self, message: str = "Netzwerkfehler aufgetreten"):
        """Attach a fixed connectivity hint in the details payload."""
        details = {"recovery_suggestion": "Überprüfen Sie Ihre Internetverbindung"}
        super().__init__(message=message, details=details)
|
||||
17
domain/repositories/__init__.py
Normale Datei
17
domain/repositories/__init__.py
Normale Datei
@ -0,0 +1,17 @@
|
||||
"""
|
||||
Domain repository interfaces.
|
||||
|
||||
These interfaces define the contracts for data persistence,
|
||||
following the Dependency Inversion Principle.
|
||||
Infrastructure layer will implement these interfaces.
|
||||
"""
|
||||
|
||||
from .fingerprint_repository import IFingerprintRepository
|
||||
from .analytics_repository import IAnalyticsRepository
|
||||
from .rate_limit_repository import IRateLimitRepository
|
||||
|
||||
__all__ = [
|
||||
'IFingerprintRepository',
|
||||
'IAnalyticsRepository',
|
||||
'IRateLimitRepository'
|
||||
]
|
||||
79
domain/repositories/analytics_repository.py
Normale Datei
79
domain/repositories/analytics_repository.py
Normale Datei
@ -0,0 +1,79 @@
|
||||
"""
|
||||
Analytics repository interface.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Any, Dict, List, Optional
|
||||
|
||||
from domain.entities.account_creation_event import AccountCreationEvent
|
||||
from domain.entities.error_event import ErrorEvent
|
||||
from domain.value_objects.error_summary import ErrorSummary
|
||||
|
||||
|
||||
class IAnalyticsRepository(ABC):
    """
    Interface for analytics data persistence.

    Annotation fixes vs. the original: optional filter parameters are
    declared ``Optional[...]`` instead of ``str = None`` etc., and
    ``get_fingerprint_performance`` returns ``Dict[str, Any]`` -- the
    original said ``Dict[str, any]``, which names the builtin ``any``
    function, not a type.
    """

    @abstractmethod
    def save_account_event(self, event: AccountCreationEvent) -> None:
        """Save an account creation event."""
        pass

    @abstractmethod
    def save_error_event(self, event: ErrorEvent) -> None:
        """Save an error event."""
        pass

    @abstractmethod
    def get_account_events(self, platform: Optional[str] = None,
                           start_date: Optional[datetime] = None,
                           end_date: Optional[datetime] = None) -> List[AccountCreationEvent]:
        """Get account creation events, optionally filtered by platform and date range."""
        pass

    @abstractmethod
    def get_error_events(self, platform: Optional[str] = None,
                         error_type: Optional[str] = None,
                         start_date: Optional[datetime] = None,
                         end_date: Optional[datetime] = None) -> List[ErrorEvent]:
        """Get error events, optionally filtered by platform, type, and date range."""
        pass

    @abstractmethod
    def get_success_rate(self, platform: Optional[str] = None,
                         start_date: Optional[datetime] = None,
                         end_date: Optional[datetime] = None) -> float:
        """Calculate the account-creation success rate for the given filters."""
        pass

    @abstractmethod
    def get_error_summary(self, platform: Optional[str] = None,
                          days: int = 7) -> ErrorSummary:
        """Get an aggregated error summary covering the last ``days`` days."""
        pass

    @abstractmethod
    def get_platform_statistics(self) -> Dict[str, Dict[str, int]]:
        """Get event statistics grouped by platform."""
        pass

    @abstractmethod
    def get_hourly_distribution(self, platform: Optional[str] = None,
                                days: int = 7) -> Dict[int, int]:
        """Get the hour-of-day distribution of account creation events."""
        pass

    @abstractmethod
    def get_fingerprint_performance(self, fingerprint_id: str) -> Dict[str, Any]:
        """Get performance metrics for a specific fingerprint."""
        pass

    @abstractmethod
    def get_proxy_performance(self, days: int = 7) -> Dict[str, Dict[str, int]]:
        """Get per-proxy performance metrics for the last ``days`` days."""
        pass

    @abstractmethod
    def cleanup_old_events(self, days_to_keep: int = 30) -> int:
        """Remove events older than ``days_to_keep`` days; return the count deleted."""
        pass
|
||||
63
domain/repositories/fingerprint_repository.py
Normale Datei
63
domain/repositories/fingerprint_repository.py
Normale Datei
@ -0,0 +1,63 @@
|
||||
"""
|
||||
Fingerprint repository interface.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
|
||||
from domain.entities.browser_fingerprint import BrowserFingerprint
|
||||
|
||||
|
||||
class IFingerprintRepository(ABC):
    """Persistence contract for browser fingerprints."""

    @abstractmethod
    def save(self, fingerprint: BrowserFingerprint) -> str:
        """Persist a fingerprint; returns the stored fingerprint's ID."""
        pass

    @abstractmethod
    def find_by_id(self, fingerprint_id: str) -> Optional[BrowserFingerprint]:
        """Look up a single fingerprint by ID, or None if absent."""
        pass

    @abstractmethod
    def find_by_account_id(self, account_id: str) -> Optional[BrowserFingerprint]:
        """Look up the fingerprint bound to the given account, or None."""
        pass

    @abstractmethod
    def find_all(self) -> List[BrowserFingerprint]:
        """Return every stored fingerprint."""
        pass

    @abstractmethod
    def update(self, fingerprint: BrowserFingerprint) -> bool:
        """Persist changes to an existing fingerprint; True on success."""
        pass

    @abstractmethod
    def delete(self, fingerprint_id: str) -> bool:
        """Remove a fingerprint by ID; True when something was deleted."""
        pass

    @abstractmethod
    def find_by_platform(self, platform: str) -> List[BrowserFingerprint]:
        """Return all fingerprints recorded for one platform."""
        pass

    @abstractmethod
    def exists(self, fingerprint_id: str) -> bool:
        """Whether a fingerprint with this ID is stored."""
        pass

    @abstractmethod
    def count(self) -> int:
        """Total number of stored fingerprints."""
        pass

    @abstractmethod
    def find_recent(self, limit: int = 10) -> List[BrowserFingerprint]:
        """Return up to ``limit`` of the most recently created fingerprints."""
        pass
|
||||
310
domain/repositories/method_rotation_repository.py
Normale Datei
310
domain/repositories/method_rotation_repository.py
Normale Datei
@ -0,0 +1,310 @@
|
||||
"""
|
||||
Repository interfaces for method rotation system.
|
||||
These interfaces define the contracts for data access without implementation details.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime, date
|
||||
from typing import List, Optional, Dict, Any
|
||||
from domain.entities.method_rotation import (
|
||||
MethodStrategy, RotationSession, RotationEvent, PlatformMethodState
|
||||
)
|
||||
|
||||
|
||||
class IMethodStrategyRepository(ABC):
    """
    Interface for method strategy data access.

    Annotation fix vs. the original: ``excluded_methods`` defaulted to
    ``None`` but was annotated plain ``List[str]``; it is now declared
    ``Optional[List[str]]`` (behavior and default are unchanged).
    """

    @abstractmethod
    def save(self, strategy: MethodStrategy) -> None:
        """Save or update a method strategy."""
        pass

    @abstractmethod
    def find_by_id(self, strategy_id: str) -> Optional[MethodStrategy]:
        """Find a strategy by its ID."""
        pass

    @abstractmethod
    def find_by_platform(self, platform: str) -> List[MethodStrategy]:
        """Find all strategies for a platform."""
        pass

    @abstractmethod
    def find_active_by_platform(self, platform: str) -> List[MethodStrategy]:
        """Find all active strategies for a platform, ordered by effectiveness."""
        pass

    @abstractmethod
    def find_by_platform_and_method(self, platform: str, method_name: str) -> Optional[MethodStrategy]:
        """Find a specific method strategy for a platform."""
        pass

    @abstractmethod
    def update_performance_metrics(self, strategy_id: str, success: bool,
                                   execution_time: float = 0.0) -> None:
        """Fold one execution result into a strategy's performance metrics."""
        pass

    @abstractmethod
    def get_next_available_method(self, platform: str,
                                  excluded_methods: Optional[List[str]] = None,
                                  max_risk_level: str = "HIGH") -> Optional[MethodStrategy]:
        """Get the next best available method for a platform, skipping exclusions."""
        pass

    @abstractmethod
    def disable_method(self, platform: str, method_name: str, reason: str) -> None:
        """Disable a method temporarily or permanently, recording the reason."""
        pass

    @abstractmethod
    def enable_method(self, platform: str, method_name: str) -> None:
        """Re-enable a previously disabled method."""
        pass

    @abstractmethod
    def get_platform_statistics(self, platform: str) -> Dict[str, Any]:
        """Get aggregated statistics over all methods of a platform."""
        pass

    @abstractmethod
    def cleanup_old_data(self, days_to_keep: int = 90) -> int:
        """Remove performance data older than ``days_to_keep`` days; return the count removed."""
        pass
|
||||
|
||||
|
||||
class IRotationSessionRepository(ABC):
    """Persistence contract for rotation sessions."""

    @abstractmethod
    def save(self, session: RotationSession) -> None:
        """Persist a new session or update an existing one."""
        pass

    @abstractmethod
    def find_by_id(self, session_id: str) -> Optional[RotationSession]:
        """Look up a session by ID, or None if absent."""
        pass

    @abstractmethod
    def find_active_session(self, platform: str, account_id: Optional[str] = None) -> Optional[RotationSession]:
        """Return the active session for a platform (optionally one account), or None."""
        pass

    @abstractmethod
    def find_active_sessions_by_platform(self, platform: str) -> List[RotationSession]:
        """Return every currently active session on a platform."""
        pass

    @abstractmethod
    def update_session_metrics(self, session_id: str, success: bool,
                               method_name: str, error_message: Optional[str] = None) -> None:
        """Record the outcome of a method attempt against a session."""
        pass

    @abstractmethod
    def archive_session(self, session_id: str, final_success: bool = False) -> None:
        """Close out a session, marking its final outcome."""
        pass

    @abstractmethod
    def get_session_history(self, platform: str, limit: int = 100) -> List[RotationSession]:
        """Return up to ``limit`` recent sessions for a platform."""
        pass

    @abstractmethod
    def get_session_statistics(self, platform: str, days: int = 30) -> Dict[str, Any]:
        """Aggregate session statistics for a platform over the last ``days`` days."""
        pass

    @abstractmethod
    def cleanup_old_sessions(self, days_to_keep: int = 30) -> int:
        """Delete sessions older than ``days_to_keep`` days; returns the count removed."""
        pass
|
||||
|
||||
|
||||
class IRotationEventRepository(ABC):
    """Persistence contract for rotation events."""

    @abstractmethod
    def save(self, event: RotationEvent) -> None:
        """Persist a single rotation event."""
        pass

    @abstractmethod
    def save_batch(self, events: List[RotationEvent]) -> None:
        """Persist many events in one batch (performance path)."""
        pass

    @abstractmethod
    def find_by_session(self, session_id: str) -> List[RotationEvent]:
        """Return every event recorded for a session."""
        pass

    @abstractmethod
    def find_by_method(self, platform: str, method_name: str,
                       start_date: Optional[datetime] = None,
                       end_date: Optional[datetime] = None) -> List[RotationEvent]:
        """Return events for one method, optionally bounded by a date range."""
        pass

    @abstractmethod
    def find_recent_failures(self, platform: str, method_name: str,
                             hours: int = 24) -> List[RotationEvent]:
        """Return a method's failure events from the last ``hours`` hours."""
        pass

    @abstractmethod
    def get_event_statistics(self, platform: str,
                             start_date: Optional[datetime] = None,
                             end_date: Optional[datetime] = None) -> Dict[str, Any]:
        """Aggregate event statistics for a platform, optionally date-bounded."""
        pass

    @abstractmethod
    def get_error_patterns(self, platform: str, method_name: str,
                           days: int = 7) -> Dict[str, int]:
        """Count error occurrences per pattern for a method over ``days`` days."""
        pass

    @abstractmethod
    def cleanup_old_events(self, days_to_keep: int = 90) -> int:
        """Delete events older than ``days_to_keep`` days; returns the count removed."""
        pass
|
||||
|
||||
|
||||
class IPlatformMethodStateRepository(ABC):
    """Abstraction over persistence of per-platform method state."""

    @abstractmethod
    def save(self, state: PlatformMethodState) -> None:
        """Insert or update the method state for a platform."""
        ...

    @abstractmethod
    def find_by_platform(self, platform: str) -> Optional[PlatformMethodState]:
        """Return the stored method state for *platform*, or None if absent."""
        ...

    @abstractmethod
    def get_or_create_state(self, platform: str) -> PlatformMethodState:
        """Return the existing state, creating one with defaults when missing."""
        ...

    @abstractmethod
    def update_daily_attempts(self, platform: str, method_name: str) -> None:
        """Increment the daily attempt counter for one method."""
        ...

    @abstractmethod
    def reset_daily_counters(self, platform: str) -> None:
        """Reset all daily attempt counters (typically run at midnight)."""
        ...

    @abstractmethod
    def block_method(self, platform: str, method_name: str, reason: str) -> None:
        """Mark a method as temporarily blocked, recording the reason."""
        ...

    @abstractmethod
    def unblock_method(self, platform: str, method_name: str) -> None:
        """Lift a previously set block on a method."""
        ...

    @abstractmethod
    def record_method_success(self, platform: str, method_name: str) -> None:
        """Record that a method executed successfully."""
        ...

    @abstractmethod
    def get_preferred_method_order(self, platform: str) -> List[str]:
        """Return method names in the platform's preferred execution order."""
        ...

    @abstractmethod
    def set_emergency_mode(self, platform: str, enabled: bool) -> None:
        """Switch emergency mode on or off for a platform."""
        ...
|
||||
class IMethodPerformanceRepository(ABC):
    """Abstraction over method performance analytics storage."""

    @abstractmethod
    def record_daily_performance(self, platform: str, method_name: str,
                                 success: bool, execution_time: float = 0.0) -> None:
        """Record one execution outcome for later daily aggregation."""
        ...

    @abstractmethod
    def get_daily_performance(self, platform: str, method_name: str,
                              start_date: date, end_date: date) -> List[Dict[str, Any]]:
        """Return daily performance rows for a method within the date range."""
        ...

    @abstractmethod
    def get_method_trends(self, platform: str, days: int = 30) -> Dict[str, Any]:
        """Return performance trends covering all methods of a platform."""
        ...

    @abstractmethod
    def get_success_rate_history(self, platform: str, method_name: str,
                                 days: int = 30) -> List[Dict[str, Any]]:
        """Return historical success-rate samples for trend analysis."""
        ...

    @abstractmethod
    def get_peak_usage_patterns(self, platform: str, method_name: str) -> Dict[str, Any]:
        """Return usage patterns that reveal peak hours for timing optimization."""
        ...

    @abstractmethod
    def aggregate_daily_stats(self, target_date: date) -> int:
        """Roll raw performance data for *target_date* into daily statistics."""
        ...

    @abstractmethod
    def cleanup_old_performance_data(self, days_to_keep: int = 365) -> int:
        """Delete performance data older than *days_to_keep* days; return the count removed."""
        ...
|
||||
class IMethodCooldownRepository(ABC):
    """Abstraction over persistence of method cooldown windows."""

    @abstractmethod
    def add_cooldown(self, platform: str, method_name: str,
                     cooldown_until: datetime, reason: str) -> None:
        """Start a cooldown for a method lasting until *cooldown_until*."""
        ...

    @abstractmethod
    def remove_cooldown(self, platform: str, method_name: str) -> None:
        """Delete the cooldown entry for a method."""
        ...

    @abstractmethod
    def is_method_on_cooldown(self, platform: str, method_name: str) -> bool:
        """Return True while a method's cooldown is still active."""
        ...

    @abstractmethod
    def get_cooldown_info(self, platform: str, method_name: str) -> Optional[Dict[str, Any]]:
        """Return cooldown details for a method, or None if no cooldown exists."""
        ...

    @abstractmethod
    def get_active_cooldowns(self, platform: str) -> List[Dict[str, Any]]:
        """Return every cooldown currently active on a platform."""
        ...

    @abstractmethod
    def cleanup_expired_cooldowns(self) -> int:
        """Delete expired cooldown records; return how many were removed."""
        ...

    @abstractmethod
    def extend_cooldown(self, platform: str, method_name: str,
                        additional_seconds: int) -> None:
        """Lengthen an existing cooldown by *additional_seconds* seconds."""
        ...
||||
75
domain/repositories/rate_limit_repository.py
Normale Datei
75
domain/repositories/rate_limit_repository.py
Normale Datei
@ -0,0 +1,75 @@
|
||||
"""
|
||||
Rate limit repository interface.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
from datetime import datetime

from domain.entities.rate_limit_policy import RateLimitPolicy
|
||||
|
||||
|
||||
class IRateLimitRepository(ABC):
    """Interface for rate limit data persistence.

    Fixes relative to the original: ``Dict[str, any]`` used the builtin
    ``any`` function instead of ``typing.Any``, and several parameters
    annotated ``str = None`` now use the correct ``Optional[str]``.
    """

    @abstractmethod
    def save_policy(self, policy: RateLimitPolicy) -> None:
        """Save or update a rate limit policy."""
        pass

    @abstractmethod
    def get_policy(self, platform: str, action: str) -> Optional[RateLimitPolicy]:
        """Get rate limit policy for platform and action.

        Returns:
            The stored policy, or None when no policy exists.
        """
        pass

    @abstractmethod
    def get_all_policies(self, platform: Optional[str] = None) -> List[RateLimitPolicy]:
        """Get all policies, optionally filtered by platform."""
        pass

    @abstractmethod
    def record_action(self, platform: str, action: str,
                      success: bool = True, proxy: Optional[str] = None) -> None:
        """Record an action for rate limit tracking.

        Args:
            platform: Platform the action targeted.
            action: Action name being tracked.
            success: Whether the action succeeded.
            proxy: Optional proxy identifier the action used.
        """
        pass

    @abstractmethod
    def get_action_count(self, platform: str, action: str,
                         window_minutes: int = 60,
                         proxy: Optional[str] = None) -> int:
        """Get action count within time window.

        Args:
            window_minutes: Size of the look-back window in minutes.
            proxy: Optional proxy filter.
        """
        pass

    @abstractmethod
    def get_recent_actions(self, platform: str, action: str,
                           limit: int = 100) -> List[Dict[str, Any]]:
        """Get recent actions for analysis."""
        pass

    @abstractmethod
    def is_rate_limited(self, platform: str, action: str,
                        proxy: Optional[str] = None) -> bool:
        """Check if action is currently rate limited."""
        pass

    @abstractmethod
    def get_wait_time(self, platform: str, action: str,
                      proxy: Optional[str] = None) -> int:
        """Get wait time in seconds before next action allowed."""
        pass

    @abstractmethod
    def reset_limits(self, platform: Optional[str] = None, action: Optional[str] = None,
                     proxy: Optional[str] = None) -> int:
        """Reset rate limits. Returns count of records affected.

        All filters are optional; omitting them widens the reset scope.
        """
        pass

    @abstractmethod
    def get_limit_status(self, platform: str) -> Dict[str, Dict[str, Any]]:
        """Get current rate limit status for all actions on platform."""
        pass

    @abstractmethod
    def cleanup_old_records(self, days_to_keep: int = 7) -> int:
        """Remove old rate limit records. Returns count deleted."""
        pass
||||
13
domain/services/__init__.py
Normale Datei
13
domain/services/__init__.py
Normale Datei
@ -0,0 +1,13 @@
|
||||
"""
|
||||
Domain Services - Geschäftslogik-Interfaces die nicht in Entities gehören
|
||||
"""
|
||||
|
||||
from .rate_limit_service import IRateLimitService
|
||||
from .fingerprint_service import IFingerprintService
|
||||
from .analytics_service import IAnalyticsService
|
||||
|
||||
__all__ = [
|
||||
'IRateLimitService',
|
||||
'IFingerprintService',
|
||||
'IAnalyticsService'
|
||||
]
|
||||
181
domain/services/analytics_service.py
Normale Datei
181
domain/services/analytics_service.py
Normale Datei
@ -0,0 +1,181 @@
|
||||
"""
|
||||
Analytics Service Interface - Domain Service für Analytics und Reporting
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List, Optional, Dict, Any, Union
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from domain.entities.account_creation_event import AccountCreationEvent
|
||||
from domain.entities.error_event import ErrorEvent
|
||||
from domain.value_objects.error_summary import ErrorSummary
|
||||
from domain.value_objects.report import Report, ReportType
|
||||
|
||||
|
||||
class IAnalyticsService(ABC):
    """
    Analytics service interface.

    Defines the business logic contract for event tracking and reporting.
    """

    @abstractmethod
    def log_event(self, event: Union[AccountCreationEvent, ErrorEvent, Any]) -> None:
        """
        Record an event for later analysis.

        Args:
            event: Event instance to record.
        """
        ...

    @abstractmethod
    def get_success_rate(self,
                        timeframe: Optional[timedelta] = None,
                        platform: Optional[str] = None) -> float:
        """
        Compute the account-creation success rate.

        Args:
            timeframe: Optional window to restrict the calculation to.
            platform: Optional filter for a single platform.

        Returns:
            Success rate between 0.0 and 1.0.
        """
        ...

    @abstractmethod
    def get_common_errors(self,
                         limit: int = 10,
                         timeframe: Optional[timedelta] = None) -> List[ErrorSummary]:
        """
        Fetch the most frequent errors.

        Args:
            limit: Maximum number of errors to return.
            timeframe: Optional window to restrict the analysis to.

        Returns:
            List of error summaries.
        """
        ...

    @abstractmethod
    def generate_report(self,
                       report_type: ReportType,
                       start: datetime,
                       end: datetime,
                       platforms: Optional[List[str]] = None) -> Report:
        """
        Produce a detailed report.

        Args:
            report_type: Kind of report to generate.
            start: Report start date.
            end: Report end date.
            platforms: Optional filter for specific platforms.

        Returns:
            The generated report.
        """
        ...

    @abstractmethod
    def get_real_time_metrics(self) -> Dict[str, Any]:
        """
        Fetch live metrics for a dashboard.

        Returns:
            Dictionary of current metric values.
        """
        ...

    @abstractmethod
    def track_performance(self,
                         metric_name: str,
                         value: float,
                         tags: Optional[Dict[str, str]] = None) -> None:
        """
        Record a performance metric sample.

        Args:
            metric_name: Name of the metric.
            value: Sampled value.
            tags: Optional extra tags attached to the sample.
        """
        ...

    @abstractmethod
    def get_account_creation_timeline(self,
                                     hours: int = 24,
                                     platform: Optional[str] = None) -> Dict[str, Any]:
        """
        Fetch a timeline of account creations.

        Args:
            hours: How many hours to look back.
            platform: Optional filter for a single platform.

        Returns:
            Timeline data suitable for visualization.
        """
        ...

    @abstractmethod
    def analyze_failure_patterns(self,
                                timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """
        Analyze recurring patterns in failures.

        Args:
            timeframe: Optional window to restrict the analysis to.

        Returns:
            Dictionary of failure patterns and derived insights.
        """
        ...

    @abstractmethod
    def get_platform_comparison(self,
                               timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """
        Compare performance across platforms.

        Args:
            timeframe: Optional window to restrict the comparison to.

        Returns:
            Dictionary of per-platform comparison data.
        """
        ...

    @abstractmethod
    def export_data(self,
                   format: str = "json",
                   start: Optional[datetime] = None,
                   end: Optional[datetime] = None) -> bytes:
        """
        Export analytics data.

        Args:
            format: Export format ("json", "csv", "excel").
            start: Optional start date.
            end: Optional end date.

        Returns:
            The exported data as bytes.
        """
        ...

    @abstractmethod
    def cleanup_old_events(self, older_than: datetime) -> int:
        """
        Purge stale events.

        Args:
            older_than: Delete events older than this timestamp.

        Returns:
            Number of events deleted.
        """
        ...
152
domain/services/fingerprint_service.py
Normale Datei
152
domain/services/fingerprint_service.py
Normale Datei
@ -0,0 +1,152 @@
|
||||
"""
|
||||
Fingerprint Service Interface - Domain Service für Browser Fingerprinting
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import datetime
|
||||
|
||||
from domain.entities.browser_fingerprint import BrowserFingerprint
|
||||
|
||||
|
||||
class IFingerprintService(ABC):
    """
    Fingerprint service interface.

    Defines the business logic contract for browser fingerprint management.
    """

    @abstractmethod
    def generate_fingerprint(self,
                            profile_type: Optional[str] = None,
                            platform: Optional[str] = None) -> BrowserFingerprint:
        """
        Create a fresh, realistic browser fingerprint.

        Args:
            profile_type: Optional profile kind (e.g. "mobile", "desktop").
            platform: Optional target platform (e.g. "instagram", "tiktok").

        Returns:
            Newly generated browser fingerprint.
        """
        ...

    @abstractmethod
    def rotate_fingerprint(self,
                          current: BrowserFingerprint,
                          rotation_strategy: str = "gradual") -> BrowserFingerprint:
        """
        Derive a rotated fingerprint from an existing one for better anonymity.

        Args:
            current: Fingerprint to rotate away from.
            rotation_strategy: Rotation strategy ("gradual", "complete", "minimal").

        Returns:
            The rotated fingerprint.
        """
        ...

    @abstractmethod
    def validate_fingerprint(self, fingerprint: BrowserFingerprint) -> tuple[bool, List[str]]:
        """
        Check a fingerprint for consistency and realism.

        Args:
            fingerprint: Fingerprint to validate.

        Returns:
            Tuple of (is_valid, list_of_problems).
        """
        ...

    @abstractmethod
    def save_fingerprint(self, fingerprint: BrowserFingerprint) -> None:
        """
        Persist a fingerprint for later reuse.

        Args:
            fingerprint: Fingerprint to store.
        """
        ...

    @abstractmethod
    def load_fingerprint(self, fingerprint_id: str) -> Optional[BrowserFingerprint]:
        """
        Load a previously stored fingerprint.

        Args:
            fingerprint_id: Identifier of the fingerprint.

        Returns:
            The fingerprint, or None when not found.
        """
        ...

    @abstractmethod
    def get_fingerprint_pool(self,
                            count: int = 10,
                            platform: Optional[str] = None) -> List[BrowserFingerprint]:
        """
        Fetch a pool of fingerprints to rotate through.

        Args:
            count: Desired number of fingerprints.
            platform: Optional filter for a specific platform.

        Returns:
            List of fingerprints.
        """
        ...

    @abstractmethod
    def apply_fingerprint(self,
                         browser_context: Any,
                         fingerprint: BrowserFingerprint) -> None:
        """
        Apply a fingerprint to a browser context.

        Args:
            browser_context: Playwright browser context.
            fingerprint: Fingerprint to apply.
        """
        ...

    @abstractmethod
    def detect_fingerprinting(self, page_content: str) -> Dict[str, Any]:
        """
        Detect fingerprinting attempts in page content.

        Args:
            page_content: HTML or JavaScript content of the page.

        Returns:
            Dictionary of detected fingerprinting techniques.
        """
        ...

    @abstractmethod
    def get_fingerprint_score(self, fingerprint: BrowserFingerprint) -> float:
        """
        Score the quality/uniqueness of a fingerprint.

        Args:
            fingerprint: Fingerprint to score.

        Returns:
            Score between 0.0 (poor) and 1.0 (good).
        """
        ...

    @abstractmethod
    def cleanup_old_fingerprints(self, older_than: datetime) -> int:
        """
        Purge stale, unused fingerprints.

        Args:
            older_than: Delete fingerprints older than this timestamp.

        Returns:
            Number of fingerprints deleted.
        """
        ...
125
domain/services/rate_limit_service.py
Normale Datei
125
domain/services/rate_limit_service.py
Normale Datei
@ -0,0 +1,125 @@
|
||||
"""
|
||||
Rate Limit Service Interface - Domain Service für Rate Limiting
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from domain.value_objects.action_timing import ActionTiming, ActionType
|
||||
from domain.entities.rate_limit_policy import RateLimitPolicy
|
||||
|
||||
|
||||
class IRateLimitService(ABC):
    """
    Rate limit service interface.

    Defines the business logic contract for adaptive rate limiting.
    """

    @abstractmethod
    def calculate_delay(self, action_type: ActionType, context: Optional[Dict[str, Any]] = None) -> float:
        """
        Compute the optimal delay before performing an action.

        Args:
            action_type: Kind of action about to run.
            context: Optional context (e.g. platform, session id).

        Returns:
            Delay in seconds.
        """
        ...

    @abstractmethod
    def record_action(self, timing: ActionTiming) -> None:
        """
        Record an executed action for later analysis.

        Args:
            timing: Timing information of the action.
        """
        ...

    @abstractmethod
    def detect_rate_limit(self, response: Any) -> bool:
        """
        Decide whether a response indicates rate limiting.

        Args:
            response: HTTP response or browser page.

        Returns:
            True when a rate limit was detected.
        """
        ...

    @abstractmethod
    def get_policy(self, action_type: ActionType) -> RateLimitPolicy:
        """
        Fetch the current rate limit policy for an action type.

        Args:
            action_type: Kind of action.

        Returns:
            The active rate limit policy.
        """
        ...

    @abstractmethod
    def update_policy(self, action_type: ActionType, policy: RateLimitPolicy) -> None:
        """
        Replace the rate limit policy for an action type.

        Args:
            action_type: Kind of action.
            policy: New policy to install.
        """
        ...

    @abstractmethod
    def get_statistics(self,
                      action_type: Optional[ActionType] = None,
                      timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """
        Fetch rate limiting statistics.

        Args:
            action_type: Optional filter for one action type.
            timeframe: Optional filter for a time window.

        Returns:
            Dictionary of statistics.
        """
        ...

    @abstractmethod
    def reset_statistics(self) -> None:
        """Discard all collected statistics."""
        ...

    @abstractmethod
    def is_action_allowed(self, action_type: ActionType) -> bool:
        """
        Check whether an action is currently permitted under the rate limits.

        Args:
            action_type: Kind of action.

        Returns:
            True when the action may proceed.
        """
        ...

    @abstractmethod
    def wait_if_needed(self, action_type: ActionType) -> float:
        """
        Block for as long as required before the action may run.

        Args:
            action_type: Kind of action.

        Returns:
            Time actually waited, in seconds.
        """
        ...
||||
17
domain/value_objects/__init__.py
Normale Datei
17
domain/value_objects/__init__.py
Normale Datei
@ -0,0 +1,17 @@
|
||||
"""
|
||||
Domain Value Objects - Unveränderliche Wertobjekte ohne Identität
|
||||
"""
|
||||
|
||||
from .action_timing import ActionTiming, ActionType
|
||||
from .error_summary import ErrorSummary
|
||||
from .report import Report, ReportType
|
||||
from .login_credentials import LoginCredentials
|
||||
|
||||
__all__ = [
|
||||
'ActionTiming',
|
||||
'ActionType',
|
||||
'ErrorSummary',
|
||||
'Report',
|
||||
'ReportType',
|
||||
'LoginCredentials'
|
||||
]
|
||||
120
domain/value_objects/account_creation_params.py
Normale Datei
120
domain/value_objects/account_creation_params.py
Normale Datei
@ -0,0 +1,120 @@
|
||||
"""
|
||||
Typsichere Parameter für Account-Erstellung
|
||||
"""
|
||||
from dataclasses import dataclass, field
from typing import Any, ClassVar, Dict, List, Optional

from domain.entities.browser_fingerprint import BrowserFingerprint
|
||||
|
||||
|
||||
@dataclass
class ValidationResult:
    """Outcome of a parameter validation run."""
    is_valid: bool
    errors: List[str]

    def get_error_message(self) -> str:
        """Return all collected errors joined by newlines, or '' when valid."""
        return "" if self.is_valid else "\n".join(self.errors)
|
||||
|
||||
@dataclass
class AccountCreationParams:
    """Type-safe parameters for account creation."""
    full_name: str
    age: int
    registration_method: str = "email"
    show_browser: bool = False
    proxy_type: Optional[str] = None
    fingerprint: Optional[BrowserFingerprint] = None
    email_domain: str = "z5m7q9dk3ah2v1plx6ju.com"
    username: Optional[str] = None
    password: Optional[str] = None
    phone_number: Optional[str] = None
    imap_handler: Optional[Any] = None
    phone_service: Optional[Any] = None
    # Fixed: proper mutable default via default_factory instead of a
    # None default with a non-Optional annotation.
    additional_params: Dict[str, Any] = field(default_factory=dict)

    # Platform-wide constants. Fixed: as plain annotated fields the
    # dataclass machinery turned these into __init__ parameters; ClassVar
    # keeps them as class-level constants.
    MIN_AGE: ClassVar[int] = 13
    MAX_AGE: ClassVar[int] = 99

    def __post_init__(self):
        # Backward compatibility: callers that explicitly pass None still work.
        if self.additional_params is None:
            self.additional_params = {}

    def validate(self) -> ValidationResult:
        """Validate all parameters and collect every violation."""
        errors = []

        # Name must be at least 2 non-whitespace characters
        if not self.full_name or len(self.full_name.strip()) < 2:
            errors.append("Der Name muss mindestens 2 Zeichen lang sein")

        # Age bounds
        if self.age < self.MIN_AGE:
            errors.append(f"Das Alter muss mindestens {self.MIN_AGE} sein")
        elif self.age > self.MAX_AGE:
            errors.append(f"Das Alter darf maximal {self.MAX_AGE} sein")

        # Registration method must be a known value
        if self.registration_method not in ["email", "phone"]:
            errors.append("Ungültige Registrierungsmethode")

        # Phone number required when registering by phone
        if self.registration_method == "phone" and not self.phone_number:
            errors.append("Telefonnummer erforderlich für Phone-Registrierung")

        # Email domain required when registering by email
        if self.registration_method == "email" and not self.email_domain:
            errors.append("E-Mail-Domain erforderlich für Email-Registrierung")

        return ValidationResult(is_valid=len(errors) == 0, errors=errors)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a dictionary for compatibility with dict-based callers."""
        result = {
            "full_name": self.full_name,
            "age": self.age,
            "registration_method": self.registration_method,
            "show_browser": self.show_browser,
            "proxy_type": self.proxy_type,
            "fingerprint": self.fingerprint,
            "email_domain": self.email_domain,
            "username": self.username,
            "password": self.password,
            "phone_number": self.phone_number,
            "imap_handler": self.imap_handler,
            "phone_service": self.phone_service
        }

        # Merge free-form extras on top of the known keys
        result.update(self.additional_params)

        return result

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'AccountCreationParams':
        """Build an instance from a dictionary.

        Known keys map to typed fields; everything else is collected
        into additional_params.
        """
        known_params = {
            "full_name": data.get("full_name", ""),
            "age": data.get("age", 18),
            "registration_method": data.get("registration_method", "email"),
            "show_browser": data.get("show_browser", False),
            "proxy_type": data.get("proxy_type"),
            "fingerprint": data.get("fingerprint"),
            "email_domain": data.get("email_domain", "z5m7q9dk3ah2v1plx6ju.com"),
            "username": data.get("username"),
            "password": data.get("password"),
            "phone_number": data.get("phone_number"),
            "imap_handler": data.get("imap_handler"),
            "phone_service": data.get("phone_service")
        }

        # Everything not explicitly modeled becomes an additional param
        additional = {k: v for k, v in data.items() if k not in known_params}
        known_params["additional_params"] = additional

        return cls(**known_params)
102
domain/value_objects/action_timing.py
Normale Datei
102
domain/value_objects/action_timing.py
Normale Datei
@ -0,0 +1,102 @@
|
||||
"""
|
||||
Action Timing Value Object - Repräsentiert Timing-Informationen einer Aktion
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
|
||||
class ActionType(Enum):
    """Types of actions whose timing is measured."""
    # Navigation
    PAGE_LOAD = "page_load"
    PAGE_NAVIGATION = "page_navigation"

    # Form interactions
    FORM_FILL = "form_fill"
    BUTTON_CLICK = "button_click"
    INPUT_TYPE = "input_type"
    DROPDOWN_SELECT = "dropdown_select"
    CHECKBOX_TOGGLE = "checkbox_toggle"

    # Verification
    EMAIL_CHECK = "email_check"
    SMS_CHECK = "sms_check"
    CAPTCHA_SOLVE = "captcha_solve"

    # Account actions
    REGISTRATION_START = "registration_start"
    REGISTRATION_COMPLETE = "registration_complete"
    LOGIN_ATTEMPT = "login_attempt"
    LOGOUT = "logout"

    # Data operations
    SCREENSHOT = "screenshot"
    DATA_SAVE = "data_save"
    SESSION_SAVE = "session_save"

    # Network
    API_REQUEST = "api_request"
    FILE_UPLOAD = "file_upload"
    FILE_DOWNLOAD = "file_download"
|
||||
|
||||
@dataclass(frozen=True)
class ActionTiming:
    """
    Timing information for a single automation action.

    Declared frozen, so instances are immutable value objects.
    """

    action_type: ActionType
    timestamp: datetime
    duration: float  # seconds
    success: bool

    # Optional metadata
    url: Optional[str] = None
    element_selector: Optional[str] = None
    error_message: Optional[str] = None
    retry_count: int = 0
    metadata: Optional[Dict[str, Any]] = None

    def __post_init__(self):
        """Reject negative durations and retry counts."""
        if self.duration < 0:
            raise ValueError("Duration kann nicht negativ sein")
        if self.retry_count < 0:
            raise ValueError("Retry count kann nicht negativ sein")

    @property
    def duration_ms(self) -> float:
        """Duration expressed in milliseconds."""
        return self.duration * 1000

    @property
    def is_slow(self) -> bool:
        """True when the action took longer than 3 seconds."""
        return self.duration > 3.0

    @property
    def is_very_slow(self) -> bool:
        """True when the action took longer than 10 seconds."""
        return self.duration > 10.0

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the timing record into a plain dictionary."""
        payload = {
            'action_type': self.action_type.value,
            'timestamp': self.timestamp.isoformat(),
            'duration': self.duration,
            'duration_ms': self.duration_ms,
            'success': self.success,
            'url': self.url,
            'element_selector': self.element_selector,
            'error_message': self.error_message,
            'retry_count': self.retry_count,
            'metadata': self.metadata or {},
            'is_slow': self.is_slow,
            'is_very_slow': self.is_very_slow,
        }
        return payload
29
domain/value_objects/browser_protection_style.py
Normale Datei
29
domain/value_objects/browser_protection_style.py
Normale Datei
@ -0,0 +1,29 @@
|
||||
"""Browser protection style value object."""
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
class ProtectionLevel(Enum):
    """How strongly the browser is shielded while automation runs."""
    NONE = "none"      # no protection at all
    LIGHT = "light"    # visual indicator only
    MEDIUM = "medium"  # transparent overlay that blocks interaction
    STRONG = "strong"  # opaque overlay, fully blocking
||||
|
||||
@dataclass
class BrowserProtectionStyle:
    """Configuration describing how the browser is shielded during automation."""
    level: ProtectionLevel = ProtectionLevel.MEDIUM
    show_border: bool = True   # animated border indicator
    show_badge: bool = True    # informational badge overlay
    blur_effect: bool = False  # blur the page content
    opacity: float = 0.1       # overlay opacity (0.0 - 1.0)
    badge_text: str = "🔒 Automatisierung läuft - Nicht eingreifen"
    badge_position: str = "top-right"  # top-left, top-right, bottom-left, bottom-right
    border_color: str = "rgba(255, 0, 0, 0.5)"
    overlay_color: str = "rgba(0, 0, 0, {opacity})"  # {opacity} placeholder is substituted

    def get_overlay_color(self) -> str:
        """Return overlay_color with the configured opacity substituted in."""
        return self.overlay_color.format(opacity=self.opacity)
98
domain/value_objects/error_summary.py
Normale Datei
98
domain/value_objects/error_summary.py
Normale Datei
@ -0,0 +1,98 @@
|
||||
"""
|
||||
Error Summary Value Object - Zusammenfassung von Fehlerinformationen
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ErrorSummary:
    """Aggregated information about one error type, for reports and analyses.

    Frozen dataclass, i.e. an immutable value object.
    """

    error_type: str
    error_count: int
    first_occurrence: datetime
    last_occurrence: datetime
    affected_sessions: List[str]
    affected_accounts: List[str]

    # Statistics
    avg_recovery_time: float      # in seconds
    recovery_success_rate: float  # 0.0 - 1.0

    # Most frequent contexts
    most_common_urls: List[str]
    most_common_actions: List[str]
    most_common_steps: List[str]

    # Impact
    total_user_impact: int
    total_system_impact: int
    data_loss_incidents: int

    def __post_init__(self):
        """Validate the summary data; raises ValueError on inconsistent input."""
        if self.error_count < 0:
            raise ValueError("Error count kann nicht negativ sein")
        if not 0.0 <= self.recovery_success_rate <= 1.0:
            raise ValueError("Recovery success rate muss zwischen 0.0 und 1.0 liegen")
        if self.first_occurrence > self.last_occurrence:
            raise ValueError("First occurrence kann nicht nach last occurrence liegen")

    @property
    def duration(self) -> float:
        """Hours between the first and the last occurrence."""
        return (self.last_occurrence - self.first_occurrence).total_seconds() / 3600

    @property
    def frequency(self) -> float:
        """Errors per hour (falls back to the raw count for a zero duration)."""
        hours = self.duration
        return self.error_count / hours if hours > 0 else self.error_count

    @property
    def severity_score(self) -> float:
        """Weighted severity based on frequency, impact, recovery rate and data loss.

        Each factor is normalised to [0, 1] before weighting.
        """
        frequency_factor = min(self.frequency / 10, 1.0)
        impact_factor = min((self.total_user_impact + self.total_system_impact) / 100, 1.0)
        recovery_factor = 1.0 - self.recovery_success_rate
        data_loss_factor = min(self.data_loss_incidents / 10, 1.0)

        weighted = (
            frequency_factor * 0.3
            + impact_factor * 0.3
            + recovery_factor * 0.2
            + data_loss_factor * 0.2
        )
        return weighted

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a plain dictionary for serialisation.

        Datetimes become ISO-8601 strings; context lists are capped at 5 entries.
        """
        return {
            'error_type': self.error_type,
            'error_count': self.error_count,
            'first_occurrence': self.first_occurrence.isoformat(),
            'last_occurrence': self.last_occurrence.isoformat(),
            'duration_hours': self.duration,
            'frequency_per_hour': self.frequency,
            'affected_sessions': self.affected_sessions,
            'affected_accounts': self.affected_accounts,
            'avg_recovery_time': self.avg_recovery_time,
            'recovery_success_rate': self.recovery_success_rate,
            'most_common_urls': self.most_common_urls[:5],
            'most_common_actions': self.most_common_actions[:5],
            'most_common_steps': self.most_common_steps[:5],
            'total_user_impact': self.total_user_impact,
            'total_system_impact': self.total_system_impact,
            'data_loss_incidents': self.data_loss_incidents,
            'severity_score': self.severity_score,
        }
|
||||
44 Zeilen — domain/value_objects/login_credentials.py — Normale Datei
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
Login Credentials Value Object - Repräsentiert Login-Daten mit Session-Status
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class LoginCredentials:
    """Immutable login data for one account, including session status."""

    username: str
    password: str
    platform: str
    # One of: ACTIVE, EXPIRED, LOCKED, REQUIRES_2FA, UNKNOWN
    session_status: str
    last_successful_login: Optional[datetime] = None
    session_id: Optional[str] = None
    fingerprint_id: Optional[str] = None

    def is_session_active(self) -> bool:
        """Return True while the stored session is still usable."""
        return self.session_status == "ACTIVE"

    def requires_manual_login(self) -> bool:
        """Return True when a human has to log in again (expired/locked/2FA/unknown)."""
        return self.session_status in ("EXPIRED", "LOCKED", "REQUIRES_2FA", "UNKNOWN")

    def has_session_data(self) -> bool:
        """Return True when both session id and fingerprint id are present."""
        return None not in (self.session_id, self.fingerprint_id)

    def to_dict(self) -> dict:
        """Convert to a plain dictionary for serialisation (datetime as ISO-8601)."""
        login_ts = self.last_successful_login
        return {
            'username': self.username,
            'password': self.password,
            'platform': self.platform,
            'session_status': self.session_status,
            'last_successful_login': login_ts.isoformat() if login_ts else None,
            'session_id': self.session_id,
            'fingerprint_id': self.fingerprint_id,
        }
|
||||
Einige Dateien werden nicht angezeigt, da zu viele Dateien in diesem Diff geändert wurden. Mehr anzeigen
In neuem Issue referenzieren
Einen Benutzer sperren