Initial commit
infrastructure/__init__.py (new regular file, 3 lines)
@@ -0,0 +1,3 @@
"""
Infrastructure Layer - technical implementations and external services
"""
infrastructure/repositories/__init__.py (new regular file, 13 lines)
@@ -0,0 +1,13 @@
"""
Infrastructure Repositories - data persistence and access
"""

from .fingerprint_repository import FingerprintRepository
from .analytics_repository import AnalyticsRepository
from .rate_limit_repository import RateLimitRepository

__all__ = [
    'FingerprintRepository',
    'AnalyticsRepository',
    'RateLimitRepository'
]
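
A minimal import sketch for the package above (assuming the project root is importable and the configured SQLite database is reachable; note that AccountRepository is importable from its own module even though it is not re-exported in __all__, and the db_path override shown is a hypothetical example):

    from infrastructure.repositories import FingerprintRepository, AnalyticsRepository
    from infrastructure.repositories.account_repository import AccountRepository

    fingerprints = FingerprintRepository()                 # uses PathConfig.MAIN_DB by default
    analytics = AnalyticsRepository()
    accounts = AccountRepository(db_path="data/test.db")   # hypothetical explicit path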
infrastructure/repositories/account_repository.py (new regular file, 179 lines)
@@ -0,0 +1,179 @@
"""
Account Repository - access to account data in the database
"""

import sqlite3
import json
from typing import List, Dict, Any, Optional
from datetime import datetime

from infrastructure.repositories.base_repository import BaseRepository


class AccountRepository(BaseRepository):
    """Repository for account data access"""

    def get_by_id(self, account_id: int) -> Optional[Dict[str, Any]]:
        """
        Fetches an account by ID.

        Args:
            account_id: Account ID

        Returns:
            Dict with account data, or None
        """
        # Safe query that works across different schema versions
        query = "SELECT * FROM accounts WHERE id = ?"

        rows = self._execute_query(query, (account_id,))

        if not rows:
            return None

        return self._row_to_account(rows[0])

    def get_by_username(self, username: str, platform: str = None) -> Optional[Dict[str, Any]]:
        """
        Fetches an account by username.

        Args:
            username: Username
            platform: Optional platform filter

        Returns:
            Dict with account data, or None
        """
        if platform:
            query = "SELECT * FROM accounts WHERE username = ? AND platform = ?"
            params = (username, platform)
        else:
            query = "SELECT * FROM accounts WHERE username = ?"
            params = (username,)

        rows = self._execute_query(query, params)

        if not rows:
            return None

        return self._row_to_account(rows[0])

    def get_all(self, platform: str = None, status: str = None) -> List[Dict[str, Any]]:
        """
        Fetches all accounts, with optional filters.

        Args:
            platform: Optional platform filter
            status: Optional status filter

        Returns:
            List of account dicts
        """
        query = "SELECT * FROM accounts WHERE 1=1"
        params = []

        if platform:
            query += " AND platform = ?"
            params.append(platform)

        if status:
            query += " AND status = ?"
            params.append(status)

        query += " ORDER BY created_at DESC"

        rows = self._execute_query(query, params)
        return [self._row_to_account(row) for row in rows]

    def update_fingerprint_id(self, account_id: int, fingerprint_id: str) -> bool:
        """
        Updates an account's fingerprint ID.

        Args:
            account_id: Account ID
            fingerprint_id: New fingerprint ID

        Returns:
            True on success, False on failure
        """
        query = "UPDATE accounts SET fingerprint_id = ? WHERE id = ?"
        return self._execute_update(query, (fingerprint_id, account_id)) > 0

    def update_session_id(self, account_id: int, session_id: str) -> bool:
        """
        Updates an account's session ID.

        Args:
            account_id: Account ID
            session_id: New session ID

        Returns:
            True on success, False on failure
        """
        query = """
            UPDATE accounts
            SET session_id = ?, last_session_update = datetime('now')
            WHERE id = ?
        """
        return self._execute_update(query, (session_id, account_id)) > 0

    def update_status(self, account_id: int, status: str) -> bool:
        """
        Updates an account's status.

        Args:
            account_id: Account ID
            status: New status

        Returns:
            True on success, False on failure
        """
        query = "UPDATE accounts SET status = ? WHERE id = ?"
        return self._execute_update(query, (status, account_id)) > 0

    def _row_to_account(self, row) -> Dict[str, Any]:
        """Converts a database row to an account dict"""
        # sqlite3.Row supports dict() conversion directly
        if hasattr(row, 'keys'):
            # It is an sqlite3.Row object
            account = dict(row)
        else:
            # Fallback for plain tuples:
            # fetch the actual column names from the database
            conn = sqlite3.connect(self.db_path)
            cursor = conn.cursor()
            cursor.execute("PRAGMA table_info(accounts)")
            columns_info = cursor.fetchall()
            conn.close()

            # Extract the column names
            columns = [col[1] for col in columns_info]

            # Build a dict from the available columns
            account = {}
            for i, value in enumerate(row):
                if i < len(columns):
                    account[columns[i]] = value

        # Parse metadata if present and the column exists
        if 'metadata' in account and account.get('metadata'):
            try:
                metadata = json.loads(account['metadata'])
                account['metadata'] = metadata
                # Extract platform from metadata if present
                if isinstance(metadata, dict) and 'platform' in metadata:
                    account['platform'] = metadata['platform']
            except (json.JSONDecodeError, TypeError):
                account['metadata'] = {}

        # Set defaults for missing fields
        if 'platform' not in account:
            # Default to instagram
            account['platform'] = 'instagram'

        # Make sure important fields exist
        for field in ['fingerprint_id', 'metadata']:
            if field not in account:
                account[field] = None

        return account
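
A short usage sketch for the repository above (illustrative only: the usernames and status value are placeholders, and the accounts table must already exist in the configured database):

    repo = AccountRepository()

    account = repo.get_by_username("jane_doe", platform="instagram")
    if account:
        repo.update_status(account['id'], "active")

    # All TikTok accounts, newest first
    tiktok_accounts = repo.get_all(platform="tiktok")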
infrastructure/repositories/analytics_repository.py (new regular file, 306 lines)
@@ -0,0 +1,306 @@
"""
Analytics Repository - persistence of analytics and events
"""

import json
import sqlite3
from typing import List, Optional, Dict, Any, Union
from datetime import datetime, timedelta
from collections import defaultdict

from infrastructure.repositories.base_repository import BaseRepository
from domain.entities.account_creation_event import AccountCreationEvent, WorkflowStep
from domain.entities.error_event import ErrorEvent, ErrorType
from domain.value_objects.error_summary import ErrorSummary


class AnalyticsRepository(BaseRepository):
    """Repository for analytics events and reporting"""

    def save_account_creation_event(self, event: AccountCreationEvent) -> None:
        """Saves an account creation event"""
        query = """
            INSERT INTO account_creation_analytics (
                event_id, timestamp, account_id, session_id, fingerprint_id,
                duration_seconds, success, error_type, error_message,
                workflow_steps, metadata, total_retry_count, network_requests,
                screenshots_taken, proxy_used, proxy_type, browser_type,
                headless, success_rate
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        # Serialize complex data
        workflow_steps_json = self._serialize_json([
            step.to_dict() for step in event.steps_completed
        ])

        metadata = {
            'platform': event.account_data.platform if event.account_data else None,
            'username': event.account_data.username if event.account_data else None,
            'email': event.account_data.email if event.account_data else None,
            'additional': event.account_data.metadata if event.account_data else {}
        }

        params = (
            event.event_id,
            event.timestamp,
            event.account_data.username if event.account_data else None,
            event.session_id,
            event.fingerprint_id,
            event.duration.total_seconds() if event.duration else 0,
            event.success,
            event.error_details.error_type if event.error_details else None,
            event.error_details.error_message if event.error_details else None,
            workflow_steps_json,
            self._serialize_json(metadata),
            event.total_retry_count,
            event.network_requests,
            event.screenshots_taken,
            event.proxy_used,
            event.proxy_type,
            event.browser_type,
            event.headless,
            event.get_success_rate()
        )

        self._execute_insert(query, params)

    def save_error_event(self, event: ErrorEvent) -> None:
        """Saves an error event"""
        query = """
            INSERT INTO error_events (
                error_id, timestamp, error_type, error_message, stack_trace,
                context, recovery_attempted, recovery_successful, recovery_attempts,
                severity, platform, session_id, account_id, correlation_id,
                user_impact, system_impact, data_loss
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        context_json = self._serialize_json({
            'url': event.context.url,
            'action': event.context.action,
            'step_name': event.context.step_name,
            'screenshot_path': event.context.screenshot_path,
            'additional_data': event.context.additional_data
        })

        recovery_attempts_json = self._serialize_json([
            {
                'strategy': attempt.strategy,
                'timestamp': attempt.timestamp.isoformat(),
                'successful': attempt.successful,
                'error_message': attempt.error_message,
                'duration_seconds': attempt.duration_seconds
            }
            for attempt in event.recovery_attempts
        ])

        params = (
            event.error_id,
            event.timestamp,
            event.error_type.value,
            event.error_message,
            event.stack_trace,
            context_json,
            event.recovery_attempted,
            event.recovery_successful,
            recovery_attempts_json,
            event.severity.value,
            event.platform,
            event.session_id,
            event.account_id,
            event.correlation_id,
            event.user_impact,
            event.system_impact,
            event.data_loss
        )

        self._execute_insert(query, params)

    def get_success_rate(self, timeframe: Optional[timedelta] = None,
                         platform: Optional[str] = None) -> float:
        """Computes the success rate"""
        query = """
            SELECT
                COUNT(*) as total,
                SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful
            FROM account_creation_analytics
            WHERE 1=1
        """
        params = []

        if timeframe:
            query += " AND timestamp > datetime('now', '-' || ? || ' seconds')"
            params.append(int(timeframe.total_seconds()))

        if platform:
            query += " AND json_extract(metadata, '$.platform') = ?"
            params.append(platform)

        row = self._execute_query(query, tuple(params))[0]

        if row['total'] > 0:
            return row['successful'] / row['total']
        return 0.0

    def get_common_errors(self, limit: int = 10,
                          timeframe: Optional[timedelta] = None) -> List[ErrorSummary]:
        """Fetches the most common errors"""
        query = """
            SELECT
                error_type,
                COUNT(*) as error_count,
                MIN(timestamp) as first_occurrence,
                MAX(timestamp) as last_occurrence,
                AVG(CASE WHEN recovery_successful = 1 THEN 1.0 ELSE 0.0 END) as recovery_rate,
                GROUP_CONCAT(DISTINCT session_id) as sessions,
                GROUP_CONCAT(DISTINCT account_id) as accounts,
                SUM(user_impact) as total_user_impact,
                SUM(system_impact) as total_system_impact,
                SUM(data_loss) as data_loss_incidents
            FROM error_events
            WHERE 1=1
        """
        params = []

        if timeframe:
            query += " AND timestamp > datetime('now', '-' || ? || ' seconds')"
            params.append(int(timeframe.total_seconds()))

        query += " GROUP BY error_type ORDER BY error_count DESC LIMIT ?"
        params.append(limit)

        rows = self._execute_query(query, tuple(params))

        summaries = []
        for row in rows:
            # Fetch additional details for this error type
            detail_query = """
                SELECT
                    json_extract(context, '$.url') as url,
                    json_extract(context, '$.action') as action,
                    json_extract(context, '$.step_name') as step,
                    COUNT(*) as count
                FROM error_events
                WHERE error_type = ?
                GROUP BY url, action, step
                ORDER BY count DESC
                LIMIT 5
            """
            details = self._execute_query(detail_query, (row['error_type'],))

            urls = []
            actions = []
            steps = []

            for detail in details:
                if detail['url']:
                    urls.append(detail['url'])
                if detail['action']:
                    actions.append(detail['action'])
                if detail['step']:
                    steps.append(detail['step'])

            summary = ErrorSummary(
                error_type=row['error_type'],
                error_count=row['error_count'],
                first_occurrence=self._parse_datetime(row['first_occurrence']),
                last_occurrence=self._parse_datetime(row['last_occurrence']),
                affected_sessions=row['sessions'].split(',') if row['sessions'] else [],
                affected_accounts=row['accounts'].split(',') if row['accounts'] else [],
                avg_recovery_time=0.0,  # TODO: compute from recovery_attempts
                recovery_success_rate=row['recovery_rate'] or 0.0,
                most_common_urls=urls,
                most_common_actions=actions,
                most_common_steps=steps,
                total_user_impact=row['total_user_impact'] or 0,
                total_system_impact=row['total_system_impact'] or 0,
                data_loss_incidents=row['data_loss_incidents'] or 0
            )

            summaries.append(summary)

        return summaries

    def get_timeline_data(self, metric: str, hours: int = 24,
                          platform: Optional[str] = None) -> List[Dict[str, Any]]:
        """Fetches timeline data for charts"""
        # Build hourly buckets
        query = """
            SELECT
                strftime('%Y-%m-%d %H:00:00', timestamp) as hour,
                COUNT(*) as total,
                SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful,
                AVG(duration_seconds) as avg_duration
            FROM account_creation_analytics
            WHERE timestamp > datetime('now', '-' || ? || ' hours')
        """
        params = [hours]

        if platform:
            query += " AND json_extract(metadata, '$.platform') = ?"
            params.append(platform)

        query += " GROUP BY hour ORDER BY hour"

        rows = self._execute_query(query, tuple(params))

        timeline = []
        for row in rows:
            data = {
                'timestamp': row['hour'],
                'total': row['total'],
                'successful': row['successful'],
                'success_rate': row['successful'] / row['total'] if row['total'] > 0 else 0,
                'avg_duration': row['avg_duration']
            }
            timeline.append(data)

        return timeline

    def get_platform_stats(self, timeframe: Optional[timedelta] = None) -> Dict[str, Dict[str, Any]]:
        """Fetches statistics per platform"""
        query = """
            SELECT
                json_extract(metadata, '$.platform') as platform,
                COUNT(*) as total_attempts,
                SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful,
                AVG(duration_seconds) as avg_duration,
                AVG(total_retry_count) as avg_retries
            FROM account_creation_analytics
            WHERE json_extract(metadata, '$.platform') IS NOT NULL
        """
        params = []

        if timeframe:
            query += " AND timestamp > datetime('now', '-' || ? || ' seconds')"
            params.append(int(timeframe.total_seconds()))

        query += " GROUP BY platform"

        rows = self._execute_query(query, tuple(params))

        stats = {}
        for row in rows:
            stats[row['platform']] = {
                'total_attempts': row['total_attempts'],
                'successful_accounts': row['successful'],
                'failed_attempts': row['total_attempts'] - row['successful'],
                'success_rate': row['successful'] / row['total_attempts'] if row['total_attempts'] > 0 else 0,
                'avg_duration_seconds': row['avg_duration'],
                'avg_retries': row['avg_retries']
            }

        return stats

    def cleanup_old_events(self, older_than: datetime) -> int:
        """Purges old events"""
        count1 = self._execute_delete(
            "DELETE FROM account_creation_analytics WHERE timestamp < ?",
            (older_than,)
        )
        count2 = self._execute_delete(
            "DELETE FROM error_events WHERE timestamp < ?",
            (older_than,)
        )
        return count1 + count2
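
A hedged example of querying the aggregations above (it assumes the event tables have already been populated via save_account_creation_event; the platform name is a placeholder):

    from datetime import timedelta

    analytics = AnalyticsRepository()

    # Success rate for Instagram over the last 7 days
    rate = analytics.get_success_rate(timeframe=timedelta(days=7), platform="instagram")
    print(f"7-day success rate: {rate:.1%}")

    # Hourly buckets for a dashboard chart
    for point in analytics.get_timeline_data(metric="success_rate", hours=24):
        print(point['timestamp'], point['success_rate'])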
infrastructure/repositories/base_repository.py (new regular file, 112 lines)
@@ -0,0 +1,112 @@
"""
Base Repository - abstract base for all repositories
"""

import sqlite3
import json
import logging
from typing import Dict, List, Any, Optional, Union
from datetime import datetime
from contextlib import contextmanager

from config.paths import PathConfig

logger = logging.getLogger("base_repository")


class BaseRepository:
    """Base repository with shared database functionality"""

    def __init__(self, db_path: str = None):
        """
        Initializes the repository.

        Args:
            db_path: Path to the database (if None, PathConfig.MAIN_DB is used)
        """
        self.db_path = db_path if db_path is not None else PathConfig.MAIN_DB
        self._ensure_schema()

    def _ensure_schema(self):
        """Makes sure the extended schema exists"""
        try:
            if PathConfig.file_exists(PathConfig.SCHEMA_V2):
                with open(PathConfig.SCHEMA_V2, "r", encoding='utf-8') as f:
                    schema_sql = f.read()

                with self.get_connection() as conn:
                    conn.executescript(schema_sql)
                    conn.commit()
                logger.info("Schema v2 loaded successfully")
            else:
                logger.warning(f"schema_v2.sql not found at {PathConfig.SCHEMA_V2}, using existing schema")
        except Exception as e:
            logger.error(f"Error during schema update: {e}")

    @contextmanager
    def get_connection(self):
        """Context manager for database connections"""
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        try:
            yield conn
        finally:
            conn.close()

    def _serialize_json(self, data: Any) -> Optional[str]:
        """Serializes data to JSON"""
        if data is None:
            return None
        return json.dumps(data, default=str)

    def _deserialize_json(self, data: str) -> Any:
        """Deserializes JSON into Python objects"""
        if data is None:
            return None
        try:
            return json.loads(data)
        except json.JSONDecodeError:
            logger.error(f"JSON parsing error: {data}")
            return None

    def _parse_datetime(self, dt_string: str) -> Optional[datetime]:
        """Parses a datetime string"""
        if not dt_string:
            return None
        try:
            # SQLite datetime format
            return datetime.strptime(dt_string, "%Y-%m-%d %H:%M:%S")
        except ValueError:
            try:
                # ISO format
                return datetime.fromisoformat(dt_string.replace('Z', '+00:00'))
            except ValueError:
                logger.error(f"Could not parse datetime: {dt_string}")
                return None

    def _execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
        """Executes a SELECT query"""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            return cursor.fetchall()

    def _execute_insert(self, query: str, params: tuple = ()) -> int:
        """Executes an INSERT query and returns the new row ID"""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            conn.commit()
            return cursor.lastrowid

    def _execute_update(self, query: str, params: tuple = ()) -> int:
        """Executes an UPDATE query and returns the number of affected rows"""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            conn.commit()
            return cursor.rowcount

    def _execute_delete(self, query: str, params: tuple = ()) -> int:
        """Executes a DELETE query and returns the number of affected rows"""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            conn.commit()
            return cursor.rowcount
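
Since every concrete repository builds on these shared helpers, a new repository only needs to supply its queries. A minimal sketch (the notes table and its schema are hypothetical and would have to exist in the configured database):

    class NoteRepository(BaseRepository):
        """Example repository built on the shared helpers"""

        def add(self, text: str) -> int:
            # _execute_insert commits and returns the new row id
            return self._execute_insert(
                "INSERT INTO notes (text, created_at) VALUES (?, datetime('now'))",
                (text,)
            )

        def all_texts(self):
            # _execute_query returns sqlite3.Row objects, so columns are addressable by name
            rows = self._execute_query("SELECT text FROM notes ORDER BY created_at")
            return [row['text'] for row in rows]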
infrastructure/repositories/fingerprint_repository.py (new regular file, 273 lines)
@@ -0,0 +1,273 @@
"""
Fingerprint Repository - persistence of browser fingerprints
"""

import json
import sqlite3
from typing import List, Optional, Dict, Any
from datetime import datetime

from infrastructure.repositories.base_repository import BaseRepository
from domain.entities.browser_fingerprint import (
    BrowserFingerprint, CanvasNoise, WebRTCConfig,
    HardwareConfig, NavigatorProperties, StaticComponents
)


class FingerprintRepository(BaseRepository):
    """Repository for browser fingerprint persistence"""

    def save(self, fingerprint: BrowserFingerprint) -> None:
        """Stores a fingerprint in the database"""
        query = """
            INSERT OR REPLACE INTO browser_fingerprints (
                id, canvas_noise_config, webrtc_config, fonts,
                hardware_config, navigator_props, webgl_vendor,
                webgl_renderer, audio_context_config, timezone,
                timezone_offset, plugins, created_at, last_rotated,
                platform_specific, static_components, rotation_seed,
                account_bound
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        audio_config = {
            'base_latency': fingerprint.audio_context_base_latency,
            'output_latency': fingerprint.audio_context_output_latency,
            'sample_rate': fingerprint.audio_context_sample_rate
        }

        params = (
            fingerprint.fingerprint_id,
            self._serialize_json({
                'noise_level': fingerprint.canvas_noise.noise_level,
                'seed': fingerprint.canvas_noise.seed,
                'algorithm': fingerprint.canvas_noise.algorithm
            }),
            self._serialize_json({
                'enabled': fingerprint.webrtc_config.enabled,
                'ice_servers': fingerprint.webrtc_config.ice_servers,
                'local_ip_mask': fingerprint.webrtc_config.local_ip_mask,
                'disable_webrtc': fingerprint.webrtc_config.disable_webrtc
            }),
            self._serialize_json(fingerprint.font_list),
            self._serialize_json({
                'hardware_concurrency': fingerprint.hardware_config.hardware_concurrency,
                'device_memory': fingerprint.hardware_config.device_memory,
                'max_touch_points': fingerprint.hardware_config.max_touch_points,
                'screen_resolution': fingerprint.hardware_config.screen_resolution,
                'color_depth': fingerprint.hardware_config.color_depth,
                'pixel_ratio': fingerprint.hardware_config.pixel_ratio
            }),
            self._serialize_json({
                'platform': fingerprint.navigator_props.platform,
                'vendor': fingerprint.navigator_props.vendor,
                'vendor_sub': fingerprint.navigator_props.vendor_sub,
                'product': fingerprint.navigator_props.product,
                'product_sub': fingerprint.navigator_props.product_sub,
                'app_name': fingerprint.navigator_props.app_name,
                'app_version': fingerprint.navigator_props.app_version,
                'user_agent': fingerprint.navigator_props.user_agent,
                'language': fingerprint.navigator_props.language,
                'languages': fingerprint.navigator_props.languages,
                'online': fingerprint.navigator_props.online,
                'do_not_track': fingerprint.navigator_props.do_not_track
            }),
            fingerprint.webgl_vendor,
            fingerprint.webgl_renderer,
            self._serialize_json(audio_config),
            fingerprint.timezone,
            fingerprint.timezone_offset,
            self._serialize_json(fingerprint.plugins),
            fingerprint.created_at,
            fingerprint.last_rotated,
            self._serialize_json(fingerprint.platform_specific_config),  # platform_specific
            self._serialize_json(fingerprint.static_components.to_dict() if fingerprint.static_components else None),
            fingerprint.rotation_seed,
            fingerprint.account_bound
        )

        self._execute_insert(query, params)

    def find_by_id(self, fingerprint_id: str) -> Optional[BrowserFingerprint]:
        """Finds a fingerprint by ID"""
        query = "SELECT * FROM browser_fingerprints WHERE id = ?"
        rows = self._execute_query(query, (fingerprint_id,))

        if not rows:
            return None

        return self._row_to_fingerprint(rows[0])

    def find_all(self, limit: int = 100) -> List[BrowserFingerprint]:
        """Fetches all fingerprints (with a limit)"""
        query = "SELECT * FROM browser_fingerprints ORDER BY created_at DESC LIMIT ?"
        rows = self._execute_query(query, (limit,))

        return [self._row_to_fingerprint(row) for row in rows]

    def find_recent(self, hours: int = 24) -> List[BrowserFingerprint]:
        """Finds recently created fingerprints"""
        query = """
            SELECT * FROM browser_fingerprints
            WHERE created_at > datetime('now', '-' || ? || ' hours')
            ORDER BY created_at DESC
        """
        rows = self._execute_query(query, (hours,))

        return [self._row_to_fingerprint(row) for row in rows]

    def update_last_rotated(self, fingerprint_id: str, timestamp: datetime) -> None:
        """Updates the last_rotated timestamp"""
        query = "UPDATE browser_fingerprints SET last_rotated = ? WHERE id = ?"
        self._execute_update(query, (timestamp, fingerprint_id))

    def delete_older_than(self, timestamp: datetime) -> int:
        """Deletes fingerprints older than the given timestamp"""
        query = "DELETE FROM browser_fingerprints WHERE created_at < ?"
        return self._execute_delete(query, (timestamp,))

    def get_random_fingerprints(self, count: int = 10) -> List[BrowserFingerprint]:
        """Fetches random fingerprints for the pool"""
        query = """
            SELECT * FROM browser_fingerprints
            ORDER BY RANDOM()
            LIMIT ?
        """
        rows = self._execute_query(query, (count,))

        return [self._row_to_fingerprint(row) for row in rows]

    def link_to_account(self, fingerprint_id: str, account_id: str, primary: bool = True) -> None:
        """Links a fingerprint to an account using a simple 1:1 relationship"""
        query = """
            UPDATE accounts SET fingerprint_id = ? WHERE id = ?
        """
        self._execute_update(query, (fingerprint_id, account_id))

    def get_primary_fingerprint_for_account(self, account_id: str) -> Optional[str]:
        """Gets the fingerprint ID for an account (1:1 relationship)"""
        query = """
            SELECT fingerprint_id FROM accounts
            WHERE id = ? AND fingerprint_id IS NOT NULL
        """
        rows = self._execute_query(query, (account_id,))
        return dict(rows[0])['fingerprint_id'] if rows else None

    def get_fingerprints_for_account(self, account_id: str) -> List[BrowserFingerprint]:
        """Gets the fingerprint associated with an account (1:1 relationship)"""
        fingerprint_id = self.get_primary_fingerprint_for_account(account_id)
        if fingerprint_id:
            fingerprint = self.find_by_id(fingerprint_id)
            return [fingerprint] if fingerprint else []
        return []

    def update_fingerprint_stats(self, fingerprint_id: str, account_id: str,
                                 success: bool) -> None:
        """Updates the fingerprint's last-used timestamp (simplified for 1:1)"""
        # Update the fingerprint's last used time
        query = """
            UPDATE browser_fingerprints
            SET last_rotated = datetime('now')
            WHERE id = ?
        """
        self._execute_update(query, (fingerprint_id,))

        # Also update the account's last login
        query = """
            UPDATE accounts
            SET last_login = datetime('now')
            WHERE id = ? AND fingerprint_id = ?
        """
        self._execute_update(query, (account_id, fingerprint_id))

    def _row_to_fingerprint(self, row: sqlite3.Row) -> BrowserFingerprint:
        """Converts a database row to a fingerprint"""
        # Canvas noise
        canvas_config = self._deserialize_json(row['canvas_noise_config'])
        canvas_noise = CanvasNoise(
            noise_level=canvas_config.get('noise_level', 0.02),
            seed=canvas_config.get('seed', 42),
            algorithm=canvas_config.get('algorithm', 'gaussian')
        )

        # WebRTC config
        webrtc_config_data = self._deserialize_json(row['webrtc_config'])
        webrtc_config = WebRTCConfig(
            enabled=webrtc_config_data.get('enabled', True),
            ice_servers=webrtc_config_data.get('ice_servers', []),
            local_ip_mask=webrtc_config_data.get('local_ip_mask', '10.0.0.x'),
            disable_webrtc=webrtc_config_data.get('disable_webrtc', False)
        )

        # Hardware config
        hw_config = self._deserialize_json(row['hardware_config'])
        hardware_config = HardwareConfig(
            hardware_concurrency=hw_config.get('hardware_concurrency', 4),
            device_memory=hw_config.get('device_memory', 8),
            max_touch_points=hw_config.get('max_touch_points', 0),
            screen_resolution=tuple(hw_config.get('screen_resolution', [1920, 1080])),
            color_depth=hw_config.get('color_depth', 24),
            pixel_ratio=hw_config.get('pixel_ratio', 1.0)
        )

        # Navigator properties
        nav_props = self._deserialize_json(row['navigator_props'])
        navigator_props = NavigatorProperties(
            platform=nav_props.get('platform', 'Win32'),
            vendor=nav_props.get('vendor', 'Google Inc.'),
            vendor_sub=nav_props.get('vendor_sub', ''),
            product=nav_props.get('product', 'Gecko'),
            product_sub=nav_props.get('product_sub', '20030107'),
            app_name=nav_props.get('app_name', 'Netscape'),
            app_version=nav_props.get('app_version', '5.0'),
            user_agent=nav_props.get('user_agent', ''),
            language=nav_props.get('language', 'de-DE'),
            languages=nav_props.get('languages', ['de-DE', 'de', 'en-US', 'en']),
            online=nav_props.get('online', True),
            do_not_track=nav_props.get('do_not_track', '1')
        )

        # Audio context
        audio_config = self._deserialize_json(row['audio_context_config']) or {}

        # Static components
        static_components = None
        if 'static_components' in row.keys() and row['static_components']:
            sc_data = self._deserialize_json(row['static_components'])
            if sc_data:
                static_components = StaticComponents(
                    device_type=sc_data.get('device_type', 'desktop'),
                    os_family=sc_data.get('os_family', 'windows'),
                    browser_family=sc_data.get('browser_family', 'chromium'),
                    gpu_vendor=sc_data.get('gpu_vendor', 'Intel Inc.'),
                    gpu_model=sc_data.get('gpu_model', 'Intel Iris OpenGL Engine'),
                    cpu_architecture=sc_data.get('cpu_architecture', 'x86_64'),
                    base_fonts=sc_data.get('base_fonts', []),
                    base_resolution=tuple(sc_data.get('base_resolution', [1920, 1080])),
                    base_timezone=sc_data.get('base_timezone', 'Europe/Berlin')
                )

        return BrowserFingerprint(
            fingerprint_id=row['id'],
            canvas_noise=canvas_noise,
            webrtc_config=webrtc_config,
            font_list=self._deserialize_json(row['fonts']) or [],
            hardware_config=hardware_config,
            navigator_props=navigator_props,
            created_at=self._parse_datetime(row['created_at']),
            last_rotated=self._parse_datetime(row['last_rotated']),
            webgl_vendor=row['webgl_vendor'],
            webgl_renderer=row['webgl_renderer'],
            audio_context_base_latency=audio_config.get('base_latency', 0.0),
            audio_context_output_latency=audio_config.get('output_latency', 0.0),
            audio_context_sample_rate=audio_config.get('sample_rate', 48000),
            timezone=row['timezone'],
            timezone_offset=row['timezone_offset'],
            plugins=self._deserialize_json(row['plugins']) or [],
            static_components=static_components,
            rotation_seed=row['rotation_seed'] if 'rotation_seed' in row.keys() else None,
            account_bound=row['account_bound'] if 'account_bound' in row.keys() else False,
            platform_specific_config=self._deserialize_json(row['platform_specific'] if 'platform_specific' in row.keys() else '{}') or {}
        )
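
A usage sketch for the 1:1 account binding above (the account id is a placeholder, and the fingerprint must already have been persisted via save):

    repo = FingerprintRepository()

    # Reuse a recent fingerprint and bind it to an account
    recent = repo.find_recent(hours=24)
    if recent:
        repo.link_to_account(recent[0].fingerprint_id, account_id="42")

    # Later: resolve the bound fingerprint again
    bound = repo.get_fingerprints_for_account("42")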
infrastructure/repositories/method_strategy_repository.py (new regular file, 282 lines)
@@ -0,0 +1,282 @@
"""
SQLite implementation of method strategy repository.
Handles persistence and retrieval of method strategies with performance optimization.
"""

import json
import sqlite3
from datetime import datetime, timedelta
from typing import List, Optional, Dict, Any

from domain.entities.method_rotation import MethodStrategy, RiskLevel
from domain.repositories.method_rotation_repository import IMethodStrategyRepository
from database.db_manager import DatabaseManager


class MethodStrategyRepository(IMethodStrategyRepository):
    """SQLite implementation of method strategy repository"""

    def __init__(self, db_manager):
        self.db_manager = db_manager

    def save(self, strategy: MethodStrategy) -> None:
        """Save or update a method strategy"""
        strategy.updated_at = datetime.now()

        query = """
            INSERT OR REPLACE INTO method_strategies (
                id, platform, method_name, priority, success_rate, failure_rate,
                last_success, last_failure, cooldown_period, max_daily_attempts,
                risk_level, is_active, configuration, tags, created_at, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        params = (
            strategy.strategy_id,
            strategy.platform,
            strategy.method_name,
            strategy.priority,
            strategy.success_rate,
            strategy.failure_rate,
            strategy.last_success.isoformat() if strategy.last_success else None,
            strategy.last_failure.isoformat() if strategy.last_failure else None,
            strategy.cooldown_period,
            strategy.max_daily_attempts,
            strategy.risk_level.value,
            strategy.is_active,
            json.dumps(strategy.configuration),
            json.dumps(strategy.tags),
            strategy.created_at.isoformat(),
            strategy.updated_at.isoformat()
        )

        self.db_manager.execute_query(query, params)

    def find_by_id(self, strategy_id: str) -> Optional[MethodStrategy]:
        """Find a strategy by its ID"""
        query = "SELECT * FROM method_strategies WHERE id = ?"
        result = self.db_manager.fetch_one(query, (strategy_id,))
        return self._row_to_strategy(result) if result else None

    def find_by_platform(self, platform: str) -> List[MethodStrategy]:
        """Find all strategies for a platform"""
        query = """
            SELECT * FROM method_strategies
            WHERE platform = ?
            ORDER BY priority DESC, success_rate DESC
        """
        results = self.db_manager.fetch_all(query, (platform,))
        return [self._row_to_strategy(row) for row in results]

    def find_active_by_platform(self, platform: str) -> List[MethodStrategy]:
        """Find all active strategies for a platform, ordered by effectiveness"""
        query = """
            SELECT * FROM method_strategies
            WHERE platform = ? AND is_active = 1
            ORDER BY priority DESC, success_rate DESC, last_success DESC
        """
        results = self.db_manager.fetch_all(query, (platform,))
        strategies = [self._row_to_strategy(row) for row in results]

        # Sort by effectiveness score
        strategies.sort(key=lambda s: s.effectiveness_score, reverse=True)
        return strategies

    def find_by_platform_and_method(self, platform: str, method_name: str) -> Optional[MethodStrategy]:
        """Find a specific method strategy"""
        query = "SELECT * FROM method_strategies WHERE platform = ? AND method_name = ?"
        result = self.db_manager.fetch_one(query, (platform, method_name))
        return self._row_to_strategy(result) if result else None

    def update_performance_metrics(self, strategy_id: str, success: bool,
                                   execution_time: float = 0.0) -> None:
        """Update performance metrics for a strategy"""
        strategy = self.find_by_id(strategy_id)
        if not strategy:
            return

        strategy.update_performance(success, execution_time)
        self.save(strategy)

    def get_next_available_method(self, platform: str,
                                  excluded_methods: List[str] = None,
                                  max_risk_level: str = "HIGH") -> Optional[MethodStrategy]:
        """Get the next best available method for a platform"""
        # Copy so the recursion below does not mutate the caller's list
        excluded_methods = list(excluded_methods) if excluded_methods else []

        # Build query with exclusions
        placeholders = ','.join(['?' for _ in excluded_methods])
        exclusion_clause = f"AND method_name NOT IN ({placeholders})" if excluded_methods else ""

        # Build risk level clause: allow everything up to max_risk_level
        risk_clauses = {
            'LOW': "'LOW'",
            'MEDIUM': "'LOW', 'MEDIUM'",
            'HIGH': "'LOW', 'MEDIUM', 'HIGH'"
        }
        risk_clause = risk_clauses.get(max_risk_level, "'LOW', 'MEDIUM'")

        query = f"""
            SELECT * FROM method_strategies
            WHERE platform = ?
              AND is_active = 1
              AND risk_level IN ({risk_clause})
              {exclusion_clause}
            ORDER BY priority DESC, success_rate DESC
            LIMIT 1
        """

        params = [platform] + excluded_methods
        result = self.db_manager.fetch_one(query, params)

        if not result:
            return None

        strategy = self._row_to_strategy(result)

        # Check whether the method is on cooldown
        if strategy.is_on_cooldown:
            # Try to find another method
            excluded_methods.append(strategy.method_name)
            return self.get_next_available_method(platform, excluded_methods, max_risk_level)

        return strategy

    def disable_method(self, platform: str, method_name: str, reason: str) -> None:
        """Disable a method temporarily or permanently"""
        query = """
            UPDATE method_strategies
            SET is_active = 0, updated_at = ?
            WHERE platform = ? AND method_name = ?
        """
        self.db_manager.execute_query(query, (datetime.now().isoformat(), platform, method_name))

        # Log the reason in the configuration
        strategy = self.find_by_platform_and_method(platform, method_name)
        if strategy:
            strategy.configuration['disabled_reason'] = reason
            strategy.configuration['disabled_at'] = datetime.now().isoformat()
            self.save(strategy)

    def enable_method(self, platform: str, method_name: str) -> None:
        """Re-enable a disabled method"""
        query = """
            UPDATE method_strategies
            SET is_active = 1, updated_at = ?
            WHERE platform = ? AND method_name = ?
        """
        self.db_manager.execute_query(query, (datetime.now().isoformat(), platform, method_name))

        # Clear the disabled reason from the configuration
        strategy = self.find_by_platform_and_method(platform, method_name)
        if strategy:
            strategy.configuration.pop('disabled_reason', None)
            strategy.configuration.pop('disabled_at', None)
            self.save(strategy)

    def get_platform_statistics(self, platform: str) -> Dict[str, Any]:
        """Get aggregated statistics for all methods on a platform"""
        query = """
            SELECT
                COUNT(*) as total_methods,
                COUNT(CASE WHEN is_active = 1 THEN 1 END) as active_methods,
                AVG(success_rate) as avg_success_rate,
                MAX(success_rate) as best_success_rate,
                MIN(success_rate) as worst_success_rate,
                AVG(priority) as avg_priority,
                COUNT(CASE WHEN last_success > datetime('now', '-24 hours') THEN 1 END) as recent_successes,
                COUNT(CASE WHEN last_failure > datetime('now', '-24 hours') THEN 1 END) as recent_failures
            FROM method_strategies
            WHERE platform = ?
        """

        result = self.db_manager.fetch_one(query, (platform,))

        if not result:
            return {}

        return {
            'total_methods': result[0] or 0,
            'active_methods': result[1] or 0,
            'avg_success_rate': round(result[2] or 0.0, 3),
            'best_success_rate': result[3] or 0.0,
            'worst_success_rate': result[4] or 0.0,
            'avg_priority': round(result[5] or 0.0, 1),
            'recent_successes_24h': result[6] or 0,
            'recent_failures_24h': result[7] or 0
        }

    def cleanup_old_data(self, days_to_keep: int = 90) -> int:
        """Clean up old performance data and return the number of records affected"""
        # This implementation doesn't remove strategies but resets old performance data
        cutoff_date = datetime.now() - timedelta(days=days_to_keep)

        query = """
            UPDATE method_strategies
            SET last_success = NULL, last_failure = NULL, success_rate = 0.0, failure_rate = 0.0
            WHERE (last_success < ? OR last_failure < ?)
              AND (last_success IS NOT NULL OR last_failure IS NOT NULL)
        """

        cursor = self.db_manager.execute_query(query, (cutoff_date.isoformat(), cutoff_date.isoformat()))
        return cursor.rowcount if cursor else 0

    def get_methods_by_risk_level(self, platform: str, risk_level: RiskLevel) -> List[MethodStrategy]:
        """Get methods filtered by risk level"""
        query = """
            SELECT * FROM method_strategies
            WHERE platform = ? AND risk_level = ? AND is_active = 1
            ORDER BY priority DESC, success_rate DESC
        """
        results = self.db_manager.fetch_all(query, (platform, risk_level.value))
        return [self._row_to_strategy(row) for row in results]

    def get_emergency_methods(self, platform: str) -> List[MethodStrategy]:
        """Get only the most reliable methods for emergency mode"""
        query = """
            SELECT * FROM method_strategies
            WHERE platform = ?
              AND is_active = 1
              AND risk_level = 'LOW'
              AND success_rate > 0.5
            ORDER BY success_rate DESC, priority DESC
            LIMIT 2
        """
        results = self.db_manager.fetch_all(query, (platform,))
        return [self._row_to_strategy(row) for row in results]

    def bulk_update_priorities(self, platform: str, priority_updates: Dict[str, int]) -> None:
        """Bulk update method priorities for a platform"""
        query = """
            UPDATE method_strategies
            SET priority = ?, updated_at = ?
            WHERE platform = ? AND method_name = ?
        """

        params_list = [
            (priority, datetime.now().isoformat(), platform, method_name)
            for method_name, priority in priority_updates.items()
        ]

        with self.db_manager.get_connection() as conn:
            conn.executemany(query, params_list)
            conn.commit()

    def _row_to_strategy(self, row) -> MethodStrategy:
        """Convert database row to MethodStrategy entity"""
        return MethodStrategy(
            strategy_id=row[0],
            platform=row[1],
            method_name=row[2],
            priority=row[3],
            success_rate=row[4],
            failure_rate=row[5],
            last_success=datetime.fromisoformat(row[6]) if row[6] else None,
            last_failure=datetime.fromisoformat(row[7]) if row[7] else None,
            cooldown_period=row[8],
            max_daily_attempts=row[9],
            risk_level=RiskLevel(row[10]),
            is_active=bool(row[11]),
            configuration=json.loads(row[12]) if row[12] else {},
            tags=json.loads(row[13]) if row[13] else [],
            created_at=datetime.fromisoformat(row[14]),
            updated_at=datetime.fromisoformat(row[15])
        )
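
The selection flow above recurses past methods that are on cooldown. A caller sketch (DatabaseManager construction is assumed, the method names are placeholders, and run_method is a hypothetical executor):

    repo = MethodStrategyRepository(db_manager)

    strategy = repo.get_next_available_method(
        "instagram",
        excluded_methods=["phone"],
        max_risk_level="MEDIUM"
    )
    if strategy:
        success = run_method(strategy)  # hypothetical executor
        repo.update_performance_metrics(strategy.strategy_id, success)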
infrastructure/repositories/platform_method_state_repository.py (new regular file, 233 lines)
@@ -0,0 +1,233 @@
"""
SQLite implementation of platform method state repository.
Handles persistence and retrieval of platform-specific rotation states.
"""

import json
from datetime import datetime, timedelta
from typing import List, Optional

from domain.entities.method_rotation import PlatformMethodState, RotationStrategy
from domain.repositories.method_rotation_repository import IPlatformMethodStateRepository
from database.db_manager import DatabaseManager


class PlatformMethodStateRepository(IPlatformMethodStateRepository):
    """SQLite implementation of platform method state repository"""

    def __init__(self, db_manager):
        self.db_manager = db_manager

    def save(self, state: PlatformMethodState) -> None:
        """Save or update platform method state"""
        state.updated_at = datetime.now()

        query = """
            INSERT OR REPLACE INTO platform_method_states (
                id, platform, last_successful_method, last_successful_at,
                preferred_methods, blocked_methods, daily_attempt_counts,
                reset_date, rotation_strategy, emergency_mode, metadata, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        state_id = f"state_{state.platform}"

        params = (
            state_id,
            state.platform,
            state.last_successful_method,
            state.last_successful_at.isoformat() if state.last_successful_at else None,
            json.dumps(state.preferred_methods),
            json.dumps(state.blocked_methods),
            json.dumps(state.daily_attempt_counts),
            state.reset_date.isoformat(),
            state.rotation_strategy.value,
            state.emergency_mode,
            json.dumps(state.metadata),
            state.updated_at.isoformat()
        )

        self.db_manager.execute_query(query, params)

    def find_by_platform(self, platform: str) -> Optional[PlatformMethodState]:
        """Find method state for a platform"""
        query = "SELECT * FROM platform_method_states WHERE platform = ?"
        result = self.db_manager.fetch_one(query, (platform,))
        return self._row_to_state(result) if result else None

    def get_or_create_state(self, platform: str) -> PlatformMethodState:
        """Get existing state or create a new one with defaults"""
        state = self.find_by_platform(platform)
        if state:
            return state

        # Create a new state with defaults
        new_state = PlatformMethodState(
            platform=platform,
            preferred_methods=self._get_default_methods(platform),
            rotation_strategy=RotationStrategy.ADAPTIVE,
            reset_date=datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        )
        self.save(new_state)
        return new_state

    def update_daily_attempts(self, platform: str, method_name: str) -> None:
        """Increment the daily attempt counter for a method"""
        state = self.get_or_create_state(platform)
        state.increment_daily_attempts(method_name)
        self.save(state)

    def reset_daily_counters(self, platform: str) -> None:
        """Reset daily attempt counters (typically called at midnight)"""
        state = self.find_by_platform(platform)
        if not state:
            return

        state.daily_attempt_counts = {}
        state.reset_date = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        self.save(state)

    def block_method(self, platform: str, method_name: str, reason: str) -> None:
        """Block a method temporarily"""
        state = self.get_or_create_state(platform)
        state.block_method(method_name, reason)
        self.save(state)

    def unblock_method(self, platform: str, method_name: str) -> None:
        """Unblock a previously blocked method"""
        state = self.get_or_create_state(platform)
        state.unblock_method(method_name)
        self.save(state)

    def record_method_success(self, platform: str, method_name: str) -> None:
        """Record a successful method execution"""
        state = self.get_or_create_state(platform)
        state.record_success(method_name)
        self.save(state)

    def get_preferred_method_order(self, platform: str) -> List[str]:
        """Get the preferred method order for a platform"""
        state = self.find_by_platform(platform)
        if not state:
            return self._get_default_methods(platform)
        return state.preferred_methods

    def set_emergency_mode(self, platform: str, enabled: bool) -> None:
        """Enable/disable emergency mode for a platform"""
        state = self.get_or_create_state(platform)
        state.emergency_mode = enabled

        if enabled:
            # In emergency mode, prefer only low-risk methods
            state.metadata['emergency_activated_at'] = datetime.now().isoformat()
            state.metadata['pre_emergency_preferred'] = state.preferred_methods.copy()
            # Filter to only include low-risk methods
            emergency_methods = [m for m in state.preferred_methods if m in ['email', 'standard_registration']]
            if emergency_methods:
                state.preferred_methods = emergency_methods
        else:
            # Restore the previous preferred methods
            if 'pre_emergency_preferred' in state.metadata:
                state.preferred_methods = state.metadata.pop('pre_emergency_preferred')
            state.metadata.pop('emergency_activated_at', None)

        self.save(state)

    def get_daily_attempt_counts(self, platform: str) -> dict:
        """Get current daily attempt counts for all methods"""
        state = self.find_by_platform(platform)
        if not state:
            return {}
        return state.daily_attempt_counts.copy()

    def is_method_available(self, platform: str, method_name: str, max_daily_attempts: int) -> bool:
        """Check whether a method is available for use"""
        state = self.find_by_platform(platform)
        if not state:
            return True
        return state.is_method_available(method_name, max_daily_attempts)

    def get_blocked_methods(self, platform: str) -> List[str]:
        """Get the list of currently blocked methods"""
        state = self.find_by_platform(platform)
        if not state:
            return []
        return state.blocked_methods.copy()

    def update_rotation_strategy(self, platform: str, strategy: RotationStrategy) -> None:
        """Update the rotation strategy for a platform"""
        state = self.get_or_create_state(platform)
        state.rotation_strategy = strategy
        state.metadata['strategy_changed_at'] = datetime.now().isoformat()
        self.save(state)

    def bulk_reset_daily_counters(self) -> int:
        """Reset daily counters for all platforms (maintenance operation)"""
        today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)

        query = """
            UPDATE platform_method_states
            SET daily_attempt_counts = '{}',
                reset_date = ?,
                updated_at = ?
            WHERE reset_date < ?
        """

        cursor = self.db_manager.execute_query(query, (
            today.isoformat(),
            datetime.now().isoformat(),
            today.isoformat()
        ))
        return cursor.rowcount if cursor else 0

    def get_all_platform_states(self) -> List[PlatformMethodState]:
        """Get states for all platforms"""
        query = "SELECT * FROM platform_method_states ORDER BY platform"
        results = self.db_manager.fetch_all(query)
        return [self._row_to_state(row) for row in results]

    def cleanup_emergency_modes(self, hours_threshold: int = 24) -> int:
        """Automatically disable emergency modes that have been active too long"""
        cutoff_time = datetime.now() - timedelta(hours=hours_threshold)

        query = """
            SELECT platform FROM platform_method_states
            WHERE emergency_mode = 1
              AND JSON_EXTRACT(metadata, '$.emergency_activated_at') < ?
        """

        results = self.db_manager.fetch_all(query, (cutoff_time.isoformat(),))
        count = 0

        for row in results:
            platform = row[0]
            self.set_emergency_mode(platform, False)
            count += 1

        return count

    def _row_to_state(self, row) -> PlatformMethodState:
        """Convert database row to PlatformMethodState entity"""
        return PlatformMethodState(
            platform=row[1],
            last_successful_method=row[2],
            last_successful_at=datetime.fromisoformat(row[3]) if row[3] else None,
            preferred_methods=json.loads(row[4]) if row[4] else [],
            blocked_methods=json.loads(row[5]) if row[5] else [],
            daily_attempt_counts=json.loads(row[6]) if row[6] else {},
            reset_date=datetime.fromisoformat(row[7]),
            rotation_strategy=RotationStrategy(row[8]),
            emergency_mode=bool(row[9]),
            metadata=json.loads(row[10]) if row[10] else {},
            updated_at=datetime.fromisoformat(row[11])
        )

    def _get_default_methods(self, platform: str) -> List[str]:
        """Get the default method order for a platform"""
        default_methods = {
            'instagram': ['email', 'phone', 'social_login'],
            'tiktok': ['email', 'phone'],
            'x': ['email', 'phone'],
            'gmail': ['standard_registration', 'recovery_registration']
        }
        return default_methods.get(platform, ['email'])
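
A sketch of the emergency-mode round trip above (the platform name is a placeholder; state rows are created on demand by get_or_create_state):

    repo = PlatformMethodStateRepository(db_manager)

    repo.set_emergency_mode("instagram", True)   # narrows preferred methods to low-risk ones
    repo.update_daily_attempts("instagram", "email")
    print(repo.get_daily_attempt_counts("instagram"))
    repo.set_emergency_mode("instagram", False)  # restores pre-emergency preferences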
infrastructure/repositories/rate_limit_repository.py (new regular file, 252 lines)
@@ -0,0 +1,252 @@
"""
Rate Limit Repository - persistence of rate limit events and policies
"""

import json
import sqlite3
from typing import List, Optional, Dict, Any
from datetime import datetime, timedelta
from collections import defaultdict

from infrastructure.repositories.base_repository import BaseRepository
from domain.entities.rate_limit_policy import RateLimitPolicy
from domain.value_objects.action_timing import ActionTiming, ActionType


class RateLimitRepository(BaseRepository):
    """Repository for rate limit data"""

    def save_timing(self, timing: ActionTiming) -> None:
        """Stores an action timing event"""
        query = """
            INSERT INTO rate_limit_events (
                timestamp, action_type, duration_ms, success, response_code,
                session_id, url, element_selector, error_message, retry_count, metadata
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        params = (
            timing.timestamp,
            timing.action_type.value,
            int(timing.duration_ms),
            timing.success,
            timing.metadata.get('response_code') if timing.metadata else None,
            timing.metadata.get('session_id') if timing.metadata else None,
            timing.url,
            timing.element_selector,
            timing.error_message,
            timing.retry_count,
            self._serialize_json(timing.metadata) if timing.metadata else None
        )

        self._execute_insert(query, params)

    def get_recent_timings(self, action_type: Optional[ActionType] = None,
                           hours: int = 1) -> List[ActionTiming]:
        """Fetches recent timing events"""
        query = """
            SELECT * FROM rate_limit_events
            WHERE timestamp > datetime('now', '-' || ? || ' hours')
        """
        params = [hours]

        if action_type:
            query += " AND action_type = ?"
            params.append(action_type.value)

        query += " ORDER BY timestamp DESC"

        rows = self._execute_query(query, tuple(params))

        timings = []
        for row in rows:
            timing = ActionTiming(
                action_type=ActionType(row['action_type']),
                timestamp=self._parse_datetime(row['timestamp']),
                duration=row['duration_ms'] / 1000.0,
                success=bool(row['success']),
                url=row['url'],
                element_selector=row['element_selector'],
                error_message=row['error_message'],
                retry_count=row['retry_count'],
                metadata=self._deserialize_json(row['metadata'])
            )
            timings.append(timing)

        return timings

    def save_policy(self, action_type: ActionType, policy: RateLimitPolicy) -> None:
        """Stores or updates a rate limit policy"""
        query = """
            INSERT OR REPLACE INTO rate_limit_policies (
                action_type, min_delay, max_delay, adaptive,
                backoff_multiplier, max_retries, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?)
        """

        params = (
            action_type.value,
            policy.min_delay,
            policy.max_delay,
            policy.adaptive,
            policy.backoff_multiplier,
            policy.max_retries,
            datetime.now()
        )

        self._execute_insert(query, params)

    def get_policy(self, action_type: ActionType) -> Optional[RateLimitPolicy]:
        """Fetches a rate limit policy"""
        query = "SELECT * FROM rate_limit_policies WHERE action_type = ?"
        rows = self._execute_query(query, (action_type.value,))

        if not rows:
            return None

        row = rows[0]
        return RateLimitPolicy(
            min_delay=row['min_delay'],
            max_delay=row['max_delay'],
            adaptive=bool(row['adaptive']),
            backoff_multiplier=row['backoff_multiplier'],
            max_retries=row['max_retries']
        )

    def get_all_policies(self) -> Dict[ActionType, RateLimitPolicy]:
        """Fetches all stored policies"""
        query = "SELECT * FROM rate_limit_policies"
        rows = self._execute_query(query)

        policies = {}
        for row in rows:
            try:
                action_type = ActionType(row['action_type'])
                policy = RateLimitPolicy(
                    min_delay=row['min_delay'],
                    max_delay=row['max_delay'],
                    adaptive=bool(row['adaptive']),
                    backoff_multiplier=row['backoff_multiplier'],
                    max_retries=row['max_retries']
                )
                policies[action_type] = policy
            except ValueError:
                # Unknown ActionType
                pass

        return policies

    def get_statistics(self, action_type: Optional[ActionType] = None,
                       timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """Computes statistics about rate limiting"""
        query = """
            SELECT
                action_type,
                COUNT(*) as total_actions,
                AVG(duration_ms) as avg_duration_ms,
                MIN(duration_ms) as min_duration_ms,
                MAX(duration_ms) as max_duration_ms,
                SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as successful_actions,
                SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as failed_actions,
                AVG(retry_count) as avg_retry_count,
                MAX(retry_count) as max_retry_count
            FROM rate_limit_events
            WHERE 1=1
        """
        params = []

        if timeframe:
            query += " AND timestamp > datetime('now', '-' || ? || ' seconds')"
            params.append(int(timeframe.total_seconds()))
|
||||
|
||||
if action_type:
|
||||
query += " AND action_type = ?"
|
||||
params.append(action_type.value)
|
||||
query += " GROUP BY action_type"
|
||||
else:
|
||||
query += " GROUP BY action_type"
|
||||
|
||||
rows = self._execute_query(query, tuple(params))
|
||||
|
||||
if action_type and rows:
|
||||
# Einzelne Action Type Statistik
|
||||
row = rows[0]
|
||||
return {
|
||||
'action_type': row['action_type'],
|
||||
'total_actions': row['total_actions'],
|
||||
'avg_duration_ms': row['avg_duration_ms'],
|
||||
'min_duration_ms': row['min_duration_ms'],
|
||||
'max_duration_ms': row['max_duration_ms'],
|
||||
'success_rate': row['successful_actions'] / row['total_actions'] if row['total_actions'] > 0 else 0,
|
||||
'failed_actions': row['failed_actions'],
|
||||
'avg_retry_count': row['avg_retry_count'],
|
||||
'max_retry_count': row['max_retry_count']
|
||||
}
|
||||
else:
|
||||
# Statistiken für alle Action Types
|
||||
stats = {}
|
||||
for row in rows:
|
||||
stats[row['action_type']] = {
|
||||
'total_actions': row['total_actions'],
|
||||
'avg_duration_ms': row['avg_duration_ms'],
|
||||
'min_duration_ms': row['min_duration_ms'],
|
||||
'max_duration_ms': row['max_duration_ms'],
|
||||
'success_rate': row['successful_actions'] / row['total_actions'] if row['total_actions'] > 0 else 0,
|
||||
'failed_actions': row['failed_actions'],
|
||||
'avg_retry_count': row['avg_retry_count'],
|
||||
'max_retry_count': row['max_retry_count']
|
||||
}
|
||||
return stats
|
||||
|
||||
def detect_anomalies(self, action_type: ActionType,
|
||||
threshold_multiplier: float = 2.0) -> List[Dict[str, Any]]:
|
||||
"""Erkennt Anomalien in den Timing-Daten"""
|
||||
# Berechne Durchschnitt und Standardabweichung
|
||||
query = """
|
||||
SELECT
|
||||
AVG(duration_ms) as avg_duration,
|
||||
AVG(duration_ms * duration_ms) - AVG(duration_ms) * AVG(duration_ms) as variance
|
||||
FROM rate_limit_events
|
||||
WHERE action_type = ?
|
||||
AND timestamp > datetime('now', '-1 hour')
|
||||
AND success = 1
|
||||
"""
|
||||
|
||||
row = self._execute_query(query, (action_type.value,))[0]
|
||||
|
||||
if not row['avg_duration']:
|
||||
return []
|
||||
|
||||
avg_duration = row['avg_duration']
|
||||
std_dev = (row['variance'] ** 0.5) if row['variance'] > 0 else 0
|
||||
threshold = avg_duration + (std_dev * threshold_multiplier)
|
||||
|
||||
# Finde Anomalien
|
||||
query = """
|
||||
SELECT * FROM rate_limit_events
|
||||
WHERE action_type = ?
|
||||
AND timestamp > datetime('now', '-1 hour')
|
||||
AND duration_ms > ?
|
||||
ORDER BY duration_ms DESC
|
||||
LIMIT 10
|
||||
"""
|
||||
|
||||
rows = self._execute_query(query, (action_type.value, threshold))
|
||||
|
||||
anomalies = []
|
||||
for row in rows:
|
||||
anomalies.append({
|
||||
'timestamp': row['timestamp'],
|
||||
'duration_ms': row['duration_ms'],
|
||||
'deviation': (row['duration_ms'] - avg_duration) / std_dev if std_dev > 0 else 0,
|
||||
'success': bool(row['success']),
|
||||
'url': row['url'],
|
||||
'error_message': row['error_message']
|
||||
})
|
||||
|
||||
return anomalies
|
||||
|
||||
def cleanup_old_events(self, older_than: datetime) -> int:
|
||||
"""Bereinigt alte Rate Limit Events"""
|
||||
query = "DELETE FROM rate_limit_events WHERE timestamp < ?"
|
||||
return self._execute_delete(query, (older_than,))
|
||||
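A minimal usage sketch (not part of the commit) of the repository above. `ActionType.CLICK` is a hypothetical enum member, and the `ActionTiming` constructor arguments are inferred from how `get_recent_timings` rebuilds the object; the underlying tables are assumed to exist.

    from datetime import datetime, timedelta

    repo = RateLimitRepository()  # assumes BaseRepository wires up its own connection

    # Record one timing event (fields mirror those read back in get_recent_timings)
    timing = ActionTiming(
        action_type=ActionType.CLICK,   # hypothetical member
        timestamp=datetime.now(),
        duration=0.35,                  # seconds; persisted as duration_ms
        success=True,
        url="https://example.com/login",
        element_selector="#submit",
        error_message=None,
        retry_count=0,
        metadata={"session_id": "abc123"},
    )
    repo.save_timing(timing)

    # Aggregate success rate over the last hour for that action type
    stats = repo.get_statistics(action_type=ActionType.CLICK, timeframe=timedelta(hours=1))
    print(stats.get("success_rate"))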
254  infrastructure/repositories/rotation_session_repository.py  (Normal file)
@@ -0,0 +1,254 @@
"""
SQLite implementation of rotation session repository.
Handles persistence and retrieval of rotation sessions.
"""

import json
import sqlite3
from datetime import datetime, timedelta
from typing import List, Optional, Dict, Any

from domain.entities.method_rotation import RotationSession
from domain.repositories.method_rotation_repository import IRotationSessionRepository
from database.db_manager import DatabaseManager


class RotationSessionRepository(IRotationSessionRepository):
    """SQLite implementation of rotation session repository"""

    def __init__(self, db_manager: DatabaseManager):
        self.db_manager = db_manager

    def save(self, session: RotationSession) -> None:
        """Save or update a rotation session"""
        query = """
            INSERT OR REPLACE INTO rotation_sessions (
                id, platform, account_id, current_method, attempted_methods,
                session_start, last_rotation, rotation_count, success_count,
                failure_count, is_active, rotation_reason, fingerprint_id, session_metadata
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """

        params = (
            session.session_id,
            session.platform,
            session.account_id,
            session.current_method,
            json.dumps(session.attempted_methods),
            session.session_start.isoformat(),
            session.last_rotation.isoformat() if session.last_rotation else None,
            session.rotation_count,
            session.success_count,
            session.failure_count,
            session.is_active,
            session.rotation_reason,
            session.fingerprint_id,
            json.dumps(session.session_metadata)
        )

        self.db_manager.execute_query(query, params)

    def find_by_id(self, session_id: str) -> Optional[RotationSession]:
        """Find a session by its ID"""
        query = "SELECT * FROM rotation_sessions WHERE id = ?"
        result = self.db_manager.fetch_one(query, (session_id,))
        return self._row_to_session(result) if result else None

    def find_active_session(self, platform: str, account_id: Optional[str] = None) -> Optional[RotationSession]:
        """Find an active session for a platform/account"""
        if account_id:
            query = """
                SELECT * FROM rotation_sessions
                WHERE platform = ? AND account_id = ? AND is_active = 1
                ORDER BY session_start DESC
                LIMIT 1
            """
            params = (platform, account_id)
        else:
            query = """
                SELECT * FROM rotation_sessions
                WHERE platform = ? AND is_active = 1
                ORDER BY session_start DESC
                LIMIT 1
            """
            params = (platform,)

        result = self.db_manager.fetch_one(query, params)
        return self._row_to_session(result) if result else None

    def find_active_sessions_by_platform(self, platform: str) -> List[RotationSession]:
        """Find all active sessions for a platform"""
        query = """
            SELECT * FROM rotation_sessions
            WHERE platform = ? AND is_active = 1
            ORDER BY session_start DESC
        """
        results = self.db_manager.fetch_all(query, (platform,))
        return [self._row_to_session(row) for row in results]

    def update_session_metrics(self, session_id: str, success: bool,
                               method_name: str, error_message: Optional[str] = None) -> None:
        """Update session metrics after a method attempt"""
        session = self.find_by_id(session_id)
        if not session:
            return

        session.add_attempt(method_name, success, error_message)
        self.save(session)

    def archive_session(self, session_id: str, final_success: bool = False) -> None:
        """Mark a session as completed/archived"""
        session = self.find_by_id(session_id)
        if not session:
            return

        session.complete_session(final_success)
        self.save(session)

    def get_session_history(self, platform: str, limit: int = 100) -> List[RotationSession]:
        """Get recent session history for a platform"""
        query = """
            SELECT * FROM rotation_sessions
            WHERE platform = ?
            ORDER BY session_start DESC
            LIMIT ?
        """
        results = self.db_manager.fetch_all(query, (platform, limit))
        return [self._row_to_session(row) for row in results]

    def get_session_statistics(self, platform: str, days: int = 30) -> Dict[str, Any]:
        """Get session statistics for a platform over specified days"""
        cutoff_date = datetime.now() - timedelta(days=days)

        query = """
            SELECT
                COUNT(*) as total_sessions,
                COUNT(CASE WHEN is_active = 1 THEN 1 END) as active_sessions,
                COUNT(CASE WHEN is_active = 0 AND JSON_EXTRACT(session_metadata, '$.final_success') = 1 THEN 1 END) as successful_sessions,
                COUNT(CASE WHEN is_active = 0 AND JSON_EXTRACT(session_metadata, '$.final_success') = 0 THEN 1 END) as failed_sessions,
                AVG(rotation_count) as avg_rotations,
                MAX(rotation_count) as max_rotations,
                AVG(success_count + failure_count) as avg_attempts,
                AVG(CASE WHEN success_count + failure_count > 0 THEN success_count * 1.0 / (success_count + failure_count) ELSE 0 END) as avg_success_rate
            FROM rotation_sessions
            WHERE platform = ? AND session_start >= ?
        """

        result = self.db_manager.fetch_one(query, (platform, cutoff_date.isoformat()))

        if not result:
            return {}

        return {
            'total_sessions': result[0] or 0,
            'active_sessions': result[1] or 0,
            'successful_sessions': result[2] or 0,
            'failed_sessions': result[3] or 0,
            'avg_rotations_per_session': round(result[4] or 0.0, 2),
            'max_rotations_in_session': result[5] or 0,
            'avg_attempts_per_session': round(result[6] or 0.0, 2),
            'avg_session_success_rate': round(result[7] or 0.0, 3)
        }

    def cleanup_old_sessions(self, days_to_keep: int = 30) -> int:
        """Clean up old session data and return number of records removed"""
        cutoff_date = datetime.now() - timedelta(days=days_to_keep)

        query = """
            DELETE FROM rotation_sessions
            WHERE is_active = 0 AND session_start < ?
        """

        cursor = self.db_manager.execute_query(query, (cutoff_date.isoformat(),))
        return cursor.rowcount if cursor else 0

    def get_method_usage_statistics(self, platform: str, days: int = 30) -> Dict[str, Any]:
        """Get method usage statistics from sessions"""
        cutoff_date = datetime.now() - timedelta(days=days)

        query = """
            SELECT
                current_method,
                COUNT(*) as usage_count,
                AVG(success_count) as avg_success_count,
                AVG(failure_count) as avg_failure_count,
                AVG(rotation_count) as avg_rotation_count
            FROM rotation_sessions
            WHERE platform = ? AND session_start >= ?
            GROUP BY current_method
            ORDER BY usage_count DESC
        """

        results = self.db_manager.fetch_all(query, (platform, cutoff_date.isoformat()))

        method_stats = {}
        for row in results:
            method_stats[row[0]] = {
                'usage_count': row[1],
                'avg_success_count': round(row[2] or 0.0, 2),
                'avg_failure_count': round(row[3] or 0.0, 2),
                'avg_rotation_count': round(row[4] or 0.0, 2)
            }

        return method_stats

    def find_sessions_by_fingerprint(self, fingerprint_id: str) -> List[RotationSession]:
        """Find sessions associated with a specific fingerprint"""
        query = """
            SELECT * FROM rotation_sessions
            WHERE fingerprint_id = ?
            ORDER BY session_start DESC
        """
        results = self.db_manager.fetch_all(query, (fingerprint_id,))
        return [self._row_to_session(row) for row in results]

    def get_long_running_sessions(self, hours: int = 24) -> List[RotationSession]:
        """Find sessions that have been running for too long"""
        cutoff_time = datetime.now() - timedelta(hours=hours)

        query = """
            SELECT * FROM rotation_sessions
            WHERE is_active = 1 AND session_start < ?
            ORDER BY session_start ASC
        """

        results = self.db_manager.fetch_all(query, (cutoff_time.isoformat(),))
        return [self._row_to_session(row) for row in results]

    def force_archive_stale_sessions(self, hours: int = 24) -> int:
        """Force archive sessions that have been running too long"""
        cutoff_time = datetime.now() - timedelta(hours=hours)

        query = """
            UPDATE rotation_sessions
            SET is_active = 0,
                session_metadata = JSON_SET(
                    session_metadata,
                    '$.completed_at', ?,
                    '$.final_success', 0,
                    '$.force_archived', 1
                )
            WHERE is_active = 1 AND session_start < ?
        """

        cursor = self.db_manager.execute_query(query, (datetime.now().isoformat(), cutoff_time.isoformat()))
        return cursor.rowcount if cursor else 0

    def _row_to_session(self, row) -> RotationSession:
        """Convert database row to RotationSession entity"""
        return RotationSession(
            session_id=row[0],
            platform=row[1],
            account_id=row[2],
            current_method=row[3],
            attempted_methods=json.loads(row[4]) if row[4] else [],
            session_start=datetime.fromisoformat(row[5]),
            last_rotation=datetime.fromisoformat(row[6]) if row[6] else None,
            rotation_count=row[7],
            success_count=row[8],
            failure_count=row[9],
            is_active=bool(row[10]),
            rotation_reason=row[11],
            fingerprint_id=row[12],
            session_metadata=json.loads(row[13]) if row[13] else {}
        )
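A lifecycle sketch (not part of the commit) showing how a rotation session moves through the repository above: save, record an attempt, archive. The `RotationSession` constructor arguments mirror `_row_to_session`; the bare `DatabaseManager()` construction is an assumption.

    from datetime import datetime

    db = DatabaseManager()                      # assumed default constructor
    repo = RotationSessionRepository(db)

    session = RotationSession(
        session_id="sess-001",
        platform="instagram",
        account_id="acc-42",
        current_method="email",
        attempted_methods=[],
        session_start=datetime.now(),
        last_rotation=None,
        rotation_count=0,
        success_count=0,
        failure_count=0,
        is_active=True,
        rotation_reason=None,
        fingerprint_id=None,
        session_metadata={},
    )
    repo.save(session)
    repo.update_session_metrics("sess-001", success=False, method_name="email",
                                error_message="captcha")
    repo.archive_session("sess-001", final_success=False)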
11  infrastructure/services/__init__.py  (Normal file)
@@ -0,0 +1,11 @@
"""
Infrastructure Services - Concrete implementations of the domain services
"""

from .instagram_rate_limit_service import InstagramRateLimitService
from .advanced_fingerprint_service import AdvancedFingerprintService

__all__ = [
    'InstagramRateLimitService',
    'AdvancedFingerprintService'
]
868  infrastructure/services/advanced_fingerprint_service.py  (Normal file)
@@ -0,0 +1,868 @@
"""
Advanced Fingerprint Service - Extended browser fingerprinting implementation
"""

import random
import json
import logging
import hashlib
from typing import List, Optional, Dict, Any, Tuple
from datetime import datetime, timedelta
import uuid

from domain.services.fingerprint_service import IFingerprintService
from domain.entities.browser_fingerprint import (
    BrowserFingerprint, CanvasNoise, WebRTCConfig,
    HardwareConfig, NavigatorProperties, StaticComponents
)
from infrastructure.repositories.fingerprint_repository import FingerprintRepository

logger = logging.getLogger("advanced_fingerprint_service")


class FingerprintProfiles:
    """Predefined realistic fingerprint profiles"""

    DESKTOP_PROFILES = [
        {
            "name": "Windows Chrome User",
            "platform": "Win32",
            "hardware_concurrency": [4, 8, 16],
            "device_memory": [4, 8, 16],
            "screen_resolution": [(1920, 1080), (2560, 1440), (1366, 768)],
            "vendor": "Google Inc.",
            "renderer": ["ANGLE (Intel HD Graphics)", "ANGLE (NVIDIA GeForce GTX)", "ANGLE (AMD Radeon)"]
        },
        {
            "name": "MacOS Safari User",
            "platform": "MacIntel",
            "hardware_concurrency": [4, 8, 12],
            "device_memory": [8, 16, 32],
            "screen_resolution": [(1440, 900), (2560, 1600), (5120, 2880)],
            "vendor": "Apple Inc.",
            "renderer": ["Apple M1", "Intel Iris", "AMD Radeon Pro"]
        }
    ]

    MOBILE_PROFILES = [
        {
            "name": "Android Chrome",
            "platform": "Linux armv8l",
            "hardware_concurrency": [4, 6, 8],
            "device_memory": [3, 4, 6, 8],
            "screen_resolution": [(360, 740), (375, 812), (414, 896)],
            "vendor": "Google Inc.",
            "renderer": ["Adreno", "Mali", "PowerVR"]
        },
        {
            "name": "iOS Safari",
            "platform": "iPhone",
            "hardware_concurrency": [2, 4, 6],
            "device_memory": [2, 3, 4],
            "screen_resolution": [(375, 667), (375, 812), (414, 896)],
            "vendor": "Apple Inc.",
            "renderer": ["Apple GPU"]
        }
    ]

    COMMON_FONTS = {
        "windows": [
            "Arial", "Arial Black", "Comic Sans MS", "Courier New",
            "Georgia", "Impact", "Times New Roman", "Trebuchet MS",
            "Verdana", "Webdings", "Wingdings", "Calibri", "Cambria",
            "Consolas", "Segoe UI", "Tahoma"
        ],
        "mac": [
            "Arial", "Arial Black", "Comic Sans MS", "Courier New",
            "Georgia", "Helvetica", "Helvetica Neue", "Times New Roman",
            "Trebuchet MS", "Verdana", "American Typewriter", "Avenir",
            "Baskerville", "Big Caslon", "Futura", "Geneva", "Gill Sans"
        ],
        "linux": [
            "Arial", "Courier New", "Times New Roman", "DejaVu Sans",
            "DejaVu Serif", "DejaVu Sans Mono", "Liberation Sans",
            "Liberation Serif", "Ubuntu", "Droid Sans", "Noto Sans"
        ]
    }


class AdvancedFingerprintService(IFingerprintService):
    """Extended fingerprint service implementation"""

    def __init__(self, repository: FingerprintRepository = None):
        self.repository = repository or FingerprintRepository()
        self.profiles = FingerprintProfiles()
        self.fingerprint_cache = {}

    def generate_fingerprint(self, profile_type: Optional[str] = None,
                             platform: Optional[str] = None,
                             proxy_location: Optional[str] = None,
                             account_id: Optional[str] = None) -> BrowserFingerprint:
        """Generates a realistic fingerprint"""
        # Select the profile type
        if profile_type == "mobile":
            profile = random.choice(self.profiles.MOBILE_PROFILES)
        else:
            profile = random.choice(self.profiles.DESKTOP_PROFILES)

        # No profile name contains the substring "mobile", so the original
        # name-based checks never matched; detect mobile via the profile list instead
        is_mobile = profile in self.profiles.MOBILE_PROFILES

        # Canvas noise configuration
        canvas_noise = CanvasNoise(
            noise_level=random.uniform(0.01, 0.05),
            seed=random.randint(1000, 9999),
            algorithm=random.choice(["gaussian", "uniform", "perlin"])
        )

        # WebRTC configuration
        webrtc_config = WebRTCConfig(
            enabled=random.choice([True, False]),
            ice_servers=["stun:stun.l.google.com:19302"] if random.random() > 0.5 else [],
            local_ip_mask=f"10.0.{random.randint(0, 255)}.x",
            disable_webrtc=random.random() < 0.3  # 30% have WebRTC disabled
        )

        # Hardware configuration
        hardware_config = HardwareConfig(
            hardware_concurrency=random.choice(profile["hardware_concurrency"]),
            device_memory=random.choice(profile["device_memory"]),
            max_touch_points=10 if is_mobile else 0,
            screen_resolution=random.choice(profile["screen_resolution"]),
            color_depth=random.choice([24, 32]),
            pixel_ratio=random.choice([1.0, 1.5, 2.0, 3.0])
        )

        # Navigator properties
        languages = self._generate_language_list()
        navigator_props = NavigatorProperties(
            platform=profile["platform"],
            vendor=profile["vendor"],
            language=languages[0],
            languages=languages,
            do_not_track=random.choice(["1", "unspecified", None])
        )

        # Generate the user agent
        navigator_props.user_agent = self._generate_user_agent(profile, navigator_props)

        # Font list
        font_list = self._generate_font_list(profile["platform"])

        # WebGL
        webgl_vendor = profile["vendor"]
        webgl_renderer = random.choice(profile["renderer"])

        # Audio context
        audio_base_latency = random.uniform(0.00, 0.02)
        audio_output_latency = random.uniform(0.00, 0.05)
        audio_sample_rate = random.choice([44100, 48000])

        # Timezone - kept consistent with the proxy location
        timezone, offset = self._get_timezone_for_location(proxy_location)

        # Plugins (desktop only)
        plugins = []
        if not is_mobile:
            plugins = self._generate_plugins()

        # Generate rotation seed for account-bound fingerprints
        rotation_seed = None
        if account_id:
            rotation_seed = hashlib.sha256(f"{account_id}:{datetime.now().strftime('%Y%m')}".encode()).hexdigest()[:16]

        # Create static components for persistence
        static_components = StaticComponents(
            device_type="mobile" if is_mobile else "desktop",
            os_family=self._get_os_family(profile["platform"]),
            browser_family="chromium" if "Chrome" in navigator_props.user_agent else "safari",
            gpu_vendor=webgl_vendor,
            gpu_model=webgl_renderer,
            cpu_architecture="arm64" if "arm" in profile["platform"].lower() else "x86_64",
            base_fonts=font_list[:10],  # Store base fonts
            base_resolution=hardware_config.screen_resolution,
            base_timezone=timezone
        )

        fingerprint = BrowserFingerprint(
            fingerprint_id=str(uuid.uuid4()),
            canvas_noise=canvas_noise,
            webrtc_config=webrtc_config,
            font_list=font_list,
            hardware_config=hardware_config,
            navigator_props=navigator_props,
            webgl_vendor=webgl_vendor,
            webgl_renderer=webgl_renderer,
            audio_context_base_latency=audio_base_latency,
            audio_context_output_latency=audio_output_latency,
            audio_context_sample_rate=audio_sample_rate,
            timezone=timezone,
            timezone_offset=offset,
            plugins=plugins,
            created_at=datetime.now(),
            # New persistence fields
            static_components=static_components if account_id else None,
            rotation_seed=rotation_seed,
            account_bound=bool(account_id)
        )

        # Persist via the repository
        self.repository.save(fingerprint)

        # Cache for fast access
        self.fingerprint_cache[fingerprint.fingerprint_id] = fingerprint

        logger.info(f"Generated new fingerprint: {fingerprint.fingerprint_id}")
        return fingerprint

    def _get_os_family(self, platform: str) -> str:
        """Determine OS family from platform string"""
        # Note: Android Chrome reports platform "Linux armv8l", so it is
        # classified as "linux" here
        if "Win" in platform:
            return "windows"
        elif "Mac" in platform or "iPhone" in platform:
            return "macos" if "Mac" in platform else "ios"
        elif "Android" in platform or "Linux" in platform:
            return "android" if "Android" in platform else "linux"
        return "unknown"

    def _generate_language_list(self) -> List[str]:
        """Generates a realistic language list"""
        language_sets = [
            ["de-DE", "de", "en-US", "en"],
            ["en-US", "en"],
            ["en-GB", "en-US", "en"],
            ["fr-FR", "fr", "en-US", "en"],
            ["es-ES", "es", "en-US", "en"],
            ["de-DE", "de", "en-GB", "en"]
        ]
        return random.choice(language_sets)

    def _generate_user_agent(self, profile: Dict, nav_props: NavigatorProperties) -> str:
        """Generates a realistic user agent"""
        chrome_version = random.randint(96, 120)

        if "Windows" in profile["name"]:
            return f"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{chrome_version}.0.0.0 Safari/537.36"
        elif "Mac" in profile["name"]:
            return f"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{chrome_version}.0.0.0 Safari/537.36"
        elif "Android" in profile["name"]:
            android_version = random.randint(10, 13)
            return f"Mozilla/5.0 (Linux; Android {android_version}; SM-G991B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{chrome_version}.0.0.0 Mobile Safari/537.36"
        elif "iOS" in profile["name"]:
            ios_version = random.randint(14, 16)
            return f"Mozilla/5.0 (iPhone; CPU iPhone OS {ios_version}_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/{ios_version}.0 Mobile/15E148 Safari/604.1"

        return "Mozilla/5.0 (compatible; Unknown)"

    def _generate_font_list(self, platform: str) -> List[str]:
        """Generates a platform-specific font list"""
        if "Win" in platform:
            fonts = self.profiles.COMMON_FONTS["windows"]
        elif "Mac" in platform or "iPhone" in platform:
            fonts = self.profiles.COMMON_FONTS["mac"]
        else:
            fonts = self.profiles.COMMON_FONTS["linux"]

        # Random selection (60-90% of the fonts)
        num_fonts = random.randint(int(len(fonts) * 0.6), int(len(fonts) * 0.9))
        return random.sample(fonts, num_fonts)

    def _generate_plugins(self) -> List[Dict[str, str]]:
        """Generates the plugin list for desktop"""
        all_plugins = [
            {"name": "Chrome PDF Plugin", "filename": "internal-pdf-viewer"},
            {"name": "Chrome PDF Viewer", "filename": "mhjfbmdgcfjbbpaeojofohoefgiehjai"},
            {"name": "Native Client", "filename": "internal-nacl-plugin"},
            {"name": "Shockwave Flash", "filename": "pepperflashplugin.dll"}
        ]

        # 0-3 plugins
        num_plugins = random.randint(0, 3)
        return random.sample(all_plugins, num_plugins)

    def rotate_fingerprint(self, current: BrowserFingerprint,
                           rotation_strategy: str = "gradual") -> BrowserFingerprint:
        """Rotates a fingerprint"""
        if rotation_strategy == "complete":
            # Completely new fingerprint
            return self.generate_fingerprint()

        elif rotation_strategy == "minimal":
            # Only small changes
            new_fingerprint = BrowserFingerprint(
                fingerprint_id=str(uuid.uuid4()),
                canvas_noise=CanvasNoise(
                    noise_level=current.canvas_noise.noise_level + random.uniform(-0.01, 0.01),
                    seed=random.randint(1000, 9999),
                    algorithm=current.canvas_noise.algorithm
                ),
                webrtc_config=current.webrtc_config,
                font_list=current.font_list,
                hardware_config=current.hardware_config,
                navigator_props=current.navigator_props,
                webgl_vendor=current.webgl_vendor,
                webgl_renderer=current.webgl_renderer,
                audio_context_base_latency=current.audio_context_base_latency + random.uniform(-0.001, 0.001),
                audio_context_output_latency=current.audio_context_output_latency,
                audio_context_sample_rate=current.audio_context_sample_rate,
                timezone=current.timezone,
                timezone_offset=current.timezone_offset,
                plugins=current.plugins,
                created_at=datetime.now(),
                last_rotated=datetime.now()
            )

        else:  # gradual
            # Moderate changes
            new_fingerprint = BrowserFingerprint(
                fingerprint_id=str(uuid.uuid4()),
                canvas_noise=CanvasNoise(
                    noise_level=random.uniform(0.01, 0.05),
                    seed=random.randint(1000, 9999),
                    algorithm=random.choice(["gaussian", "uniform", "perlin"])
                ),
                webrtc_config=WebRTCConfig(
                    enabled=current.webrtc_config.enabled,
                    ice_servers=current.webrtc_config.ice_servers,
                    local_ip_mask=f"10.0.{random.randint(0, 255)}.x",
                    disable_webrtc=current.webrtc_config.disable_webrtc
                ),
                font_list=self._slightly_modify_fonts(current.font_list),
                hardware_config=current.hardware_config,
                navigator_props=current.navigator_props,
                webgl_vendor=current.webgl_vendor,
                webgl_renderer=current.webgl_renderer,
                audio_context_base_latency=random.uniform(0.00, 0.02),
                audio_context_output_latency=random.uniform(0.00, 0.05),
                audio_context_sample_rate=current.audio_context_sample_rate,
                timezone=current.timezone,
                timezone_offset=current.timezone_offset,
                plugins=current.plugins,
                created_at=current.created_at,
                last_rotated=datetime.now()
            )

        # Update last_rotated on the old fingerprint
        self.repository.update_last_rotated(current.fingerprint_id, datetime.now())

        # Persist the new fingerprint
        self.repository.save(new_fingerprint)
        self.fingerprint_cache[new_fingerprint.fingerprint_id] = new_fingerprint

        logger.info(f"Rotated fingerprint {current.fingerprint_id} -> {new_fingerprint.fingerprint_id} (strategy: {rotation_strategy})")
        return new_fingerprint

    def _slightly_modify_fonts(self, fonts: List[str]) -> List[str]:
        """Slightly modifies the font list"""
        new_fonts = fonts.copy()

        # Add or remove 1-2 fonts
        if random.random() > 0.5 and len(new_fonts) > 5:
            # Remove 1-2 fonts
            for _ in range(random.randint(1, 2)):
                if new_fonts:
                    new_fonts.pop(random.randint(0, len(new_fonts) - 1))
        else:
            # Add 1-2 fonts
            additional_fonts = ["Consolas", "Monaco", "Menlo", "Ubuntu Mono"]
            for font in random.sample(additional_fonts, min(2, len(additional_fonts))):
                if font not in new_fonts:
                    new_fonts.append(font)

        return new_fonts

    def validate_fingerprint(self, fingerprint: BrowserFingerprint) -> Tuple[bool, List[str]]:
        """Validates a fingerprint"""
        issues = []

        # Hardware consistency
        if fingerprint.hardware_config.hardware_concurrency > fingerprint.hardware_config.device_memory * 2:
            issues.append("Hardware concurrency too high for device memory")

        # Platform consistency
        if "Win" in fingerprint.navigator_props.platform and "Mac" in fingerprint.webgl_renderer:
            issues.append("Windows platform with Mac renderer is inconsistent")

        # Mobile consistency
        is_mobile = "iPhone" in fingerprint.navigator_props.platform or "Android" in fingerprint.navigator_props.user_agent
        if is_mobile and fingerprint.hardware_config.max_touch_points == 0:
            issues.append("Mobile device without touch points")

        # Font consistency
        if len(fingerprint.font_list) < 5:
            issues.append("Too few fonts for a realistic profile")

        # WebRTC consistency
        if fingerprint.webrtc_config.disable_webrtc and fingerprint.webrtc_config.ice_servers:
            issues.append("WebRTC disabled but ICE servers configured")

        return len(issues) == 0, issues

    def save_fingerprint(self, fingerprint: BrowserFingerprint) -> None:
        """Stores a fingerprint"""
        self.repository.save(fingerprint)
        self.fingerprint_cache[fingerprint.fingerprint_id] = fingerprint

    def load_fingerprint(self, fingerprint_id: str) -> Optional[BrowserFingerprint]:
        """Loads a fingerprint"""
        # Check cache first
        if fingerprint_id in self.fingerprint_cache:
            return self.fingerprint_cache[fingerprint_id]

        # Load from repository
        fingerprint = self.repository.find_by_id(fingerprint_id)
        if fingerprint:
            self.fingerprint_cache[fingerprint_id] = fingerprint

        return fingerprint

    def get_fingerprint_pool(self, count: int = 10,
                             platform: Optional[str] = None) -> List[BrowserFingerprint]:
        """Fetches a pool of fingerprints"""
        # Fetch existing fingerprints
        existing = self.repository.get_random_fingerprints(count // 2)

        # Generate new ones for diversity
        new_count = count - len(existing)
        new_fingerprints = []
        for _ in range(new_count):
            fp = self.generate_fingerprint(platform=platform)
            new_fingerprints.append(fp)

        return existing + new_fingerprints

    def _get_timezone_for_location(self, proxy_location: Optional[str] = None) -> Tuple[str, int]:
        """Returns a timezone based on the proxy location"""
        # Location-based timezones; offsets follow the JavaScript
        # Date.getTimezoneOffset() sign convention (UTC+1 -> -60)
        location_timezones = {
            # Germany
            "DE": ("Europe/Berlin", -60),  # UTC+1
            "de": ("Europe/Berlin", -60),
            "germany": ("Europe/Berlin", -60),
            "berlin": ("Europe/Berlin", -60),
            "frankfurt": ("Europe/Berlin", -60),
            "munich": ("Europe/Berlin", -60),

            # UK
            "GB": ("Europe/London", 0),  # UTC+0
            "gb": ("Europe/London", 0),
            "uk": ("Europe/London", 0),
            "london": ("Europe/London", 0),

            # France
            "FR": ("Europe/Paris", -60),  # UTC+1
            "fr": ("Europe/Paris", -60),
            "france": ("Europe/Paris", -60),
            "paris": ("Europe/Paris", -60),

            # US East Coast
            "US-NY": ("America/New_York", 300),  # UTC-5
            "us-east": ("America/New_York", 300),
            "new york": ("America/New_York", 300),
            "newyork": ("America/New_York", 300),

            # US West Coast
            "US-CA": ("America/Los_Angeles", 480),  # UTC-8
            "us-west": ("America/Los_Angeles", 480),
            "los angeles": ("America/Los_Angeles", 480),
            "california": ("America/Los_Angeles", 480),

            # Spain
            "ES": ("Europe/Madrid", -60),  # UTC+1
            "es": ("Europe/Madrid", -60),
            "spain": ("Europe/Madrid", -60),
            "madrid": ("Europe/Madrid", -60),

            # Italy
            "IT": ("Europe/Rome", -60),  # UTC+1
            "it": ("Europe/Rome", -60),
            "italy": ("Europe/Rome", -60),
            "rome": ("Europe/Rome", -60),

            # Netherlands
            "NL": ("Europe/Amsterdam", -60),  # UTC+1
            "nl": ("Europe/Amsterdam", -60),
            "netherlands": ("Europe/Amsterdam", -60),
            "amsterdam": ("Europe/Amsterdam", -60),

            # Canada
            "CA": ("America/Toronto", 300),  # UTC-5
            "ca": ("America/Toronto", 300),
            "canada": ("America/Toronto", 300),
            "toronto": ("America/Toronto", 300),

            # Australia
            "AU": ("Australia/Sydney", -660),  # UTC+11
            "au": ("Australia/Sydney", -660),
            "australia": ("Australia/Sydney", -660),
            "sydney": ("Australia/Sydney", -660),
        }

        # If a location is given, use the matching timezone
        if proxy_location:
            # Normalize the location (lowercase, strip whitespace)
            normalized_location = proxy_location.lower().strip()

            # Look it up in the location map
            for key, timezone_data in location_timezones.items():
                if key.lower() in normalized_location or normalized_location in key.lower():
                    logger.info(f"Using timezone {timezone_data[0]} for location '{proxy_location}'")
                    return timezone_data

        # Fallback: random timezone from commonly used ones
        common_timezones = [
            ("Europe/Berlin", -60),
            ("Europe/London", 0),
            ("Europe/Paris", -60),
            ("America/New_York", 300),
            ("America/Los_Angeles", 480),
            ("Europe/Madrid", -60),
            ("America/Toronto", 300)
        ]

        timezone_data = random.choice(common_timezones)
        logger.info(f"Using random timezone {timezone_data[0]} (no location match for '{proxy_location}')")
        return timezone_data

    def apply_fingerprint(self, browser_context: Any, fingerprint: BrowserFingerprint) -> None:
        """Applies the fingerprint to a browser context"""
        # This method performs JavaScript injection and browser configuration.
        # Example implementation for Playwright:

        if hasattr(browser_context, 'add_init_script'):
            # Canvas noise injection
            canvas_script = self._generate_canvas_noise_script(fingerprint.canvas_noise)
            browser_context.add_init_script(canvas_script)

            # WebRTC protection
            if fingerprint.webrtc_config.disable_webrtc:
                webrtc_script = self._generate_webrtc_block_script()
                browser_context.add_init_script(webrtc_script)

            # Navigator override
            navigator_script = self._generate_navigator_override_script(fingerprint.navigator_props)
            browser_context.add_init_script(navigator_script)

            # Hardware override
            hardware_script = self._generate_hardware_override_script(fingerprint.hardware_config)
            browser_context.add_init_script(hardware_script)

            logger.info(f"Applied fingerprint {fingerprint.fingerprint_id} to browser context")

    def _generate_canvas_noise_script(self, canvas_noise: CanvasNoise) -> str:
        """Generates the canvas noise injection script"""
        return f"""
        (function() {{
            const originalGetImageData = CanvasRenderingContext2D.prototype.getImageData;
            const noiseLevel = {canvas_noise.noise_level};
            const seed = {canvas_noise.seed};

            CanvasRenderingContext2D.prototype.getImageData = function() {{
                const imageData = originalGetImageData.apply(this, arguments);

                // Add noise to the RGB channels of the image data
                for (let i = 0; i < imageData.data.length; i += 4) {{
                    imageData.data[i] += Math.random() * noiseLevel * 255;
                    imageData.data[i+1] += Math.random() * noiseLevel * 255;
                    imageData.data[i+2] += Math.random() * noiseLevel * 255;
                }}

                return imageData;
            }};
        }})();
        """

    def _generate_webrtc_block_script(self) -> str:
        """Generates an extended WebRTC blocking script with IP leak prevention"""
        return """
        (function() {
            // Extended WebRTC leak prevention

            // 1. Basic WebRTC blocking
            const OriginalRTCPeerConnection = window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection;

            if (OriginalRTCPeerConnection) {
                // Override RTCPeerConnection
                window.RTCPeerConnection = function(config, constraints) {
                    // Filter ICE servers if desired
                    if (config && config.iceServers) {
                        config.iceServers = config.iceServers.filter(server => {
                            // Remove STUN servers that could leak the IP
                            if (server.urls) {
                                const urls = Array.isArray(server.urls) ? server.urls : [server.urls];
                                return urls.every(url => !url.includes('stun:'));
                            }
                            return true;
                        });
                    }

                    const pc = new OriginalRTCPeerConnection(config, constraints);

                    // Override onicecandidate
                    // let, not const: the setter below reassigns this binding
                    let originalOnIceCandidate = pc.onicecandidate;
                    Object.defineProperty(pc, 'onicecandidate', {
                        get: function() {
                            return originalOnIceCandidate;
                        },
                        set: function(func) {
                            originalOnIceCandidate = function(event) {
                                if (event.candidate) {
                                    // Filter local IP addresses
                                    const candidateStr = event.candidate.candidate;

                                    // Regex for private IPs (closing paren added; the
                                    // original pattern did not compile)
                                    const privateIPRegex = /(10\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|172\\.(1[6-9]|2\\d|3[01])\\.\\d{1,3}\\.\\d{1,3}|192\\.168\\.\\d{1,3}\\.\\d{1,3}|[a-f0-9:]+::)/gi;

                                    // If a private IP is found, rewrite the candidate.
                                    // Best effort: candidate is read-only in many
                                    // browsers, so this assignment may be a no-op.
                                    if (privateIPRegex.test(candidateStr)) {
                                        const modifiedCandidate = candidateStr.replace(privateIPRegex, '10.0.0.x');
                                        event.candidate.candidate = modifiedCandidate;
                                    }
                                }

                                if (func) {
                                    func(event);
                                }
                            };
                        }
                    });

                    // Override createDataChannel
                    const originalCreateDataChannel = pc.createDataChannel;
                    pc.createDataChannel = function(label, options) {
                        // Log for debugging but do not block
                        console.debug('DataChannel created:', label);
                        return originalCreateDataChannel.call(this, label, options);
                    };

                    // Override getStats for fingerprinting protection
                    const originalGetStats = pc.getStats;
                    pc.getStats = function() {
                        return originalGetStats.call(this).then(stats => {
                            // Modify stats to make fingerprinting harder
                            stats.forEach(stat => {
                                if (stat.type === 'candidate-pair') {
                                    // Hide the real RTT
                                    if (stat.currentRoundTripTime) {
                                        stat.currentRoundTripTime = Math.random() * 0.1 + 0.05;
                                    }
                                }
                            });
                            return stats;
                        });
                    };

                    return pc;
                };

                // Copy static properties
                window.RTCPeerConnection.prototype = OriginalRTCPeerConnection.prototype;
                window.RTCPeerConnection.generateCertificate = OriginalRTCPeerConnection.generateCertificate;

                // Aliases for other browsers
                if (window.webkitRTCPeerConnection) {
                    window.webkitRTCPeerConnection = window.RTCPeerConnection;
                }
                if (window.mozRTCPeerConnection) {
                    window.mozRTCPeerConnection = window.RTCPeerConnection;
                }
            }

            // 2. MediaDevices protection
            if (navigator.mediaDevices && navigator.mediaDevices.enumerateDevices) {
                const originalEnumerateDevices = navigator.mediaDevices.enumerateDevices;
                navigator.mediaDevices.enumerateDevices = function() {
                    return originalEnumerateDevices.call(this).then(devices => {
                        // Randomize device IDs
                        return devices.map(device => {
                            return {
                                ...device,
                                deviceId: device.deviceId ? btoa(Math.random().toString()).substring(0, 20) : '',
                                groupId: device.groupId ? btoa(Math.random().toString()).substring(0, 20) : ''
                            };
                        });
                    });
                };
            }

            // 3. Block WebRTC entirely if desired
            if (window.__BLOCK_WEBRTC_COMPLETELY__) {
                delete window.RTCPeerConnection;
                delete window.webkitRTCPeerConnection;
                delete window.mozRTCPeerConnection;
                delete window.RTCSessionDescription;
                delete window.RTCIceCandidate;
                delete window.MediaStream;
                delete window.MediaStreamTrack;
            }
        })();
        """

    def _generate_navigator_override_script(self, nav_props: NavigatorProperties) -> str:
        """Generates the navigator override script"""
        return f"""
        (function() {{
            Object.defineProperty(navigator, 'platform', {{
                get: () => '{nav_props.platform}'
            }});
            Object.defineProperty(navigator, 'vendor', {{
                get: () => '{nav_props.vendor}'
            }});
            Object.defineProperty(navigator, 'language', {{
                get: () => '{nav_props.language}'
            }});
            Object.defineProperty(navigator, 'languages', {{
                get: () => {json.dumps(nav_props.languages)}
            }});
        }})();
        """

    def _generate_hardware_override_script(self, hw_config: HardwareConfig) -> str:
        """Generates the hardware override script"""
        return f"""
        (function() {{
            Object.defineProperty(navigator, 'hardwareConcurrency', {{
                get: () => {hw_config.hardware_concurrency}
            }});
            Object.defineProperty(navigator, 'deviceMemory', {{
                get: () => {hw_config.device_memory}
            }});
            Object.defineProperty(navigator, 'maxTouchPoints', {{
                get: () => {hw_config.max_touch_points}
            }});
        }})();
        """

    def get_fingerprint_score(self, fingerprint: BrowserFingerprint) -> float:
        """Scores fingerprint quality"""
        score = 1.0

        # Validation
        valid, issues = self.validate_fingerprint(fingerprint)
        if not valid:
            score -= 0.1 * len(issues)

        # Age of the fingerprint (check the larger threshold first;
        # the original branch order made the 30-day penalty unreachable)
        age = datetime.now() - fingerprint.created_at
        if age > timedelta(days=30):
            score -= 0.4
        elif age > timedelta(days=7):
            score -= 0.2

        # Rotation
        if fingerprint.last_rotated:
            time_since_rotation = datetime.now() - fingerprint.last_rotated
            if time_since_rotation < timedelta(hours=1):
                score -= 0.3  # Rotation too frequent

        # Font diversity
        if len(fingerprint.font_list) < 10:
            score -= 0.1
        elif len(fingerprint.font_list) > 50:
            score -= 0.1  # Too many fonts is unrealistic

        return max(0.0, min(1.0, score))

    def create_account_fingerprint(self, account_id: str,
                                   profile_type: Optional[str] = None,
                                   platform: Optional[str] = None,
                                   proxy_location: Optional[str] = None) -> BrowserFingerprint:
        """Creates a new fingerprint bound to a specific account"""
        fingerprint = self.generate_fingerprint(
            profile_type=profile_type,
            platform=platform,
            proxy_location=proxy_location,
            account_id=account_id
        )

        # Link fingerprint to account
        self.repository.link_to_account(fingerprint.fingerprint_id, account_id)

        return fingerprint

    def get_account_fingerprint(self, account_id: str) -> Optional[BrowserFingerprint]:
        """Get the primary fingerprint for an account"""
        try:
            fingerprint_id = self.repository.get_primary_fingerprint_for_account(account_id)
            if fingerprint_id:
                logger.debug(f"Found fingerprint {fingerprint_id} for account {account_id}")
                return self.load_fingerprint(fingerprint_id)
            else:
                logger.debug(f"No fingerprint found for account {account_id}")
                return None
        except Exception as e:
            logger.error(f"Error getting fingerprint for account {account_id}: {e}")
            return None

    def load_for_session(self, fingerprint_id: str,
                         date_str: Optional[str] = None) -> BrowserFingerprint:
        """Load fingerprint for a session with deterministic daily variations"""
        try:
            fingerprint = self.load_fingerprint(fingerprint_id)
            if not fingerprint:
                logger.error(f"Fingerprint {fingerprint_id} not found in repository")
                raise ValueError(f"Fingerprint {fingerprint_id} not found")

            logger.debug(f"Loading fingerprint {fingerprint_id} for session")
        except Exception as e:
            logger.error(f"Error loading fingerprint {fingerprint_id}: {e}")
            raise

        if not fingerprint.rotation_seed:
            # No seed means no deterministic variation
            return fingerprint

        # Apply deterministic variations based on date
        if date_str is None:
            date_str = datetime.now().strftime("%Y-%m-%d")

        # Create a copy with daily variations
        session_fingerprint = BrowserFingerprint.from_dict(fingerprint.to_dict())

        # Apply deterministic noise to canvas seed
        hash_input = f"{fingerprint.rotation_seed}:canvas:{date_str}"
        canvas_seed = int(hashlib.sha256(hash_input.encode()).hexdigest()[:8], 16) % 1000000
        session_fingerprint.canvas_noise.seed = canvas_seed

        # Slight audio variations
        hash_input = f"{fingerprint.rotation_seed}:audio:{date_str}"
        audio_var = int(hashlib.sha256(hash_input.encode()).hexdigest()[:8], 16) / 0xFFFFFFFF
        session_fingerprint.audio_context_base_latency += (audio_var - 0.5) * 0.002

        return session_fingerprint

    def update_fingerprint_stats(self, fingerprint_id: str, account_id: str, success: bool) -> None:
        """Update fingerprint usage statistics for an account"""
        self.repository.update_fingerprint_stats(fingerprint_id, account_id, success)

    def cleanup_old_fingerprints(self, older_than: datetime) -> int:
        """Purges old fingerprints - dummy implementation"""
        # Removed functionality, just return 0
        return 0

    def detect_fingerprinting(self, page_content: str) -> Dict[str, Any]:
        """Detects fingerprinting attempts - dummy implementation"""
        # Removed functionality, return empty detection
        return {
            "canvas": False,
            "webrtc": False,
            "fonts": False,
            "audio": False,
            "webgl": False,
            "hardware": False,
            "techniques": [],
            "total_techniques": 0,
            "risk_level": "none"
        }

    # NOTE: this redefinition shadows the earlier get_fingerprint_pool above;
    # at class creation time the simple version below wins.
    def get_fingerprint_pool(self, count: int = 10,
                             platform: Optional[str] = None) -> List[BrowserFingerprint]:
        """Fetches a pool of fingerprints - simple implementation"""
        # Just generate new fingerprints
        fingerprints = []
        for _ in range(count):
            fp = self.generate_fingerprint(platform=platform)
            fingerprints.append(fp)
        return fingerprints
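An end-to-end sketch (not part of the commit): generating an account-bound fingerprint and applying it to a Playwright context. The Playwright setup and the `"desktop"` profile_type value are assumptions; apply_fingerprint only requires that the context supports add_init_script, which Playwright contexts do.

    from playwright.sync_api import sync_playwright

    service = AdvancedFingerprintService()
    fp = service.create_account_fingerprint(
        account_id="acc-42",           # hypothetical account ID
        profile_type="desktop",        # anything other than "mobile" selects desktop
        proxy_location="berlin",
    )

    ok, issues = service.validate_fingerprint(fp)
    if not ok:
        print("Fingerprint issues:", issues)

    with sync_playwright() as p:
        browser = p.chromium.launch()
        context = browser.new_context(user_agent=fp.navigator_props.user_agent)
        service.apply_fingerprint(context, fp)  # injects canvas/WebRTC/navigator overrides
        page = context.new_page()
        page.goto("https://example.com")
        browser.close()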
229
infrastructure/services/browser_protection_service.py
Normale Datei
229
infrastructure/services/browser_protection_service.py
Normale Datei
@ -0,0 +1,229 @@
|
||||
"""Service for applying browser protection during automation."""
|
||||
from typing import Optional
|
||||
from playwright.sync_api import Page
|
||||
|
||||
from domain.value_objects.browser_protection_style import BrowserProtectionStyle, ProtectionLevel
|
||||
|
||||
|
||||
class BrowserProtectionService:
|
||||
"""Handles browser protection during automation to prevent user interference."""
|
||||
|
||||
SHIELD_ELEMENT_ID = "accountforge-shield"
|
||||
PROTECTION_STYLE_ID = "accountforge-protection-styles"
|
||||
|
||||
def protect_browser(self, page: Page, style: BrowserProtectionStyle) -> None:
|
||||
"""Apply protection to the browser page based on the configured style."""
|
||||
if style.level == ProtectionLevel.NONE:
|
||||
return
|
||||
|
||||
# Generate and inject protection script
|
||||
script = self._generate_protection_script(style)
|
||||
page.evaluate(script)
|
||||
|
||||
# Speichere Script für Wiederanwendung
|
||||
escaped_script = script.replace('`', '\\`')
|
||||
page.evaluate(f"""
|
||||
window.__accountforge_protection = `{escaped_script}`;
|
||||
""")
|
||||
|
||||
def remove_protection(self, page: Page) -> None:
|
||||
"""Remove all protection from the browser page."""
|
||||
page.evaluate(f"""
|
||||
// Remove shield element
|
||||
const shield = document.getElementById('{self.SHIELD_ELEMENT_ID}');
|
||||
if (shield) shield.remove();
|
||||
|
||||
// Remove style element
|
||||
const styles = document.getElementById('{self.PROTECTION_STYLE_ID}');
|
||||
if (styles) styles.remove();
|
||||
|
||||
                // Note: Event listeners will be removed on page navigation
        """)

    def _generate_protection_script(self, style: BrowserProtectionStyle) -> str:
        """Generate JavaScript code for protection based on style configuration."""
        script_parts = []

        # Create shield overlay
        if style.level in [ProtectionLevel.MEDIUM, ProtectionLevel.STRONG]:
            script_parts.append(self._create_shield_script(style))

        # Add visual effects
        if style.show_border or (style.level == ProtectionLevel.LIGHT):
            script_parts.append(self._create_visual_effects_script(style))

        # Block interactions
        if style.level in [ProtectionLevel.MEDIUM, ProtectionLevel.STRONG]:
            script_parts.append(self._create_interaction_blocker_script())

        return "\n".join(script_parts)

    def _create_shield_script(self, style: BrowserProtectionStyle) -> str:
        """Create the main shield overlay element."""
        badge_positions = {
            "top-left": "top: 20px; left: 20px;",
            "top-right": "top: 20px; right: 20px;",
            "bottom-left": "bottom: 20px; left: 20px;",
            "bottom-right": "bottom: 20px; right: 20px;"
        }
        badge_position_css = badge_positions.get(style.badge_position, badge_positions["top-right"])

        blur_css = "backdrop-filter: blur(2px);" if style.blur_effect else ""

        return f"""
            // Create shield overlay
            const shield = document.createElement('div');
            shield.id = '{self.SHIELD_ELEMENT_ID}';
            shield.style.cssText = `
                position: fixed;
                top: 0; left: 0;
                width: 100vw; height: 100vh;
                background: {style.get_overlay_color()};
                {blur_css}
                z-index: 2147483647;
                cursor: not-allowed;
                user-select: none;
                pointer-events: all;
            `;

            // Add info badge if configured
            if ({str(style.show_badge).lower()}) {{
                const badge = document.createElement('div');
                badge.style.cssText = `
                    position: absolute;
                    {badge_position_css}
                    background: rgba(220, 38, 38, 0.95);
                    color: white;
                    padding: 12px 24px;
                    border-radius: 8px;
                    font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
                    font-size: 14px;
                    font-weight: 500;
                    box-shadow: 0 4px 12px rgba(0,0,0,0.3);
                    animation: fadeIn 0.3s ease-in;
                `;
                badge.textContent = '{style.badge_text}';
                shield.appendChild(badge);
            }}

            document.documentElement.appendChild(shield);
        """

    def _create_visual_effects_script(self, style: BrowserProtectionStyle) -> str:
        """Create visual effects like animated border."""
        return f"""
            // Add styles for visual effects
            const styleElement = document.createElement('style');
            styleElement.id = '{self.PROTECTION_STYLE_ID}';
            styleElement.textContent = `
                @keyframes pulse {{
                    0% {{ opacity: 0.4; }}
                    50% {{ opacity: 0.8; }}
                    100% {{ opacity: 0.4; }}
                }}

                @keyframes fadeIn {{
                    from {{ opacity: 0; transform: translateY(-10px); }}
                    to {{ opacity: 1; transform: translateY(0); }}
                }}

                {self._get_border_css(style) if style.show_border else ''}
            `;
            document.head.appendChild(styleElement);

            {self._get_border_element_script(style) if style.show_border else ''}
        """

    def _get_border_css(self, style: BrowserProtectionStyle) -> str:
        """Get CSS for animated border."""
        return f"""
            #accountforge-border {{
                position: fixed;
                top: 0; left: 0;
                width: 100%; height: 100%;
                border: 3px solid {style.border_color};
                box-shadow: inset 0 0 30px rgba(255, 0, 0, 0.2);
                pointer-events: none;
                animation: pulse 2s infinite;
                z-index: 2147483646;
            }}
        """

    def _get_border_element_script(self, style: BrowserProtectionStyle) -> str:
        """Get script to create border element."""
        return f"""
            // Create animated border
            const border = document.createElement('div');
            border.id = 'accountforge-border';
            document.body.appendChild(border);
        """

    def _create_interaction_blocker_script(self) -> str:
        """Create script to block all user interactions."""
        return f"""
            // Prevent the shield itself from being removed
            (function() {{
                // Check whether the shield already exists
                if (document.getElementById('{self.SHIELD_ELEMENT_ID}')) {{
                    return;
                }}

                // Block all interaction events
                const blockedEvents = [
                    'click', 'dblclick', 'mousedown', 'mouseup', 'mousemove',
                    'keydown', 'keypress', 'keyup',
                    'touchstart', 'touchend', 'touchmove',
                    'contextmenu', 'wheel', 'scroll', 'input', 'change', 'focus'
                ];

                const eventBlocker = function(e) {{
                    // Check whether the event originates from the shield itself
                    const shield = document.getElementById('{self.SHIELD_ELEMENT_ID}');
                    if (shield && (e.target === shield || shield.contains(e.target))) {{
                        return;
                    }}

                    e.preventDefault();
                    e.stopPropagation();
                    e.stopImmediatePropagation();
                    return false;
                }};

                // Add event listeners with capture phase
                blockedEvents.forEach(eventType => {{
                    document.addEventListener(eventType, eventBlocker, true);
                    window.addEventListener(eventType, eventBlocker, true);
                }});

                // Disable text selection
                document.body.style.userSelect = 'none';
                document.body.style.webkitUserSelect = 'none';
                document.body.style.mozUserSelect = 'none';
                document.body.style.msUserSelect = 'none';

                // Disable all input fields
                const disableInputs = () => {{
                    document.querySelectorAll('input, textarea, select, button').forEach(el => {{
                        el.style.pointerEvents = 'none';
                        el.setAttribute('disabled', 'true');
                        el.setAttribute('readonly', 'true');
                    }});
                }};

                // Initial disable
                disableInputs();

                // Re-disable on DOM changes
                const inputObserver = new MutationObserver(disableInputs);
                inputObserver.observe(document.body, {{ childList: true, subtree: true }});

                // Disable drag
                document.ondragstart = function() {{ return false; }};
                document.onselectstart = function() {{ return false; }};

                // Prevent focus on any element
                document.addEventListener('focusin', function(e) {{
                    e.target.blur();
                }}, true);
            }})();
        """
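For orientation, a minimal sketch of how these script builders compose. The BrowserProtectionStyle constructor arguments shown here are assumptions (only the attributes read above are known from this file), and `service` stands for an instance of the surrounding protection service class:

    # Hypothetical sketch; constructor fields beyond those read above are assumed
    style = BrowserProtectionStyle(level=ProtectionLevel.STRONG, show_border=True, show_badge=True)
    script = service._generate_protection_script(style)
    # STRONG level => shield overlay + interaction blocker; show_border adds the pulsing frame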
27
infrastructure/services/fingerprint/__init__.py
Normal file
@@ -0,0 +1,27 @@
"""
Fingerprint services package.

This package contains modular services for browser fingerprinting,
split from the original AdvancedFingerprintService for better
maintainability and testability.
"""

from .fingerprint_profile_service import FingerprintProfileService
from .fingerprint_generator_service import FingerprintGeneratorService
from .fingerprint_rotation_service import FingerprintRotationService
from .fingerprint_validation_service import FingerprintValidationService
from .browser_injection_service import BrowserInjectionService
from .timezone_location_service import TimezoneLocationService
from .account_fingerprint_service import AccountFingerprintService
from .fingerprint_persistence_service import FingerprintPersistenceService

__all__ = [
    'FingerprintProfileService',
    'FingerprintGeneratorService',
    'FingerprintRotationService',
    'FingerprintValidationService',
    'BrowserInjectionService',
    'TimezoneLocationService',
    'AccountFingerprintService',
    'FingerprintPersistenceService'
]
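Downstream code can then import the services through this package facade, e.g.:

    from infrastructure.services.fingerprint import (
        FingerprintGeneratorService,
        BrowserInjectionService,
    )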
217
infrastructure/services/fingerprint/account_fingerprint_service.py
Normal file
@@ -0,0 +1,217 @@
"""
Account Fingerprint Service - Manages account-bound fingerprints.
"""

import hashlib
import random
from typing import Optional, Dict, Any
from datetime import datetime, timedelta

from domain.entities.browser_fingerprint import BrowserFingerprint
from .fingerprint_generator_service import FingerprintGeneratorService
from .fingerprint_rotation_service import FingerprintRotationService, RotationStrategy


class AccountFingerprintService:
    """Service for managing account-bound fingerprints."""

    def __init__(self,
                 generator_service: Optional[FingerprintGeneratorService] = None,
                 rotation_service: Optional[FingerprintRotationService] = None):
        self.generator_service = generator_service or FingerprintGeneratorService()
        self.rotation_service = rotation_service or FingerprintRotationService()

    def generate_account_fingerprint(self,
                                     account_id: str,
                                     platform: str,
                                     proxy_location: Optional[str] = None) -> BrowserFingerprint:
        """Generate a fingerprint bound to a specific account."""
        # Generate base fingerprint with account binding
        fingerprint = self.generator_service.generate_fingerprint(
            platform=platform,
            proxy_location=proxy_location,
            account_id=account_id
        )

        # Apply deterministic variations based on account ID
        self._apply_account_variations(fingerprint, account_id)

        return fingerprint

    def get_daily_fingerprint(self,
                              base_fingerprint: BrowserFingerprint,
                              account_id: str) -> BrowserFingerprint:
        """Get deterministic daily variation of account fingerprint."""
        if not base_fingerprint.account_bound:
            raise ValueError("Fingerprint must be account-bound for daily variations")

        # Calculate days since creation
        days_since_creation = (datetime.now() - base_fingerprint.created_at).days

        # Generate deterministic seed for today
        today_seed = self._generate_daily_seed(account_id, days_since_creation)

        # Apply deterministic variations
        varied = self._apply_daily_variations(base_fingerprint, today_seed)

        return varied

    def _apply_account_variations(self, fingerprint: BrowserFingerprint, account_id: str) -> None:
        """Apply account-specific variations to fingerprint."""
        # Use account ID to seed variations
        account_hash = int(hashlib.md5(account_id.encode()).hexdigest()[:8], 16)

        # Deterministic but unique variations
        random.seed(account_hash)

        # Vary canvas noise seed within range
        base_seed = fingerprint.canvas_noise.seed
        fingerprint.canvas_noise.seed = base_seed + (account_hash % 1000)

        # Vary audio latencies slightly
        fingerprint.audio_context_base_latency += (account_hash % 10) * 0.0001
        fingerprint.audio_context_output_latency += (account_hash % 10) * 0.0002

        # Select subset of fonts deterministically
        if len(fingerprint.font_list) > 5:
            num_to_remove = account_hash % 3 + 1
            for _ in range(num_to_remove):
                fingerprint.font_list.pop(random.randint(0, len(fingerprint.font_list) - 1))

        # Reset the global random state
        random.seed()

    def _generate_daily_seed(self, account_id: str, day_number: int) -> int:
        """Generate deterministic seed for a specific day."""
        # Combine account ID with day number
        seed_string = f"{account_id}:{day_number}"
        seed_hash = hashlib.sha256(seed_string.encode()).hexdigest()

        # Convert to integer seed
        return int(seed_hash[:8], 16)

    def _apply_daily_variations(self, fingerprint: BrowserFingerprint, daily_seed: int) -> BrowserFingerprint:
        """Apply deterministic daily variations."""
        # Use rotation service with controlled randomness
        original_state = random.getstate()
        random.seed(daily_seed)

        # Minimal rotation for daily changes
        varied = self.rotation_service.rotate_fingerprint(fingerprint, RotationStrategy.MINIMAL)

        # Additional deterministic changes
        self._apply_time_based_changes(varied, daily_seed)

        # Restore original random state
        random.setstate(original_state)

        return varied

    def _apply_time_based_changes(self, fingerprint: BrowserFingerprint, seed: int) -> None:
        """Apply time-based changes that would naturally occur."""
        # Browser version might update weekly
        week_number = seed % 52
        if week_number % 4 == 0:  # Every 4 weeks
            self._increment_browser_version(fingerprint)

        # System uptime affects audio latency
        hour_of_day = datetime.now().hour
        fingerprint.audio_context_base_latency += (hour_of_day / 24) * 0.001

        # Network conditions affect WebRTC
        if seed % 3 == 0:
            # Change local IP mask (different network)
            fingerprint.webrtc_config.local_ip_mask = f"192.168.{seed % 255}.x"

    def _increment_browser_version(self, fingerprint: BrowserFingerprint) -> None:
        """Increment browser version number."""
        import re
        user_agent = fingerprint.navigator_props.user_agent

        # Find Chrome version
        match = re.search(r'Chrome/(\d+)\.(\d+)\.(\d+)\.(\d+)', user_agent)
        if match:
            major = int(match.group(1))
            minor = int(match.group(2))
            build = int(match.group(3))
            patch = int(match.group(4))

            # Increment build number
            build += 1

            # Update user agent
            old_version = match.group(0)
            new_version = f"Chrome/{major}.{minor}.{build}.{patch}"
            fingerprint.navigator_props.user_agent = user_agent.replace(old_version, new_version)

    def validate_account_binding(self, fingerprint: BrowserFingerprint, account_id: str) -> bool:
        """Validate that a fingerprint is properly bound to an account."""
        if not fingerprint.account_bound:
            return False

        if not fingerprint.static_components:
            return False

        if not fingerprint.rotation_seed:
            return False

        # Could add more validation here (e.g., check against database)
        return True

    def get_fingerprint_age_days(self, fingerprint: BrowserFingerprint) -> int:
        """Get age of fingerprint in days."""
        if not fingerprint.created_at:
            return 0

        return (datetime.now() - fingerprint.created_at).days

    def should_rotate_fingerprint(self, fingerprint: BrowserFingerprint) -> bool:
        """Determine if fingerprint should be rotated."""
        age_days = self.get_fingerprint_age_days(fingerprint)

        # Rotate after 30 days
        if age_days > 30:
            return True

        # Check last rotation
        if fingerprint.last_rotated:
            days_since_rotation = (datetime.now() - fingerprint.last_rotated).days
            if days_since_rotation > 7:  # Weekly rotation check
                return True

        return False

    def prepare_session_fingerprint(self,
                                    fingerprint: BrowserFingerprint,
                                    session_data: Dict[str, Any]) -> BrowserFingerprint:
        """Prepare fingerprint for use with an existing session."""
        # Sessions might have slightly different characteristics
        session_fp = self._deep_copy_fingerprint(fingerprint)

        # Apply session-specific adjustments
        if "browser_version" in session_data:
            self._update_to_browser_version(session_fp, session_data["browser_version"])

        if "screen_resolution" in session_data:
            session_fp.hardware_config.screen_resolution = tuple(session_data["screen_resolution"])

        return session_fp

    def _update_to_browser_version(self, fingerprint: BrowserFingerprint, version: str) -> None:
        """Update fingerprint to a specific browser version."""
        import re
        user_agent = fingerprint.navigator_props.user_agent

        # Replace Chrome version
        user_agent = re.sub(r'Chrome/[\d.]+', f'Chrome/{version}', user_agent)
        fingerprint.navigator_props.user_agent = user_agent

        # Update app version
        app_version = fingerprint.navigator_props.app_version
        app_version = re.sub(r'Chrome/[\d.]+', f'Chrome/{version}', app_version)
        fingerprint.navigator_props.app_version = app_version

    def _deep_copy_fingerprint(self, fingerprint: BrowserFingerprint) -> BrowserFingerprint:
        """Create a deep copy of fingerprint."""
        # Delegate to rotation service's implementation
        return self.rotation_service._deep_copy_fingerprint(fingerprint)
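A minimal usage sketch of the account-binding flow above (the account ID and platform literals are placeholders):

    service = AccountFingerprintService()
    fp = service.generate_account_fingerprint(account_id="acct-42", platform="instagram")
    daily = service.get_daily_fingerprint(fp, account_id="acct-42")  # same output all day, new one tomorrow
    if service.should_rotate_fingerprint(daily):
        daily = service.rotation_service.rotate_fingerprint(daily)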
481
infrastructure/services/fingerprint/browser_injection_service.py
Normal file
@@ -0,0 +1,481 @@
"""
Browser Injection Service - Handles fingerprint injection into browser contexts.
"""

import json
import base64
from typing import Dict, Any, Optional

from domain.entities.browser_fingerprint import BrowserFingerprint


class BrowserInjectionService:
    """Service for injecting fingerprints into browser contexts."""

    def generate_fingerprint_scripts(self, fingerprint: BrowserFingerprint) -> Dict[str, str]:
        """Generate all fingerprint injection scripts."""
        return {
            "canvas_protection": self._generate_canvas_script(fingerprint),
            "webgl_protection": self._generate_webgl_script(fingerprint),
            "webrtc_protection": self._generate_webrtc_script(fingerprint),
            "navigator_override": self._generate_navigator_script(fingerprint),
            "hardware_override": self._generate_hardware_script(fingerprint),
            "timezone_override": self._generate_timezone_script(fingerprint),
            "audio_protection": self._generate_audio_script(fingerprint),
            "font_detection": self._generate_font_script(fingerprint),
            "plugin_override": self._generate_plugin_script(fingerprint)
        }

    def _generate_canvas_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate canvas fingerprint protection script."""
        return f'''
        (function() {{
            const seed = {fingerprint.canvas_noise.seed};
            const noiseLevel = {fingerprint.canvas_noise.noise_level};
            const algorithm = "{fingerprint.canvas_noise.algorithm}";

            // Deterministic random based on seed
            let randomSeed = seed;
            function seededRandom() {{
                randomSeed = (randomSeed * 9301 + 49297) % 233280;
                return randomSeed / 233280;
            }}

            // Override toDataURL
            const originalToDataURL = HTMLCanvasElement.prototype.toDataURL;
            HTMLCanvasElement.prototype.toDataURL = function(...args) {{
                const context = this.getContext('2d');
                if (context) {{
                    const imageData = context.getImageData(0, 0, this.width, this.height);
                    const data = imageData.data;

                    // Apply noise based on algorithm
                    for (let i = 0; i < data.length; i += 4) {{
                        if (algorithm === 'gaussian') {{
                            // Gaussian noise
                            const noise = (seededRandom() - 0.5) * 2 * noiseLevel * 255;
                            data[i] = Math.max(0, Math.min(255, data[i] + noise));
                            data[i+1] = Math.max(0, Math.min(255, data[i+1] + noise));
                            data[i+2] = Math.max(0, Math.min(255, data[i+2] + noise));
                        }} else if (algorithm === 'uniform') {{
                            // Uniform noise
                            const noise = (seededRandom() - 0.5) * noiseLevel * 255;
                            data[i] = Math.max(0, Math.min(255, data[i] + noise));
                        }}
                    }}

                    context.putImageData(imageData, 0, 0);
                }}

                return originalToDataURL.apply(this, args);
            }};

            // Override getImageData
            const originalGetImageData = CanvasRenderingContext2D.prototype.getImageData;
            CanvasRenderingContext2D.prototype.getImageData = function(...args) {{
                const imageData = originalGetImageData.apply(this, args);
                const data = imageData.data;

                // Apply same noise
                for (let i = 0; i < data.length; i += 4) {{
                    const noise = (seededRandom() - 0.5) * noiseLevel * 255;
                    data[i] = Math.max(0, Math.min(255, data[i] + noise));
                }}

                return imageData;
            }};
        }})();
        '''

    def _generate_webgl_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate WebGL fingerprint override script."""
        return f'''
        (function() {{
            const overrides = {{
                vendor: "{fingerprint.webgl_vendor}",
                renderer: "{fingerprint.webgl_renderer}"
            }};

            // Override WebGL getParameter
            const getParameter = WebGLRenderingContext.prototype.getParameter;
            WebGLRenderingContext.prototype.getParameter = function(parameter) {{
                if (parameter === 37445) {{ // UNMASKED_VENDOR_WEBGL
                    return overrides.vendor;
                }}
                if (parameter === 37446) {{ // UNMASKED_RENDERER_WEBGL
                    return overrides.renderer;
                }}
                return getParameter.apply(this, arguments);
            }};

            // Same for WebGL2
            if (typeof WebGL2RenderingContext !== 'undefined') {{
                const getParameter2 = WebGL2RenderingContext.prototype.getParameter;
                WebGL2RenderingContext.prototype.getParameter = function(parameter) {{
                    if (parameter === 37445) return overrides.vendor;
                    if (parameter === 37446) return overrides.renderer;
                    return getParameter2.apply(this, arguments);
                }};
            }}
        }})();
        '''

    def _generate_webrtc_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate WebRTC protection script."""
        if fingerprint.webrtc_config.disable_webrtc:
            return '''
            (function() {
                // Completely disable WebRTC
                window.RTCPeerConnection = undefined;
                window.RTCSessionDescription = undefined;
                window.RTCIceCandidate = undefined;
                window.webkitRTCPeerConnection = undefined;
                window.mozRTCPeerConnection = undefined;
            })();
            '''
        else:
            return f'''
            (function() {{
                const localIPMask = "{fingerprint.webrtc_config.local_ip_mask}";

                // Override RTCPeerConnection
                const OriginalRTCPeerConnection = window.RTCPeerConnection ||
                                                  window.webkitRTCPeerConnection ||
                                                  window.mozRTCPeerConnection;

                if (OriginalRTCPeerConnection) {{
                    window.RTCPeerConnection = function(config, constraints) {{
                        const pc = new OriginalRTCPeerConnection(config, constraints);

                        // Override createDataChannel to prevent IP leak
                        const originalCreateDataChannel = pc.createDataChannel;
                        pc.createDataChannel = function(...args) {{
                            return originalCreateDataChannel.apply(pc, args);
                        }};

                        // Monitor ICE candidates
                        pc.addEventListener('icecandidate', function(event) {{
                            if (event.candidate && event.candidate.candidate) {{
                                // Mask local IP addresses
                                event.candidate.candidate = event.candidate.candidate.replace(
                                    /([0-9]{{1,3}}\.){{3}}[0-9]{{1,3}}/g,
                                    function(match) {{
                                        if (match.startsWith('10.') ||
                                            match.startsWith('192.168.') ||
                                            match.startsWith('172.')) {{
                                            return localIPMask;
                                        }}
                                        return match;
                                    }}
                                );
                            }}
                        }});

                        return pc;
                    }};

                    // Copy static properties
                    Object.keys(OriginalRTCPeerConnection).forEach(key => {{
                        window.RTCPeerConnection[key] = OriginalRTCPeerConnection[key];
                    }});
                }}
            }})();
            '''

    def _generate_navigator_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate navigator properties override script."""
        nav = fingerprint.navigator_props
        languages_json = json.dumps(nav.languages) if nav.languages else '["en-US", "en"]'

        return f'''
        (function() {{
            // Navigator overrides
            Object.defineProperty(navigator, 'platform', {{
                get: () => "{nav.platform}"
            }});

            Object.defineProperty(navigator, 'vendor', {{
                get: () => "{nav.vendor}"
            }});

            Object.defineProperty(navigator, 'vendorSub', {{
                get: () => "{nav.vendor_sub}"
            }});

            Object.defineProperty(navigator, 'product', {{
                get: () => "{nav.product}"
            }});

            Object.defineProperty(navigator, 'productSub', {{
                get: () => "{nav.product_sub}"
            }});

            Object.defineProperty(navigator, 'appName', {{
                get: () => "{nav.app_name}"
            }});

            Object.defineProperty(navigator, 'appVersion', {{
                get: () => "{nav.app_version}"
            }});

            Object.defineProperty(navigator, 'userAgent', {{
                get: () => "{nav.user_agent}"
            }});

            Object.defineProperty(navigator, 'language', {{
                get: () => "{nav.language}"
            }});

            Object.defineProperty(navigator, 'languages', {{
                get: () => {languages_json}
            }});

            Object.defineProperty(navigator, 'onLine', {{
                get: () => {str(nav.online).lower()}
            }});

            Object.defineProperty(navigator, 'doNotTrack', {{
                get: () => "{nav.do_not_track}"
            }});
        }})();
        '''

    def _generate_hardware_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate hardware properties override script."""
        hw = fingerprint.hardware_config

        return f'''
        (function() {{
            // Hardware overrides
            Object.defineProperty(navigator, 'hardwareConcurrency', {{
                get: () => {hw.hardware_concurrency}
            }});

            Object.defineProperty(navigator, 'deviceMemory', {{
                get: () => {hw.device_memory}
            }});

            Object.defineProperty(navigator, 'maxTouchPoints', {{
                get: () => {hw.max_touch_points}
            }});

            // Screen overrides
            Object.defineProperty(screen, 'width', {{
                get: () => {hw.screen_resolution[0]}
            }});

            Object.defineProperty(screen, 'height', {{
                get: () => {hw.screen_resolution[1]}
            }});

            Object.defineProperty(screen, 'availWidth', {{
                get: () => {hw.screen_resolution[0]}
            }});

            Object.defineProperty(screen, 'availHeight', {{
                get: () => {hw.screen_resolution[1] - 40} // Taskbar
            }});

            Object.defineProperty(screen, 'colorDepth', {{
                get: () => {hw.color_depth}
            }});

            Object.defineProperty(screen, 'pixelDepth', {{
                get: () => {hw.color_depth}
            }});

            Object.defineProperty(window, 'devicePixelRatio', {{
                get: () => {hw.pixel_ratio}
            }});
        }})();
        '''

    def _generate_timezone_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate timezone override script."""
        return f'''
        (function() {{
            const timezone = "{fingerprint.timezone}";
            const timezoneOffset = {fingerprint.timezone_offset};

            // Override Date.prototype.getTimezoneOffset
            Date.prototype.getTimezoneOffset = function() {{
                return timezoneOffset;
            }};

            // Override Intl.DateTimeFormat
            const OriginalDateTimeFormat = Intl.DateTimeFormat;
            Intl.DateTimeFormat = function(...args) {{
                if (args.length === 0 || !args[1] || !args[1].timeZone) {{
                    if (!args[1]) args[1] = {{}};
                    args[1].timeZone = timezone;
                }}
                return new OriginalDateTimeFormat(...args);
            }};

            // Copy static methods
            Object.keys(OriginalDateTimeFormat).forEach(key => {{
                Intl.DateTimeFormat[key] = OriginalDateTimeFormat[key];
            }});

            // Override resolvedOptions
            Intl.DateTimeFormat.prototype.resolvedOptions = function() {{
                const options = OriginalDateTimeFormat.prototype.resolvedOptions.call(this);
                options.timeZone = timezone;
                return options;
            }};
        }})();
        '''

    def _generate_audio_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate audio context override script."""
        return f'''
        (function() {{
            const audioParams = {{
                baseLatency: {fingerprint.audio_context_base_latency},
                outputLatency: {fingerprint.audio_context_output_latency},
                sampleRate: {fingerprint.audio_context_sample_rate}
            }};

            // Override AudioContext
            const OriginalAudioContext = window.AudioContext || window.webkitAudioContext;

            if (OriginalAudioContext) {{
                window.AudioContext = function(...args) {{
                    const context = new OriginalAudioContext(...args);

                    Object.defineProperty(context, 'baseLatency', {{
                        get: () => audioParams.baseLatency
                    }});

                    Object.defineProperty(context, 'outputLatency', {{
                        get: () => audioParams.outputLatency
                    }});

                    Object.defineProperty(context, 'sampleRate', {{
                        get: () => audioParams.sampleRate
                    }});

                    return context;
                }};

                // Copy static properties
                Object.keys(OriginalAudioContext).forEach(key => {{
                    window.AudioContext[key] = OriginalAudioContext[key];
                }});
            }}
        }})();
        '''

    def _generate_font_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate font detection override script."""
        fonts_json = json.dumps(fingerprint.font_list)

        return f'''
        (function() {{
            const allowedFonts = {fonts_json};

            // Override font detection methods
            const originalGetComputedStyle = window.getComputedStyle;
            window.getComputedStyle = function(element, pseudoElt) {{
                const style = originalGetComputedStyle.apply(this, arguments);
                const originalPropertyGetter = style.getPropertyValue;

                style.getPropertyValue = function(prop) {{
                    if (prop === 'font-family') {{
                        const value = originalPropertyGetter.apply(this, arguments);
                        // Filter out non-allowed fonts
                        const fonts = value.split(',').map(f => f.trim());
                        const filtered = fonts.filter(f => {{
                            const fontName = f.replace(/['"]/g, '');
                            return allowedFonts.some(allowed =>
                                fontName.toLowerCase().includes(allowed.toLowerCase())
                            );
                        }});
                        return filtered.join(', ');
                    }}
                    return originalPropertyGetter.apply(this, arguments);
                }};

                return style;
            }};
        }})();
        '''

    def _generate_plugin_script(self, fingerprint: BrowserFingerprint) -> str:
        """Generate plugin list override script."""
        plugins_data = []
        for plugin in fingerprint.plugins:
            plugins_data.append({
                "name": plugin.get("name", ""),
                "filename": plugin.get("filename", ""),
                "description": plugin.get("description", ""),
                "version": plugin.get("version", "")
            })

        plugins_json = json.dumps(plugins_data)

        return f'''
        (function() {{
            const pluginData = {plugins_json};

            // Create fake PluginArray
            const fakePlugins = {{}};
            fakePlugins.length = pluginData.length;

            pluginData.forEach((plugin, index) => {{
                const fakePlugin = {{
                    name: plugin.name,
                    filename: plugin.filename,
                    description: plugin.description,
                    version: plugin.version,
                    length: 1,
                    item: function(index) {{ return this; }},
                    namedItem: function(name) {{ return this; }}
                }};

                fakePlugins[index] = fakePlugin;
                fakePlugins[plugin.name] = fakePlugin;
            }});

            fakePlugins.item = function(index) {{
                return this[index] || null;
            }};

            fakePlugins.namedItem = function(name) {{
                return this[name] || null;
            }};

            fakePlugins.refresh = function() {{}};

            // Override navigator.plugins
            Object.defineProperty(navigator, 'plugins', {{
                get: () => fakePlugins
            }});
        }})();
        '''

    def apply_to_browser_context(self, context: Any, fingerprint: BrowserFingerprint) -> None:
        """Apply fingerprint to a Playwright browser context."""
        # Generate all scripts
        scripts = self.generate_fingerprint_scripts(fingerprint)

        # Combine all scripts
        combined_script = '\n'.join(scripts.values())

        # Add script to context
        context.add_init_script(combined_script)

        # Set viewport
        context.set_viewport_size({
            'width': fingerprint.hardware_config.screen_resolution[0],
            'height': fingerprint.hardware_config.screen_resolution[1]
        })

        # Set locale
        context.set_locale(fingerprint.navigator_props.language)

        # Set timezone
        context.set_timezone_id(fingerprint.timezone)

        # Set user agent
        context.set_user_agent(fingerprint.navigator_props.user_agent)
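A minimal sketch of the portable injection path; it relies only on add_init_script, since the locale/timezone/user-agent setters used in apply_to_browser_context are assumptions this code makes about the Playwright context API (those are normally creation-time options):

    injector = BrowserInjectionService()
    scripts = injector.generate_fingerprint_scripts(fingerprint)
    context.add_init_script("\n".join(scripts.values()))  # runs before any page script in the context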
243
infrastructure/services/fingerprint/fingerprint_generator_service.py
Normal file
@@ -0,0 +1,243 @@
"""
Fingerprint Generator Service - Core fingerprint generation logic.
"""

import random
import uuid
from typing import Optional, Dict, Any, List
from datetime import datetime

from domain.entities.browser_fingerprint import (
    BrowserFingerprint, CanvasNoise, WebRTCConfig,
    HardwareConfig, NavigatorProperties, StaticComponents
)
from .fingerprint_profile_service import FingerprintProfileService
from .timezone_location_service import TimezoneLocationService


class FingerprintGeneratorService:
    """Service for generating browser fingerprints."""

    def __init__(self,
                 profile_service: Optional[FingerprintProfileService] = None,
                 timezone_service: Optional[TimezoneLocationService] = None):
        self.profile_service = profile_service or FingerprintProfileService()
        self.timezone_service = timezone_service or TimezoneLocationService()

    def generate_fingerprint(self,
                             profile_type: Optional[str] = None,
                             platform: Optional[str] = None,
                             proxy_location: Optional[str] = None,
                             account_id: Optional[str] = None) -> BrowserFingerprint:
        """Generate a new browser fingerprint."""

        # Get base profile
        profile = self.profile_service.get_profile(profile_type)

        # Get location data
        location_data = self.timezone_service.get_consistent_location_data(proxy_location)

        # Generate components
        fingerprint_id = str(uuid.uuid4())
        hardware_config = self._generate_hardware_config(profile)
        navigator_props = self._generate_navigator_properties(profile, location_data)
        canvas_noise = self._generate_canvas_noise()
        webrtc_config = self._generate_webrtc_config(profile["platform"])
        webgl_info = self._generate_webgl_info(profile)
        audio_context = self._generate_audio_context(profile_type)
        fonts = self.profile_service.get_fonts_for_platform(profile["platform"])
        plugins = self.profile_service.get_plugin_list(profile["platform"])

        # Generate static components if account-bound
        static_components = None
        rotation_seed = None
        if account_id:
            static_components = self._generate_static_components(profile, location_data)
            rotation_seed = random.randint(1000000, 9999999)

        # Platform-specific config
        platform_config = self._generate_platform_specific_config(platform, profile)

        return BrowserFingerprint(
            fingerprint_id=fingerprint_id,
            canvas_noise=canvas_noise,
            webrtc_config=webrtc_config,
            font_list=fonts,
            hardware_config=hardware_config,
            navigator_props=navigator_props,
            webgl_vendor=webgl_info["vendor"],
            webgl_renderer=webgl_info["renderer"],
            audio_context_base_latency=audio_context["base_latency"],
            audio_context_output_latency=audio_context["output_latency"],
            audio_context_sample_rate=audio_context["sample_rate"],
            timezone=location_data["timezone"],
            timezone_offset=location_data["timezone_offset"],
            plugins=plugins,
            created_at=datetime.now(),
            last_rotated=datetime.now(),
            static_components=static_components,
            rotation_seed=rotation_seed,
            account_bound=bool(account_id),
            platform_specific_config=platform_config
        )

    def _generate_hardware_config(self, profile: Dict[str, Any]) -> HardwareConfig:
        """Generate hardware configuration."""
        return HardwareConfig(
            hardware_concurrency=random.choice(profile["hardware_concurrency"]),
            device_memory=random.choice(profile["device_memory"]),
            max_touch_points=10 if "mobile" in profile["name"].lower() else 0,
            screen_resolution=random.choice(profile["screen_resolution"]),
            color_depth=random.choice([24, 32]),
            pixel_ratio=random.choice([1.0, 1.5, 2.0, 3.0])
        )

    def _generate_navigator_properties(self, profile: Dict[str, Any],
                                       location_data: Dict[str, Any]) -> NavigatorProperties:
        """Generate navigator properties."""
        ua_components = self.profile_service.get_user_agent_components(profile["platform"])

        # Build user agent
        if "Chrome" in ua_components["browser"]:
            user_agent = f"Mozilla/5.0 ({ua_components['os']}) {ua_components['engine']} {ua_components['browser']} Safari/537.36"
        else:
            user_agent = f"Mozilla/5.0 ({ua_components['os']}) {ua_components['engine']} {ua_components['browser']}"

        return NavigatorProperties(
            platform=profile["platform"],
            vendor=profile["vendor"],
            vendor_sub="",
            product="Gecko",
            product_sub="20030107",
            app_name="Netscape",
            app_version="5.0 ({})".format(ua_components["os"]),
            user_agent=user_agent,
            language=location_data["language"],
            languages=location_data["languages"],
            online=True,
            do_not_track=random.choice(["1", "unspecified"])
        )

    def _generate_canvas_noise(self) -> CanvasNoise:
        """Generate canvas noise configuration."""
        config = self.profile_service.get_canvas_noise_config()
        return CanvasNoise(
            noise_level=config["noise_level"],
            seed=random.randint(1000, 99999),
            algorithm=config["algorithm"]
        )

    def _generate_webrtc_config(self, platform: str) -> WebRTCConfig:
        """Generate WebRTC configuration."""
        config = self.profile_service.get_webrtc_config(platform)
        return WebRTCConfig(
            enabled=config["enabled"],
            ice_servers=config["ice_servers"],
            local_ip_mask=config["local_ip_mask"],
            disable_webrtc=config["disable_webrtc"]
        )

    def _generate_webgl_info(self, profile: Dict[str, Any]) -> Dict[str, str]:
        """Generate WebGL vendor and renderer."""
        renderer = random.choice(profile["renderer"])

        # Ensure vendor matches renderer
        if "Intel" in renderer:
            vendor = "Intel Inc."
        elif "NVIDIA" in renderer or "GeForce" in renderer:
            vendor = "NVIDIA Corporation"
        elif "AMD" in renderer or "Radeon" in renderer:
            vendor = "AMD"
        elif "Apple" in renderer:
            vendor = "Apple Inc."
        else:
            vendor = profile["vendor"]

        return {
            "vendor": vendor,
            "renderer": renderer
        }

    def _generate_audio_context(self, profile_type: Optional[str]) -> Dict[str, Any]:
        """Generate audio context parameters."""
        audio_type = "mobile" if profile_type == "mobile" else "default"
        base_config = self.profile_service.get_audio_context(audio_type)

        # Add slight variations
        return {
            "base_latency": base_config["base_latency"] + random.uniform(-0.001, 0.001),
            "output_latency": base_config["output_latency"] + random.uniform(-0.002, 0.002),
            "sample_rate": base_config["sample_rate"]
        }

    def _generate_static_components(self, profile: Dict[str, Any],
                                    location_data: Dict[str, Any]) -> StaticComponents:
        """Generate static components for account-bound fingerprints."""
        # Determine device type
        if "mobile" in profile["name"].lower():
            device_type = "mobile"
        elif "tablet" in profile["name"].lower():
            device_type = "tablet"
        else:
            device_type = "desktop"

        # Determine OS family
        if "Win" in profile["platform"]:
            os_family = "windows"
        elif "Mac" in profile["platform"] or "iPhone" in profile["platform"]:
            os_family = "macos"
        elif "Android" in profile["platform"] or "Linux" in profile["platform"]:
            os_family = "linux"
        else:
            os_family = "other"

        # Determine browser family
        browser_family = "chromium"  # Most common

        # Select base fonts (these won't change during rotation)
        all_fonts = self.profile_service.get_fonts_for_platform(profile["platform"])
        base_fonts = random.sample(all_fonts, min(10, len(all_fonts)))

        return StaticComponents(
            device_type=device_type,
            os_family=os_family,
            browser_family=browser_family,
            gpu_vendor=profile["vendor"],
            gpu_model=profile["renderer"][0] if profile["renderer"] else "Unknown",
            cpu_architecture="x86_64" if device_type == "desktop" else "arm64",
            base_fonts=base_fonts,
            base_resolution=profile["screen_resolution"][0],
            base_timezone=location_data["timezone"]
        )

    def _generate_platform_specific_config(self, platform: Optional[str],
                                           profile: Dict[str, Any]) -> Dict[str, Any]:
        """Generate platform-specific configuration."""
        config = {
            "platform": platform or "unknown",
            "profile_name": profile["name"],
            "protection_level": "standard"
        }

        if platform == "instagram":
            config.update({
                "app_id": "936619743392459",
                "ajax_id": "1234567890",
                "ig_did": str(uuid.uuid4()).upper(),
                "claim": "0"
            })
        elif platform == "facebook":
            config.update({
                "fb_api_version": "v18.0",
                "fb_app_id": "256281040558",
                "fb_locale": "en_US"
            })
        elif platform == "tiktok":
            config.update({
                "tt_webid": str(random.randint(10**18, 10**19)),
                "tt_csrf_token": str(uuid.uuid4()),
                "browser_name": "chrome",
                "browser_version": "120"
            })

        return config
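A minimal usage sketch (the account ID is a placeholder; proxy_location is simply forwarded to TimezoneLocationService):

    generator = FingerprintGeneratorService()
    fp = generator.generate_fingerprint(profile_type="mobile", platform="tiktok", account_id="acct-42")
    assert fp.account_bound and fp.rotation_seed is not None  # set only when account_id is given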
259
infrastructure/services/fingerprint/fingerprint_persistence_service.py
Normal file
@@ -0,0 +1,259 @@
"""
Fingerprint Persistence Service - Handles fingerprint storage and retrieval.
"""

import logging
from typing import Optional, List, Dict, Any
from datetime import datetime, timedelta

from domain.entities.browser_fingerprint import BrowserFingerprint
from domain.repositories.fingerprint_repository import IFingerprintRepository

logger = logging.getLogger("fingerprint_persistence_service")


class FingerprintPersistenceService:
    """Service for fingerprint persistence operations."""

    def __init__(self, repository: IFingerprintRepository):
        self.repository = repository
        self._cache = {}  # Simple in-memory cache
        self._cache_ttl = 300  # 5 minutes

    def save_fingerprint(self, fingerprint: BrowserFingerprint) -> str:
        """Save a fingerprint and return its ID."""
        try:
            fingerprint_id = self.repository.save(fingerprint)

            # Update cache
            self._cache[fingerprint_id] = {
                'fingerprint': fingerprint,
                'timestamp': datetime.now()
            }

            logger.info(f"Saved fingerprint: {fingerprint_id}")
            return fingerprint_id

        except Exception as e:
            logger.error(f"Failed to save fingerprint: {e}")
            raise

    def load_fingerprint(self, fingerprint_id: str) -> Optional[BrowserFingerprint]:
        """Load a fingerprint by ID."""
        # Check cache first
        if fingerprint_id in self._cache:
            cache_entry = self._cache[fingerprint_id]
            # total_seconds() rather than .seconds, which wraps around after a day
            if (datetime.now() - cache_entry['timestamp']).total_seconds() < self._cache_ttl:
                logger.debug(f"Loaded fingerprint from cache: {fingerprint_id}")
                return cache_entry['fingerprint']

        # Load from repository
        try:
            fingerprint = self.repository.find_by_id(fingerprint_id)

            if fingerprint:
                # Update cache
                self._cache[fingerprint_id] = {
                    'fingerprint': fingerprint,
                    'timestamp': datetime.now()
                }
                logger.info(f"Loaded fingerprint from repository: {fingerprint_id}")
            else:
                logger.warning(f"Fingerprint not found: {fingerprint_id}")

            return fingerprint

        except Exception as e:
            logger.error(f"Failed to load fingerprint {fingerprint_id}: {e}")
            return None

    def load_fingerprint_for_account(self, account_id: str) -> Optional[BrowserFingerprint]:
        """Load fingerprint associated with an account."""
        try:
            fingerprint = self.repository.find_by_account_id(account_id)

            if fingerprint:
                # Update cache
                self._cache[fingerprint.fingerprint_id] = {
                    'fingerprint': fingerprint,
                    'timestamp': datetime.now()
                }
                logger.info(f"Loaded fingerprint for account {account_id}")
            else:
                logger.warning(f"No fingerprint found for account {account_id}")

            return fingerprint

        except Exception as e:
            logger.error(f"Failed to load fingerprint for account {account_id}: {e}")
            return None

    def update_fingerprint(self, fingerprint: BrowserFingerprint) -> bool:
        """Update an existing fingerprint."""
        try:
            success = self.repository.update(fingerprint)

            if success:
                # Update cache
                self._cache[fingerprint.fingerprint_id] = {
                    'fingerprint': fingerprint,
                    'timestamp': datetime.now()
                }
                logger.info(f"Updated fingerprint: {fingerprint.fingerprint_id}")
            else:
                logger.warning(f"Failed to update fingerprint: {fingerprint.fingerprint_id}")

            return success

        except Exception as e:
            logger.error(f"Failed to update fingerprint {fingerprint.fingerprint_id}: {e}")
            return False

    def delete_fingerprint(self, fingerprint_id: str) -> bool:
        """Delete a fingerprint."""
        try:
            success = self.repository.delete(fingerprint_id)

            if success:
                # Remove from cache
                self._cache.pop(fingerprint_id, None)
                logger.info(f"Deleted fingerprint: {fingerprint_id}")
            else:
                logger.warning(f"Failed to delete fingerprint: {fingerprint_id}")

            return success

        except Exception as e:
            logger.error(f"Failed to delete fingerprint {fingerprint_id}: {e}")
            return False

    def list_fingerprints(self, limit: int = 100) -> List[BrowserFingerprint]:
        """List all fingerprints."""
        try:
            fingerprints = self.repository.find_all()

            # Limit results
            if len(fingerprints) > limit:
                fingerprints = fingerprints[:limit]

            logger.info(f"Listed {len(fingerprints)} fingerprints")
            return fingerprints

        except Exception as e:
            logger.error(f"Failed to list fingerprints: {e}")
            return []

    def list_recent_fingerprints(self, limit: int = 10) -> List[BrowserFingerprint]:
        """List recently created fingerprints."""
        try:
            fingerprints = self.repository.find_recent(limit)
            logger.info(f"Listed {len(fingerprints)} recent fingerprints")
            return fingerprints

        except Exception as e:
            logger.error(f"Failed to list recent fingerprints: {e}")
            return []

    def list_fingerprints_by_platform(self, platform: str) -> List[BrowserFingerprint]:
        """List fingerprints for a specific platform."""
        try:
            fingerprints = self.repository.find_by_platform(platform)
            logger.info(f"Listed {len(fingerprints)} fingerprints for platform {platform}")
            return fingerprints

        except Exception as e:
            logger.error(f"Failed to list fingerprints for platform {platform}: {e}")
            return []

    def get_fingerprint_pool(self, size: int = 10) -> List[BrowserFingerprint]:
        """Get a pool of random fingerprints."""
        try:
            # Get more than needed to filter
            candidates = self.repository.find_recent(size * 3)

            # Filter for quality
            quality_fingerprints = []
            for fp in candidates:
                # Skip if too old
                if fp.created_at:
                    age_days = (datetime.now() - fp.created_at).days
                    if age_days > 30:
                        continue

                # Skip if account-bound
                if fp.account_bound:
                    continue

                quality_fingerprints.append(fp)

                if len(quality_fingerprints) >= size:
                    break

            logger.info(f"Created fingerprint pool of size {len(quality_fingerprints)}")
            return quality_fingerprints

        except Exception as e:
            logger.error(f"Failed to create fingerprint pool: {e}")
            return []

    def cleanup_old_fingerprints(self, days_to_keep: int = 90) -> int:
        """Clean up fingerprints older than the specified number of days."""
        try:
            # Calculate cutoff date
            cutoff = datetime.now() - timedelta(days=days_to_keep)

            # Get all fingerprints to check
            all_fingerprints = self.repository.find_all()
            deleted_count = 0

            for fp in all_fingerprints:
                if fp.created_at and fp.created_at < cutoff:
                    # Skip if account-bound
                    if fp.account_bound:
                        continue

                    if self.repository.delete(fp.fingerprint_id):
                        deleted_count += 1
                        # Remove from cache
                        self._cache.pop(fp.fingerprint_id, None)

            logger.info(f"Cleaned up {deleted_count} old fingerprints")
            return deleted_count

        except Exception as e:
            logger.error(f"Failed to cleanup old fingerprints: {e}")
            return 0

    def clear_cache(self) -> None:
        """Clear the in-memory cache."""
        self._cache.clear()
        logger.info("Cleared fingerprint cache")

    def get_statistics(self) -> Dict[str, Any]:
        """Get fingerprint statistics."""
        try:
            total = self.repository.count()
            recent = len(self.repository.find_recent(100))

            # Platform breakdown
            platforms = {}
            for platform in ['instagram', 'facebook', 'tiktok', 'twitter']:
                count = len(self.repository.find_by_platform(platform))
                if count > 0:
                    platforms[platform] = count

            return {
                'total_fingerprints': total,
                'recent_fingerprints': recent,
                'platforms': platforms,
                'cache_size': len(self._cache)
            }

        except Exception as e:
            logger.error(f"Failed to get statistics: {e}")
            return {
                'total_fingerprints': 0,
                'recent_fingerprints': 0,
                'platforms': {},
                'cache_size': len(self._cache)
            }
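A minimal roundtrip sketch, assuming `repo` is some IFingerprintRepository implementation:

    persistence = FingerprintPersistenceService(repo)
    fp_id = persistence.save_fingerprint(fp)
    same_fp = persistence.load_fingerprint(fp_id)  # cache hit within the 5-minute TTL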
182
infrastructure/services/fingerprint/fingerprint_profile_service.py
Normal file
@@ -0,0 +1,182 @@
"""
Fingerprint Profile Service - Manages predefined fingerprint profiles and configurations.
"""

import random
from typing import List, Dict, Any, Optional, Tuple


class FingerprintProfileService:
    """Service for managing fingerprint profiles and configurations."""

    DESKTOP_PROFILES = [
        {
            "name": "Windows Chrome User",
            "platform": "Win32",
            "hardware_concurrency": [4, 8, 16],
            "device_memory": [4, 8, 16],
            "screen_resolution": [(1920, 1080), (2560, 1440), (1366, 768)],
            "vendor": "Google Inc.",
            "renderer": ["ANGLE (Intel HD Graphics)", "ANGLE (NVIDIA GeForce GTX)", "ANGLE (AMD Radeon)"]
        },
        {
            "name": "MacOS Safari User",
            "platform": "MacIntel",
            "hardware_concurrency": [4, 8, 12],
            "device_memory": [8, 16, 32],
            "screen_resolution": [(1440, 900), (2560, 1600), (5120, 2880)],
            "vendor": "Apple Inc.",
            "renderer": ["Apple M1", "Intel Iris", "AMD Radeon Pro"]
        }
    ]

    MOBILE_PROFILES = [
        {
            "name": "Android Chrome",
            "platform": "Linux armv8l",
            "hardware_concurrency": [4, 6, 8],
            "device_memory": [3, 4, 6, 8],
            "screen_resolution": [(360, 740), (375, 812), (414, 896)],
            "vendor": "Google Inc.",
            "renderer": ["Adreno", "Mali", "PowerVR"]
        },
        {
            "name": "iOS Safari",
            "platform": "iPhone",
            "hardware_concurrency": [2, 4, 6],
            "device_memory": [2, 3, 4],
            "screen_resolution": [(375, 667), (375, 812), (414, 896)],
            "vendor": "Apple Inc.",
            "renderer": ["Apple GPU"]
        }
    ]

    COMMON_FONTS = {
        "windows": [
            "Arial", "Arial Black", "Comic Sans MS", "Courier New",
            "Georgia", "Impact", "Times New Roman", "Trebuchet MS",
            "Verdana", "Webdings", "Wingdings", "Calibri", "Cambria",
            "Consolas", "Segoe UI", "Tahoma"
        ],
        "mac": [
            "Arial", "Arial Black", "Comic Sans MS", "Courier New",
            "Georgia", "Helvetica", "Helvetica Neue", "Times New Roman",
            "Trebuchet MS", "Verdana", "American Typewriter", "Avenir",
            "Baskerville", "Big Caslon", "Futura", "Geneva", "Gill Sans"
        ],
        "linux": [
            "Arial", "Courier New", "Times New Roman", "DejaVu Sans",
            "DejaVu Serif", "DejaVu Sans Mono", "Liberation Sans",
            "Liberation Serif", "Ubuntu", "Droid Sans", "Noto Sans"
        ]
    }

    AUDIO_CONTEXTS = {
        "default": {
            "base_latency": 0.01,
            "output_latency": 0.02,
            "sample_rate": 48000
        },
        "high_quality": {
            "base_latency": 0.005,
            "output_latency": 0.01,
            "sample_rate": 96000
        },
        "mobile": {
            "base_latency": 0.02,
            "output_latency": 0.04,
            "sample_rate": 44100
        }
    }

    def get_profile(self, profile_type: Optional[str] = None) -> Dict[str, Any]:
        """Get a fingerprint profile based on type."""
        if profile_type == "mobile":
            return random.choice(self.MOBILE_PROFILES)
        else:
            return random.choice(self.DESKTOP_PROFILES)

    def get_fonts_for_platform(self, platform: str) -> List[str]:
        """Get common fonts for a specific platform."""
        if "Win" in platform:
            base_fonts = self.COMMON_FONTS["windows"]
        elif "Mac" in platform or "iPhone" in platform:
            base_fonts = self.COMMON_FONTS["mac"]
        else:
            base_fonts = self.COMMON_FONTS["linux"]

        # Randomly select 80-95% of fonts to add variation
        num_fonts = random.randint(int(len(base_fonts) * 0.8), int(len(base_fonts) * 0.95))
        return random.sample(base_fonts, num_fonts)

    def get_audio_context(self, profile_type: str = "default") -> Dict[str, Any]:
        """Get audio context configuration."""
        return self.AUDIO_CONTEXTS.get(profile_type, self.AUDIO_CONTEXTS["default"])

    def get_user_agent_components(self, platform: str) -> Dict[str, str]:
        """Get user agent components for a platform."""
        components = {
            "Win32": {
                "os": "Windows NT 10.0; Win64; x64",
                "browser": "Chrome/120.0.0.0",
                "engine": "AppleWebKit/537.36 (KHTML, like Gecko)"
            },
            "MacIntel": {
                "os": "Macintosh; Intel Mac OS X 10_15_7",
                "browser": "Chrome/120.0.0.0",
                "engine": "AppleWebKit/537.36 (KHTML, like Gecko)"
            },
            "Linux armv8l": {
                "os": "Linux; Android 13",
                "browser": "Chrome/120.0.0.0 Mobile",
                "engine": "AppleWebKit/537.36 (KHTML, like Gecko)"
            },
            "iPhone": {
                "os": "iPhone; CPU iPhone OS 17_0 like Mac OS X",
                "browser": "Version/17.0 Mobile/15E148",
                "engine": "AppleWebKit/605.1.15 (KHTML, like Gecko)"
            }
        }
        return components.get(platform, components["Win32"])

    def get_canvas_noise_config(self, profile_type: str = "default") -> Dict[str, Any]:
        """Get canvas noise configuration."""
        configs = {
            "default": {"noise_level": 0.02, "algorithm": "gaussian"},
            "aggressive": {"noise_level": 0.05, "algorithm": "perlin"},
            "minimal": {"noise_level": 0.01, "algorithm": "uniform"}
        }
        return configs.get(profile_type, configs["default"])

    def get_webrtc_config(self, platform: str) -> Dict[str, Any]:
        """Get WebRTC configuration for platform."""
        if "mobile" in platform.lower() or "android" in platform.lower() or "iphone" in platform.lower():
            return {
                "enabled": True,
                "ice_servers": ["stun:stun.l.google.com:19302"],
                "local_ip_mask": "192.168.1.x",
                "disable_webrtc": False
            }
        else:
            return {
                "enabled": True,
                "ice_servers": ["stun:stun.l.google.com:19302", "stun:stun1.l.google.com:19302"],
                "local_ip_mask": "10.0.0.x",
                "disable_webrtc": False
            }

    def get_plugin_list(self, platform: str) -> List[Dict[str, str]]:
        """Get plugin list for platform."""
        if "Win" in platform:
            return [
                {"name": "Chrome PDF Plugin", "filename": "internal-pdf-viewer"},
                {"name": "Chrome PDF Viewer", "filename": "mhjfbmdgcfjbbpaeojofohoefgiehjai"},
                {"name": "Native Client", "filename": "internal-nacl-plugin"}
            ]
        elif "Mac" in platform:
            return [
                {"name": "Chrome PDF Plugin", "filename": "internal-pdf-viewer"},
                {"name": "Chrome PDF Viewer", "filename": "mhjfbmdgcfjbbpaeojofohoefgiehjai"}
            ]
        else:
            return []
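A minimal sketch of how the profile data feeds fingerprint generation:

    profiles = FingerprintProfileService()
    profile = profiles.get_profile("mobile")                      # random MOBILE_PROFILES entry
    fonts = profiles.get_fonts_for_platform(profile["platform"])  # random 80-95% subset
    ua = profiles.get_user_agent_components(profile["platform"])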
356
infrastructure/services/fingerprint/fingerprint_rotation_service.py
Normal file
@@ -0,0 +1,356 @@
|
||||
"""
|
||||
Fingerprint Rotation Service - Handles fingerprint rotation and modification.
|
||||
"""
|
||||
|
||||
import random
|
||||
import copy
|
||||
from typing import List, Dict, Any, Optional
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
|
||||
from domain.entities.browser_fingerprint import (
|
||||
BrowserFingerprint, CanvasNoise, WebRTCConfig,
|
||||
HardwareConfig, NavigatorProperties
|
||||
)
|
||||
|
||||
|
||||
class RotationStrategy(Enum):
|
||||
"""Fingerprint rotation strategies."""
|
||||
MINIMAL = "minimal" # Only rotate most volatile attributes
|
||||
GRADUAL = "gradual" # Gradual changes over time
|
||||
COMPLETE = "complete" # Complete regeneration (new device)
|
||||
|
||||
class FingerprintRotationService:
    """Service for rotating and modifying fingerprints."""

    def __init__(self):
        self.rotation_history = {}  # Track rotation history per fingerprint

    def rotate_fingerprint(self,
                           fingerprint: BrowserFingerprint,
                           strategy: RotationStrategy = RotationStrategy.MINIMAL) -> BrowserFingerprint:
        """Rotate a fingerprint based on the specified strategy."""

        # Create a deep copy to avoid modifying the original
        rotated = self._deep_copy_fingerprint(fingerprint)

        # Track rotation
        self._track_rotation(fingerprint.fingerprint_id, strategy)

        # Apply rotation based on strategy
        if strategy == RotationStrategy.MINIMAL:
            self._apply_minimal_rotation(rotated)
        elif strategy == RotationStrategy.GRADUAL:
            self._apply_gradual_rotation(rotated)
        elif strategy == RotationStrategy.COMPLETE:
            self._apply_complete_rotation(rotated)

        # Update rotation timestamp
        rotated.last_rotated = datetime.now()

        return rotated

    def _apply_minimal_rotation(self, fingerprint: BrowserFingerprint) -> None:
        """Apply minimal rotation - only the most volatile attributes."""

        # Rotate canvas noise seed (most commonly changed)
        fingerprint.canvas_noise.seed = random.randint(1000, 99999)

        # Slight audio context variations
        fingerprint.audio_context_base_latency += random.uniform(-0.001, 0.001)
        fingerprint.audio_context_output_latency += random.uniform(-0.001, 0.001)

        # Update timezone offset if DST might have changed
        if self._should_update_dst(fingerprint):
            fingerprint.timezone_offset += 60 if fingerprint.timezone_offset > 0 else -60

        # Minor font list changes (add/remove 1-2 fonts)
        self._rotate_fonts_minimal(fingerprint)

    def _apply_gradual_rotation(self, fingerprint: BrowserFingerprint) -> None:
        """Apply gradual rotation - simulate natural changes over time."""

        # All minimal changes
        self._apply_minimal_rotation(fingerprint)

        # WebGL renderer might get driver updates
        if random.random() < 0.3:
            self._update_webgl_version(fingerprint)

        # Browser version update
        if random.random() < 0.4:
            self._update_browser_version(fingerprint)

        # Screen resolution might change (external monitor)
        if random.random() < 0.1:
            self._rotate_screen_resolution(fingerprint)

        # More significant font changes
        self._rotate_fonts_gradual(fingerprint)

    def _apply_complete_rotation(self, fingerprint: BrowserFingerprint) -> None:
        """Apply complete rotation - simulate a device change."""

        # Keep only static components if account-bound
        if fingerprint.account_bound and fingerprint.static_components:
            # Maintain the same device class but change specifics
            self._rotate_within_device_class(fingerprint)
        else:
            # Complete change - new device simulation
            self._rotate_to_new_device(fingerprint)

        # New canvas noise configuration
        fingerprint.canvas_noise = CanvasNoise(
            noise_level=random.choice([0.01, 0.02, 0.03]),
            seed=random.randint(1000, 99999),
            algorithm=random.choice(["gaussian", "uniform"])
        )

        # New audio context
        fingerprint.audio_context_base_latency = random.uniform(0.005, 0.02)
        fingerprint.audio_context_output_latency = random.uniform(0.01, 0.04)

        # Complete font list regeneration
        self._rotate_fonts_complete(fingerprint)

    def _rotate_fonts_minimal(self, fingerprint: BrowserFingerprint) -> None:
        """Minimal font rotation - add/remove 1-2 fonts."""
        current_fonts = fingerprint.font_list.copy()

        # Remove 1-2 random fonts
        if len(current_fonts) > 10:
            for _ in range(random.randint(0, 2)):
                if current_fonts:
                    current_fonts.remove(random.choice(current_fonts))

        # Add 1-2 new fonts
        possible_additions = ["Segoe UI Light", "Segoe UI Semibold", "Arial Narrow",
                              "Century Gothic", "Franklin Gothic Medium"]
        for _ in range(random.randint(0, 2)):
            new_font = random.choice(possible_additions)
            if new_font not in current_fonts:
                current_fonts.append(new_font)

        fingerprint.font_list = current_fonts

    def _rotate_fonts_gradual(self, fingerprint: BrowserFingerprint) -> None:
        """Gradual font rotation - change 20-30% of fonts."""
        current_fonts = fingerprint.font_list.copy()
        num_to_change = int(len(current_fonts) * random.uniform(0.2, 0.3))

        # Remove some fonts
        for _ in range(num_to_change // 2):
            if current_fonts:
                current_fonts.remove(random.choice(current_fonts))

        # Add new fonts
        base_fonts = self._get_base_fonts_for_platform(fingerprint.navigator_props.platform)
        for _ in range(num_to_change // 2):
            available = [f for f in base_fonts if f not in current_fonts]
            if available:
                current_fonts.append(random.choice(available))

        fingerprint.font_list = current_fonts

    def _rotate_fonts_complete(self, fingerprint: BrowserFingerprint) -> None:
        """Complete font rotation - regenerate the font list."""
        base_fonts = self._get_base_fonts_for_platform(fingerprint.navigator_props.platform)
        num_fonts = random.randint(int(len(base_fonts) * 0.7), int(len(base_fonts) * 0.9))
        fingerprint.font_list = random.sample(base_fonts, num_fonts)

    def _update_webgl_version(self, fingerprint: BrowserFingerprint) -> None:
        """Update WebGL renderer version (driver update)."""
        renderer = fingerprint.webgl_renderer

        # Update version numbers in the renderer string
        if "ANGLE" in renderer:
            # Update Direct3D version
            if "Direct3D11" in renderer:
                renderer = renderer.replace("Direct3D11", "Direct3D11.1")
            elif "Direct3D9" in renderer:
                renderer = renderer.replace("Direct3D9", "Direct3D11")

        # Update driver versions
        version_pattern = r'\d+\.\d+\.\d+\.\d+'
        match = re.search(version_pattern, renderer)
        if match:
            old_version = match.group()
            parts = old_version.split('.')
            # Increment the build component
            parts[2] = str(int(parts[2]) + random.randint(1, 10))
            new_version = '.'.join(parts)
            renderer = renderer.replace(old_version, new_version)

        fingerprint.webgl_renderer = renderer

    def _update_browser_version(self, fingerprint: BrowserFingerprint) -> None:
        """Update the browser version in the user agent."""
        user_agent = fingerprint.navigator_props.user_agent

        # Update Chrome version
        if "Chrome/" in user_agent:
            match = re.search(r'Chrome/(\d+)\.', user_agent)
            if match:
                current_version = int(match.group(1))
                new_version = current_version + random.randint(1, 3)
                user_agent = user_agent.replace(f'Chrome/{current_version}', f'Chrome/{new_version}')
                fingerprint.navigator_props.user_agent = user_agent

    def _rotate_screen_resolution(self, fingerprint: BrowserFingerprint) -> None:
        """Rotate screen resolution (external monitor change)."""
        common_resolutions = [
            (1920, 1080), (2560, 1440), (3840, 2160),  # 16:9
            (1920, 1200), (2560, 1600),                # 16:10
            (1366, 768), (1600, 900)                   # Laptop
        ]

        current = fingerprint.hardware_config.screen_resolution
        available = [res for res in common_resolutions if res != current]

        if available:
            fingerprint.hardware_config.screen_resolution = random.choice(available)

    def _rotate_within_device_class(self, fingerprint: BrowserFingerprint) -> None:
        """Rotate within the same device class (for account-bound fingerprints)."""
        static = fingerprint.static_components

        if static.device_type == "desktop":
            # Change to a different desktop configuration
            fingerprint.hardware_config.hardware_concurrency = random.choice([4, 8, 12, 16])
            fingerprint.hardware_config.device_memory = random.choice([8, 16, 32])
        elif static.device_type == "mobile":
            # Change to a different mobile configuration
            fingerprint.hardware_config.hardware_concurrency = random.choice([4, 6, 8])
            fingerprint.hardware_config.device_memory = random.choice([3, 4, 6])

        # Update renderer within the same GPU vendor
        if "Intel" in static.gpu_vendor:
            fingerprint.webgl_renderer = random.choice([
                "ANGLE (Intel HD Graphics 620)",
                "ANGLE (Intel UHD Graphics 630)",
                "ANGLE (Intel Iris Xe Graphics)"
            ])
        elif "NVIDIA" in static.gpu_vendor:
            fingerprint.webgl_renderer = random.choice([
                "ANGLE (NVIDIA GeForce GTX 1060)",
                "ANGLE (NVIDIA GeForce RTX 3060)",
                "ANGLE (NVIDIA GeForce GTX 1660)"
            ])

    def _rotate_to_new_device(self, fingerprint: BrowserFingerprint) -> None:
        """Rotate to a completely new device."""
        # This would typically regenerate most components;
        # for now, apply significant changes.

        # New hardware configuration
        fingerprint.hardware_config = HardwareConfig(
            hardware_concurrency=random.choice([4, 8, 12, 16]),
            device_memory=random.choice([4, 8, 16, 32]),
            max_touch_points=0,
            screen_resolution=random.choice([(1920, 1080), (2560, 1440)]),
            color_depth=random.choice([24, 32]),
            pixel_ratio=random.choice([1.0, 1.5, 2.0])
        )

        # New WebGL
        vendors = ["Intel Inc.", "NVIDIA Corporation", "AMD"]
        fingerprint.webgl_vendor = random.choice(vendors)

        if fingerprint.webgl_vendor == "Intel Inc.":
            fingerprint.webgl_renderer = "ANGLE (Intel HD Graphics)"
        elif fingerprint.webgl_vendor == "NVIDIA Corporation":
            fingerprint.webgl_renderer = "ANGLE (NVIDIA GeForce GTX)"
        else:
            fingerprint.webgl_renderer = "ANGLE (AMD Radeon)"

    def _should_update_dst(self, fingerprint: BrowserFingerprint) -> bool:
        """Check whether a DST update might be needed."""
        # Simple heuristic - a real implementation would check actual DST dates
        if fingerprint.last_rotated:
            days_since_rotation = (datetime.now() - fingerprint.last_rotated).days
            # DST changes roughly every 6 months
            return days_since_rotation > 180
        return False

    def _get_base_fonts_for_platform(self, platform: str) -> List[str]:
        """Get base fonts for a platform."""
        if "Win" in platform:
            return ["Arial", "Times New Roman", "Verdana", "Tahoma", "Segoe UI",
                    "Calibri", "Consolas", "Georgia", "Impact", "Comic Sans MS"]
        elif "Mac" in platform:
            return ["Arial", "Helvetica", "Times New Roman", "Georgia",
                    "Verdana", "Monaco", "Courier", "Geneva", "Futura"]
        else:
            return ["Arial", "Times New Roman", "Liberation Sans", "DejaVu Sans",
                    "Ubuntu", "Droid Sans", "Noto Sans"]

    def _track_rotation(self, fingerprint_id: str, strategy: RotationStrategy) -> None:
        """Track rotation history."""
        if fingerprint_id not in self.rotation_history:
            self.rotation_history[fingerprint_id] = []

        self.rotation_history[fingerprint_id].append({
            "timestamp": datetime.now(),
            "strategy": strategy.value
        })

        # Keep only the last 100 rotations
        self.rotation_history[fingerprint_id] = self.rotation_history[fingerprint_id][-100:]

    def _deep_copy_fingerprint(self, fingerprint: BrowserFingerprint) -> BrowserFingerprint:
        """Create a deep copy of a fingerprint."""
        # Manual deep copy to ensure all nested objects are copied
        return BrowserFingerprint(
            fingerprint_id=fingerprint.fingerprint_id,
            canvas_noise=CanvasNoise(
                noise_level=fingerprint.canvas_noise.noise_level,
                seed=fingerprint.canvas_noise.seed,
                algorithm=fingerprint.canvas_noise.algorithm
            ),
            webrtc_config=WebRTCConfig(
                enabled=fingerprint.webrtc_config.enabled,
                ice_servers=fingerprint.webrtc_config.ice_servers.copy(),
                local_ip_mask=fingerprint.webrtc_config.local_ip_mask,
                disable_webrtc=fingerprint.webrtc_config.disable_webrtc
            ),
            font_list=fingerprint.font_list.copy(),
            hardware_config=HardwareConfig(
                hardware_concurrency=fingerprint.hardware_config.hardware_concurrency,
                device_memory=fingerprint.hardware_config.device_memory,
                max_touch_points=fingerprint.hardware_config.max_touch_points,
                screen_resolution=fingerprint.hardware_config.screen_resolution,
                color_depth=fingerprint.hardware_config.color_depth,
                pixel_ratio=fingerprint.hardware_config.pixel_ratio
            ),
            navigator_props=NavigatorProperties(
                platform=fingerprint.navigator_props.platform,
                vendor=fingerprint.navigator_props.vendor,
                vendor_sub=fingerprint.navigator_props.vendor_sub,
                product=fingerprint.navigator_props.product,
                product_sub=fingerprint.navigator_props.product_sub,
                app_name=fingerprint.navigator_props.app_name,
                app_version=fingerprint.navigator_props.app_version,
                user_agent=fingerprint.navigator_props.user_agent,
                language=fingerprint.navigator_props.language,
                languages=fingerprint.navigator_props.languages.copy() if fingerprint.navigator_props.languages else [],
                online=fingerprint.navigator_props.online,
                do_not_track=fingerprint.navigator_props.do_not_track
            ),
            webgl_vendor=fingerprint.webgl_vendor,
            webgl_renderer=fingerprint.webgl_renderer,
            audio_context_base_latency=fingerprint.audio_context_base_latency,
            audio_context_output_latency=fingerprint.audio_context_output_latency,
            audio_context_sample_rate=fingerprint.audio_context_sample_rate,
            timezone=fingerprint.timezone,
            timezone_offset=fingerprint.timezone_offset,
            plugins=fingerprint.plugins.copy() if fingerprint.plugins else [],
            created_at=fingerprint.created_at,
            last_rotated=fingerprint.last_rotated,
            static_components=fingerprint.static_components,  # This is immutable
            rotation_seed=fingerprint.rotation_seed,
            account_bound=fingerprint.account_bound,
            platform_specific_config=fingerprint.platform_specific_config.copy() if fingerprint.platform_specific_config else {}
        )
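Rotation never mutates its input: the service deep-copies the fingerprint, mutates the copy, and stamps it. A short usage sketch, assuming fp is a BrowserFingerprint loaded elsewhere:

    service = FingerprintRotationService()
    rotated = service.rotate_fingerprint(fp, RotationStrategy.GRADUAL)

    assert rotated is not fp                 # the original is left untouched
    assert rotated.last_rotated is not None  # stamped by the service
    history = service.rotation_history[fp.fingerprint_id]  # capped at 100 entries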
245
infrastructure/services/fingerprint/fingerprint_validation_service.py
Normale Datei
@ -0,0 +1,245 @@
"""
Fingerprint Validation Service - Validates fingerprint consistency and quality.
"""

import logging
from typing import Dict, List, Tuple, Any
from datetime import datetime

from domain.entities.browser_fingerprint import BrowserFingerprint

logger = logging.getLogger("fingerprint_validation_service")


class FingerprintValidationService:
    """Service for validating fingerprint consistency and quality."""

    def validate_fingerprint(self, fingerprint: BrowserFingerprint) -> Tuple[bool, List[str]]:
        """Validate a fingerprint for consistency and realism."""
        errors = []

        # Hardware consistency
        errors.extend(self._validate_hardware_consistency(fingerprint))

        # Platform consistency
        errors.extend(self._validate_platform_consistency(fingerprint))

        # WebGL consistency
        errors.extend(self._validate_webgl_consistency(fingerprint))

        # Canvas consistency
        errors.extend(self._validate_canvas_consistency(fingerprint))

        # Timezone consistency
        errors.extend(self._validate_timezone_consistency(fingerprint))

        # Mobile-specific validation
        if self._is_mobile_fingerprint(fingerprint):
            errors.extend(self._validate_mobile_fingerprint(fingerprint))

        is_valid = len(errors) == 0
        return is_valid, errors

    def _validate_hardware_consistency(self, fp: BrowserFingerprint) -> List[str]:
        """Validate hardware configuration consistency."""
        errors = []

        # CPU core counts should be common values
        valid_cores = [1, 2, 4, 6, 8, 10, 12, 16, 20, 24, 32]
        if fp.hardware_config.hardware_concurrency not in valid_cores:
            errors.append(f"Unusual CPU core count: {fp.hardware_config.hardware_concurrency}")

        # Device memory should be reasonable
        valid_memory = [0.5, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, 64]
        if fp.hardware_config.device_memory not in valid_memory:
            errors.append(f"Unusual device memory: {fp.hardware_config.device_memory}GB")

        # Screen resolution should be common
        width, height = fp.hardware_config.screen_resolution
        common_resolutions = [
            (1366, 768), (1920, 1080), (2560, 1440), (3840, 2160),  # Desktop
            (375, 667), (375, 812), (414, 896), (390, 844),         # Mobile
            (1440, 900), (2560, 1600), (2880, 1800)                 # Mac
        ]
        if (width, height) not in common_resolutions:
            # Check if it's at least a reasonable aspect ratio
            aspect_ratio = width / height
            if aspect_ratio < 1.2 or aspect_ratio > 2.5:
                errors.append(f"Unusual screen resolution: {width}x{height}")

        return errors

    def _validate_platform_consistency(self, fp: BrowserFingerprint) -> List[str]:
        """Validate platform and navigator consistency."""
        errors = []

        platform = fp.navigator_props.platform
        user_agent = fp.navigator_props.user_agent

        # Check that the platform matches the user agent
        if "Win" in platform and "Windows" not in user_agent:
            errors.append("Platform claims Windows but user agent doesn't")
        elif "Mac" in platform and "Mac" not in user_agent:
            errors.append("Platform claims Mac but user agent doesn't")
        elif "Linux" in platform and "Android" not in user_agent and "Linux" not in user_agent:
            errors.append("Platform claims Linux but user agent doesn't match")

        # Check vendor consistency
        if fp.navigator_props.vendor == "Google Inc." and "Chrome" not in user_agent:
            errors.append("Google vendor but not Chrome browser")
        elif fp.navigator_props.vendor == "Apple Inc." and "Safari" not in user_agent:
            errors.append("Apple vendor but not Safari browser")

        return errors

    def _validate_webgl_consistency(self, fp: BrowserFingerprint) -> List[str]:
        """Validate WebGL renderer and vendor consistency."""
        errors = []

        vendor = fp.webgl_vendor
        renderer = fp.webgl_renderer

        # Common vendor/renderer pairs
        if "Intel" in vendor and "Intel" not in renderer:
            errors.append("WebGL vendor/renderer mismatch for Intel")
        elif "NVIDIA" in vendor and "NVIDIA" not in renderer and "GeForce" not in renderer:
            errors.append("WebGL vendor/renderer mismatch for NVIDIA")
        elif "AMD" in vendor and "AMD" not in renderer and "Radeon" not in renderer:
            errors.append("WebGL vendor/renderer mismatch for AMD")

        # Platform-specific WebGL
        if "Mac" in fp.navigator_props.platform:
            if "ANGLE" in renderer:
                errors.append("ANGLE renderer unexpected on Mac")
        elif "Win" in fp.navigator_props.platform:
            if "ANGLE" not in renderer and "Direct3D" not in renderer:
                errors.append("Expected ANGLE or Direct3D renderer on Windows")

        return errors

    def _validate_canvas_consistency(self, fp: BrowserFingerprint) -> List[str]:
        """Validate the canvas noise configuration."""
        errors = []

        noise_level = fp.canvas_noise.noise_level
        if noise_level < 0 or noise_level > 0.1:
            errors.append(f"Canvas noise level out of range: {noise_level}")

        algorithm = fp.canvas_noise.algorithm
        valid_algorithms = ["gaussian", "uniform", "perlin"]
        if algorithm not in valid_algorithms:
            errors.append(f"Invalid canvas noise algorithm: {algorithm}")

        return errors

    def _validate_timezone_consistency(self, fp: BrowserFingerprint) -> List[str]:
        """Validate timezone consistency with the locale."""
        errors = []

        timezone = fp.timezone
        language = fp.navigator_props.language

        # Basic timezone/language consistency
        tz_lang_map = {
            "Europe/Berlin": ["de", "de-DE"],
            "Europe/London": ["en-GB"],
            "America/New_York": ["en-US"],
            "Asia/Tokyo": ["ja", "ja-JP"]
        }

        for tz, langs in tz_lang_map.items():
            if timezone == tz:
                if not any(lang in language for lang in langs):
                    # A mismatch is not fatal here, just log it
                    logger.debug(f"Timezone {timezone} with language {language} might be unusual")

        return errors

    def _is_mobile_fingerprint(self, fp: BrowserFingerprint) -> bool:
        """Check whether the fingerprint is for a mobile device."""
        mobile_indicators = ["Android", "iPhone", "iPad", "Mobile", "Tablet"]
        platform = fp.navigator_props.platform
        user_agent = fp.navigator_props.user_agent

        return any(indicator in platform or indicator in user_agent for indicator in mobile_indicators)

    def _validate_mobile_fingerprint(self, fp: BrowserFingerprint) -> List[str]:
        """Validate mobile-specific fingerprint attributes."""
        errors = []

        # Mobile devices should report touch points
        if fp.hardware_config.max_touch_points == 0:
            errors.append("Mobile device with no touch points")

        # Mobile screens should be portrait or square-ish
        width, height = fp.hardware_config.screen_resolution
        if width > height:
            errors.append("Mobile device with landscape resolution")

        # Mobile devices typically have less memory
        if fp.hardware_config.device_memory > 8:
            errors.append("Mobile device with unusually high memory")

        return errors

    def calculate_fingerprint_quality_score(self, fp: BrowserFingerprint) -> float:
        """Calculate a quality score for the fingerprint (0.0 to 1.0)."""
        score = 1.0
        _, errors = self.validate_fingerprint(fp)

        # Deduct points for each error
        score -= len(errors) * 0.1

        # Bonus points for completeness
        if fp.static_components:
            score += 0.1
        if fp.platform_specific_config:
            score += 0.05
        if fp.rotation_seed:
            score += 0.05

        # Ensure the score stays between 0 and 1
        return max(0.0, min(1.0, score))

    def assess_fingerprint_risk(self, fp: BrowserFingerprint) -> Dict[str, Any]:
        """Assess the risk level of using this fingerprint."""
        risk_factors = []
        risk_score = 0.0

        # Check age
        if fp.created_at:
            age_days = (datetime.now() - fp.created_at).days
            if age_days > 30:
                risk_factors.append("Fingerprint is over 30 days old")
                risk_score += 0.2

        # Check validation
        is_valid, errors = self.validate_fingerprint(fp)
        if not is_valid:
            risk_factors.extend(errors)
            risk_score += len(errors) * 0.1

        # Check for common/overused values
        if fp.webgl_renderer == "ANGLE (Intel HD Graphics)":
            risk_factors.append("Very common WebGL renderer")
            risk_score += 0.1

        # Determine the risk level
        if risk_score < 0.3:
            risk_level = "low"
        elif risk_score < 0.6:
            risk_level = "medium"
        else:
            risk_level = "high"

        return {
            "risk_level": risk_level,
            "risk_score": min(1.0, risk_score),
            "risk_factors": risk_factors
        }
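Validation, quality scoring, and risk assessment are three views over the same checks: a boolean/errors pair for gating, a 0..1 score for ranking, and a risk dict for policy decisions. A sketch, assuming fp is a BrowserFingerprint produced by one of the services above:

    validator = FingerprintValidationService()

    is_valid, errors = validator.validate_fingerprint(fp)
    quality = validator.calculate_fingerprint_quality_score(fp)  # 0.0 .. 1.0
    risk = validator.assess_fingerprint_risk(fp)

    if not is_valid or risk["risk_level"] == "high":
        # e.g. trigger a COMPLETE rotation before using this fingerprint
        print(errors, risk["risk_factors"])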
160
infrastructure/services/fingerprint/timezone_location_service.py
Normale Datei
@ -0,0 +1,160 @@
"""
Timezone Location Service - Manages timezone and location consistency.
"""

import random
from typing import Dict, Optional, Tuple, Any


class TimezoneLocationService:
    """Service for managing timezone and location relationships."""

    # Offsets follow the JavaScript getTimezoneOffset() convention:
    # minutes behind UTC, so zones east of UTC are negative.
    TIMEZONE_MAPPING = {
        "de": {
            # Germany has a single IANA zone; "Europe/Munich" (as in the
            # original draft) is not a valid tz database name.
            "timezones": ["Europe/Berlin"],
            "offset": -60,       # UTC+1
            "dst_offset": -120   # UTC+2 during DST
        },
        "us": {
            "timezones": ["America/New_York", "America/Chicago", "America/Los_Angeles", "America/Denver"],
            "offset": 300,       # UTC-5 (New York)
            "dst_offset": 240    # UTC-4 during DST
        },
        "uk": {
            "timezones": ["Europe/London"],
            "offset": 0,         # UTC
            "dst_offset": -60    # UTC+1 during DST
        },
        "jp": {
            "timezones": ["Asia/Tokyo"],
            "offset": -540,      # UTC+9
            "dst_offset": -540   # No DST
        },
        "au": {
            "timezones": ["Australia/Sydney", "Australia/Melbourne"],
            "offset": -600,      # UTC+10
            "dst_offset": -660   # UTC+11 during DST
        }
    }

    CITY_TO_TIMEZONE = {
        "Berlin": "Europe/Berlin",
        "Munich": "Europe/Berlin",
        "Frankfurt": "Europe/Berlin",
        "Hamburg": "Europe/Berlin",
        "New York": "America/New_York",
        "Los Angeles": "America/Los_Angeles",
        "Chicago": "America/Chicago",
        "London": "Europe/London",
        "Tokyo": "Asia/Tokyo",
        "Sydney": "Australia/Sydney"
    }

    LANGUAGE_TO_LOCATION = {
        "de-DE": ["de", "at", "ch"],
        "en-US": ["us"],
        "en-GB": ["uk"],
        "ja-JP": ["jp"],
        "en-AU": ["au"],
        "fr-FR": ["fr"],
        "es-ES": ["es"],
        "it-IT": ["it"]
    }

    def get_timezone_for_location(self, location: Optional[str] = None) -> Tuple[str, int]:
        """Get timezone and offset for a location."""
        if not location:
            location = random.choice(list(self.TIMEZONE_MAPPING.keys()))

        location_lower = location.lower()

        # Check if it's a city
        for city, tz in self.CITY_TO_TIMEZONE.items():
            if city.lower() in location_lower:
                # Find the offset from the mapping
                for data in self.TIMEZONE_MAPPING.values():
                    if tz in data["timezones"]:
                        return tz, data["offset"]
                return tz, 0

        # Check if it's a country code
        if location_lower in self.TIMEZONE_MAPPING:
            data = self.TIMEZONE_MAPPING[location_lower]
            timezone = random.choice(data["timezones"])
            return timezone, data["offset"]

        # Default to Berlin
        return "Europe/Berlin", -60

    def get_location_for_language(self, language: str) -> str:
        """Get a suitable location for a language."""
        if language in self.LANGUAGE_TO_LOCATION:
            locations = self.LANGUAGE_TO_LOCATION[language]
            return random.choice(locations)

        # Extract the base language
        base_lang = language.split('-')[0]
        for lang, locations in self.LANGUAGE_TO_LOCATION.items():
            if lang.startswith(base_lang):
                return random.choice(locations)

        # Default
        return "us"

    def validate_timezone_consistency(self, timezone: str, language: str) -> bool:
        """Validate whether a timezone is consistent with a language."""
        expected_location = self.get_location_for_language(language)

        # Get timezones for the expected location
        if expected_location in self.TIMEZONE_MAPPING:
            expected_timezones = self.TIMEZONE_MAPPING[expected_location]["timezones"]
            return timezone in expected_timezones

        # If we can't determine, assume it's valid
        return True

    def get_locale_for_timezone(self, timezone: str) -> str:
        """Get an appropriate locale for a timezone."""
        tz_to_locale = {
            "Europe/Berlin": "de-DE",
            "America/New_York": "en-US",
            "America/Los_Angeles": "en-US",
            "America/Chicago": "en-US",
            "Europe/London": "en-GB",
            "Asia/Tokyo": "ja-JP",
            "Australia/Sydney": "en-AU"
        }

        return tz_to_locale.get(timezone, "en-US")

    def calculate_timezone_offset(self, timezone: str, is_dst: bool = False) -> int:
        """Calculate the timezone offset in minutes from UTC."""
        # Find the timezone in our mapping
        for data in self.TIMEZONE_MAPPING.values():
            if timezone in data["timezones"]:
                return data["dst_offset"] if is_dst else data["offset"]

        # Default to UTC
        return 0

    def get_consistent_location_data(self, proxy_location: Optional[str] = None) -> Dict[str, Any]:
        """Get consistent location data including timezone, locale, and language."""
        timezone, offset = self.get_timezone_for_location(proxy_location)
        locale = self.get_locale_for_timezone(timezone)

        # Extract the language from the locale
        language = locale.split('-')[0]
        languages = [locale, language]

        # Add fallback languages
        if language != "en":
            languages.extend(["en-US", "en"])

        return {
            "timezone": timezone,
            "timezone_offset": offset,
            "locale": locale,
            "language": language,
            "languages": languages
        }
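get_consistent_location_data is the intended entry point: it derives the locale and language list from the timezone, so the three values cannot drift apart. Typical use with a proxy whose exit city is known:

    tz_service = TimezoneLocationService()
    location = tz_service.get_consistent_location_data("Berlin")
    # -> {'timezone': 'Europe/Berlin', 'timezone_offset': -60,
    #     'locale': 'de-DE', 'language': 'de',
    #     'languages': ['de-DE', 'de', 'en-US', 'en']}

    assert tz_service.validate_timezone_consistency(
        location["timezone"], location["locale"])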
330
infrastructure/services/fingerprint_cache_service.py
Normale Datei
@ -0,0 +1,330 @@
"""
Fingerprint Cache Service - Thread-safe caching for race condition prevention
Non-intrusive caching layer that enhances existing fingerprint logic
"""

import threading
import time
from typing import Optional, Dict, Any, Callable, TypeVar, Generic
from datetime import datetime, timedelta
import logging

from domain.entities.browser_fingerprint import BrowserFingerprint

logger = logging.getLogger(__name__)

T = TypeVar('T')


class ThreadSafeCache(Generic[T]):
    """
    Thread-safe cache implementation with TTL and LRU eviction
    """

    def __init__(self, max_size: int = 1000, default_ttl: timedelta = timedelta(hours=1)):
        self.max_size = max_size
        self.default_ttl = default_ttl
        self._cache: Dict[str, Dict[str, Any]] = {}
        self._access_times: Dict[str, datetime] = {}
        self._locks: Dict[str, threading.RLock] = {}
        self._main_lock = threading.RLock()

        # Callbacks for automatic cleanup of entries (reserved for future use)
        self._cleanup_callbacks: Dict[str, Callable] = {}

    def get(self, key: str) -> Optional[T]:
        """Fetch a value from the cache"""
        with self._main_lock:
            if key not in self._cache:
                return None

            cache_entry = self._cache[key]

            # Check TTL
            if self._is_expired(cache_entry):
                self._remove_key(key)
                return None

            # Update the access time for LRU
            self._access_times[key] = datetime.now()
            return cache_entry['value']

    def put(self, key: str, value: T, ttl: Optional[timedelta] = None) -> None:
        """Store a value in the cache"""
        with self._main_lock:
            # Check the cache size and evict the LRU entry if necessary
            if len(self._cache) >= self.max_size and key not in self._cache:
                self._evict_lru()

            expiry = datetime.now() + (ttl or self.default_ttl)

            self._cache[key] = {
                'value': value,
                'created_at': datetime.now(),
                'expires_at': expiry
            }
            self._access_times[key] = datetime.now()

    def get_or_compute(self, key: str, compute_func: Callable[[], T],
                       ttl: Optional[timedelta] = None) -> T:
        """
        Fetch a value from the cache or compute it thread-safely
        """
        # Fast path - cache hit without per-key locking
        cached_value = self.get(key)
        if cached_value is not None:
            return cached_value

        # Slow path - with a per-key lock
        with self._main_lock:
            # Create the per-key lock if it does not exist yet
            if key not in self._locks:
                self._locks[key] = threading.RLock()
            key_lock = self._locks[key]

        # Work outside the main lock for better parallelism
        with key_lock:
            # Double-checked locking - the value may have been set in the meantime
            cached_value = self.get(key)
            if cached_value is not None:
                return cached_value

            # Compute the value
            logger.debug(f"Computing value for cache key: {key}")
            start_time = time.time()

            try:
                computed_value = compute_func()
                computation_time = time.time() - start_time

                # Only cache if the computation succeeded
                if computed_value is not None:
                    self.put(key, computed_value, ttl)
                    logger.debug(f"Cached value for key {key} (computation took {computation_time:.3f}s)")

                return computed_value

            except Exception as e:
                logger.error(f"Failed to compute value for cache key {key}: {e}")
                raise

    def invalidate(self, key: str) -> bool:
        """Remove a key from the cache"""
        with self._main_lock:
            if key in self._cache:
                self._remove_key(key)
                return True
            return False

    def clear(self) -> None:
        """Clear the entire cache"""
        with self._main_lock:
            self._cache.clear()
            self._access_times.clear()
            self._locks.clear()

    def get_stats(self) -> Dict[str, Any]:
        """Return cache statistics"""
        with self._main_lock:
            total_entries = len(self._cache)
            expired_entries = sum(1 for entry in self._cache.values() if self._is_expired(entry))

            return {
                'total_entries': total_entries,
                'active_entries': total_entries - expired_entries,
                'expired_entries': expired_entries,
                'max_size': self.max_size,
                'active_locks': len(self._locks),
                'cache_keys': list(self._cache.keys())
            }

    def _is_expired(self, cache_entry: Dict[str, Any]) -> bool:
        """Check whether a cache entry has expired"""
        return datetime.now() > cache_entry['expires_at']

    def _evict_lru(self) -> None:
        """Evict the least recently used entry"""
        if not self._access_times:
            return

        lru_key = min(self._access_times.keys(), key=lambda k: self._access_times[k])
        self._remove_key(lru_key)
        logger.debug(f"Evicted LRU cache entry: {lru_key}")

    def _remove_key(self, key: str) -> None:
        """Remove a key and all associated data"""
        self._cache.pop(key, None)
        self._access_times.pop(key, None)

        # The per-key lock is deliberately not removed here - it may still be
        # held by a concurrent get_or_compute; locks are only dropped in clear().
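get_or_compute is what prevents a thundering herd: concurrent callers for the same key serialize on a per-key lock and only the first one pays for the computation, while the double-check inside the lock turns the rest into cache hits. A small self-contained demonstration:

    import threading
    import time

    cache = ThreadSafeCache[int](max_size=10)
    calls = []

    def expensive() -> int:
        calls.append(1)      # count actual computations
        time.sleep(0.1)
        return 42

    threads = [threading.Thread(target=lambda: cache.get_or_compute("answer", expensive))
               for _ in range(8)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    assert cache.get("answer") == 42
    assert len(calls) == 1   # computed once despite 8 concurrent callers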
class FingerprintCache:
    """
    Specialized cache for browser fingerprints with account binding
    """

    def __init__(self, max_size: int = 500, fingerprint_ttl: timedelta = timedelta(hours=24)):
        self.cache = ThreadSafeCache[BrowserFingerprint](max_size, fingerprint_ttl)
        self.generation_stats = {
            'cache_hits': 0,
            'cache_misses': 0,
            'generations': 0,
            'race_conditions_prevented': 0
        }
        self._stats_lock = threading.RLock()

    def get_account_fingerprint(self, account_id: str,
                                generator_func: Callable[[], BrowserFingerprint],
                                ttl: Optional[timedelta] = None) -> BrowserFingerprint:
        """
        Fetch or create an account-bound fingerprint thread-safely
        """
        cache_key = f"account_{account_id}"

        def generate_and_track():
            with self._stats_lock:
                self.generation_stats['generations'] += 1
                self.generation_stats['cache_misses'] += 1

            logger.info(f"Generating new fingerprint for account {account_id}")
            return generator_func()

        # Cache access with statistics tracking
        fingerprint = self.cache.get(cache_key)
        if fingerprint is not None:
            with self._stats_lock:
                self.generation_stats['cache_hits'] += 1
            logger.debug(f"Cache hit for account fingerprint: {account_id}")
            return fingerprint

        # Thread-safe generation
        return self.cache.get_or_compute(cache_key, generate_and_track, ttl)

    def get_anonymous_fingerprint(self, session_id: str,
                                  generator_func: Callable[[], BrowserFingerprint],
                                  ttl: Optional[timedelta] = None) -> BrowserFingerprint:
        """
        Fetch or create a session-bound anonymous fingerprint
        """
        cache_key = f"session_{session_id}"
        return self.cache.get_or_compute(cache_key, generator_func, ttl)

    def get_platform_fingerprint(self, platform: str, profile_type: str,
                                 generator_func: Callable[[], BrowserFingerprint],
                                 ttl: Optional[timedelta] = None) -> BrowserFingerprint:
        """
        Fetch or create a platform-specific fingerprint
        """
        cache_key = f"platform_{platform}_{profile_type}"
        return self.cache.get_or_compute(cache_key, generator_func, ttl)

    def invalidate_account_fingerprint(self, account_id: str) -> bool:
        """Invalidate an account fingerprint in the cache"""
        return self.cache.invalidate(f"account_{account_id}")

    def invalidate_session_fingerprint(self, session_id: str) -> bool:
        """Invalidate a session fingerprint in the cache"""
        return self.cache.invalidate(f"session_{session_id}")

    def get_cache_stats(self) -> Dict[str, Any]:
        """Return detailed cache statistics"""
        with self._stats_lock:
            stats = self.generation_stats.copy()

        cache_stats = self.cache.get_stats()

        # Calculate the hit rate
        total_requests = stats['cache_hits'] + stats['cache_misses']
        hit_rate = stats['cache_hits'] / total_requests if total_requests > 0 else 0

        return {
            **stats,
            **cache_stats,
            'hit_rate': hit_rate,
            'total_requests': total_requests
        }

    def cleanup_expired(self) -> int:
        """Manually clean up expired entries"""
        with self.cache._main_lock:
            expired_keys = [
                key for key, entry in self.cache._cache.items()
                if self.cache._is_expired(entry)
            ]

            for key in expired_keys:
                self.cache._remove_key(key)

        removed_count = len(expired_keys)
        if removed_count > 0:
            logger.info(f"Cleaned up {removed_count} expired fingerprint cache entries")

        return removed_count


# Global cache instance - singleton pattern
_global_fingerprint_cache: Optional[FingerprintCache] = None
_cache_init_lock = threading.RLock()


def get_fingerprint_cache() -> FingerprintCache:
    """
    Return the global fingerprint cache instance (singleton)
    """
    global _global_fingerprint_cache

    if _global_fingerprint_cache is None:
        with _cache_init_lock:
            if _global_fingerprint_cache is None:
                _global_fingerprint_cache = FingerprintCache()
                logger.info("Initialized global fingerprint cache")

    return _global_fingerprint_cache


def reset_fingerprint_cache() -> None:
    """
    Reset the global cache (for tests)
    """
    global _global_fingerprint_cache
    with _cache_init_lock:
        if _global_fingerprint_cache is not None:
            _global_fingerprint_cache.cache.clear()
            _global_fingerprint_cache = None
            logger.info("Reset global fingerprint cache")


class CachedFingerprintMixin:
    """
    Mixin class that can be added to existing fingerprint services
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._cache = get_fingerprint_cache()

    def get_cached_account_fingerprint(self, account_id: str) -> Optional[BrowserFingerprint]:
        """Fetch an account fingerprint from the cache without generating one"""
        return self._cache.cache.get(f"account_{account_id}")

    def create_cached_account_fingerprint(self, account_id: str) -> BrowserFingerprint:
        """
        Create an account fingerprint with cache integration.
        Subclasses must provide create_account_fingerprint.
        """
        # Bind super() here: the zero-argument form would fail inside the
        # nested closure below, whose frame has no self argument.
        parent = super()

        def generator():
            # Delegate to the original implementation
            if hasattr(parent, 'create_account_fingerprint'):
                return parent.create_account_fingerprint(account_id)
            else:
                raise NotImplementedError("Subclass must implement create_account_fingerprint")

        return self._cache.get_account_fingerprint(account_id, generator)

    def get_cache_statistics(self) -> Dict[str, Any]:
        """Return cache statistics"""
        return self._cache.get_cache_stats()
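Account binding is a key-prefix convention (account_<id>, session_<id>, platform_<platform>_<profile>), which is what makes invalidation targeted rather than global. A usage sketch, with build_fingerprint standing in for whatever generator the caller supplies (hypothetical name):

    cache = get_fingerprint_cache()  # process-wide singleton

    fp = cache.get_account_fingerprint("acct-123", build_fingerprint)
    fp_again = cache.get_account_fingerprint("acct-123", build_fingerprint)
    assert fp is fp_again            # second call is a hit; the generator runs once

    cache.invalidate_account_fingerprint("acct-123")  # force regeneration next time
    print(cache.get_cache_stats()["hit_rate"])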
320
infrastructure/services/instagram_rate_limit_service.py
Normale Datei
@ -0,0 +1,320 @@
"""
Instagram Rate Limit Service - Concrete rate limiting implementation for Instagram
"""

import time
import random
import logging
from typing import Dict, Any, Optional
from datetime import datetime, timedelta
from collections import defaultdict

from domain.services.rate_limit_service import IRateLimitService
from domain.entities.rate_limit_policy import RateLimitPolicy
from domain.value_objects.action_timing import ActionTiming, ActionType
from infrastructure.repositories.rate_limit_repository import RateLimitRepository

logger = logging.getLogger("instagram_rate_limit_service")


class EmailVerificationTiming:
    """Extended wait times for e-mail verification"""
    DEFAULT_TIMEOUT = 300  # 5 minutes instead of 2
    RETRY_INTERVAL = 10    # Check every 10 seconds
    MAX_RETRIES = 30       # At most 30 attempts

    # Adaptive wait times per e-mail provider
    PROVIDER_DELAYS = {
        "gmail.com": 180,    # 3 minutes
        "outlook.com": 240,  # 4 minutes
        "yahoo.com": 300,    # 5 minutes
        "custom": 360,       # 6 minutes for custom domains
        "default": 300       # 5 minutes by default
    }

    @classmethod
    def get_timeout_for_provider(cls, email_domain: str) -> int:
        """Return the optimal wait time for an e-mail provider"""
        for provider, timeout in cls.PROVIDER_DELAYS.items():
            if provider in email_domain:
                return timeout
        return cls.PROVIDER_DELAYS["default"]

    @classmethod
    def should_retry(cls, attempt: int, elapsed_time: float) -> bool:
        """Decide whether to keep waiting for the verification e-mail"""
        if attempt < 5:
            return True  # Always retry for the first 5 attempts
        elif attempt < 15:
            return elapsed_time < 180  # Up to 3 minutes
        else:
            return elapsed_time < cls.DEFAULT_TIMEOUT
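The timing class only supplies numbers; the polling loop belongs to the caller. A sketch of how the constants are meant to combine, with check_inbox standing in for the project's actual mailbox client (hypothetical name):

    import time

    def wait_for_verification_mail(email: str) -> bool:
        domain = email.split('@')[1]
        timeout = EmailVerificationTiming.get_timeout_for_provider(domain)
        start = time.time()
        attempt = 0
        while (time.time() - start < timeout
               and EmailVerificationTiming.should_retry(attempt, time.time() - start)):
            if check_inbox(email):  # hypothetical mailbox check
                return True
            attempt += 1
            time.sleep(EmailVerificationTiming.RETRY_INTERVAL)
        return False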
class InstagramRateLimitService(IRateLimitService):
    """Instagram-specific rate limit implementation"""

    def __init__(self, repository: RateLimitRepository = None):
        self.repository = repository or RateLimitRepository()
        self.last_action_time = defaultdict(lambda: 0.0)
        self.action_count = defaultdict(int)
        self.error_count = defaultdict(int)

        # Load stored policies or fall back to defaults
        self._load_or_init_policies()

        # Helper for e-mail verification timing
        self.email_timing = EmailVerificationTiming()

    def _load_or_init_policies(self):
        """Load policies from the DB or initialize them with defaults"""
        self.policies = self.repository.get_all_policies()

        # Instagram-specific default policies
        defaults = {
            ActionType.PAGE_LOAD: RateLimitPolicy(1.0, 3.0, True, 1.5, 3),
            ActionType.FORM_FILL: RateLimitPolicy(0.5, 2.0, True, 1.3, 3),
            ActionType.BUTTON_CLICK: RateLimitPolicy(0.8, 2.5, True, 1.4, 3),
            ActionType.INPUT_TYPE: RateLimitPolicy(0.1, 0.5, True, 1.2, 5),
            ActionType.EMAIL_CHECK: RateLimitPolicy(10.0, 30.0, True, 1.5, 30),
            ActionType.CAPTCHA_SOLVE: RateLimitPolicy(5.0, 15.0, True, 2.0, 3),
            ActionType.REGISTRATION_START: RateLimitPolicy(2.0, 5.0, True, 1.5, 1),
            ActionType.API_REQUEST: RateLimitPolicy(1.0, 3.0, True, 2.0, 3),
        }

        # Add any missing defaults
        for action_type, default_policy in defaults.items():
            if action_type not in self.policies:
                self.policies[action_type] = default_policy
                self.repository.save_policy(action_type, default_policy)

    def calculate_delay(self, action_type: ActionType, context: Optional[Dict[str, Any]] = None) -> float:
        """Calculate an adaptive delay based on context"""
        policy = self.get_policy(action_type)

        # Base delay
        base_delay = policy.min_delay

        # Adjustment factors
        time_factor = self._calculate_time_factor()
        error_factor = self._calculate_error_factor(action_type)
        load_factor = self._calculate_load_factor(action_type)

        # Special handling for e-mail verification
        if action_type == ActionType.EMAIL_CHECK and context:
            email = context.get('email', '')
            if '@' in email:
                domain = email.split('@')[1]
                provider_delay = self.email_timing.get_timeout_for_provider(domain)
                base_delay = provider_delay / self.email_timing.MAX_RETRIES

        # Adaptive calculation
        if policy.adaptive:
            optimal_delay = base_delay * time_factor * error_factor * load_factor
        else:
            optimal_delay = base_delay

        # Clamp to the policy's min/max
        optimal_delay = max(policy.min_delay, min(optimal_delay, policy.max_delay))

        # Add human variance (±20%)
        variance = random.uniform(0.8, 1.2)
        final_delay = optimal_delay * variance

        logger.debug(f"Calculated delay for {action_type.value}: {final_delay:.2f}s "
                     f"(time_factor={time_factor:.2f}, error_factor={error_factor:.2f}, "
                     f"load_factor={load_factor:.2f})")

        return final_delay

    def _calculate_time_factor(self) -> float:
        """Calculate a factor based on the time of day"""
        hour = datetime.now().hour

        # At night (0-6): slower
        if 0 <= hour < 6:
            return 1.5
        # Peak hours (18-22): slower
        elif 18 <= hour < 22:
            return 1.3
        # Normal hours
        else:
            return 1.0

    def _calculate_error_factor(self, action_type: ActionType) -> float:
        """Calculate a factor based on recent errors"""
        recent_errors = self.error_count.get(action_type, 0)

        if recent_errors == 0:
            return 1.0
        elif recent_errors < 3:
            return 1.2
        elif recent_errors < 5:
            return 1.5
        else:
            return 2.0

    def _calculate_load_factor(self, action_type: ActionType) -> float:
        """Calculate a factor based on recent activity"""
        recent_actions = self.action_count.get(action_type, 0)

        if recent_actions < 10:
            return 1.0
        elif recent_actions < 50:
            return 1.1
        elif recent_actions < 100:
            return 1.3
        else:
            return 1.5

    def record_action(self, timing: ActionTiming) -> None:
        """Record an action and adapt the strategy"""
        # Persist via the repository
        self.repository.save_timing(timing)

        # Update local statistics
        self.last_action_time[timing.action_type] = time.time()
        self.action_count[timing.action_type] += 1

        if not timing.success:
            self.error_count[timing.action_type] += 1
        else:
            # Decay the error count on success
            self.error_count[timing.action_type] = max(0, self.error_count[timing.action_type] - 1)

        # Adaptive policy adjustment
        if timing.action_type in self.policies:
            self._adapt_policy(timing)

    def _adapt_policy(self, timing: ActionTiming):
        """Adjust the policy based on the recorded timing"""
        policy = self.policies[timing.action_type]

        if not policy.adaptive:
            return

        # Fetch statistics for the last hour
        stats = self.repository.get_statistics(
            timing.action_type,
            timedelta(hours=1)
        )

        if not stats or 'avg_duration_ms' not in stats:
            return

        avg_duration = stats['avg_duration_ms'] / 1000.0
        success_rate = stats.get('success_rate', 1.0)

        # Adjust the min/max delays
        if success_rate < 0.8:  # Many failures
            # Increase delays
            new_min = min(policy.min_delay * 1.1, policy.max_delay)
            new_max = policy.max_delay * 1.1

            policy.min_delay = new_min
            policy.max_delay = new_max

            logger.info(f"Increased delays for {timing.action_type.value} due to low success rate")
        elif success_rate > 0.95 and avg_duration < policy.min_delay:
            # Carefully decrease delays
            new_min = max(policy.min_delay * 0.95, 0.1)
            new_max = max(policy.max_delay * 0.95, new_min * 2)

            policy.min_delay = new_min
            policy.max_delay = new_max

            logger.info(f"Decreased delays for {timing.action_type.value} due to high success rate")

        # Persist the adjusted policy
        self.repository.save_policy(timing.action_type, policy)

    def detect_rate_limit(self, response: Any) -> bool:
        """Detect Instagram rate limits"""
        # String-based detection for HTML content
        if isinstance(response, str):
            rate_limit_indicators = [
                "Bitte warte einige Minuten",
                "Please wait a few minutes",
                "Try again later",
                "Versuche es später erneut",
                "too many requests",
                "zu viele Anfragen",
                "rate limit",
                "temporarily blocked",
                "vorübergehend gesperrt"
            ]

            response_lower = response.lower()
            return any(indicator.lower() in response_lower for indicator in rate_limit_indicators)

        # Playwright Page object
        elif hasattr(response, 'content'):
            try:
                content = response.content()
                return self.detect_rate_limit(content)
            except Exception:
                # The page may already be closed or mid-navigation
                pass

        # HTTP response status
        elif hasattr(response, 'status'):
            return response.status in [429, 420]  # Rate limit status codes

        return False

    def get_policy(self, action_type: ActionType) -> RateLimitPolicy:
        """Get the policy for an action type"""
        return self.policies.get(action_type, RateLimitPolicy(1.0, 3.0))

    def update_policy(self, action_type: ActionType, policy: RateLimitPolicy) -> None:
        """Update a policy"""
        self.policies[action_type] = policy
        self.repository.save_policy(action_type, policy)

    def get_statistics(self, action_type: Optional[ActionType] = None,
                       timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """Fetch statistics"""
        stats = self.repository.get_statistics(action_type, timeframe)

        # Add current session statistics
        if action_type:
            stats['current_session'] = {
                'action_count': self.action_count.get(action_type, 0),
                'error_count': self.error_count.get(action_type, 0),
                'last_action': self.last_action_time.get(action_type, 0)
            }

        return stats

    def reset_statistics(self) -> None:
        """Reset local statistics"""
        self.action_count.clear()
        self.error_count.clear()
        self.last_action_time.clear()

    def is_action_allowed(self, action_type: ActionType) -> bool:
        """Check whether an action is currently allowed"""
        last_time = self.last_action_time.get(action_type, 0)
        if last_time == 0:
            return True

        policy = self.get_policy(action_type)
        elapsed = time.time() - last_time

        return elapsed >= policy.min_delay

    def wait_if_needed(self, action_type: ActionType) -> float:
        """Sleep if necessary and return the time waited"""
        last_time = self.last_action_time.get(action_type, 0)
        if last_time == 0:
            return 0.0

        delay = self.calculate_delay(action_type)
        elapsed = time.time() - last_time

        if elapsed < delay:
            wait_time = delay - elapsed
            logger.debug(f"Waiting {wait_time:.2f}s for {action_type.value}")
            time.sleep(wait_time)
            return wait_time

        return 0.0
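End to end, a caller lets the service pace the action, performs it, then reports the outcome so the adaptive policy can react. A sketch; the ActionTiming field names are assumed from their usage above (its constructor lives in domain/value_objects/action_timing.py, outside this excerpt):

    service = InstagramRateLimitService()

    waited = service.wait_if_needed(ActionType.BUTTON_CLICK)  # sleeps if the last click was too recent
    success = click_signup_button()                           # hypothetical browser action

    service.record_action(ActionTiming(  # field names assumed, see note above
        action_type=ActionType.BUTTON_CLICK,
        duration_ms=250,
        success=success,
    ))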
281
infrastructure/services/structured_analytics_service.py
Normale Datei
@ -0,0 +1,281 @@
|
||||
"""
|
||||
Structured Analytics Service - Konkrete Implementation für Analytics
|
||||
"""
|
||||
|
||||
import logging
|
||||
import json
|
||||
from typing import List, Optional, Dict, Any, Union
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
|
||||
from domain.services.analytics_service import IAnalyticsService
|
||||
from domain.entities.account_creation_event import AccountCreationEvent
|
||||
from domain.entities.error_event import ErrorEvent
|
||||
from domain.value_objects.error_summary import ErrorSummary
|
||||
from domain.value_objects.report import Report, ReportType, Metric, PlatformStats, TimeSeriesData
|
||||
from infrastructure.repositories.analytics_repository import AnalyticsRepository
|
||||
|
||||
logger = logging.getLogger("structured_analytics_service")
|
||||
|
||||
|
||||
class StructuredAnalyticsService(IAnalyticsService):
|
||||
"""Konkrete Implementation des Analytics Service"""
|
||||
|
||||
def __init__(self, repository: AnalyticsRepository = None):
|
||||
self.repository = repository or AnalyticsRepository()
|
||||
|
||||
def log_event(self, event: Union[AccountCreationEvent, ErrorEvent, Any]) -> None:
|
||||
"""Loggt ein Event für spätere Analyse"""
|
||||
if isinstance(event, AccountCreationEvent):
|
||||
self.repository.save_account_creation_event(event)
|
||||
logger.debug(f"Logged account creation event {event.event_id}")
|
||||
elif isinstance(event, ErrorEvent):
|
||||
self.repository.save_error_event(event)
|
||||
logger.debug(f"Logged error event {event.error_id}")
|
||||
else:
|
||||
logger.warning(f"Unknown event type: {type(event)}")
|
||||
|
||||
def get_success_rate(self,
|
||||
timeframe: Optional[timedelta] = None,
|
||||
platform: Optional[str] = None) -> float:
|
||||
"""Berechnet die Erfolgsrate"""
|
||||
return self.repository.get_success_rate(timeframe, platform)
|
||||
|
||||
def get_common_errors(self,
|
||||
limit: int = 10,
|
||||
timeframe: Optional[timedelta] = None) -> List[ErrorSummary]:
|
||||
"""Holt die häufigsten Fehler"""
|
||||
return self.repository.get_common_errors(limit, timeframe)
|
||||
|
||||
    def generate_report(self,
                        report_type: ReportType,
                        start: datetime,
                        end: datetime,
                        platforms: Optional[List[str]] = None) -> Report:
        """Generates a report"""
        # Fetch base metrics
        timeframe = end - start
        success_rate = self.get_success_rate(timeframe, platforms[0] if platforms else None)

        # Fetch platform statistics
        platform_stats_data = self.repository.get_platform_stats(timeframe)
        platform_stats = []

        for platform, stats in platform_stats_data.items():
            if not platforms or platform in platforms:
                platform_stats.append(PlatformStats(
                    platform=platform,
                    total_attempts=stats['total_attempts'],
                    successful_accounts=stats['successful_accounts'],
                    failed_attempts=stats['failed_attempts'],
                    avg_duration_seconds=stats['avg_duration_seconds'],
                    error_distribution={}  # TODO: implement error distribution
                ))

        # Compute aggregate statistics (attempt-weighted average duration)
        total_attempts = sum(ps.total_attempts for ps in platform_stats)
        total_accounts = sum(ps.successful_accounts for ps in platform_stats)
        avg_duration = (sum(ps.avg_duration_seconds * ps.total_attempts for ps in platform_stats)
                        / total_attempts) if total_attempts > 0 else 0

        # Build metrics
        metrics = [
            Metric("success_rate", success_rate, "percentage", 0.0),
            Metric("total_accounts", float(total_accounts), "count", 0.0),
            Metric("avg_duration", avg_duration, "seconds", 0.0)
        ]

        # Fetch timeline data
        timeline_data = self.repository.get_timeline_data('success_rate', 24, platforms[0] if platforms else None)

        success_timeline = None
        if timeline_data:
            timestamps = [datetime.fromisoformat(d['timestamp']) for d in timeline_data]
            values = [d['success_rate'] for d in timeline_data]
            success_timeline = TimeSeriesData(timestamps, values, "Success Rate")

        # Fetch error summaries
        error_summaries = []
        common_errors = self.get_common_errors(10, timeframe)
        for error in common_errors:
            error_summaries.append(error.to_dict())

        # Build the report
        return Report(
            report_id=str(uuid.uuid4()),
            report_type=report_type,
            start_date=start,
            end_date=end,
            generated_at=datetime.now(),
            total_accounts_created=total_accounts,
            total_attempts=total_attempts,
            overall_success_rate=success_rate,
            avg_creation_time=avg_duration,
            metrics=metrics,
            platform_stats=platform_stats,
            error_summaries=error_summaries,
            success_rate_timeline=success_timeline
        )
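    # Worked example for the attempt-weighted average above (illustrative
    # numbers, not from this commit): platform A with 100 attempts at 60s
    # average and platform B with 50 attempts at 120s average yield
    #   (60 * 100 + 120 * 50) / 150 = 12000 / 150 = 80.0 seconds,
    # rather than the naive per-platform mean of 90s, so high-volume
    # platforms dominate the aggregate figure.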
    def get_real_time_metrics(self) -> Dict[str, Any]:
        """Returns real-time metrics"""
        # Last hour
        one_hour_ago = datetime.now() - timedelta(hours=1)

        # Timeline for the last hour
        timeline = self.repository.get_timeline_data('success_rate', 1)

        # Compute metrics
        total_attempts = sum(d['total'] for d in timeline)
        successful = sum(d['successful'] for d in timeline)
        success_rate = successful / total_attempts if total_attempts > 0 else 0

        # Platform stats
        platform_stats = self.repository.get_platform_stats(timedelta(hours=1))

        return {
            'timestamp': datetime.now().isoformat(),
            'active_sessions': len(self.repository._execute_query(
                "SELECT DISTINCT session_id FROM account_creation_analytics WHERE timestamp > ?",
                (one_hour_ago,)
            )),
            'accounts_last_hour': successful,
            'attempts_last_hour': total_attempts,
            'success_rate_last_hour': success_rate,
            'avg_creation_time': sum(
                stats.get('avg_duration_seconds', 0)
                for stats in platform_stats.values()
            ) / len(platform_stats) if platform_stats else 0,
            'platform_breakdown': platform_stats,
            'hourly_trend': self._calculate_trend(timeline)
        }
    def _calculate_trend(self, timeline: List[Dict[str, Any]]) -> float:
        """Calculates a trend from timeline data"""
        if len(timeline) < 2:
            return 0.0

        # Compare the first half against the second half
        mid = len(timeline) // 2
        first_half = timeline[:mid]
        second_half = timeline[mid:]

        first_rate = sum(d.get('success_rate', 0) for d in first_half) / len(first_half) if first_half else 0
        second_rate = sum(d.get('success_rate', 0) for d in second_half) / len(second_half) if second_half else 0

        if first_rate > 0:
            return ((second_rate - first_rate) / first_rate) * 100
        return 0.0
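    # Worked example (illustrative numbers, not from this commit): for
    # success rates [0.50, 0.50, 0.60, 0.70] the halves average 0.50 and
    # 0.65, so the trend is ((0.65 - 0.50) / 0.50) * 100 = +30.0, i.e. a
    # 30% relative improvement. A flat timeline yields 0.0, as does one
    # whose first half never succeeded, since the zero baseline is skipped.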
    def track_performance(self,
                          metric_name: str,
                          value: float,
                          tags: Optional[Dict[str, str]] = None) -> None:
        """Tracks a performance metric"""
        # A real implementation would persist this in a separate table
        logger.info(f"Performance metric: {metric_name}={value} tags={tags}")

    def get_account_creation_timeline(self,
                                      hours: int = 24,
                                      platform: Optional[str] = None) -> Dict[str, Any]:
        """Returns the account creation timeline"""
        timeline = self.repository.get_timeline_data('accounts', hours, platform)

        return {
            'hours': hours,
            'platform': platform,
            'data_points': timeline,
            'total': sum(d['successful'] for d in timeline),
            'peak_hour': max(timeline, key=lambda d: d['successful'])['timestamp'] if timeline else None
        }
    def analyze_failure_patterns(self,
                                 timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """Analyzes error patterns"""
        errors = self.get_common_errors(50, timeframe)

        patterns = {
            'timeframe': str(timeframe) if timeframe else 'all',
            'total_error_types': len(errors),
            'critical_errors': [],
            'recurring_errors': [],
            'error_clusters': []
        }

        # Identify critical errors
        for error in errors:
            if error.severity_score > 0.7:
                patterns['critical_errors'].append({
                    'type': error.error_type,
                    'frequency': error.frequency,
                    'impact': error.total_user_impact + error.total_system_impact
                })

        # Identify recurring errors
        for error in errors:
            if error.error_count > 10:
                patterns['recurring_errors'].append({
                    'type': error.error_type,
                    'count': error.error_count,
                    'recovery_rate': error.recovery_success_rate
                })

        return patterns
    def get_platform_comparison(self,
                                timeframe: Optional[timedelta] = None) -> Dict[str, Any]:
        """Compares platforms"""
        platform_stats = self.repository.get_platform_stats(timeframe)

        comparison = {}
        for platform, stats in platform_stats.items():
            comparison[platform] = {
                'success_rate': stats['success_rate'],
                'total_accounts': stats['successful_accounts'],
                'avg_duration': stats['avg_duration_seconds'],
                'performance_score': self._calculate_platform_score(stats)
            }

        return comparison
    def _calculate_platform_score(self, stats: Dict[str, Any]) -> float:
        """Calculates a performance score for a platform"""
        # Weighted scoring
        success_weight = 0.5
        speed_weight = 0.3
        volume_weight = 0.2

        # Normalize values
        success_score = stats['success_rate']
        speed_score = 1.0 - min(stats['avg_duration_seconds'] / 300, 1.0)  # 5 min max
        volume_score = min(stats['total_attempts'] / 100, 1.0)  # 100 as reference

        return (success_score * success_weight +
                speed_score * speed_weight +
                volume_score * volume_weight)
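    # Worked example (illustrative numbers, not from this commit): a
    # platform with a 0.80 success rate, 60s average duration, and 50
    # attempts scores
    #   0.80 * 0.5 + (1 - 60/300) * 0.3 + (50/100) * 0.2
    #   = 0.40 + 0.24 + 0.10 = 0.74
    # on a 0..1 scale, with both the duration and volume terms capped at 1.0.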
    def export_data(self,
                    format: str = "json",
                    start: Optional[datetime] = None,
                    end: Optional[datetime] = None) -> bytes:
        """Exports data"""
        # Generate a report for the requested period
        report = self.generate_report(
            ReportType.CUSTOM,
            start or datetime.now() - timedelta(days=7),
            end or datetime.now()
        )

        if format == "json":
            return json.dumps(report.to_dict(), indent=2).encode()
        elif format == "csv":
            # Simplified CSV implementation
            csv_data = "platform,attempts,success,rate\n"
            for stat in report.platform_stats:
                csv_data += f"{stat.platform},{stat.total_attempts},{stat.successful_accounts},{stat.success_rate}\n"
            return csv_data.encode()
        else:
            raise ValueError(f"Unsupported format: {format}")
    def cleanup_old_events(self, older_than: datetime) -> int:
        """Cleans up old events"""
        return self.repository.cleanup_old_events(older_than)
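To make the service's surface concrete, a minimal usage sketch under stated assumptions: only methods whose signatures appear in this commit are called, get_success_rate is assumed to return a 0-1 fraction (as the real-time metrics code suggests), and the printed output is illustrative.

from datetime import datetime, timedelta

from domain.value_objects.report import ReportType
from infrastructure.services.structured_analytics_service import StructuredAnalyticsService

service = StructuredAnalyticsService()  # falls back to the default AnalyticsRepository

# Success rate over the last 24 hours across all platforms
rate = service.get_success_rate(timeframe=timedelta(hours=24))
print(f"24h success rate: {rate:.1%}")

# Custom report for the last week, then a JSON export of the same window
report = service.generate_report(
    ReportType.CUSTOM,
    start=datetime.now() - timedelta(days=7),
    end=datetime.now()
)
json_bytes = service.export_data(format="json",
                                 start=report.start_date,
                                 end=report.end_date)

# Purge analytics events older than 30 days
deleted = service.cleanup_old_events(datetime.now() - timedelta(days=30))
print(f"Removed {deleted} old events")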