Initial commit: AegisSight-Monitor (OSINT-Monitoringsystem)

Dieser Commit ist enthalten in:
claude-dev
2026-03-04 17:53:18 +01:00
Commit 8312d24912
51 geänderte Dateien mit 19355 neuen und 0 gelöschten Zeilen

6
.gitignore vendored Normale Datei
Datei anzeigen

@@ -0,0 +1,6 @@
__pycache__/
*.pyc
.env
logs/
data/
.venv/

121
CLAUDE.md Normale Datei
Datei anzeigen

@@ -0,0 +1,121 @@
# OSINT Lagemonitor
> Lokale Arbeitskopie für das OSINT-Monitor-Projekt auf Server `alt`
## Übersicht
```yaml
projekt: osint-monitor
url: https://osint.intelsight.de
beschreibung: "OSINT-basiertes Lagemonitoring mit Claude-KI-Agenten"
server: alt (91.99.192.14, User: claude-dev)
pfad_server: /home/claude-dev/osint-monitor
pfad_lokal: C:\Users\Administrator\Desktop\OSINT-monitor
status: aktiv (systemd service läuft)
```
## Technologie-Stack
```yaml
backend:
framework: FastAPI (Python 3, venv /home/claude-dev/.venvs/osint/)
datenbank: SQLite (WAL-Modus, aiosqlite) @ /mnt/gitea/osint-data/osint.db
auth: JWT (HS256, bcrypt, 24h Ablauf)
scheduler: APScheduler (Auto-Refresh jede Minute + Cleanup stündlich)
websocket: FastAPI native
ki_agenten: Claude CLI (WebSearch + WebFetch Tools)
port: 8891 (localhost, Nginx Reverse Proxy)
frontend:
typ: Vanilla JS (kein Framework)
design: AegisSight Dark Theme (Navy/Gold)
fonts: Poppins (Titel), Inter (Body)
echtzeit: WebSocket mit Auto-Reconnect
```
## Projektstruktur
```yaml
osint-monitor/:
CLAUDE.md: "Projektdokumentation"
requirements.txt: "Python-Abhängigkeiten"
setup_users.py: "Nutzer-Initialisierung (rac00n, ch33tah)"
data/: "Symlink -> /mnt/gitea/osint-data/ (SQLite DB)"
logs/: "Anwendungs-Logs"
src/:
main.py: "FastAPI App, WebSocket-Manager, Scheduler, Lifespan"
config.py: "Konfiguration (JWT, Claude CLI, RSS-Feeds, Excluded Sources)"
auth.py: "JWT-Authentifizierung (bcrypt, HTTPBearer)"
database.py: "SQLite Schema + Migrationen (8 Tabellen: users, incidents, articles, fact_checks, refresh_log, incident_snapshots, sources, notifications)"
models.py: "Pydantic Request/Response-Schemas (inkl. Source CRUD + Notifications)"
source_rules.py: "Dynamische Quellen-Regeln aus DB (RSS-Feeds + Blacklist)"
routers/:
auth.py: "POST /api/auth/login, GET /api/auth/me"
incidents.py: "CRUD /api/incidents, /api/incidents/{id}/articles|factchecks|refresh"
sources.py: "CRUD /api/sources, /api/sources/stats, /api/sources/refresh-counts"
notifications.py: "GET /api/notifications, GET /api/notifications/unread-count, PUT /api/notifications/mark-read"
feedback.py: "POST /api/feedback (Rate-Limited, HTML-E-Mail an feedback@aegis-sight.de)"
agents/:
claude_client.py: "Shared Claude CLI Client mit JSON-Output + Usage-Tracking (ClaudeUsage, UsageAccumulator)"
orchestrator.py: "AsyncQueue, koordiniert Agenten-Pipeline sequentiell, Token-Akkumulation + Snapshots + DB-Notifications"
researcher.py: "Claude WebSearch Agent (Ad-hoc + Deep Research Modus)"
analyzer.py: "Analyse-Agent (Zusammenfassung + Briefing-Format)"
factchecker.py: "Faktencheck-Agent (quellengebunden)"
feeds/:
rss_parser.py: "RSS-Feed Aggregation (dynamisch aus DB, Fallback auf config.py)"
static/:
index.html: "Login-Seite"
dashboard.html: "Hauptdashboard"
css/style.css: "AegisSight Design System"
js/: "api.js, app.js, components.js, ws.js"
```
## Architektur
```yaml
agenten_pipeline:
1_rss: "RSS-Feeds durchsuchen (nur Ad-hoc-Lagen)"
2_claude_recherche: "Claude CLI WebSearch (Ad-hoc oder Deep Research)"
3_analyse: "Zusammenfassung/Briefing mit Inline-Zitaten [1][2] + sources_json"
4_faktencheck: "Claims gegen unabhängige Quellen prüfen"
orchestrierung: "Sequentielle Queue (1 Auftrag gleichzeitig)"
incident_typen:
adhoc: "Breaking News -> RSS + WebSearch -> Plaintext-Summary mit Quellenreferenzen"
research: "Hintergrundrecherche -> Nur Deep Research -> Markdown-Briefing mit Quellenverzeichnis"
internationale_quellen:
toggle: "Pro Lage konfigurierbar (Checkbox beim Anlegen/Bearbeiten)"
international_true: "DE + internationale Feeds (Reuters, BBC, Al Jazeera) + mehrsprachige Claude-Recherche"
international_false: "Nur deutschsprachige Quellen (DE, AT, CH), internationale RSS-Kategorie übersprungen"
db_feld: "international_sources INTEGER DEFAULT 1"
betroffene_agenten: "RSSParser (Kategorien-Filter), ResearcherAgent (Sprach-Prompts), Orchestrator (Weiterleitung)"
quellenanzeige:
inline_zitate: "Klickbare [1][2] Verweise im Lagebild → Quellenverzeichnis"
quellenverzeichnis: "Am Ende des Lagebilds, nummeriert mit Links"
quellenübersicht: "Aggregierte Ansicht aller Quellen pro Lage mit Sprach-Statistik"
timeline_expand: "Artikel klickbar → Inhaltsvorschau + Link zum Original"
sprach_badges: "EN/FR etc. Badge bei fremdsprachigen Artikeln"
evidence_text: "Faktencheck zeigt erklärenden Text + Quellen-Chips"
deduplizierung: "URL-Normalisierung + Headline-Ähnlichkeit (www, trailing slash, query params)"
benachrichtigungen:
persistenz: "DB-Tabelle notifications (pro Nutzer, 7 Tage Aufbewahrung)"
erzeugung: "Orchestrator schreibt nach refresh_summary in DB (öffentlich=alle Nutzer, privat=nur Ersteller)"
frontend: "NotificationCenter lädt aus DB beim Init, optimistisches UI bei WebSocket-Events, Debounced DB-Sync"
gelesen: "Als gelesen markieren (is_read=1) → visuell abgeblendet, nicht gelöscht"
tab_badge: "document.title = '(N) IntelSight...' bei ungelesenen Notifications"
cleanup: "Stündlicher Job löscht Notifications älter als 7 Tage"
quellenverwaltung:
db_tabelle: "sources (id, name, url, domain, source_type, category, status, notes, added_by, article_count, last_seen_at, created_at)"
source_types: "rss_feed | web_source | excluded"
kategorien: "nachrichtenagentur, oeffentlich-rechtlich, qualitaetszeitung, behoerde, fachmedien, think-tank, international, regional, sonstige"
seeding: "Beim Start aus config.py RSS_FEEDS + EXCLUDED_SOURCES (wenn Tabelle leer)"
dynamisch: "source_rules.py liest aktive Quellen aus DB, Fallback auf config.py"
frontend: "Modal mit Filter, Suche, Inline-Formular; Sidebar: 'Quellen verwalten' Button + Mini-Stats"
deployment:
workflow: "scp Dateien -> ssh alt -> systemctl restart osint-monitor"
```

1
data Softlink
Datei anzeigen

@@ -0,0 +1 @@
/mnt/gitea/osint-data

11
requirements.txt Normale Datei
Datei anzeigen

@@ -0,0 +1,11 @@
fastapi==0.115.6
uvicorn[standard]==0.34.0
python-jose[cryptography]
passlib[bcrypt]
aiosqlite
feedparser
httpx
apscheduler==3.10.4
websockets
python-multipart
aiosmtplib

63
setup_users.py Normale Datei
Datei anzeigen

@@ -0,0 +1,63 @@
"""Erstellt die initialen Nutzer für den OSINT Lagemonitor."""
import asyncio
import os
import sys
import secrets
import string
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))
from database import init_db, get_db
from auth import hash_password
def generate_password(length=16):
    """Generate a cryptographically secure random password.

    Args:
        length: Number of characters to produce (default 16).

    Returns:
        A string drawn uniformly from ASCII letters, digits and "!@#$%&*".
    """
    charset = string.ascii_letters + string.digits + "!@#$%&*"
    chars = [secrets.choice(charset) for _ in range(length)]
    return "".join(chars)
async def main():
"""Create or reset the two initial users and print their freshly generated passwords.

Initializes the database, then for each configured username either
updates the stored bcrypt hash (existing user) or inserts a new row.
Passwords are printed once and never shown again.
"""
await init_db()
db = await get_db()
# A fresh random password is generated for each user on every run.
users = [
{"username": "rac00n", "password": generate_password()},
{"username": "ch33tah", "password": generate_password()},
]
print("\n=== OSINT Lagemonitor - Nutzer-Setup ===\n")
for user in users:
cursor = await db.execute(
"SELECT id FROM users WHERE username = ?", (user["username"],)
)
existing = await cursor.fetchone()
if existing:
# Update password for an existing user
pw_hash = hash_password(user["password"])
await db.execute(
"UPDATE users SET password_hash = ? WHERE username = ?",
(pw_hash, user["username"]),
)
print(f" Nutzer '{user['username']}' - Passwort aktualisiert")
else:
pw_hash = hash_password(user["password"])
await db.execute(
"INSERT INTO users (username, password_hash) VALUES (?, ?)",
(user["username"], pw_hash),
)
print(f" Nutzer '{user['username']}' - Erstellt")
# NOTE(review): indentation was lost in this paste; it is unclear whether the
# "Passwort:" print below also covers the update branch — confirm in the repo,
# otherwise updated users never see their new password.
print(f" Passwort: {user['password']}")
print()
await db.commit()
await db.close()
print("WICHTIG: Passwörter jetzt notieren! Sie werden nicht erneut angezeigt.\n")
if __name__ == "__main__":
asyncio.run(main())

169
src/agents/analyzer.py Normale Datei
Datei anzeigen

@@ -0,0 +1,169 @@
"""Analyzer-Agent: Analysiert, übersetzt und fasst Meldungen zusammen."""
import asyncio
import json
import logging
import re
from datetime import datetime
from config import TIMEZONE
from agents.claude_client import call_claude, ClaudeUsage
logger = logging.getLogger("osint.analyzer")
ANALYSIS_PROMPT_TEMPLATE = """Du bist ein OSINT-Analyse-Agent für ein Lagemonitoring-System.
HEUTIGES DATUM: {today}
AUSGABESPRACHE: {output_language}
VORFALL: {title}
KONTEXT: {description}
VORHANDENE MELDUNGEN:
{articles_text}
AUFTRAG:
1. Erstelle eine neutrale, faktenbasierte Zusammenfassung auf {output_language} (max. 500 Wörter)
2. Verwende Inline-Quellenverweise [1], [2], [3] etc. im Zusammenfassungstext
3. Liste die bestätigten Kernfakten auf
4. Übersetze fremdsprachige Überschriften und Inhalte in die Ausgabesprache
STRUKTUR:
- Wenn die Meldungen thematisch klar einen einzelnen Strang behandeln: Fließtext ohne Überschriften
- Wenn verschiedene Aspekte oder Themenfelder aufkommen (z.B. Ereignis + Reaktionen + Hintergrund): Gliedere mit kurzen Markdown-Zwischenüberschriften (##)
- Die Entscheidung liegt bei dir — Überschriften nur wenn sie dem Leser helfen, verschiedene Themenstränge auseinanderzuhalten
REGELN:
- Neutral und sachlich - keine Wertungen oder Spekulationen
- Nur gesicherte Informationen in die Zusammenfassung
- Bei widersprüchlichen Angaben beide Seiten erwähnen
- Quellen immer mit [Nr] referenzieren
- Jede verwendete Quelle MUSS im sources-Array aufgelistet sein
- Nummeriere die Quellen fortlaufend ab [1]
- Ältere Quellen zeitlich einordnen (z.B. "laut einem Bericht vom Januar", "Anfang Februar berichtete...")
Antworte AUSSCHLIESSLICH als JSON-Objekt mit diesen Feldern:
- "summary": Zusammenfassung auf {output_language} mit Quellenverweisen [1], [2] etc. im Text (Markdown-Überschriften ## erlaubt wenn sinnvoll)
- "sources": Array von Quellenobjekten, je: {{"nr": 1, "name": "Quellenname", "url": "https://..."}}
- "key_facts": Array von bestätigten Kernfakten (Strings, in Ausgabesprache)
- "translations": Array von Objekten mit "article_id", "headline_de", "content_de" (nur für fremdsprachige Artikel)
Antworte NUR mit dem JSON-Objekt. Keine Einleitung, keine Erklärung."""
BRIEFING_PROMPT_TEMPLATE = """Du bist ein OSINT-Analyse-Agent für ein Lagemonitoring-System.
Du erstellst ein strukturiertes Briefing für eine Hintergrundrecherche.
HEUTIGES DATUM: {today}
AUSGABESPRACHE: {output_language}
THEMA: {title}
KONTEXT: {description}
VORLIEGENDE QUELLEN:
{articles_text}
AUFTRAG:
Erstelle ein strukturiertes Briefing (max. 800 Wörter) auf {output_language} mit folgenden Abschnitten.
Verwende durchgehend Inline-Quellenverweise [1], [2], [3] etc. im Text.
## ÜBERBLICK
Kurze Einordnung des Themas (2-3 Sätze)
## HINTERGRUND
Historischer Kontext, relevante Vorgeschichte
## AKTEURE
Beteiligte Personen, Organisationen, Institutionen und ihre Rollen
## AKTUELLE LAGE
Was ist der aktuelle Stand? Welche Entwicklungen gibt es?
## EINSCHÄTZUNG
Sachliche Bewertung der Situation, mögliche Entwicklungen
## QUELLENQUALITÄT
Kurze Bewertung der Informationslage: Wie belastbar sind die vorliegenden Quellen?
REGELN:
- Neutral und sachlich - keine Wertungen oder Spekulationen
- Nur gesicherte Informationen verwenden
- Bei widersprüchlichen Angaben beide Seiten erwähnen
- Quellen immer mit [Nr] referenzieren
- Jede verwendete Quelle MUSS im sources-Array aufgelistet sein
- Nummeriere die Quellen fortlaufend ab [1]
- Ältere Quellen zeitlich einordnen (z.B. "laut einem Bericht vom Januar", "Anfang Februar berichtete...")
- Markdown-Überschriften (##) für die Abschnitte verwenden
- Fettdruck (**) für Schlüsselbegriffe erlaubt
Antworte AUSSCHLIESSLICH als JSON-Objekt mit diesen Feldern:
- "summary": Das strukturierte Briefing als Markdown-Text mit Quellenverweisen [1], [2] etc.
- "sources": Array von Quellenobjekten, je: {{"nr": 1, "name": "Quellenname", "url": "https://..."}}
- "key_facts": Array von gesicherten Kernfakten (Strings, in Ausgabesprache)
- "translations": Array von Objekten mit "article_id", "headline_de", "content_de" (nur für fremdsprachige Artikel)
Antworte NUR mit dem JSON-Objekt. Keine Einleitung, keine Erklärung."""
class AnalyzerAgent:
"""Analyzes and translates articles via the Claude CLI."""
async def analyze(self, title: str, description: str, articles: list[dict], incident_type: str = "adhoc") -> tuple[dict | None, ClaudeUsage | None]:
"""Analyze all articles belonging to one incident.

Builds a prompt from up to 30 articles, calls the Claude CLI and
parses the JSON response. Returns (analysis_dict, usage) on success
and (None, None) when there are no articles or the call/parse fails.
"""
if not articles:
return None, None
# Prepare the article text for the prompt
articles_text = ""
for i, article in enumerate(articles[:30]): # cap at 30 articles to bound prompt length
articles_text += f"\n--- Meldung {i+1} (ID: {article.get('id', 'neu')}) ---\n"
articles_text += f"Quelle: {article.get('source', 'Unbekannt')}\n"
url = article.get('source_url', '')
if url:
articles_text += f"URL: {url}\n"
articles_text += f"Sprache: {article.get('language', 'de')}\n"
published = article.get('published_at', '')
if published:
articles_text += f"Veröffentlicht: {published}\n"
# Prefer the already-translated headline/content when available.
headline = article.get('headline_de') or article.get('headline', '')
articles_text += f"Überschrift: {headline}\n"
content = article.get('content_de') or article.get('content_original', '')
if content:
articles_text += f"Inhalt: {content[:500]}\n"
from config import OUTPUT_LANGUAGE
today = datetime.now(TIMEZONE).strftime("%d.%m.%Y")
# Research incidents get the structured briefing prompt, ad-hoc ones the summary prompt.
template = BRIEFING_PROMPT_TEMPLATE if incident_type == "research" else ANALYSIS_PROMPT_TEMPLATE
prompt = template.format(
title=title,
description=description or "Keine weiteren Details",
articles_text=articles_text,
today=today,
output_language=OUTPUT_LANGUAGE,
)
try:
result, usage = await call_claude(prompt)
analysis = self._parse_response(result)
if analysis:
logger.info(f"Analyse abgeschlossen: {len(analysis.get('sources', []))} Quellen referenziert")
return analysis, usage
except Exception as e:
logger.error(f"Analyse-Fehler: {e}")
return None, None
def _parse_response(self, response: str) -> dict | None:
"""Parse the Claude response as a JSON object.

Tries a direct json.loads first; on failure falls back to extracting
the first {...} span from the text. Returns None when no JSON object
can be recovered.
"""
try:
data = json.loads(response)
if isinstance(data, dict):
return data
except json.JSONDecodeError:
pass
match = re.search(r'\{.*\}', response, re.DOTALL)
if match:
try:
data = json.loads(match.group())
if isinstance(data, dict):
return data
except json.JSONDecodeError:
pass
logger.warning("Konnte Analyse-Antwort nicht als JSON parsen")
return None

88
src/agents/claude_client.py Normale Datei
Datei anzeigen

@@ -0,0 +1,88 @@
"""Shared Claude CLI Client mit Usage-Tracking."""
import asyncio
import json
import logging
from dataclasses import dataclass
from config import CLAUDE_PATH, CLAUDE_TIMEOUT
logger = logging.getLogger("osint.claude_client")
@dataclass
class ClaudeUsage:
"""Token usage and cost of a single Claude CLI invocation."""
input_tokens: int = 0
output_tokens: int = 0
# Prompt-caching counters as reported in the CLI's JSON "usage" field.
cache_creation_tokens: int = 0
cache_read_tokens: int = 0
# Cost in USD and wall-clock duration as reported by the CLI.
cost_usd: float = 0.0
duration_ms: int = 0
@dataclass
class UsageAccumulator:
"""Accumulates usage over all Claude calls of a single refresh run."""
input_tokens: int = 0
output_tokens: int = 0
cache_creation_tokens: int = 0
cache_read_tokens: int = 0
total_cost_usd: float = 0.0
# Number of individual Claude CLI calls folded into this accumulator.
call_count: int = 0
def add(self, usage: ClaudeUsage):
"""Fold a single call's usage into the running totals."""
self.input_tokens += usage.input_tokens
self.output_tokens += usage.output_tokens
self.cache_creation_tokens += usage.cache_creation_tokens
self.cache_read_tokens += usage.cache_read_tokens
self.total_cost_usd += usage.cost_usd
self.call_count += 1
async def call_claude(prompt: str, tools: str | None = "WebSearch,WebFetch") -> tuple[str, ClaudeUsage]:
    """Invoke the Claude CLI and return (result_text, usage).

    Args:
        prompt: Prompt text passed via ``-p``.
        tools: Comma-separated tool allowlist; when falsy the call is instead
            limited to a single turn (``--max-turns 1``, no tools).

    Returns:
        Tuple of the result text (the CLI's ``result`` field, or raw stdout
        when stdout is not a JSON object) and a ClaudeUsage with token/cost
        counters parsed from the CLI's JSON output.

    Raises:
        TimeoutError: When the CLI does not finish within CLAUDE_TIMEOUT seconds.
        RuntimeError: When the CLI exits with a non-zero status.
    """
    cmd = [CLAUDE_PATH, "-p", prompt, "--output-format", "json"]
    if tools:
        cmd.extend(["--allowedTools", tools])
    else:
        cmd.extend(["--max-turns", "1"])
    process = await asyncio.create_subprocess_exec(
        *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE,
        # Minimal fixed environment so the CLI behaves identically under systemd.
        env={"PATH": "/usr/local/bin:/usr/bin:/bin", "HOME": "/home/claude-dev"},
    )
    try:
        stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=CLAUDE_TIMEOUT)
    except asyncio.TimeoutError:
        process.kill()
        # Reap the killed child; without this the process lingers as a zombie
        # and asyncio warns about an unclosed transport.
        await process.wait()
        raise TimeoutError(f"Claude CLI Timeout nach {CLAUDE_TIMEOUT}s")
    if process.returncode != 0:
        error_msg = stderr.decode("utf-8", errors="replace").strip()
        logger.error(f"Claude CLI Fehler (Exit {process.returncode}): {error_msg}")
        raise RuntimeError(f"Claude CLI Fehler: {error_msg}")
    raw = stdout.decode("utf-8", errors="replace").strip()
    usage = ClaudeUsage()
    result_text = raw
    try:
        data = json.loads(raw)
        # Guard against non-object JSON (e.g. a bare array/string), which would
        # otherwise raise AttributeError on .get() below.
        if isinstance(data, dict):
            result_text = data.get("result", raw)
            u = data.get("usage", {})
            usage = ClaudeUsage(
                input_tokens=u.get("input_tokens", 0),
                output_tokens=u.get("output_tokens", 0),
                cache_creation_tokens=u.get("cache_creation_input_tokens", 0),
                cache_read_tokens=u.get("cache_read_input_tokens", 0),
                cost_usd=data.get("total_cost_usd", 0.0),
                duration_ms=data.get("duration_ms", 0),
            )
            logger.info(
                f"Claude: {usage.input_tokens} in / {usage.output_tokens} out / "
                f"cache {usage.cache_creation_tokens}+{usage.cache_read_tokens} / "
                f"${usage.cost_usd:.4f} / {usage.duration_ms}ms"
            )
    except json.JSONDecodeError:
        logger.warning("Claude CLI Antwort kein gültiges JSON, nutze raw output")
    return result_text, usage

143
src/agents/factchecker.py Normale Datei
Datei anzeigen

@@ -0,0 +1,143 @@
"""Factchecker-Agent: Prüft Fakten gegen mehrere unabhängige Quellen."""
import asyncio
import json
import logging
import re
from agents.claude_client import call_claude, ClaudeUsage
logger = logging.getLogger("osint.factchecker")
FACTCHECK_PROMPT_TEMPLATE = """Du bist ein Faktencheck-Agent für ein OSINT-Lagemonitoring-System.
AUSGABESPRACHE: {output_language}
VORFALL: {title}
VORLIEGENDE MELDUNGEN:
{articles_text}
STRENGE REGELN - KEINE HALLUZINATIONEN:
- Du darfst NUR Fakten bewerten, die direkt aus den oben übergebenen Meldungen stammen
- KEINE Fakten aus deinem Trainingskorpus - NUR aus den übergebenen Meldungen + WebSearch
- Nutze WebSearch um jeden Claim gegen mindestens 1 weitere unabhängige Quelle zu prüfen
- Rufe die gefundenen URLs per WebFetch ab um den Inhalt zu verifizieren
- Nur wenn du den Claim in der tatsächlich abgerufenen Quelle findest, darfst du ihn als bestätigt markieren
- Jeder Claim MUSS eine konkrete Quellen-URL als Beleg enthalten
- "confirmed" erst bei 2+ unabhängigen Quellen mit überprüfbarer URL
- Lieber "unconfirmed" als falsch bestätigt
AUFTRAG:
1. Identifiziere die 5-10 wichtigsten Faktenaussagen aus den Meldungen
2. Prüfe jeden Claim aktiv per WebSearch gegen mindestens eine weitere unabhängige Quelle
3. Kategorisiere jede Aussage:
- "confirmed": Durch 2+ unabhängige seriöse Quellen mit überprüfbarer URL bestätigt
- "unconfirmed": Nur 1 Quelle oder nicht unabhängig verifizierbar
- "contradicted": Widersprüchliche Informationen aus verschiedenen Quellen
- "developing": Situation noch unklar, entwickelt sich
4. Markiere WICHTIGE NEUE Entwicklungen mit is_notification: true
Antworte AUSSCHLIESSLICH als JSON-Array. Jedes Element hat:
- "claim": Die Faktenaussage auf {output_language}
- "status": "confirmed" | "unconfirmed" | "contradicted" | "developing"
- "sources_count": Anzahl unabhängiger Quellen mit überprüfbarer URL
- "evidence": Begründung MIT konkreten Quellen-URLs als Beleg (z.B. "Bestätigt durch: tagesschau.de (URL), Reuters (URL)")
- "is_notification": true/false (nur bei wichtigen Entwicklungen true)
Antworte NUR mit dem JSON-Array. Keine Einleitung, keine Erklärung."""
RESEARCH_FACTCHECK_PROMPT_TEMPLATE = """Du bist ein Faktencheck-Agent für eine Hintergrundrecherche in einem OSINT-Lagemonitoring-System.
AUSGABESPRACHE: {output_language}
THEMA: {title}
VORLIEGENDE QUELLEN:
{articles_text}
STRENGE REGELN - KEINE HALLUZINATIONEN:
- Du darfst NUR Fakten bewerten, die direkt aus den oben übergebenen Quellen stammen
- KEINE Fakten aus deinem Trainingskorpus - NUR aus den übergebenen Quellen + WebSearch
- Nutze WebSearch um jeden Claim gegen mindestens 1 weitere unabhängige Quelle zu prüfen
- Rufe die gefundenen URLs per WebFetch ab um den Inhalt zu verifizieren
- Nur wenn du den Claim in der tatsächlich abgerufenen Quelle findest, darfst du ihn als gesichert markieren
- Jeder Claim MUSS eine konkrete Quellen-URL als Beleg enthalten
- Lieber "unverified" als falsch bestätigt
AUFTRAG:
Fokus: "Was sind die gesicherten Fakten zu diesem Thema?"
1. Identifiziere die 5-10 wichtigsten Faktenaussagen aus den Quellen
2. Prüfe jeden Claim aktiv per WebSearch gegen weitere unabhängige Quellen
3. Kategorisiere jede Aussage:
- "established": Breit dokumentierter, gesicherter Fakt (3+ unabhängige Quellen mit URL)
- "disputed": Umstrittener Sachverhalt, verschiedene Positionen dokumentiert
- "unverified": Einzelbehauptung, nicht unabhängig verifizierbar
- "developing": Aktuelle Entwicklung, Faktenlage noch im Fluss
4. Markiere WICHTIGE Erkenntnisse mit is_notification: true
Antworte AUSSCHLIESSLICH als JSON-Array. Jedes Element hat:
- "claim": Die Faktenaussage auf {output_language}
- "status": "established" | "disputed" | "unverified" | "developing"
- "sources_count": Anzahl unabhängiger Quellen mit überprüfbarer URL
- "evidence": Begründung MIT konkreten Quellen-URLs als Beleg
- "is_notification": true/false
Antworte NUR mit dem JSON-Array. Keine Einleitung, keine Erklärung."""
class FactCheckerAgent:
"""Checks facts against independent sources via the Claude CLI."""
async def check(self, title: str, articles: list[dict], incident_type: str = "adhoc") -> tuple[list[dict], ClaudeUsage | None]:
"""Run a fact check for an incident.

Builds a prompt from up to 20 articles, calls the Claude CLI and
parses the JSON array of claims. Returns ([], None) when there are
no articles or the call fails.
"""
if not articles:
return [], None
articles_text = ""
for i, article in enumerate(articles[:20]):
articles_text += f"\n--- Meldung {i+1} ---\n"
articles_text += f"Quelle: {article.get('source', 'Unbekannt')}\n"
source_url = article.get('source_url', '')
if source_url:
articles_text += f"URL: {source_url}\n"
# Prefer the already-translated headline/content when available.
headline = article.get('headline_de') or article.get('headline', '')
articles_text += f"Überschrift: {headline}\n"
content = article.get('content_de') or article.get('content_original', '')
if content:
articles_text += f"Inhalt: {content[:300]}\n"
from config import OUTPUT_LANGUAGE
# Research incidents use the "established facts" prompt variant.
template = RESEARCH_FACTCHECK_PROMPT_TEMPLATE if incident_type == "research" else FACTCHECK_PROMPT_TEMPLATE
prompt = template.format(
title=title,
articles_text=articles_text,
output_language=OUTPUT_LANGUAGE,
)
try:
result, usage = await call_claude(prompt)
facts = self._parse_response(result)
logger.info(f"Faktencheck: {len(facts)} Fakten geprüft")
return facts, usage
except Exception as e:
logger.error(f"Faktencheck-Fehler: {e}")
return [], None
def _parse_response(self, response: str) -> list[dict]:
"""Parse the Claude response as a JSON array.

Tries a direct json.loads first; on failure falls back to extracting
the first [...] span from the text. Returns [] when no JSON array can
be recovered.
"""
try:
data = json.loads(response)
if isinstance(data, list):
return data
except json.JSONDecodeError:
pass
match = re.search(r'\[.*\]', response, re.DOTALL)
if match:
try:
data = json.loads(match.group())
if isinstance(data, list):
return data
except json.JSONDecodeError:
pass
logger.warning("Konnte Faktencheck-Antwort nicht als JSON parsen")
return []

893
src/agents/orchestrator.py Normale Datei
Datei anzeigen

@@ -0,0 +1,893 @@
"""Agenten-Orchestrierung: Queue und Steuerung der Claude-Agenten."""
import asyncio
import json
import logging
import re
from datetime import datetime, timezone
from config import TIMEZONE
from typing import Optional
from urllib.parse import urlparse, urlunparse
from agents.claude_client import UsageAccumulator
from source_rules import (
DOMAIN_CATEGORY_MAP,
_detect_category,
_extract_domain,
discover_source,
domain_to_display_name,
)
logger = logging.getLogger("osint.orchestrator")
def _normalize_url(url: str) -> str:
"""URL normalisieren für Duplikat-Erkennung."""
if not url:
return ""
url = url.strip()
try:
parsed = urlparse(url)
# Scheme normalisieren
scheme = parsed.scheme.lower() or "https"
# Host normalisieren (www entfernen, lowercase)
netloc = parsed.netloc.lower()
if netloc.startswith("www."):
netloc = netloc[4:]
# Pfad normalisieren (trailing slash entfernen)
path = parsed.path.rstrip("/")
# Query-Parameter und Fragment entfernen (Tracking-Params etc.)
return urlunparse((scheme, netloc, path, "", "", ""))
except Exception:
return url.lower().strip().rstrip("/")
def _normalize_headline(headline: str) -> str:
"""Überschrift normalisieren für Ähnlichkeitsvergleich."""
if not headline:
return ""
h = headline.lower().strip()
# Umlaute normalisieren
h = h.replace("ä", "ae").replace("ö", "oe").replace("ü", "ue").replace("ß", "ss")
# Sonderzeichen entfernen
h = re.sub(r"[^\w\s]", "", h)
h = re.sub(r"\s+", " ", h).strip()
return h
def _is_duplicate(article: dict, seen_urls: set, seen_headlines: set) -> bool:
    """Return True when the article repeats an already-seen URL or headline.

    Both sets are mutated in place, so callers can stream articles through
    this check one at a time.
    """
    raw_url = article.get("source_url", "")
    headline = article.get("headline", "")
    # Check the normalized URL against everything seen so far.
    if raw_url:
        canonical = _normalize_url(raw_url)
        if canonical in seen_urls:
            return True
        seen_urls.add(canonical)
    # Only compare headlines long enough to be distinctive.
    if headline and len(headline) > 20:
        key = _normalize_headline(headline)
        if key and key in seen_headlines:
            return True
        if key:
            seen_headlines.add(key)
    return False
async def _background_discover_sources(articles: list[dict]):
"""Background task: registers reputable, previously unknown sources from research results.

Extracts unique domains from article URLs, keeps only domains with a
recognized category, skips domains already present in the sources table
and inserts the rest (with RSS feed detection) as auto-discovered
sources. Best-effort: all failures are logged, never raised.
"""
from database import get_db
db = await get_db()
try:
# 1. Extract unique domains
seen_domains: set[str] = set()
domains_to_check: list[tuple[str, str, str]] = []
for article in articles:
url = article.get("source_url")
if not url:
continue
domain = _extract_domain(url)
if not domain or domain in seen_domains:
continue
seen_domains.add(domain)
# 2. Only reputable domains (listed in DOMAIN_CATEGORY_MAP, not "sonstige")
category = _detect_category(domain)
if category == "sonstige":
continue
domains_to_check.append((domain, url, category))
if not domains_to_check:
return
# 3. Check against the DB — which domains already exist?
new_count = 0
for domain, url, category in domains_to_check:
cursor = await db.execute(
"SELECT id FROM sources WHERE LOWER(domain) = ?",
(domain.lower(),),
)
if await cursor.fetchone():
continue # domain already known
# 4. RSS feed detection
try:
result = await discover_source(url)
name = domain_to_display_name(domain)
source_type = result["source_type"] # "rss_feed" or "web_source"
feed_url = result.get("rss_url")
await db.execute(
"""INSERT INTO sources (name, url, domain, source_type, category, status, notes, added_by)
VALUES (?, ?, ?, ?, ?, 'active', 'Auto-entdeckt via Recherche', 'system')""",
(name, feed_url or f"https://{domain}", domain, source_type, category),
)
new_count += 1
logger.info(f"Neue Quelle registriert: {name} ({domain}) als {source_type}")
except Exception as e:
logger.debug(f"Discovery fehlgeschlagen für {domain}: {e}")
if new_count > 0:
await db.commit()
logger.info(f"Background-Discovery: {new_count} neue Quellen registriert")
except Exception as e:
logger.warning(f"Background-Discovery Fehler: {e}")
finally:
await db.close()
async def _create_notifications_for_incident(
db, incident_id: int, visibility: str, created_by: int, tenant_id: int, notifications: list[dict]
):
"""Create DB notifications for all affected users of the organization.

- Public incidents -> every active user of the org who has logged in at least once
- Private incidents -> creator only
"""
if not notifications:
return
now = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
if visibility == "public" and tenant_id:
cursor = await db.execute(
"SELECT id FROM users WHERE organization_id = ? AND is_active = 1 AND last_login_at IS NOT NULL",
(tenant_id,),
)
user_ids = [row["id"] for row in await cursor.fetchall()]
else:
user_ids = [created_by]
# One notification row per (user, notification) pair.
for user_id in user_ids:
for notif in notifications:
await db.execute(
"""INSERT INTO notifications (user_id, incident_id, type, title, text, icon, tenant_id, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(
user_id,
incident_id,
notif.get("type", "refresh_summary"),
notif["title"],
notif["text"],
notif.get("icon", "info"),
tenant_id,
now,
),
)
await db.commit()
logger.info(f"Notifications erstellt: {len(notifications)} x {len(user_ids)} Nutzer fuer Lage {incident_id}")
async def _send_email_notifications_for_incident(
db, incident_id: int, incident_title: str, visibility: str,
created_by: int, tenant_id: int, notifications: list[dict]
):
"""Send e-mail notifications based on individual per-user subscriptions.

Each user has their own e-mail preferences per incident
(incident_subscriptions). Only users who are active and have logged in
at least once (last_login_at IS NOT NULL) are considered.
"""
if not notifications:
return
from email_utils.sender import send_email
from email_utils.templates import incident_notification_email
from config import MAGIC_LINK_BASE_URL
# Load all users with active subscriptions for this incident
cursor = await db.execute(
"""SELECT s.notify_email_summary, s.notify_email_new_articles,
s.notify_email_status_change, u.id, u.email, u.username
FROM incident_subscriptions s
JOIN users u ON u.id = s.user_id
WHERE s.incident_id = ?
AND u.is_active = 1
AND u.last_login_at IS NOT NULL
AND (s.notify_email_summary = 1
OR s.notify_email_new_articles = 1
OR s.notify_email_status_change = 1)""",
(incident_id,),
)
subscribers = await cursor.fetchall()
if not subscribers:
return
dashboard_url = f"{MAGIC_LINK_BASE_URL}/dashboard"
for sub in subscribers:
prefs = dict(sub)
# Filter the notifications down to the types this user subscribed to
filtered_notifications = []
for n in notifications:
ntype = n.get("type", "refresh_summary")
if ntype == "refresh_summary" and prefs.get("notify_email_summary"):
filtered_notifications.append(n)
elif ntype == "new_articles" and prefs.get("notify_email_new_articles"):
filtered_notifications.append(n)
elif ntype == "status_change" and prefs.get("notify_email_status_change"):
filtered_notifications.append(n)
if not filtered_notifications:
continue
subject, html = incident_notification_email(
username=prefs["username"],
incident_title=incident_title,
notifications=filtered_notifications,
dashboard_url=dashboard_url,
)
# One e-mail per subscriber; failures are logged but do not abort the loop.
try:
await send_email(prefs["email"], subject, html)
logger.info(f"E-Mail-Benachrichtigung gesendet an {prefs['email']} fuer Lage {incident_id} ({len(filtered_notifications)} Items)")
except Exception as e:
logger.error(f"E-Mail-Benachrichtigung fehlgeschlagen fuer {prefs['email']}: {e}")
class AgentOrchestrator:
"""Verwaltet die Claude-Agenten-Queue und koordiniert Recherche-Zyklen."""
def __init__(self):
# Pending refresh jobs, consumed sequentially by the worker task.
self._queue: asyncio.Queue = asyncio.Queue()
self._running = False
# Incident id currently being refreshed (None when idle).
self._current_task: Optional[int] = None
self._ws_manager = None
# Incident ids waiting in the queue (guards against double-enqueue).
self._queued_ids: set[int] = set()
# Incident ids for which a user requested cancellation.
self._cancel_requested: set[int] = set()
def set_ws_manager(self, ws_manager):
"""Set the WebSocket manager used for real-time status broadcasts."""
self._ws_manager = ws_manager
async def start(self):
"""Start the queue worker as a background task."""
self._running = True
asyncio.create_task(self._worker())
logger.info("Agenten-Orchestrator gestartet")
async def stop(self):
"""Stop the queue worker (the worker loop exits on its next poll)."""
self._running = False
logger.info("Agenten-Orchestrator gestoppt")
async def enqueue_refresh(self, incident_id: int, trigger_type: str = "manual") -> bool:
"""Put a refresh job on the queue. Returns False when the incident is already queued or active."""
if incident_id in self._queued_ids or self._current_task == incident_id:
logger.info(f"Refresh fuer Lage {incident_id} uebersprungen: bereits aktiv/in Queue")
return False
visibility, created_by, tenant_id = await self._get_incident_visibility(incident_id)
self._queued_ids.add(incident_id)
await self._queue.put((incident_id, trigger_type))
queue_size = self._queue.qsize()
logger.info(f"Refresh fuer Lage {incident_id} eingereiht (Queue: {queue_size}, Trigger: {trigger_type})")
# Broadcast the queued status so dashboards can show the queue position immediately.
if self._ws_manager:
await self._ws_manager.broadcast_for_incident({
"type": "status_update",
"incident_id": incident_id,
"data": {"status": "queued", "queue_position": queue_size},
}, visibility, created_by, tenant_id)
return True
async def cancel_refresh(self, incident_id: int) -> bool:
    """Request cancellation of the currently running refresh.

    Only the active incident can be cancelled; anything else returns False.
    """
    if self._current_task != incident_id:
        return False
    self._cancel_requested.add(incident_id)
    logger.info(f"Cancel angefordert fuer Lage {incident_id}")
    if self._ws_manager:
        try:
            scope, owner, tenant = await self._get_incident_visibility(incident_id)
        except Exception:
            # Lookup failed — broadcast with the public default scope.
            scope, owner, tenant = "public", None, None
        event = {
            "type": "status_update",
            "incident_id": incident_id,
            "data": {"status": "cancelling", "detail": "Wird abgebrochen..."},
        }
        await self._ws_manager.broadcast_for_incident(event, scope, owner, tenant)
    return True
def _check_cancelled(self, incident_id: int):
"""Prüft ob Abbruch angefordert wurde und wirft CancelledError."""
if incident_id in self._cancel_requested:
self._cancel_requested.discard(incident_id)
raise asyncio.CancelledError("Vom Nutzer abgebrochen")
async def _worker(self):
    """Processes refresh jobs sequentially, one incident at a time.

    Retry policy: up to 3 attempts for transient errors (timeout,
    connection, OS errors) with growing delays between attempts;
    user cancellation and permanent errors stop the cycle immediately.
    """
    while self._running:
        try:
            # Poll with a timeout so the loop can notice stop() promptly.
            item = await asyncio.wait_for(self._queue.get(), timeout=5.0)
        except asyncio.TimeoutError:
            continue
        incident_id, trigger_type = item
        self._queued_ids.discard(incident_id)
        self._current_task = incident_id
        logger.info(f"Starte Refresh für Lage {incident_id} (Trigger: {trigger_type})")
        RETRY_DELAYS = [0, 120, 300]  # seconds: immediately, 2 min, 5 min
        TRANSIENT_ERRORS = (asyncio.TimeoutError, ConnectionError, OSError)
        last_error = None
        try:
            for attempt in range(3):
                try:
                    await self._run_refresh(incident_id, trigger_type=trigger_type, retry_count=attempt)
                    last_error = None
                    break  # success
                except asyncio.CancelledError:
                    # User-requested abort (raised via _check_cancelled).
                    logger.info(f"Refresh fuer Lage {incident_id} abgebrochen")
                    await self._mark_refresh_cancelled(incident_id)
                    try:
                        _vis, _cb, _tid = await self._get_incident_visibility(incident_id)
                    except Exception:
                        _vis, _cb, _tid = "public", None, None
                    if self._ws_manager:
                        await self._ws_manager.broadcast_for_incident({
                            "type": "refresh_cancelled",
                            "incident_id": incident_id,
                            "data": {"status": "cancelled"},
                        }, _vis, _cb, _tid)
                    last_error = None
                    break
                except TRANSIENT_ERRORS as e:
                    last_error = e
                    logger.warning(f"Transienter Fehler bei Lage {incident_id} (Versuch {attempt + 1}/3): {e}")
                    if attempt < 2:
                        await self._mark_refresh_failed(incident_id, str(e))
                        delay = RETRY_DELAYS[attempt + 1]
                        logger.info(f"Retry in {delay}s für Lage {incident_id}")
                        # Push the retry status over WebSocket.
                        if self._ws_manager:
                            try:
                                _vis, _cb, _tid = await self._get_incident_visibility(incident_id)
                            except Exception:
                                _vis, _cb, _tid = "public", None, None
                            await self._ws_manager.broadcast_for_incident({
                                "type": "status_update",
                                "incident_id": incident_id,
                                "data": {"status": "retrying", "attempt": attempt + 1, "delay": delay},
                            }, _vis, _cb, _tid)
                        await asyncio.sleep(delay)
                    else:
                        await self._mark_refresh_failed(incident_id, f"Endgültig fehlgeschlagen nach 3 Versuchen: {e}")
                except Exception as e:
                    last_error = e
                    logger.error(f"Permanenter Fehler bei Refresh für Lage {incident_id}: {e}")
                    await self._mark_refresh_failed(incident_id, str(e))
                    break  # permanent error, no retry
            # After the retry loop: report an unrecovered failure to clients.
            if last_error and self._ws_manager:
                try:
                    _vis, _cb, _tid = await self._get_incident_visibility(incident_id)
                except Exception:
                    _vis, _cb, _tid = "public", None, None
                await self._ws_manager.broadcast_for_incident({
                    "type": "refresh_error",
                    "incident_id": incident_id,
                    "data": {"error": str(last_error)},
                }, _vis, _cb, _tid)
        finally:
            # Always release the active slot and acknowledge the queue item.
            self._current_task = None
            self._queue.task_done()
async def _mark_refresh_cancelled(self, incident_id: int):
    """Flag the active refresh_log row for this incident as cancelled.

    Best effort: failures are logged, never raised.
    """
    from database import get_db
    db = await get_db()
    try:
        finished = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        await db.execute(
            """UPDATE refresh_log SET status = 'cancelled', error_message = 'Vom Nutzer abgebrochen',
            completed_at = ? WHERE incident_id = ? AND status = 'running'""",
            (finished, incident_id),
        )
        await db.commit()
    except Exception as e:
        logger.warning(f"Konnte Refresh-Log nicht als abgebrochen markieren: {e}")
    finally:
        await db.close()
async def _mark_refresh_failed(self, incident_id: int, error: str):
    """Flag the active refresh_log row for this incident as errored.

    Best effort: failures are logged, never raised.
    """
    from database import get_db
    db = await get_db()
    try:
        finished = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        await db.execute(
            """UPDATE refresh_log SET status = 'error', error_message = ?,
            completed_at = ? WHERE incident_id = ? AND status = 'running'""",
            (error[:500], finished, incident_id),  # error text capped at 500 chars
        )
        await db.commit()
    except Exception as e:
        logger.warning(f"Konnte Refresh-Log nicht als fehlgeschlagen markieren: {e}")
    finally:
        await db.close()
async def _get_incident_visibility(self, incident_id: int) -> tuple[str, Optional[int], Optional[int]]:
    """Load (visibility, created_by, tenant_id) for an incident.

    Falls back to ("public", None, None) when the incident does not exist.
    """
    from database import get_db
    db = await get_db()
    try:
        cursor = await db.execute(
            "SELECT visibility, created_by, tenant_id FROM incidents WHERE id = ?", (incident_id,)
        )
        row = await cursor.fetchone()
    finally:
        await db.close()
    if not row:
        return "public", None, None
    return row["visibility"] or "public", row["created_by"], row["tenant_id"]
async def _run_refresh(self, incident_id: int, trigger_type: str = "manual", retry_count: int = 0):
    """Run one complete refresh cycle for an incident.

    Steps: load incident -> open refresh_log row -> RSS + Claude web
    search in parallel -> dedupe -> persist new articles -> analysis and
    summary -> fact checking -> notifications -> close refresh_log.

    Raises asyncio.CancelledError when the user aborted the refresh
    (checked at the checkpoints between the expensive steps).
    """
    from database import get_db
    from agents.researcher import ResearcherAgent
    from agents.analyzer import AnalyzerAgent
    from agents.factchecker import FactCheckerAgent
    from feeds.rss_parser import RSSParser
    db = await get_db()
    try:
        # --- Load the incident -------------------------------------------
        cursor = await db.execute("SELECT * FROM incidents WHERE id = ?", (incident_id,))
        incident = await cursor.fetchone()
        if not incident:
            logger.warning(f"Lage {incident_id} nicht gefunden")
            return
        title = incident["title"]
        description = incident["description"] or ""
        incident_type = incident["type"] or "adhoc"
        # Older rows may predate these columns, hence the keys() probes.
        international = bool(incident["international_sources"]) if "international_sources" in incident.keys() else True
        visibility = incident["visibility"] if "visibility" in incident.keys() else "public"
        created_by = incident["created_by"] if "created_by" in incident.keys() else None
        tenant_id = incident["tenant_id"] if "tenant_id" in incident.keys() else None
        # On retry: close the stale 'running' log entry of the failed attempt.
        if retry_count > 0:
            await db.execute(
                """UPDATE refresh_log SET status = 'error', error_message = 'Retry gestartet',
                completed_at = ? WHERE incident_id = ? AND status = 'running'""",
                (datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'), incident_id),
            )
            await db.commit()
        # --- Open a refresh_log entry ------------------------------------
        now = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        cursor = await db.execute(
            "INSERT INTO refresh_log (incident_id, started_at, status, trigger_type, retry_count, tenant_id) VALUES (?, ?, 'running', ?, ?, ?)",
            (incident_id, now, trigger_type, retry_count, tenant_id),
        )
        await db.commit()
        log_id = cursor.lastrowid
        usage_acc = UsageAccumulator()
        research_status = "deep_researching" if incident_type == "research" else "researching"
        research_detail = "Hintergrundrecherche im Web läuft..." if incident_type == "research" else "RSS-Feeds und Web werden durchsucht..."
        if self._ws_manager:
            await self._ws_manager.broadcast_for_incident({
                "type": "status_update",
                "incident_id": incident_id,
                "data": {"status": research_status, "detail": research_detail, "started_at": now},
            }, visibility, created_by, tenant_id)

        # --- Step 1+2: RSS feeds and Claude research, in parallel --------
        async def _rss_pipeline():
            """RSS feed search (feed selection + parsing)."""
            if incident_type != "adhoc":
                logger.info("Recherche-Modus: RSS-Feeds übersprungen")
                return [], None
            rss_researcher = ResearcherAgent()
            rss_parser = RSSParser()
            from source_rules import get_feeds_with_metadata
            all_feeds = await get_feeds_with_metadata(tenant_id=tenant_id)
            feed_usage = None
            if len(all_feeds) > 20:
                # Let Claude pre-select relevant feeds to keep parsing cheap.
                selected_feeds, feed_usage = await rss_researcher.select_relevant_feeds(
                    title, description, international, all_feeds
                )
                logger.info(f"Feed-Selektion: {len(selected_feeds)} von {len(all_feeds)} Feeds ausgewählt")
                articles = await rss_parser.search_feeds_selective(title, selected_feeds)
            else:
                articles = await rss_parser.search_feeds(title, international=international, tenant_id=tenant_id)
            logger.info(f"RSS: {len(articles)} relevante Artikel gefunden (international={international})")
            return articles, feed_usage

        async def _web_search_pipeline():
            """Claude WebSearch research."""
            researcher = ResearcherAgent()
            results, usage = await researcher.search(title, description, incident_type, international=international)
            logger.info(f"Claude-Recherche: {len(results)} Ergebnisse")
            return results, usage

        (rss_articles, rss_feed_usage), (search_results, search_usage) = await asyncio.gather(
            _rss_pipeline(),
            _web_search_pipeline(),
        )
        if rss_feed_usage:
            usage_acc.add(rss_feed_usage)
        if search_usage:
            usage_acc.add(search_usage)
        # Checkpoint 1: honour a cancel request after RSS/WebSearch.
        self._check_cancelled(incident_id)

        # --- Deduplicate within this run (normalized URL + headline) -----
        all_results = rss_articles + search_results
        seen_urls = set()
        seen_headlines = set()
        unique_results = []
        for article in all_results:
            if not _is_duplicate(article, seen_urls, seen_headlines):
                unique_results.append(article)
        dupes_removed = len(all_results) - len(unique_results)
        if dupes_removed > 0:
            logger.info(f"Deduplizierung: {dupes_removed} Duplikate entfernt, {len(unique_results)} verbleibend")
        source_count = len(set(a.get("source", "") for a in unique_results))
        if self._ws_manager:
            await self._ws_manager.broadcast_for_incident({
                "type": "status_update",
                "incident_id": incident_id,
                "data": {
                    "status": "analyzing",
                    "detail": f"Analysiert {len(unique_results)} Meldungen aus {source_count} Quellen...",
                    "started_at": now,
                },
            }, visibility, created_by, tenant_id)

        # --- Persist new articles, deduped against the DB ----------------
        # Load the existing articles ONCE and build lookup sets instead of
        # re-querying the whole table for every candidate (was O(n^2) with
        # two full fetches per article).
        cursor = await db.execute(
            "SELECT source_url, headline FROM articles WHERE incident_id = ?",
            (incident_id,),
        )
        existing_rows = await cursor.fetchall()
        known_urls = {
            _normalize_url(row["source_url"]) for row in existing_rows if row["source_url"]
        }
        known_headlines = {_normalize_headline(row["headline"]) for row in existing_rows}
        new_count = 0
        for article in unique_results:
            url = article.get("source_url")
            if url and _normalize_url(url) in known_urls:
                continue
            headline = article.get("headline", "")
            # Headline dedup only for reasonably long headlines (short ones
            # collide too easily).
            if headline and len(headline) > 20 and _normalize_headline(headline) in known_headlines:
                continue
            await db.execute(
                """INSERT INTO articles (incident_id, headline, headline_de, source,
                source_url, content_original, content_de, language, published_at, tenant_id)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    incident_id,
                    article.get("headline", ""),
                    article.get("headline_de"),
                    article.get("source", "Unbekannt"),
                    article.get("source_url"),
                    article.get("content_original"),
                    article.get("content_de"),
                    article.get("language", "de"),
                    article.get("published_at"),
                    tenant_id,
                ),
            )
            # Register the freshly inserted article so later candidates in
            # this batch are deduped against it too (matches the previous
            # behaviour, where uncommitted inserts were visible to the
            # per-candidate SELECTs on the same connection).
            if url:
                known_urls.add(_normalize_url(url))
            if headline:
                known_headlines.add(_normalize_headline(headline))
            new_count += 1
        await db.commit()
        # Update per-source statistics (best effort).
        if new_count > 0:
            try:
                from database import refresh_source_counts
                await refresh_source_counts(db)
            except Exception as e:
                logger.warning(f"Quellen-Statistiken konnten nicht aktualisiert werden: {e}")

        # --- Step 3: analysis / summary ----------------------------------
        # Only when there is new input, or no summary exists yet.
        if new_count > 0 or not incident["summary"]:
            cursor = await db.execute(
                "SELECT * FROM articles WHERE incident_id = ? ORDER BY collected_at DESC",
                (incident_id,),
            )
            all_articles = [dict(row) for row in await cursor.fetchall()]
            analyzer = AnalyzerAgent()
            analysis, analysis_usage = await analyzer.analyze(title, description, all_articles, incident_type)
            if analysis_usage:
                usage_acc.add(analysis_usage)
            if analysis:
                is_first_summary = not incident["summary"]
                # Snapshot the previous situation report before overwriting it.
                if incident["summary"]:
                    cursor = await db.execute(
                        "SELECT COUNT(*) as cnt FROM articles WHERE incident_id = ?",
                        (incident_id,),
                    )
                    snap_articles = (await cursor.fetchone())["cnt"]
                    cursor = await db.execute(
                        "SELECT COUNT(*) as cnt FROM fact_checks WHERE incident_id = ?",
                        (incident_id,),
                    )
                    snap_fcs = (await cursor.fetchone())["cnt"]
                    await db.execute(
                        """INSERT INTO incident_snapshots
                        (incident_id, summary, sources_json,
                        article_count, fact_check_count, refresh_log_id, created_at, tenant_id)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                        (incident_id, incident["summary"], incident["sources_json"],
                         snap_articles, snap_fcs, log_id, now, tenant_id),
                    )
                # Store the new summary plus the sources the analyzer cited.
                sources = analysis.get("sources", [])
                sources_json = json.dumps(sources, ensure_ascii=False) if sources else None
                new_summary = analysis.get("summary", "")
                await db.execute(
                    "UPDATE incidents SET summary = ?, sources_json = ?, updated_at = ? WHERE id = ?",
                    (new_summary, sources_json, now, incident_id),
                )
                # First refresh: snapshot the brand-new situation report too.
                if is_first_summary and new_summary:
                    cursor = await db.execute(
                        "SELECT COUNT(*) as cnt FROM articles WHERE incident_id = ?",
                        (incident_id,),
                    )
                    snap_articles = (await cursor.fetchone())["cnt"]
                    cursor = await db.execute(
                        "SELECT COUNT(*) as cnt FROM fact_checks WHERE incident_id = ?",
                        (incident_id,),
                    )
                    snap_fcs = (await cursor.fetchone())["cnt"]
                    await db.execute(
                        """INSERT INTO incident_snapshots
                        (incident_id, summary, sources_json,
                        article_count, fact_check_count, refresh_log_id, created_at, tenant_id)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                        (incident_id, new_summary, sources_json,
                         snap_articles, snap_fcs, log_id, now, tenant_id),
                    )
                # Apply translations produced by the analyzer.
                for translation in analysis.get("translations", []):
                    article_id = translation.get("article_id")
                    if article_id:
                        await db.execute(
                            "UPDATE articles SET headline_de = ?, content_de = ? WHERE id = ?",
                            (translation.get("headline_de"), translation.get("content_de"), article_id),
                        )
                await db.commit()
            # Checkpoint 2: honour a cancel request after the analysis.
            self._check_cancelled(incident_id)
            if self._ws_manager:
                await self._ws_manager.broadcast_for_incident({
                    "type": "status_update",
                    "incident_id": incident_id,
                    "data": {"status": "factchecking", "detail": "Prüft Fakten gegen unabhängige Quellen...", "started_at": now},
                }, visibility, created_by, tenant_id)
            # --- Step 4: fact checking -----------------------------------
            # NOTE(review): fact checking is nested under the "new input"
            # branch so `all_articles` is always bound here; confirm this
            # matches the intended control flow of the original.
            factchecker = FactCheckerAgent()
            fact_checks, fc_usage = await factchecker.check(title, all_articles, incident_type)
            if fc_usage:
                usage_acc.add(fc_usage)
            # Checkpoint 3: honour a cancel request after fact checking.
            self._check_cancelled(incident_id)
            # First refresh = no fact checks stored yet.
            cursor = await db.execute(
                "SELECT COUNT(*) as cnt FROM fact_checks WHERE incident_id = ?",
                (incident_id,),
            )
            row = await cursor.fetchone()
            is_first_refresh = row["cnt"] == 0
            # Collect the notification summary while upserting claims.
            confirmed_count = 0
            contradicted_count = 0
            status_changes = []
            for fc in fact_checks:
                # Does this claim already exist, and with which status?
                cursor = await db.execute(
                    "SELECT id, status FROM fact_checks WHERE incident_id = ? AND claim = ?",
                    (incident_id, fc.get("claim", "")),
                )
                existing = await cursor.fetchone()
                old_status = existing["status"] if existing else None
                new_status = fc.get("status", "developing")
                if existing:
                    await db.execute(
                        "UPDATE fact_checks SET status = ?, sources_count = ?, evidence = ?, is_notification = ?, checked_at = ? WHERE id = ?",
                        (new_status, fc.get("sources_count", 0), fc.get("evidence"), fc.get("is_notification", 0), now, existing["id"]),
                    )
                else:
                    await db.execute(
                        """INSERT INTO fact_checks (incident_id, claim, status, sources_count, evidence, is_notification, tenant_id)
                        VALUES (?, ?, ?, ?, ?, ?, ?)""",
                        (incident_id, fc.get("claim", ""), new_status, fc.get("sources_count", 0), fc.get("evidence"), fc.get("is_notification", 0), tenant_id),
                    )
                if new_status in ("confirmed", "established"):
                    confirmed_count += 1
                elif new_status in ("contradicted", "disputed"):
                    contradicted_count += 1
                # Track genuine status transitions (not on the first refresh).
                if not is_first_refresh and old_status and old_status != new_status:
                    status_changes.append({
                        "claim": fc.get("claim", ""),
                        "old_status": old_status,
                        "new_status": new_status,
                    })
            await db.commit()
            # --- Bundled notifications (skipped on the first refresh) ----
            if not is_first_refresh:
                if self._ws_manager:
                    await self._ws_manager.broadcast_for_incident({
                        "type": "refresh_summary",
                        "incident_id": incident_id,
                        "data": {
                            "new_articles": new_count,
                            "confirmed_count": confirmed_count,
                            "contradicted_count": contradicted_count,
                            "status_changes": status_changes,
                            "is_first_refresh": False,
                            "incident_title": title,
                        },
                    }, visibility, created_by, tenant_id)
                parts = []
                if new_count > 0:
                    parts.append(f"{new_count} neue Meldung{'en' if new_count != 1 else ''}")
                if confirmed_count > 0:
                    parts.append(f"{confirmed_count} bestätigt")
                if contradicted_count > 0:
                    parts.append(f"{contradicted_count} widersprochen")
                summary_text = ", ".join(parts) if parts else "Keine neuen Entwicklungen"
                db_notifications = [{
                    "type": "refresh_summary",
                    "title": title,
                    "text": f"Recherche: {summary_text}",
                    "icon": "warning" if contradicted_count > 0 else "success",
                }]
                if new_count > 0:
                    db_notifications.append({
                        "type": "new_articles",
                        "title": title,
                        "text": f"{new_count} neue Meldung{'en' if new_count != 1 else ''} gefunden",
                        "icon": "info",
                    })
                for sc in status_changes:
                    db_notifications.append({
                        "type": "status_change",
                        "title": title,
                        "text": f"{sc['claim']}: {sc['old_status']} \u2192 {sc['new_status']}",
                        "icon": "error" if sc["new_status"] in ("contradicted", "disputed") else "success",
                    })
                if created_by:
                    await _create_notifications_for_incident(
                        db, incident_id, visibility, created_by, tenant_id, db_notifications
                    )
                # E-mail notifications.
                await _send_email_notifications_for_incident(
                    db, incident_id, title, visibility, created_by, tenant_id, db_notifications
                )

        # --- Close the refresh_log entry (with token statistics) ---------
        await db.execute(
            """UPDATE refresh_log SET
            completed_at = ?, articles_found = ?, status = 'completed',
            input_tokens = ?, output_tokens = ?,
            cache_creation_tokens = ?, cache_read_tokens = ?,
            total_cost_usd = ?, api_calls = ?
            WHERE id = ?""",
            (datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'), new_count,
             usage_acc.input_tokens, usage_acc.output_tokens,
             usage_acc.cache_creation_tokens, usage_acc.cache_read_tokens,
             round(usage_acc.total_cost_usd, 7), usage_acc.call_count, log_id),
        )
        await db.commit()
        logger.info(
            f"Token: {usage_acc.input_tokens} in / {usage_acc.output_tokens} out / "
            f"${usage_acc.total_cost_usd:.4f} ({usage_acc.call_count} Calls)"
        )
        # Kick off source discovery in the background (fire and forget).
        if unique_results:
            asyncio.create_task(_background_discover_sources(unique_results))
        if self._ws_manager:
            await self._ws_manager.broadcast_for_incident({
                "type": "refresh_complete",
                "incident_id": incident_id,
                "data": {"new_articles": new_count, "status": "idle"},
            }, visibility, created_by, tenant_id)
        logger.info(f"Refresh für Lage {incident_id} abgeschlossen: {new_count} neue Artikel")
    finally:
        await db.close()
# Module-level singleton: the one orchestrator instance shared by the app.
orchestrator = AgentOrchestrator()

236
src/agents/researcher.py Normale Datei
Datei anzeigen

@@ -0,0 +1,236 @@
"""Researcher-Agent: Sucht nach Informationen via Claude WebSearch."""
import asyncio
import json
import logging
import re
from agents.claude_client import call_claude, ClaudeUsage
logger = logging.getLogger("osint.researcher")
# Prompt for the standard news-research pass (ad-hoc incidents).
# Placeholders: {output_language}, {title}, {description}, {language_instruction}.
RESEARCH_PROMPT_TEMPLATE = """Du bist ein OSINT-Recherche-Agent für ein Lagemonitoring-System.
AUSGABESPRACHE: {output_language}
AUFTRAG: Suche nach aktuellen Informationen zu folgendem Vorfall:
Titel: {title}
Kontext: {description}
REGELN:
- Suche nur bei seriösen Nachrichtenquellen (Nachrichtenagenturen, Qualitätszeitungen, öffentlich-rechtliche Medien, Behörden)
- KEIN Social Media (Twitter/X, Facebook, Instagram, TikTok, Reddit)
- KEINE Boulevardmedien (Bild, Sun, Daily Mail etc.)
{language_instruction}
- Faktenbasiert und neutral - keine Spekulationen
- Nutze removepaywalls.com für Paywall-geschützte Artikel (z.B. Spiegel+, Zeit+, SZ+): https://www.removepaywalls.com/search?url=ARTIKEL_URL
Gib die Ergebnisse AUSSCHLIESSLICH als JSON-Array zurück, ohne Erklärungen davor oder danach.
Jedes Element hat diese Felder:
- "headline": Originale Überschrift
- "headline_de": Übersetzung in Ausgabesprache (falls Originalsprache abweicht)
- "source": Name der Quelle (z.B. "Reuters", "tagesschau")
- "source_url": URL des Artikels
- "content_summary": Zusammenfassung des Inhalts (2-3 Sätze, in Ausgabesprache)
- "language": Sprache des Originals (z.B. "de", "en", "fr")
- "published_at": Veröffentlichungsdatum falls bekannt (ISO-Format)
Antworte NUR mit dem JSON-Array. Keine Einleitung, keine Erklärung."""
# Prompt for the deep/background research pass (incident_type == "research"):
# broader source mix (trade press, think tanks, official documents), 8-15 hits.
DEEP_RESEARCH_PROMPT_TEMPLATE = """Du bist ein OSINT-Tiefenrecherche-Agent für ein Lagemonitoring-System.
AUSGABESPRACHE: {output_language}
AUFTRAG: Führe eine umfassende Hintergrundrecherche durch zu:
Titel: {title}
Kontext: {description}
RECHERCHE-STRATEGIE:
- Breite Suche: Hintergrundberichte, Analysen, Expertenmeinungen, Think-Tank-Publikationen
- Suche nach: Akteuren, Zusammenhängen, historischem Kontext, rechtlichen Rahmenbedingungen
- Akademische und Fachquellen zusätzlich zu Nachrichtenquellen
- Nutze removepaywalls.com für Paywall-geschützte Artikel (z.B. https://www.removepaywalls.com/search?url=ARTIKEL_URL)
{language_instruction}
- Ziel: 8-15 hochwertige Quellen
QUELLENTYPEN (priorisiert):
1. Fachzeitschriften und Branchenmedien
2. Qualitätszeitungen (Hintergrundberichte, Dossiers)
3. Think Tanks und Forschungsinstitute
4. Offizielle Dokumente und Pressemitteilungen
5. Nachrichtenagenturen (für Faktengrundlage)
AUSSCHLUSS:
- KEIN Social Media (Twitter/X, Facebook, Instagram, TikTok, Reddit)
- KEINE Boulevardmedien
- KEINE Meinungsblogs ohne Quellenbelege
Gib die Ergebnisse AUSSCHLIESSLICH als JSON-Array zurück, ohne Erklärungen davor oder danach.
Jedes Element hat diese Felder:
- "headline": Originale Überschrift
- "headline_de": Übersetzung in Ausgabesprache (falls Originalsprache abweicht)
- "source": Name der Quelle (z.B. "netzpolitik.org", "Handelsblatt")
- "source_url": URL des Artikels
- "content_summary": Ausführliche Zusammenfassung des Inhalts (3-5 Sätze, in Ausgabesprache)
- "language": Sprache des Originals (z.B. "de", "en", "fr")
- "published_at": Veröffentlichungsdatum falls bekannt (ISO-Format)
Antworte NUR mit dem JSON-Array. Keine Einleitung, keine Erklärung."""
# Language instructions substituted into {language_instruction} above.
LANG_INTERNATIONAL = "- Suche in Deutsch UND Englisch für internationale Abdeckung"
LANG_GERMAN_ONLY = "- Suche NUR auf Deutsch bei deutschsprachigen Quellen (Deutschland, Österreich, Schweiz)\n- KEINE englischsprachigen oder anderssprachigen Quellen"
LANG_DEEP_INTERNATIONAL = "- Suche in Deutsch, Englisch und weiteren relevanten Sprachen"
LANG_DEEP_GERMAN_ONLY = "- Suche NUR auf Deutsch bei deutschsprachigen Quellen (Deutschland, Österreich, Schweiz)\n- KEINE englischsprachigen oder anderssprachigen Quellen"
# Prompt asking Claude to pre-select relevant feeds; it must answer with a
# JSON array of 1-based feed numbers, e.g. [1, 2, 5, 12].
FEED_SELECTION_PROMPT_TEMPLATE = """Du bist ein OSINT-Analyst. Wähle aus dieser Feed-Liste die Feeds aus, die für die Lage relevant sein könnten.
LAGE: {title}
KONTEXT: {description}
INTERNATIONALE QUELLEN: {international}
FEEDS:
{feed_list}
REGELN:
- Wähle alle Feeds die thematisch oder regional relevant sein könnten
- Lieber einen Feed zu viel als zu wenig auswählen
- Bei "Internationale Quellen: Nein": Keine internationalen Feeds auswählen
- Allgemeine Nachrichtenfeeds (tagesschau, Spiegel etc.) sind fast immer relevant
- Antworte NUR mit einem JSON-Array der Nummern, z.B. [1, 2, 5, 12]"""
class ResearcherAgent:
    """Runs OSINT research through the Claude CLI (WebSearch tools)."""

    async def select_relevant_feeds(
        self,
        title: str,
        description: str,
        international: bool,
        feeds_metadata: list[dict],
    ) -> tuple[list[dict], ClaudeUsage | None]:
        """Ask Claude to pre-select the feeds relevant for a situation.

        Returns:
            (selected feeds, usage). On any failure the full feed list is
            returned instead, so feed selection can never reduce coverage
            to zero.
        """
        # Present the feeds as a numbered list so Claude can answer with indices.
        feed_lines = [
            f"{i}. {feed['name']} ({feed['domain']}) [{feed['category']}]"
            for i, feed in enumerate(feeds_metadata, 1)
        ]
        prompt = FEED_SELECTION_PROMPT_TEMPLATE.format(
            title=title,
            description=description or "Keine weitere Beschreibung",
            international="Ja" if international else "Nein",
            feed_list="\n".join(feed_lines),
        )
        try:
            result, usage = await call_claude(prompt, tools=None)
            # Extract the JSON array of 1-based indices from the reply.
            match = re.search(r'\[[\d\s,]+\]', result)
            if not match:
                logger.warning("Feed-Selektion: Kein JSON-Array in Antwort, nutze alle Feeds")
                return feeds_metadata, usage
            indices = json.loads(match.group())
            selected = [
                feeds_metadata[idx - 1]
                for idx in indices
                if isinstance(idx, int) and 1 <= idx <= len(feeds_metadata)
            ]
            if not selected:
                logger.warning("Feed-Selektion: Keine gültigen Indizes, nutze alle Feeds")
                return feeds_metadata, usage
            logger.info(
                f"Feed-Selektion: {len(selected)} von {len(feeds_metadata)} Feeds ausgewählt"
            )
            return selected, usage
        except Exception as e:
            logger.warning(f"Feed-Selektion fehlgeschlagen ({e}), nutze alle Feeds")
            return feeds_metadata, None

    async def search(self, title: str, description: str = "", incident_type: str = "adhoc", international: bool = True) -> tuple[list[dict], ClaudeUsage | None]:
        """Research an incident and return filtered article dicts.

        incident_type "research" uses the deep-research prompt; anything
        else uses the standard news prompt. On error: ([], None).
        """
        from config import OUTPUT_LANGUAGE
        if incident_type == "research":
            lang_instruction = LANG_DEEP_INTERNATIONAL if international else LANG_DEEP_GERMAN_ONLY
            template = DEEP_RESEARCH_PROMPT_TEMPLATE
        else:
            lang_instruction = LANG_INTERNATIONAL if international else LANG_GERMAN_ONLY
            template = RESEARCH_PROMPT_TEMPLATE
        prompt = template.format(
            title=title, description=description, language_instruction=lang_instruction,
            output_language=OUTPUT_LANGUAGE,
        )
        try:
            result, usage = await call_claude(prompt)
            articles = self._parse_response(result)
            # Load the exclusion list dynamically from the DB.
            excluded_sources = await self._get_excluded_sources()
            filtered = []
            for article in articles:
                # `or ""` guards against explicit JSON null values: dict.get's
                # default applies only to MISSING keys, so a null value would
                # otherwise crash .lower() with AttributeError.
                source = (article.get("source") or "").lower()
                source_url = (article.get("source_url") or "").lower()
                if any(excl in source or excl in source_url for excl in excluded_sources):
                    continue
                # German-only mode: drop non-German results Claude returned
                # anyway; a null/missing language defaults to "de", matching
                # the DB-insert default elsewhere.
                if not international and (article.get("language") or "de") != "de":
                    continue
                filtered.append(article)
            logger.info(f"Recherche ergab {len(filtered)} Artikel (von {len(articles)} gefundenen, international={international})")
            return filtered, usage
        except Exception as e:
            logger.error(f"Recherche-Fehler: {e}")
            return [], None

    async def _get_excluded_sources(self) -> list[str]:
        """Load excluded source domains from the DB, falling back to config.py."""
        try:
            from source_rules import get_source_rules
            rules = await get_source_rules()
            return rules.get("excluded_domains", [])
        except Exception as e:
            logger.warning(f"Fallback auf config.py für Excluded Sources: {e}")
            from config import EXCLUDED_SOURCES
            return list(EXCLUDED_SOURCES)

    def _parse_response(self, response: str) -> list[dict]:
        """Parse Claude's reply as a JSON array (direct parse, then extraction)."""
        try:
            data = json.loads(response)
            if isinstance(data, list):
                return data
        except json.JSONDecodeError:
            pass
        # Fall back to the outermost [...] span anywhere in the reply.
        match = re.search(r'\[.*\]', response, re.DOTALL)
        if match:
            try:
                data = json.loads(match.group())
                if isinstance(data, list):
                    return data
            except json.JSONDecodeError:
                pass
        logger.warning("Konnte Claude-Antwort nicht als JSON parsen")
        return []

106
src/auth.py Normale Datei
Datei anzeigen

@@ -0,0 +1,106 @@
"""JWT-Authentifizierung mit Magic-Link-Support und Multi-Tenancy."""
import secrets
import string
from datetime import datetime, timedelta, timezone
from jose import jwt, JWTError
import bcrypt as _bcrypt
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from config import JWT_SECRET, JWT_ALGORITHM, JWT_EXPIRE_HOURS, MAGIC_LINK_EXPIRE_MINUTES
security = HTTPBearer()
def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt (fresh salt per call)."""
    encoded = password.encode("utf-8")
    return _bcrypt.hashpw(encoded, _bcrypt.gensalt()).decode("utf-8")
def verify_password(password: str, password_hash: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash."""
    candidate = password.encode("utf-8")
    stored = password_hash.encode("utf-8")
    return _bcrypt.checkpw(candidate, stored)
# Issuer/audience claims embedded in and required from every token.
JWT_ISSUER = "intelsight-osint"
JWT_AUDIENCE = "intelsight-osint"


def create_token(
    user_id: int,
    username: str,
    email: str,
    role: str = "member",
    tenant_id: int = None,
    org_slug: str = None,
) -> str:
    """Create a signed JWT carrying the user's identity and tenant context."""
    issued_at = datetime.now(timezone.utc)
    claims = {
        "sub": str(user_id),
        "username": username,
        "email": email,
        "role": role,
        "tenant_id": tenant_id,
        "org_slug": org_slug,
        "iss": JWT_ISSUER,
        "aud": JWT_AUDIENCE,
        "iat": issued_at,
        # Token lifetime is controlled by config.JWT_EXPIRE_HOURS.
        "exp": issued_at + timedelta(hours=JWT_EXPIRE_HOURS),
    }
    return jwt.encode(claims, JWT_SECRET, algorithm=JWT_ALGORITHM)
def decode_token(token: str) -> dict:
    """Decode and validate a JWT; raises HTTP 401 on any validation failure."""
    try:
        # Signature, expiry, issuer and audience are all verified here.
        return jwt.decode(
            token,
            JWT_SECRET,
            algorithms=[JWT_ALGORITHM],
            issuer=JWT_ISSUER,
            audience=JWT_AUDIENCE,
        )
    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token ungueltig oder abgelaufen",
        )
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security),
) -> dict:
    """FastAPI dependency: resolve the authenticated user from the bearer token."""
    claims = decode_token(credentials.credentials)
    user = {
        "id": int(claims["sub"]),
        "username": claims["username"],
        "email": claims.get("email", ""),
        "role": claims.get("role", "member"),
        "tenant_id": claims.get("tenant_id"),
        "org_slug": claims.get("org_slug"),
    }
    return user
async def require_org_member(
    current_user: dict = Depends(get_current_user),
) -> dict:
    """FastAPI dependency: reject users that are not attached to a tenant."""
    if current_user.get("tenant_id"):
        return current_user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Keine Organisation zugeordnet",
    )
def generate_magic_token() -> str:
    """Return a 64-character URL-safe magic-link token (48 random bytes)."""
    return secrets.token_urlsafe(48)
def generate_magic_code() -> str:
    """Return a 6-digit numeric one-time code."""
    digits = [secrets.choice(string.digits) for _ in range(6)]
    return "".join(digits)

74
src/config.py Normale Datei
Datei anzeigen

@@ -0,0 +1,74 @@
"""Konfiguration für den OSINT Lagemonitor."""
import os
from zoneinfo import ZoneInfo
# Zeitzone für alle Anwendungs-Timestamps (DB, Logs, UI)
TIMEZONE = ZoneInfo("Europe/Berlin")
# Pfade
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DATA_DIR = os.path.join(BASE_DIR, "data")
LOG_DIR = os.path.join(BASE_DIR, "logs")
STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "static")
DB_PATH = os.path.join(DATA_DIR, "osint.db")
# JWT
JWT_SECRET = os.environ.get("JWT_SECRET")
if not JWT_SECRET:
raise RuntimeError("JWT_SECRET Umgebungsvariable muss gesetzt sein")
JWT_ALGORITHM = "HS256"
JWT_EXPIRE_HOURS = 24
# Claude CLI
CLAUDE_PATH = os.environ.get("CLAUDE_PATH", "/home/claude-dev/.claude/local/claude")
CLAUDE_MAX_CONCURRENT = 1
CLAUDE_TIMEOUT = 300 # Sekunden (Claude mit WebSearch braucht oft 2-3 Min)
# Ausgabesprache (Lagebilder, Faktenchecks, Zusammenfassungen)
OUTPUT_LANGUAGE = "Deutsch"
# Auto-Refresh
REFRESH_MIN_INTERVAL = 10 # Minuten
REFRESH_MAX_INTERVAL = 10080 # 1 Woche
REFRESH_DEFAULT_INTERVAL = 15
# RSS-Feeds (Fallback, primär aus DB geladen)
RSS_FEEDS = {
"deutsch": [
{"name": "tagesschau", "url": "https://www.tagesschau.de/index~rss2.xml"},
{"name": "ZDF heute", "url": "https://www.zdf.de/rss/zdf/nachrichten"},
{"name": "Spiegel", "url": "https://www.spiegel.de/schlagzeilen/index.rss"},
{"name": "Zeit", "url": "https://newsfeed.zeit.de/index"},
{"name": "FAZ", "url": "https://www.faz.net/rss/aktuell/"},
{"name": "Süddeutsche", "url": "https://rss.sueddeutsche.de/rss/Topthemen"},
{"name": "NZZ", "url": "https://www.nzz.ch/recent.rss"},
{"name": "Deutsche Welle", "url": "https://rss.dw.com/rdf/rss-de-all"},
],
"international": [
{"name": "Reuters", "url": "https://www.reutersagency.com/feed/"},
{"name": "AP News", "url": "https://rsshub.app/apnews/topics/apf-topnews"},
{"name": "BBC World", "url": "https://feeds.bbci.co.uk/news/world/rss.xml"},
{"name": "Al Jazeera", "url": "https://www.aljazeera.com/xml/rss/all.xml"},
{"name": "France24", "url": "https://www.france24.com/en/rss"},
],
"behoerden": [
{"name": "BMI", "url": "https://www.bmi.bund.de/SiteGlobals/Functions/RSSFeed/BMI/RSSNewsfeed/RSSNewsfeed_Nachrichten.xml"},
{"name": "Europol", "url": "https://www.europol.europa.eu/rss.xml"},
],
}
# Ausgeschlossene Quellen (Fallback, primär aus DB geladen)
EXCLUDED_SOURCES = ["bild.de", "bild", "twitter", "x.com", "facebook", "instagram", "tiktok", "reddit"]
# SMTP (E-Mail-Versand für Magic Links und Einladungen)
SMTP_HOST = os.environ.get("SMTP_HOST", "")
SMTP_PORT = int(os.environ.get("SMTP_PORT", "587"))
SMTP_USER = os.environ.get("SMTP_USER", "")
SMTP_PASSWORD = os.environ.get("SMTP_PASSWORD", "")
SMTP_FROM_EMAIL = os.environ.get("SMTP_FROM_EMAIL", "noreply@intelsight.de")
SMTP_FROM_NAME = os.environ.get("SMTP_FROM_NAME", "AegisSight Monitor")
SMTP_USE_TLS = os.environ.get("SMTP_USE_TLS", "true").lower() == "true"
# Magic Link
MAGIC_LINK_EXPIRE_MINUTES = 10
MAGIC_LINK_BASE_URL = os.environ.get("MAGIC_LINK_BASE_URL", "https://osint.intelsight.de")

504
src/database.py Normale Datei
Datei anzeigen

@@ -0,0 +1,504 @@
"""SQLite Datenbank-Setup und Zugriff."""
import aiosqlite
import logging
import os
from config import DB_PATH, DATA_DIR
logger = logging.getLogger("osint.database")
SCHEMA = """
CREATE TABLE IF NOT EXISTS organizations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
slug TEXT UNIQUE NOT NULL,
is_active INTEGER DEFAULT 1,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS licenses (
id INTEGER PRIMARY KEY AUTOINCREMENT,
organization_id INTEGER NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
license_type TEXT NOT NULL DEFAULT 'trial',
max_users INTEGER NOT NULL DEFAULT 5,
valid_from TIMESTAMP NOT NULL,
valid_until TIMESTAMP,
status TEXT NOT NULL DEFAULT 'active',
notes TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS users (
id INTEGER PRIMARY KEY AUTOINCREMENT,
email TEXT UNIQUE NOT NULL,
username TEXT NOT NULL,
password_hash TEXT,
organization_id INTEGER NOT NULL REFERENCES organizations(id),
role TEXT NOT NULL DEFAULT 'member',
is_active INTEGER DEFAULT 1,
last_login_at TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS magic_links (
id INTEGER PRIMARY KEY AUTOINCREMENT,
email TEXT NOT NULL,
token TEXT UNIQUE NOT NULL,
code TEXT NOT NULL,
purpose TEXT NOT NULL DEFAULT 'login',
user_id INTEGER REFERENCES users(id),
is_used INTEGER DEFAULT 0,
expires_at TIMESTAMP NOT NULL,
ip_address TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS portal_admins (
id INTEGER PRIMARY KEY AUTOINCREMENT,
username TEXT UNIQUE NOT NULL,
password_hash TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS incidents (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL,
description TEXT,
status TEXT DEFAULT 'active',
type TEXT DEFAULT 'adhoc',
refresh_mode TEXT DEFAULT 'manual',
refresh_interval INTEGER DEFAULT 15,
retention_days INTEGER DEFAULT 0,
visibility TEXT DEFAULT 'public',
summary TEXT,
sources_json TEXT,
international_sources INTEGER DEFAULT 1,
tenant_id INTEGER REFERENCES organizations(id),
created_by INTEGER REFERENCES users(id),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS articles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
incident_id INTEGER REFERENCES incidents(id) ON DELETE CASCADE,
headline TEXT NOT NULL,
headline_de TEXT,
source TEXT NOT NULL,
source_url TEXT,
content_original TEXT,
content_de TEXT,
language TEXT DEFAULT 'de',
published_at TIMESTAMP,
collected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
verification_status TEXT DEFAULT 'unverified',
tenant_id INTEGER REFERENCES organizations(id)
);
CREATE TABLE IF NOT EXISTS fact_checks (
id INTEGER PRIMARY KEY AUTOINCREMENT,
incident_id INTEGER REFERENCES incidents(id) ON DELETE CASCADE,
claim TEXT NOT NULL,
status TEXT DEFAULT 'developing',
sources_count INTEGER DEFAULT 0,
evidence TEXT,
is_notification INTEGER DEFAULT 0,
checked_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
tenant_id INTEGER REFERENCES organizations(id)
);
CREATE TABLE IF NOT EXISTS refresh_log (
id INTEGER PRIMARY KEY AUTOINCREMENT,
incident_id INTEGER REFERENCES incidents(id) ON DELETE CASCADE,
started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
completed_at TIMESTAMP,
articles_found INTEGER DEFAULT 0,
status TEXT DEFAULT 'running',
tenant_id INTEGER REFERENCES organizations(id)
);
CREATE TABLE IF NOT EXISTS incident_snapshots (
id INTEGER PRIMARY KEY AUTOINCREMENT,
incident_id INTEGER REFERENCES incidents(id) ON DELETE CASCADE,
summary TEXT,
sources_json TEXT,
article_count INTEGER DEFAULT 0,
fact_check_count INTEGER DEFAULT 0,
refresh_log_id INTEGER REFERENCES refresh_log(id),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
tenant_id INTEGER REFERENCES organizations(id)
);
CREATE TABLE IF NOT EXISTS sources (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
url TEXT,
domain TEXT,
source_type TEXT NOT NULL DEFAULT 'rss_feed',
category TEXT NOT NULL DEFAULT 'sonstige',
status TEXT NOT NULL DEFAULT 'active',
notes TEXT,
added_by TEXT,
article_count INTEGER DEFAULT 0,
last_seen_at TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
tenant_id INTEGER REFERENCES organizations(id)
);
CREATE TABLE IF NOT EXISTS notifications (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
incident_id INTEGER REFERENCES incidents(id) ON DELETE CASCADE,
type TEXT NOT NULL DEFAULT 'refresh_summary',
title TEXT NOT NULL,
text TEXT NOT NULL,
icon TEXT DEFAULT 'info',
is_read INTEGER DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
tenant_id INTEGER REFERENCES organizations(id)
);
CREATE INDEX IF NOT EXISTS idx_notifications_user_read ON notifications(user_id, is_read);
CREATE TABLE IF NOT EXISTS incident_subscriptions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
incident_id INTEGER NOT NULL REFERENCES incidents(id) ON DELETE CASCADE,
notify_email_summary INTEGER DEFAULT 0,
notify_email_new_articles INTEGER DEFAULT 0,
notify_email_status_change INTEGER DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE(user_id, incident_id)
);
"""
async def get_db() -> aiosqlite.Connection:
    """Open a new SQLite connection with row access by name, WAL journaling
    and foreign-key enforcement enabled."""
    os.makedirs(DATA_DIR, exist_ok=True)
    conn = await aiosqlite.connect(DB_PATH)
    conn.row_factory = aiosqlite.Row
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA foreign_keys=ON"):
        await conn.execute(pragma)
    return conn
async def init_db():
    """Create the schema and apply in-place migrations for existing databases.

    Order matters here: column migrations must run before any index that
    references the new columns is created. The connection is always closed,
    even when a migration step raises.
    """
    db = await get_db()
    try:
        # Idempotent: SCHEMA uses CREATE ... IF NOT EXISTS throughout.
        await db.executescript(SCHEMA)
        await db.commit()
        # --- Migrations for pre-existing databases ---
        # Check which incidents columns already exist (PRAGMA row[1] = column name).
        cursor = await db.execute("PRAGMA table_info(incidents)")
        columns = [row[1] for row in await cursor.fetchall()]
        if "type" not in columns:
            await db.execute("ALTER TABLE incidents ADD COLUMN type TEXT DEFAULT 'adhoc'")
            await db.commit()
        if "sources_json" not in columns:
            await db.execute("ALTER TABLE incidents ADD COLUMN sources_json TEXT")
            await db.commit()
        if "international_sources" not in columns:
            await db.execute("ALTER TABLE incidents ADD COLUMN international_sources INTEGER DEFAULT 1")
            await db.commit()
        if "visibility" not in columns:
            await db.execute("ALTER TABLE incidents ADD COLUMN visibility TEXT DEFAULT 'public'")
            await db.commit()
        if "tenant_id" not in columns:
            await db.execute("ALTER TABLE incidents ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
            logger.info("Migration: tenant_id zu incidents hinzugefuegt")
        # Migration: per-incident e-mail notification preferences.
        # The three columns are added together, keyed on the first one.
        if "notify_email_summary" not in columns:
            await db.execute("ALTER TABLE incidents ADD COLUMN notify_email_summary INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE incidents ADD COLUMN notify_email_new_articles INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE incidents ADD COLUMN notify_email_status_change INTEGER DEFAULT 0")
            await db.commit()
            logger.info("Migration: E-Mail-Benachrichtigungs-Spalten zu incidents hinzugefuegt")
        # Migration: token-accounting columns for refresh_log.
        cursor = await db.execute("PRAGMA table_info(refresh_log)")
        rl_columns = [row[1] for row in await cursor.fetchall()]
        if "input_tokens" not in rl_columns:
            await db.execute("ALTER TABLE refresh_log ADD COLUMN input_tokens INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE refresh_log ADD COLUMN output_tokens INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE refresh_log ADD COLUMN cache_creation_tokens INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE refresh_log ADD COLUMN cache_read_tokens INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE refresh_log ADD COLUMN total_cost_usd REAL DEFAULT 0.0")
            await db.execute("ALTER TABLE refresh_log ADD COLUMN api_calls INTEGER DEFAULT 0")
            await db.commit()
        if "trigger_type" not in rl_columns:
            await db.execute("ALTER TABLE refresh_log ADD COLUMN trigger_type TEXT DEFAULT 'manual'")
            await db.commit()
        if "retry_count" not in rl_columns:
            await db.execute("ALTER TABLE refresh_log ADD COLUMN retry_count INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE refresh_log ADD COLUMN error_message TEXT")
            await db.commit()
        if "tenant_id" not in rl_columns:
            await db.execute("ALTER TABLE refresh_log ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
        # Migration: remove reliability_score (if still present).
        # NOTE(review): ALTER TABLE ... DROP COLUMN requires SQLite >= 3.35 -- confirm
        # the deployment target; on older SQLite this raises.
        cursor = await db.execute("PRAGMA table_info(incidents)")
        inc_columns = [row[1] for row in await cursor.fetchall()]
        if "reliability_score" in inc_columns:
            await db.execute("ALTER TABLE incidents DROP COLUMN reliability_score")
            await db.commit()
        cursor = await db.execute("PRAGMA table_info(incident_snapshots)")
        snap_columns = [row[1] for row in await cursor.fetchall()]
        if "reliability_score" in snap_columns:
            await db.execute("ALTER TABLE incident_snapshots DROP COLUMN reliability_score")
            await db.commit()
        # Migration: notifications table (for existing DBs that predate it).
        cursor = await db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='notifications'")
        if not await cursor.fetchone():
            await db.executescript("""
            CREATE TABLE IF NOT EXISTS notifications (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
                incident_id INTEGER REFERENCES incidents(id) ON DELETE CASCADE,
                type TEXT NOT NULL DEFAULT 'refresh_summary',
                title TEXT NOT NULL,
                text TEXT NOT NULL,
                icon TEXT DEFAULT 'info',
                is_read INTEGER DEFAULT 0,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                tenant_id INTEGER REFERENCES organizations(id)
            );
            CREATE INDEX IF NOT EXISTS idx_notifications_user_read ON notifications(user_id, is_read);
            """)
            await db.commit()
        # Migration: incident_subscriptions table (for existing DBs).
        cursor = await db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='incident_subscriptions'")
        if not await cursor.fetchone():
            await db.executescript("""
            CREATE TABLE IF NOT EXISTS incident_subscriptions (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
                incident_id INTEGER NOT NULL REFERENCES incidents(id) ON DELETE CASCADE,
                notify_email_summary INTEGER DEFAULT 0,
                notify_email_new_articles INTEGER DEFAULT 0,
                notify_email_status_change INTEGER DEFAULT 0,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                UNIQUE(user_id, incident_id)
            );
            """)
            await db.commit()
            logger.info("Migration: incident_subscriptions-Tabelle erstellt")
        else:
            # Migration: rename column contradiction -> new_articles.
            # NOTE(review): RENAME COLUMN requires SQLite >= 3.25 -- confirm target version.
            cursor = await db.execute("PRAGMA table_info(incident_subscriptions)")
            sub_columns = [row[1] for row in await cursor.fetchall()]
            if "notify_email_contradiction" in sub_columns:
                await db.execute("ALTER TABLE incident_subscriptions RENAME COLUMN notify_email_contradiction TO notify_email_new_articles")
                await db.commit()
                logger.info("Migration: notify_email_contradiction -> notify_email_new_articles umbenannt")
        # Migration: role column for users (existing users are promoted to org_admin).
        cursor = await db.execute("PRAGMA table_info(users)")
        user_columns = [row[1] for row in await cursor.fetchall()]
        if "role" not in user_columns:
            await db.execute("ALTER TABLE users ADD COLUMN role TEXT DEFAULT 'member'")
            await db.execute("UPDATE users SET role = 'org_admin'")
            await db.commit()
            logger.info("Migration: role-Spalte zu users hinzugefuegt")
        # Migration: email, organization_id, is_active, last_login_at for users.
        if "email" not in user_columns:
            await db.execute("ALTER TABLE users ADD COLUMN email TEXT")
            await db.commit()
            logger.info("Migration: email zu users hinzugefuegt")
        if "organization_id" not in user_columns:
            await db.execute("ALTER TABLE users ADD COLUMN organization_id INTEGER REFERENCES organizations(id)")
            await db.commit()
            logger.info("Migration: organization_id zu users hinzugefuegt")
        # Create the index only after the column migration above.
        try:
            await db.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_users_org_username ON users(organization_id, username)")
            await db.commit()
        except Exception:
            pass  # index already exists or the column is still missing
        if "is_active" not in user_columns:
            await db.execute("ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1")
            await db.commit()
        if "last_login_at" not in user_columns:
            await db.execute("ALTER TABLE users ADD COLUMN last_login_at TIMESTAMP")
            await db.commit()
        # Migration: e-mail notification preferences for users.
        if "notify_email_summary" not in user_columns:
            await db.execute("ALTER TABLE users ADD COLUMN notify_email_summary INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE users ADD COLUMN notify_email_new_articles INTEGER DEFAULT 0")
            await db.execute("ALTER TABLE users ADD COLUMN notify_email_status_change INTEGER DEFAULT 0")
            await db.commit()
            logger.info("Migration: E-Mail-Benachrichtigungs-Spalten zu users hinzugefuegt")
        # Migration: tenant_id for articles.
        cursor = await db.execute("PRAGMA table_info(articles)")
        art_columns = [row[1] for row in await cursor.fetchall()]
        if "tenant_id" not in art_columns:
            await db.execute("ALTER TABLE articles ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
        # Migration: tenant_id for fact_checks.
        cursor = await db.execute("PRAGMA table_info(fact_checks)")
        fc_columns = [row[1] for row in await cursor.fetchall()]
        if "tenant_id" not in fc_columns:
            await db.execute("ALTER TABLE fact_checks ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
        # Migration: tenant_id for incident_snapshots.
        cursor = await db.execute("PRAGMA table_info(incident_snapshots)")
        snap_columns2 = [row[1] for row in await cursor.fetchall()]
        if "tenant_id" not in snap_columns2:
            await db.execute("ALTER TABLE incident_snapshots ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
        # Migration: tenant_id for sources.
        cursor = await db.execute("PRAGMA table_info(sources)")
        src_columns = [row[1] for row in await cursor.fetchall()]
        if "tenant_id" not in src_columns:
            await db.execute("ALTER TABLE sources ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
        # Migration: tenant_id for notifications.
        cursor = await db.execute("PRAGMA table_info(notifications)")
        notif_columns = [row[1] for row in await cursor.fetchall()]
        if "tenant_id" not in notif_columns:
            await db.execute("ALTER TABLE notifications ADD COLUMN tenant_id INTEGER REFERENCES organizations(id)")
            await db.commit()
        # Create indexes (after the column migrations above).
        for idx_sql in [
            "CREATE INDEX IF NOT EXISTS idx_incidents_tenant_status ON incidents(tenant_id, status)",
            "CREATE INDEX IF NOT EXISTS idx_articles_tenant_incident ON articles(tenant_id, incident_id)",
        ]:
            try:
                await db.execute(idx_sql)
                await db.commit()
            except Exception:
                pass
        # Mark orphaned 'running' refresh entries as errors at startup
        # (anything still running that started more than 15 minutes ago).
        await db.execute(
            """UPDATE refresh_log SET status = 'error', error_message = 'Verwaist beim Neustart',
            completed_at = CURRENT_TIMESTAMP
            WHERE status = 'running'
            AND started_at < datetime('now', '-15 minutes')"""
        )
        await db.commit()
        # Seed the sources table (only when empty).
        cursor = await db.execute("SELECT COUNT(*) as cnt FROM sources")
        row = await cursor.fetchone()
        if row["cnt"] == 0:
            await _seed_sources(db)
    finally:
        await db.close()
async def _seed_sources(db: aiosqlite.Connection):
    """Populate the sources table from the static feed configuration in config.py."""
    from urllib.parse import urlparse

    from config import RSS_FEEDS, EXCLUDED_SOURCES

    # Editorial category per feed name; anything unknown falls back to "sonstige".
    category_map = {
        "tagesschau": "oeffentlich-rechtlich",
        "ZDF heute": "oeffentlich-rechtlich",
        "Deutsche Welle": "oeffentlich-rechtlich",
        "Spiegel": "qualitaetszeitung",
        "Zeit": "qualitaetszeitung",
        "FAZ": "qualitaetszeitung",
        "Süddeutsche": "qualitaetszeitung",
        "NZZ": "qualitaetszeitung",
        "Reuters": "nachrichtenagentur",
        "AP News": "nachrichtenagentur",
        "BBC World": "international",
        "Al Jazeera": "international",
        "France24": "international",
        "BMI": "behoerde",
        "Europol": "behoerde",
    }
    for feeds in RSS_FEEDS.values():
        for feed in feeds:
            name, url = feed["name"], feed["url"]
            try:
                domain = urlparse(url).netloc.lower().replace("www.", "")
            except Exception:
                domain = ""
            await db.execute(
                """INSERT INTO sources (name, url, domain, source_type, category, status, added_by, tenant_id)
               VALUES (?, ?, ?, 'rss_feed', ?, 'active', 'system', NULL)""",
                (name, url, domain, category_map.get(name, "sonstige")),
            )
    # Excluded sources are stored with source_type='excluded'; name doubles as domain.
    for excluded in EXCLUDED_SOURCES:
        await db.execute(
            """INSERT INTO sources (name, domain, source_type, category, status, added_by, tenant_id)
           VALUES (?, ?, 'excluded', 'sonstige', 'active', 'system', NULL)""",
            (excluded, excluded),
        )
    await db.commit()
    await refresh_source_counts(db)
    feed_total = sum(len(RSS_FEEDS.get(key, [])) for key in ("deutsch", "international", "behoerden"))
    logger.info(f"Sources-Tabelle geseeded: {feed_total} RSS-Feeds, {len(EXCLUDED_SOURCES)} ausgeschlossene Quellen")
async def refresh_source_counts(db: aiosqlite.Connection):
    """Recompute article_count and last_seen_at for every non-excluded source."""
    cursor = await db.execute("SELECT id, name, domain FROM sources WHERE source_type != 'excluded'")
    for src in await cursor.fetchall():
        name = src["name"]
        domain = src["domain"] or ""
        # Match by source name; when a domain is known, also match by URL substring.
        if domain:
            stats_cursor = await db.execute(
                """SELECT COUNT(*) as cnt, MAX(collected_at) as last_seen
               FROM articles WHERE source = ? OR source_url LIKE ?""",
                (name, f"%{domain}%"),
            )
        else:
            stats_cursor = await db.execute(
                "SELECT COUNT(*) as cnt, MAX(collected_at) as last_seen FROM articles WHERE source = ?",
                (name,),
            )
        stats = await stats_cursor.fetchone()
        await db.execute(
            "UPDATE sources SET article_count = ?, last_seen_at = ? WHERE id = ?",
            (stats["cnt"], stats["last_seen"], src["id"]),
        )
    await db.commit()
async def db_dependency():
    """FastAPI dependency: yield a fresh connection and always close it afterwards."""
    conn = await get_db()
    try:
        yield conn
    finally:
        await conn.close()

0
src/email_utils/__init__.py Normale Datei
Datei anzeigen

Datei anzeigen

@@ -0,0 +1,102 @@
"""In-Memory Rate-Limiting fuer Magic-Link-Anfragen und Code-Verifizierung."""
import time
from collections import defaultdict
class RateLimiter:
    """Sliding-window rate limiter with two levels: per e-mail and per IP.

    Timestamps of accepted requests are kept in memory; entries older than
    the respective window are discarded on every check. Not persisted --
    counters reset on process restart.
    """

    def __init__(
        self,
        max_per_email: int = 3,
        email_window_seconds: int = 900,  # 15 minutes
        max_per_ip: int = 10,
        ip_window_seconds: int = 3600,  # 1 hour
    ):
        self.max_per_email = max_per_email
        self.email_window = email_window_seconds
        self.max_per_ip = max_per_ip
        self.ip_window = ip_window_seconds
        # key -> list of request timestamps (time.time() floats)
        self._email_requests: dict[str, list[float]] = defaultdict(list)
        self._ip_requests: dict[str, list[float]] = defaultdict(list)

    def _clean(self, entries: list[float], window: int) -> list[float]:
        """Return only the timestamps still inside the sliding window."""
        cutoff = time.time() - window
        return [t for t in entries if t > cutoff]

    def check(self, email: str, ip: str) -> tuple[bool, str]:
        """Check whether a request is allowed.

        Returns:
            (allowed, reason) -- True with "" when allowed, False with a
            user-facing (German) reason when blocked.
        """
        # Fix: removed an unused `now = time.time()` local; _clean() reads
        # the clock itself.
        # E-mail limit
        self._email_requests[email] = self._clean(self._email_requests[email], self.email_window)
        if len(self._email_requests[email]) >= self.max_per_email:
            return False, "Zu viele Anfragen fuer diese E-Mail-Adresse. Bitte warten."
        # IP limit
        self._ip_requests[ip] = self._clean(self._ip_requests[ip], self.ip_window)
        if len(self._ip_requests[ip]) >= self.max_per_ip:
            return False, "Zu viele Anfragen von dieser IP-Adresse. Bitte warten."
        return True, ""

    def record(self, email: str, ip: str):
        """Record one accepted request against both limits."""
        now = time.time()
        self._email_requests[email].append(now)
        self._ip_requests[ip].append(now)
class VerifyCodeLimiter:
    """Brute-force protection for magic-code verification.

    Failed attempts are counted per e-mail and per IP; once a limit is
    reached, further attempts are rejected until the window expires.
    """

    def __init__(
        self,
        max_attempts_per_email: int = 5,
        max_attempts_per_ip: int = 15,
        window_seconds: int = 600,  # 10 minutes (= magic-link lifetime)
    ):
        self.max_per_email = max_attempts_per_email
        self.max_per_ip = max_attempts_per_ip
        self.window = window_seconds
        # key -> list of failure timestamps (time.time() floats)
        self._email_failures: dict[str, list[float]] = defaultdict(list)
        self._ip_failures: dict[str, list[float]] = defaultdict(list)

    def _clean(self, entries: list[float]) -> list[float]:
        """Keep only failures that are still inside the window."""
        threshold = time.time() - self.window
        return [stamp for stamp in entries if stamp > threshold]

    def check(self, email: str, ip: str) -> tuple[bool, str]:
        """Return (allowed, reason) for a verification attempt."""
        recent_email = self._clean(self._email_failures[email])
        self._email_failures[email] = recent_email
        if len(recent_email) >= self.max_per_email:
            return False, "Zu viele Fehlversuche. Bitte neuen Code anfordern."
        recent_ip = self._clean(self._ip_failures[ip])
        self._ip_failures[ip] = recent_ip
        if len(recent_ip) >= self.max_per_ip:
            return False, "Zu viele Fehlversuche von dieser IP-Adresse."
        return True, ""

    def record_failure(self, email: str, ip: str):
        """Record one failed verification attempt."""
        stamp = time.time()
        self._email_failures[email].append(stamp)
        self._ip_failures[ip].append(stamp)

    def clear(self, email: str):
        """Reset the e-mail counter after a successful login."""
        self._email_failures.pop(email, None)
# Module-level singleton instances shared by all request handlers
# (in-memory only; counters reset on process restart).
magic_link_limiter = RateLimiter()
verify_code_limiter = VerifyCodeLimiter()

54
src/email_utils/sender.py Normale Datei
Datei anzeigen

@@ -0,0 +1,54 @@
"""Async E-Mail-Versand via SMTP."""
import logging
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import aiosmtplib
from config import (
SMTP_HOST,
SMTP_PORT,
SMTP_USER,
SMTP_PASSWORD,
SMTP_FROM_EMAIL,
SMTP_FROM_NAME,
SMTP_USE_TLS,
)
logger = logging.getLogger("osint.email")
async def send_email(to_email: str, subject: str, html_body: str) -> bool:
    """Send an HTML e-mail with a plain-text fallback part via SMTP.

    Returns:
        True on success, False when SMTP is unconfigured or sending fails.
    """
    if not SMTP_HOST:
        logger.warning(f"SMTP nicht konfiguriert - E-Mail an {to_email} nicht gesendet: {subject}")
        return False

    message = MIMEMultipart("alternative")
    message["From"] = f"{SMTP_FROM_NAME} <{SMTP_FROM_EMAIL}>"
    message["To"] = to_email
    message["Subject"] = subject
    # Plain-text fallback for clients that cannot render HTML.
    fallback = f"Betreff: {subject}\n\nBitte oeffnen Sie diese E-Mail in einem HTML-faehigen E-Mail-Client."
    for content, subtype in ((fallback, "plain"), (html_body, "html")):
        message.attach(MIMEText(content, subtype, "utf-8"))

    try:
        await aiosmtplib.send(
            message,
            hostname=SMTP_HOST,
            port=SMTP_PORT,
            username=SMTP_USER or None,
            password=SMTP_PASSWORD or None,
            start_tls=SMTP_USE_TLS,
        )
    except Exception as e:
        logger.error(f"E-Mail-Versand fehlgeschlagen an {to_email}: {e}")
        return False
    logger.info(f"E-Mail gesendet an {to_email}: {subject}")
    return True

138
src/email_utils/templates.py Normale Datei
Datei anzeigen

@@ -0,0 +1,138 @@
"""HTML-E-Mail-Vorlagen fuer Magic Links, Einladungen und Benachrichtigungen."""
def magic_link_login_email(username: str, code: str, link: str) -> tuple[str, str]:
    """Build the login e-mail containing a magic link and a one-time code.

    Args:
        username: Recipient display name; HTML-escaped before embedding,
            since it may contain user-supplied characters.
        code: One-time login code (server-generated digits).
        link: Magic-link URL rendered as the button target.

    Returns:
        (subject, html_body)
    """
    from html import escape

    subject = "AegisSight Monitor - Anmeldung"
    # Fix: escape user-controlled text so a crafted username cannot inject
    # HTML into the e-mail body.
    safe_username = escape(username)
    html = f"""<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: #0f172a; color: #e2e8f0; padding: 40px 20px;">
<div style="max-width: 480px; margin: 0 auto; background: #1e293b; border-radius: 12px; padding: 32px; border: 1px solid #334155;">
<h1 style="color: #f0b429; font-size: 20px; margin: 0 0 24px 0;">AegisSight Monitor</h1>
<p style="margin: 0 0 16px 0;">Hallo {safe_username},</p>
<p style="margin: 0 0 24px 0;">Klicken Sie auf den Link oder geben Sie den Code ein, um sich anzumelden:</p>
<div style="background: #0f172a; border-radius: 8px; padding: 20px; text-align: center; margin: 0 0 24px 0;">
<div style="font-size: 32px; font-weight: 700; letter-spacing: 8px; color: #f0b429; font-family: monospace;">{code}</div>
</div>
<div style="text-align: center; margin: 0 0 24px 0;">
<a href="{link}" style="display: inline-block; background: #f0b429; color: #0f172a; padding: 12px 32px; border-radius: 6px; text-decoration: none; font-weight: 600;">Jetzt anmelden</a>
</div>
<p style="color: #94a3b8; font-size: 13px; margin: 0;">Dieser Link ist 10 Minuten gueltig. Falls Sie diese Anmeldung nicht angefordert haben, ignorieren Sie diese E-Mail.</p>
</div>
</body>
</html>"""
    return subject, html
def invite_email(username: str, org_name: str, code: str, link: str) -> tuple[str, str]:
    """Build the invitation e-mail for a new user.

    Args:
        username: Recipient display name; HTML-escaped before embedding.
        org_name: Organization name; HTML-escaped in the body (the plain-text
            subject needs no escaping).
        code: Activation code (server-generated digits).
        link: Invitation URL rendered as the button target.

    Returns:
        (subject, html_body)
    """
    from html import escape

    subject = f"Einladung zum AegisSight Monitor - {org_name}"
    # Fix: escape user-controlled text so crafted names cannot inject HTML.
    safe_username = escape(username)
    safe_org_name = escape(org_name)
    html = f"""<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: #0f172a; color: #e2e8f0; padding: 40px 20px;">
<div style="max-width: 480px; margin: 0 auto; background: #1e293b; border-radius: 12px; padding: 32px; border: 1px solid #334155;">
<h1 style="color: #f0b429; font-size: 20px; margin: 0 0 24px 0;">AegisSight Monitor</h1>
<p style="margin: 0 0 16px 0;">Hallo {safe_username},</p>
<p style="margin: 0 0 16px 0;">Sie wurden zur Organisation <strong>{safe_org_name}</strong> im AegisSight Monitor eingeladen.</p>
<p style="margin: 0 0 24px 0;">Klicken Sie auf den Link, um Ihren Zugang zu aktivieren:</p>
<div style="background: #0f172a; border-radius: 8px; padding: 20px; text-align: center; margin: 0 0 24px 0;">
<div style="font-size: 32px; font-weight: 700; letter-spacing: 8px; color: #f0b429; font-family: monospace;">{code}</div>
</div>
<div style="text-align: center; margin: 0 0 24px 0;">
<a href="{link}" style="display: inline-block; background: #f0b429; color: #0f172a; padding: 12px 32px; border-radius: 6px; text-decoration: none; font-weight: 600;">Einladung annehmen</a>
</div>
<p style="color: #94a3b8; font-size: 13px; margin: 0;">Dieser Link ist 48 Stunden gueltig.</p>
</div>
</body>
</html>"""
    return subject, html
def incident_notification_email(
    username: str,
    incident_title: str,
    notifications: list[dict],
    dashboard_url: str,
) -> tuple[str, str]:
    """Build the notification e-mail for incident updates.

    Args:
        username: Recipient display name; HTML-escaped before embedding.
        incident_title: Title of the incident/research; escaped in the HTML
            body (the plain-text subject keeps it verbatim).
        notifications: List of {"text": ..., "icon": ...} dicts; each text is
            HTML-escaped, icons are mapped to safe entities below.
        dashboard_url: Link to the dashboard.

    Returns:
        (subject, html_body)
    """
    from html import escape

    subject = f"AegisSight - {incident_title}"
    # Icon entities rendered in front of each notification line.
    icon_map = {
        "success": "&#10003;",  # check mark
        "warning": "&#9888;",   # warning triangle
        "error": "&#10007;",    # cross
        "info": "&#9432;",      # info circle
    }
    color_map = {
        "success": "#22c55e",
        "warning": "#f0b429",
        "error": "#ef4444",
        "info": "#94a3b8",
    }
    items_html = ""
    for n in notifications:
        icon = icon_map.get(n.get("icon", "info"), "&#9432;")
        color = color_map.get(n.get("icon", "info"), "#94a3b8")
        # Fix: escape notification text -- it may quote external content.
        text = escape(n.get("text", ""))
        items_html += f"""
<div style="display: flex; align-items: flex-start; gap: 10px; padding: 10px 0; border-bottom: 1px solid #334155;">
<span style="color: {color}; font-size: 18px; line-height: 1;">{icon}</span>
<span style="color: #e2e8f0; font-size: 14px; line-height: 1.4;">{text}</span>
</div>"""
    safe_username = escape(username)
    safe_title = escape(incident_title)
    html = f"""<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: #0f172a; color: #e2e8f0; padding: 40px 20px;">
<div style="max-width: 480px; margin: 0 auto; background: #1e293b; border-radius: 12px; padding: 32px; border: 1px solid #334155;">
<h1 style="color: #f0b429; font-size: 20px; margin: 0 0 8px 0;">AegisSight Monitor</h1>
<p style="color: #94a3b8; font-size: 12px; margin: 0 0 24px 0;">Lagebericht-Benachrichtigung</p>
<p style="margin: 0 0 8px 0;">Hallo {safe_username},</p>
<p style="margin: 0 0 20px 0;">es gibt Neuigkeiten zur Lage <strong style="color: #f0b429;">{safe_title}</strong>:</p>
<div style="background: #0f172a; border-radius: 8px; padding: 4px 16px; margin: 0 0 24px 0;">
{items_html}
</div>
<div style="text-align: center; margin: 0 0 24px 0;">
<a href="{dashboard_url}" style="display: inline-block; background: #f0b429; color: #0f172a; padding: 12px 32px; border-radius: 6px; text-decoration: none; font-weight: 600;">Im Dashboard ansehen</a>
</div>
<p style="color: #64748b; font-size: 12px; margin: 0;">Diese Benachrichtigung kann in den Einstellungen im Dashboard deaktiviert werden.</p>
</div>
</body>
</html>"""
    return subject, html

157
src/feeds/rss_parser.py Normale Datei
Datei anzeigen

@@ -0,0 +1,157 @@
"""RSS-Feed Parser: Durchsucht vorkonfigurierte Feeds nach relevanten Meldungen."""
import asyncio
import logging
import feedparser
import httpx
from datetime import datetime, timezone
from config import TIMEZONE
logger = logging.getLogger("osint.rss")
class RSSParser:
    """Searches the configured RSS feeds for relevant articles."""
    # Stop words ignored during RSS keyword matching (German + a few English).
    STOP_WORDS = {
        "und", "oder", "der", "die", "das", "ein", "eine", "in", "im", "am", "an",
        "auf", "für", "mit", "von", "zu", "zum", "zur", "bei", "nach", "vor",
        "über", "unter", "ist", "sind", "hat", "the", "and", "for", "with", "from",
    }
async def search_feeds(self, search_term: str, international: bool = True, tenant_id: int = None) -> list[dict]:
"""Durchsucht RSS-Feeds nach einem Suchbegriff.
Args:
search_term: Suchbegriff
international: Wenn False, nur deutsche Feeds + Behoerden (keine internationalen)
tenant_id: Optionale Org-ID fuer tenant-spezifische Quellen
"""
all_articles = []
search_words = [
w for w in search_term.lower().split()
if w not in self.STOP_WORDS and len(w) >= 3
]
if not search_words:
search_words = search_term.lower().split()[:2]
rss_feeds = await self._get_rss_feeds(tenant_id=tenant_id)
# Feed-Kategorien filtern
if international:
categories = rss_feeds.keys()
else:
categories = [c for c in rss_feeds.keys() if c != "international"]
tasks = []
for category in categories:
for feed_config in rss_feeds.get(category, []):
tasks.append(self._fetch_feed(feed_config, search_words))
results = await asyncio.gather(*tasks, return_exceptions=True)
for result in results:
if isinstance(result, Exception):
logger.warning(f"Feed-Fehler: {result}")
continue
all_articles.extend(result)
cat_info = "alle" if international else "nur deutsch + behörden"
logger.info(f"RSS-Suche nach '{search_term}' ({cat_info}): {len(all_articles)} Treffer")
return all_articles
async def search_feeds_selective(self, search_term: str, selected_feeds: list[dict]) -> list[dict]:
"""Durchsucht nur die übergebenen Feeds (vorselektiert durch Claude).
Args:
search_term: Suchbegriff
selected_feeds: Liste von Feed-Dicts mit mindestens {"name", "url"}
"""
all_articles = []
search_words = [
w for w in search_term.lower().split()
if w not in self.STOP_WORDS and len(w) >= 3
]
if not search_words:
search_words = search_term.lower().split()[:2]
tasks = []
for feed_config in selected_feeds:
tasks.append(self._fetch_feed(feed_config, search_words))
results = await asyncio.gather(*tasks, return_exceptions=True)
for result in results:
if isinstance(result, Exception):
logger.warning(f"Feed-Fehler: {result}")
continue
all_articles.extend(result)
logger.info(f"RSS-Selektiv nach '{search_term}': {len(all_articles)} Treffer aus {len(selected_feeds)} Feeds")
return all_articles
async def _get_rss_feeds(self, tenant_id: int = None) -> dict:
"""Laedt RSS-Feeds aus der Datenbank (global + org-spezifisch)."""
try:
from source_rules import get_source_rules
rules = await get_source_rules(tenant_id=tenant_id)
return rules.get("rss_feeds", {})
except Exception as e:
logger.warning(f"Fallback auf config.py fuer RSS-Feeds: {e}")
from config import RSS_FEEDS
return dict(RSS_FEEDS)
async def _fetch_feed(self, feed_config: dict, search_words: list[str]) -> list[dict]:
"""Einzelnen RSS-Feed abrufen und durchsuchen."""
name = feed_config["name"]
url = feed_config["url"]
articles = []
try:
async with httpx.AsyncClient(timeout=15.0, follow_redirects=True) as client:
response = await client.get(url, headers={
"User-Agent": "OSINT-Monitor/1.0 (News Aggregator)"
})
response.raise_for_status()
feed = await asyncio.to_thread(feedparser.parse, response.text)
for entry in feed.entries[:50]:
title = entry.get("title", "")
summary = entry.get("summary", "")
text = f"{title} {summary}".lower()
# Prüfe ob mindestens ein Suchwort vorkommt
if all(word in text for word in search_words):
published = None
if hasattr(entry, "published_parsed") and entry.published_parsed:
try:
published = datetime(*entry.published_parsed[:6], tzinfo=timezone.utc).astimezone(TIMEZONE).isoformat()
except (TypeError, ValueError):
pass
articles.append({
"headline": title,
"headline_de": title if self._is_german(title) else None,
"source": name,
"source_url": entry.get("link", ""),
"content_original": summary[:1000] if summary else None,
"content_de": summary[:1000] if summary and self._is_german(summary) else None,
"language": "de" if self._is_german(title) else "en",
"published_at": published,
})
except Exception as e:
logger.debug(f"Feed {name} ({url}): {e}")
return articles
def _is_german(self, text: str) -> bool:
"""Einfache Heuristik ob ein Text deutsch ist."""
german_words = {"der", "die", "das", "und", "ist", "von", "mit", "für", "auf", "ein",
"eine", "den", "dem", "des", "sich", "wird", "nach", "bei", "auch",
"über", "wie", "aus", "hat", "zum", "zur", "als", "noch", "mehr",
"nicht", "aber", "oder", "sind", "vor", "einem", "einer", "wurde"}
words = set(text.lower().split())
matches = words & german_words
return len(matches) >= 2

331
src/main.py Normale Datei
Datei anzeigen

@@ -0,0 +1,331 @@
"""OSINT Lagemonitor - Hauptanwendung."""
import asyncio
import json
import logging
import os
import sys
from contextlib import asynccontextmanager
from datetime import datetime
from typing import Dict
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse, RedirectResponse
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from starlette.middleware.base import BaseHTTPMiddleware
from config import STATIC_DIR, LOG_DIR, DATA_DIR, TIMEZONE
from database import init_db, get_db
from auth import decode_token
from agents.orchestrator import orchestrator
# Logging: INFO level to both stdout (picked up by systemd/journal) and a
# file under LOG_DIR.
# NOTE(review): plain FileHandler grows unbounded — consider
# logging.handlers.RotatingFileHandler or relying on logrotate.
os.makedirs(LOG_DIR, exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler(os.path.join(LOG_DIR, "osint-monitor.log")),
    ],
)
logger = logging.getLogger("osint")
class WebSocketManager:
    """Tracks authenticated WebSocket connections for realtime updates."""

    def __init__(self):
        # Maps each live connection to the authenticated user's id.
        self._connections: Dict["WebSocket", int] = {}

    async def connect(self, websocket: "WebSocket", user_id: int):
        """Register an authenticated connection for a user."""
        self._connections[websocket] = user_id
        logger.info(f"WebSocket verbunden (User {user_id}, {len(self._connections)} aktiv)")

    def disconnect(self, websocket: "WebSocket"):
        """Unregister a connection (no-op if it is already gone)."""
        self._connections.pop(websocket, None)
        logger.info(f"WebSocket getrennt ({len(self._connections)} aktiv)")

    async def broadcast(self, message: dict):
        """Send a JSON message to every connected client.

        Fix: iterate over a snapshot of the connection map. Awaiting
        send_text yields to the event loop, so connect()/disconnect() can
        mutate the dict mid-broadcast; iterating the live dict risked
        "RuntimeError: dictionary changed size during iteration".
        Failed sends are dropped from the map afterwards.
        """
        if not self._connections:
            return
        data = json.dumps(message, ensure_ascii=False)
        disconnected = []
        for ws in list(self._connections):
            try:
                await ws.send_text(data)
            except Exception:
                disconnected.append(ws)
        for ws in disconnected:
            self._connections.pop(ws, None)

    async def broadcast_for_incident(self, message: dict, visibility: str, created_by: int):
        """Send a message only to authorized clients.

        Private incidents are delivered to their creator only; public
        incidents go to everyone. Same snapshot-iteration fix as
        broadcast().
        """
        if not self._connections:
            return
        data = json.dumps(message, ensure_ascii=False)
        disconnected = []
        for ws, user_id in list(self._connections.items()):
            if visibility == "private" and user_id != created_by:
                continue
            try:
                await ws.send_text(data)
            except Exception:
                disconnected.append(ws)
        for ws in disconnected:
            self._connections.pop(ws, None)
# Module-level singletons: one connection manager for the whole app and
# the APScheduler instance driving the periodic background jobs.
ws_manager = WebSocketManager()
# Scheduler for auto-refresh + cleanup jobs (jobs added in lifespan()).
scheduler = AsyncIOScheduler()
async def check_auto_refresh():
    """Scheduler job (every minute): enqueue refreshes for due incidents.

    For each active incident with refresh_mode='auto', compares the time
    since its last *auto* refresh against its refresh_interval (minutes)
    and enqueues a new refresh via the orchestrator when due. Skips
    incidents that already have a refresh in 'running' state.
    """
    db = await get_db()
    try:
        cursor = await db.execute(
            "SELECT id, refresh_interval FROM incidents WHERE status = 'active' AND refresh_mode = 'auto'"
        )
        incidents = await cursor.fetchall()
        now = datetime.now(TIMEZONE)
        for incident in incidents:
            incident_id = incident["id"]
            interval = incident["refresh_interval"]
            # Only look at the last AUTO refresh (manual refreshes are
            # ignored, so a manual refresh does not delay the schedule).
            cursor = await db.execute(
                "SELECT started_at FROM refresh_log WHERE incident_id = ? AND trigger_type = 'auto' ORDER BY started_at DESC LIMIT 1",
                (incident_id,),
            )
            last_refresh = await cursor.fetchone()
            should_refresh = False
            if not last_refresh:
                # Never auto-refreshed before -> due immediately.
                should_refresh = True
            else:
                last_time = datetime.fromisoformat(last_refresh["started_at"])
                if last_time.tzinfo is None:
                    # Naive timestamps are assumed to be local TIMEZONE.
                    # NOTE(review): replace(tzinfo=...) is only correct for
                    # zoneinfo/fixed-offset tzinfo objects, not pytz —
                    # confirm what config.TIMEZONE is.
                    last_time = last_time.replace(tzinfo=TIMEZONE)
                elapsed = (now - last_time).total_seconds() / 60
                if elapsed >= interval:
                    should_refresh = True
            if should_refresh:
                # Skip if a refresh for this incident is already running.
                cursor = await db.execute(
                    "SELECT id FROM refresh_log WHERE incident_id = ? AND status = 'running' LIMIT 1",
                    (incident_id,),
                )
                if await cursor.fetchone():
                    continue  # refresh in progress — skip this incident
                await orchestrator.enqueue_refresh(incident_id, trigger_type="auto")
    except Exception as e:
        # Best-effort job: log and let the next scheduler tick retry.
        logger.error(f"Auto-Refresh Check Fehler: {e}")
    finally:
        await db.close()
async def cleanup_expired():
    """Scheduler job (hourly) with three cleanup tasks:

    1) Archive active incidents whose retention_days have elapsed.
    2) Mark refresh_log rows stuck in 'running' >= 15 minutes as 'error'.
    3) Delete notifications older than 7 days.
    All changes are committed in one transaction at the end.
    """
    db = await get_db()
    try:
        cursor = await db.execute(
            "SELECT id, retention_days, created_at FROM incidents WHERE retention_days > 0 AND status = 'active'"
        )
        incidents = await cursor.fetchall()
        now = datetime.now(TIMEZONE)
        for incident in incidents:
            created = datetime.fromisoformat(incident["created_at"])
            if created.tzinfo is None:
                # Naive timestamps are assumed to be local TIMEZONE.
                # NOTE(review): replace(tzinfo=...) is only correct for
                # zoneinfo/fixed-offset tzinfo objects, not pytz.
                created = created.replace(tzinfo=TIMEZONE)
            age_days = (now - created).days
            if age_days >= incident["retention_days"]:
                await db.execute(
                    "UPDATE incidents SET status = 'archived' WHERE id = ?",
                    (incident["id"],),
                )
                logger.info(f"Lage {incident['id']} archiviert (Aufbewahrung abgelaufen)")
        # Clean up orphaned 'running' entries (> 15 minutes without completion),
        # e.g. left behind when a worker died mid-refresh.
        cursor = await db.execute(
            "SELECT id, incident_id, started_at FROM refresh_log WHERE status = 'running'"
        )
        orphans = await cursor.fetchall()
        for orphan in orphans:
            started = datetime.fromisoformat(orphan["started_at"])
            if started.tzinfo is None:
                started = started.replace(tzinfo=TIMEZONE)
            age_minutes = (now - started).total_seconds() / 60
            if age_minutes >= 15:
                await db.execute(
                    "UPDATE refresh_log SET status = 'error', completed_at = ?, error_message = ? WHERE id = ?",
                    (now.isoformat(), f"Verwaist (>{int(age_minutes)} Min ohne Abschluss, automatisch bereinigt)", orphan["id"]),
                )
                logger.warning(f"Verwaisten Refresh #{orphan['id']} für Lage {orphan['incident_id']} bereinigt ({int(age_minutes)} Min)")
        # Drop notifications older than 7 days.
        await db.execute("DELETE FROM notifications WHERE created_at < datetime('now', '-7 days')")
        await db.commit()
    except Exception as e:
        # Best-effort job: log and let the next hourly run retry.
        logger.error(f"Cleanup Fehler: {e}")
    finally:
        await db.close()
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application startup/shutdown hook (FastAPI lifespan).

    Startup: ensure the data directory, initialize the DB schema, mark
    refresh_log rows still 'running' from a previous process as errored,
    wire the orchestrator to the WebSocket manager, and start the
    scheduler jobs (auto-refresh every minute, cleanup hourly).
    Shutdown: stop scheduler and orchestrator.
    """
    # Startup
    os.makedirs(DATA_DIR, exist_ok=True)
    await init_db()
    # Clean up orphaned 'running' entries left over from a crash/restart.
    db = await get_db()
    try:
        result = await db.execute(
            "UPDATE refresh_log SET status = 'error', completed_at = ?, error_message = 'Verwaist (Neustart, automatisch bereinigt)' WHERE status = 'running'",
            (datetime.now(TIMEZONE).isoformat(),),
        )
        if result.rowcount > 0:
            await db.commit()
            logger.warning(f"{result.rowcount} verwaiste running-Einträge beim Start bereinigt")
    finally:
        await db.close()
    # Give the orchestrator a handle for pushing realtime updates.
    orchestrator.set_ws_manager(ws_manager)
    await orchestrator.start()
    scheduler.add_job(check_auto_refresh, "interval", minutes=1, id="auto_refresh")
    scheduler.add_job(cleanup_expired, "interval", hours=1, id="cleanup")
    scheduler.start()
    logger.info("OSINT Lagemonitor gestartet")
    yield
    # Shutdown
    scheduler.shutdown()
    await orchestrator.stop()
    logger.info("OSINT Lagemonitor gestoppt")
# FastAPI application; startup/shutdown handled by lifespan() above.
app = FastAPI(
    title="OSINT Lagemonitor",
    version="1.0.0",
    lifespan=lifespan,
)
# Security-Headers Middleware
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Attaches security headers (CSP, Permissions-Policy) to every response."""

    # Header values defined once as a class-level table; the policy strings
    # are byte-identical to what the application has always sent.
    _SECURITY_HEADERS = {
        "Content-Security-Policy": (
            "default-src 'self'; "
            "script-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net; "
            "style-src 'self' 'unsafe-inline' https://fonts.googleapis.com https://cdn.jsdelivr.net; "
            "font-src 'self' https://fonts.gstatic.com; "
            "img-src 'self' data:; "
            "connect-src 'self' wss: ws:; "
            "frame-ancestors 'none'"
        ),
        "Permissions-Policy": (
            "camera=(), microphone=(), geolocation=(), payment=()"
        ),
    }

    async def dispatch(self, request: Request, call_next):
        """Run the downstream handler, then stamp the security headers."""
        response = await call_next(request)
        for header_name, header_value in self._SECURITY_HEADERS.items():
            response.headers[header_name] = header_value
        return response
app.add_middleware(SecurityHeadersMiddleware)
# CORS: only the public production origin is allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["https://osint.intelsight.de"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE"],
    allow_headers=["Authorization", "Content-Type"],
)
# Routers are imported here, after `app` exists — presumably to avoid
# circular imports with router modules; TODO confirm.
from routers.auth import router as auth_router
from routers.incidents import router as incidents_router
from routers.sources import router as sources_router
from routers.notifications import router as notifications_router
from routers.feedback import router as feedback_router
app.include_router(auth_router)
app.include_router(incidents_router)
app.include_router(sources_router)
app.include_router(notifications_router)
app.include_router(feedback_router)
@app.websocket("/api/ws")
async def websocket_endpoint(websocket: WebSocket):
"""WebSocket-Endpunkt für Echtzeit-Updates."""
await websocket.accept()
# Token als erste Nachricht empfangen (nicht in URL)
try:
token = await asyncio.wait_for(websocket.receive_text(), timeout=5.0)
except asyncio.TimeoutError:
try:
await websocket.close(code=4001, reason="Token fehlt")
except Exception:
pass
return
except WebSocketDisconnect:
return
try:
payload = decode_token(token)
user_id = int(payload["sub"])
except Exception:
try:
await websocket.close(code=4001, reason="Token ungültig")
except Exception:
pass
return
# Authentifizierung erfolgreich
await ws_manager.connect(websocket, user_id)
await websocket.send_text("authenticated")
try:
while True:
data = await websocket.receive_text()
if data == "ping":
await websocket.send_text("pong")
except WebSocketDisconnect:
ws_manager.disconnect(websocket)
# Static files and frontend routing (vanilla-JS frontend, no framework).
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
@app.get("/")
async def root():
    """Serve the login page."""
    return FileResponse(os.path.join(STATIC_DIR, "index.html"))
@app.get("/dashboard")
async def dashboard():
    """Serve the dashboard page."""
    return FileResponse(os.path.join(STATIC_DIR, "dashboard.html"))
# Dev entry point; in production the app is served via systemd + Nginx
# reverse proxy on the same localhost port.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="127.0.0.1", port=8891)

0
src/middleware/__init__.py Normale Datei
Datei anzeigen

Datei anzeigen

@@ -0,0 +1,51 @@
"""FastAPI Dependency: Lizenzpruefung vor mutiernden Endpoints."""
from fastapi import Depends, HTTPException, status
from auth import get_current_user
from database import db_dependency
from services.license_service import check_license
import aiosqlite
async def require_active_license(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
) -> dict:
    """Dependency ensuring the user's organization has an active license.

    Blocks mutating operations when the organization is disabled and
    returns the current user dict enriched with its license info under
    the "license" key.
    """
    tenant_id = current_user.get("tenant_id")
    if not tenant_id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Keine Organisation zugeordnet",
        )
    license_info = await check_license(db, tenant_id)
    # A disabled organization is a hard stop; other invalid states
    # (expired/revoked) fall through into read-only mode, which is
    # enforced separately by require_writable_license.
    if not license_info["valid"] and license_info["status"] == "org_disabled":
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Organisation deaktiviert. Bitte kontaktieren Sie den Support.",
        )
    current_user["license"] = license_info
    return current_user
async def require_writable_license(
    current_user: dict = Depends(require_active_license),
) -> dict:
    """Dependency ensuring the license permits write access.

    Builds on require_active_license and rejects writes (new incidents,
    refreshes) when the license is in read-only mode (expired/revoked).
    """
    license_info = current_user.get("license", {})
    if license_info.get("read_only"):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Lizenz abgelaufen oder widerrufen. Nur Lesezugriff moeglich.",
        )
    return current_user

0
src/migration/__init__.py Normale Datei
Datei anzeigen

Datei anzeigen

@@ -0,0 +1,177 @@
"""Einmalige Datenmigration zu Multi-Tenancy.
Dieses Script:
1. Erstellt die AegisSight-Organisation
2. Erstellt eine permanente Lizenz fuer AegisSight
3. Weist bestehende Nutzer (rac00n, ch33tah) der AegisSight-Org zu
4. Setzt tenant_id auf alle bestehenden Daten
5. Erstellt Portal-Admin-Zugaenge
"""
import asyncio
import os
import sys
import shutil
from datetime import datetime
# Pfade fuer Imports
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from config import DB_PATH, TIMEZONE
from database import init_db, get_db
from auth import hash_password
# E-mail addresses for the pre-existing users, overridable via env vars.
# NOTE(review): real personal addresses are committed here as fallbacks —
# consider requiring the env vars instead of hard-coding them.
USER_EMAILS = {
    "rac00n": os.environ.get("RACOON_EMAIL", "momohomma@googlemail.com"),
    "ch33tah": os.environ.get("CHEETAH_EMAIL", "hendrik_gebhardt@gmx.de"),
}
async def migrate():
    """Run the one-shot multi-tenancy migration.

    Steps: back up the DB file, run the schema migration (init_db), then —
    only if no organization exists yet — create the AegisSight org with a
    permanent license, attach all existing users and data rows to it, and
    create portal-admin accounts with generated passwords (printed once).
    Idempotent: aborts early if organizations already exist.
    """
    # 1. Create a timestamped file backup of the SQLite DB.
    if os.path.exists(DB_PATH):
        backup_path = DB_PATH + f".backup-{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        shutil.copy2(DB_PATH, backup_path)
        print(f"Backup erstellt: {backup_path}")
    else:
        print("Keine bestehende Datenbank gefunden. Frische Installation.")
    # 2. Schema migration (init_db creates new tables and adds columns).
    await init_db()
    print("Schema-Migration abgeschlossen.")
    db = await get_db()
    try:
        # 3. Check whether the migration already ran (any org present).
        cursor = await db.execute("SELECT COUNT(*) as cnt FROM organizations")
        org_count = (await cursor.fetchone())["cnt"]
        if org_count > 0:
            print("Migration wurde bereits durchgefuehrt. Abbruch.")
            return
        # 4. Create the AegisSight organization.
        now = datetime.now(TIMEZONE).isoformat()
        cursor = await db.execute(
            """INSERT INTO organizations (name, slug, is_active, created_at, updated_at)
               VALUES (?, ?, 1, ?, ?)""",
            ("AegisSight", "aegissight", now, now),
        )
        aegis_org_id = cursor.lastrowid
        print(f"Organisation AegisSight angelegt (ID: {aegis_org_id})")
        # 5. Permanent license for AegisSight (no expiry, 50 seats).
        await db.execute(
            """INSERT INTO licenses (organization_id, license_type, max_users, valid_from, valid_until, status, notes)
               VALUES (?, 'permanent', 50, ?, NULL, 'active', 'Interne AegisSight-Lizenz')""",
            (aegis_org_id, now),
        )
        print("Permanente Lizenz fuer AegisSight erstellt")
        # 6. Attach all existing users to the AegisSight org as org_admins.
        cursor = await db.execute("SELECT id, username FROM users")
        users = await cursor.fetchall()
        for user in users:
            username = user["username"]
            # Known users get their real address; others a placeholder.
            email = USER_EMAILS.get(username, f"{username}@aegis-sight.de")
            await db.execute(
                """UPDATE users SET
                       organization_id = ?,
                       role = 'org_admin',
                       is_active = 1,
                       email = ?
                   WHERE id = ?""",
                (aegis_org_id, email, user["id"]),
            )
            print(f"Nutzer '{username}' -> AegisSight (org_admin, email: {email})")
        # 7. Set tenant_id on all existing incidents.
        await db.execute(
            "UPDATE incidents SET tenant_id = ? WHERE tenant_id IS NULL",
            (aegis_org_id,),
        )
        cursor = await db.execute("SELECT changes()")
        changes = (await cursor.fetchone())[0]
        print(f"{changes} Incidents mit tenant_id versehen")
        # 8. Set tenant_id on all existing articles.
        await db.execute(
            "UPDATE articles SET tenant_id = ? WHERE tenant_id IS NULL",
            (aegis_org_id,),
        )
        cursor = await db.execute("SELECT changes()")
        changes = (await cursor.fetchone())[0]
        print(f"{changes} Articles mit tenant_id versehen")
        # 9. Set tenant_id on all existing fact_checks.
        await db.execute(
            "UPDATE fact_checks SET tenant_id = ? WHERE tenant_id IS NULL",
            (aegis_org_id,),
        )
        # 10. Set tenant_id on all existing refresh_log rows.
        await db.execute(
            "UPDATE refresh_log SET tenant_id = ? WHERE tenant_id IS NULL",
            (aegis_org_id,),
        )
        # 11. Set tenant_id on all existing incident_snapshots.
        await db.execute(
            "UPDATE incident_snapshots SET tenant_id = ? WHERE tenant_id IS NULL",
            (aegis_org_id,),
        )
        # 12. Set tenant_id on all existing notifications.
        await db.execute(
            "UPDATE notifications SET tenant_id = ? WHERE tenant_id IS NULL",
            (aegis_org_id,),
        )
        # 13. System sources stay global (tenant_id=NULL);
        # only user-created sources get a tenant_id.
        await db.execute(
            "UPDATE sources SET tenant_id = ? WHERE added_by != 'system' AND tenant_id IS NULL",
            (aegis_org_id,),
        )
        print("Nutzer-Quellen mit tenant_id versehen (System-Quellen bleiben global)")
        # 14. Create portal-admin accounts with one-time printed passwords.
        print("\n--- Portal-Admin-Zugaenge ---")
        portal_users = ["rac00n", "ch33tah"]
        for pu in portal_users:
            # Skip if the portal admin already exists.
            cursor = await db.execute(
                "SELECT id FROM portal_admins WHERE username = ?", (pu,)
            )
            if await cursor.fetchone():
                print(f"Portal-Admin '{pu}' existiert bereits")
                continue
            # Generate a random 16-char password (CSPRNG via secrets).
            import secrets
            import string
            password = ''.join(secrets.choice(string.ascii_letters + string.digits + "!@#$%&*") for _ in range(16))
            pw_hash = hash_password(password)
            await db.execute(
                "INSERT INTO portal_admins (username, password_hash) VALUES (?, ?)",
                (pu, pw_hash),
            )
            print(f"Portal-Admin '{pu}' erstellt - Passwort: {password}")
        # Single commit at the end: the migration is all-or-nothing.
        await db.commit()
        print("\nMigration erfolgreich abgeschlossen!")
    except Exception as e:
        print(f"\nFEHLER bei Migration: {e}")
        raise
    finally:
        await db.close()
if __name__ == "__main__":
    asyncio.run(migrate())

245
src/models.py Normale Datei
Datei anzeigen

@@ -0,0 +1,245 @@
"""Pydantic Models für Request/Response Schemas."""
from pydantic import BaseModel, Field
from typing import Optional
from datetime import datetime
# Auth (legacy username/password login)
class LoginRequest(BaseModel):
    """Credentials for the legacy password login."""
    username: str
    password: str
# Auth (magic link)
class MagicLinkRequest(BaseModel):
    """Request body to send a magic-link login mail."""
    email: str = Field(min_length=1, max_length=254)  # 254 = max address length
class MagicLinkResponse(BaseModel):
    """Neutral acknowledgement (same text regardless of account existence)."""
    message: str
class VerifyTokenRequest(BaseModel):
    """Verify a magic-link token taken from the mailed URL."""
    token: str
class VerifyCodeRequest(BaseModel):
    """Verify the 6-digit code typed in manually (email + code)."""
    email: str = Field(min_length=1, max_length=254)
    code: str = Field(min_length=6, max_length=6)
class TokenResponse(BaseModel):
    """JWT access token issued after successful verification."""
    access_token: str
    token_type: str = "bearer"
    username: str
class UserResponse(BaseModel):
    """Minimal public user representation."""
    id: int
    username: str
class UserMeResponse(BaseModel):
    """Full profile of the authenticated user incl. org and license state."""
    id: int
    username: str
    email: str = ""
    role: str = "member"
    org_name: str = ""
    org_slug: str = ""
    tenant_id: Optional[int] = None
    license_status: str = "unknown"
    license_type: str = ""
    read_only: bool = False  # True when the license only permits reading
# Incidents ("Lagen" — monitored situations)
class IncidentCreate(BaseModel):
    """Payload to create a new incident."""
    title: str = Field(min_length=1, max_length=200)
    description: Optional[str] = None
    type: str = Field(default="adhoc", pattern="^(adhoc|research)$")
    refresh_mode: str = Field(default="manual", pattern="^(manual|auto)$")
    # Auto-refresh interval in minutes (10 min .. 7 days).
    refresh_interval: int = Field(default=15, ge=10, le=10080)
    # Days until auto-archival; 0 = keep forever.
    retention_days: int = Field(default=0, ge=0, le=999)
    international_sources: bool = True
    visibility: str = Field(default="public", pattern="^(public|private)$")
class IncidentUpdate(BaseModel):
    """Partial update for an incident; all fields optional."""
    title: Optional[str] = None
    description: Optional[str] = None
    type: Optional[str] = Field(default=None, pattern="^(adhoc|research)$")
    status: Optional[str] = Field(default=None, pattern="^(active|archived)$")
    refresh_mode: Optional[str] = Field(default=None, pattern="^(manual|auto)$")
    refresh_interval: Optional[int] = Field(default=None, ge=10, le=10080)
    retention_days: Optional[int] = Field(default=None, ge=0, le=999)
    international_sources: Optional[bool] = None
    visibility: Optional[str] = Field(default=None, pattern="^(public|private)$")
class IncidentResponse(BaseModel):
    """Incident as returned by the API, incl. aggregated counters."""
    id: int
    title: str
    description: Optional[str]
    type: str
    status: str
    refresh_mode: str
    refresh_interval: int
    retention_days: int
    visibility: str = "public"
    summary: Optional[str]
    sources_json: Optional[str] = None  # JSON-encoded source list
    international_sources: bool = True
    created_by: int
    created_by_username: str = ""
    created_at: str
    updated_at: str
    article_count: int = 0
    source_count: int = 0
# Articles
class ArticleResponse(BaseModel):
    """A collected article belonging to an incident."""
    id: int
    incident_id: int
    headline: str
    headline_de: Optional[str]  # German headline; None when untranslated
    source: str
    source_url: Optional[str]
    content_original: Optional[str]
    content_de: Optional[str]  # German content; None when untranslated
    language: str
    published_at: Optional[str]
    collected_at: str
    verification_status: str
# Fact checks
class FactCheckResponse(BaseModel):
    """Result of a fact check on a single claim."""
    id: int
    incident_id: int
    claim: str
    status: str
    sources_count: int
    evidence: Optional[str]
    is_notification: bool  # whether this result triggered a notification
    checked_at: str
# Sources (source management)
class SourceCreate(BaseModel):
    """Payload to register a new source (RSS feed, web source or exclusion)."""
    name: str = Field(min_length=1, max_length=200)
    url: Optional[str] = None
    domain: Optional[str] = None
    source_type: str = Field(default="rss_feed", pattern="^(rss_feed|web_source|excluded)$")
    category: str = Field(default="sonstige", pattern="^(nachrichtenagentur|oeffentlich-rechtlich|qualitaetszeitung|behoerde|fachmedien|think-tank|international|regional|sonstige)$")
    status: str = Field(default="active", pattern="^(active|inactive)$")
    notes: Optional[str] = None
class SourceUpdate(BaseModel):
    """Partial update for a source; all fields optional."""
    name: Optional[str] = Field(default=None, max_length=200)
    url: Optional[str] = None
    domain: Optional[str] = None
    source_type: Optional[str] = Field(default=None, pattern="^(rss_feed|web_source|excluded)$")
    category: Optional[str] = Field(default=None, pattern="^(nachrichtenagentur|oeffentlich-rechtlich|qualitaetszeitung|behoerde|fachmedien|think-tank|international|regional|sonstige)$")
    status: Optional[str] = Field(default=None, pattern="^(active|inactive)$")
    notes: Optional[str] = None
class SourceResponse(BaseModel):
    """Source as returned by the API, incl. usage statistics."""
    id: int
    name: str
    url: Optional[str]
    domain: Optional[str]
    source_type: str
    category: str
    status: str
    notes: Optional[str]
    added_by: Optional[str]  # username or 'system' for built-in sources
    article_count: int = 0
    last_seen_at: Optional[str] = None
    created_at: str
# Source discovery (detect feed/category for a given URL)
class DiscoverRequest(BaseModel):
    """URL to analyze for discoverable sources."""
    url: str = Field(min_length=1, max_length=500)
class DiscoverResponse(BaseModel):
    """Single discovered source candidate."""
    name: str
    domain: str
    rss_url: Optional[str] = None
    category: str
    source_type: str
# Multi-discovery (several feeds found on one domain)
class DiscoverMultiResponse(BaseModel):
    """Result of discovering multiple feeds on one domain."""
    domain: str
    category: str
    added_count: int
    skipped_count: int
    total_found: int
    sources: list[SourceResponse]
    fallback_single: bool = False  # True when multi-discovery fell back to one
# Domain actions (block/unblock)
class DomainActionRequest(BaseModel):
    """Block or unblock an entire domain."""
    domain: str = Field(min_length=1, max_length=200)
    notes: Optional[str] = None
# Refresh log
class RefreshLogResponse(BaseModel):
    """One refresh run of an incident (manual or auto)."""
    id: int
    started_at: str
    completed_at: Optional[str] = None
    articles_found: int = 0
    status: str
    trigger_type: str = "manual"
    retry_count: int = 0
    error_message: Optional[str] = None
    duration_seconds: Optional[float] = None
# Notifications
class NotificationResponse(BaseModel):
    """In-app notification shown in the dashboard."""
    id: int
    incident_id: Optional[int]
    type: str
    title: str
    text: str
    icon: str
    is_read: bool
    created_at: str
class NotificationMarkReadRequest(BaseModel):
    """Mark the given notifications as read."""
    notification_ids: Optional[list[int]] = None  # None = mark all as read
class SubscriptionUpdate(BaseModel):
    """Per-incident e-mail notification preferences (write)."""
    notify_email_summary: bool = False
    notify_email_new_articles: bool = False
    notify_email_status_change: bool = False
class SubscriptionResponse(BaseModel):
    """Per-incident e-mail notification preferences (read)."""
    notify_email_summary: bool = False
    notify_email_new_articles: bool = False
    notify_email_status_change: bool = False
class FeedbackRequest(BaseModel):
    """User feedback submitted from the dashboard."""
    category: str = Field(pattern="^(bug|feature|question|other)$")
    message: str = Field(min_length=10, max_length=5000)
class WSMessage(BaseModel):
    """Envelope for WebSocket push messages."""
    type: str  # new_article, status_update, notification, refresh_complete
    incident_id: int
    data: dict

276
src/routers/auth.py Normale Datei
Datei anzeigen

@@ -0,0 +1,276 @@
"""Auth-Router: Magic-Link-Login und Nutzerverwaltung."""
import logging
from datetime import datetime, timedelta, timezone
from fastapi import APIRouter, Depends, HTTPException, Request, status
from models import (
MagicLinkRequest,
MagicLinkResponse,
VerifyTokenRequest,
VerifyCodeRequest,
TokenResponse,
UserMeResponse,
)
from auth import (
create_token,
get_current_user,
generate_magic_token,
generate_magic_code,
verify_password,
)
from database import db_dependency
from config import TIMEZONE, MAGIC_LINK_EXPIRE_MINUTES, MAGIC_LINK_BASE_URL
from email_utils.sender import send_email
from email_utils.templates import magic_link_login_email
from email_utils.rate_limiter import magic_link_limiter, verify_code_limiter
import aiosqlite
logger = logging.getLogger("osint.auth")
router = APIRouter(prefix="/api/auth", tags=["auth"])
@router.post("/magic-link", response_model=MagicLinkResponse)
async def request_magic_link(
data: MagicLinkRequest,
request: Request,
db: aiosqlite.Connection = Depends(db_dependency),
):
"""Magic Link anfordern. Sendet E-Mail mit Link + Code."""
email = data.email.lower().strip()
ip = request.client.host if request.client else "unknown"
# Rate-Limit pruefen
allowed, reason = magic_link_limiter.check(email, ip)
if not allowed:
logger.warning(f"Rate-Limit fuer {email} von {ip}: {reason}")
# Trotzdem 200 zurueckgeben (kein Information-Leak)
return MagicLinkResponse(message="Wenn ein Konto existiert, wurde eine E-Mail gesendet.")
# Nutzer suchen
cursor = await db.execute(
"""SELECT u.id, u.email, u.username, u.role, u.organization_id, u.is_active,
o.is_active as org_active, o.slug as org_slug
FROM users u
JOIN organizations o ON o.id = u.organization_id
WHERE LOWER(u.email) = ?""",
(email,),
)
user = await cursor.fetchone()
if not user:
magic_link_limiter.record(email, ip)
return MagicLinkResponse(message="Wenn ein Konto existiert, wurde eine E-Mail gesendet.")
if not user["is_active"]:
magic_link_limiter.record(email, ip)
return MagicLinkResponse(message="Wenn ein Konto existiert, wurde eine E-Mail gesendet.")
if not user["org_active"]:
magic_link_limiter.record(email, ip)
return MagicLinkResponse(message="Wenn ein Konto existiert, wurde eine E-Mail gesendet.")
# Lizenz pruefen
from services.license_service import check_license
lic = await check_license(db, user["organization_id"])
if lic.get("status") == "org_disabled":
magic_link_limiter.record(email, ip)
return MagicLinkResponse(message="Wenn ein Konto existiert, wurde eine E-Mail gesendet.")
# Token + Code generieren
token = generate_magic_token()
code = generate_magic_code()
expires_at = (datetime.now(timezone.utc) + timedelta(minutes=MAGIC_LINK_EXPIRE_MINUTES)).strftime('%Y-%m-%d %H:%M:%S')
# Alte ungenutzte Magic Links fuer diese E-Mail invalidieren
await db.execute(
"UPDATE magic_links SET is_used = 1 WHERE email = ? AND is_used = 0",
(email,),
)
# Neuen Magic Link speichern
await db.execute(
"""INSERT INTO magic_links (email, token, code, purpose, user_id, expires_at, ip_address)
VALUES (?, ?, ?, 'login', ?, ?, ?)""",
(email, token, code, user["id"], expires_at, ip),
)
await db.commit()
# E-Mail senden
link = f"{MAGIC_LINK_BASE_URL}/auth/verify?token={token}"
subject, html = magic_link_login_email(user["username"], code, link)
await send_email(email, subject, html)
magic_link_limiter.record(email, ip)
return MagicLinkResponse(message="Wenn ein Konto existiert, wurde eine E-Mail gesendet.")
@router.post("/verify", response_model=TokenResponse)
async def verify_magic_link(
data: VerifyTokenRequest,
db: aiosqlite.Connection = Depends(db_dependency),
):
"""Magic Link verifizieren (Token aus URL)."""
cursor = await db.execute(
"""SELECT ml.*, u.username, u.email, u.role, u.organization_id, u.is_active,
o.slug as org_slug, o.is_active as org_active
FROM magic_links ml
JOIN users u ON u.id = ml.user_id
JOIN organizations o ON o.id = u.organization_id
WHERE ml.token = ? AND ml.is_used = 0""",
(data.token,),
)
ml = await cursor.fetchone()
if not ml:
raise HTTPException(status_code=400, detail="Ungueltiger oder bereits verwendeter Link")
# Ablauf pruefen
now = datetime.now(timezone.utc)
expires = datetime.fromisoformat(ml["expires_at"])
if expires.tzinfo is None:
expires = expires.replace(tzinfo=timezone.utc)
if now > expires:
raise HTTPException(status_code=400, detail="Link abgelaufen. Bitte neuen Link anfordern.")
if not ml["is_active"] or not ml["org_active"]:
raise HTTPException(status_code=403, detail="Konto oder Organisation deaktiviert")
# Magic Link als verwendet markieren
await db.execute("UPDATE magic_links SET is_used = 1 WHERE id = ?", (ml["id"],))
# Letzten Login aktualisieren
await db.execute(
"UPDATE users SET last_login_at = ? WHERE id = ?",
(now.isoformat(), ml["user_id"]),
)
await db.commit()
# JWT erstellen
token = create_token(
user_id=ml["user_id"],
username=ml["username"],
email=ml["email"],
role=ml["role"],
tenant_id=ml["organization_id"],
org_slug=ml["org_slug"],
)
return TokenResponse(
access_token=token,
username=ml["username"],
)
@router.post("/verify-code", response_model=TokenResponse)
async def verify_magic_code(
    data: VerifyCodeRequest,
    request: Request,
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Verify a 6-digit magic code together with the e-mail address."""
    email = data.email.lower().strip()
    ip = request.client.host if request.client else "unknown"
    # Brute-force protection: consult the failure counter first.
    allowed, reason = verify_code_limiter.check(email, ip)
    if not allowed:
        logger.warning(f"Verify-Code Rate-Limit fuer {email} von {ip}: {reason}")
        # While locked out, invalidate every open magic link for this address.
        await db.execute(
            "UPDATE magic_links SET is_used = 1 WHERE email = ? AND is_used = 0",
            (email,),
        )
        await db.commit()
        raise HTTPException(status_code=429, detail=reason)
    cursor = await db.execute(
        """SELECT ml.*, u.username, u.email as user_email, u.role, u.organization_id, u.is_active,
                  o.slug as org_slug, o.is_active as org_active
           FROM magic_links ml
           JOIN users u ON u.id = ml.user_id
           JOIN organizations o ON o.id = u.organization_id
           WHERE LOWER(ml.email) = ? AND ml.code = ? AND ml.is_used = 0
           ORDER BY ml.created_at DESC LIMIT 1""",
        (email, data.code),
    )
    match = await cursor.fetchone()
    if match is None:
        verify_code_limiter.record_failure(email, ip)
        logger.warning(f"Fehlgeschlagener Code-Versuch fuer {email} von {ip}")
        raise HTTPException(status_code=400, detail="Ungueltiger Code")
    # Expiry check: naive timestamps from the DB are interpreted as UTC.
    current = datetime.now(timezone.utc)
    valid_until = datetime.fromisoformat(match["expires_at"])
    if valid_until.tzinfo is None:
        valid_until = valid_until.replace(tzinfo=timezone.utc)
    if current > valid_until:
        raise HTTPException(status_code=400, detail="Code abgelaufen. Bitte neuen Code anfordern.")
    if not (match["is_active"] and match["org_active"]):
        raise HTTPException(status_code=403, detail="Konto oder Organisation deaktiviert")
    # Consume the one-time link and record the login time.
    await db.execute("UPDATE magic_links SET is_used = 1 WHERE id = ?", (match["id"],))
    await db.execute(
        "UPDATE users SET last_login_at = ? WHERE id = ?",
        (current.isoformat(), match["user_id"]),
    )
    await db.commit()
    # A successful login resets the failure counter.
    verify_code_limiter.clear(email)
    jwt_token = create_token(
        user_id=match["user_id"],
        username=match["username"],
        email=match["user_email"],
        role=match["role"],
        tenant_id=match["organization_id"],
        org_slug=match["org_slug"],
    )
    return TokenResponse(access_token=jwt_token, username=match["username"])
@router.get("/me", response_model=UserMeResponse)
async def get_me(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the current user enriched with organization and license info."""
    org_name = ""
    license_info = {}
    tenant_id = current_user.get("tenant_id")
    if tenant_id:
        # Resolve the organization's display name.
        cursor = await db.execute(
            "SELECT name FROM organizations WHERE id = ?",
            (tenant_id,),
        )
        org_row = await cursor.fetchone()
        if org_row:
            org_name = org_row["name"]
        # Attach the current license state of the tenant.
        from services.license_service import check_license
        license_info = await check_license(db, tenant_id)
    return UserMeResponse(
        id=current_user["id"],
        username=current_user["username"],
        email=current_user.get("email", ""),
        role=current_user["role"],
        org_name=org_name,
        org_slug=current_user.get("org_slug", ""),
        tenant_id=tenant_id,
        license_status=license_info.get("status", "unknown"),
        license_type=license_info.get("license_type", ""),
        read_only=license_info.get("read_only", False),
    )

115
src/routers/feedback.py Normale Datei
Datei anzeigen

@@ -0,0 +1,115 @@
"""Feedback-Router: Nutzer-Feedback per E-Mail an das Team."""
import html
import time
import logging
from collections import defaultdict
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import aiosmtplib
from fastapi import APIRouter, Depends, HTTPException, status
from auth import get_current_user
from models import FeedbackRequest
from config import (
SMTP_HOST,
SMTP_PORT,
SMTP_USER,
SMTP_PASSWORD,
SMTP_FROM_EMAIL,
SMTP_FROM_NAME,
SMTP_USE_TLS,
)
# Module-level logger for the feedback router.
logger = logging.getLogger("osint.feedback")
router = APIRouter(prefix="/api", tags=["feedback"])
# All feedback mails are delivered to this team mailbox.
FEEDBACK_EMAIL = "feedback@aegis-sight.de"
# Maps the category keys accepted from the client to German display labels.
CATEGORY_LABELS = {
    "bug": "Fehlerbericht",
    "feature": "Feature-Wunsch",
    "question": "Frage",
    "other": "Sonstiges",
}
# In-memory rate limiting: at most 3 messages per user per hour.
# NOTE(review): state is per-process and lost on restart — acceptable for
# a single-worker deployment; confirm if multiple workers are used.
_user_timestamps: dict[int, list[float]] = defaultdict(list)
_MAX_PER_HOUR = 3
_WINDOW = 3600  # sliding window length in seconds
@router.post("/feedback", status_code=204)
async def send_feedback(
    data: FeedbackRequest,
    current_user: dict = Depends(get_current_user),
):
    """Forward user feedback to the team mailbox via SMTP."""
    user_id = current_user["id"]
    # Sliding-window rate limit: drop timestamps older than one hour.
    now = time.time()
    recent = [t for t in _user_timestamps[user_id] if t > now - _WINDOW]
    _user_timestamps[user_id] = recent
    if len(recent) >= _MAX_PER_HOUR:
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail="Maximal 3 Feedback-Nachrichten pro Stunde. Bitte spaeter erneut versuchen.",
        )
    if not SMTP_HOST:
        logger.warning("SMTP nicht konfiguriert - Feedback nicht gesendet")
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="E-Mail-Versand nicht verfuegbar.",
        )
    username = current_user["username"]
    email = current_user.get("email", "")
    category_label = CATEGORY_LABELS.get(data.category, data.category)
    # Escape the user-supplied message before embedding it into HTML.
    message_escaped = html.escape(data.message)
    subject = f"[AegisSight Feedback] {category_label} von {username}"
    html_body = f"""\
<div style="font-family:Arial,sans-serif;max-width:600px;margin:0 auto;">
  <div style="background:#151D2E;color:#E8ECF4;padding:20px;border-radius:8px 8px 0 0;">
    <h2 style="margin:0;color:#C8A851;">Neues Feedback</h2>
  </div>
  <div style="background:#1A2440;color:#E8ECF4;padding:20px;border-radius:0 0 8px 8px;">
    <table style="border-collapse:collapse;">
      <tr><td style="color:#8896AB;padding:4px 16px 4px 0;">Kategorie:</td><td><strong>{category_label}</strong></td></tr>
      <tr><td style="color:#8896AB;padding:4px 16px 4px 0;">Nutzer:</td><td>{html.escape(username)}</td></tr>
      <tr><td style="color:#8896AB;padding:4px 16px 4px 0;">E-Mail:</td><td>{html.escape(email) if email else "nicht hinterlegt"}</td></tr>
    </table>
    <hr style="border:none;border-top:1px solid #1E2D45;margin:16px 0;">
    <div style="white-space:pre-wrap;line-height:1.5;">{message_escaped}</div>
  </div>
</div>"""
    # Build a multipart mail with plain-text fallback plus HTML body.
    msg = MIMEMultipart("alternative")
    msg["From"] = f"{SMTP_FROM_NAME} <{SMTP_FROM_EMAIL}>"
    msg["To"] = FEEDBACK_EMAIL
    msg["Subject"] = subject
    if email:
        # Let the team reply directly to the reporting user.
        msg["Reply-To"] = email
    text_fallback = f"Feedback von {username} ({category_label}):\n\n{data.message}"
    msg.attach(MIMEText(text_fallback, "plain", "utf-8"))
    msg.attach(MIMEText(html_body, "html", "utf-8"))
    try:
        await aiosmtplib.send(
            msg,
            hostname=SMTP_HOST,
            port=SMTP_PORT,
            username=SMTP_USER if SMTP_USER else None,
            password=SMTP_PASSWORD if SMTP_PASSWORD else None,
            start_tls=SMTP_USE_TLS,
        )
        # Count against the rate limit only after a successful send.
        _user_timestamps[user_id].append(now)
        logger.info(f"Feedback von {username} ({category_label}) gesendet")
    except Exception as e:
        logger.error(f"Feedback-E-Mail fehlgeschlagen: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="E-Mail konnte nicht gesendet werden.",
        )

636
src/routers/incidents.py Normale Datei
Datei anzeigen

@@ -0,0 +1,636 @@
"""Incidents-Router: Lagen verwalten (Multi-Tenant)."""
from fastapi import APIRouter, Depends, HTTPException, Query, status
from fastapi.responses import StreamingResponse
from models import IncidentCreate, IncidentUpdate, IncidentResponse, SubscriptionUpdate, SubscriptionResponse
from auth import get_current_user
from middleware.license_check import require_writable_license
from database import db_dependency
from datetime import datetime, timezone
import aiosqlite
import json
import re
import unicodedata
router = APIRouter(prefix="/api/incidents", tags=["incidents"])
# Whitelist of incident columns that clients may modify via PUT; any other
# field in the request body is silently ignored (prevents SQL column injection).
INCIDENT_UPDATE_COLUMNS = {
    "title", "description", "type", "status", "refresh_mode",
    "refresh_interval", "retention_days", "international_sources", "visibility",
}
async def _check_incident_access(
    db: aiosqlite.Connection, incident_id: int, user_id: int, tenant_id: int
) -> aiosqlite.Row:
    """Load an incident and enforce access (tenant match + visibility)."""
    cur = await db.execute(
        "SELECT * FROM incidents WHERE id = ? AND tenant_id = ?",
        (incident_id, tenant_id),
    )
    incident = await cur.fetchone()
    if incident is None:
        raise HTTPException(status_code=404, detail="Lage nicht gefunden")
    # Private incidents are visible to their creator only.
    if incident["visibility"] == "private" and incident["created_by"] != user_id:
        raise HTTPException(status_code=403, detail="Kein Zugriff auf private Lage")
    return incident
async def _enrich_incident(db: aiosqlite.Connection, row: aiosqlite.Row) -> dict:
    """Enrich an incident row with statistics and the creator's username.

    Adds ``article_count``, ``source_count`` (distinct sources) and
    ``created_by_username`` to a plain-dict copy of ``row``.
    """
    incident = dict(row)
    # Both counters come from the same table, so fetch them in a single
    # query instead of two round trips (COUNT(DISTINCT ...) ignores NULLs,
    # matching the previous two-query behavior).
    cursor = await db.execute(
        """SELECT COUNT(*) as article_cnt, COUNT(DISTINCT source) as source_cnt
           FROM articles WHERE incident_id = ?""",
        (incident["id"],),
    )
    stats = await cursor.fetchone()
    cursor = await db.execute(
        "SELECT username FROM users WHERE id = ?",
        (incident["created_by"],),
    )
    user_row = await cursor.fetchone()
    incident["article_count"] = stats["article_cnt"]
    incident["source_count"] = stats["source_cnt"]
    # Fall back to a placeholder if the creating user was deleted.
    incident["created_by_username"] = user_row["username"] if user_row else "Unbekannt"
    return incident
@router.get("", response_model=list[IncidentResponse])
async def list_incidents(
    status_filter: str = None,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """List the tenant's incidents (public ones plus the caller's private ones)."""
    tenant_id = current_user.get("tenant_id")
    user_id = current_user["id"]
    sql = "SELECT * FROM incidents WHERE tenant_id = ? AND (visibility = 'public' OR created_by = ?)"
    args = [tenant_id, user_id]
    if status_filter:
        sql += " AND status = ?"
        args.append(status_filter)
    sql += " ORDER BY updated_at DESC"
    cur = await db.execute(sql, args)
    records = await cur.fetchall()
    # Enrichment performs per-row queries, hence the sequential loop.
    return [await _enrich_incident(db, record) for record in records]
@router.post("", response_model=IncidentResponse, status_code=status.HTTP_201_CREATED)
async def create_incident(
    data: IncidentCreate,
    current_user: dict = Depends(require_writable_license),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Create a new incident for the caller's tenant."""
    tenant_id = current_user.get("tenant_id")
    timestamp = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
    insert_cur = await db.execute(
        """INSERT INTO incidents (title, description, type, refresh_mode, refresh_interval,
                                  retention_days, international_sources, visibility,
                                  tenant_id, created_by, created_at, updated_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        (
            data.title,
            data.description,
            data.type,
            data.refresh_mode,
            data.refresh_interval,
            data.retention_days,
            # SQLite has no boolean type; store as 0/1.
            1 if data.international_sources else 0,
            data.visibility,
            tenant_id,
            current_user["id"],
            timestamp,
            timestamp,
        ),
    )
    await db.commit()
    # Re-read the freshly inserted row so defaults are included in the response.
    cur = await db.execute("SELECT * FROM incidents WHERE id = ?", (insert_cur.lastrowid,))
    created = await cur.fetchone()
    return await _enrich_incident(db, created)
@router.get("/refreshing")
async def get_refreshing_incidents(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the IDs of all incidents with a refresh in progress (own tenant only)."""
    tenant_id = current_user.get("tenant_id")
    cur = await db.execute(
        """SELECT rl.incident_id, rl.started_at FROM refresh_log rl
           JOIN incidents i ON i.id = rl.incident_id
           WHERE rl.status = 'running'
             AND i.tenant_id = ?
             AND (i.visibility = 'public' OR i.created_by = ?)""",
        (tenant_id, current_user["id"]),
    )
    running = await cur.fetchall()
    ids = []
    details = {}
    for entry in running:
        ids.append(entry["incident_id"])
        details[str(entry["incident_id"])] = {"started_at": entry["started_at"]}
    return {"refreshing": ids, "details": details}
@router.get("/{incident_id}", response_model=IncidentResponse)
async def get_incident(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Fetch a single incident (access-checked)."""
    incident = await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    return await _enrich_incident(db, incident)
@router.put("/{incident_id}", response_model=IncidentResponse)
async def update_incident(
    incident_id: int,
    data: IncidentUpdate,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Update an incident; only whitelisted columns are applied."""
    tenant_id = current_user.get("tenant_id")
    row = await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
    # Keep only known columns — anything else in the payload is ignored.
    changes = {
        field: value
        for field, value in data.model_dump(exclude_none=True).items()
        if field in INCIDENT_UPDATE_COLUMNS
    }
    if not changes:
        return await _enrich_incident(db, row)
    changes["updated_at"] = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
    assignments = ", ".join(f"{col} = ?" for col in changes)
    await db.execute(
        f"UPDATE incidents SET {assignments} WHERE id = ?",
        [*changes.values(), incident_id],
    )
    await db.commit()
    cur = await db.execute("SELECT * FROM incidents WHERE id = ?", (incident_id,))
    updated = await cur.fetchone()
    return await _enrich_incident(db, updated)
@router.delete("/{incident_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_incident(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Delete an incident (creator only)."""
    cur = await db.execute(
        "SELECT id, created_by FROM incidents WHERE id = ? AND tenant_id = ?",
        (incident_id, current_user.get("tenant_id")),
    )
    found = await cur.fetchone()
    if found is None:
        raise HTTPException(status_code=404, detail="Lage nicht gefunden")
    # Deletion is restricted to the creator, regardless of visibility.
    if found["created_by"] != current_user["id"]:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Nur der Ersteller kann diese Lage loeschen",
        )
    await db.execute("DELETE FROM incidents WHERE id = ?", (incident_id,))
    await db.commit()
@router.get("/{incident_id}/articles")
async def get_articles(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return every article of an incident, newest first."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    cur = await db.execute(
        "SELECT * FROM articles WHERE incident_id = ? ORDER BY collected_at DESC",
        (incident_id,),
    )
    return [dict(record) for record in await cur.fetchall()]
@router.get("/{incident_id}/snapshots")
async def get_snapshots(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the situation reports (snapshots) of an incident, newest first."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    cur = await db.execute(
        """SELECT id, incident_id, summary, sources_json,
                  article_count, fact_check_count, created_at
           FROM incident_snapshots WHERE incident_id = ?
           ORDER BY created_at DESC""",
        (incident_id,),
    )
    return [dict(record) for record in await cur.fetchall()]
@router.get("/{incident_id}/factchecks")
async def get_factchecks(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the fact checks of an incident, newest first."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    cur = await db.execute(
        "SELECT * FROM fact_checks WHERE incident_id = ? ORDER BY checked_at DESC",
        (incident_id,),
    )
    return [dict(record) for record in await cur.fetchall()]
@router.get("/{incident_id}/refresh-log")
async def get_refresh_log(
    incident_id: int,
    limit: int = 20,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the refresh history of an incident (capped at 100 entries)."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    cur = await db.execute(
        """SELECT id, started_at, completed_at, articles_found, status,
                  trigger_type, retry_count, error_message
           FROM refresh_log WHERE incident_id = ?
           ORDER BY started_at DESC LIMIT ?""",
        (incident_id, min(limit, 100)),
    )

    def _duration(start_iso, end_iso):
        # Duration in seconds, or None if timestamps are absent/unparseable.
        if not (start_iso and end_iso):
            return None
        try:
            delta = datetime.fromisoformat(end_iso) - datetime.fromisoformat(start_iso)
            return round(delta.total_seconds(), 1)
        except Exception:
            return None

    results = []
    for record in await cur.fetchall():
        entry = dict(record)
        entry["duration_seconds"] = _duration(entry["started_at"], entry["completed_at"])
        results.append(entry)
    return results
@router.get("/{incident_id}/subscription", response_model=SubscriptionResponse)
async def get_subscription(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the caller's e-mail subscription settings for an incident."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    cur = await db.execute(
        """SELECT notify_email_summary, notify_email_new_articles, notify_email_status_change
           FROM incident_subscriptions WHERE user_id = ? AND incident_id = ?""",
        (current_user["id"], incident_id),
    )
    subscription = await cur.fetchone()
    if subscription is None:
        # No row yet: everything defaults to "off".
        return {"notify_email_summary": False, "notify_email_new_articles": False, "notify_email_status_change": False}
    return dict(subscription)
@router.put("/{incident_id}/subscription", response_model=SubscriptionResponse)
async def update_subscription(
    incident_id: int,
    data: SubscriptionUpdate,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Set the caller's e-mail subscription settings for an incident (upsert)."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    # SQLite stores booleans as 0/1.
    flags = (
        1 if data.notify_email_summary else 0,
        1 if data.notify_email_new_articles else 0,
        1 if data.notify_email_status_change else 0,
    )
    await db.execute(
        """INSERT INTO incident_subscriptions (user_id, incident_id, notify_email_summary, notify_email_new_articles, notify_email_status_change)
           VALUES (?, ?, ?, ?, ?)
           ON CONFLICT(user_id, incident_id) DO UPDATE SET
               notify_email_summary = excluded.notify_email_summary,
               notify_email_new_articles = excluded.notify_email_new_articles,
               notify_email_status_change = excluded.notify_email_status_change""",
        (current_user["id"], incident_id) + flags,
    )
    await db.commit()
    return {
        "notify_email_summary": data.notify_email_summary,
        "notify_email_new_articles": data.notify_email_new_articles,
        "notify_email_status_change": data.notify_email_status_change,
    }
@router.post("/{incident_id}/refresh")
async def trigger_refresh(
    incident_id: int,
    current_user: dict = Depends(require_writable_license),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Queue a manual refresh for an incident."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    # Imported lazily to avoid a circular import at module load time.
    from agents.orchestrator import orchestrator
    enqueued = await orchestrator.enqueue_refresh(incident_id)
    return {"status": "queued" if enqueued else "skipped", "incident_id": incident_id}
@router.post("/{incident_id}/cancel-refresh")
async def cancel_refresh(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Abort a running refresh for an incident."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    # Imported lazily to avoid a circular import at module load time.
    from agents.orchestrator import orchestrator
    cancelled = await orchestrator.cancel_refresh(incident_id)
    return {"status": "cancelling" if cancelled else "not_running"}
def _slugify(text: str) -> str:
"""Dateinamen-sicherer Slug aus Titel."""
replacements = {
"\u00e4": "ae", "\u00f6": "oe", "\u00fc": "ue", "\u00df": "ss",
"\u00c4": "Ae", "\u00d6": "Oe", "\u00dc": "Ue",
}
for src, dst in replacements.items():
text = text.replace(src, dst)
text = unicodedata.normalize("NFKD", text)
text = re.sub(r"[^\w\s-]", "", text)
text = re.sub(r"[\s_]+", "-", text).strip("-")
return text[:80].lower()
def _build_markdown_export(
    incident: dict, articles: list, fact_checks: list,
    snapshots: list, scope: str, creator: str
) -> str:
    """Assemble the Markdown export document.

    Renders the header, the current summary ("Lagebild"), the source list
    parsed from ``incident["sources_json"]`` and the fact checks. When
    ``scope == "full"``, an article table and the snapshot history are
    appended as well. Returns the complete document as one string.
    """
    typ = "Hintergrundrecherche" if incident.get("type") == "research" else "Breaking News"
    # "YYYY-MM-DD HH:MM" from an ISO timestamp; empty string if missing.
    updated = (incident.get("updated_at") or "")[:16].replace("T", " ")
    lines = []
    lines.append(f"# {incident['title']}")
    lines.append(f"> {typ} | Erstellt von {creator} | Stand: {updated}")
    lines.append("")
    # Current situation summary.
    summary = incident.get("summary") or "*Noch kein Lagebild verf\u00fcgbar.*"
    lines.append("## Lagebild")
    lines.append("")
    lines.append(summary)
    lines.append("")
    # Source list parsed from sources_json (stored as JSON string or list).
    sources_json = incident.get("sources_json")
    if sources_json:
        try:
            sources = json.loads(sources_json) if isinstance(sources_json, str) else sources_json
            if sources:
                lines.append("## Quellenverzeichnis")
                lines.append("")
                for i, src in enumerate(sources, 1):
                    name = src.get("name") or src.get("title") or src.get("url", "")
                    url = src.get("url", "")
                    if url:
                        lines.append(f"{i}. [{name}]({url})")
                    else:
                        lines.append(f"{i}. {name}")
                lines.append("")
        except (json.JSONDecodeError, TypeError):
            # Malformed sources_json: silently omit the section.
            pass
    # Fact-check section with German status labels.
    if fact_checks:
        lines.append("## Faktencheck")
        lines.append("")
        for fc in fact_checks:
            claim = fc.get("claim", "")
            fc_status = fc.get("status", "")
            sources_count = fc.get("sources_count", 0)
            evidence = fc.get("evidence", "")
            # Unknown status values fall through unchanged.
            status_label = {
                "confirmed": "Best\u00e4tigt", "unconfirmed": "Unbest\u00e4tigt",
                "disputed": "Umstritten", "false": "Falsch",
            }.get(fc_status, fc_status)
            line = f"- **{claim}** \u2014 {status_label} ({sources_count} Quellen)"
            if evidence:
                line += f"\n {evidence}"
            lines.append(line)
        lines.append("")
    # scope == "full" only: article overview as a Markdown table.
    if scope == "full" and articles:
        lines.append("## Artikel\u00fcbersicht")
        lines.append("")
        lines.append("| Headline | Quelle | Sprache | Datum |")
        lines.append("|----------|--------|---------|-------|")
        for art in articles:
            # Escape "|" so cell content cannot break the table layout.
            headline = (art.get("headline_de") or art.get("headline") or "").replace("|", "/")
            source = (art.get("source") or "").replace("|", "/")
            lang = art.get("language", "")
            pub = (art.get("published_at") or art.get("collected_at") or "")[:16]
            lines.append(f"| {headline} | {source} | {lang} | {pub} |")
        lines.append("")
    # scope == "full" only: snapshot history.
    if scope == "full" and snapshots:
        lines.append("## Snapshot-Verlauf")
        lines.append("")
        for snap in snapshots:
            snap_date = (snap.get("created_at") or "")[:16].replace("T", " ")
            art_count = snap.get("article_count", 0)
            fc_count = snap.get("fact_check_count", 0)
            lines.append(f"### Snapshot vom {snap_date}")
            lines.append(f"Artikel: {art_count} | Faktenchecks: {fc_count}")
            lines.append("")
            snap_summary = snap.get("summary", "")
            if snap_summary:
                lines.append(snap_summary)
                lines.append("")
    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M UTC")
    lines.append("---")
    lines.append(f"*Exportiert am {now} aus AegisSight Monitor*")
    return "\n".join(lines)
def _build_json_export(
incident: dict, articles: list, fact_checks: list,
snapshots: list, scope: str, creator: str
) -> dict:
"""Strukturiertes JSON fuer Export."""
now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
sources = []
sources_json = incident.get("sources_json")
if sources_json:
try:
sources = json.loads(sources_json) if isinstance(sources_json, str) else sources_json
except (json.JSONDecodeError, TypeError):
pass
export = {
"export_version": "1.0",
"exported_at": now,
"scope": scope,
"incident": {
"id": incident["id"],
"title": incident["title"],
"description": incident.get("description"),
"type": incident.get("type"),
"status": incident.get("status"),
"visibility": incident.get("visibility"),
"created_by": creator,
"created_at": incident.get("created_at"),
"updated_at": incident.get("updated_at"),
"summary": incident.get("summary"),
"international_sources": bool(incident.get("international_sources")),
},
"sources": sources,
"fact_checks": [
{
"claim": fc.get("claim"),
"status": fc.get("status"),
"sources_count": fc.get("sources_count"),
"evidence": fc.get("evidence"),
"checked_at": fc.get("checked_at"),
}
for fc in fact_checks
],
}
if scope == "full":
export["articles"] = [
{
"headline": art.get("headline"),
"headline_de": art.get("headline_de"),
"source": art.get("source"),
"source_url": art.get("source_url"),
"language": art.get("language"),
"published_at": art.get("published_at"),
"collected_at": art.get("collected_at"),
"verification_status": art.get("verification_status"),
}
for art in articles
]
export["snapshots"] = [
{
"created_at": snap.get("created_at"),
"article_count": snap.get("article_count"),
"fact_check_count": snap.get("fact_check_count"),
"summary": snap.get("summary"),
}
for snap in snapshots
]
return export
@router.get("/{incident_id}/export")
async def export_incident(
    incident_id: int,
    format: str = Query(..., pattern="^(md|json)$"),
    scope: str = Query("report", pattern="^(report|full)$"),
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Export an incident as Markdown or JSON download.

    ``format``: "md" or "json". ``scope``: "report" (summary, sources,
    fact checks) or "full" (additionally articles and snapshot history).
    Returns a streamed attachment whose filename is derived from the
    incident title.
    """
    tenant_id = current_user.get("tenant_id")
    row = await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
    incident = dict(row)
    # Resolve the creator's display name (placeholder if user was deleted).
    cursor = await db.execute("SELECT username FROM users WHERE id = ?", (incident["created_by"],))
    user_row = await cursor.fetchone()
    creator = user_row["username"] if user_row else "Unbekannt"
    # Articles, newest first.
    cursor = await db.execute(
        "SELECT * FROM articles WHERE incident_id = ? ORDER BY collected_at DESC",
        (incident_id,),
    )
    articles = [dict(r) for r in await cursor.fetchall()]
    # Fact checks, newest first.
    cursor = await db.execute(
        "SELECT * FROM fact_checks WHERE incident_id = ? ORDER BY checked_at DESC",
        (incident_id,),
    )
    fact_checks = [dict(r) for r in await cursor.fetchall()]
    # Snapshots are only needed for the full export.
    snapshots = []
    if scope == "full":
        cursor = await db.execute(
            "SELECT * FROM incident_snapshots WHERE incident_id = ? ORDER BY created_at DESC",
            (incident_id,),
        )
        snapshots = [dict(r) for r in await cursor.fetchall()]
    # Derive a filename-safe name from title, scope and date.
    date_str = datetime.now(timezone.utc).strftime("%Y%m%d")
    slug = _slugify(incident["title"])
    scope_suffix = "_vollexport" if scope == "full" else ""
    if format == "md":
        body = _build_markdown_export(incident, articles, fact_checks, snapshots, scope, creator)
        filename = f"{slug}{scope_suffix}_{date_str}.md"
        media_type = "text/markdown; charset=utf-8"
    else:
        export_data = _build_json_export(incident, articles, fact_checks, snapshots, scope, creator)
        body = json.dumps(export_data, ensure_ascii=False, indent=2)
        filename = f"{slug}{scope_suffix}_{date_str}.json"
        media_type = "application/json; charset=utf-8"
    # BUG FIX: the computed filename was never used — the header was
    # hard-coded, so every download arrived with a bogus name.
    return StreamingResponse(
        iter([body]),
        media_type=media_type,
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )

62
src/routers/notifications.py Normale Datei
Datei anzeigen

@@ -0,0 +1,62 @@
"""Notifications-Router: Persistente Benachrichtigungen (Multi-Tenant)."""
import logging
from fastapi import APIRouter, Depends, Query
from models import NotificationResponse, NotificationMarkReadRequest
from auth import get_current_user
from database import db_dependency
import aiosqlite
# Module-level logger for the notifications router.
logger = logging.getLogger("osint.notifications")
router = APIRouter(prefix="/api/notifications", tags=["notifications"])
@router.get("", response_model=list[NotificationResponse])
async def list_notifications(
    limit: int = Query(default=50, ge=1, le=200),
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the caller's notifications, newest first."""
    cur = await db.execute(
        "SELECT * FROM notifications WHERE user_id = ? ORDER BY created_at DESC LIMIT ?",
        (current_user["id"], limit),
    )
    return [dict(record) for record in await cur.fetchall()]
@router.get("/unread-count")
async def get_unread_count(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the number of the caller's unread notifications."""
    cur = await db.execute(
        "SELECT COUNT(*) as cnt FROM notifications WHERE user_id = ? AND is_read = 0",
        (current_user["id"],),
    )
    count_row = await cur.fetchone()
    return {"unread_count": count_row["cnt"]}
@router.put("/mark-read")
async def mark_notifications_read(
    body: NotificationMarkReadRequest,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Mark notifications as read — either the given IDs or all unread ones."""
    if not body.notification_ids:
        # No IDs supplied: flip every unread notification of the caller.
        await db.execute(
            "UPDATE notifications SET is_read = 1 WHERE user_id = ? AND is_read = 0",
            (current_user["id"],),
        )
    else:
        # The user_id predicate ensures only own notifications are touched.
        placeholders = ",".join("?" for _ in body.notification_ids)
        await db.execute(
            f"UPDATE notifications SET is_read = 1 WHERE user_id = ? AND id IN ({placeholders})",
            [current_user["id"]] + body.notification_ids,
        )
    await db.commit()
    return {"ok": True}

527
src/routers/sources.py Normale Datei
Datei anzeigen

@@ -0,0 +1,527 @@
"""Sources-Router: Quellenverwaltung (Multi-Tenant)."""
import logging
from collections import defaultdict
from fastapi import APIRouter, Depends, HTTPException, status
from models import SourceCreate, SourceUpdate, SourceResponse, DiscoverRequest, DiscoverResponse, DiscoverMultiResponse, DomainActionRequest
from auth import get_current_user
from database import db_dependency, refresh_source_counts
from source_rules import discover_source, discover_all_feeds, evaluate_feeds_with_claude, _extract_domain, _detect_category, domain_to_display_name
import aiosqlite
# Module-level logger for the sources router.
logger = logging.getLogger("osint.sources")
router = APIRouter(prefix="/api/sources", tags=["sources"])
# Whitelist of source columns that clients may modify via update requests.
SOURCE_UPDATE_COLUMNS = {"name", "url", "domain", "source_type", "category", "status", "notes"}
def _check_source_ownership(source: dict, username: str):
    """Raise 403 unless the user is allowed to edit/delete this source."""
    owner = source.get("added_by", "")
    # Sources seeded by the system are always read-only.
    if owner == "system":
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="System-Quellen koennen nicht veraendert werden",
        )
    # A source with a recorded owner may only be touched by that owner;
    # sources without an owner remain editable by anyone.
    if owner and owner != username:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Nur der Ersteller kann diese Quelle bearbeiten",
        )
@router.get("", response_model=list[SourceResponse])
async def list_sources(
    source_type: str = None,
    category: str = None,
    source_status: str = None,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """List sources: global entries (tenant_id NULL) plus the caller's org."""
    sql = "SELECT * FROM sources WHERE (tenant_id IS NULL OR tenant_id = ?)"
    args = [current_user.get("tenant_id")]
    # Optional filters are appended one by one.
    for column, value in (
        ("source_type", source_type),
        ("category", category),
        ("status", source_status),
    ):
        if value:
            sql += f" AND {column} = ?"
            args.append(value)
    sql += " ORDER BY source_type, category, name"
    cur = await db.execute(sql, args)
    return [dict(record) for record in await cur.fetchall()]
@router.get("/stats")
async def get_source_stats(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Aggregated source statistics (global + caller's org)."""
    tenant_id = current_user.get("tenant_id")
    cur = await db.execute("""
        SELECT
            source_type,
            COUNT(*) as count,
            SUM(article_count) as total_articles
        FROM sources
        WHERE status = 'active' AND (tenant_id IS NULL OR tenant_id = ?)
        GROUP BY source_type
    """, (tenant_id,))
    grouped = await cur.fetchall()
    # Pre-seed the known buckets; unknown types from the DB are ignored.
    stats = {
        key: {"count": 0, "articles": 0}
        for key in ("rss_feed", "web_source", "excluded")
    }
    for record in grouped:
        bucket = stats.get(record["source_type"])
        if bucket is not None:
            bucket["count"] = record["count"]
            bucket["articles"] = record["total_articles"] or 0
    cur = await db.execute(
        "SELECT COUNT(*) as cnt FROM articles WHERE tenant_id = ?",
        (tenant_id,),
    )
    article_total = await cur.fetchone()
    return {
        "by_type": stats,
        "total_sources": sum(bucket["count"] for bucket in stats.values()),
        "total_articles": article_total["cnt"],
    }
@router.post("/discover", response_model=DiscoverResponse)
async def discover_source_endpoint(
    data: DiscoverRequest,
    current_user: dict = Depends(get_current_user),
):
    """Auto-detect the RSS feed, name, category and domain of a URL."""
    try:
        return await discover_source(data.url)
    except Exception as e:
        # Any discovery failure is logged with traceback and mapped to a 500.
        logger.error(f"Discovery fehlgeschlagen: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Discovery fehlgeschlagen")
@router.post("/discover-multi", response_model=DiscoverMultiResponse)
async def discover_multi_endpoint(
    data: DiscoverRequest,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Find ALL RSS feeds of a domain, rate them with Claude and store the relevant ones as sources.

    Falls back to single-feed discovery when multi-discovery finds nothing.
    Also ensures a 'web_source' entry exists for the bare domain.
    """
    tenant_id = current_user.get("tenant_id")
    try:
        multi = await discover_all_feeds(data.url)
        domain = multi["domain"]
        category = multi["category"]
        if not multi["feeds"]:
            # Fallback path: multi-discovery found nothing, try single-feed
            # discovery for the given URL instead.
            single = await discover_source(data.url)
            sources = []
            if single.get("rss_url"):
                cursor = await db.execute(
                    "SELECT id FROM sources WHERE url = ?", (single["rss_url"],)
                )
                existing = await cursor.fetchone()
                if not existing:
                    cursor = await db.execute(
                        """INSERT INTO sources (name, url, domain, source_type, category, status, added_by, tenant_id)
                        VALUES (?, ?, ?, ?, ?, 'active', ?, ?)""",
                        (single["name"], single["rss_url"], single["domain"],
                         single["source_type"], single["category"], current_user["username"], tenant_id),
                    )
                    await db.commit()
                    # Re-read the inserted row so the response carries DB defaults.
                    cursor = await db.execute("SELECT * FROM sources WHERE id = ?", (cursor.lastrowid,))
                    row = await cursor.fetchone()
                    sources.append(dict(row))
            return DiscoverMultiResponse(
                domain=single.get("domain", domain),
                category=single.get("category", category),
                added_count=len(sources),
                # skipped_count=1 means a feed was found but already existed.
                skipped_count=1 if not sources and single.get("rss_url") else 0,
                total_found=1 if single.get("rss_url") else 0,
                sources=sources,
                fallback_single=True,
            )
        # Let Claude filter the discovered feeds down to the relevant ones.
        relevant_feeds = await evaluate_feeds_with_claude(domain, multi["feeds"])
        # All known feed URLs (across tenants) — used for de-duplication.
        cursor = await db.execute("SELECT url FROM sources WHERE url IS NOT NULL")
        existing_urls = {row["url"] for row in await cursor.fetchall()}
        new_ids = []
        skipped = 0
        for feed in relevant_feeds:
            if feed["url"] in existing_urls:
                skipped += 1
                continue
            cursor = await db.execute(
                """INSERT INTO sources (name, url, domain, source_type, category, status, added_by, tenant_id)
                VALUES (?, ?, ?, 'rss_feed', ?, 'active', ?, ?)""",
                (feed["name"], feed["url"], domain, category, current_user["username"], tenant_id),
            )
            new_ids.append(cursor.lastrowid)
            existing_urls.add(feed["url"])
        # Ensure a 'web_source' entry for the bare domain exists as well.
        cursor = await db.execute(
            "SELECT id FROM sources WHERE LOWER(domain) = ? AND source_type = 'web_source'",
            (domain.lower(),),
        )
        if not await cursor.fetchone():
            cursor = await db.execute(
                """INSERT INTO sources (name, url, domain, source_type, category, status, added_by, tenant_id)
                VALUES (?, ?, ?, 'web_source', ?, 'active', ?, ?)""",
                (domain_to_display_name(domain), f"https://{domain}", domain, category, current_user["username"], tenant_id),
            )
            new_ids.append(cursor.lastrowid)
        await db.commit()
        added_sources = []
        if new_ids:
            # Fetch all newly inserted rows in one query.
            placeholders = ",".join("?" for _ in new_ids)
            cursor = await db.execute(
                f"SELECT * FROM sources WHERE id IN ({placeholders}) ORDER BY id",
                new_ids,
            )
            added_sources = [dict(row) for row in await cursor.fetchall()]
        return DiscoverMultiResponse(
            domain=domain,
            category=category,
            added_count=len(added_sources),
            skipped_count=skipped,
            total_found=len(multi["feeds"]),
            sources=added_sources,
            fallback_single=False,
        )
    except Exception as e:
        logger.error(f"Multi-Discovery fehlgeschlagen: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Multi-Discovery fehlgeschlagen")
@router.post("/rediscover-existing")
async def rediscover_existing_endpoint(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """One-off migration: scan existing RSS sources for additional feeds.

    Groups active RSS sources by domain, runs multi-discovery per domain and
    inserts newly found, Claude-approved feeds. A failure for one domain is
    logged and that domain is skipped.
    """
    tenant_id = current_user.get("tenant_id")
    try:
        cursor = await db.execute(
            "SELECT * FROM sources WHERE source_type = 'rss_feed' AND status = 'active' AND (tenant_id IS NULL OR tenant_id = ?)",
            (tenant_id,),
        )
        existing_sources = [dict(row) for row in await cursor.fetchall()]
        # Group sources by domain; rows without a domain are ignored.
        domains = defaultdict(list)
        for src in existing_sources:
            if src["domain"]:
                domains[src["domain"]].append(src)
        # All known feed URLs (across tenants) — used for de-duplication.
        cursor = await db.execute("SELECT url FROM sources WHERE url IS NOT NULL")
        existing_urls = {row["url"] for row in await cursor.fetchall()}
        domains_processed = 0
        feeds_added = 0
        feeds_skipped = 0
        for domain, sources in domains.items():
            domains_processed += 1
            base_url = f"https://{domain}"
            try:
                multi = await discover_all_feeds(base_url)
                if not multi["feeds"]:
                    continue
                relevant_feeds = await evaluate_feeds_with_claude(domain, multi["feeds"])
                category = _detect_category(domain)
                for feed in relevant_feeds:
                    if feed["url"] in existing_urls:
                        feeds_skipped += 1
                        continue
                    await db.execute(
                        """INSERT INTO sources (name, url, domain, source_type, category, status, added_by, tenant_id)
                        VALUES (?, ?, ?, 'rss_feed', ?, 'active', ?, ?)""",
                        (feed["name"], feed["url"], domain, category, current_user["username"], tenant_id),
                    )
                    existing_urls.add(feed["url"])
                    feeds_added += 1
                # Commit per domain so partial progress survives later errors.
                await db.commit()
            except Exception as e:
                logger.warning(f"Rediscovery fuer {domain} fehlgeschlagen: {e}")
                continue
        return {
            "domains_processed": domains_processed,
            "feeds_added": feeds_added,
            "feeds_skipped": feeds_skipped,
        }
    except Exception as e:
        logger.error(f"Rediscovery fehlgeschlagen: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Rediscovery fehlgeschlagen")
@router.post("/block-domain")
async def block_domain(
    data: DomainActionRequest,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Block a domain: deactivate all of its feeds and create an 'excluded' entry.

    Refused (403) when any active source of the domain was added by a user
    other than the caller (rows added by "system" or with no owner pass).
    """
    tenant_id = current_user.get("tenant_id")
    domain = data.domain.lower().strip()
    username = current_user["username"]
    # Ownership check across all visible active sources of the domain.
    cursor = await db.execute(
        "SELECT added_by FROM sources WHERE LOWER(domain) = ? AND source_type != 'excluded' AND status = 'active' AND (tenant_id IS NULL OR tenant_id = ?)",
        (domain, tenant_id),
    )
    affected = await cursor.fetchall()
    for row in affected:
        ab = row["added_by"] or ""
        if ab != "system" and ab != username and ab != "":
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Domain enthaelt Quellen anderer Nutzer",
            )
    # NOTE(review): this UPDATE also matches global rows (tenant_id IS NULL),
    # so blocking here deactivates the domain for ALL tenants — confirm that
    # this is intended.
    cursor = await db.execute(
        "UPDATE sources SET status = 'inactive' WHERE LOWER(domain) = ? AND source_type != 'excluded' AND (tenant_id IS NULL OR tenant_id = ?)",
        (domain, tenant_id),
    )
    feeds_deactivated = cursor.rowcount
    # Reuse an existing 'excluded' marker row if present, otherwise create one.
    cursor = await db.execute(
        "SELECT id FROM sources WHERE LOWER(domain) = ? AND source_type = 'excluded' AND (tenant_id IS NULL OR tenant_id = ?)",
        (domain, tenant_id),
    )
    existing = await cursor.fetchone()
    if existing:
        excluded_id = existing["id"]
        if data.notes:
            await db.execute(
                "UPDATE sources SET notes = ? WHERE id = ?",
                (data.notes, excluded_id),
            )
    else:
        cursor = await db.execute(
            """INSERT INTO sources (name, url, domain, source_type, category, status, notes, added_by, tenant_id)
            VALUES (?, NULL, ?, 'excluded', 'sonstige', 'active', ?, ?, ?)""",
            (domain, domain, data.notes, current_user["username"], tenant_id),
        )
        excluded_id = cursor.lastrowid
    await db.commit()
    return {
        "domain": domain,
        "feeds_deactivated": feeds_deactivated,
        "excluded_id": excluded_id,
    }
@router.post("/unblock-domain")
async def unblock_domain(
    data: DomainActionRequest,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Unblock a domain: drop the 'excluded' entry and reactivate its feeds."""
    tenant_id = current_user.get("tenant_id")
    domain = data.domain.lower().strip()
    cursor = await db.execute(
        "SELECT COUNT(*) as cnt FROM sources WHERE LOWER(domain) = ? AND source_type != 'excluded' AND (tenant_id IS NULL OR tenant_id = ?)",
        (domain, tenant_id),
    )
    count_row = await cursor.fetchone()
    feeds_reactivated = 0
    if count_row["cnt"] > 0:
        # The domain still has real sources: remove the blocker row and
        # switch those sources back to active.
        await db.execute(
            "DELETE FROM sources WHERE LOWER(domain) = ? AND source_type = 'excluded' AND (tenant_id IS NULL OR tenant_id = ?)",
            (domain, tenant_id),
        )
        cursor = await db.execute(
            "UPDATE sources SET status = 'active' WHERE LOWER(domain) = ? AND source_type != 'excluded' AND (tenant_id IS NULL OR tenant_id = ?)",
            (domain, tenant_id),
        )
        feeds_reactivated = cursor.rowcount
    else:
        # No other sources left: turn the excluded entry into a plain
        # web_source instead of deleting it.
        await db.execute(
            """UPDATE sources SET source_type = 'web_source', status = 'active', notes = 'Entsperrt'
            WHERE LOWER(domain) = ? AND source_type = 'excluded' AND (tenant_id IS NULL OR tenant_id = ?)""",
            (domain, tenant_id),
        )
    await db.commit()
    return {
        "domain": domain,
        "feeds_reactivated": feeds_reactivated,
    }
@router.delete("/domain/{domain}")
async def delete_domain(
    domain: str,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Delete every source of a domain (org-owned rows only, never global ones)."""
    tenant_id = current_user.get("tenant_id")
    domain_lower = domain.lower().strip()
    cursor = await db.execute(
        "SELECT * FROM sources WHERE LOWER(domain) = ? AND tenant_id = ?",
        (domain_lower, tenant_id),
    )
    rows = await cursor.fetchall()
    if not rows:
        raise HTTPException(status_code=404, detail="Keine Quellen fuer diese Domain gefunden")
    username = current_user["username"]
    # Refuse when any row was added by the system or by a different user.
    for source in map(dict, rows):
        if source["added_by"] == "system":
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Domain enthaelt System-Quellen, die nicht geloescht werden koennen",
            )
        if source["added_by"] and source["added_by"] != username:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Domain enthaelt Quellen anderer Nutzer",
            )
    await db.execute(
        "DELETE FROM sources WHERE LOWER(domain) = ? AND tenant_id = ?",
        (domain_lower, tenant_id),
    )
    await db.commit()
    return {
        "domain": domain_lower,
        "deleted_count": len(rows),
    }
@router.post("", response_model=SourceResponse, status_code=status.HTTP_201_CREATED)
async def create_source(
    data: SourceCreate,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Add a new source, scoped to the caller's organization."""
    tenant_id = current_user.get("tenant_id")
    values = (
        data.name,
        data.url,
        data.domain,
        data.source_type,
        data.category,
        data.status,
        data.notes,
        current_user["username"],
        tenant_id,
    )
    cursor = await db.execute(
        """INSERT INTO sources (name, url, domain, source_type, category, status, notes, added_by, tenant_id)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
        values,
    )
    await db.commit()
    # Re-read the row so the response contains DB-generated values (id, ...).
    cursor = await db.execute("SELECT * FROM sources WHERE id = ?", (cursor.lastrowid,))
    row = await cursor.fetchone()
    return dict(row)
@router.put("/{source_id}", response_model=SourceResponse)
async def update_source(
    source_id: int,
    data: SourceUpdate,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Edit an existing source (ownership enforced)."""
    cursor = await db.execute("SELECT * FROM sources WHERE id = ?", (source_id,))
    row = await cursor.fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Quelle nicht gefunden")
    _check_source_ownership(dict(row), current_user["username"])
    # Only whitelisted columns may be written — this guards the dynamically
    # built SET clause against injection via field names.
    updates = {
        field: value
        for field, value in data.model_dump(exclude_none=True).items()
        if field in SOURCE_UPDATE_COLUMNS
    }
    if not updates:
        # Nothing to change: return the current row unchanged.
        return dict(row)
    set_clause = ", ".join(f"{k} = ?" for k in updates)
    await db.execute(
        f"UPDATE sources SET {set_clause} WHERE id = ?",
        [*updates.values(), source_id],
    )
    await db.commit()
    cursor = await db.execute("SELECT * FROM sources WHERE id = ?", (source_id,))
    row = await cursor.fetchone()
    return dict(row)
@router.delete("/{source_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_source(
    source_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Delete a single source (ownership enforced, 404 if missing)."""
    cursor = await db.execute("SELECT * FROM sources WHERE id = ?", (source_id,))
    row = await cursor.fetchone()
    if row is None:
        raise HTTPException(status_code=404, detail="Quelle nicht gefunden")
    _check_source_ownership(dict(row), current_user["username"])
    await db.execute("DELETE FROM sources WHERE id = ?", (source_id,))
    await db.commit()
@router.post("/refresh-counts")
async def trigger_refresh_counts(
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Recompute the article counter for every source.

    Delegates to refresh_source_counts(); requires authentication but no
    role check is performed here — any logged-in user may trigger it.
    """
    await refresh_source_counts(db)
    return {"status": "ok"}

102
src/rss_parser.py Normale Datei
Datei anzeigen

@@ -0,0 +1,102 @@
"""RSS-Feed Parser: Durchsucht vorkonfigurierte Feeds nach relevanten Meldungen."""
import asyncio
import logging
import feedparser
import httpx
from datetime import datetime, timezone
from config import RSS_FEEDS
logger = logging.getLogger("osint.rss")
class RSSParser:
    """Searches the configured RSS feeds for relevant articles."""

    # Stop words that are ignored when building the RSS search terms.
    STOP_WORDS = {
        "und", "oder", "der", "die", "das", "ein", "eine", "in", "im", "am", "an",
        "auf", "für", "mit", "von", "zu", "zum", "zur", "bei", "nach", "vor",
        "über", "unter", "ist", "sind", "hat", "the", "and", "for", "with", "from",
    }

    async def search_feeds(self, search_term: str) -> list[dict]:
        """Search every configured RSS feed for the given term, concurrently."""
        # Drop stop words and words shorter than 3 characters.
        lowered = search_term.lower().split()
        search_words = [
            w for w in lowered
            if w not in self.STOP_WORDS and len(w) >= 3
        ]
        if not search_words:
            # Everything was filtered out: fall back to the first two words.
            search_words = lowered[:2]
        tasks = [
            self._fetch_feed(feed_config, search_words)
            for feeds in RSS_FEEDS.values()
            for feed_config in feeds
        ]
        all_articles = []
        for result in await asyncio.gather(*tasks, return_exceptions=True):
            if isinstance(result, Exception):
                logger.warning(f"Feed-Fehler: {result}")
            else:
                all_articles.extend(result)
        logger.info(f"RSS-Suche nach '{search_term}': {len(all_articles)} Treffer")
        return all_articles

    async def _fetch_feed(self, feed_config: dict, search_words: list[str]) -> list[dict]:
        """Fetch a single RSS feed and return the entries matching any search word."""
        name = feed_config["name"]
        url = feed_config["url"]
        articles = []
        try:
            async with httpx.AsyncClient(timeout=15.0, follow_redirects=True) as client:
                response = await client.get(url, headers={
                    "User-Agent": "OSINT-Monitor/1.0 (News Aggregator)"
                })
                response.raise_for_status()
                # feedparser is synchronous — run it in a worker thread.
                feed = await asyncio.to_thread(feedparser.parse, response.text)
                for entry in feed.entries[:50]:
                    title = entry.get("title", "")
                    summary = entry.get("summary", "")
                    haystack = f"{title} {summary}".lower()
                    # Keep the entry if at least one search word occurs.
                    if not any(word in haystack for word in search_words):
                        continue
                    published = None
                    if getattr(entry, "published_parsed", None):
                        try:
                            published = datetime(*entry.published_parsed[:6], tzinfo=timezone.utc).isoformat()
                        except (TypeError, ValueError):
                            pass
                    title_is_german = self._is_german(title)
                    articles.append({
                        "headline": title,
                        "headline_de": title if title_is_german else None,
                        "source": name,
                        "source_url": entry.get("link", ""),
                        "content_original": summary[:1000] if summary else None,
                        "content_de": summary[:1000] if summary and self._is_german(summary) else None,
                        "language": "de" if title_is_german else "en",
                        "published_at": published,
                    })
        except Exception as e:
            # Best effort per feed: log at debug level and return what we have.
            logger.debug(f"Feed {name} ({url}): {e}")
        return articles

    def _is_german(self, text: str) -> bool:
        """Cheap heuristic: at least two common German function words present."""
        german_words = {"der", "die", "das", "und", "ist", "von", "mit", "für", "auf", "ein",
                        "eine", "den", "dem", "des", "sich", "wird", "nach", "bei", "auch",
                        "über", "wie", "aus", "hat", "zum", "zur", "als", "noch", "mehr",
                        "nicht", "aber", "oder", "sind", "vor", "einem", "einer", "wurde"}
        return len(set(text.lower().split()) & german_words) >= 2

0
src/services/__init__.py Normale Datei
Datei anzeigen

Datei anzeigen

@@ -0,0 +1,117 @@
"""Lizenz-Verwaltung und -Pruefung."""
import logging
from datetime import datetime, timezone
from config import TIMEZONE
import aiosqlite
logger = logging.getLogger("osint.license")
async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
    """Check the license status of an organization.

    Order of checks: organization exists and is active -> a license row with
    status 'active' exists -> that license has not expired -> seat count.

    Returns:
        dict with: valid, status, license_type, max_users, current_users,
        read_only, message. Every invalid state sets read_only=True; the
        keys license_type/max_users/current_users are only included where
        the corresponding check was reached.
    """
    # Check the organization itself first.
    cursor = await db.execute(
        "SELECT id, name, is_active FROM organizations WHERE id = ?",
        (organization_id,),
    )
    org = await cursor.fetchone()
    if not org:
        return {"valid": False, "status": "not_found", "read_only": True, "message": "Organisation nicht gefunden"}
    if not org["is_active"]:
        return {"valid": False, "status": "org_disabled", "read_only": True, "message": "Organisation deaktiviert"}
    # Look up the newest license row with status 'active'.
    cursor = await db.execute(
        """SELECT * FROM licenses
        WHERE organization_id = ? AND status = 'active'
        ORDER BY created_at DESC LIMIT 1""",
        (organization_id,),
    )
    license_row = await cursor.fetchone()
    if not license_row:
        return {"valid": False, "status": "no_license", "read_only": True, "message": "Keine aktive Lizenz"}
    # Expiry check; valid_until is NULL for unlimited licenses, otherwise an
    # ISO-8601 timestamp string.
    now = datetime.now(timezone.utc)
    valid_until = license_row["valid_until"]
    if valid_until is not None:
        try:
            expiry = datetime.fromisoformat(valid_until)
            if expiry.tzinfo is None:
                # Naive timestamps are interpreted as UTC.
                expiry = expiry.replace(tzinfo=timezone.utc)
            if now > expiry:
                return {
                    "valid": False,
                    "status": "expired",
                    "license_type": license_row["license_type"],
                    "read_only": True,
                    "message": "Lizenz abgelaufen",
                }
        except (ValueError, TypeError):
            # Unparseable expiry values are ignored; the license stays valid.
            pass
    # Count the active seats so callers can show/enforce the user limit.
    cursor = await db.execute(
        "SELECT COUNT(*) as cnt FROM users WHERE organization_id = ? AND is_active = 1",
        (organization_id,),
    )
    current_users = (await cursor.fetchone())["cnt"]
    return {
        "valid": True,
        "status": license_row["status"],
        "license_type": license_row["license_type"],
        "max_users": license_row["max_users"],
        "current_users": current_users,
        "read_only": False,
        "message": "Lizenz aktiv",
    }
async def can_add_user(db: aiosqlite.Connection, organization_id: int) -> tuple[bool, str]:
    """Check whether another user may be added to the organization (seat limit).

    Returns:
        (allowed, reason) — reason is an empty string when allowed.
    """
    lic = await check_license(db, organization_id)
    if not lic["valid"]:
        return False, lic["message"]
    seats_used, seat_limit = lic["current_users"], lic["max_users"]
    if seats_used >= seat_limit:
        return False, f"Nutzer-Limit erreicht ({seats_used}/{seat_limit})"
    return True, ""
async def expire_licenses(db: aiosqlite.Connection):
    """Mark active licenses whose valid_until has passed as 'expired'.

    Intended to be called once per day. Returns the number of licenses that
    were expired; commits only when at least one row was changed.
    """
    cursor = await db.execute(
        """SELECT id, organization_id FROM licenses
        WHERE status = 'active'
        AND valid_until IS NOT NULL
        AND valid_until < datetime('now')"""
    )
    expired_rows = await cursor.fetchall()
    for lic in expired_rows:
        await db.execute(
            "UPDATE licenses SET status = 'expired' WHERE id = ?",
            (lic["id"],),
        )
        logger.info(f"Lizenz {lic['id']} fuer Org {lic['organization_id']} als abgelaufen markiert")
    count = len(expired_rows)
    if count > 0:
        await db.commit()
        logger.info(f"{count} Lizenz(en) als abgelaufen markiert")
    return count

680
src/source_rules.py Normale Datei
Datei anzeigen

@@ -0,0 +1,680 @@
"""Dynamische Quellen-Regeln aus der Datenbank."""
import logging
import re
import json
import asyncio
from urllib.parse import urlparse
import httpx
import feedparser
from config import CLAUDE_PATH, CLAUDE_TIMEOUT
logger = logging.getLogger("osint.source_rules")
# Domain -> category mapping used for automatic category detection.
DOMAIN_CATEGORY_MAP = {
    # News agencies
    "reuters.com": "nachrichtenagentur",
    "apnews.com": "nachrichtenagentur",
    "dpa.com": "nachrichtenagentur",
    "afp.com": "nachrichtenagentur",
    # Public broadcasters
    "tagesschau.de": "oeffentlich-rechtlich",
    "zdf.de": "oeffentlich-rechtlich",
    "dw.com": "oeffentlich-rechtlich",
    "br.de": "oeffentlich-rechtlich",
    "ndr.de": "oeffentlich-rechtlich",
    "wdr.de": "oeffentlich-rechtlich",
    "mdr.de": "oeffentlich-rechtlich",
    "swr.de": "oeffentlich-rechtlich",
    "hr.de": "oeffentlich-rechtlich",
    "rbb24.de": "oeffentlich-rechtlich",
    "ard.de": "oeffentlich-rechtlich",
    "orf.at": "oeffentlich-rechtlich",
    "srf.ch": "oeffentlich-rechtlich",
    # Quality newspapers
    "spiegel.de": "qualitaetszeitung",
    "zeit.de": "qualitaetszeitung",
    "faz.net": "qualitaetszeitung",
    "sueddeutsche.de": "qualitaetszeitung",
    "nzz.ch": "qualitaetszeitung",
    "welt.de": "qualitaetszeitung",
    "tagesspiegel.de": "qualitaetszeitung",
    "fr.de": "qualitaetszeitung",
    "stern.de": "qualitaetszeitung",
    "focus.de": "qualitaetszeitung",
    # Government agencies
    "bmi.bund.de": "behoerde",
    "europol.europa.eu": "behoerde",
    "bka.de": "behoerde",
    "bsi.bund.de": "behoerde",
    "verfassungsschutz.de": "behoerde",
    "bpb.de": "behoerde",
    # Trade/specialist media
    "netzpolitik.org": "fachmedien",
    "handelsblatt.com": "fachmedien",
    "heise.de": "fachmedien",
    "golem.de": "fachmedien",
    "t3n.de": "fachmedien",
    "wiwo.de": "fachmedien",
    # Think tanks
    "swp-berlin.org": "think-tank",
    "iiss.org": "think-tank",
    "brookings.edu": "think-tank",
    "rand.org": "think-tank",
    "dgap.org": "think-tank",
    "chathamhouse.org": "think-tank",
    # International outlets
    "bbc.co.uk": "international",
    "bbc.com": "international",
    "aljazeera.com": "international",
    "france24.com": "international",
    "cnn.com": "international",
    "theguardian.com": "international",
    "nytimes.com": "international",
    "washingtonpost.com": "international",
    "lemonde.fr": "international",
    "elpais.com": "international",
    # Regional outlets
    "berliner-zeitung.de": "regional",
    "hamburger-abendblatt.de": "regional",
    "stuttgarter-zeitung.de": "regional",
    "ksta.de": "regional",
    "rp-online.de": "regional",
    "merkur.de": "regional",
}
# Common feed paths to probe when no feed link is advertised in the HTML.
_FEED_PATHS = ["/feed", "/rss", "/rss.xml", "/atom.xml", "/feed.xml", "/index.xml", "/feed/rss", "/feeds/posts/default"]
# Additional news-specific feed paths used by multi-discovery.
_NEWS_FEED_PATHS = [
    "/world/rss", "/world/rss.xml", "/world/feed",
    "/politics/rss", "/politics/rss.xml", "/politics/feed",
    "/business/rss", "/business/rss.xml", "/business/feed",
    "/technology/rss", "/technology/rss.xml", "/technology/feed",
    "/environment/rss", "/environment/rss.xml", "/environment/feed",
    "/science/rss", "/science/rss.xml", "/science/feed",
    "/europe/rss", "/europe/rss.xml", "/europe/feed",
    "/security/rss", "/security/rss.xml", "/security/feed",
    "/international/rss", "/international/rss.xml", "/international/feed",
    "/economy/rss", "/economy/rss.xml", "/economy/feed",
    "/defence/rss", "/defence/rss.xml", "/defence/feed",
    "/middle-east/rss", "/middle-east/rss.xml",
    "/asia/rss", "/asia/rss.xml",
    "/africa/rss", "/africa/rss.xml",
    "/americas/rss", "/americas/rss.xml",
    "/uk-news/rss", "/us-news/rss",
    "/commentisfree/rss", "/opinion/rss",
    "/law/rss", "/media/rss",
    "/global-development/rss",
    "/news/feed", "/news/rss", "/news/rss.xml",
    "/politik/rss", "/politik/rss.xml",
    "/wirtschaft/rss", "/wirtschaft/rss.xml",
    "/panorama/rss", "/panorama/rss.xml",
    "/wissen/rss", "/wissen/rss.xml",
    "/ausland/rss", "/ausland/rss.xml",
    "/inland/rss", "/inland/rss.xml",
    "/netzwelt/rss", "/netzwelt/rss.xml",
    "/kultur/rss", "/kultur/rss.xml",
]
# Known feed URLs for portals that host their feeds on a separate domain.
# A string value is an alias pointing to another key in this dict.
_DOMAIN_FEED_URLS = {
    "bbc.com": [
        "https://feeds.bbci.co.uk/news/rss.xml",
        "https://feeds.bbci.co.uk/news/world/rss.xml",
        "https://feeds.bbci.co.uk/news/business/rss.xml",
        "https://feeds.bbci.co.uk/news/politics/rss.xml",
        "https://feeds.bbci.co.uk/news/technology/rss.xml",
        "https://feeds.bbci.co.uk/news/science_and_environment/rss.xml",
        "https://feeds.bbci.co.uk/news/health/rss.xml",
        "https://feeds.bbci.co.uk/news/education/rss.xml",
        "https://feeds.bbci.co.uk/news/world/middle_east/rss.xml",
        "https://feeds.bbci.co.uk/news/world/europe/rss.xml",
        "https://feeds.bbci.co.uk/news/world/africa/rss.xml",
        "https://feeds.bbci.co.uk/news/world/asia/rss.xml",
        "https://feeds.bbci.co.uk/news/world/us_and_canada/rss.xml",
        "https://feeds.bbci.co.uk/news/world/latin_america/rss.xml",
        "https://feeds.bbci.co.uk/news/entertainment_and_arts/rss.xml",
    ],
    "bbc.co.uk": "bbc.com",  # alias for bbc.com
    "reuters.com": [
        "https://www.reutersagency.com/feed/",
    ],
    "aljazeera.com": [
        "https://www.aljazeera.com/xml/rss/all.xml",
    ],
}
def _get_extra_feed_urls(domain: str) -> list[str]:
    """Return known feed URLs for domains that host feeds on a separate subdomain.

    Resolves one level of alias indirection (a string value points at
    another key). Returns an empty list for unknown domains.
    """
    entry = _DOMAIN_FEED_URLS.get(domain)
    if isinstance(entry, str):
        # Alias: follow the reference to the other domain's list.
        entry = _DOMAIN_FEED_URLS.get(entry)
    return entry if isinstance(entry, list) else []
def _normalize_url(url: str) -> str:
"""URL normalisieren (https:// ergänzen falls fehlend)."""
url = url.strip()
if not url.startswith(("http://", "https://")):
url = "https://" + url
return url
def _extract_domain(url: str) -> str:
"""Domain aus URL extrahieren (ohne www.)."""
parsed = urlparse(url)
domain = parsed.hostname or ""
if domain.startswith("www."):
domain = domain[4:]
return domain
def _detect_category(domain: str) -> str:
    """Detect a source category from the domain, falling back to 'sonstige'."""
    category = DOMAIN_CATEGORY_MAP.get(domain)
    if category:
        return category
    # Subdomain match: e.g. feeds.reuters.com -> reuters.com
    labels = domain.split(".")
    if len(labels) > 2:
        category = DOMAIN_CATEGORY_MAP.get(".".join(labels[-2:]))
        if category:
            return category
    return "sonstige"
# Known domain -> display-name mappings (exact matches take precedence over
# the heuristic fallback in domain_to_display_name).
DOMAIN_DISPLAY_NAMES = {
    "tagesschau.de": "tagesschau",
    "zdf.de": "ZDF heute",
    "spiegel.de": "Spiegel",
    "zeit.de": "Zeit",
    "newsfeed.zeit.de": "Zeit",
    "faz.net": "FAZ",
    "sueddeutsche.de": "Süddeutsche Zeitung",
    "rss.sueddeutsche.de": "Süddeutsche Zeitung",
    "nzz.ch": "NZZ",
    "dw.com": "Deutsche Welle",
    "rss.dw.com": "Deutsche Welle",
    "reuters.com": "Reuters",
    "reutersagency.com": "Reuters",
    "rsshub.app": "RSSHub",
    "apnews.com": "AP News",
    "bbc.com": "BBC",
    "bbc.co.uk": "BBC",
    "feeds.bbci.co.uk": "BBC",
    "aljazeera.com": "Al Jazeera",
    "france24.com": "France24",
    "theguardian.com": "The Guardian",
    "nytimes.com": "New York Times",
    "washingtonpost.com": "Washington Post",
    "cnn.com": "CNN",
    "bmi.bund.de": "BMI",
    "europol.europa.eu": "Europol",
    "handelsblatt.com": "Handelsblatt",
    "wiwo.de": "WirtschaftsWoche",
    "heise.de": "Heise Online",
    "golem.de": "Golem",
    "netzpolitik.org": "netzpolitik.org",
    "t3n.de": "t3n",
    "welt.de": "Welt",
    "tagesspiegel.de": "Tagesspiegel",
    "stern.de": "Stern",
    "focus.de": "Focus",
    "n-tv.de": "n-tv",
    "bild.de": "BILD",
    "tarnkappe.info": "Tarnkappe",
    "bleepingcomputer.com": "BleepingComputer",
    "techcrunch.com": "TechCrunch",
    "theverge.com": "The Verge",
    "wired.com": "WIRED",
    "tomshardware.com": "Tom's Hardware",
    "finanzen.net": "Finanzen.net",
    "404media.co": "404 Media",
    "medium.com": "Medium",
    "swp-berlin.org": "SWP Berlin",
    "dgap.org": "DGAP",
    "brookings.edu": "Brookings",
    "rand.org": "RAND",
    "lemonde.fr": "Le Monde",
    "elpais.com": "El País",
    "orf.at": "ORF",
    "srf.ch": "SRF",
    "br.de": "BR",
    "ndr.de": "NDR",
    "wdr.de": "WDR",
    "mdr.de": "MDR",
    "swr.de": "SWR",
    "hr.de": "hr",
    "rbb24.de": "rbb24",
    "fr.de": "Frankfurter Rundschau",
    "rp-online.de": "Rheinische Post",
    "ksta.de": "Kölner Stadt-Anzeiger",
    "berliner-zeitung.de": "Berliner Zeitung",
    "stuttgarter-zeitung.de": "Stuttgarter Zeitung",
    "hamburger-abendblatt.de": "Hamburger Abendblatt",
    "merkur.de": "Münchner Merkur",
    "bsi.bund.de": "BSI",
    "bpb.de": "bpb",
    "bka.de": "BKA",
    "verfassungsschutz.de": "Verfassungsschutz",
    "bashinho.de": "Bashinho",
}
def domain_to_display_name(domain: str) -> str:
    """Turn a domain into a human-readable display name.

    Checks the known mapping first (exact match, then the parent domain for
    subdomains like feeds.reuters.com), otherwise derives a name from the
    second-level label, e.g. "example-news.de" -> "Example News".

    NOTE(review): the fallback assumes a two-label TLD-free domain; for
    multi-part TLDs like "example.co.uk" it yields "Co" — confirm acceptable.
    """
    name = DOMAIN_DISPLAY_NAMES.get(domain)
    if name:
        return name
    labels = domain.split(".")
    if len(labels) > 2:
        name = DOMAIN_DISPLAY_NAMES.get(".".join(labels[-2:]))
        if name:
            return name
    # Fallback: capitalize the second-level label of the domain.
    core = labels[-2] if len(labels) >= 2 else labels[0]
    return core.replace("-", " ").title()
async def _validate_feed(client: httpx.AsyncClient, url: str) -> dict | None:
    """Check whether a URL is a valid RSS/Atom feed.

    Returns:
        {"url": final URL after redirects, "title": feed title or ""} on
        success, otherwise None. Every network or parse failure yields None.
    """
    try:
        resp = await client.get(url)
        if resp.status_code != 200:
            return None
        # NOTE(review): content_type is read but never used below.
        content_type = resp.headers.get("content-type", "")
        text = resp.text[:10000]  # Only inspect the beginning of the body.
        # Must look like XML (RSS or Atom markers present).
        if "<rss" not in text and "<feed" not in text and "<channel" not in text:
            return None
        # feedparser is synchronous — run it in a worker thread.
        feed = await asyncio.to_thread(feedparser.parse, text)
        # bozo flags parse problems; tolerate them only if entries were parsed.
        if feed.get("bozo") and not feed.entries:
            return None
        if feed.feed.get("title") or feed.entries:
            return {
                "url": str(resp.url),  # Final URL after redirects.
                "title": feed.feed.get("title", ""),
            }
    except Exception:
        # Best effort: any failure simply means "not a usable feed".
        pass
    return None
async def discover_source(url: str) -> dict:
    """Auto-detect the RSS feed, name, domain and category of a URL.

    Strategy: fetch the page and validate any advertised
    <link type="application/rss+xml"> targets; if none work, probe a list
    of well-known feed paths on the same origin.

    Returns:
        dict with: name, domain, rss_url (None when no feed was found),
        category, source_type ('rss_feed' on success, else 'web_source').
    """
    url = _normalize_url(url)
    domain = _extract_domain(url)
    category = _detect_category(domain)
    result = {
        "name": domain_to_display_name(domain),
        "domain": domain,
        "rss_url": None,
        "category": category,
        "source_type": "web_source",
    }
    async with httpx.AsyncClient(
        timeout=12.0,
        follow_redirects=True,
        headers={"User-Agent": "Mozilla/5.0 (compatible; OSINT-Monitor/1.0)"},
    ) as client:
        # 1. Fetch the page and look for RSS link tags.
        page_title = None
        try:
            resp = await client.get(url)
            if resp.status_code == 200:
                html = resp.text[:50000]
                # Extract the <title> element for a fallback display name.
                title_match = re.search(r"<title[^>]*>([^<]+)</title>", html, re.IGNORECASE)
                if title_match:
                    page_title = title_match.group(1).strip()
                # Find RSS/Atom link tags.
                # NOTE(review): feed_links is computed but never used below —
                # the finditer loop re-scans the HTML; consider removing.
                feed_links = re.findall(
                    r'<link[^>]+type=["\']application/(rss|atom)\+xml["\'][^>]*>',
                    html,
                    re.IGNORECASE,
                )
                # Also match the reversed attribute order (href before type).
                feed_links += re.findall(
                    r'<link[^>]+href=["\']([^"\']+)["\'][^>]+type=["\']application/(rss|atom)\+xml["\'][^>]*>',
                    html,
                    re.IGNORECASE,
                )
                # Extract href values from the matched link tags.
                feed_urls = []
                for tag in re.finditer(
                    r'<link[^>]+type=["\']application/(?:rss|atom)\+xml["\'][^>]*>',
                    html,
                    re.IGNORECASE,
                ):
                    href_match = re.search(r'href=["\']([^"\']+)["\']', tag.group(0))
                    if href_match:
                        href = href_match.group(1)
                        # Resolve relative URLs against the page origin.
                        if href.startswith("/"):
                            parsed = urlparse(url)
                            href = f"{parsed.scheme}://{parsed.netloc}{href}"
                        elif not href.startswith("http"):
                            href = url.rstrip("/") + "/" + href
                        feed_urls.append(href)
                # Validate the discovered feed URLs; the first valid one wins.
                for feed_url in feed_urls:
                    feed_info = await _validate_feed(client, feed_url)
                    if feed_info:
                        result["rss_url"] = feed_info["url"]
                        result["source_type"] = "rss_feed"
                        if feed_info["title"]:
                            result["name"] = feed_info["title"]
                        elif page_title:
                            result["name"] = page_title
                        return result
        except Exception as e:
            logger.debug(f"Fehler beim Abrufen von {url}: {e}")
        # 2. Probe well-known feed paths on the same origin.
        parsed = urlparse(url)
        base_url = f"{parsed.scheme}://{parsed.netloc}"
        for path in _FEED_PATHS:
            feed_url = base_url + path
            feed_info = await _validate_feed(client, feed_url)
            if feed_info:
                result["rss_url"] = feed_info["url"]
                result["source_type"] = "rss_feed"
                if feed_info["title"]:
                    result["name"] = feed_info["title"]
                elif page_title:
                    result["name"] = page_title
                return result
    # No feed found — use the page title as the display name if available.
    if page_title:
        result["name"] = page_title
    return result
async def discover_all_feeds(url: str) -> dict:
    """Find ALL RSS/Atom feeds of a domain.

    Combines three candidate lists: feed link tags found in the page HTML,
    well-known feed paths (generic + news-specific) and hard-coded feed URLs
    for domains that host feeds on a separate subdomain. Candidates are
    validated in parallel batches of 10 and de-duplicated by final URL.

    Returns:
        dict with: domain, category, page_title, feeds: [{"url", "title"}, ...]
    """
    url = _normalize_url(url)
    domain = _extract_domain(url)
    category = _detect_category(domain)
    result = {
        "domain": domain,
        "category": category,
        "page_title": None,
        "feeds": [],
    }
    seen_urls = set()
    async with httpx.AsyncClient(
        timeout=15.0,
        follow_redirects=True,
        headers={"User-Agent": "Mozilla/5.0 (compatible; OSINT-Monitor/1.0)"},
    ) as client:
        # 1. Fetch the HTML page and collect ALL RSS link tags.
        candidate_urls = []
        try:
            resp = await client.get(url)
            if resp.status_code == 200:
                html = resp.text[:100000]
                title_match = re.search(r"<title[^>]*>([^<]+)</title>", html, re.IGNORECASE)
                if title_match:
                    result["page_title"] = title_match.group(1).strip()
                parsed = urlparse(url)
                base = f"{parsed.scheme}://{parsed.netloc}"
                for tag in re.finditer(
                    r'<link[^>]+type=["\']application/(?:rss|atom)\+xml["\'][^>]*>',
                    html,
                    re.IGNORECASE,
                ):
                    href_match = re.search(r'href=["\']([^"\']+)["\']', tag.group(0))
                    if href_match:
                        href = href_match.group(1)
                        # Resolve relative URLs against the page origin.
                        if href.startswith("/"):
                            href = base + href
                        elif not href.startswith("http"):
                            href = url.rstrip("/") + "/" + href
                        candidate_urls.append(href)
        except Exception as e:
            logger.debug(f"Fehler beim Abrufen von {url}: {e}")
        # 2. Add well-known feed paths (generic + news-specific).
        parsed = urlparse(url)
        base_url = f"{parsed.scheme}://{parsed.netloc}"
        for path in _FEED_PATHS + _NEWS_FEED_PATHS:
            candidate_urls.append(base_url + path)
        # 2b. Known feed URLs for domains with a separate feed subdomain (e.g. BBC).
        extra_urls = _get_extra_feed_urls(domain)
        candidate_urls.extend(extra_urls)
        # 3. Validate all candidates in parallel, in batches of 10.
        async def _validate_and_collect(feed_url: str):
            try:
                return await _validate_feed(client, feed_url)
            except Exception:
                return None
        for i in range(0, len(candidate_urls), 10):
            batch = candidate_urls[i:i + 10]
            results = await asyncio.gather(*[_validate_and_collect(u) for u in batch])
            for feed_info in results:
                # De-duplicate by the feed's final (post-redirect) URL.
                if feed_info and feed_info["url"] not in seen_urls:
                    seen_urls.add(feed_info["url"])
                    result["feeds"].append(feed_info)
    logger.info(f"discover_all_feeds({domain}): {len(result['feeds'])} Feeds gefunden")
    return result
async def evaluate_feeds_with_claude(domain: str, feeds: list[dict]) -> list[dict]:
    """Let Claude rate the OSINT relevance of the discovered feeds.

    Args:
        domain: Domain name the feeds belong to.
        feeds: List of {"url", "title"} dicts.

    Returns:
        List of {"url", "title", "name"} dicts (relevant feeds only).
        On any error (timeout, non-zero exit, unparsable answer) the
        fallback accepts ALL feeds instead of silently dropping them.
    """
    if not feeds:
        return []
    # Fix: separate title and URL with " — "; previously both were
    # concatenated directly, making the numbered list ambiguous for the model.
    feed_list = "\n".join(
        f"  {i+1}. {f['title'] or f['url']} — {f['url']}"
        for i, f in enumerate(feeds)
    )
    prompt = f"""Du bist ein OSINT-Analyst. Bewerte diese RSS-Feeds der Domain "{domain}" nach OSINT-Relevanz.
OSINT-relevante Themen: Politik, Sicherheit, Wirtschaft, Internationale Beziehungen, Verteidigung, Konflikte, Terrorismus, Cybersecurity, Umweltkatastrophen, Technologie, Wissenschaft, Nachrichten allgemein.
NICHT relevant: Sport, Lifestyle, Rezepte, Unterhaltung, Reisen, Mode, Kultur/Kunst, Wetter, Kreuzworträtsel, Podcasts (allgemein), Leserbriefe, Kommentare/Meinung.
Feeds:
{feed_list}
Antworte AUSSCHLIESSLICH mit einem JSON-Array. Jedes Element:
{{"index": <1-basiert>, "relevant": true/false, "name": "<Anzeigename für OSINT-Monitor, z.B. 'Guardian World' oder 'Spiegel Politik'>"}}
Nur das JSON-Array, kein anderer Text."""
    try:
        cmd = [
            CLAUDE_PATH,
            "-p", prompt,
            "--output-format", "text",
        ]
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            # NOTE(review): PATH/HOME are hard-coded for the deployment user;
            # consider deriving them from config instead.
            env={"PATH": "/usr/local/bin:/usr/bin:/bin", "HOME": "/home/claude-dev"},
        )
        try:
            stdout, stderr = await asyncio.wait_for(
                process.communicate(), timeout=min(CLAUDE_TIMEOUT, 120)
            )
        except asyncio.TimeoutError:
            process.kill()
            # Fix: reap the killed child so it does not linger as a zombie.
            await process.wait()
            logger.warning(f"Claude-Bewertung Timeout für {domain}, nutze Fallback")
            return _fallback_all_feeds(domain, feeds)
        if process.returncode != 0:
            logger.warning(f"Claude-Bewertung fehlgeschlagen für {domain}, nutze Fallback")
            return _fallback_all_feeds(domain, feeds)
        response = stdout.decode("utf-8", errors="replace").strip()
        # Extract the JSON array (Claude sometimes wraps it in markdown fences)
        json_match = re.search(r'\[.*\]', response, re.DOTALL)
        if not json_match:
            logger.warning(f"Kein JSON in Claude-Antwort für {domain}, nutze Fallback")
            return _fallback_all_feeds(domain, feeds)
        evaluations = json.loads(json_match.group(0))
        relevant = []
        for ev in evaluations:
            idx = ev.get("index", 0) - 1  # model answers with 1-based indices
            if ev.get("relevant") and 0 <= idx < len(feeds):
                feed = feeds[idx]
                relevant.append({
                    "url": feed["url"],
                    "title": feed["title"],
                    "name": ev.get("name", feed["title"] or domain),
                })
        logger.info(f"Claude-Bewertung für {domain}: {len(relevant)}/{len(feeds)} relevant")
        return relevant
    except json.JSONDecodeError:
        logger.warning(f"JSON-Parse-Fehler bei Claude-Antwort für {domain}, nutze Fallback")
        return _fallback_all_feeds(domain, feeds)
    except Exception as e:
        logger.warning(f"Claude-Bewertung Fehler für {domain}: {e}, nutze Fallback")
        return _fallback_all_feeds(domain, feeds)
def _fallback_all_feeds(domain: str, feeds: list[dict]) -> list[dict]:
"""Fallback: Alle Feeds übernehmen mit Feed-Titel als Name."""
return [
{
"url": f["url"],
"title": f["title"],
"name": f["title"] or domain,
}
for f in feeds
]
async def get_feeds_with_metadata(tenant_id: int = None) -> list[dict]:
    """Return all active RSS feeds with metadata for Claude's source selection.

    With a tenant_id, global feeds (tenant_id IS NULL) plus that org's own
    feeds are returned; without one, every active RSS feed is returned.
    """
    from database import get_db

    db = await get_db()
    try:
        # Build the query once; the tenant filter is optional.
        query = (
            "SELECT name, url, domain, category FROM sources "
            "WHERE source_type = 'rss_feed' AND status = 'active'"
        )
        params = ()
        if tenant_id:
            query += " AND (tenant_id IS NULL OR tenant_id = ?)"
            params = (tenant_id,)
        cursor = await db.execute(query, params)
        rows = await cursor.fetchall()
        return [dict(row) for row in rows]
    except Exception as e:
        logger.error(f"Fehler beim Laden der Feed-Metadaten: {e}")
        return []
    finally:
        await db.close()
async def get_source_rules(tenant_id: int = None) -> dict:
    """Read the source configuration from the DB (global + org-specific).

    Returns:
        dict with:
        - excluded_domains: list of blocked domains
        - rss_feeds: dict with the categories deutsch/international/behoerden
    """
    from database import get_db

    db = await get_db()
    try:
        if tenant_id:
            cursor = await db.execute(
                "SELECT * FROM sources WHERE status = 'active' AND (tenant_id IS NULL OR tenant_id = ?)",
                (tenant_id,),
            )
        else:
            cursor = await db.execute(
                "SELECT * FROM sources WHERE status = 'active'"
            )
        rows = [dict(r) for r in await cursor.fetchall()]
        excluded_domains = []
        rss_feeds = {"deutsch": [], "international": [], "behoerden": []}
        # Map DB category -> rss_feeds bucket; every other category
        # (including NULL) falls back to "deutsch", as before.
        bucket_by_category = {"behoerde": "behoerden", "international": "international"}
        for src in rows:
            if src["source_type"] == "excluded":
                excluded_domains.append(src["domain"] or src["name"])
            elif src["source_type"] == "rss_feed" and src["url"]:
                bucket = bucket_by_category.get(src["category"], "deutsch")
                rss_feeds[bucket].append({"name": src["name"], "url": src["url"]})
        return {
            "excluded_domains": excluded_domains,
            "rss_feeds": rss_feeds,
        }
    except Exception as e:
        logger.error(f"Fehler beim Laden der Quellen-Regeln: {e}")
        # Fall back to the static configuration shipped in config.py.
        from config import RSS_FEEDS, EXCLUDED_SOURCES
        return {
            "excluded_domains": list(EXCLUDED_SOURCES),
            "rss_feeds": dict(RSS_FEEDS),
        }
    finally:
        await db.close()

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 8.6 KiB

444
src/static/components.js Normale Datei
Datei anzeigen

@@ -0,0 +1,444 @@
/**
* UI-Komponenten für das Dashboard.
*/
const UI = {
/**
 * Render one sidebar entry for an incident.
 *
 * @param {Object} incident - Row with id, title, status, article_count,
 *     created_by_username, visibility, type, refresh_mode.
 * @param {boolean} isActive - Whether this entry is the currently selected one.
 * @returns {string} HTML for the sidebar list.
 */
renderIncidentItem(incident, isActive) {
    // A running refresh overrides the normal active/archived dot state.
    const isRefreshing = App._refreshingIncidents && App._refreshingIncidents.has(incident.id);
    const dotClass = isRefreshing ? 'refreshing' : (incident.status === 'active' ? 'active' : 'archived');
    const activeClass = isActive ? 'active' : '';
    const creator = incident.created_by_username || '';
    return `
        <div class="incident-item ${activeClass}" data-id="${incident.id}" onclick="App.selectIncident(${incident.id})" role="button" tabindex="0">
            <span class="incident-dot ${dotClass}" id="dot-${incident.id}"></span>
            <div style="flex:1;min-width:0;">
                <div class="incident-name">${this.escape(incident.title)}</div>
                <div class="incident-meta">${incident.article_count} Artikel &middot; ${this.escape(creator)}</div>
            </div>
            ${incident.visibility === 'private' ? '<span class="badge badge-private" style="font-size:9px;">PRIVAT</span>' : ''}
            ${incident.type === 'research' ? '<span class="badge badge-research" style="font-size:9px;">RECH</span>' : ''}
            ${incident.refresh_mode === 'auto' ? '<span class="badge badge-auto" style="font-size:9px;">AUTO</span>' : ''}
        </div>
    `;
},
/**
 * Label and icon maps for fact-check statuses.
 */
// Long, human-readable labels (used for tooltips / screen readers).
factCheckLabels: {
    confirmed: 'Bestätigt durch mehrere Quellen',
    unconfirmed: 'Nicht unabhängig bestätigt',
    contradicted: 'Widerlegt',
    developing: 'Faktenlage noch im Fluss',
    established: 'Gesicherter Fakt (3+ Quellen)',
    disputed: 'Umstrittener Sachverhalt',
    unverified: 'Nicht unabhängig verifizierbar',
},
// Short labels for the filter-dropdown chips.
factCheckChipLabels: {
    confirmed: 'Bestätigt',
    unconfirmed: 'Unbestätigt',
    contradicted: 'Widerlegt',
    developing: 'Unklar',
    established: 'Gesichert',
    disputed: 'Umstritten',
    unverified: 'Ungeprüft',
},
// HTML-entity icons per status (rendered inside .factcheck-icon).
factCheckIcons: {
    confirmed: '&#10003;',
    unconfirmed: '?',
    contradicted: '&#10007;',
    developing: '&#8635;',
    established: '&#10003;',
    disputed: '&#9888;',
    unverified: '?',
},
/**
 * Render the fact-check filter bar (dropdown with one checkbox per status).
 *
 * @param {Array<Object>} factchecks - Fact-check rows (each with .status).
 * @returns {string} HTML, or '' when filtering would be pointless
 *     (zero or one distinct status present).
 */
renderFactCheckFilters(factchecks) {
    // Count which statuses actually occur.
    const statusCounts = {};
    factchecks.forEach(fc => {
        statusCounts[fc.status] = (statusCounts[fc.status] || 0) + 1;
    });
    // Fixed display order: strongest confirmation first.
    const statusOrder = ['confirmed', 'established', 'developing', 'unconfirmed', 'unverified', 'disputed', 'contradicted'];
    const usedStatuses = statusOrder.filter(s => statusCounts[s]);
    if (usedStatuses.length <= 1) return '';
    const items = usedStatuses.map(status => {
        const icon = this.factCheckIcons[status] || '?';
        const chipLabel = this.factCheckChipLabels[status] || status;
        const count = statusCounts[status];
        return `<label class="fc-dropdown-item" data-status="${status}">
            <input type="checkbox" checked onchange="App.toggleFactCheckFilter('${status}')">
            <span class="factcheck-icon ${status}">${icon}</span>
            <span class="fc-dropdown-label">${chipLabel}</span>
            <span class="fc-dropdown-count">${count}</span>
        </label>`;
    }).join('');
    return `<button class="fc-dropdown-toggle" onclick="App.toggleFcDropdown(event)">Filter</button>
        <div class="fc-dropdown-menu" id="fc-dropdown-menu">${items}</div>`;
},
/**
 * Render a single fact-check entry (status icon, claim, source count, evidence).
 *
 * @param {Object} fc - Row with .status, .claim, .evidence (free text with URLs).
 * @returns {string} HTML for the fact-check list.
 */
renderFactCheck(fc) {
    // Source count = number of URLs embedded in the evidence text.
    const urls = (fc.evidence || '').match(/https?:\/\/[^\s,)]+/g) || [];
    const count = urls.length;
    return `
        <div class="factcheck-item" data-fc-status="${fc.status}">
            <div class="factcheck-icon ${fc.status}" title="${this.factCheckLabels[fc.status] || fc.status}" aria-hidden="true">${this.factCheckIcons[fc.status] || '?'}</div>
            <span class="sr-only">${this.factCheckLabels[fc.status] || fc.status}</span>
            <div style="flex:1;">
                <div class="factcheck-claim">${this.escape(fc.claim)}</div>
                <div style="display:flex;align-items:center;gap:6px;margin-top:2px;">
                    <span class="factcheck-sources">${count} Quelle${count !== 1 ? 'n' : ''}</span>
                </div>
                <div class="evidence-block">${this.renderEvidence(fc.evidence || '')}</div>
            </div>
        </div>
    `;
},
/**
 * Render evidence with its explanatory text AND per-source chips.
 *
 * @param {string} text - Free-text evidence, possibly containing URLs.
 * @returns {string} HTML: explanation span plus one chip per URL.
 */
renderEvidence(text) {
    if (!text) return '<span class="evidence-empty">Keine Belege</span>';
    const urls = text.match(/https?:\/\/[^\s,)]+/g) || [];
    if (urls.length === 0) {
        return `<span class="evidence-text">${this.escape(text)}</span>`;
    }
    // Extract the explanatory text by stripping the URLs out of it.
    let explanation = text;
    urls.forEach(url => { explanation = explanation.replace(url, '').trim(); });
    // Clean up leftovers from URL removal: empty parens, doubled commas,
    // collapsed whitespace, trailing punctuation.
    explanation = explanation.replace(/\(\s*\)/g, '');
    explanation = explanation.replace(/,\s*,/g, ',');
    explanation = explanation.replace(/\s+/g, ' ').trim();
    explanation = explanation.replace(/[,.:;]+$/, '').trim();
    // One chip per URL, labelled with the hostname (minus leading "www.").
    const chips = urls.map(url => {
        let label;
        try { label = new URL(url).hostname.replace('www.', ''); } catch { label = url; }
        return `<a href="${this.escape(url)}" target="_blank" rel="noopener" class="evidence-chip" title="${this.escape(url)}">${this.escape(label)}</a>`;
    }).join('');
    const explanationHtml = explanation
        ? `<span class="evidence-text">${this.escape(explanation)}</span>`
        : '';
    return `${explanationHtml}<div class="evidence-chips">${chips}</div>`;
},
/**
* Verifizierungs-Badge.
*/
verificationBadge(status) {
const map = {
verified: { class: 'badge-verified', text: 'Verifiziert' },
unverified: { class: 'badge-unverified', text: 'Offen' },
contradicted: { class: 'badge-contradicted', text: 'Widerlegt' },
};
const badge = map[status] || map.unverified;
return `<span class="badge ${badge.class}">${badge.text}</span>`;
},
/**
* Toast-Benachrichtigung anzeigen.
*/
showToast(message, type = 'info', duration = 5000) {
const container = document.getElementById('toast-container');
const toast = document.createElement('div');
toast.className = `toast toast-${type}`;
toast.setAttribute('role', 'status');
toast.innerHTML = `<span class="toast-text">${this.escape(message)}</span>`;
container.appendChild(toast);
setTimeout(() => {
toast.style.opacity = '0';
toast.style.transform = 'translateX(100%)';
toast.style.transition = 'all 0.3s ease';
setTimeout(() => toast.remove(), 300);
}, duration);
},
/**
 * Show the progress bar and set its visual state for the given phase.
 *
 * @param {string} status - Backend phase: 'queued', 'researching',
 *     'deep_researching', 'analyzing' or 'factchecking'; unknown values
 *     fall back to 'queued'.
 */
showProgress(status) {
    const bar = document.getElementById('progress-bar');
    if (!bar) return;
    bar.style.display = 'block';
    // Map each phase to a 1-based step index and a German label.
    const steps = {
        queued: { active: 0, label: 'In Warteschlange...' },
        researching: { active: 1, label: 'Recherchiert Quellen...' },
        deep_researching: { active: 1, label: 'Tiefenrecherche läuft...' },
        analyzing: { active: 2, label: 'Analysiert Meldungen...' },
        factchecking: { active: 3, label: 'Faktencheck läuft...' },
    };
    const step = steps[status] || steps.queued;
    // Mark earlier steps 'done' and the current one 'active'.
    const stepIds = ['step-researching', 'step-analyzing', 'step-factchecking'];
    stepIds.forEach((id, i) => {
        const el = document.getElementById(id);
        if (!el) return;
        el.className = 'progress-step';
        if (i + 1 < step.active) el.classList.add('done');
        else if (i + 1 === step.active) el.classList.add('active');
    });
    const fill = document.getElementById('progress-fill');
    // 'queued' shows a minimal 5% sliver so the bar is visibly started.
    const percent = step.active === 0 ? 5 : Math.round((step.active / 3) * 100);
    if (fill) {
        fill.style.width = percent + '%';
    }
    // Keep ARIA values in sync with the visual state.
    bar.setAttribute('aria-valuenow', String(percent));
    bar.setAttribute('aria-valuetext', step.label);
    const label = document.getElementById('progress-label');
    if (label) label.textContent = step.label;
},
/**
* Fortschrittsanzeige ausblenden.
*/
hideProgress() {
const bar = document.getElementById('progress-bar');
if (bar) bar.style.display = 'none';
},
/**
 * Render the summary with inline citations and minimal markdown support.
 *
 * @param {string} summary - Markdown-ish summary text.
 * @param {string} sourcesJson - JSON array string of {nr, url, name} entries.
 * @param {string} incidentType - Accepted but not used in this implementation.
 * @returns {string} HTML for the briefing panel.
 */
renderSummary(summary, sourcesJson, incidentType) {
    if (!summary) return '<span style="color:var(--text-tertiary);">Noch keine Zusammenfassung.</span>';
    let sources = [];
    // Malformed source JSON degrades gracefully to "no citation links".
    try { sources = JSON.parse(sourcesJson || '[]'); } catch(e) {}
    // Minimal markdown rendering — the replacement ORDER below matters:
    // escape first, then block elements, then line breaks, then citations.
    let html = this.escape(summary);
    // "## " headings
    html = html.replace(/^## (.+)$/gm, '<h3 class="briefing-heading">$1</h3>');
    // **bold**
    html = html.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>');
    // Bullet lists ("- item"), then wrap consecutive <li> runs in <ul>
    html = html.replace(/^- (.+)$/gm, '<li>$1</li>');
    html = html.replace(/(<li>.*<\/li>\n?)+/gs, '<ul>$&</ul>');
    // Newlines -> <br> (but not directly before a generated tag)
    html = html.replace(/\n(?!<)/g, '<br>');
    // Drop superfluous <br> after block elements and collapse <br> runs
    html = html.replace(/<\/h3>(<br>)+/g, '</h3>');
    html = html.replace(/<\/ul>(<br>)+/g, '</ul>');
    html = html.replace(/(<br>){2,}/g, '<br>');
    // Turn inline citations [1], [2], ... into links to the matching source
    if (sources.length > 0) {
        html = html.replace(/\[(\d+)\]/g, (match, num) => {
            const src = sources.find(s => s.nr === parseInt(num));
            if (src && src.url) {
                return `<a href="${this.escape(src.url)}" target="_blank" rel="noopener" class="citation" title="${this.escape(src.name)}">[${num}]</a>`;
            }
            return match;
        });
    }
    return `<div class="briefing-content">${html}</div>`;
},
/**
 * Render the per-incident source overview (counts, languages, grid).
 *
 * @param {Array<Object>} articles - Rows with .source, .language, .source_url.
 * @returns {string} HTML, or '' when there are no articles.
 */
renderSourceOverview(articles) {
    if (!articles || articles.length === 0) return '';
    // Aggregate article count, languages and URLs per source name.
    const sourceMap = {};
    articles.forEach(a => {
        const name = a.source || 'Unbekannt';
        if (!sourceMap[name]) {
            sourceMap[name] = { count: 0, languages: new Set(), urls: [] };
        }
        sourceMap[name].count++;
        sourceMap[name].languages.add(a.language || 'de');
        if (a.source_url) sourceMap[name].urls.push(a.source_url);
    });
    // Sources sorted by article count, descending.
    const sources = Object.entries(sourceMap)
        .sort((a, b) => b[1].count - a[1].count);
    // Per-language article counts for the header chips ('de' is the default).
    const langCount = {};
    articles.forEach(a => {
        const lang = (a.language || 'de').toUpperCase();
        langCount[lang] = (langCount[lang] || 0) + 1;
    });
    const langChips = Object.entries(langCount)
        .sort((a, b) => b[1] - a[1])
        .map(([lang, count]) => `<span class="source-lang-chip">${lang} <strong>${count}</strong></span>`)
        .join('');
    let html = `<div class="source-overview-header">`;
    html += `<span class="source-overview-stat">${articles.length} Artikel aus ${sources.length} Quellen</span>`;
    html += `<div class="source-lang-chips">${langChips}</div>`;
    html += `</div>`;
    html += '<div class="source-overview-grid">';
    sources.forEach(([name, data]) => {
        const langs = [...data.languages].map(l => l.toUpperCase()).join('/');
        html += `<div class="source-overview-item">
            <span class="source-overview-name">${this.escape(name)}</span>
            <span class="source-overview-lang">${langs}</span>
            <span class="source-overview-count">${data.count}</span>
        </div>`;
    });
    html += '</div>';
    return html;
},
/**
 * Short display labels for source categories (keys match the DB values).
 */
_categoryLabels: {
    'nachrichtenagentur': 'Agentur',
    'oeffentlich-rechtlich': 'ÖR',
    'qualitaetszeitung': 'Qualität',
    'behoerde': 'Behörde',
    'fachmedien': 'Fach',
    'think-tank': 'Think Tank',
    'international': 'Intl.',
    'regional': 'Regional',
    'sonstige': 'Sonstige',
},
/**
 * Render a domain group in source management (expandable, with its feeds).
 *
 * @param {string} domain - Domain the group belongs to.
 * @param {Array<Object>} feeds - Source rows of that domain (id, name, url,
 *     category, source_type: 'rss_feed' | 'website' | 'excluded').
 * @param {boolean} isExcluded - Whether the whole domain is blocked.
 * @param {string} excludedNotes - Optional note shown for blocked domains.
 * @returns {string} HTML for the source-management list.
 */
renderSourceGroup(domain, feeds, isExcluded, excludedNotes) {
    // Category badge is taken from the first feed of the group.
    const catLabel = this._categoryLabels[feeds[0]?.category] || feeds[0]?.category || '';
    const feedCount = feeds.filter(f => f.source_type !== 'excluded').length;
    const hasMultiple = feedCount > 1;
    const displayName = domain || feeds[0]?.name || 'Unbekannt';
    const escapedDomain = this.escape(domain);
    if (isExcluded) {
        // Blocked domain: single row with unblock/delete actions only.
        const notesHtml = excludedNotes ? ` <span class="source-group-notes">${this.escape(excludedNotes)}</span>` : '';
        return `<div class="source-group">
            <div class="source-group-header excluded">
                <div class="source-group-info">
                    <span class="source-group-name">${this.escape(displayName)}</span>${notesHtml}
                </div>
                <span class="source-excluded-badge">Gesperrt</span>
                <div class="source-group-actions">
                    <button class="btn btn-small btn-secondary" onclick="App.unblockDomain('${escapedDomain}')">Entsperren</button>
                    <button class="source-delete-btn" onclick="App.deleteDomain('${escapedDomain}')" title="Löschen" aria-label="Löschen">&times;</button>
                </div>
            </div>
        </div>`;
    }
    // Active domain group: expandable only when it has more than one feed.
    const toggleAttr = hasMultiple ? `onclick="App.toggleGroup('${escapedDomain}')" role="button" tabindex="0" aria-expanded="false"` : '';
    const toggleIcon = hasMultiple ? '<span class="source-group-toggle" aria-hidden="true">&#9654;</span>' : '<span class="source-group-toggle-placeholder"></span>';
    let feedRows = '';
    if (hasMultiple) {
        const realFeeds = feeds.filter(f => f.source_type !== 'excluded');
        feedRows = `<div class="source-group-feeds" data-domain="${escapedDomain}">`;
        realFeeds.forEach((feed, i) => {
            // Tree connectors: └─ for the last row, ├─ for all others.
            const isLast = i === realFeeds.length - 1;
            const connector = isLast ? '\u2514\u2500' : '\u251C\u2500';
            const typeLabel = feed.source_type === 'rss_feed' ? 'RSS' : 'Web';
            const urlDisplay = feed.url ? this._shortenUrl(feed.url) : '';
            feedRows += `<div class="source-feed-row">
                <span class="source-feed-connector">${connector}</span>
                <span class="source-feed-name">${this.escape(feed.name)}</span>
                <span class="source-type-badge type-${feed.source_type}">${typeLabel}</span>
                <span class="source-feed-url" title="${this.escape(feed.url || '')}">${this.escape(urlDisplay)}</span>
                <button class="source-delete-btn" onclick="App.deleteSingleFeed(${feed.id})" title="Löschen" aria-label="Löschen">&times;</button>
            </div>`;
        });
        feedRows += '</div>';
    }
    const feedCountBadge = feedCount > 0
        ? `<span class="source-feed-count">${feedCount} Feed${feedCount !== 1 ? 's' : ''}</span>`
        : '';
    return `<div class="source-group">
        <div class="source-group-header" ${toggleAttr}>
            ${toggleIcon}
            <div class="source-group-info">
                <span class="source-group-name">${this.escape(displayName)}</span>
            </div>
            <span class="source-category-badge cat-${feeds[0]?.category || 'sonstige'}">${catLabel}</span>
            ${feedCountBadge}
            <div class="source-group-actions" onclick="event.stopPropagation()">
                <button class="btn btn-small btn-secondary" onclick="App.blockDomainDirect('${escapedDomain}')">Sperren</button>
                <button class="source-delete-btn" onclick="App.deleteDomain('${escapedDomain}')" title="Löschen" aria-label="Löschen">&times;</button>
            </div>
        </div>
        ${feedRows}
    </div>`;
},
/**
* URL kürzen für die Anzeige in Feed-Zeilen.
*/
_shortenUrl(url) {
try {
const u = new URL(url);
let path = u.pathname;
if (path.length > 40) path = path.substring(0, 37) + '...';
return u.hostname + path;
} catch {
return url.length > 50 ? url.substring(0, 47) + '...' : url;
}
},
/**
 * Render evidence URLs as compact hostname chips (legacy fallback).
 * Kept for old call sites; simply delegates to renderEvidence().
 */
renderEvidenceChips(text) {
    return this.renderEvidence(text);
},
/**
* URLs in Evidence-Text als klickbare Links rendern (Legacy).
*/
linkifyEvidence(text) {
if (!text) return '';
const escaped = this.escape(text);
return escaped.replace(
/(https?:\/\/[^\s,)]+)/g,
'<a href="$1" target="_blank" rel="noopener">$1</a>'
);
},
/**
* HTML escapen.
*/
escape(str) {
if (!str) return '';
const div = document.createElement('div');
div.textContent = str;
return div.innerHTML;
},
};

4027
src/static/css/style.css Normale Datei

Datei-Diff unterdrückt, da er zu groß ist Diff laden

541
src/static/dashboard.html Normale Datei
Datei anzeigen

@@ -0,0 +1,541 @@
<!DOCTYPE html>
<html lang="de">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script>(function(){var t=localStorage.getItem('osint_theme');if(t)document.documentElement.setAttribute('data-theme',t);try{var a=JSON.parse(localStorage.getItem('osint_a11y')||'{}');Object.keys(a).forEach(function(k){if(a[k])document.documentElement.setAttribute('data-a11y-'+k,'true');});}catch(e){}})()</script>
<link rel="icon" type="image/png" sizes="32x32" href="/static/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/static/favicon-16x16.png">
<link rel="apple-touch-icon" sizes="180x180" href="/static/apple-touch-icon.png">
<link rel="shortcut icon" href="/static/favicon.ico">
<title>AegisSight Monitor</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@400;500;600;700&display=swap" rel="stylesheet">
<link href="https://cdn.jsdelivr.net/npm/gridstack@12/dist/gridstack.min.css" rel="stylesheet">
<link rel="stylesheet" href="/static/css/style.css?v=20260304b">
</head>
<body>
<a href="#main-content" class="skip-link">Zum Hauptinhalt springen</a>
<div class="dashboard">
<!-- Header -->
<header class="header">
<div class="header-left">
<div class="header-logo">Aegis<span>Sight</span> Monitor</div>
<h1 class="sr-only">AegisSight Monitor Dashboard</h1>
</div>
<div class="header-right">
<button class="btn btn-secondary btn-small theme-toggle-btn" id="theme-toggle" onclick="ThemeManager.toggle()" title="Theme wechseln" aria-label="Theme wechseln">&#9788;</button>
<span class="header-user" id="header-user"></span>
<button class="btn btn-secondary btn-small" id="logout-btn">Abmelden</button>
</div>
</header>
<!-- Sidebar -->
<nav class="sidebar" aria-label="Seitenleiste">
<div class="sidebar-section">
<button class="btn btn-primary btn-full btn-small" id="new-incident-btn">+ Neue Lage / Recherche</button>
</div>
<div class="sidebar-filter">
<button class="sidebar-filter-btn active" data-filter="all" onclick="App.setSidebarFilter('all')" aria-pressed="true">Alle</button>
<button class="sidebar-filter-btn" data-filter="mine" onclick="App.setSidebarFilter('mine')" aria-pressed="false">Eigene</button>
</div>
<div class="sidebar-section">
<h2 class="sidebar-section-title collapsible" onclick="App.toggleSidebarSection('active-incidents')" role="button" tabindex="0">
<span class="sidebar-chevron" id="chevron-active-incidents">&#9662;</span>
Aktive Lagen
<span class="sidebar-section-count" id="count-active-incidents"></span>
</h2>
<div id="active-incidents" aria-live="polite"></div>
</div>
<div class="sidebar-section">
<h2 class="sidebar-section-title collapsible" onclick="App.toggleSidebarSection('active-research')" role="button" tabindex="0">
<span class="sidebar-chevron" id="chevron-active-research">&#9662;</span>
Aktive Recherchen
<span class="sidebar-section-count" id="count-active-research"></span>
</h2>
<div id="active-research" aria-live="polite"></div>
</div>
<div class="sidebar-section">
<h2 class="sidebar-section-title collapsible" onclick="App.toggleSidebarSection('archived-incidents')" role="button" tabindex="0">
<span class="sidebar-chevron" id="chevron-archived-incidents">&#9662;</span>
Archiv
<span class="sidebar-section-count" id="count-archived-incidents"></span>
</h2>
<div id="archived-incidents" aria-live="polite"></div>
</div>
<div class="sidebar-sources-link">
<button class="btn btn-secondary btn-full btn-small" onclick="App.openSourceManagement()">Quellen verwalten</button>
<button class="btn btn-secondary btn-full btn-small sidebar-feedback-btn" onclick="App.openFeedback()">Feedback senden</button>
<div class="sidebar-stats-mini">
<span id="stat-sources-count">0 Quellen</span> &middot; <span id="stat-articles-count">0 Artikel</span>
</div>
</div>
</nav>
<!-- Main Content -->
<main class="main-content" id="main-content">
<div class="empty-state" id="empty-state">
<div class="empty-state-icon">&#9737;</div>
<div class="empty-state-title">Kein Vorfall ausgewählt</div>
<div class="empty-state-text">Erstelle eine neue Lage oder wähle einen bestehenden Vorfall aus der Seitenleiste.</div>
</div>
<!-- Lagebild (hidden by default) -->
<div id="incident-view" style="display:none;">
<!-- Header Strip -->
<div class="incident-header-strip" id="incident-header-strip">
<div class="incident-header-row0">
<span class="incident-type-label" id="incident-type-badge"></span>
<span class="auto-refresh-indicator" id="meta-refresh-mode"></span>
</div>
<div class="incident-header-row1">
<div class="incident-header-left">
<h2 class="incident-header-title" id="incident-title"></h2>
</div>
<div class="incident-header-actions">
<button class="btn btn-primary btn-small" id="refresh-btn">Aktualisieren</button>
<button class="btn btn-secondary btn-small" id="edit-incident-btn">Bearbeiten</button>
<div class="export-dropdown" id="export-dropdown">
<button class="btn btn-secondary btn-small" onclick="App.toggleExportDropdown(event)">Exportieren &#9662;</button>
<div class="export-dropdown-menu" id="export-dropdown-menu">
<button class="export-dropdown-item" onclick="App.exportIncident('md','report')">Lagebericht (Markdown)</button>
<button class="export-dropdown-item" onclick="App.exportIncident('json','report')">Lagebericht (JSON)</button>
<hr class="export-dropdown-divider">
<button class="export-dropdown-item" onclick="App.exportIncident('md','full')">Vollexport (Markdown)</button>
<button class="export-dropdown-item" onclick="App.exportIncident('json','full')">Vollexport (JSON)</button>
<hr class="export-dropdown-divider">
<button class="export-dropdown-item" onclick="App.printIncident()">Drucken / PDF</button>
</div>
</div>
<button class="btn btn-secondary btn-small" id="archive-incident-btn">Archivieren</button>
<button class="btn btn-danger btn-small" id="delete-incident-btn">Löschen</button>
</div>
</div>
<div class="incident-header-row2">
<div class="incident-header-row2-left">
<span class="incident-creator-badge">von <strong id="incident-creator"></strong></span>
<span class="intl-badge" id="intl-badge"></span>
<span id="incident-description" class="incident-description-text"></span>
</div>
<div class="incident-header-row2-right">
<div class="summary-meta" id="summary-meta">
<span id="meta-updated" class="meta-updated-link" role="button" tabindex="0" onclick="App.toggleRefreshHistory()" onkeydown="if(event.key==='Enter')App.toggleRefreshHistory()"></span>
</div>
<div class="refresh-history-popover" id="refresh-history-popover" style="display:none;">
<div class="refresh-history-header">
<span class="refresh-history-title">Refresh-Verlauf</span>
<button class="refresh-history-close" onclick="App.closeRefreshHistory()">&times;</button>
</div>
<div class="refresh-history-list" id="refresh-history-list">
<div style="padding:12px;color:var(--text-disabled);font-size:12px;">Lade...</div>
</div>
</div>
</div>
</div>
</div>
<!-- Fortschrittsanzeige -->
<div class="progress-bar" id="progress-bar" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" aria-label="Verarbeitungsfortschritt" style="display:none;">
<div class="progress-steps">
<div class="progress-step" id="step-researching">
<div class="progress-step-dot"></div>
<span>Recherche</span>
</div>
<div class="progress-step" id="step-analyzing">
<div class="progress-step-dot"></div>
<span>Analyse</span>
</div>
<div class="progress-step" id="step-factchecking">
<div class="progress-step-dot"></div>
<span>Faktencheck</span>
</div>
</div>
<div class="progress-track">
<div class="progress-fill" id="progress-fill"></div>
</div>
<div class="progress-label-container">
<span id="progress-label" class="progress-label">Warte auf Start...</span>
<span id="progress-timer" class="progress-timer"></span>
</div>
<button id="progress-cancel-btn" class="progress-cancel-btn" onclick="App.cancelRefresh()">Abbrechen</button>
</div>
<!-- Layout-Toolbar -->
<div class="layout-toolbar" id="layout-toolbar" style="display:none;">
<div class="layout-toggles">
<button class="layout-toggle-btn active" data-tile="lagebild" onclick="LayoutManager.toggleTile('lagebild')" aria-pressed="true">Lagebild</button>
<button class="layout-toggle-btn active" data-tile="faktencheck" onclick="LayoutManager.toggleTile('faktencheck')" aria-pressed="true">Faktencheck</button>
<button class="layout-toggle-btn active" data-tile="quellen" onclick="LayoutManager.toggleTile('quellen')" aria-pressed="true">Quellen</button>
<button class="layout-toggle-btn active" data-tile="timeline" onclick="LayoutManager.toggleTile('timeline')" aria-pressed="true">Timeline</button>
</div>
<button class="btn btn-secondary btn-small" onclick="LayoutManager.reset()">Layout zurücksetzen</button>
</div>
<!-- gridstack Dashboard-Grid -->
<div class="grid-stack">
<div class="grid-stack-item" gs-id="lagebild" gs-x="0" gs-y="0" gs-w="6" gs-h="4" gs-min-w="4" gs-min-h="4">
<div class="grid-stack-item-content">
<div class="card incident-analysis-summary">
<div class="card-header">
<div class="card-title clickable" onclick="openContentModal('Lagebild', 'summary-content')">Lagebild</div>
<span class="lagebild-timestamp" id="lagebild-timestamp"></span>
</div>
<div id="summary-content">
<div id="summary-text" class="summary-text"></div>
</div>
</div>
</div>
</div>
<div class="grid-stack-item" gs-id="faktencheck" gs-x="6" gs-y="0" gs-w="6" gs-h="4" gs-min-w="4" gs-min-h="4">
<div class="grid-stack-item-content">
<div class="card incident-analysis-factcheck" id="factcheck-card">
<div class="card-header">
<div class="card-title clickable" onclick="openContentModal('Faktencheck', 'factcheck-list')">Faktencheck</div>
<div class="fc-filter-bar" id="fc-filters"></div>
</div>
<div class="factcheck-list" id="factcheck-list">
<div class="empty-state" style="padding:20px;">
<div class="empty-state-text">Noch keine Fakten geprüft</div>
</div>
</div>
</div>
</div>
</div>
<div class="grid-stack-item" gs-id="quellen" gs-x="0" gs-y="4" gs-w="12" gs-h="2" gs-min-w="6" gs-min-h="2">
<div class="grid-stack-item-content">
<div class="card source-overview-card">
<div class="card-header source-overview-header-toggle" onclick="App.toggleSourceOverview()" role="button" tabindex="0" aria-expanded="false">
<span class="source-overview-chevron" id="source-overview-chevron" title="Aufklappen" aria-hidden="true">&#9656;</span>
<div class="card-title clickable">Quellenübersicht</div>
<button class="btn btn-secondary btn-small source-detail-btn" onclick="event.stopPropagation(); openContentModal('Quellenübersicht', 'source-overview-content')">Detailansicht</button>
</div>
<div id="source-overview-content" style="display:none;"></div>
</div>
</div>
</div>
<div class="grid-stack-item" gs-id="timeline" gs-x="0" gs-y="5" gs-w="12" gs-h="4" gs-min-w="6" gs-min-h="4">
<div class="grid-stack-item-content">
<div class="card timeline-card">
<div class="card-header">
<div class="card-title clickable" onclick="openContentModal('Ereignis-Timeline', 'timeline')">Ereignis-Timeline</div>
<div class="ht-controls">
<div class="ht-filter-group">
<button class="ht-filter-btn active" data-filter="all" onclick="App.setTimelineFilter('all')" aria-pressed="true">Alle</button>
<button class="ht-filter-btn" data-filter="articles" onclick="App.setTimelineFilter('articles')" aria-pressed="false">Meldungen</button>
<button class="ht-filter-btn" data-filter="snapshots" onclick="App.setTimelineFilter('snapshots')" aria-pressed="false">Lageberichte</button>
</div>
<span class="ht-count" id="article-count"></span>
<div class="ht-range-group">
<button class="ht-range-btn" data-range="24h" onclick="App.setTimelineRange('24h')" aria-pressed="false">24h</button>
<button class="ht-range-btn" data-range="7d" onclick="App.setTimelineRange('7d')" aria-pressed="false">7T</button>
<button class="ht-range-btn active" data-range="all" onclick="App.setTimelineRange('all')" aria-pressed="true">Alles</button>
</div>
<label for="timeline-search" class="sr-only">Timeline durchsuchen</label>
<input type="text" id="timeline-search" class="timeline-filter-input" placeholder="Suche..." oninput="App.debouncedRerenderTimeline()">
</div>
</div>
<div id="timeline" class="ht-timeline-container">
<div class="ht-empty">Noch keine Meldungen</div>
</div>
</div>
</div>
</div>
</div>
<!-- Parkplatz für ausgeblendete Kacheln -->
<div id="tile-parking" style="display:none;"></div>
</div>
</main>
</div>
<!-- Modal: Neue Lage -->
<div class="modal-overlay" id="modal-new" role="dialog" aria-modal="true" aria-labelledby="modal-new-title">
<div class="modal">
<div class="modal-header">
<div class="modal-title" id="modal-new-title">Neue Lage anlegen</div>
<button class="modal-close" onclick="closeModal('modal-new')" aria-label="Schließen">&times;</button>
</div>
<form id="new-incident-form">
<div class="modal-body">
<div class="form-group">
<label for="inc-title">Titel des Vorfalls</label>
<input type="text" id="inc-title" required placeholder="z.B. Explosion in Madrid">
</div>
<div class="form-group">
<label for="inc-description">Beschreibung / Kontext</label>
<textarea id="inc-description" placeholder="Weitere Details zum Vorfall (optional)"></textarea>
</div>
<div class="form-group">
<label for="inc-type">Art der Lage</label>
<select id="inc-type" onchange="toggleTypeDefaults()">
<option value="adhoc">Ad-hoc Lage (Breaking News)</option>
<option value="research">Recherche (Hintergrund)</option>
</select>
<div class="form-hint" id="type-hint">
RSS-Feeds + WebSearch, automatische Aktualisierung empfohlen
</div>
</div>
<div class="form-group">
<label>Quellen</label>
<div class="toggle-group">
<label class="toggle-label">
<input type="checkbox" id="inc-international" checked>
<span class="toggle-switch"></span>
<span class="toggle-text">Internationale Quellen einbeziehen</span>
</label>
<div class="form-hint" id="sources-hint">DE + internationale Feeds (Reuters, BBC, Al Jazeera etc.)</div>
</div>
</div>
<div class="form-group">
<label>Sichtbarkeit</label>
<div class="toggle-group">
<label class="toggle-label">
<input type="checkbox" id="inc-visibility" checked>
<span class="toggle-switch"></span>
<span class="toggle-text" id="visibility-text">Öffentlich — für alle Nutzer sichtbar</span>
</label>
</div>
</div>
<div class="form-group">
<label for="inc-refresh-mode">Aktualisierung</label>
<select id="inc-refresh-mode" onchange="toggleRefreshInterval()">
<option value="manual">Manuell</option>
<option value="auto">Automatisch</option>
</select>
</div>
<div class="form-group conditional-field" id="refresh-interval-field">
<label for="inc-refresh-value">Intervall</label>
<div class="interval-input-group">
<input type="number" id="inc-refresh-value" min="10" value="15">
<select id="inc-refresh-unit" onchange="updateIntervalMin()">
<option value="1" selected>Minuten</option>
<option value="60">Stunden</option>
<option value="1440">Tage</option>
<option value="10080">Wochen</option>
</select>
</div>
</div>
<div class="form-group">
<label for="inc-retention">Aufbewahrung (Tage)</label>
<input type="number" id="inc-retention" min="0" max="999" value="30" placeholder="0 = Unbegrenzt">
<div class="form-hint">0 = Unbegrenzt, max. 999 Tage</div>
</div>
<div class="form-group" style="margin-top: 8px;">
<label>E-Mail-Benachrichtigungen</label>
<div class="form-hint" style="margin-bottom: 8px;">Per E-Mail benachrichtigen bei:</div>
<div class="toggle-group">
<label class="toggle-label">
<input type="checkbox" id="inc-notify-summary">
<span class="toggle-switch"></span>
<span class="toggle-text">Neues Lagebild</span>
</label>
</div>
<div class="toggle-group" style="margin-top: 8px;">
<label class="toggle-label">
<input type="checkbox" id="inc-notify-new-articles">
<span class="toggle-switch"></span>
<span class="toggle-text">Neue Artikel</span>
</label>
</div>
<div class="toggle-group" style="margin-top: 8px;">
<label class="toggle-label">
<input type="checkbox" id="inc-notify-status-change">
<span class="toggle-switch"></span>
<span class="toggle-text">Statusänderung Faktencheck</span>
</label>
</div>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" onclick="closeModal('modal-new')">Abbrechen</button>
<button type="submit" class="btn btn-primary" id="modal-new-submit">Lage anlegen</button>
</div>
</form>
</div>
</div>
<!-- Modal: Quellenverwaltung -->
<div class="modal-overlay" id="modal-sources" role="dialog" aria-modal="true" aria-labelledby="modal-sources-title">
<div class="modal modal-wide">
<div class="modal-header">
<div class="modal-title" id="modal-sources-title">Quellenverwaltung</div>
<button class="modal-close" onclick="closeModal('modal-sources')" aria-label="Schließen">&times;</button>
</div>
<div class="modal-body sources-modal-body">
<!-- Stats-Leiste -->
<div class="sources-stats-bar" id="sources-stats-bar"></div>
<!-- Toolbar -->
<div class="sources-toolbar">
<div class="sources-filters">
<label for="sources-filter-type" class="sr-only">Quellentyp filtern</label>
<select id="sources-filter-type" class="timeline-filter-select" onchange="App.filterSources()">
<option value="">Alle Typen</option>
<option value="rss_feed">RSS-Feed</option>
<option value="web_source">Web-Quelle</option>
<option value="excluded">Gesperrt</option>
</select>
<label for="sources-filter-category" class="sr-only">Kategorie filtern</label>
<select id="sources-filter-category" class="timeline-filter-select" onchange="App.filterSources()">
<option value="">Alle Kategorien</option>
<option value="nachrichtenagentur">Nachrichtenagentur</option>
<option value="oeffentlich-rechtlich">Öffentlich-Rechtlich</option>
<option value="qualitaetszeitung">Qualitätszeitung</option>
<option value="behoerde">Behörde</option>
<option value="fachmedien">Fachmedien</option>
<option value="think-tank">Think Tank</option>
<option value="international">International</option>
<option value="regional">Regional</option>
<option value="sonstige">Sonstige</option>
</select>
<label for="sources-search" class="sr-only">Quellen durchsuchen</label>
<input type="text" id="sources-search" class="timeline-filter-input sources-search-input" placeholder="Suche..." oninput="App.filterSources()">
</div>
<div class="sources-toolbar-actions">
<button class="btn btn-secondary btn-small source-block-btn" onclick="App.showBlockDomainDialog()">Domain sperren</button>
<button class="btn btn-primary btn-small" onclick="App.toggleSourceForm()">+ Quelle</button>
</div>
</div>
<!-- Inline-Formular: Domain sperren (ein-/ausklappbar) -->
<div class="sources-add-form" id="sources-block-form" style="display:none;">
<div class="sources-form-row">
<div class="form-group flex-1">
<label for="block-domain-input">Domain</label>
<input type="text" id="block-domain-input" placeholder="z.B. bild.de">
</div>
<div class="form-group">
<label for="block-domain-notes">Notizen</label>
<input type="text" id="block-domain-notes" class="source-notes-input" placeholder="Optional">
</div>
<button class="btn btn-danger btn-small" onclick="App.blockDomain()">Sperren</button>
<button class="btn btn-secondary btn-small" onclick="App.showBlockDomainDialog(false)">Abbrechen</button>
</div>
</div>
<!-- Inline-Formular: Quelle hinzufügen (ein-/ausklappbar) -->
<div class="sources-add-form" id="sources-add-form" style="display:none;">
<div class="sources-form-row">
<div class="form-group flex-1">
<label for="src-discover-url">URL oder Domain</label>
<input type="text" id="src-discover-url" placeholder="z.B. netzpolitik.org">
</div>
<button class="btn btn-secondary btn-small" id="src-discover-btn" onclick="App.discoverSource()">Erkennen</button>
</div>
<!-- Ergebnis-Anzeige (nach Discovery) -->
<div id="src-discovery-result" class="sources-discovery-result" style="display:none;">
<div class="sources-add-form-grid">
<div class="form-group">
<label for="src-name">Name</label>
<input type="text" id="src-name" placeholder="Wird erkannt...">
</div>
<div class="form-group">
<label for="src-category">Kategorie</label>
<select id="src-category">
<option value="nachrichtenagentur">Nachrichtenagentur</option>
<option value="oeffentlich-rechtlich">Öffentlich-Rechtlich</option>
<option value="qualitaetszeitung">Qualitätszeitung</option>
<option value="behoerde">Behörde</option>
<option value="fachmedien">Fachmedien</option>
<option value="think-tank">Think Tank</option>
<option value="international">International</option>
<option value="regional">Regional</option>
<option value="sonstige" selected>Sonstige</option>
</select>
</div>
<div class="form-group">
<label>Typ</label>
<input type="text" id="src-type-display" class="input-readonly" readonly>
</div>
<div class="form-group" id="src-rss-url-group">
<label>RSS-Feed URL</label>
<input type="text" id="src-rss-url" class="input-readonly" readonly>
</div>
<div class="form-group">
<label>Domain</label>
<input type="text" id="src-domain" class="input-readonly" readonly>
</div>
<div class="form-group">
<label for="src-notes">Notizen</label>
<input type="text" id="src-notes" placeholder="Optional">
</div>
</div>
<div class="sources-discovery-actions">
<button class="btn btn-primary btn-small" onclick="App.saveSource()">Speichern</button>
<button class="btn btn-secondary btn-small" onclick="App.toggleSourceForm(false)">Abbrechen</button>
</div>
</div>
</div>
<!-- Quellen-Liste (gruppiert) -->
<div class="sources-list" id="sources-list">
<div class="empty-state-text" style="padding:var(--sp-3xl);text-align:center;">Lade Quellen...</div>
</div>
</div>
</div>
</div>
<!-- Modal: Content-Viewer (wiederverwendbar für Lagebild, Faktencheck, Quellenübersicht, Timeline) -->
<div class="modal-overlay" id="modal-content-viewer" role="dialog" aria-modal="true" aria-labelledby="content-viewer-title">
<div class="modal modal-content-viewer">
<div class="modal-header">
<div class="modal-title" id="content-viewer-title"></div>
<div class="modal-header-extra" id="content-viewer-header-extra"></div>
<button class="modal-close" onclick="closeModal('modal-content-viewer')" aria-label="Schließen">&times;</button>
</div>
<div class="modal-body" id="content-viewer-body"></div>
</div>
</div>
<!-- Modal: Feedback -->
<div class="modal-overlay" id="modal-feedback" role="dialog" aria-modal="true" aria-labelledby="modal-feedback-title">
<div class="modal">
<div class="modal-header">
<div class="modal-title" id="modal-feedback-title">Feedback senden</div>
<button class="modal-close" onclick="closeModal('modal-feedback')" aria-label="Schließen">&times;</button>
</div>
<form id="feedback-form">
<div class="modal-body">
<div class="form-group">
<label for="fb-category">Kategorie</label>
<select id="fb-category">
<option value="bug">Fehlerbericht</option>
<option value="feature">Feature-Wunsch</option>
<option value="question">Frage</option>
<option value="other">Sonstiges</option>
</select>
</div>
<div class="form-group">
<label for="fb-message">Nachricht</label>
<textarea id="fb-message" required minlength="10" maxlength="5000" rows="6" placeholder="Beschreibe dein Anliegen (mind. 10 Zeichen)..."></textarea>
<div class="form-hint"><span id="fb-char-count">0</span> / 5.000 Zeichen</div>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" onclick="closeModal('modal-feedback')">Abbrechen</button>
<button type="submit" class="btn btn-primary" id="fb-submit-btn">Absenden</button>
</div>
</form>
</div>
</div>
<!-- Toast Container -->
<div class="toast-container" id="toast-container" aria-live="polite" aria-atomic="true"></div>
<script src="https://cdn.jsdelivr.net/npm/gridstack@12/dist/gridstack-all.js"></script>
<script src="/static/js/api.js?v=20260304a"></script>
<script src="/static/js/ws.js?v=20260304a"></script>
<script src="/static/js/components.js?v=20260304a"></script>
<script src="/static/js/layout.js?v=20260304a"></script>
<script src="/static/js/app.js?v=20260304a"></script>
</body>
</html>

BIN
src/static/favicon-16x16.png Normale Datei

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 568 B

BIN
src/static/favicon-192x192.png Normale Datei

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 9.3 KiB

BIN
src/static/favicon-32x32.png Normale Datei

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 1.3 KiB

BIN
src/static/favicon-48x48.png Normale Datei

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 2.1 KiB

BIN
src/static/favicon-512x512.png Normale Datei

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 27 KiB

BIN
src/static/favicon.ico Normale Datei

Binäre Datei nicht angezeigt.

Nachher

Breite:  |  Höhe:  |  Größe: 590 B

212
src/static/index.html Normale Datei
Datei anzeigen

@@ -0,0 +1,212 @@
<!DOCTYPE html>
<html lang="de">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script>(function(){var t=localStorage.getItem('osint_theme');if(t)document.documentElement.setAttribute('data-theme',t);try{var a=JSON.parse(localStorage.getItem('osint_a11y')||'{}');Object.keys(a).forEach(function(k){if(a[k])document.documentElement.setAttribute('data-a11y-'+k,'true');});}catch(e){}})()</script>
<link rel="icon" type="image/png" sizes="32x32" href="/static/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/static/favicon-16x16.png">
<link rel="apple-touch-icon" sizes="180x180" href="/static/apple-touch-icon.png">
<link rel="shortcut icon" href="/static/favicon.ico">
<title>AegisSight Monitor - Login</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Poppins:wght@400;500;600;700&display=swap" rel="stylesheet">
<link rel="stylesheet" href="/static/css/style.css?v=20260304a">
</head>
<body>
<a href="#login-form" class="skip-link">Zum Anmeldeformular springen</a>
<main class="login-container">
<div class="login-box">
<div class="login-logo">
<h1>Aegis<span style="color: var(--accent)">Sight</span></h1>
<div class="subtitle">Lagemonitor</div>
</div>
<div id="login-error" class="login-error" role="alert" aria-live="assertive"></div>
<div id="login-success" class="login-success" role="status" aria-live="polite" style="display:none;"></div>
<!-- Schritt 1: E-Mail eingeben -->
<form id="email-form">
<div class="form-group">
<label for="email">E-Mail-Adresse</label>
<input type="email" id="email" name="email" autocomplete="email" required placeholder="name@organisation.de">
</div>
<button type="submit" class="btn btn-primary btn-full" id="email-btn">Anmelden</button>
</form>
<!-- Schritt 2: Code eingeben -->
<form id="code-form" style="display:none;">
<p style="color: var(--text-secondary); margin: 0 0 16px 0; font-size: 14px;">
Ein 6-stelliger Code wurde an <strong id="sent-email"></strong> gesendet.
</p>
<div class="form-group">
<label for="code">Code eingeben</label>
<input type="text" id="code" name="code" autocomplete="one-time-code" required
placeholder="000000" maxlength="6" pattern="[0-9]{6}"
style="text-align:center; font-size:24px; letter-spacing:8px; font-family:monospace;">
</div>
<button type="submit" class="btn btn-primary btn-full" id="code-btn">Verifizieren</button>
<button type="button" class="btn btn-secondary btn-full" id="back-btn" style="margin-top:8px;">Zurück</button>
</form>
<div style="text-align:center;margin-top:16px;">
<button class="btn btn-secondary btn-small theme-toggle-btn" id="theme-toggle" onclick="ThemeManager.toggle()" title="Theme wechseln" aria-label="Theme wechseln">&#9788;</button>
</div>
</div>
</main>
<script>
const ThemeManager = {
  _key: 'osint_theme',
  // Apply the persisted theme (defaulting to dark) and sync the toggle icon.
  init() {
    const theme = localStorage.getItem(this._key) || 'dark';
    document.documentElement.setAttribute('data-theme', theme);
    this._updateIcon(theme);
  },
  // Flip between dark and light mode, persist the choice, refresh the icon.
  toggle() {
    const isDark = (document.documentElement.getAttribute('data-theme') || 'dark') === 'dark';
    const next = isDark ? 'light' : 'dark';
    document.documentElement.setAttribute('data-theme', next);
    localStorage.setItem(this._key, next);
    this._updateIcon(next);
  },
  // Sun glyph while dark mode is active, moon glyph otherwise.
  _updateIcon(theme) {
    const btn = document.getElementById('theme-toggle');
    if (!btn) return;
    btn.textContent = theme === 'dark' ? '\u2600' : '\u263D';
  }
};
ThemeManager.init();
</script>
<script>
// If a session token is already stored and still valid, skip the login form.
const token = localStorage.getItem('osint_token');
if (token) {
fetch('/api/auth/me', {
headers: { 'Authorization': 'Bearer ' + token }
}).then(r => {
if (r.ok) window.location.href = '/dashboard';
});
// NOTE(review): a rejected fetch (network error) is not caught here; the
// rejection is left unhandled and the login form simply stays visible.
}
// Check the URL for a magic-link token (?token=... from the login e-mail).
const urlParams = new URLSearchParams(window.location.search);
const verifyToken = urlParams.get('token');
if (verifyToken) {
// Exchange the magic-link token for a session token immediately.
(async () => {
try {
const response = await fetch('/api/auth/verify', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ token: verifyToken }),
});
if (!response.ok) {
const data = await response.json();
throw new Error(data.detail || 'Verifikation fehlgeschlagen');
}
const data = await response.json();
localStorage.setItem('osint_token', data.access_token);
localStorage.setItem('osint_username', data.username);
window.location.href = '/dashboard';
} catch (err) {
const errorEl = document.getElementById('login-error');
errorEl.textContent = err.message;
errorEl.style.display = 'block';
// Strip the token from the URL so a reload does not retry verification.
window.history.replaceState({}, '', '/');
}
})();
}
// E-mail address currently going through the two-step login flow.
let currentEmail = '';
// Step 1: request a one-time login code for the entered e-mail address.
document.getElementById('email-form').addEventListener('submit', async (e) => {
e.preventDefault();
const errorEl = document.getElementById('login-error');
const successEl = document.getElementById('login-success');
const btn = document.getElementById('email-btn');
errorEl.style.display = 'none';
successEl.style.display = 'none';
btn.disabled = true;
btn.textContent = 'Wird gesendet...';
currentEmail = document.getElementById('email').value.trim();
try {
const response = await fetch('/api/auth/magic-link', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ email: currentEmail }),
});
if (!response.ok) {
const data = await response.json();
throw new Error(data.detail || 'Anfrage fehlgeschlagen');
}
// Switch from the e-mail step to the code-entry step.
document.getElementById('email-form').style.display = 'none';
document.getElementById('code-form').style.display = 'block';
document.getElementById('sent-email').textContent = currentEmail;
document.getElementById('code').focus();
} catch (err) {
errorEl.textContent = err.message;
errorEl.style.display = 'block';
} finally {
btn.disabled = false;
btn.textContent = 'Anmelden';
}
});
// Step 2: verify the 6-digit code and store the returned session token.
document.getElementById('code-form').addEventListener('submit', async (e) => {
e.preventDefault();
const errorEl = document.getElementById('login-error');
const btn = document.getElementById('code-btn');
errorEl.style.display = 'none';
btn.disabled = true;
btn.textContent = 'Wird geprueft...';
try {
const response = await fetch('/api/auth/verify-code', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
email: currentEmail,
code: document.getElementById('code').value.trim(),
}),
});
if (!response.ok) {
const data = await response.json();
throw new Error(data.detail || 'Verifizierung fehlgeschlagen');
}
const data = await response.json();
localStorage.setItem('osint_token', data.access_token);
localStorage.setItem('osint_username', data.username);
window.location.href = '/dashboard';
} catch (err) {
errorEl.textContent = err.message;
errorEl.style.display = 'block';
} finally {
btn.disabled = false;
btn.textContent = 'Verifizieren';
}
});
// Back button: return from the code step to the e-mail step.
document.getElementById('back-btn').addEventListener('click', () => {
document.getElementById('code-form').style.display = 'none';
document.getElementById('email-form').style.display = 'block';
document.getElementById('login-error').style.display = 'none';
document.getElementById('code').value = '';
});
</script>
</body>
</html>

196
src/static/js/api.js Normale Datei
Datei anzeigen

@@ -0,0 +1,196 @@
/**
 * REST client for the OSINT Lagemonitor backend.
 *
 * Every call resolves to the parsed JSON body (or null for 204 responses).
 * A 401 response clears the stored credentials and redirects to the login page.
 */
const API = {
  baseUrl: '/api',

  /** Default request headers; attaches the stored JWT when one exists. */
  _getHeaders() {
    const jwt = localStorage.getItem('osint_token');
    return {
      'Content-Type': 'application/json',
      'Authorization': jwt ? `Bearer ${jwt}` : '',
    };
  },

  /**
   * Perform a JSON request against the API with a 30 s timeout.
   * Throws an Error carrying the server-supplied detail message on failure.
   */
  async _request(method, path, body = null) {
    const abortCtrl = new AbortController();
    const timeoutId = setTimeout(() => abortCtrl.abort(), 30000);
    const init = {
      method,
      headers: this._getHeaders(),
      signal: abortCtrl.signal,
    };
    if (body) {
      init.body = JSON.stringify(body);
    }
    let response;
    try {
      response = await fetch(`${this.baseUrl}${path}`, init);
    } catch (err) {
      if (err.name === 'AbortError') {
        throw new Error('Zeitüberschreitung bei der Anfrage');
      }
      throw err;
    } finally {
      clearTimeout(timeoutId);
    }
    if (response.status === 401) {
      // Session expired: drop credentials and return to the login page.
      localStorage.removeItem('osint_token');
      localStorage.removeItem('osint_username');
      window.location.href = '/';
      return;
    }
    if (!response.ok) {
      const payload = await response.json().catch(() => ({}));
      let detail = payload.detail;
      if (Array.isArray(detail)) {
        // FastAPI validation errors arrive as a list of {msg, ...} objects.
        detail = detail.map(e => e.msg || JSON.stringify(e)).join('; ');
      } else if (typeof detail === 'object' && detail !== null) {
        detail = JSON.stringify(detail);
      }
      throw new Error(detail || `Fehler ${response.status}`);
    }
    return response.status === 204 ? null : response.json();
  },

  // --- Auth ---
  getMe() {
    return this._request('GET', '/auth/me');
  },

  // --- Incidents ---
  listIncidents(statusFilter = null) {
    const suffix = statusFilter ? `?status_filter=${statusFilter}` : '';
    return this._request('GET', `/incidents${suffix}`);
  },
  createIncident(data) {
    return this._request('POST', '/incidents', data);
  },
  getRefreshingIncidents() {
    return this._request('GET', '/incidents/refreshing');
  },
  getIncident(id) {
    return this._request('GET', `/incidents/${id}`);
  },
  updateIncident(id, data) {
    return this._request('PUT', `/incidents/${id}`, data);
  },
  deleteIncident(id) {
    return this._request('DELETE', `/incidents/${id}`);
  },
  getArticles(incidentId) {
    return this._request('GET', `/incidents/${incidentId}/articles`);
  },
  getFactChecks(incidentId) {
    return this._request('GET', `/incidents/${incidentId}/factchecks`);
  },
  getSnapshots(incidentId) {
    return this._request('GET', `/incidents/${incidentId}/snapshots`);
  },
  refreshIncident(id) {
    return this._request('POST', `/incidents/${id}/refresh`);
  },
  getRefreshLog(incidentId, limit = 20) {
    return this._request('GET', `/incidents/${incidentId}/refresh-log?limit=${limit}`);
  },
  cancelRefresh(id) {
    return this._request('POST', `/incidents/${id}/cancel-refresh`);
  },

  // --- Sources (Quellenverwaltung) ---
  listSources(params = {}) {
    const query = new URLSearchParams();
    for (const key of ['source_type', 'category', 'source_status']) {
      if (params[key]) query.set(key, params[key]);
    }
    const qs = query.toString();
    return this._request('GET', qs ? `/sources?${qs}` : '/sources');
  },
  createSource(data) {
    return this._request('POST', '/sources', data);
  },
  updateSource(id, data) {
    return this._request('PUT', `/sources/${id}`, data);
  },
  deleteSource(id) {
    return this._request('DELETE', `/sources/${id}`);
  },
  getSourceStats() {
    return this._request('GET', '/sources/stats');
  },
  refreshSourceCounts() {
    return this._request('POST', '/sources/refresh-counts');
  },
  discoverSource(url) {
    return this._request('POST', '/sources/discover', { url });
  },
  discoverMulti(url) {
    return this._request('POST', '/sources/discover-multi', { url });
  },
  rediscoverExisting() {
    return this._request('POST', '/sources/rediscover-existing');
  },
  blockDomain(domain, notes) {
    return this._request('POST', '/sources/block-domain', { domain, notes });
  },
  unblockDomain(domain) {
    return this._request('POST', '/sources/unblock-domain', { domain });
  },
  deleteDomain(domain) {
    return this._request('DELETE', `/sources/domain/${encodeURIComponent(domain)}`);
  },

  // --- Notifications ---
  listNotifications(limit = 50) {
    return this._request('GET', `/notifications?limit=${limit}`);
  },
  getUnreadCount() {
    return this._request('GET', '/notifications/unread-count');
  },
  markNotificationsRead(ids = null) {
    return this._request('PUT', '/notifications/mark-read', { notification_ids: ids });
  },

  // --- Feedback ---
  sendFeedback(data) {
    return this._request('POST', '/feedback', data);
  },

  // --- Export (returns the raw Response so the caller can stream the file) ---
  exportIncident(id, format, scope) {
    const jwt = localStorage.getItem('osint_token');
    const url = `${this.baseUrl}/incidents/${id}/export?format=${format}&scope=${scope}`;
    return fetch(url, {
      headers: { 'Authorization': `Bearer ${jwt}` },
    });
  },
};

2813
src/static/js/app.js Normale Datei

Datei-Diff unterdrückt, da er zu groß ist Diff laden

625
src/static/js/components.js Normale Datei
Datei anzeigen

@@ -0,0 +1,625 @@
/**
* UI-Komponenten für das Dashboard.
*/
const UI = {
/**
* Render a sidebar entry for one incident ("Lage").
*
* @param {Object} incident - Incident record from the API (id, title, status,
*   article_count, created_by_username, visibility, refresh_mode).
* @param {boolean} isActive - Whether this incident is currently selected.
* @returns {string} HTML for the sidebar list.
*/
renderIncidentItem(incident, isActive) {
// The status dot switches to a spinner state while a refresh is in flight.
const isRefreshing = App._refreshingIncidents && App._refreshingIncidents.has(incident.id);
const dotClass = isRefreshing ? 'refreshing' : (incident.status === 'active' ? 'active' : 'archived');
const activeClass = isActive ? 'active' : '';
const creator = incident.created_by_username || '';
// NOTE(review): incident.id is interpolated unescaped into the onclick —
// assumes server-issued numeric IDs; confirm against the API schema.
return `
<div class="incident-item ${activeClass}" data-id="${incident.id}" onclick="App.selectIncident(${incident.id})" role="button" tabindex="0">
<span class="incident-dot ${dotClass}" id="dot-${incident.id}"></span>
<div style="flex:1;min-width:0;">
<div class="incident-name">${this.escape(incident.title)}</div>
<div class="incident-meta">${incident.article_count} Artikel &middot; ${this.escape(creator)}</div>
</div>
${incident.visibility === 'private' ? '<span class="badge badge-private" style="font-size:9px;">PRIVAT</span>' : ''}
${incident.refresh_mode === 'auto' ? '<span class="badge badge-auto" title="Auto-Refresh aktiv">&#x21bb;</span>' : ''}
</div>
`;
},
/**
* Status metadata for fact-check rendering, keyed by the status values the
* backend delivers: long-form labels (screen-reader text), explanatory hover
* tooltips, short chip labels for the filter bar, and HTML-entity icons.
*/
factCheckLabels: {
confirmed: 'Bestätigt durch mehrere Quellen',
unconfirmed: 'Nicht unabhängig bestätigt',
contradicted: 'Widerlegt',
developing: 'Faktenlage noch im Fluss',
established: 'Gesicherter Fakt (3+ Quellen)',
disputed: 'Umstrittener Sachverhalt',
unverified: 'Nicht unabhängig verifizierbar',
},
factCheckTooltips: {
confirmed: 'Bestätigt: Mindestens zwei unabhängige, seriöse Quellen stützen diese Aussage übereinstimmend.',
established: 'Gesichert: Drei oder mehr unabhängige Quellen bestätigen den Sachverhalt. Hohe Verlässlichkeit.',
developing: 'Unklar: Die Faktenlage ist noch im Fluss. Neue Informationen können das Bild verändern.',
unconfirmed: 'Unbestätigt: Bisher nur aus einer Quelle bekannt. Eine unabhängige Bestätigung steht aus.',
unverified: 'Ungeprüft: Die Aussage konnte bisher nicht anhand verfügbarer Quellen überprüft werden.',
disputed: 'Umstritten: Quellen widersprechen sich. Es gibt sowohl stützende als auch widersprechende Belege.',
contradicted: 'Widerlegt: Zuverlässige Quellen widersprechen dieser Aussage. Wahrscheinlich falsch.',
},
factCheckChipLabels: {
confirmed: 'Bestätigt',
unconfirmed: 'Unbestätigt',
contradicted: 'Widerlegt',
developing: 'Unklar',
established: 'Gesichert',
disputed: 'Umstritten',
unverified: 'Ungeprüft',
},
factCheckIcons: {
confirmed: '&#10003;',
unconfirmed: '?',
contradicted: '&#10007;',
developing: '&#8635;',
established: '&#10003;',
disputed: '&#9888;',
unverified: '?',
},
/**
* Render the fact-check filter bar (toggle button + checkbox dropdown).
* Returns an empty string when at most one distinct status is present,
* since filtering would then be pointless.
*/
renderFactCheckFilters(factchecks) {
// Count which statuses actually occur in the current fact-check list.
const statusCounts = {};
factchecks.forEach(fc => {
statusCounts[fc.status] = (statusCounts[fc.status] || 0) + 1;
});
const statusOrder = ['confirmed', 'established', 'developing', 'unconfirmed', 'unverified', 'disputed', 'contradicted'];
const usedStatuses = statusOrder.filter(s => statusCounts[s]);
if (usedStatuses.length <= 1) return '';
// One checkbox row per occurring status, with icon, label and count.
const items = usedStatuses.map(status => {
const icon = this.factCheckIcons[status] || '?';
const chipLabel = this.factCheckChipLabels[status] || status;
const tooltip = this.factCheckTooltips[status] || '';
const count = statusCounts[status];
return `<label class="fc-dropdown-item" data-status="${status}" title="${tooltip}">
<input type="checkbox" checked onchange="App.toggleFactCheckFilter('${status}')">
<span class="factcheck-icon ${status}">${icon}</span>
<span class="fc-dropdown-label">${chipLabel}</span>
<span class="fc-dropdown-count">${count}</span>
</label>`;
}).join('');
return `<button class="fc-dropdown-toggle" onclick="App.toggleFcDropdown(event)">Filter</button>
<div class="fc-dropdown-menu" id="fc-dropdown-menu">${items}</div>`;
},
/**
* Render a single fact-check entry: status icon, claim text, source count
* and the evidence block (see renderEvidence).
*/
renderFactCheck(fc) {
// Source count = number of URLs found in the evidence free text.
const urls = (fc.evidence || '').match(/https?:\/\/[^\s,)]+/g) || [];
const count = urls.length;
return `
<div class="factcheck-item" data-fc-status="${fc.status}">
<div class="factcheck-icon ${fc.status}" title="${this.factCheckTooltips[fc.status] || this.factCheckLabels[fc.status] || fc.status}" aria-hidden="true">${this.factCheckIcons[fc.status] || '?'}</div>
<span class="sr-only">${this.factCheckLabels[fc.status] || fc.status}</span>
<div style="flex:1;">
<div class="factcheck-claim">${this.escape(fc.claim)}</div>
<div style="display:flex;align-items:center;gap:6px;margin-top:2px;">
<span class="factcheck-sources">${count} Quelle${count !== 1 ? 'n' : ''}</span>
</div>
<div class="evidence-block">${this.renderEvidence(fc.evidence || '')}</div>
</div>
</div>
`;
},
/**
* Render evidence as explanatory text plus one clickable chip per source URL.
* Falls back to a plain text span when no URLs are present.
*/
renderEvidence(text) {
if (!text) return '<span class="evidence-empty">Keine Belege</span>';
const urls = text.match(/https?:\/\/[^\s,)]+/g) || [];
if (urls.length === 0) {
return `<span class="evidence-text">${this.escape(text)}</span>`;
}
// Extract the explanatory text by stripping out the URLs.
// NOTE(review): String.replace with a string removes only the FIRST
// occurrence of each URL — a duplicated URL would partially remain;
// confirm whether duplicates can occur in evidence.
let explanation = text;
urls.forEach(url => { explanation = explanation.replace(url, '').trim(); });
// Clean up leftovers: empty parentheses, doubled commas, excess whitespace,
// trailing punctuation.
explanation = explanation.replace(/\(\s*\)/g, '');
explanation = explanation.replace(/,\s*,/g, ',');
explanation = explanation.replace(/\s+/g, ' ').trim();
explanation = explanation.replace(/[,.:;]+$/, '').trim();
// One chip per URL, labelled with its hostname (minus "www.").
const chips = urls.map(url => {
let label;
try { label = new URL(url).hostname.replace('www.', ''); } catch { label = url; }
return `<a href="${this.escape(url)}" target="_blank" rel="noopener" class="evidence-chip" title="${this.escape(url)}">${this.escape(label)}</a>`;
}).join('');
const explanationHtml = explanation
? `<span class="evidence-text">${this.escape(explanation)}</span>`
: '';
return `${explanationHtml}<div class="evidence-chips">${chips}</div>`;
},
/**
* Verifizierungs-Badge.
*/
verificationBadge(status) {
const map = {
verified: { class: 'badge-verified', text: 'Verifiziert' },
unverified: { class: 'badge-unverified', text: 'Offen' },
contradicted: { class: 'badge-contradicted', text: 'Widerlegt' },
};
const badge = map[status] || map.unverified;
return `<span class="badge ${badge.class}">${badge.text}</span>`;
},
/**
* Toast-Benachrichtigung anzeigen.
*/
_toastTimers: new Map(),
showToast(message, type = 'info', duration = 5000) {
const container = document.getElementById('toast-container');
// Duplikat? Bestehenden Toast neu animieren
const existing = Array.from(container.children).find(
t => t.dataset.msg === message && t.dataset.type === type
);
if (existing) {
clearTimeout(this._toastTimers.get(existing));
// Kurz rausschieben, dann neu reingleiten
existing.style.transition = 'none';
existing.style.opacity = '0';
existing.style.transform = 'translateX(100%)';
void existing.offsetWidth; // Reflow erzwingen
existing.style.transition = 'all 0.3s ease';
existing.style.opacity = '1';
existing.style.transform = 'translateX(0)';
const timer = setTimeout(() => {
existing.style.opacity = '0';
existing.style.transform = 'translateX(100%)';
setTimeout(() => { existing.remove(); this._toastTimers.delete(existing); }, 300);
}, duration);
this._toastTimers.set(existing, timer);
return;
}
const toast = document.createElement('div');
toast.className = `toast toast-${type}`;
toast.setAttribute('role', 'status');
toast.dataset.msg = message;
toast.dataset.type = type;
toast.innerHTML = `<span class="toast-text">${this.escape(message)}</span>`;
container.appendChild(toast);
const timer = setTimeout(() => {
toast.style.opacity = '0';
toast.style.transform = 'translateX(100%)';
toast.style.transition = 'all 0.3s ease';
setTimeout(() => { toast.remove(); this._toastTimers.delete(toast); }, 300);
}, duration);
this._toastTimers.set(toast, timer);
},
// Epoch ms when the active phase started (null while queued/idle).
_progressStartTime: null,
// setInterval handle for the elapsed-time display.
_progressTimer: null,
/**
 * Show the progress bar and set its phase.
 * @param {string} status - queued | researching | deep_researching | analyzing | factchecking
 * @param {Object} extra - optional: queue_position, detail, started_at (server timestamp)
 */
showProgress(status, extra = {}) {
const bar = document.getElementById('progress-bar');
if (!bar) return;
bar.style.display = 'block';
bar.classList.remove('progress-bar--complete', 'progress-bar--error');
// Phase -> active step index (1-based; 0 = still queued) and label text.
const steps = {
queued: { active: 0, label: 'In Warteschlange...' },
researching: { active: 1, label: 'Recherchiert Quellen...' },
deep_researching: { active: 1, label: 'Tiefenrecherche läuft...' },
analyzing: { active: 2, label: 'Analysiert Meldungen...' },
factchecking: { active: 3, label: 'Faktencheck läuft...' },
};
const step = steps[status] || steps.queued;
// Show the queue position, or an explicit detail text if provided
let labelText = step.label;
if (status === 'queued' && extra.queue_position > 1) {
labelText = `In Warteschlange (Position ${extra.queue_position})...`;
} else if (extra.detail) {
labelText = extra.detail;
}
// Start the timer on the transition from queued to an active phase
if (step.active > 0 && !this._progressStartTime) {
if (extra.started_at) {
// Prefer the real start time reported by the server
const serverStart = parseUTC(extra.started_at);
this._progressStartTime = serverStart ? serverStart.getTime() : Date.now();
} else {
this._progressStartTime = Date.now();
}
this._startProgressTimer();
}
const stepIds = ['step-researching', 'step-analyzing', 'step-factchecking'];
stepIds.forEach((id, i) => {
const el = document.getElementById(id);
if (!el) return;
el.className = 'progress-step';
if (i + 1 < step.active) el.classList.add('done');
else if (i + 1 === step.active) el.classList.add('active');
});
const fill = document.getElementById('progress-fill');
// 5% while queued so the bar is visibly started; otherwise step/3.
const percent = step.active === 0 ? 5 : Math.round((step.active / 3) * 100);
if (fill) {
fill.style.width = percent + '%';
}
// Keep the ARIA attributes on the progress bar in sync
bar.setAttribute('aria-valuenow', String(percent));
bar.setAttribute('aria-valuetext', labelText);
const label = document.getElementById('progress-label');
if (label) label.textContent = labelText;
// Make the cancel button visible
const cancelBtn = document.getElementById('progress-cancel-btn');
if (cancelBtn) cancelBtn.style.display = '';
},
/**
* Timer-Intervall starten (1x pro Sekunde).
*/
_startProgressTimer() {
if (this._progressTimer) return;
const timerEl = document.getElementById('progress-timer');
if (!timerEl) return;
this._progressTimer = setInterval(() => {
if (!this._progressStartTime) return;
const elapsed = Math.floor((Date.now() - this._progressStartTime) / 1000);
const mins = Math.floor(elapsed / 60);
const secs = elapsed % 60;
timerEl.textContent = `${mins}:${String(secs).padStart(2, '0')}`;
}, 1000);
},
/**
* Abschluss-Animation: Grüner Balken mit Summary-Text.
*/
showProgressComplete(data) {
const bar = document.getElementById('progress-bar');
if (!bar) return;
// Timer stoppen
this._stopProgressTimer();
// Alle Steps auf done
['step-researching', 'step-analyzing', 'step-factchecking'].forEach(id => {
const el = document.getElementById(id);
if (el) { el.className = 'progress-step done'; }
});
// Fill auf 100%
const fill = document.getElementById('progress-fill');
if (fill) fill.style.width = '100%';
// Complete-Klasse
bar.classList.remove('progress-bar--error');
bar.classList.add('progress-bar--complete');
// Label mit Summary
const parts = [];
if (data.new_articles > 0) {
parts.push(`${data.new_articles} neue Artikel`);
}
if (data.confirmed_count > 0) {
parts.push(`${data.confirmed_count} Fakten bestätigt`);
}
if (data.contradicted_count > 0) {
parts.push(`${data.contradicted_count} widerlegt`);
}
const summaryText = parts.length > 0 ? parts.join(', ') : 'Keine neuen Entwicklungen';
const label = document.getElementById('progress-label');
if (label) label.textContent = `Abgeschlossen: ${summaryText}`;
// Cancel-Button ausblenden
const cancelBtn = document.getElementById('progress-cancel-btn');
if (cancelBtn) cancelBtn.style.display = 'none';
bar.setAttribute('aria-valuenow', '100');
bar.setAttribute('aria-valuetext', 'Abgeschlossen');
},
/**
* Fehler-Zustand: Roter Balken mit Fehlermeldung.
*/
showProgressError(errorMsg, willRetry = false, delay = 0) {
const bar = document.getElementById('progress-bar');
if (!bar) return;
bar.style.display = 'block';
// Timer stoppen
this._stopProgressTimer();
// Error-Klasse
bar.classList.remove('progress-bar--complete');
bar.classList.add('progress-bar--error');
const label = document.getElementById('progress-label');
if (label) {
label.textContent = willRetry
? `Fehlgeschlagen \u2014 erneuter Versuch in ${delay}s...`
: `Fehlgeschlagen: ${errorMsg}`;
}
// Cancel-Button ausblenden
const cancelBtn = document.getElementById('progress-cancel-btn');
if (cancelBtn) cancelBtn.style.display = 'none';
// Bei finalem Fehler nach 6s ausblenden
if (!willRetry) {
setTimeout(() => this.hideProgress(), 6000);
}
},
/**
* Timer-Intervall stoppen und zurücksetzen.
*/
_stopProgressTimer() {
if (this._progressTimer) {
clearInterval(this._progressTimer);
this._progressTimer = null;
}
this._progressStartTime = null;
const timerEl = document.getElementById('progress-timer');
if (timerEl) timerEl.textContent = '';
},
/**
* Fortschrittsanzeige ausblenden.
*/
hideProgress() {
const bar = document.getElementById('progress-bar');
if (bar) {
bar.style.display = 'none';
bar.classList.remove('progress-bar--complete', 'progress-bar--error');
}
this._stopProgressTimer();
},
/**
 * Render a summary with inline citations and a source directory.
 * @param {string} summary - markdown-ish summary text
 * @param {string} sourcesJson - JSON array of {nr, url, name} citation sources
 * @param {string} incidentType - accepted but not used in this renderer
 */
renderSummary(summary, sourcesJson, incidentType) {
if (!summary) return '<span style="color:var(--text-tertiary);">Noch keine Zusammenfassung.</span>';
let sources = [];
try { sources = JSON.parse(sourcesJson || '[]'); } catch(e) {}
// Minimal markdown rendering; escape FIRST, then re-introduce markup.
// The replacement order below is significant.
let html = this.escape(summary);
// ## headings
html = html.replace(/^## (.+)$/gm, '<h3 class="briefing-heading">$1</h3>');
// **bold**
html = html.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>');
// lists (- item)
html = html.replace(/^- (.+)$/gm, '<li>$1</li>');
html = html.replace(/(<li>.*<\/li>\n?)+/gs, '<ul>$&</ul>');
// line breaks (but not in front of generated block tags)
html = html.replace(/\n(?!<)/g, '<br>');
// strip redundant <br> after block elements, collapse repeated <br>
html = html.replace(/<\/h3>(<br>)+/g, '</h3>');
html = html.replace(/<\/ul>(<br>)+/g, '</ul>');
html = html.replace(/(<br>){2,}/g, '<br>');
// Render inline citations [1], [2] ... as clickable links
if (sources.length > 0) {
html = html.replace(/\[(\d+)\]/g, (match, num) => {
const src = sources.find(s => s.nr === parseInt(num));
if (src && src.url) {
return `<a href="${this.escape(src.url)}" target="_blank" rel="noopener" class="citation" title="${this.escape(src.name)}">[${num}]</a>`;
}
// No matching source: leave the bracket text untouched
return match;
});
}
return `<div class="briefing-content">${html}</div>`;
},
/**
* Quellenübersicht für eine Lage rendern.
*/
renderSourceOverview(articles) {
if (!articles || articles.length === 0) return '';
// Nach Quelle aggregieren
const sourceMap = {};
articles.forEach(a => {
const name = a.source || 'Unbekannt';
if (!sourceMap[name]) {
sourceMap[name] = { count: 0, languages: new Set(), urls: [] };
}
sourceMap[name].count++;
sourceMap[name].languages.add(a.language || 'de');
if (a.source_url) sourceMap[name].urls.push(a.source_url);
});
const sources = Object.entries(sourceMap)
.sort((a, b) => b[1].count - a[1].count);
// Sprach-Statistik
const langCount = {};
articles.forEach(a => {
const lang = (a.language || 'de').toUpperCase();
langCount[lang] = (langCount[lang] || 0) + 1;
});
const langChips = Object.entries(langCount)
.sort((a, b) => b[1] - a[1])
.map(([lang, count]) => `<span class="source-lang-chip">${lang} <strong>${count}</strong></span>`)
.join('');
let html = `<div class="source-overview-header">`;
html += `<span class="source-overview-stat">${articles.length} Artikel aus ${sources.length} Quellen</span>`;
html += `<div class="source-lang-chips">${langChips}</div>`;
html += `</div>`;
html += '<div class="source-overview-grid">';
sources.forEach(([name, data]) => {
const langs = [...data.languages].map(l => l.toUpperCase()).join('/');
html += `<div class="source-overview-item">
<span class="source-overview-name">${this.escape(name)}</span>
<span class="source-overview-lang">${langs}</span>
<span class="source-overview-count">${data.count}</span>
</div>`;
});
html += '</div>';
return html;
},
/**
 * Short display labels per source category key
 * (used for the badge in renderSourceGroup).
 */
_categoryLabels: {
'nachrichtenagentur': 'Agentur',
'oeffentlich-rechtlich': 'ÖR',
'qualitaetszeitung': 'Qualität',
'behoerde': 'Behörde',
'fachmedien': 'Fach',
'think-tank': 'Think Tank',
'international': 'Intl.',
'regional': 'Regional',
'sonstige': 'Sonstige',
},
/**
 * Render a domain group (expandable, with its feeds).
 * @param {string} domain - grouping key (may be empty; falls back to first feed name)
 * @param {Array} feeds - feed objects of this domain ({id, name, url, category, source_type})
 * @param {boolean} isExcluded - whether the domain is blocked
 * @param {string} excludedNotes - optional note shown for blocked domains
 */
renderSourceGroup(domain, feeds, isExcluded, excludedNotes) {
const catLabel = this._categoryLabels[feeds[0]?.category] || feeds[0]?.category || '';
// Feeds of type 'excluded' are placeholders and not counted
const feedCount = feeds.filter(f => f.source_type !== 'excluded').length;
const hasMultiple = feedCount > 1;
const displayName = domain || feeds[0]?.name || 'Unbekannt';
const escapedDomain = this.escape(domain);
if (isExcluded) {
// Blocked domain: show badge plus unblock/delete actions only
const notesHtml = excludedNotes ? ` <span class="source-group-notes">${this.escape(excludedNotes)}</span>` : '';
return `<div class="source-group">
<div class="source-group-header excluded">
<div class="source-group-info">
<span class="source-group-name">${this.escape(displayName)}</span>${notesHtml}
</div>
<span class="source-excluded-badge">Gesperrt</span>
<div class="source-group-actions">
<button class="btn btn-small btn-secondary" onclick="App.unblockDomain('${escapedDomain}')">Entsperren</button>
<button class="source-delete-btn" onclick="App.deleteDomain('${escapedDomain}')" title="Löschen" aria-label="Löschen">&times;</button>
</div>
</div>
</div>`;
}
// Active domain group; only expandable when it has more than one feed
const toggleAttr = hasMultiple ? `onclick="App.toggleGroup('${escapedDomain}')" role="button" tabindex="0" aria-expanded="false"` : '';
const toggleIcon = hasMultiple ? '<span class="source-group-toggle" aria-hidden="true">&#9654;</span>' : '<span class="source-group-toggle-placeholder"></span>';
let feedRows = '';
if (hasMultiple) {
const realFeeds = feeds.filter(f => f.source_type !== 'excluded');
feedRows = `<div class="source-group-feeds" data-domain="${escapedDomain}">`;
realFeeds.forEach((feed, i) => {
const isLast = i === realFeeds.length - 1;
// Tree connector glyphs: └─ for the last row, ├─ otherwise
const connector = isLast ? '\u2514\u2500' : '\u251C\u2500';
const typeLabel = feed.source_type === 'rss_feed' ? 'RSS' : 'Web';
const urlDisplay = feed.url ? this._shortenUrl(feed.url) : '';
feedRows += `<div class="source-feed-row">
<span class="source-feed-connector">${connector}</span>
<span class="source-feed-name">${this.escape(feed.name)}</span>
<span class="source-type-badge type-${feed.source_type}">${typeLabel}</span>
<span class="source-feed-url" title="${this.escape(feed.url || '')}">${this.escape(urlDisplay)}</span>
<button class="source-delete-btn" onclick="App.deleteSingleFeed(${feed.id})" title="Löschen" aria-label="Löschen">&times;</button>
</div>`;
});
feedRows += '</div>';
}
const feedCountBadge = feedCount > 0
? `<span class="source-feed-count">${feedCount} Feed${feedCount !== 1 ? 's' : ''}</span>`
: '';
return `<div class="source-group">
<div class="source-group-header" ${toggleAttr}>
${toggleIcon}
<div class="source-group-info">
<span class="source-group-name">${this.escape(displayName)}</span>
</div>
<span class="source-category-badge cat-${feeds[0]?.category || 'sonstige'}">${catLabel}</span>
${feedCountBadge}
<div class="source-group-actions" onclick="event.stopPropagation()">
<button class="btn btn-small btn-secondary" onclick="App.blockDomainDirect('${escapedDomain}')">Sperren</button>
<button class="source-delete-btn" onclick="App.deleteDomain('${escapedDomain}')" title="Löschen" aria-label="Löschen">&times;</button>
</div>
</div>
${feedRows}
</div>`;
},
/**
* URL kürzen für die Anzeige in Feed-Zeilen.
*/
_shortenUrl(url) {
try {
const u = new URL(url);
let path = u.pathname;
if (path.length > 40) path = path.substring(0, 37) + '...';
return u.hostname + path;
} catch {
return url.length > 50 ? url.substring(0, 47) + '...' : url;
}
},
/**
 * Render evidence URLs as compact hostname chips (legacy fallback).
 * Thin delegate kept for backwards compatibility; see renderEvidence().
 */
renderEvidenceChips(text) {
return this.renderEvidence(text);
},
/**
* URLs in Evidence-Text als klickbare Links rendern (Legacy).
*/
linkifyEvidence(text) {
if (!text) return '';
const escaped = this.escape(text);
return escaped.replace(
/(https?:\/\/[^\s,)]+)/g,
'<a href="$1" target="_blank" rel="noopener">$1</a>'
);
},
/**
* HTML escapen.
*/
escape(str) {
if (!str) return '';
const div = document.createElement('div');
div.textContent = str;
return div.innerHTML;
},
};

282
src/static/js/layout.js Normale Datei
Datei anzeigen

@@ -0,0 +1,282 @@
/**
 * LayoutManager: drag & resize dashboard layout built on gridstack.js.
 * The layout is persisted to localStorage; reset() restores the default.
 */
const LayoutManager = {
// GridStack instance (null until init()).
_grid: null,
// localStorage key for the persisted layout.
_storageKey: 'osint_layout',
_initialized: false,
// Debounce handle for save().
_saveTimeout: null,
// tileId -> saved geometry of tiles currently removed from the grid.
_hiddenTiles: {},
// Default geometry per tile on the 12-column grid.
DEFAULT_LAYOUT: [
{ id: 'lagebild', x: 0, y: 0, w: 6, h: 4, minW: 4, minH: 4 },
{ id: 'faktencheck', x: 6, y: 0, w: 6, h: 4, minW: 4, minH: 4 },
{ id: 'quellen', x: 0, y: 4, w: 12, h: 2, minW: 6, minH: 2 },
{ id: 'timeline', x: 0, y: 5, w: 12, h: 4, minW: 6, minH: 4 },
],
// tileId -> CSS selector of the card element rendered inside that tile.
TILE_MAP: {
lagebild: '.incident-analysis-summary',
faktencheck: '.incident-analysis-factcheck',
quellen: '.source-overview-card',
timeline: '.timeline-card',
},
/**
 * Initialize GridStack on '.grid-stack', apply any saved layout and
 * show the layout toolbar. Safe to call repeatedly (no-op once done).
 */
init() {
if (this._initialized) return;
const container = document.querySelector('.grid-stack');
if (!container) return;
this._grid = GridStack.init({
column: 12,
cellHeight: 80,
margin: 12,
animate: true,
handle: '.card-header',
float: false,
disableOneColumnMode: true,
}, container);
const saved = this._load();
if (saved) {
this._applyLayout(saved);
}
this._grid.on('change', () => this._debouncedSave());
const toolbar = document.getElementById('layout-toolbar');
if (toolbar) toolbar.style.display = 'flex';
this._syncToggles();
this._initialized = true;
},
/**
 * Apply a persisted layout: position visible tiles, remove hidden
 * ones from the grid and remember their geometry in _hiddenTiles.
 */
_applyLayout(layout) {
if (!this._grid) return;
this._hiddenTiles = {};
layout.forEach(item => {
const el = this._grid.engine.nodes.find(n => n.el && n.el.getAttribute('gs-id') === item.id);
if (!el) return;
if (item.visible === false) {
this._hiddenTiles[item.id] = item;
this._grid.removeWidget(el.el, true, false);
} else {
this._grid.update(el.el, { x: item.x, y: item.y, w: item.w, h: item.h });
}
});
this._syncToggles();
},
/**
 * Persist the current layout (visible nodes + hidden tiles) to localStorage.
 */
save() {
if (!this._grid) return;
const items = [];
this._grid.engine.nodes.forEach(node => {
const id = node.el ? node.el.getAttribute('gs-id') : null;
if (!id) return;
items.push({
id, x: node.x, y: node.y, w: node.w, h: node.h, visible: true,
});
});
Object.keys(this._hiddenTiles).forEach(id => {
items.push({ ...this._hiddenTiles[id], visible: false });
});
try {
localStorage.setItem(this._storageKey, JSON.stringify(items));
} catch (e) { /* quota */ }
},
// Save at most once per 300 ms burst of grid 'change' events.
_debouncedSave() {
clearTimeout(this._saveTimeout);
this._saveTimeout = setTimeout(() => this.save(), 300);
},
// Load the persisted layout; returns null on missing/invalid data.
_load() {
try {
const raw = localStorage.getItem(this._storageKey);
if (!raw) return null;
const parsed = JSON.parse(raw);
if (!Array.isArray(parsed) || parsed.length === 0) return null;
return parsed;
} catch (e) {
return null;
}
},
/**
 * Toggle a tile's visibility. Hidden tiles keep their geometry in
 * _hiddenTiles; their card DOM is parked in #tile-parking so it can
 * be re-attached when the tile is shown again.
 */
toggleTile(tileId) {
if (!this._grid) return;
const selector = this.TILE_MAP[tileId];
if (!selector) return;
if (this._hiddenTiles[tileId]) {
// Show the tile again
const cfg = this._hiddenTiles[tileId];
delete this._hiddenTiles[tileId];
const cardEl = document.querySelector(selector);
if (!cardEl) return;
// Build the grid wrapper
const wrapper = document.createElement('div');
wrapper.className = 'grid-stack-item';
wrapper.setAttribute('gs-id', tileId);
wrapper.setAttribute('gs-x', cfg.x);
wrapper.setAttribute('gs-y', cfg.y);
wrapper.setAttribute('gs-w', cfg.w);
wrapper.setAttribute('gs-h', cfg.h);
wrapper.setAttribute('gs-min-w', cfg.minW || '');
wrapper.setAttribute('gs-min-h', cfg.minH || '');
const content = document.createElement('div');
content.className = 'grid-stack-item-content';
content.appendChild(cardEl);
wrapper.appendChild(content);
this._grid.addWidget(wrapper);
} else {
// Hide the tile
const node = this._grid.engine.nodes.find(
n => n.el && n.el.getAttribute('gs-id') === tileId
);
if (!node) return;
const defaults = this.DEFAULT_LAYOUT.find(d => d.id === tileId);
this._hiddenTiles[tileId] = {
id: tileId,
x: node.x, y: node.y, w: node.w, h: node.h,
minW: defaults ? defaults.minW : 4,
minH: defaults ? defaults.minH : 2,
visible: false,
};
// Rescue the card element before the widget is removed
const cardEl = node.el.querySelector(selector);
if (cardEl) {
// Park it (invisibly) inside the incident view
const parking = document.getElementById('tile-parking');
if (parking) parking.appendChild(cardEl);
}
this._grid.removeWidget(node.el, true, false);
}
this._syncToggles();
this.save();
},
// Mirror the hidden/visible state onto the toolbar toggle buttons.
_syncToggles() {
document.querySelectorAll('.layout-toggle-btn').forEach(btn => {
const tileId = btn.getAttribute('data-tile');
const isHidden = !!this._hiddenTiles[tileId];
btn.classList.toggle('active', !isHidden);
btn.setAttribute('aria-pressed', String(!isHidden));
});
},
/**
 * Reset everything to DEFAULT_LAYOUT: clear persistence, rebuild the
 * grid DOM from the rescued card elements and re-initialize.
 */
reset() {
localStorage.removeItem(this._storageKey);
// Collect the cards BEFORE the grid is destroyed (from grid + parking)
const cards = {};
Object.entries(this.TILE_MAP).forEach(([id, selector]) => {
const card = document.querySelector(selector);
if (card) cards[id] = card;
});
this._hiddenTiles = {};
if (this._grid) {
this._grid.destroy(false);
this._grid = null;
}
this._initialized = false;
const gridEl = document.querySelector('.grid-stack');
if (!gridEl) return;
// Empty the grid (cards are already rescued into the cards map)
gridEl.innerHTML = '';
// Rebuild the cards in the default layout
this.DEFAULT_LAYOUT.forEach(cfg => {
const cardEl = cards[cfg.id];
if (!cardEl) return;
const wrapper = document.createElement('div');
wrapper.className = 'grid-stack-item';
wrapper.setAttribute('gs-id', cfg.id);
wrapper.setAttribute('gs-x', cfg.x);
wrapper.setAttribute('gs-y', cfg.y);
wrapper.setAttribute('gs-w', cfg.w);
wrapper.setAttribute('gs-h', cfg.h);
wrapper.setAttribute('gs-min-w', cfg.minW);
wrapper.setAttribute('gs-min-h', cfg.minH);
const content = document.createElement('div');
content.className = 'grid-stack-item-content';
content.appendChild(cardEl);
wrapper.appendChild(content);
gridEl.appendChild(wrapper);
});
this.init();
},
/**
 * Grow/shrink a tile so its card content fits without inner scrolling.
 */
resizeTileToContent(tileId) {
if (!this._grid) return;
const node = this._grid.engine.nodes.find(
n => n.el && n.el.getAttribute('gs-id') === tileId
);
if (!node || !node.el) return;
const wrapper = node.el.querySelector('.grid-stack-item-content');
if (!wrapper) return;
const card = wrapper.firstElementChild;
if (!card) return;
const cellH = this._grid.opts.cellHeight || 80;
const margin = this._grid.opts.margin || 12;
// Temporarily lift all height constraints while measuring
node.el.classList.add('gs-measuring');
const naturalHeight = card.scrollHeight;
node.el.classList.remove('gs-measuring');
// Convert to grid units (round up + 1 as buffer)
const neededH = Math.ceil(naturalHeight / (cellH + margin)) + 1;
const minH = node.minH || 2;
const finalH = Math.max(neededH, minH);
this._grid.update(node.el, { h: finalH });
this._debouncedSave();
},
// Restore the default height for every tile currently in the grid.
resetAllTilesToDefault() {
if (!this._grid) return;
this.DEFAULT_LAYOUT.forEach(cfg => {
const node = this._grid.engine.nodes.find(
n => n.el && n.el.getAttribute('gs-id') === cfg.id
);
if (node) this._grid.update(node.el, { h: cfg.h });
});
},
// Tear down the GridStack instance and reset internal state.
destroy() {
if (this._grid) {
this._grid.destroy(false);
this._grid = null;
}
this._initialized = false;
this._hiddenTiles = {};
},
};

105
src/static/js/ws.js Normale Datei
Datei anzeigen

@@ -0,0 +1,105 @@
/**
 * WebSocket client for real-time updates.
 */
const WS = {
socket: null,
reconnectDelay: 2000,
maxReconnectDelay: 30000,
// Registered message handlers, keyed by message type ('*' = every message).
_handlers: {},
_pingInterval: null,
/**
 * Open the WebSocket connection and authenticate with the stored JWT.
 * No-op when no token is present.
 */
connect() {
const token = localStorage.getItem('osint_token');
if (!token) return;
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
const url = `${protocol}//${window.location.host}/api/ws`;
try {
this.socket = new WebSocket(url);
} catch (e) {
console.error('WebSocket-Verbindungsfehler:', e);
this._scheduleReconnect();
return;
}
this.socket.onopen = () => {
// Send the token as the first message (keeps it out of the URL)
this.socket.send(token);
};
this.socket.onmessage = (event) => {
if (event.data === 'pong') return;
if (event.data === 'authenticated') {
console.log('WebSocket verbunden');
this.reconnectDelay = 2000;
this._startPing();
return;
}
try {
const msg = JSON.parse(event.data);
this._dispatch(msg);
} catch (e) {
console.error('WebSocket Parse-Fehler:', e);
}
};
this.socket.onclose = () => {
console.log('WebSocket getrennt');
this._stopPing();
this._scheduleReconnect();
};
this.socket.onerror = () => {};
},
/**
 * Deliberately close the connection WITHOUT triggering auto-reconnect.
 */
disconnect() {
this._stopPing();
if (this.socket) {
// Detach onclose first — otherwise the close event would schedule
// a reconnect and the deliberate disconnect would silently undo itself.
this.socket.onclose = null;
this.socket.close();
this.socket = null;
}
},
/**
 * Register a handler for a message type ('*' receives every message).
 */
on(type, handler) {
if (!this._handlers[type]) {
this._handlers[type] = [];
}
this._handlers[type].push(handler);
},
// Dispatch a parsed message to its type handlers plus the '*' handlers.
_dispatch(msg) {
const handlers = this._handlers[msg.type] || [];
handlers.forEach(h => h(msg));
// Global handlers
const allHandlers = this._handlers['*'] || [];
allHandlers.forEach(h => h(msg));
},
// Keep-alive ping every 30 s while the connection is open.
_startPing() {
this._pingInterval = setInterval(() => {
if (this.socket && this.socket.readyState === WebSocket.OPEN) {
this.socket.send('ping');
}
}, 30000);
},
_stopPing() {
if (this._pingInterval) {
clearInterval(this._pingInterval);
this._pingInterval = null;
}
},
// Exponential backoff reconnect (factor 1.5, capped at maxReconnectDelay).
_scheduleReconnect() {
setTimeout(() => {
if (!this.socket || this.socket.readyState === WebSocket.CLOSED) {
this.connect();
}
}, this.reconnectDelay);
this.reconnectDelay = Math.min(this.reconnectDelay * 1.5, this.maxReconnectDelay);
},
};

272
src/static/layout.js Normale Datei
Datei anzeigen

@@ -0,0 +1,272 @@
/**
 * LayoutManager: drag & resize dashboard layout built on gridstack.js.
 * The layout is persisted to localStorage; reset() restores the default.
 * NOTE(review): near-duplicate of src/static/js/layout.js (this copy lacks
 * resetAllTilesToDefault) — confirm which file is actually served and
 * consider removing the stale one.
 */
const LayoutManager = {
// GridStack instance (null until init()).
_grid: null,
// localStorage key for the persisted layout.
_storageKey: 'osint_layout',
_initialized: false,
// Debounce handle for save().
_saveTimeout: null,
// tileId -> saved geometry of tiles currently removed from the grid.
_hiddenTiles: {},
// Default geometry per tile on the 12-column grid.
DEFAULT_LAYOUT: [
{ id: 'lagebild', x: 0, y: 0, w: 6, h: 4, minW: 4, minH: 4 },
{ id: 'faktencheck', x: 6, y: 0, w: 6, h: 4, minW: 4, minH: 4 },
{ id: 'quellen', x: 0, y: 4, w: 12, h: 2, minW: 6, minH: 2 },
{ id: 'timeline', x: 0, y: 5, w: 12, h: 4, minW: 6, minH: 4 },
],
// tileId -> CSS selector of the card element rendered inside that tile.
TILE_MAP: {
lagebild: '.incident-analysis-summary',
faktencheck: '.incident-analysis-factcheck',
quellen: '.source-overview-card',
timeline: '.timeline-card',
},
/**
 * Initialize GridStack on '.grid-stack', apply any saved layout and
 * show the layout toolbar. Safe to call repeatedly (no-op once done).
 */
init() {
if (this._initialized) return;
const container = document.querySelector('.grid-stack');
if (!container) return;
this._grid = GridStack.init({
column: 12,
cellHeight: 80,
margin: 12,
animate: true,
handle: '.card-header',
float: false,
disableOneColumnMode: true,
}, container);
const saved = this._load();
if (saved) {
this._applyLayout(saved);
}
this._grid.on('change', () => this._debouncedSave());
const toolbar = document.getElementById('layout-toolbar');
if (toolbar) toolbar.style.display = 'flex';
this._syncToggles();
this._initialized = true;
},
/**
 * Apply a persisted layout: position visible tiles, remove hidden
 * ones from the grid and remember their geometry in _hiddenTiles.
 */
_applyLayout(layout) {
if (!this._grid) return;
this._hiddenTiles = {};
layout.forEach(item => {
const el = this._grid.engine.nodes.find(n => n.el && n.el.getAttribute('gs-id') === item.id);
if (!el) return;
if (item.visible === false) {
this._hiddenTiles[item.id] = item;
this._grid.removeWidget(el.el, true, false);
} else {
this._grid.update(el.el, { x: item.x, y: item.y, w: item.w, h: item.h });
}
});
this._syncToggles();
},
/**
 * Persist the current layout (visible nodes + hidden tiles) to localStorage.
 */
save() {
if (!this._grid) return;
const items = [];
this._grid.engine.nodes.forEach(node => {
const id = node.el ? node.el.getAttribute('gs-id') : null;
if (!id) return;
items.push({
id, x: node.x, y: node.y, w: node.w, h: node.h, visible: true,
});
});
Object.keys(this._hiddenTiles).forEach(id => {
items.push({ ...this._hiddenTiles[id], visible: false });
});
try {
localStorage.setItem(this._storageKey, JSON.stringify(items));
} catch (e) { /* quota */ }
},
// Save at most once per 300 ms burst of grid 'change' events.
_debouncedSave() {
clearTimeout(this._saveTimeout);
this._saveTimeout = setTimeout(() => this.save(), 300);
},
// Load the persisted layout; returns null on missing/invalid data.
_load() {
try {
const raw = localStorage.getItem(this._storageKey);
if (!raw) return null;
const parsed = JSON.parse(raw);
if (!Array.isArray(parsed) || parsed.length === 0) return null;
return parsed;
} catch (e) {
return null;
}
},
/**
 * Toggle a tile's visibility. Hidden tiles keep their geometry in
 * _hiddenTiles; their card DOM is parked in #tile-parking so it can
 * be re-attached when the tile is shown again.
 */
toggleTile(tileId) {
if (!this._grid) return;
const selector = this.TILE_MAP[tileId];
if (!selector) return;
if (this._hiddenTiles[tileId]) {
// Show the tile again
const cfg = this._hiddenTiles[tileId];
delete this._hiddenTiles[tileId];
const cardEl = document.querySelector(selector);
if (!cardEl) return;
// Build the grid wrapper
const wrapper = document.createElement('div');
wrapper.className = 'grid-stack-item';
wrapper.setAttribute('gs-id', tileId);
wrapper.setAttribute('gs-x', cfg.x);
wrapper.setAttribute('gs-y', cfg.y);
wrapper.setAttribute('gs-w', cfg.w);
wrapper.setAttribute('gs-h', cfg.h);
wrapper.setAttribute('gs-min-w', cfg.minW || '');
wrapper.setAttribute('gs-min-h', cfg.minH || '');
const content = document.createElement('div');
content.className = 'grid-stack-item-content';
content.appendChild(cardEl);
wrapper.appendChild(content);
this._grid.addWidget(wrapper);
} else {
// Hide the tile
const node = this._grid.engine.nodes.find(
n => n.el && n.el.getAttribute('gs-id') === tileId
);
if (!node) return;
const defaults = this.DEFAULT_LAYOUT.find(d => d.id === tileId);
this._hiddenTiles[tileId] = {
id: tileId,
x: node.x, y: node.y, w: node.w, h: node.h,
minW: defaults ? defaults.minW : 4,
minH: defaults ? defaults.minH : 2,
visible: false,
};
// Rescue the card element before the widget is removed
const cardEl = node.el.querySelector(selector);
if (cardEl) {
// Park it (invisibly) inside the incident view
const parking = document.getElementById('tile-parking');
if (parking) parking.appendChild(cardEl);
}
this._grid.removeWidget(node.el, true, false);
}
this._syncToggles();
this.save();
},
// Mirror the hidden/visible state onto the toolbar toggle buttons.
_syncToggles() {
document.querySelectorAll('.layout-toggle-btn').forEach(btn => {
const tileId = btn.getAttribute('data-tile');
const isHidden = !!this._hiddenTiles[tileId];
btn.classList.toggle('active', !isHidden);
btn.setAttribute('aria-pressed', String(!isHidden));
});
},
/**
 * Reset everything to DEFAULT_LAYOUT: clear persistence, rebuild the
 * grid DOM from the rescued card elements and re-initialize.
 */
reset() {
localStorage.removeItem(this._storageKey);
// Collect the cards BEFORE the grid is destroyed (from grid + parking)
const cards = {};
Object.entries(this.TILE_MAP).forEach(([id, selector]) => {
const card = document.querySelector(selector);
if (card) cards[id] = card;
});
this._hiddenTiles = {};
if (this._grid) {
this._grid.destroy(false);
this._grid = null;
}
this._initialized = false;
const gridEl = document.querySelector('.grid-stack');
if (!gridEl) return;
// Empty the grid (cards are already rescued into the cards map)
gridEl.innerHTML = '';
// Rebuild the cards in the default layout
this.DEFAULT_LAYOUT.forEach(cfg => {
const cardEl = cards[cfg.id];
if (!cardEl) return;
const wrapper = document.createElement('div');
wrapper.className = 'grid-stack-item';
wrapper.setAttribute('gs-id', cfg.id);
wrapper.setAttribute('gs-x', cfg.x);
wrapper.setAttribute('gs-y', cfg.y);
wrapper.setAttribute('gs-w', cfg.w);
wrapper.setAttribute('gs-h', cfg.h);
wrapper.setAttribute('gs-min-w', cfg.minW);
wrapper.setAttribute('gs-min-h', cfg.minH);
const content = document.createElement('div');
content.className = 'grid-stack-item-content';
content.appendChild(cardEl);
wrapper.appendChild(content);
gridEl.appendChild(wrapper);
});
this.init();
},
/**
 * Grow/shrink a tile so its card content fits without inner scrolling.
 */
resizeTileToContent(tileId) {
if (!this._grid) return;
const node = this._grid.engine.nodes.find(
n => n.el && n.el.getAttribute('gs-id') === tileId
);
if (!node || !node.el) return;
const wrapper = node.el.querySelector('.grid-stack-item-content');
if (!wrapper) return;
const card = wrapper.firstElementChild;
if (!card) return;
const cellH = this._grid.opts.cellHeight || 80;
const margin = this._grid.opts.margin || 12;
// Temporarily lift all height constraints while measuring
node.el.classList.add('gs-measuring');
const naturalHeight = card.scrollHeight;
node.el.classList.remove('gs-measuring');
// Convert to grid units (round up + 1 as buffer)
const neededH = Math.ceil(naturalHeight / (cellH + margin)) + 1;
const minH = node.minH || 2;
const finalH = Math.max(neededH, minH);
this._grid.update(node.el, { h: finalH });
this._debouncedSave();
},
// Tear down the GridStack instance and reset internal state.
destroy() {
if (this._grid) {
this._grid.destroy(false);
this._grid = null;
}
this._initialized = false;
this._hiddenTiles = {};
},
};

3653
src/static/style.css Normale Datei

Datei-Diff unterdrückt, da er zu groß ist Diff laden