From d0f99f4e5b1d75bff34032dacc92e8f0cbeb59e6 Mon Sep 17 00:00:00 2001
From: Claude Dev
Date: Wed, 25 Mar 2026 23:50:57 +0100
Subject: [PATCH] Export: Klassifizierung (offen/dienstgebrauch/vertraulich)
komplett entfernt
---
src/report_generator.py | 22 +-
src/report_generator.py.bak | 485 ++++
src/report_templates/report.html | 12 -
src/report_templates/report.html.bak | 199 ++
src/routers/incidents.py | 5 +-
src/routers/incidents.py.bak | 708 ++++++
src/static/dashboard.html | 8 -
src/static/dashboard.html.bak | 700 ++++++
src/static/js/api.js | 4 +-
src/static/js/api.js.bak | 237 ++
src/static/js/app.js | 3 +-
src/static/js/app.js.bak | 3298 ++++++++++++++++++++++++++
12 files changed, 5634 insertions(+), 47 deletions(-)
create mode 100644 src/report_generator.py.bak
create mode 100644 src/report_templates/report.html.bak
create mode 100644 src/routers/incidents.py.bak
create mode 100644 src/static/dashboard.html.bak
create mode 100644 src/static/js/api.js.bak
create mode 100644 src/static/js/app.js.bak
diff --git a/src/report_generator.py b/src/report_generator.py
index f472f5a..1a161ec 100644
--- a/src/report_generator.py
+++ b/src/report_generator.py
@@ -22,11 +22,6 @@ logger = logging.getLogger("osint.report")
TEMPLATE_DIR = Path(__file__).parent / "report_templates"
LOGO_PATH = Path(__file__).parent / "static" / "favicon.svg"
-CLASSIFICATION_LABELS = {
- "offen": "Offen",
- "dienstgebrauch": "Nur für den Dienstgebrauch",
- "vertraulich": "Vertraulich",
-}
FC_STATUS_LABELS = {
"confirmed": "Bestätigt",
@@ -250,7 +245,7 @@ LAGEBILD:
async def generate_pdf(
incident: dict, articles: list, fact_checks: list, snapshots: list,
- scope: str, classification: str, creator: str, executive_summary_html: str,
+ scope: str, creator: str, executive_summary_html: str,
) -> bytes:
"""PDF-Report via WeasyPrint generieren."""
env = Environment(loader=FileSystemLoader(str(TEMPLATE_DIR)))
@@ -262,8 +257,6 @@ async def generate_pdf(
html_content = template.render(
incident=incident,
incident_type_label=incident_type_label,
- classification=classification,
- classification_label=CLASSIFICATION_LABELS.get(classification, classification),
report_date=now.strftime("%d.%m.%Y, %H:%M Uhr"),
creator=creator,
logo_base64=_get_logo_base64(),
@@ -298,7 +291,7 @@ async def generate_pdf(
async def generate_docx(
incident: dict, articles: list, fact_checks: list, snapshots: list,
- scope: str, classification: str, creator: str, executive_summary_text: str,
+ scope: str, creator: str, executive_summary_text: str,
) -> bytes:
"""Word-Report via python-docx generieren."""
doc = Document()
@@ -342,17 +335,6 @@ async def generate_docx(
run.font.color.rgb = RGBColor(0x66, 0x66, 0x66)
doc.add_paragraph()
-
- # Klassifizierung
- class_label = CLASSIFICATION_LABELS.get(classification, classification)
- class_para = doc.add_paragraph()
- class_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
- run = class_para.add_run(f"— {class_label} —")
- run.font.size = Pt(11)
- run.font.bold = True
- colors = {"offen": RGBColor(0x22, 0xc5, 0x5e), "dienstgebrauch": RGBColor(0xf0, 0xb4, 0x29), "vertraulich": RGBColor(0xef, 0x44, 0x44)}
- run.font.color.rgb = colors.get(classification, RGBColor(0x88, 0x88, 0x88))
-
for _ in range(3):
doc.add_paragraph()
diff --git a/src/report_generator.py.bak b/src/report_generator.py.bak
new file mode 100644
index 0000000..f472f5a
--- /dev/null
+++ b/src/report_generator.py.bak
@@ -0,0 +1,485 @@
+"""Report-Generator: PDF und Word Berichte aus Lage-Daten."""
+import base64
+import io
+import json
+import logging
+import re
+from collections import defaultdict
+from datetime import datetime
+from pathlib import Path
+
+from jinja2 import Environment, FileSystemLoader
+from weasyprint import HTML
+from docx import Document
+from docx.shared import Inches, Pt, Cm, RGBColor
+from docx.enum.text import WD_ALIGN_PARAGRAPH
+from docx.enum.table import WD_TABLE_ALIGNMENT
+
+from config import TIMEZONE, CLAUDE_MODEL_FAST
+
+logger = logging.getLogger("osint.report")
+
+TEMPLATE_DIR = Path(__file__).parent / "report_templates"
+LOGO_PATH = Path(__file__).parent / "static" / "favicon.svg"
+
+CLASSIFICATION_LABELS = {
+ "offen": "Offen",
+ "dienstgebrauch": "Nur für den Dienstgebrauch",
+ "vertraulich": "Vertraulich",
+}
+
+FC_STATUS_LABELS = {
+ "confirmed": "Bestätigt",
+ "unconfirmed": "Unbestätigt",
+ "disputed": "Umstritten",
+ "false": "Falsch",
+}
+
+
+def _get_logo_base64() -> str:
+ """Logo als Base64 für HTML-Embedding."""
+ try:
+ return base64.b64encode(LOGO_PATH.read_bytes()).decode()
+ except Exception:
+ return ""
+
+
+def _prepare_sources(incident: dict) -> list:
+ """Quellenverzeichnis aus sources_json parsen."""
+ raw = incident.get("sources_json")
+ if not raw:
+ return []
+ try:
+ return json.loads(raw) if isinstance(raw, str) else raw
+ except (json.JSONDecodeError, TypeError):
+ return []
+
+
+def _prepare_source_stats(articles: list) -> list:
+ """Quellenstatistik: Artikel pro Quelle + Sprachen."""
+ source_map = defaultdict(lambda: {"count": 0, "langs": set()})
+ for art in articles:
+ name = art.get("source") or "Unbekannt"
+ source_map[name]["count"] += 1
+ source_map[name]["langs"].add((art.get("language") or "de").upper())
+ stats = []
+ for name, data in sorted(source_map.items(), key=lambda x: -x[1]["count"]):
+ stats.append({"name": name, "count": data["count"], "languages": ", ".join(sorted(data["langs"]))})
+ return stats
+
+
+def _prepare_fact_checks(fact_checks: list) -> list:
+ """Faktenchecks mit Label aufbereiten."""
+ result = []
+ for fc in fact_checks:
+ fc_copy = dict(fc)
+ fc_copy["status_label"] = FC_STATUS_LABELS.get(fc.get("status", ""), fc.get("status", "Unbekannt"))
+ result.append(fc_copy)
+ return result
+
+
+def _prepare_timeline(articles: list) -> list:
+ """Timeline aus Artikeln: sortiert nach Datum."""
+ timeline = []
+ for art in articles:
+ pub = art.get("published_at") or art.get("collected_at") or ""
+ headline = art.get("headline_de") or art.get("headline") or "Ohne Titel"
+ source = art.get("source") or ""
+ if pub:
+ try:
+ dt = datetime.fromisoformat(pub.replace("Z", "+00:00"))
+ date_str = dt.strftime("%d.%m.%Y %H:%M")
+ except Exception:
+ date_str = pub[:16]
+ else:
+ date_str = ""
+ timeline.append({"date": date_str, "headline": headline, "source": source, "sort_key": pub})
+ timeline.sort(key=lambda x: x["sort_key"], reverse=True)
+ return timeline[:100] # Max 100 Einträge
+
+
+def _markdown_to_html(text: str) -> str:
+ """Einfache Markdown -> HTML Konvertierung für Lagebild."""
+ if not text:
+        return "<p>Kein Lagebild verfügbar.</p>"
+ # Basic Markdown -> HTML
+ html = text
+ # Headlines
+    html = re.sub(r'^### (.+)$', r'<h3>\1</h3>', html, flags=re.MULTILINE)
+    html = re.sub(r'^## (.+)$', r'<h2>\1</h2>', html, flags=re.MULTILINE)
+    # Bold
+    html = re.sub(r'\*\*(.+?)\*\*', r'<strong>\1</strong>', html)
+    # Links [text](url)
+    html = re.sub(r'\[([^\]]+)\]\(([^)]+)\)', r'<a href="\2">\1</a>', html)
+    # Bullet lists
+    html = re.sub(r'^- (.+)$', r'<li>\1</li>', html, flags=re.MULTILINE)
+    html = re.sub(r'(<li>.*</li>\n?)+', lambda m: '<ul>' + m.group(0) + '</ul>', html)
+ # Paragraphs
+ paragraphs = html.split('\n\n')
+ result = []
+ for p in paragraphs:
+ p = p.strip()
+ if not p:
+ continue
+        if p.startswith('<'):
+            result.append(p)
+        else:
+            result.append(f'<p>{p}</p>')
+    return '\n'.join(result)
+
+
+def _truncate_lagebild(summary_text: str, max_chars: int = 4000) -> str:
+ """Lagebild für den Lagebericht auf die Zusammenfassung kürzen.
+
+ Nimmt nur den ersten Abschnitt (bis zur zweiten H2/H3-Überschrift)
+ oder kürzt auf max_chars Zeichen mit sauberem Abbruch am Absatzende.
+ """
+ if not summary_text or len(summary_text) <= max_chars:
+ return summary_text
+
+ lines = summary_text.split("\n")
+ result_lines = []
+ heading_count = 0
+ char_count = 0
+
+ for line in lines:
+ stripped = line.strip()
+ # Zähle Überschriften (## oder ###)
+ if stripped.startswith("## ") or stripped.startswith("### "):
+ heading_count += 1
+ # Nach der 3. Überschrift abbrechen (= 2 Abschnitte)
+ if heading_count > 3:
+ break
+
+ result_lines.append(line)
+ char_count += len(line) + 1
+
+ # Hard-Limit bei max_chars, aber am Absatzende abbrechen
+ if char_count > max_chars and stripped == "":
+ break
+
+ text = "\n".join(result_lines).rstrip()
+ if len(text) < len(summary_text) - 100:
+ text += "\n\n*[Vollständiges Lagebild im Vollständigen Bericht]*"
+ return text
+
+
+def _strip_citation_numbers(text: str) -> str:
+ """Entfernt [1234]-Quellenreferenzen aus dem Text."""
+ # Einzelne Referenzen: [1302]
+ text = re.sub(r"\s*\[\d{1,5}\]", "", text)
+ # Mehrfach-Referenzen: [725][765][768]
+ text = re.sub(r"(\[\d{1,5}\]){2,}", "", text)
+ # Aufräumen: Doppelte Leerzeichen
+ text = re.sub(r" +", " ", text)
+ return text
+
+
+
+async def generate_executive_summary(summary_text: str) -> str:
+ """KI-verdichtetes Executive Summary aus dem Lagebild."""
+ if not summary_text or len(summary_text.strip()) < 50:
+ return "
diff --git a/src/report_templates/report.html.bak b/src/report_templates/report.html.bak
new file mode 100644
index 0000000..50da798
--- /dev/null
+++ b/src/report_templates/report.html.bak
@@ -0,0 +1,199 @@
+
+
+
+
+
+
+
+
+
+
+
{{ incident_type_label }}
+
{{ incident.title }}
+
+
{{ classification_label }}
+
+
AegisSight Monitor
+
+
+
+
{{ classification_label }}
+
+
+
+
Executive Summary
+
+ {{ executive_summary | safe }}
+
+
+
+{% if scope in ('report', 'full') %}
+
+{% if fact_checks %}
+
+
Faktencheck
+
+ Behauptung Status Quellen
+
+ {% for fc in fact_checks %}
+
+ {{ fc.claim or '' }}
+ {{ fc.status_label }}
+ {{ fc.sources_count or 0 }}
+
+ {% endfor %}
+
+
+
+{% endif %}
+
+
+{% if source_stats %}
+
+
Quellenstatistik
+
+ Quelle Artikel Sprache
+
+ {% for stat in source_stats %}
+ {{ stat.name }} {{ stat.count }} {{ stat.languages }}
+ {% endfor %}
+
+
+
+
+
+
Lagebild
+ {% if lagebild_timestamp %}
Aktualisiert: {{ lagebild_timestamp }}
{% endif %}
+
{{ lagebild_html | safe }}
+
+
+
+{% if sources %}
+
+
Quellenverzeichnis
+
+ # Quelle URL
+
+ {% for src in sources %}
+ {{ loop.index }} {{ src.name or src.title or '' }} {{ src.url or '' }}
+ {% endfor %}
+
+
+
+{% endif %}
+
+{% endif %}
+{% endif %}
+
+{% if scope == 'full' %}
+
+{% if timeline %}
+
+
Ereignis-Timeline
+ {% for event in timeline %}
+
+
{{ event.date }}
+
{{ event.headline }}
+
{{ event.source }}
+
+ {% endfor %}
+
+{% endif %}
+
+
+{% if articles %}
+
+
Artikelverzeichnis ({{ articles | length }} Artikel)
+
+ Headline Quelle Sprache Datum
+
+ {% for art in articles %}
+
+ {{ art.headline_de or art.headline or 'Ohne Titel' }}
+ {{ art.source or '' }}
+ {{ (art.language or 'de') | upper }}
+ {{ art.pub_date }}
+
+ {% endfor %}
+
+
+
+{% endif %}
+{% endif %}
+
+
+
+
diff --git a/src/routers/incidents.py b/src/routers/incidents.py
index fed0d20..8b2d8b6 100644
--- a/src/routers/incidents.py
+++ b/src/routers/incidents.py
@@ -635,7 +635,6 @@ async def export_incident(
incident_id: int,
format: str = Query("pdf", pattern="^(pdf|docx)$"),
scope: str = Query("report", pattern="^(summary|report|full)$"),
- classification: str = Query("offen", pattern="^(offen|dienstgebrauch|vertraulich)$"),
current_user: dict = Depends(get_current_user),
db: aiosqlite.Connection = Depends(db_dependency),
):
@@ -690,7 +689,7 @@ async def export_incident(
scope_labels = {"summary": "executive_summary", "report": "lagebericht", "full": "vollstaendig"}
if format == "pdf":
- pdf_bytes = await generate_pdf(incident, articles, fact_checks, snapshots, scope, classification, creator, exec_summary)
+ pdf_bytes = await generate_pdf(incident, articles, fact_checks, snapshots, scope, creator, exec_summary)
filename = f"{slug}_{scope_labels[scope]}_{date_str}.pdf"
return StreamingResponse(
io.BytesIO(pdf_bytes),
@@ -698,7 +697,7 @@ async def export_incident(
         headers={"Content-Disposition": f'attachment; filename="{filename}"'},
)
else:
- docx_bytes = await generate_docx(incident, articles, fact_checks, snapshots, scope, classification, creator, exec_summary)
+ docx_bytes = await generate_docx(incident, articles, fact_checks, snapshots, scope, creator, exec_summary)
filename = f"{slug}_{scope_labels[scope]}_{date_str}.docx"
return StreamingResponse(
io.BytesIO(docx_bytes),
diff --git a/src/routers/incidents.py.bak b/src/routers/incidents.py.bak
new file mode 100644
index 0000000..fed0d20
--- /dev/null
+++ b/src/routers/incidents.py.bak
@@ -0,0 +1,708 @@
+"""Incidents-Router: Lagen verwalten (Multi-Tenant)."""
+from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query, status
+from fastapi.responses import StreamingResponse
+from models import IncidentCreate, IncidentUpdate, IncidentResponse, SubscriptionUpdate, SubscriptionResponse
+from auth import get_current_user
+from middleware.license_check import require_writable_license
+from database import db_dependency, get_db
+from datetime import datetime
+from config import TIMEZONE
+import asyncio
+import aiosqlite
+import io
+import json
+import logging
+import re
+import unicodedata
+
+_geoparse_logger = logging.getLogger("osint.geoparse_bg")
+
+router = APIRouter(prefix="/api/incidents", tags=["incidents"])
+
+INCIDENT_UPDATE_COLUMNS = {
+ "title", "description", "type", "status", "refresh_mode",
+ "refresh_interval", "retention_days", "international_sources", "include_telegram", "visibility",
+}
+
+
+async def _check_incident_access(
+ db: aiosqlite.Connection, incident_id: int, user_id: int, tenant_id: int
+) -> aiosqlite.Row:
+ """Lage laden und Zugriff pruefen (Tenant + Sichtbarkeit)."""
+ cursor = await db.execute(
+ "SELECT * FROM incidents WHERE id = ? AND tenant_id = ?",
+ (incident_id, tenant_id),
+ )
+ row = await cursor.fetchone()
+ if not row:
+ raise HTTPException(status_code=404, detail="Lage nicht gefunden")
+ if row["visibility"] == "private" and row["created_by"] != user_id:
+ raise HTTPException(status_code=403, detail="Kein Zugriff auf private Lage")
+ return row
+
+
+async def _enrich_incident(db: aiosqlite.Connection, row: aiosqlite.Row) -> dict:
+ """Incident-Row mit Statistiken und Ersteller-Name anreichern."""
+ incident = dict(row)
+ cursor = await db.execute(
+ "SELECT COUNT(*) as cnt FROM articles WHERE incident_id = ?",
+ (incident["id"],),
+ )
+ article_count = (await cursor.fetchone())["cnt"]
+
+ cursor = await db.execute(
+ "SELECT COUNT(DISTINCT source) as cnt FROM articles WHERE incident_id = ?",
+ (incident["id"],),
+ )
+ source_count = (await cursor.fetchone())["cnt"]
+
+ cursor = await db.execute(
+ "SELECT email FROM users WHERE id = ?",
+ (incident["created_by"],),
+ )
+ user_row = await cursor.fetchone()
+
+ incident["article_count"] = article_count
+ incident["source_count"] = source_count
+ incident["created_by_username"] = user_row["email"] if user_row else "Unbekannt"
+
+ return incident
+
+
+@router.get("", response_model=list[IncidentResponse])
+async def list_incidents(
+ status_filter: str = None,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Alle Lagen des Tenants auflisten (oeffentliche + eigene private)."""
+ tenant_id = current_user.get("tenant_id")
+ user_id = current_user["id"]
+
+ query = "SELECT * FROM incidents WHERE tenant_id = ? AND (visibility = 'public' OR created_by = ?)"
+ params = [tenant_id, user_id]
+
+ if status_filter:
+ query += " AND status = ?"
+ params.append(status_filter)
+
+ query += " ORDER BY updated_at DESC"
+ cursor = await db.execute(query, params)
+ rows = await cursor.fetchall()
+
+ results = []
+ for row in rows:
+ results.append(await _enrich_incident(db, row))
+ return results
+
+
+@router.post("", response_model=IncidentResponse, status_code=status.HTTP_201_CREATED)
+async def create_incident(
+ data: IncidentCreate,
+ current_user: dict = Depends(require_writable_license),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Neue Lage anlegen."""
+ tenant_id = current_user.get("tenant_id")
+ now = datetime.now(TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')
+ cursor = await db.execute(
+ """INSERT INTO incidents (title, description, type, refresh_mode, refresh_interval,
+ retention_days, international_sources, include_telegram, visibility,
+ tenant_id, created_by, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+ (
+ data.title,
+ data.description,
+ data.type,
+ data.refresh_mode,
+ data.refresh_interval,
+ data.retention_days,
+ 1 if data.international_sources else 0,
+ 1 if data.include_telegram else 0,
+ data.visibility,
+ tenant_id,
+ current_user["id"],
+ now,
+ now,
+ ),
+ )
+ await db.commit()
+
+ cursor = await db.execute("SELECT * FROM incidents WHERE id = ?", (cursor.lastrowid,))
+ row = await cursor.fetchone()
+ return await _enrich_incident(db, row)
+
+
+@router.get("/refreshing")
+async def get_refreshing_incidents(
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Gibt IDs aller Lagen mit laufendem Refresh zurueck (nur eigener Tenant)."""
+ tenant_id = current_user.get("tenant_id")
+ cursor = await db.execute(
+ """SELECT rl.incident_id, rl.started_at FROM refresh_log rl
+ JOIN incidents i ON i.id = rl.incident_id
+ WHERE rl.status = 'running'
+ AND i.tenant_id = ?
+ AND (i.visibility = 'public' OR i.created_by = ?)""",
+ (tenant_id, current_user["id"]),
+ )
+ rows = await cursor.fetchall()
+ return {
+ "refreshing": [row["incident_id"] for row in rows],
+ "details": {str(row["incident_id"]): {"started_at": row["started_at"]} for row in rows},
+ }
+
+
+@router.get("/{incident_id}", response_model=IncidentResponse)
+async def get_incident(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Einzelne Lage abrufen."""
+ tenant_id = current_user.get("tenant_id")
+ row = await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ return await _enrich_incident(db, row)
+
+
+@router.put("/{incident_id}", response_model=IncidentResponse)
+async def update_incident(
+ incident_id: int,
+ data: IncidentUpdate,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Lage aktualisieren."""
+ tenant_id = current_user.get("tenant_id")
+ row = await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+
+ updates = {}
+ for field, value in data.model_dump(exclude_none=True).items():
+ if field not in INCIDENT_UPDATE_COLUMNS:
+ continue
+ if field in ("international_sources", "include_telegram"):
+ updates[field] = 1 if value else 0
+ else:
+ updates[field] = value
+
+ if not updates:
+ return await _enrich_incident(db, row)
+
+ updates["updated_at"] = datetime.now(TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')
+ set_clause = ", ".join(f"{k} = ?" for k in updates)
+ values = list(updates.values()) + [incident_id]
+
+ await db.execute(f"UPDATE incidents SET {set_clause} WHERE id = ?", values)
+ await db.commit()
+
+ cursor = await db.execute("SELECT * FROM incidents WHERE id = ?", (incident_id,))
+ row = await cursor.fetchone()
+ return await _enrich_incident(db, row)
+
+
+@router.delete("/{incident_id}", status_code=status.HTTP_204_NO_CONTENT)
+async def delete_incident(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Lage loeschen (nur Ersteller)."""
+ tenant_id = current_user.get("tenant_id")
+ cursor = await db.execute(
+ "SELECT id, created_by FROM incidents WHERE id = ? AND tenant_id = ?",
+ (incident_id, tenant_id),
+ )
+ incident = await cursor.fetchone()
+ if not incident:
+ raise HTTPException(status_code=404, detail="Lage nicht gefunden")
+
+ if incident["created_by"] != current_user["id"]:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Nur der Ersteller kann diese Lage loeschen",
+ )
+
+ try:
+ await db.execute("DELETE FROM incidents WHERE id = ?", (incident_id,))
+ await db.commit()
+ except Exception as e:
+ if "database is locked" in str(e):
+ raise HTTPException(
+ status_code=status.HTTP_409_CONFLICT,
+ detail="Datenbank ist momentan beschaeftigt. Bitte in wenigen Sekunden erneut versuchen.",
+ )
+ raise
+
+
+@router.get("/{incident_id}/articles")
+async def get_articles(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Alle Artikel einer Lage abrufen."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ cursor = await db.execute(
+ "SELECT * FROM articles WHERE incident_id = ? ORDER BY collected_at DESC",
+ (incident_id,),
+ )
+ rows = await cursor.fetchall()
+ return [dict(row) for row in rows]
+
+
+@router.get("/{incident_id}/snapshots")
+async def get_snapshots(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Lageberichte (Snapshots) einer Lage abrufen."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ cursor = await db.execute(
+ """SELECT id, incident_id, summary, sources_json,
+ article_count, fact_check_count, created_at
+ FROM incident_snapshots WHERE incident_id = ?
+ ORDER BY created_at DESC""",
+ (incident_id,),
+ )
+ rows = await cursor.fetchall()
+ return [dict(row) for row in rows]
+
+
+@router.get("/{incident_id}/factchecks")
+async def get_factchecks(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Faktenchecks einer Lage abrufen."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ cursor = await db.execute(
+ "SELECT * FROM fact_checks WHERE incident_id = ? ORDER BY checked_at DESC",
+ (incident_id,),
+ )
+ rows = await cursor.fetchall()
+ return [dict(row) for row in rows]
+
+
+@router.get("/{incident_id}/locations")
+async def get_locations(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Geografische Orte einer Lage abrufen (aggregiert nach Ort)."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ cursor = await db.execute(
+ """SELECT al.location_name, al.location_name_normalized, al.country_code,
+ al.latitude, al.longitude, al.confidence, al.category,
+ a.id as article_id, a.headline, a.headline_de, a.source, a.source_url
+ FROM article_locations al
+ JOIN articles a ON a.id = al.article_id
+ WHERE al.incident_id = ?
+ ORDER BY al.location_name_normalized, a.collected_at DESC""",
+ (incident_id,),
+ )
+ rows = await cursor.fetchall()
+
+ # Aggregierung nach normalisiertem Ortsnamen + Koordinaten
+ loc_map = {}
+ for row in rows:
+ row = dict(row)
+ key = (row["location_name_normalized"] or row["location_name"], round(row["latitude"], 2), round(row["longitude"], 2))
+ if key not in loc_map:
+ loc_map[key] = {
+ "location_name": row["location_name_normalized"] or row["location_name"],
+ "lat": row["latitude"],
+ "lon": row["longitude"],
+ "country_code": row["country_code"],
+ "confidence": row["confidence"],
+ "article_count": 0,
+ "articles": [],
+ "categories": {},
+ }
+ loc_map[key]["article_count"] += 1
+ cat = row["category"] or "mentioned"
+ loc_map[key]["categories"][cat] = loc_map[key]["categories"].get(cat, 0) + 1
+ # Maximal 10 Artikel pro Ort mitliefern
+ if len(loc_map[key]["articles"]) < 10:
+ loc_map[key]["articles"].append({
+ "id": row["article_id"],
+ "headline": row["headline_de"] or row["headline"],
+ "source": row["source"],
+ "source_url": row["source_url"],
+ })
+
+ # Dominanteste Kategorie pro Ort bestimmen (Prioritaet: primary > secondary > tertiary > mentioned)
+ priority = {"primary": 4, "secondary": 3, "tertiary": 2, "mentioned": 1}
+ result = []
+ for loc in loc_map.values():
+ cats = loc.pop("categories")
+ if cats:
+ best_cat = max(cats, key=lambda c: (priority.get(c, 0), cats[c]))
+ else:
+ best_cat = "mentioned"
+ loc["category"] = best_cat
+ result.append(loc)
+
+ # Category-Labels aus Incident laden
+ cursor = await db.execute(
+ "SELECT category_labels FROM incidents WHERE id = ?", (incident_id,)
+ )
+ inc_row = await cursor.fetchone()
+ category_labels = None
+ if inc_row and inc_row["category_labels"]:
+ try:
+ category_labels = json.loads(inc_row["category_labels"])
+ except (json.JSONDecodeError, TypeError):
+ pass
+
+ return {"category_labels": category_labels, "locations": result}
+
+
+# Geoparse-Status pro Incident (in-memory)
+_geoparse_status: dict[int, dict] = {}
+
+
+async def _run_geoparse_background(incident_id: int, tenant_id: int | None):
+ """Hintergrund-Task: Geoparsing fuer alle Artikel einer Lage."""
+ _geoparse_status[incident_id] = {"status": "running", "processed": 0, "total": 0, "locations": 0}
+ db = None
+ try:
+ from agents.geoparsing import geoparse_articles
+ db = await get_db()
+
+ # Incident-Kontext fuer Haiku laden
+ cursor = await db.execute(
+ "SELECT title, description FROM incidents WHERE id = ?", (incident_id,)
+ )
+ inc_row = await cursor.fetchone()
+ incident_context = ""
+ if inc_row:
+ incident_context = f"{inc_row['title']} - {inc_row['description'] or ''}"
+
+ cursor = await db.execute(
+ """SELECT a.* FROM articles a
+ WHERE a.incident_id = ?
+ AND a.id NOT IN (SELECT DISTINCT article_id FROM article_locations WHERE incident_id = ?)""",
+ (incident_id, incident_id),
+ )
+ articles = [dict(row) for row in await cursor.fetchall()]
+
+ if not articles:
+ _geoparse_status[incident_id] = {"status": "done", "processed": 0, "total": 0, "locations": 0}
+ return
+
+ total = len(articles)
+ _geoparse_status[incident_id]["total"] = total
+ _geoparse_logger.info(f"Geoparsing Hintergrund: {total} Artikel fuer Lage {incident_id}")
+
+ # In Batches verarbeiten (50 Artikel pro Batch)
+ batch_size = 50
+ geo_count = 0
+ processed = 0
+ for i in range(0, total, batch_size):
+ batch = articles[i:i + batch_size]
+ geo_result = await geoparse_articles(batch, incident_context)
+ # Tuple-Rückgabe: (locations_dict, category_labels)
+ if isinstance(geo_result, tuple):
+ batch_geo_results, batch_labels = geo_result
+ # Labels beim ersten Batch speichern
+ if batch_labels and i == 0:
+ try:
+ await db.execute(
+ "UPDATE incidents SET category_labels = ? WHERE id = ? AND category_labels IS NULL",
+ (json.dumps(batch_labels, ensure_ascii=False), incident_id),
+ )
+ await db.commit()
+ except Exception:
+ pass
+ else:
+ batch_geo_results = geo_result
+ for art_id, locations in batch_geo_results.items():
+ for loc in locations:
+ await db.execute(
+ """INSERT INTO article_locations
+ (article_id, incident_id, location_name, location_name_normalized,
+ country_code, latitude, longitude, confidence, source_text, tenant_id, category)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+ (art_id, incident_id, loc["location_name"], loc["location_name_normalized"],
+ loc["country_code"], loc["lat"], loc["lon"], loc["confidence"],
+ loc.get("source_text", ""), tenant_id, loc.get("category", "mentioned")),
+ )
+ geo_count += 1
+ await db.commit()
+ processed += len(batch)
+ _geoparse_status[incident_id] = {"status": "running", "processed": processed, "total": total, "locations": geo_count}
+
+ _geoparse_status[incident_id] = {"status": "done", "processed": processed, "total": total, "locations": geo_count}
+ _geoparse_logger.info(f"Geoparsing fertig: {geo_count} Orte aus {processed} Artikeln (Lage {incident_id})")
+ except Exception as e:
+ _geoparse_status[incident_id] = {"status": "error", "error": str(e)}
+ _geoparse_logger.error(f"Geoparsing Fehler (Lage {incident_id}): {e}")
+ finally:
+ if db:
+ await db.close()
+
+
+@router.post("/{incident_id}/geoparse")
+async def trigger_geoparse(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Geoparsing fuer alle Artikel einer Lage als Hintergrund-Task starten."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+
+ # Bereits laufend?
+ existing = _geoparse_status.get(incident_id, {})
+ if existing.get("status") == "running":
+ return {"status": "running", "message": f"Läuft bereits ({existing.get('processed', 0)}/{existing.get('total', 0)} Artikel)"}
+
+ # Pruefen ob es ueberhaupt ungeparsete Artikel gibt
+ cursor = await db.execute(
+ """SELECT COUNT(*) as cnt FROM articles a
+ WHERE a.incident_id = ?
+ AND a.id NOT IN (SELECT DISTINCT article_id FROM article_locations WHERE incident_id = ?)""",
+ (incident_id, incident_id),
+ )
+ count = (await cursor.fetchone())["cnt"]
+ if count == 0:
+ return {"status": "done", "message": "Alle Artikel wurden bereits verarbeitet", "locations": 0}
+
+ # Hintergrund-Task starten
+ asyncio.create_task(_run_geoparse_background(incident_id, tenant_id))
+ return {"status": "started", "message": f"Geoparsing gestartet für {count} Artikel"}
+
+
+@router.get("/{incident_id}/geoparse-status")
+async def get_geoparse_status(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Status des laufenden Geoparsing-Tasks abfragen."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ return _geoparse_status.get(incident_id, {"status": "idle"})
+
+
+@router.get("/{incident_id}/refresh-log")
+async def get_refresh_log(
+ incident_id: int,
+ limit: int = 20,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """Refresh-Verlauf einer Lage abrufen."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ cursor = await db.execute(
+ """SELECT id, started_at, completed_at, articles_found, status,
+ trigger_type, retry_count, error_message
+ FROM refresh_log WHERE incident_id = ?
+ ORDER BY started_at DESC LIMIT ?""",
+ (incident_id, min(limit, 100)),
+ )
+ rows = await cursor.fetchall()
+ results = []
+ for row in rows:
+ entry = dict(row)
+ if entry["started_at"] and entry["completed_at"]:
+ try:
+ start = datetime.fromisoformat(entry["started_at"])
+ end = datetime.fromisoformat(entry["completed_at"])
+ entry["duration_seconds"] = round((end - start).total_seconds(), 1)
+ except Exception:
+ entry["duration_seconds"] = None
+ else:
+ entry["duration_seconds"] = None
+ results.append(entry)
+ return results
+
+
+@router.get("/{incident_id}/subscription", response_model=SubscriptionResponse)
+async def get_subscription(
+ incident_id: int,
+ current_user: dict = Depends(get_current_user),
+ db: aiosqlite.Connection = Depends(db_dependency),
+):
+ """E-Mail-Abo-Einstellungen des aktuellen Nutzers fuer eine Lage abrufen."""
+ tenant_id = current_user.get("tenant_id")
+ await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
+ cursor = await db.execute(
+ """SELECT notify_email_summary, notify_email_new_articles, notify_email_status_change
+ FROM incident_subscriptions WHERE user_id = ? AND incident_id = ?""",
+ (current_user["id"], incident_id),
+ )
+ row = await cursor.fetchone()
+ if row:
+ return dict(row)
+ return {"notify_email_summary": False, "notify_email_new_articles": False, "notify_email_status_change": False}
+
+
@router.put("/{incident_id}/subscription", response_model=SubscriptionResponse)
async def update_subscription(
    incident_id: int,
    data: SubscriptionUpdate,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Create or update the current user's e-mail subscription for an incident."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )
    # Flags are persisted as SQLite integers; the upsert keeps exactly one
    # row per (user, incident) pair.
    flags = (
        int(data.notify_email_summary),
        int(data.notify_email_new_articles),
        int(data.notify_email_status_change),
    )
    await db.execute(
        """INSERT INTO incident_subscriptions (user_id, incident_id, notify_email_summary, notify_email_new_articles, notify_email_status_change)
        VALUES (?, ?, ?, ?, ?)
        ON CONFLICT(user_id, incident_id) DO UPDATE SET
            notify_email_summary = excluded.notify_email_summary,
            notify_email_new_articles = excluded.notify_email_new_articles,
            notify_email_status_change = excluded.notify_email_status_change""",
        (current_user["id"], incident_id) + flags,
    )
    await db.commit()
    # Echo the stored settings back as booleans (matches the response model).
    return {
        "notify_email_summary": data.notify_email_summary,
        "notify_email_new_articles": data.notify_email_new_articles,
        "notify_email_status_change": data.notify_email_status_change,
    }
+
+
@router.post("/{incident_id}/refresh")
async def trigger_refresh(
    incident_id: int,
    current_user: dict = Depends(require_writable_license),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Queue a manual data refresh for an incident."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )

    # Local import — presumably avoids a circular import at module load.
    from agents.orchestrator import orchestrator

    accepted = await orchestrator.enqueue_refresh(incident_id, user_id=current_user["id"])
    # "skipped" signals the orchestrator declined to enqueue (e.g. already running).
    return {"status": "queued" if accepted else "skipped", "incident_id": incident_id}
+
+
@router.post("/{incident_id}/cancel-refresh")
async def cancel_refresh(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Abort a currently running refresh for an incident."""
    await _check_incident_access(
        db, incident_id, current_user["id"], current_user.get("tenant_id")
    )

    # Local import — presumably avoids a circular import at module load.
    from agents.orchestrator import orchestrator

    if await orchestrator.cancel_refresh(incident_id):
        return {"status": "cancelling"}
    return {"status": "not_running"}
+
+
+
+def _slugify(text: str) -> str:
+ """Dateinamen-sicherer Slug aus Titel."""
+ replacements = {
+ "\u00e4": "ae", "\u00f6": "oe", "\u00fc": "ue", "\u00df": "ss",
+ "\u00c4": "Ae", "\u00d6": "Oe", "\u00dc": "Ue",
+ }
+ for src, dst in replacements.items():
+ text = text.replace(src, dst)
+ text = unicodedata.normalize("NFKD", text)
+ text = re.sub(r"[^\w\s-]", "", text)
+ text = re.sub(r"[\s_]+", "-", text).strip("-")
+ return text[:80].lower()
+
+
@router.get("/{incident_id}/export")
async def export_incident(
    incident_id: int,
    format: str = Query("pdf", pattern="^(pdf|docx)$"),
    scope: str = Query("report", pattern="^(summary|report|full)$"),
    classification: str = Query("offen", pattern="^(offen|dienstgebrauch|vertraulich)$"),
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Export an incident as PDF or Word (DOCX).

    Collects the incident, its articles, fact checks and (for scope="full")
    snapshots, ensures a cached AI-generated executive summary exists, and
    streams the rendered document back as a file download.
    """
    # Imported lazily: report generation pulls in heavy dependencies.
    from report_generator import generate_pdf, generate_docx, generate_executive_summary

    tenant_id = current_user.get("tenant_id")
    row = await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
    incident = dict(row)

    # Creator name for the report header (fallback when the user row is gone).
    cursor = await db.execute("SELECT email FROM users WHERE id = ?", (incident["created_by"],))
    user_row = await cursor.fetchone()
    creator = user_row["email"] if user_row else "Unbekannt"

    # Articles
    cursor = await db.execute(
        "SELECT * FROM articles WHERE incident_id = ? ORDER BY collected_at DESC",
        (incident_id,),
    )
    articles = [dict(r) for r in await cursor.fetchall()]

    # Fact checks
    cursor = await db.execute(
        "SELECT * FROM fact_checks WHERE incident_id = ? ORDER BY checked_at DESC",
        (incident_id,),
    )
    fact_checks = [dict(r) for r in await cursor.fetchall()]

    # Snapshots are only included in the full export.
    snapshots = []
    if scope == "full":
        cursor = await db.execute(
            "SELECT * FROM incident_snapshots WHERE incident_id = ? ORDER BY created_at DESC",
            (incident_id,),
        )
        snapshots = [dict(r) for r in await cursor.fetchall()]

    # Executive summary is AI-generated once, then cached on the incident row.
    exec_summary = incident.get("executive_summary")
    if not exec_summary:
        summary_text = incident.get("summary") or ""
        exec_summary = await generate_executive_summary(summary_text)
        await db.execute(
            "UPDATE incidents SET executive_summary = ? WHERE id = ?",
            (exec_summary, incident_id),
        )
        await db.commit()

    date_str = datetime.now(TIMEZONE).strftime("%Y%m%d")
    slug = _slugify(incident["title"])
    scope_labels = {"summary": "executive_summary", "report": "lagebericht", "full": "vollstaendig"}

    if format == "pdf":
        pdf_bytes = await generate_pdf(incident, articles, fact_checks, snapshots, scope, classification, creator, exec_summary)
        filename = f"{slug}_{scope_labels[scope]}_{date_str}.pdf"
        return StreamingResponse(
            io.BytesIO(pdf_bytes),
            media_type="application/pdf",
            # BUG FIX: the computed filename was dead and the header carried a
            # literal placeholder string; interpolate the real filename so the
            # browser saves the download under a meaningful name.
            headers={"Content-Disposition": f'attachment; filename="{filename}"'},
        )
    else:
        docx_bytes = await generate_docx(incident, articles, fact_checks, snapshots, scope, classification, creator, exec_summary)
        filename = f"{slug}_{scope_labels[scope]}_{date_str}.docx"
        return StreamingResponse(
            io.BytesIO(docx_bytes),
            media_type="application/vnd.openxmlformats-officedocument.wordprocessingml.document",
            # Same fix as the PDF branch: use the computed filename.
            headers={"Content-Disposition": f'attachment; filename="{filename}"'},
        )
+
diff --git a/src/static/dashboard.html b/src/static/dashboard.html
index d3b85fe..442441e 100644
--- a/src/static/dashboard.html
+++ b/src/static/dashboard.html
@@ -680,14 +680,6 @@
PDF
Word (DOCX)
-