Analysepipeline: Visualisierung der Refresh-Schritte
Neuer Tab "Analysepipeline" zwischen Faktencheck und Quellenuebersicht.
Zeigt 9 Verarbeitungsschritte als n8n-artige Blockkette: Quellen sichten,
Nachrichten sammeln, Doppeltes filtern, Relevanz bewerten, Orte erkennen,
Lagebild verfassen, Fakten pruefen, Qualitaetscheck, Benachrichtigen.
- Backend: refresh_pipeline_steps-Tabelle persistiert pro Refresh+Pass die
Status- und Zahlen-Werte. pipeline_tracker.py kapselt Start/Done/Skip/Error
inkl. WebSocket-Broadcast (Event-Typ pipeline_step). 9 Hooks im Orchestrator
speisen die Anzeige.
- API: GET /api/incidents/{id}/pipeline liefert Definition + letzten Stand
(Zahlen aus letztem Refresh, Multi-Pass-Konsolidierung).
- Frontend: pipeline.js rendert Vollbild-Blockkette mit pulsierendem Glow am
aktiven Block, animierten Pfeilen bei Datenfluss, Haekchen am fertigen Block.
Hover-Tooltip mit Erklaerung in Nutzersprache, Klick oeffnet Detail-Popup.
Bei Research-Lagen leuchtet ein Schleifen-Pfeil pro Mehrfach-Durchlauf auf.
Mini-Variante (nur Icons) im Refresh-Progress-Popup.
- CSS: Light/Dark-Theme-fest, dezenter Circuit-Hintergrund (5% Opacity),
Mobile-vertikale Stapelung unter 900px, prefers-reduced-motion respektiert.
- Uebersprungene Schritte (z.B. Geoparsing ohne neue Artikel) werden
ausgeblendet, brandneue Lagen ohne Refresh zeigen Hinweis.
Tooltips bewusst in normaler Sprache ohne Internas (keine Modellnamen,
keine Toolnamen, keine Phasen-Labels).
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
Dieser Commit ist enthalten in:
@@ -613,6 +613,98 @@ async def get_factchecks(
|
||||
return [dict(row) for row in rows]
|
||||
|
||||
|
||||
@router.get("/{incident_id}/pipeline")
async def get_pipeline(
    incident_id: int,
    current_user: dict = Depends(get_current_user),
    db: aiosqlite.Connection = Depends(db_dependency),
):
    """Return the incident's analysis-pipeline state.

    Combines the static definition of all pipeline steps with the recorded
    state of the latest (or currently running) refresh for this incident.

    Response:
        {
            "is_research": bool,
            "is_running": bool,
            "last_refresh": {started_at, completed_at, duration_sec, status, pass_total} | null,
            "steps_definition": [{key, label, icon, tooltip}, ...],
            "steps": [{step_key, status, count_value, count_secondary, pass_number}, ...]
        }

    Raises whatever ``_check_incident_access`` raises when the current user
    may not see this incident.
    """
    # Imported lazily to avoid a module-level import cycle with the tracker.
    from services.pipeline_tracker import PIPELINE_STEPS

    tenant_id = current_user.get("tenant_id")
    incident_row = await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
    is_research = (incident_row["type"] or "adhoc") == "research"

    # Pick the most recent refresh log: prefer a running one, otherwise
    # fall back to the latest refresh of any status.
    cursor = await db.execute(
        """SELECT id, started_at, completed_at, status, retry_count
           FROM refresh_log
           WHERE incident_id = ? AND status = 'running'
           ORDER BY started_at DESC LIMIT 1""",
        (incident_id,),
    )
    row = await cursor.fetchone()
    if not row:
        cursor = await db.execute(
            """SELECT id, started_at, completed_at, status, retry_count
               FROM refresh_log
               WHERE incident_id = ?
               ORDER BY started_at DESC LIMIT 1""",
            (incident_id,),
        )
        row = await cursor.fetchone()

    last_refresh = None
    steps = []
    is_running = False
    if row:
        is_running = row["status"] == "running"
        # Load the per-step records persisted for this refresh, ordered so
        # multi-pass runs render pass 1 first and insertion order within a pass.
        sc = await db.execute(
            """SELECT step_key, pass_number, status, count_value, count_secondary,
                      started_at, completed_at
               FROM refresh_pipeline_steps
               WHERE refresh_log_id = ?
               ORDER BY pass_number ASC, id ASC""",
            (row["id"],),
        )
        steps = [dict(r) for r in await sc.fetchall()]

        # Pass total: for research incidents with multi-pass data this is the
        # highest pass number seen; defaults to a single pass.
        max_pass = 1
        for s in steps:
            if s["pass_number"] and s["pass_number"] > max_pass:
                max_pass = s["pass_number"]

        # Duration is best-effort and only meaningful once completed_at is set.
        # strptime raises ValueError/TypeError on malformed or non-string
        # timestamps; anything else should surface rather than be swallowed.
        duration_sec = None
        try:
            if row["started_at"] and row["completed_at"]:
                t0 = datetime.strptime(row["started_at"], "%Y-%m-%d %H:%M:%S")
                t1 = datetime.strptime(row["completed_at"], "%Y-%m-%d %H:%M:%S")
                duration_sec = max(0, int((t1 - t0).total_seconds()))
        except (ValueError, TypeError):
            duration_sec = None

        last_refresh = {
            "started_at": row["started_at"],
            "completed_at": row["completed_at"],
            "status": row["status"],
            "duration_sec": duration_sec,
            "pass_total": max_pass,
        }

    return {
        "is_research": is_research,
        "is_running": is_running,
        "last_refresh": last_refresh,
        "steps_definition": PIPELINE_STEPS,
        "steps": steps,
    }
|
||||
|
||||
|
||||
@router.get("/{incident_id}/locations")
|
||||
async def get_locations(
|
||||
incident_id: int,
|
||||
|
||||
In neuem Issue referenzieren
Einen Benutzer sperren