Commits vergleichen
58 Commits
b3bc96c580
...
main
| Autor | SHA1 | Datum | |
|---|---|---|---|
|
|
e83f80dbe9 | ||
|
|
d71daee581 | ||
| 1e9cca2555 | |||
|
|
f4c0c930b8 | ||
| 03ee30a83e | |||
|
|
f73c21235e | ||
|
|
cbfb608471 | ||
|
|
9078489d0a | ||
|
|
e517de7404 | ||
| 07c3fed9c8 | |||
| 24d7500152 | |||
|
|
f0fe35b279 | ||
|
|
fb6e9fff19 | ||
| 6a24d0b51d | |||
|
|
b1a0e97a34 | ||
|
|
77797f6027 | ||
|
|
dc51ecafe8 | ||
|
|
31fa17465a | ||
| eaffd70575 | |||
|
|
2a654cc882 | ||
|
|
6293cef91e | ||
| 46864c5457 | |||
|
|
a6f36be9c6 | ||
| 1f4d7b1837 | |||
|
|
98c9da64b0 | ||
|
|
307f0a1868 | ||
| d7711711aa | |||
|
|
430541f49b | ||
|
|
74d76d2e50 | ||
|
|
ee83f38edf | ||
| 0775a475a4 | |||
| 2b1e8c3632 | |||
| b1f8113207 | |||
| 8b8e31e3cd | |||
| 26fac0e824 | |||
| 62c0be64ee | |||
| 8c4ef6b2cf | |||
| 4a2d85d3b8 | |||
| ad5b723d79 | |||
| 51615cae62 | |||
| a2610d0094 | |||
| d24205841f | |||
| a08df3d121 | |||
| 0a6208c289 | |||
| b9985b8e35 | |||
| 19038472cf | |||
| 462127dc52 | |||
| 34aeb04a88 | |||
| b14fe31f42 | |||
| ffb8dddc4f | |||
|
|
0edbf7e3b8 | ||
|
|
de01ab71fc | ||
|
|
86a49e082c | ||
|
|
221b21cb4e | ||
| 30cb276ec6 | |||
| cae9c5467a | |||
| 58eb1298ca | |||
| 370bb94b26 |
@@ -1,4 +1,13 @@
|
||||
[
|
||||
{
|
||||
"version": "2026-05-03T15:21Z",
|
||||
"date": "2026-05-03",
|
||||
"title": "Übersichtlichere Navigation in der Seitenleiste",
|
||||
"items": [
|
||||
"Schaltflächen in der Seitenleiste haben jetzt klarere Icons und kürzere Beschriftungen",
|
||||
"Der Feedback-Button zeigt nun ein Brief-Symbol für bessere Erkennbarkeit"
|
||||
]
|
||||
},
|
||||
{
|
||||
"version": "2026-04-30T23:12Z",
|
||||
"date": "2026-04-30",
|
||||
|
||||
@@ -11,4 +11,8 @@ python-multipart
|
||||
aiosmtplib
|
||||
geonamescache>=2.0
|
||||
telethon
|
||||
# Bericht-Export (PDF via WeasyPrint + DOCX via python-docx)
|
||||
Jinja2>=3.1
|
||||
weasyprint>=68.0
|
||||
python-docx>=1.2
|
||||
pikepdf>=9.0
|
||||
|
||||
@@ -47,7 +47,6 @@ Antworte AUSSCHLIESSLICH als JSON-Objekt mit diesen Feldern:
|
||||
- "summary": Zusammenfassung auf {output_language} mit Quellenverweisen [1], [2] etc. im Text (Markdown-Überschriften ## erlaubt wenn sinnvoll, aber KEINE "## ZUSAMMENFASSUNG"/"## ÜBERBLICK"-Sektion)
|
||||
- "sources": Array von Quellenobjekten, je: {{"nr": 1, "name": "Quellenname", "url": "https://..."}}
|
||||
- "key_facts": Array von bestätigten Kernfakten (Strings, in Ausgabesprache)
|
||||
- "translations": Array von Objekten mit "article_id", "headline_de", "content_de" (nur für fremdsprachige Artikel)
|
||||
|
||||
Antworte NUR mit dem JSON-Objekt. Keine Einleitung, keine Erklärung."""
|
||||
|
||||
@@ -102,7 +101,6 @@ Antworte AUSSCHLIESSLICH als JSON-Objekt mit diesen Feldern:
|
||||
- "summary": Das strukturierte Briefing als Markdown-Text mit Quellenverweisen [1], [2] etc.
|
||||
- "sources": Array von Quellenobjekten, je: {{"nr": 1, "name": "Quellenname", "url": "https://..."}}
|
||||
- "key_facts": Array von gesicherten Kernfakten (Strings, in Ausgabesprache)
|
||||
- "translations": Array von Objekten mit "article_id", "headline_de", "content_de" (nur für fremdsprachige Artikel)
|
||||
|
||||
Antworte NUR mit dem JSON-Objekt. Keine Einleitung, keine Erklärung."""
|
||||
|
||||
@@ -149,7 +147,6 @@ Antworte AUSSCHLIESSLICH als JSON-Objekt mit diesen Feldern:
|
||||
- "summary": Aktualisierte Zusammenfassung mit Quellenverweisen [1], [2] etc.
|
||||
- "sources": Array mit NUR den NEUEN Quellen aus den neuen Meldungen, je: {{"nr": <fortlaufende ganze Zahl, KEINE Buchstaben-Suffixe>, "name": "Quellenname", "url": "https://..."}}. Alte Quellen werden automatisch gemerged.
|
||||
- "key_facts": Array aller aktuellen Kernfakten (in Ausgabesprache)
|
||||
- "translations": Array von Objekten mit "article_id", "headline_de", "content_de" (nur für neue fremdsprachige Artikel)
|
||||
|
||||
Antworte NUR mit dem JSON-Objekt. Keine Einleitung, keine Erklärung."""
|
||||
|
||||
@@ -201,7 +198,6 @@ Antworte AUSSCHLIESSLICH als JSON-Objekt mit diesen Feldern:
|
||||
- "summary": Das aktualisierte Briefing als Markdown-Text mit Quellenverweisen
|
||||
- "sources": Array mit NUR den NEUEN Quellen aus den neuen Meldungen, je: {{"nr": <fortlaufende ganze Zahl, KEINE Buchstaben-Suffixe>, "name": "Quellenname", "url": "https://..."}}. Alte Quellen werden automatisch gemerged.
|
||||
- "key_facts": Array aller gesicherten Kernfakten (in Ausgabesprache)
|
||||
- "translations": Array von Objekten mit "article_id", "headline_de", "content_de" (nur für neue fremdsprachige Artikel)
|
||||
|
||||
Antworte NUR mit dem JSON-Objekt. Keine Einleitung, keine Erklärung."""
|
||||
|
||||
@@ -796,5 +792,5 @@ class AnalyzerAgent:
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
|
||||
return {"summary": summary, "sources": sources, "key_facts": [], "translations": []}
|
||||
return {"summary": summary, "sources": sources, "key_facts": []}
|
||||
|
||||
|
||||
@@ -21,15 +21,21 @@ from source_rules import (
|
||||
|
||||
logger = logging.getLogger("osint.orchestrator")
|
||||
|
||||
# Reputations-Score nach Quellenkategorie (für Relevanz-Scoring)
|
||||
# Reputations-Score nach Quellenkategorie (fuer Relevanz-Scoring).
|
||||
# Keys muessen mit den tatsaechlichen DB-Werten in sources.category uebereinstimmen
|
||||
# (siehe DOMAIN_CATEGORY_MAP in source_rules.py).
|
||||
CATEGORY_REPUTATION = {
|
||||
"nachrichten_de": 0.9,
|
||||
"nachrichten_int": 0.9,
|
||||
"presseagenturen": 1.0,
|
||||
"behoerden": 1.0,
|
||||
"fachmedien": 0.8,
|
||||
"international": 0.7,
|
||||
"sonstige": 0.4,
|
||||
"nachrichtenagentur": 1.0, # Reuters, AP, dpa, AFP — Primärquellen
|
||||
"behoerde": 1.0, # BMI, BSI, Europol — offizielle Quellen
|
||||
"oeffentlich-rechtlich": 0.95, # tagesschau, ZDF, ARD, BBC, ORF
|
||||
"qualitaetszeitung": 0.85, # Spiegel, Zeit, FAZ, NZZ, Süddeutsche
|
||||
"think-tank": 0.85, # SWP, IISS, Brookings, Chatham House
|
||||
"fachmedien": 0.8, # heise, golem, netzpolitik, Handelsblatt
|
||||
"international": 0.75, # CNN, Guardian, NYT, Al Jazeera, France24
|
||||
"regional": 0.65, # regionale Tageszeitungen
|
||||
"telegram": 0.5, # OSINT-Kanaele — gemischte Qualitaet
|
||||
"sonstige": 0.4, # unkategorisiert
|
||||
"boulevard": 0.3, # Bild, Sun etc.
|
||||
}
|
||||
|
||||
# Research-Modus: Automatisch 3 Durchläufe für optimale Ergebnisse
|
||||
@@ -483,6 +489,9 @@ class AgentOrchestrator:
|
||||
|
||||
logger.info(f"Lage {incident_id} aus Warteschlange entfernt (removed={removed})")
|
||||
|
||||
# refresh_log-Eintrag schreiben, damit Auto-Refresh nicht im naechsten Tick erneut einreiht
|
||||
await self._log_queued_cancellation(incident_id)
|
||||
|
||||
# Send cancelled event
|
||||
if self._ws_manager:
|
||||
try:
|
||||
@@ -618,18 +627,56 @@ class AgentOrchestrator:
|
||||
self._queue.task_done()
|
||||
|
||||
async def _mark_refresh_cancelled(self, incident_id: int):
|
||||
"""Markiert den laufenden Refresh-Log-Eintrag als cancelled."""
|
||||
"""Markiert den laufenden Refresh-Log-Eintrag als cancelled und schliesst
|
||||
alle noch aktiven Pipeline-Schritte. Ohne den zweiten Schritt blieb der
|
||||
zuletzt aktive Step-Eintrag verwaist und das Frontend zeigte dauerhaft
|
||||
'Schritt X laeuft', weil /api/incidents/<id>/pipeline aus
|
||||
refresh_pipeline_steps liest."""
|
||||
from database import get_db
|
||||
from services.pipeline_tracker import cancel_active_steps
|
||||
db = await get_db()
|
||||
try:
|
||||
now_str = datetime.now(TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')
|
||||
cur = await db.execute(
|
||||
"SELECT id FROM refresh_log WHERE incident_id = ? AND status = 'running'",
|
||||
(incident_id,),
|
||||
)
|
||||
row = await cur.fetchone()
|
||||
refresh_log_id = row["id"] if row else None
|
||||
|
||||
await db.execute(
|
||||
"""UPDATE refresh_log SET status = 'cancelled', error_message = 'Vom Nutzer abgebrochen',
|
||||
completed_at = ? WHERE incident_id = ? AND status = 'running'""",
|
||||
(datetime.now(TIMEZONE).strftime('%Y-%m-%d %H:%M:%S'), incident_id),
|
||||
(now_str, incident_id),
|
||||
)
|
||||
await db.commit()
|
||||
|
||||
if refresh_log_id is not None:
|
||||
await cancel_active_steps(db, refresh_log_id=refresh_log_id)
|
||||
except Exception as e:
|
||||
logger.warning(f"Konnte Refresh-Log nicht als abgebrochen markieren: {e}")
|
||||
finally:
|
||||
await db.close()
|
||||
|
||||
async def _log_queued_cancellation(self, incident_id: int):
|
||||
"""Schreibt einen cancelled-Eintrag fuer einen Queue-Abbruch (Lage war noch nicht laufend).
|
||||
Verhindert, dass der Auto-Refresh-Scheduler im naechsten Tick sofort wieder einreiht."""
|
||||
from database import get_db
|
||||
db = await get_db()
|
||||
try:
|
||||
cur = await db.execute("SELECT tenant_id FROM incidents WHERE id = ?", (incident_id,))
|
||||
row = await cur.fetchone()
|
||||
tid = row["tenant_id"] if row else None
|
||||
now_str = datetime.now(TIMEZONE).strftime("%Y-%m-%d %H:%M:%S")
|
||||
await db.execute(
|
||||
"""INSERT INTO refresh_log (incident_id, started_at, completed_at, status,
|
||||
trigger_type, error_message, tenant_id)
|
||||
VALUES (?, ?, ?, 'cancelled', 'manual', 'Aus Warteschlange entfernt', ?)""",
|
||||
(incident_id, now_str, now_str, tid),
|
||||
)
|
||||
await db.commit()
|
||||
except Exception as e:
|
||||
logger.warning(f"Konnte Refresh-Log nicht als abgebrochen markieren: {e}")
|
||||
logger.warning(f"Konnte Queue-Cancel nicht in refresh_log loggen: {e}")
|
||||
finally:
|
||||
await db.close()
|
||||
|
||||
@@ -844,7 +891,7 @@ class AgentOrchestrator:
|
||||
return articles, feed_usage
|
||||
|
||||
async def _web_search_pipeline():
|
||||
"""Claude WebSearch-Recherche."""
|
||||
"""Claude WebSearch-Recherche mit Vorselektion eingetragener Web-Quellen."""
|
||||
researcher = ResearcherAgent()
|
||||
# Bestehende Artikel als Kontext mitgeben (Research + Adhoc)
|
||||
existing_for_context = None
|
||||
@@ -855,13 +902,31 @@ class AgentOrchestrator:
|
||||
"source_url": row["source_url"]}
|
||||
for row in existing_db_articles_full
|
||||
]
|
||||
|
||||
# Web-Quellen vorselektieren (Haiku) — nur thematisch passende werden Claude im Prompt empfohlen
|
||||
preferred_sources = []
|
||||
try:
|
||||
from source_rules import get_feeds_with_metadata
|
||||
web_sources = await get_feeds_with_metadata(tenant_id=tenant_id, source_type="web_source")
|
||||
if web_sources:
|
||||
preferred_sources, web_sel_usage = await researcher.select_relevant_web_sources(
|
||||
title, description, web_sources,
|
||||
)
|
||||
if web_sel_usage:
|
||||
usage_acc.add(web_sel_usage)
|
||||
except Exception as e:
|
||||
logger.warning(f"Web-Source-Vorselektion fehlgeschlagen (Pipeline laeuft weiter): {e}")
|
||||
preferred_sources = []
|
||||
|
||||
results, usage, parse_failed = await researcher.search(
|
||||
title, description, incident_type,
|
||||
international=international, user_id=user_id,
|
||||
existing_articles=existing_for_context,
|
||||
preferred_sources=preferred_sources,
|
||||
)
|
||||
logger.info(
|
||||
f"Claude-Recherche: {len(results)} Ergebnisse"
|
||||
+ (f" (mit {len(preferred_sources)} Web-Quellen-Hinweis)" if preferred_sources else "")
|
||||
+ (" (Parser fehlgeschlagen)" if parse_failed else "")
|
||||
)
|
||||
return results, usage, parse_failed
|
||||
@@ -1386,20 +1451,64 @@ class AgentOrchestrator:
|
||||
snap_articles, snap_fcs, log_id, now, tenant_id),
|
||||
)
|
||||
|
||||
# Übersetzungen aktualisieren (nur für gültige DB-IDs)
|
||||
for translation in analysis.get("translations", []):
|
||||
article_id = translation.get("article_id")
|
||||
if isinstance(article_id, int):
|
||||
await db.execute(
|
||||
"UPDATE articles SET headline_de = ?, content_de = ? WHERE id = ? AND incident_id = ?",
|
||||
(translation.get("headline_de"), translation.get("content_de"), article_id, incident_id),
|
||||
)
|
||||
# Translations werden vom dedizierten Translator-Agent unten
|
||||
# erzeugt (frueher inline im Analyzer-Output, das war token-
|
||||
# instabil und schaetzte regelmaessig content_de aus).
|
||||
|
||||
await db.commit()
|
||||
|
||||
# Cancel-Check nach paralleler Verarbeitung
|
||||
self._check_cancelled(incident_id)
|
||||
|
||||
# --- Translator (Haiku) fuer fremdsprachige Artikel ohne DE-Texte ---
|
||||
# Idempotent: nur Artikel ohne headline_de/content_de werden geholt.
|
||||
# Lauft nach der Analyse (Lagebild ist schon committed) und vor QC
|
||||
# (damit normalize_umlaut_articles auch die frischen DE-Texte fasst).
|
||||
try:
|
||||
tr_cursor = await db.execute(
|
||||
"""SELECT id, headline, content_original, language
|
||||
FROM articles
|
||||
WHERE incident_id = ?
|
||||
AND language IS NOT NULL AND LOWER(language) != 'de'
|
||||
AND (headline_de IS NULL OR headline_de = ''
|
||||
OR content_de IS NULL OR content_de = '')""",
|
||||
(incident_id,),
|
||||
)
|
||||
pending_translations = [dict(r) for r in await tr_cursor.fetchall()]
|
||||
if pending_translations:
|
||||
logger.info(
|
||||
"Translator fuer Incident %d: %d Artikel ohne DE-Uebersetzung",
|
||||
incident_id, len(pending_translations),
|
||||
)
|
||||
from agents.translator import translate_articles
|
||||
from services.post_refresh_qc import normalize_german_umlauts as _norm_de2
|
||||
translations = await translate_articles(
|
||||
pending_translations,
|
||||
output_lang="de",
|
||||
usage_accumulator=usage_acc,
|
||||
)
|
||||
for t in translations:
|
||||
hd = t.get("headline_de")
|
||||
cd = t.get("content_de")
|
||||
if hd:
|
||||
hd, _ = _norm_de2(hd)
|
||||
if cd:
|
||||
cd, _ = _norm_de2(cd)
|
||||
if hd or cd:
|
||||
await db.execute(
|
||||
"UPDATE articles SET headline_de = COALESCE(?, headline_de), "
|
||||
"content_de = COALESCE(?, content_de) WHERE id = ? AND incident_id = ?",
|
||||
(hd, cd, t["id"], incident_id),
|
||||
)
|
||||
await db.commit()
|
||||
logger.info(
|
||||
"Translator fuer Incident %d: %d/%d Artikel uebersetzt",
|
||||
incident_id, len(translations), len(pending_translations),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Translator-Fehler fuer Incident %d: %s", incident_id, e, exc_info=True)
|
||||
# Refresh trotz Translator-Fehler weiterlaufen lassen
|
||||
|
||||
# --- Neueste Entwicklungen (nur Live-Monitoring / adhoc) ---
|
||||
# Basis ist jetzt das frisch generierte Lagebild (autoritativ, thematisch sauber).
|
||||
# Zeitstempel und Quellen kommen aus den jüngsten belegenden Artikeln.
|
||||
|
||||
@@ -69,7 +69,7 @@ WICHTIG: Verwende IMMER echte UTF-8-Umlaute (ä, ö, ü, ß) — NIEMALS Umschre
|
||||
AUFTRAG: Suche nach aktuellen Informationen zu folgendem Vorfall:
|
||||
Titel: {title}
|
||||
Kontext: {description}
|
||||
{existing_context}
|
||||
{existing_context}{preferred_sources_block}
|
||||
REGELN:
|
||||
- Suche nur bei seriösen Nachrichtenquellen (Nachrichtenagenturen, Qualitätszeitungen, öffentlich-rechtliche Medien, Behörden)
|
||||
- KEIN Social Media (Twitter/X, Facebook, Instagram, TikTok, Reddit)
|
||||
@@ -100,7 +100,7 @@ WICHTIG: Verwende IMMER echte UTF-8-Umlaute (ä, ö, ü, ß) — NIEMALS Umschre
|
||||
AUFTRAG: Führe eine umfassende, mehrstufige Hintergrundrecherche durch zu:
|
||||
Titel: {title}
|
||||
Kontext: {description}
|
||||
{existing_context}
|
||||
{existing_context}{preferred_sources_block}
|
||||
RECHERCHE IN 4 PHASEN — Führe ALLE Phasen nacheinander durch:
|
||||
|
||||
PHASE 1 — BREITE ERFASSUNG:
|
||||
@@ -199,19 +199,45 @@ AKTUELLE HEADLINES (die letzten Meldungen zu diesem Thema):
|
||||
|
||||
AUFGABE:
|
||||
Generiere 5 Begriffspaare (DE + EN), mit denen neue RSS-Artikel zu diesem Thema gefunden werden.
|
||||
Ein Artikel gilt als relevant, wenn mindestens 2 dieser Begriffe im Titel oder der Beschreibung vorkommen.
|
||||
Ein Artikel gilt als relevant, wenn mindestens 2 dieser Begriffe im Titel oder der Beschreibung vorkommen
|
||||
- bei spezifischen Begriffen (Eigennamen, lange Begriffe ab 7 Zeichen) reicht 1 Treffer.
|
||||
|
||||
REGELN:
|
||||
- Die ersten 2 Begriffspaare MUESSEN die zentralen Akteure/Laender/Themen sein (z.B. iran, israel, usa) — also die Begriffe, die in fast JEDEM Artikel zum Thema vorkommen
|
||||
- Die letzten 3 Begriffspaare sind aktuelle Entwicklungen aus den Headlines (Orte, Akteure, Schluesselwoerter der aktuellen Phase)
|
||||
- Begriffe muessen so gewaehlt sein, dass sie in kurzen RSS-Titeln matchen (einzelne Woerter, keine Phrasen)
|
||||
- Alle Begriffe in Kleinbuchstaben
|
||||
- Exakt 5 Begriffspaare
|
||||
- ZWINGEND: Eigennamen oder spezifische Begriffe aus dem THEMA (z.B. Personennamen, Tiernamen,
|
||||
Ortsnamen wie "timmy", "buckelwal", "merz", "dobrindt") MUESSEN als eigene Begriffspaare
|
||||
enthalten sein. Solche Begriffe sind oft das einzige, was in kurzen Headlines vorkommt.
|
||||
- Die ersten 2 Begriffspaare sind die zentralen Akteure/Laender/Themen (z.B. iran, israel,
|
||||
buckelwal, timmy) — also die Begriffe, die in fast JEDEM Artikel zum Thema vorkommen.
|
||||
- Die uebrigen 3 Begriffspaare sind aktuelle Entwicklungen aus den Headlines (Orte, Akteure,
|
||||
Schluesselwoerter der aktuellen Phase).
|
||||
- Wenn DE und EN identisch sind (Eigennamen), trotzdem das Paar einreichen.
|
||||
- Begriffe muessen so gewaehlt sein, dass sie in kurzen RSS-Titeln matchen (einzelne Woerter,
|
||||
keine Phrasen, keine Konjunktionen).
|
||||
- Alle Begriffe in Kleinbuchstaben.
|
||||
- Exakt 5 Begriffspaare.
|
||||
|
||||
Antwort NUR als JSON-Array:
|
||||
[{{"de": "iran", "en": "iran"}}, {{"de": "israel", "en": "israel"}}, {{"de": "teheran", "en": "tehran"}}, {{"de": "luftangriff", "en": "airstrike"}}, {{"de": "trump", "en": "trump"}}]"""
|
||||
|
||||
|
||||
WEB_SOURCE_SELECTION_PROMPT = """Du bist ein OSINT-Analyst. Pruefe diese eingetragenen Web-Quellen und waehle nur die thematisch passenden aus.
|
||||
|
||||
LAGE: {title}
|
||||
KONTEXT: {description}
|
||||
|
||||
WEB-QUELLEN:
|
||||
{source_list}
|
||||
|
||||
REGELN:
|
||||
- Waehle nur Quellen, die thematisch tatsaechlich zur Lage passen
|
||||
- Lieber leere Liste zurueckgeben als pauschal alle aufnehmen
|
||||
- Behoerden- und institutionelle Quellen sind oft hochwertig, aber nur wenn das Thema passt
|
||||
- Petitions-Plattformen z.B. nur bei Lagen zu Buergerinitiativen, Gesetzen, oeffentlichem Druck
|
||||
- Bei reinen Kriegs-/Konflikt-/Tagesnachrichten meistens leere Liste
|
||||
|
||||
Antworte NUR mit einem JSON-Array der Quellen-Nummern, z.B. [1, 3] oder []."""
|
||||
|
||||
|
||||
TELEGRAM_CHANNEL_SELECTION_PROMPT = """Du bist ein OSINT-Analyst. Waehle aus dieser Liste von Telegram-Kanaelen diejenigen aus, die fuer die Lage relevant sein koennten.
|
||||
|
||||
LAGE: {title}
|
||||
@@ -347,6 +373,17 @@ class ResearcherAgent:
|
||||
if en and en != de:
|
||||
keywords.append(en)
|
||||
|
||||
# Bug-2-Fallback: Lagentitel-Wörter (>=4 Zeichen) zwingend in Keyword-Liste,
|
||||
# falls Haiku sie weggelassen hat. Verhindert "Buckelwal timmy"-Bug, bei dem
|
||||
# der Eigenname "timmy" fehlte und damit Headlines mit nur "Buckelwal" durchfielen.
|
||||
STOPWORDS = {"der", "die", "das", "und", "oder", "von", "vom", "zum", "zur",
|
||||
"the", "and", "for", "with", "ueber", "über", "von", "for"}
|
||||
for word in (title or "").lower().split():
|
||||
w = word.strip(".,;:!?\"\'()[]{}")
|
||||
if len(w) >= 4 and w not in STOPWORDS and w not in keywords:
|
||||
keywords.append(w)
|
||||
logger.info(f"Lagentitel-Keyword '{w}' nachträglich injiziert")
|
||||
|
||||
if keywords:
|
||||
logger.info(f"Dynamische Keywords ({len(keywords)}): {keywords}")
|
||||
return keywords if keywords else None, usage
|
||||
@@ -355,7 +392,7 @@ class ResearcherAgent:
|
||||
logger.warning(f"Keyword-Extraktion fehlgeschlagen: {e}")
|
||||
return None, None
|
||||
|
||||
async def search(self, title: str, description: str = "", incident_type: str = "adhoc", international: bool = True, user_id: int = None, existing_articles: list[dict] = None) -> tuple[list[dict], ClaudeUsage | None, bool]:
|
||||
async def search(self, title: str, description: str = "", incident_type: str = "adhoc", international: bool = True, user_id: int = None, existing_articles: list[dict] = None, preferred_sources: list[dict] = None) -> tuple[list[dict], ClaudeUsage | None, bool]:
|
||||
"""Sucht nach Informationen zu einem Vorfall.
|
||||
|
||||
Returns:
|
||||
@@ -364,6 +401,26 @@ class ResearcherAgent:
|
||||
"echt keine Treffer" und "kaputte Antwort" unterscheiden.
|
||||
"""
|
||||
from config import OUTPUT_LANGUAGE
|
||||
|
||||
# Bevorzugte Web-Quellen als Prompt-Block (optional)
|
||||
preferred_sources_block = ""
|
||||
if preferred_sources:
|
||||
ps_lines = []
|
||||
for s in preferred_sources:
|
||||
domain = s.get("domain", "")
|
||||
name = s.get("name", domain) or domain
|
||||
if not domain:
|
||||
continue
|
||||
ps_lines.append(f"- {domain} ({name})")
|
||||
if ps_lines:
|
||||
preferred_sources_block = (
|
||||
"\nEINGETRAGENE WEB-QUELLEN (vom Betreiber als seriös markiert):\n"
|
||||
+ "\n".join(ps_lines) + "\n"
|
||||
"EMPFEHLUNG: Wenn diese Domains thematisch zur Lage passen, suche dort gezielt "
|
||||
"mit \"site:domain [Suchbegriff]\". Sie sind vertrauenswuerdig eingetragen, ersetzen "
|
||||
"aber nicht deine sonstige Recherche.\n"
|
||||
)
|
||||
|
||||
if incident_type == "research":
|
||||
lang_instruction = LANG_DEEP_INTERNATIONAL if international else LANG_DEEP_GERMAN_ONLY
|
||||
# Bestehende Artikel als Kontext für den Prompt aufbereiten
|
||||
@@ -383,6 +440,7 @@ class ResearcherAgent:
|
||||
prompt = DEEP_RESEARCH_PROMPT_TEMPLATE.format(
|
||||
title=title, description=description, language_instruction=lang_instruction,
|
||||
output_language=OUTPUT_LANGUAGE, existing_context=existing_context,
|
||||
preferred_sources_block=preferred_sources_block,
|
||||
)
|
||||
else:
|
||||
lang_instruction = LANG_INTERNATIONAL if international else LANG_GERMAN_ONLY
|
||||
@@ -401,6 +459,7 @@ class ResearcherAgent:
|
||||
prompt = RESEARCH_PROMPT_TEMPLATE.format(
|
||||
title=title, description=description, language_instruction=lang_instruction,
|
||||
output_language=OUTPUT_LANGUAGE, existing_context=existing_context,
|
||||
preferred_sources_block=preferred_sources_block,
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -514,6 +573,67 @@ class ResearcherAgent:
|
||||
)
|
||||
raise ResearcherParseError(f"Claude-Antwort enthielt kein verwertbares JSON (Laenge: {len(text)})")
|
||||
|
||||
async def select_relevant_web_sources(
|
||||
self,
|
||||
title: str,
|
||||
description: str,
|
||||
web_sources: list[dict],
|
||||
) -> tuple[list[dict], ClaudeUsage | None]:
|
||||
"""Laesst Claude die thematisch passenden Web-Quellen auswaehlen (Haiku).
|
||||
|
||||
Returns:
|
||||
(ausgewaehlte Quellen, usage). Bei Fehler: ([], None).
|
||||
Leere Auswahl ist explizit erlaubt — keine Quelle wird zwangsweise aufgenommen.
|
||||
"""
|
||||
if not web_sources:
|
||||
return [], None
|
||||
|
||||
# Bei sehr wenigen Quellen lohnt der Selektions-Call kaum — alle weiterreichen.
|
||||
if len(web_sources) <= 3:
|
||||
logger.info("Web-Source-Selektion: Nur %d Quellen, alle uebernehmen", len(web_sources))
|
||||
return list(web_sources), None
|
||||
|
||||
lines = []
|
||||
for i, src in enumerate(web_sources, 1):
|
||||
cat = src.get("category", "sonstige")
|
||||
notes = (src.get("notes") or "")[:80]
|
||||
domain = src.get("domain", "")
|
||||
line = f"{i}. {src.get('name', domain)} ({domain}) [{cat}]"
|
||||
if notes:
|
||||
line += f" - {notes}"
|
||||
lines.append(line)
|
||||
|
||||
prompt = WEB_SOURCE_SELECTION_PROMPT.format(
|
||||
title=title,
|
||||
description=description or "Keine weitere Beschreibung",
|
||||
source_list="\n".join(lines),
|
||||
)
|
||||
|
||||
try:
|
||||
result, usage = await call_claude(prompt, tools=None, model=CLAUDE_MODEL_FAST)
|
||||
indices = _extract_json_array(result)
|
||||
if not isinstance(indices, list):
|
||||
logger.warning(
|
||||
"Web-Source-Selektion: Kein JSON in Antwort, ignoriere Quellen. Sample: %s",
|
||||
_truncate_for_log(result),
|
||||
)
|
||||
return [], usage
|
||||
|
||||
selected = []
|
||||
for idx in indices:
|
||||
if isinstance(idx, int) and 1 <= idx <= len(web_sources):
|
||||
selected.append(web_sources[idx - 1])
|
||||
|
||||
logger.info(
|
||||
"Web-Source-Selektion: %d von %d ausgewaehlt%s",
|
||||
len(selected), len(web_sources),
|
||||
f" ({', '.join(s.get('domain', '') for s in selected)})" if selected else "",
|
||||
)
|
||||
return selected, usage
|
||||
except Exception as e:
|
||||
logger.warning("Web-Source-Selektion fehlgeschlagen (%s)", e)
|
||||
return [], None
|
||||
|
||||
async def select_relevant_telegram_channels(
|
||||
self,
|
||||
title: str,
|
||||
|
||||
254
src/agents/translator.py
Normale Datei
254
src/agents/translator.py
Normale Datei
@@ -0,0 +1,254 @@
|
||||
"""Translator-Agent: uebersetzt fremdsprachige Artikel ins Deutsche.
|
||||
|
||||
Eigener Agent (separat vom Analyzer), damit Token-Limits nicht zwischen
|
||||
Lagebild und Uebersetzung konkurrieren. Nutzt CLAUDE_MODEL_FAST (Haiku) in
|
||||
Batches.
|
||||
|
||||
Aufgerufen vom Orchestrator nach analyzer.analyze() und vor post_refresh_qc.
|
||||
Backfill-Skript nutzt dieselbe Funktion fuer rueckwirkendes Auffuellen.
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
from agents.claude_client import call_claude, ClaudeUsage, UsageAccumulator
|
||||
from config import CLAUDE_MODEL_FAST, TRANSLATOR_ENABLED
|
||||
|
||||
logger = logging.getLogger("osint.translator")
|
||||
|
||||
# Pro Batch nicht mehr als so viele Artikel an Claude geben.
|
||||
# Bei Haiku ist das Output-Limit ca. 8k Tokens. Pro Artikel kommen leicht
|
||||
# 400-600 Tokens raus (headline_de + content_de bis 1000 Zeichen). Bei 15
|
||||
# wurde regelmaessig getrunkt (mid-JSON broken). 5 ist sicher mit Reserve.
|
||||
DEFAULT_BATCH_SIZE = 5
|
||||
|
||||
# content_original wird ohnehin auf 1000 Zeichen gecappt (rss_parser).
|
||||
# Fuer den Translator nochmal verkuerzen, falls vorhanden mehr.
|
||||
CONTENT_INPUT_MAX = 1200
|
||||
|
||||
# content_de soll wie content_original auf 1000 Zeichen begrenzt sein.
|
||||
CONTENT_OUTPUT_MAX = 1000
|
||||
|
||||
|
||||
def _extract_complete_objects(text: str) -> list[dict]:
|
||||
"""Extrahiert vollstaendige JSON-Objekte aus moeglicherweise abgeschnittenem Text.
|
||||
|
||||
Klammer-Counter-Ansatz: jedes balancierte {...} wird probiert.
|
||||
"""
|
||||
results = []
|
||||
depth = 0
|
||||
start = -1
|
||||
in_string = False
|
||||
escape = False
|
||||
for i, ch in enumerate(text):
|
||||
if escape:
|
||||
escape = False
|
||||
continue
|
||||
if ch == "\\":
|
||||
escape = True
|
||||
continue
|
||||
if ch == '"' and not escape:
|
||||
in_string = not in_string
|
||||
continue
|
||||
if in_string:
|
||||
continue
|
||||
if ch == "{":
|
||||
if depth == 0:
|
||||
start = i
|
||||
depth += 1
|
||||
elif ch == "}":
|
||||
depth -= 1
|
||||
if depth == 0 and start >= 0:
|
||||
obj_text = text[start:i + 1]
|
||||
try:
|
||||
obj = json.loads(obj_text)
|
||||
if isinstance(obj, dict):
|
||||
results.append(obj)
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
start = -1
|
||||
return results
|
||||
|
||||
|
||||
def _build_prompt(articles: list[dict], output_lang: str = "de") -> str:
|
||||
"""Bauen den Translation-Prompt fuer eine Batch."""
|
||||
lang_label = {"de": "Deutsch", "en": "Englisch"}.get(output_lang, output_lang)
|
||||
|
||||
items = []
|
||||
for a in articles:
|
||||
items.append({
|
||||
"id": a["id"],
|
||||
"headline": a.get("headline", "") or "",
|
||||
"content": (a.get("content_original") or "")[:CONTENT_INPUT_MAX],
|
||||
"source_lang": a.get("language", "en"),
|
||||
})
|
||||
|
||||
return f"""Du bist ein praeziser Uebersetzer fuer Nachrichten-Artikel.
|
||||
Uebersetze die folgenden Artikel nach {lang_label}.
|
||||
|
||||
WICHTIG:
|
||||
- Verwende IMMER echte UTF-8-Umlaute (ä, ö, ü, ß) - NIEMALS Umschreibungen wie ae, oe, ue, ss.
|
||||
Beispiele: "Gespraeche" -> "Gespräche", "Fuehrer" -> "Führer", "grosse" -> "große".
|
||||
- Behalte Eigennamen (Personen, Orte, Organisationen) im Original.
|
||||
- Headline kurz und buendig wie im Original.
|
||||
- Content auf MAX {CONTENT_OUTPUT_MAX} Zeichen kuerzen, kein HTML, kein Markdown.
|
||||
- Wenn der Artikel schon auf {lang_label} ist (z.B. source_lang="{output_lang}"),
|
||||
kopiere headline und content unveraendert.
|
||||
|
||||
Antworte AUSSCHLIESSLICH mit einem flachen JSON-Array (kein Wrapper-Objekt!).
|
||||
Format genau so:
|
||||
[
|
||||
{{"id": 1, "headline_de": "Titel auf Deutsch", "content_de": "Inhalt auf Deutsch"}},
|
||||
{{"id": 2, "headline_de": "...", "content_de": "..."}}
|
||||
]
|
||||
|
||||
NICHT erlaubt: {{"translations": [...]}} oder {{"items": [...]}} oder Markdown-Codefences.
|
||||
Nur das Array, ohne Einleitung, ohne Erklaerung.
|
||||
|
||||
ARTIKEL:
|
||||
{json.dumps(items, ensure_ascii=False, indent=2)}
|
||||
"""
|
||||
|
||||
|
||||
def _parse_response(text: str) -> list[dict]:
|
||||
"""Robustes JSON-Array-Parsing.
|
||||
|
||||
Handhabt:
|
||||
- reines JSON
|
||||
- JSON in Markdown-Codefence ```json ... ```
|
||||
- abgeschnittene Antworten (extrahiert vollstaendige Top-Level-Objekte)
|
||||
"""
|
||||
text = text.strip()
|
||||
# Markdown-Codefence entfernen
|
||||
if text.startswith("```"):
|
||||
text = re.sub(r"^```(?:json)?\s*", "", text)
|
||||
text = re.sub(r"\s*```\s*$", "", text)
|
||||
text = text.strip()
|
||||
|
||||
try:
|
||||
data = json.loads(text)
|
||||
except json.JSONDecodeError:
|
||||
# Erst Array versuchen
|
||||
match = re.search(r"\[.*\]", text, re.DOTALL)
|
||||
if match:
|
||||
try:
|
||||
data = json.loads(match.group(0))
|
||||
except json.JSONDecodeError:
|
||||
# Truncate-Fallback: einzelne Top-Level-Objekte extrahieren
|
||||
data = _extract_complete_objects(text)
|
||||
else:
|
||||
data = _extract_complete_objects(text)
|
||||
|
||||
# Claude wraps das Array gelegentlich in {"translations": [...]} oder {"items": [...]}
|
||||
if isinstance(data, dict):
|
||||
for key in ("translations", "items", "results", "data"):
|
||||
if isinstance(data.get(key), list):
|
||||
data = data[key]
|
||||
break
|
||||
else:
|
||||
# Einzelnes Objekt? Dann als Liste mit einem Element behandeln
|
||||
if "id" in data:
|
||||
data = [data]
|
||||
else:
|
||||
raise ValueError(f"Translator-Antwort: Dict ohne erwarteten Array-Key (keys={list(data.keys())[:5]})")
|
||||
|
||||
if not isinstance(data, list):
|
||||
raise ValueError(f"Translator-Antwort ist kein Array: {type(data).__name__}")
|
||||
|
||||
cleaned = []
|
||||
for item in data:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
aid = item.get("id")
|
||||
if not isinstance(aid, int):
|
||||
try:
|
||||
aid = int(aid)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
cleaned.append({
|
||||
"id": aid,
|
||||
"headline_de": (item.get("headline_de") or "").strip() or None,
|
||||
"content_de": (item.get("content_de") or "").strip() or None,
|
||||
})
|
||||
return cleaned
|
||||
|
||||
|
||||
async def translate_articles_batch(
|
||||
articles: list[dict],
|
||||
output_lang: str = "de",
|
||||
) -> tuple[list[dict], ClaudeUsage]:
|
||||
"""Uebersetzt eine Batch von Artikeln.
|
||||
|
||||
Erwartet articles als Liste von Dicts mit den Feldern id, headline,
|
||||
content_original, language.
|
||||
|
||||
Rueckgabe: (uebersetzte_artikel, usage)
|
||||
Wenn der Call fehlschlaegt, wird ([], leere_usage) zurueckgegeben - der
|
||||
Caller kann entscheiden, ob retry oder skip.
|
||||
"""
|
||||
if not articles:
|
||||
return [], ClaudeUsage()
|
||||
|
||||
prompt = _build_prompt(articles, output_lang)
|
||||
|
||||
try:
|
||||
result_text, usage = await call_claude(prompt, tools=None, model=CLAUDE_MODEL_FAST)
|
||||
except Exception as e:
|
||||
logger.error(f"Translator Claude-Call fehlgeschlagen: {e}")
|
||||
return [], ClaudeUsage()
|
||||
|
||||
try:
|
||||
translations = _parse_response(result_text)
|
||||
except Exception as e:
|
||||
logger.error(f"Translator JSON-Parsing fehlgeschlagen: {e}; raw: {result_text[:300]!r}")
|
||||
return [], usage
|
||||
|
||||
# Validierung: nur Translations zurueckgeben, deren id wirklich
|
||||
# in der angefragten Batch war
|
||||
requested_ids = {a["id"] for a in articles}
|
||||
valid = [t for t in translations if t["id"] in requested_ids]
|
||||
if len(valid) != len(translations):
|
||||
logger.warning(
|
||||
"Translator: %d von %d Translations referenzieren unbekannte IDs",
|
||||
len(translations) - len(valid), len(translations),
|
||||
)
|
||||
return valid, usage
|
||||
|
||||
|
||||
async def translate_articles(
|
||||
articles: list[dict],
|
||||
output_lang: str = "de",
|
||||
batch_size: int = DEFAULT_BATCH_SIZE,
|
||||
usage_accumulator: UsageAccumulator | None = None,
|
||||
) -> list[dict]:
|
||||
"""Uebersetzt eine beliebige Anzahl Artikel in Batches.
|
||||
|
||||
Bringt die Batches durch Logik in `translate_articles_batch` und gibt
|
||||
EINE flache Liste der Translations zurueck. Wenn ein Batch fehlschlaegt,
|
||||
wird er uebersprungen (anderer Batches laufen weiter).
|
||||
"""
|
||||
if not articles:
|
||||
return []
|
||||
|
||||
if not TRANSLATOR_ENABLED:
|
||||
logger.info(
|
||||
"Translator deaktiviert (TRANSLATOR_ENABLED=false), %d Artikel uebersprungen",
|
||||
len(articles),
|
||||
)
|
||||
return []
|
||||
|
||||
all_translations = []
|
||||
for i in range(0, len(articles), batch_size):
|
||||
batch = articles[i : i + batch_size]
|
||||
translations, usage = await translate_articles_batch(batch, output_lang)
|
||||
if usage_accumulator is not None:
|
||||
usage_accumulator.add(usage)
|
||||
all_translations.extend(translations)
|
||||
logger.info(
|
||||
"Translator-Batch %d/%d: %d/%d uebersetzt (cost=$%.4f)",
|
||||
(i // batch_size) + 1,
|
||||
(len(articles) + batch_size - 1) // batch_size,
|
||||
len(translations), len(batch),
|
||||
usage.cost_usd,
|
||||
)
|
||||
return all_translations
|
||||
@@ -41,6 +41,10 @@ OUTPUT_LANGUAGE = "Deutsch"
|
||||
# In Kundenversion auf False setzen oder Env-Variable entfernen
|
||||
DEV_MODE = os.environ.get("DEV_MODE", "true").lower() == "true"
|
||||
|
||||
# Feature-Flag: Translator-Agent (Haiku) komplett deaktivieren.
|
||||
# False = keine Uebersetzungen mehr, fremdsprachige Artikel bleiben unuebersetzt.
|
||||
TRANSLATOR_ENABLED = os.environ.get("TRANSLATOR_ENABLED", "true").lower() == "true"
|
||||
|
||||
# RSS-Feeds (Fallback, primär aus DB geladen)
|
||||
RSS_FEEDS = {
|
||||
"deutsch": [
|
||||
|
||||
@@ -6,6 +6,8 @@ import httpx
|
||||
from datetime import datetime, timezone
|
||||
from config import TIMEZONE, MAX_ARTICLES_PER_DOMAIN_RSS
|
||||
from source_rules import _extract_domain
|
||||
from feeds.transcript_extractors._common import html_to_text
|
||||
from services.post_refresh_qc import normalize_german_umlauts
|
||||
|
||||
logger = logging.getLogger("osint.rss")
|
||||
|
||||
@@ -152,11 +154,27 @@ class RSSParser:
|
||||
|
||||
for entry in feed.entries[:50]:
|
||||
title = entry.get("title", "")
|
||||
summary = entry.get("summary", "")
|
||||
# RSS-summary ist bei vielen Quellen HTML (Guardian, AP, SZ, ...).
|
||||
# Vor weiterer Verwendung strippen, sonst landet HTML in DB
|
||||
# und KI-Agenten und Sprach-Heuristik werden gestoert.
|
||||
summary_raw = entry.get("summary", "")
|
||||
summary = html_to_text(summary_raw) if summary_raw else ""
|
||||
# ASCII-Umlaut-Normalisierung (z.B. dpa-AFX schreibt "Gespraeche").
|
||||
# Dictionary-basiert, sicher gegen englische Woerter wie "Boeing".
|
||||
title, _ = normalize_german_umlauts(title)
|
||||
summary, _ = normalize_german_umlauts(summary)
|
||||
text = f"{title} {summary}".lower()
|
||||
|
||||
# Flexibles Keyword-Matching: mindestens die Hälfte der Suchworte muss vorkommen (aufgerundet)
|
||||
min_matches = min(2, max(1, (len(search_words) + 1) // 2))
|
||||
# Adaptive Match-Schwelle:
|
||||
# - Bei mindestens einem spezifischen Keyword (>=7 Zeichen) im Text reicht 1 Treffer.
|
||||
# Verhindert, dass Headlines mit nur einem starken Keyword wie "buckelwal"
|
||||
# rausfallen, wenn die Lage thematisch eng ist (Bug 1, vom User dokumentiert).
|
||||
# - Sonst: alte Heuristik (mindestens halb der Wörter, max. 2).
|
||||
specific_in_text = any(w in text for w in search_words if len(w) >= 7)
|
||||
if specific_in_text:
|
||||
min_matches = 1
|
||||
else:
|
||||
min_matches = min(2, max(1, (len(search_words) + 1) // 2))
|
||||
match_count = sum(1 for word in search_words if word in text)
|
||||
|
||||
if match_count >= min_matches:
|
||||
|
||||
@@ -124,7 +124,7 @@ async def check_auto_refresh():
|
||||
|
||||
# Letzten abgeschlossenen oder laufenden Refresh pruefen
|
||||
cursor = await db.execute(
|
||||
"SELECT started_at, status FROM refresh_log WHERE incident_id = ? AND status IN ('completed', 'running') ORDER BY id DESC LIMIT 1",
|
||||
"SELECT started_at, status FROM refresh_log WHERE incident_id = ? AND status IN ('completed', 'running', 'cancelled', 'error') ORDER BY id DESC LIMIT 1",
|
||||
(incident_id,),
|
||||
)
|
||||
last_refresh = await cursor.fetchone()
|
||||
|
||||
@@ -40,12 +40,25 @@ async def require_writable_license(
|
||||
) -> dict:
|
||||
"""Dependency die sicherstellt, dass die Lizenz Schreibzugriff erlaubt.
|
||||
|
||||
Blockiert neue Lagen/Refreshes bei abgelaufener Lizenz (Nur-Lesen-Modus).
|
||||
Blockiert neue Lagen/Refreshes bei abgelaufener Lizenz, deaktivierter Org
|
||||
oder aufgebrauchtem Token-Budget (Hard-Stop).
|
||||
"""
|
||||
lic = current_user.get("license", {})
|
||||
if lic.get("read_only"):
|
||||
reason = lic.get("read_only_reason") or "expired"
|
||||
if reason == "budget_exceeded":
|
||||
detail = "Token-Budget aufgebraucht. Für Aufstockung oder Upgrade bitte info@aegis-sight.de kontaktieren."
|
||||
elif reason == "expired":
|
||||
detail = "Lizenz abgelaufen. Nur Lesezugriff moeglich."
|
||||
elif reason == "no_license":
|
||||
detail = "Keine aktive Lizenz. Bitte Verwaltung kontaktieren."
|
||||
elif reason == "org_disabled":
|
||||
detail = "Organisation deaktiviert. Bitte Support kontaktieren."
|
||||
else:
|
||||
detail = lic.get("message") or "Nur Lesezugriff moeglich."
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Lizenz abgelaufen oder widerrufen. Nur Lesezugriff moeglich.",
|
||||
detail=detail,
|
||||
headers={"X-License-Status": reason},
|
||||
)
|
||||
return current_user
|
||||
|
||||
@@ -37,6 +37,8 @@ class UserMeResponse(BaseModel):
|
||||
license_status: str = "unknown"
|
||||
license_type: str = ""
|
||||
read_only: bool = False
|
||||
read_only_reason: Optional[str] = None
|
||||
unlimited_budget: bool = False
|
||||
credits_total: Optional[int] = None
|
||||
credits_remaining: Optional[int] = None
|
||||
credits_percent_used: Optional[float] = None
|
||||
|
||||
@@ -26,10 +26,15 @@ LOGO_PATH = Path(__file__).parent / "static" / "favicon.svg"
|
||||
|
||||
|
||||
FC_STATUS_LABELS = {
|
||||
"confirmed": "Bestätigt",
|
||||
"unconfirmed": "Unbestätigt",
|
||||
"disputed": "Umstritten",
|
||||
"false": "Falsch",
|
||||
# 1:1 vom Monitor-Frontend (components.js) — konsistent zum UI.
|
||||
"confirmed": "Bestätigt",
|
||||
"unconfirmed": "Unbestätigt",
|
||||
"contradicted": "Widerlegt",
|
||||
"developing": "Unklar",
|
||||
"established": "Gesichert",
|
||||
"disputed": "Umstritten",
|
||||
"unverified": "Ungeprüft",
|
||||
"false": "Falsch", # Legacy-Fallback
|
||||
}
|
||||
|
||||
|
||||
@@ -709,7 +714,7 @@ async def generate_pdf(
|
||||
),
|
||||
lagebild_timestamp=(incident.get("updated_at") or "")[:16].replace("T", " "),
|
||||
sources=_prepare_sources(incident)[:30] if scope == "report" else _prepare_sources(incident),
|
||||
fact_checks=_prepare_fact_checks(fact_checks[:20] if scope == "report" else fact_checks),
|
||||
fact_checks=_prepare_fact_checks(fact_checks),
|
||||
source_stats=_prepare_source_stats(articles)[:20] if scope == "report" else _prepare_source_stats(articles),
|
||||
timeline=_prepare_timeline(articles) if scope == "full" else [],
|
||||
articles=articles if scope == "full" else [],
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
"""Auth-Router: Magic-Link-Login und Nutzerverwaltung."""
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, status
|
||||
|
||||
|
||||
def _staging_mode() -> bool:
|
||||
"""STAGING_MODE Env-Flag (vgl. services.license_service)."""
|
||||
return os.environ.get("STAGING_MODE", "").lower() in ("1", "true", "yes")
|
||||
from models import (
|
||||
MagicLinkRequest,
|
||||
MagicLinkResponse,
|
||||
@@ -187,10 +193,11 @@ async def get_me(
|
||||
from services.license_service import check_license
|
||||
license_info = await check_license(db, current_user["tenant_id"])
|
||||
|
||||
# Credits-Daten laden
|
||||
# Credits-Daten laden (echte Prozente, nicht gekappt)
|
||||
credits_total = None
|
||||
credits_remaining = None
|
||||
credits_percent_used = None
|
||||
unlimited_budget = bool(license_info.get("unlimited_budget", False))
|
||||
if current_user.get("tenant_id"):
|
||||
lic_cursor = await db.execute(
|
||||
"SELECT credits_total, credits_used, cost_per_credit FROM licenses WHERE organization_id = ? AND status = 'active' ORDER BY id DESC LIMIT 1",
|
||||
@@ -200,7 +207,12 @@ async def get_me(
|
||||
credits_total = lic_row["credits_total"]
|
||||
credits_used = lic_row["credits_used"] or 0
|
||||
credits_remaining = max(0, int(credits_total - credits_used))
|
||||
credits_percent_used = round(min(100, (credits_used / credits_total) * 100), 1) if credits_total > 0 else 0
|
||||
credits_percent_used = round((credits_used / credits_total) * 100, 1) if credits_total > 0 else 0
|
||||
|
||||
# STAGING_MODE: Org-Switcher im Frontend deaktivieren
|
||||
is_global_admin_response = current_user.get("is_global_admin", False)
|
||||
if _staging_mode():
|
||||
is_global_admin_response = False
|
||||
|
||||
return UserMeResponse(
|
||||
id=current_user["id"],
|
||||
@@ -216,7 +228,9 @@ async def get_me(
|
||||
license_status=license_info.get("status", "unknown"),
|
||||
license_type=license_info.get("license_type", ""),
|
||||
read_only=license_info.get("read_only", False),
|
||||
is_global_admin=current_user.get("is_global_admin", False),
|
||||
read_only_reason=license_info.get("read_only_reason"),
|
||||
unlimited_budget=unlimited_budget,
|
||||
is_global_admin=is_global_admin_response,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1165,8 +1165,18 @@ async def export_incident(
|
||||
)
|
||||
snapshots = [dict(r) for r in await cursor.fetchall()]
|
||||
|
||||
# Executive Summary (KI-generiert, gecacht)
|
||||
exec_summary = incident.get("executive_summary")
|
||||
# Zusammenfassung fuer den Export:
|
||||
# - Bei Adhoc-Lagen primaer "Neueste Entwicklungen" (latest_developments) als Markdown-Bullets,
|
||||
# weil Live-Monitoring von Aktualitaet lebt.
|
||||
# - Fallback (oder bei Research): Executive Summary (KI-generiert, gecacht).
|
||||
is_adhoc = (incident.get("type") or "adhoc") != "research"
|
||||
latest_dev = (incident.get("latest_developments") or "").strip()
|
||||
exec_summary = None
|
||||
if is_adhoc and latest_dev:
|
||||
from report_generator import _markdown_to_html as _md_to_html
|
||||
exec_summary = _md_to_html(latest_dev)
|
||||
if not exec_summary:
|
||||
exec_summary = incident.get("executive_summary")
|
||||
if not exec_summary:
|
||||
summary_text = incident.get("summary") or ""
|
||||
exec_summary = await generate_executive_summary(summary_text)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Lizenz-Verwaltung und -Pruefung."""
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from config import TIMEZONE
|
||||
import aiosqlite
|
||||
@@ -7,11 +8,21 @@ import aiosqlite
|
||||
logger = logging.getLogger("osint.license")
|
||||
|
||||
|
||||
def _staging_mode() -> bool:
|
||||
"""Staging-Mode aktiv? Wenn ja, gilt: immer unlimited Budget, kein Hard-Stop.
|
||||
|
||||
Wird ueber ENV-Variable STAGING_MODE=1 (oder true) aktiviert.
|
||||
Nur in Staging-.env gesetzt; Live-.env hat das Flag nicht.
|
||||
"""
|
||||
return os.environ.get("STAGING_MODE", "").lower() in ("1", "true", "yes")
|
||||
|
||||
|
||||
async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
|
||||
"""Prueft den Lizenzstatus einer Organisation.
|
||||
|
||||
Returns:
|
||||
dict mit: valid, status, license_type, max_users, current_users, read_only, message
|
||||
dict mit: valid, status, license_type, max_users, current_users, read_only,
|
||||
read_only_reason, message, unlimited_budget, credits_total, credits_used
|
||||
"""
|
||||
# Organisation pruefen
|
||||
cursor = await db.execute(
|
||||
@@ -20,10 +31,14 @@ async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
|
||||
)
|
||||
org = await cursor.fetchone()
|
||||
if not org:
|
||||
return {"valid": False, "status": "not_found", "read_only": True, "message": "Organisation nicht gefunden"}
|
||||
return {"valid": False, "status": "not_found", "read_only": True,
|
||||
"read_only_reason": "not_found",
|
||||
"message": "Organisation nicht gefunden"}
|
||||
|
||||
if not org["is_active"]:
|
||||
return {"valid": False, "status": "org_disabled", "read_only": True, "message": "Organisation deaktiviert"}
|
||||
return {"valid": False, "status": "org_disabled", "read_only": True,
|
||||
"read_only_reason": "org_disabled",
|
||||
"message": "Organisation deaktiviert"}
|
||||
|
||||
# Aktive Lizenz suchen
|
||||
cursor = await db.execute(
|
||||
@@ -35,7 +50,19 @@ async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
|
||||
license_row = await cursor.fetchone()
|
||||
|
||||
if not license_row:
|
||||
return {"valid": False, "status": "no_license", "read_only": True, "message": "Keine aktive Lizenz"}
|
||||
return {"valid": False, "status": "no_license", "read_only": True,
|
||||
"read_only_reason": "no_license",
|
||||
"message": "Keine aktive Lizenz"}
|
||||
|
||||
# Felder zur weiteren Verwendung extrahieren
|
||||
lic_dict = dict(license_row)
|
||||
unlimited_budget = bool(lic_dict.get("unlimited_budget"))
|
||||
credits_total = lic_dict.get("credits_total")
|
||||
credits_used = lic_dict.get("credits_used") or 0
|
||||
|
||||
# STAGING_MODE: kein Token-Budget-Hard-Stop, immer unlimited
|
||||
if _staging_mode():
|
||||
unlimited_budget = True
|
||||
|
||||
# Ablauf pruefen
|
||||
now = datetime.now(TIMEZONE)
|
||||
@@ -52,11 +79,21 @@ async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
|
||||
"status": "expired",
|
||||
"license_type": license_row["license_type"],
|
||||
"read_only": True,
|
||||
"read_only_reason": "expired",
|
||||
"message": "Lizenz abgelaufen",
|
||||
"unlimited_budget": unlimited_budget,
|
||||
"credits_total": credits_total,
|
||||
"credits_used": credits_used,
|
||||
}
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
# Budget-Check (Hard-Stop bei aufgebrauchten Credits, ausser unlimited)
|
||||
budget_exceeded = False
|
||||
if not unlimited_budget and credits_total and credits_total > 0:
|
||||
if credits_used >= credits_total:
|
||||
budget_exceeded = True
|
||||
|
||||
# Nutzerzahl pruefen
|
||||
cursor = await db.execute(
|
||||
"SELECT COUNT(*) as cnt FROM users WHERE organization_id = ? AND is_active = 1",
|
||||
@@ -64,6 +101,21 @@ async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
|
||||
)
|
||||
current_users = (await cursor.fetchone())["cnt"]
|
||||
|
||||
if budget_exceeded:
|
||||
return {
|
||||
"valid": True, # Lizenz ist gueltig, aber Budget aufgebraucht -> read-only
|
||||
"status": "budget_exceeded",
|
||||
"license_type": license_row["license_type"],
|
||||
"max_users": license_row["max_users"],
|
||||
"current_users": current_users,
|
||||
"read_only": True,
|
||||
"read_only_reason": "budget_exceeded",
|
||||
"message": "Token-Budget aufgebraucht",
|
||||
"unlimited_budget": False,
|
||||
"credits_total": credits_total,
|
||||
"credits_used": credits_used,
|
||||
}
|
||||
|
||||
return {
|
||||
"valid": True,
|
||||
"status": license_row["status"],
|
||||
@@ -71,7 +123,11 @@ async def check_license(db: aiosqlite.Connection, organization_id: int) -> dict:
|
||||
"max_users": license_row["max_users"],
|
||||
"current_users": current_users,
|
||||
"read_only": False,
|
||||
"read_only_reason": None,
|
||||
"message": "Lizenz aktiv",
|
||||
"unlimited_budget": unlimited_budget,
|
||||
"credits_total": credits_total,
|
||||
"credits_used": credits_used,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -50,18 +50,18 @@ PIPELINE_STEPS = [
|
||||
"icon": "map-pin",
|
||||
"tooltip": "Aus den Meldungen werden Ortsangaben erkannt und auf der Karte verortet.",
|
||||
},
|
||||
{
|
||||
"key": "summary",
|
||||
"label": "Lagebild verfassen",
|
||||
"icon": "file-text",
|
||||
"tooltip": "Aus allen geprüften Meldungen wird ein zusammenhängendes Lagebild geschrieben, mit Quellenangaben am Text.",
|
||||
},
|
||||
{
|
||||
"key": "factcheck",
|
||||
"label": "Fakten prüfen",
|
||||
"icon": "shield",
|
||||
"tooltip": "Behauptungen aus den Meldungen werden gegeneinander abgeglichen: Bestätigt? Umstritten? Noch unklar?",
|
||||
},
|
||||
{
|
||||
"key": "summary",
|
||||
"label": "Lagebild verfassen",
|
||||
"icon": "file-text",
|
||||
"tooltip": "Aus allen geprüften Meldungen wird ein zusammenhängendes Lagebild geschrieben, mit Quellenangaben am Text.",
|
||||
},
|
||||
{
|
||||
"key": "qc",
|
||||
"label": "Qualitätscheck",
|
||||
@@ -228,3 +228,25 @@ async def error_step(db, ws_manager, *, step_id: Optional[int], refresh_log_id:
|
||||
"status": "error",
|
||||
"pass_number": pass_number,
|
||||
}, visibility, created_by, tenant_id)
|
||||
|
||||
|
||||
async def cancel_active_steps(db, *, refresh_log_id: int) -> int:
|
||||
"""Schliesst alle noch aktiven Pipeline-Schritte eines Refreshs als 'cancelled' ab.
|
||||
|
||||
Wird vom Orchestrator nach einem User-Cancel aufgerufen. Ohne diesen Schritt
|
||||
bleibt der zuletzt aktive Step-Eintrag verwaist und der Pipeline-Endpoint
|
||||
liefert dauerhaft 'Schritt X laeuft' an die UI.
|
||||
"""
|
||||
try:
|
||||
cur = await db.execute(
|
||||
"""UPDATE refresh_pipeline_steps
|
||||
SET status = 'cancelled', completed_at = ?
|
||||
WHERE refresh_log_id = ? AND status = 'active'""",
|
||||
(_now_db(), refresh_log_id),
|
||||
)
|
||||
await db.commit()
|
||||
return cur.rowcount or 0
|
||||
except Exception as e:
|
||||
logger.warning(f"Pipeline cancel_active_steps DB-Fehler: {e}")
|
||||
return 0
|
||||
|
||||
|
||||
@@ -400,18 +400,20 @@ async def run_post_refresh_qc(db, incident_id: int) -> dict:
|
||||
db, incident_id, incident_title, incident_desc
|
||||
)
|
||||
umlauts_fixed = await normalize_umlaut_fields(db, incident_id)
|
||||
article_umlauts_fixed = await normalize_umlaut_articles(db, incident_id)
|
||||
|
||||
if facts_removed > 0 or locations_fixed > 0 or umlauts_fixed > 0:
|
||||
total_umlaut_changes = umlauts_fixed + article_umlauts_fixed
|
||||
if facts_removed > 0 or locations_fixed > 0 or total_umlaut_changes > 0:
|
||||
await db.commit()
|
||||
logger.info(
|
||||
"Post-Refresh QC fuer Incident %d: %d Duplikate entfernt, %d Locations korrigiert, %d Umlaute normalisiert",
|
||||
incident_id, facts_removed, locations_fixed, umlauts_fixed,
|
||||
"Post-Refresh QC fuer Incident %d: %d Duplikate entfernt, %d Locations korrigiert, %d Umlaute normalisiert (davon %d in Articles)",
|
||||
incident_id, facts_removed, locations_fixed, total_umlaut_changes, article_umlauts_fixed,
|
||||
)
|
||||
|
||||
return {
|
||||
"facts_removed": facts_removed,
|
||||
"locations_fixed": locations_fixed,
|
||||
"umlauts_fixed": umlauts_fixed,
|
||||
"umlauts_fixed": total_umlaut_changes,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
@@ -568,3 +570,64 @@ async def normalize_umlaut_fields(db, incident_id: int) -> int:
|
||||
incident_id, count_summary, count_dev,
|
||||
)
|
||||
return total
|
||||
|
||||
|
||||
async def normalize_umlaut_articles(db, incident_id: int) -> int:
|
||||
"""Normalisiert Umlaute in allen Artikel-Texten des Incidents.
|
||||
|
||||
Felder die behandelt werden:
|
||||
- headline_de und content_de bei allen Artikeln (LLM-Uebersetzung kann
|
||||
ASCII-Umlaute liefern trotz Prompt-Anweisung)
|
||||
- headline und content_original bei language='de' (manche Quellen wie
|
||||
dpa-AFX, Telegram-Kanaele liefern selbst schon ASCII-Umlaute)
|
||||
|
||||
Idempotent: Wenn der Text schon korrekt ist, macht das Dict-Lookup
|
||||
keine Aenderung und wir schreiben nicht zurueck.
|
||||
|
||||
Rueckgabe: Gesamtzahl der Wort-Ersetzungen ueber alle Artikel.
|
||||
"""
|
||||
cursor = await db.execute(
|
||||
"""SELECT id, language, headline, headline_de, content_original, content_de
|
||||
FROM articles WHERE incident_id = ?""",
|
||||
(incident_id,),
|
||||
)
|
||||
rows = await cursor.fetchall()
|
||||
if not rows:
|
||||
return 0
|
||||
|
||||
total = 0
|
||||
for row in rows:
|
||||
is_de = (row["language"] or "").lower() == "de"
|
||||
updates = {}
|
||||
|
||||
# Felder die immer behandelt werden (LLM-Uebersetzungen)
|
||||
if row["headline_de"]:
|
||||
new, n = normalize_german_umlauts(row["headline_de"])
|
||||
if n > 0:
|
||||
updates["headline_de"] = new
|
||||
total += n
|
||||
if row["content_de"]:
|
||||
new, n = normalize_german_umlauts(row["content_de"])
|
||||
if n > 0:
|
||||
updates["content_de"] = new
|
||||
total += n
|
||||
|
||||
# Originalfelder nur bei deutschen Quellen
|
||||
if is_de:
|
||||
if row["headline"]:
|
||||
new, n = normalize_german_umlauts(row["headline"])
|
||||
if n > 0:
|
||||
updates["headline"] = new
|
||||
total += n
|
||||
if row["content_original"]:
|
||||
new, n = normalize_german_umlauts(row["content_original"])
|
||||
if n > 0:
|
||||
updates["content_original"] = new
|
||||
total += n
|
||||
|
||||
if updates:
|
||||
set_clause = ", ".join(f"{k} = ?" for k in updates)
|
||||
values = list(updates.values()) + [row["id"]]
|
||||
await db.execute(f"UPDATE articles SET {set_clause} WHERE id = ?", values)
|
||||
|
||||
return total
|
||||
|
||||
@@ -1,282 +1,282 @@
|
||||
"""Quellen-Health-Check Engine - prüft Erreichbarkeit, Feed-Validität, Duplikate."""
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
import feedparser
|
||||
import aiosqlite
|
||||
|
||||
logger = logging.getLogger("osint.source_health")
|
||||
|
||||
|
||||
async def run_health_checks(db: aiosqlite.Connection) -> dict:
|
||||
"""Führt alle Health-Checks für aktive Grundquellen durch."""
|
||||
logger.info("Starte Quellen-Health-Check...")
|
||||
|
||||
# Alle aktiven Grundquellen laden
|
||||
cursor = await db.execute(
|
||||
"SELECT id, name, url, domain, source_type, article_count, last_seen_at "
|
||||
"FROM sources WHERE status = 'active' AND tenant_id IS NULL"
|
||||
)
|
||||
sources = [dict(row) for row in await cursor.fetchall()]
|
||||
|
||||
# Aktuelle Health-Check-Ergebnisse löschen (werden neu geschrieben)
|
||||
await db.execute("DELETE FROM source_health_checks")
|
||||
await db.commit()
|
||||
|
||||
checks_done = 0
|
||||
issues_found = 0
|
||||
|
||||
# 1. Erreichbarkeit + Feed-Validität (nur Quellen mit URL)
|
||||
sources_with_url = [s for s in sources if s["url"]]
|
||||
|
||||
async with httpx.AsyncClient(
|
||||
timeout=15.0,
|
||||
follow_redirects=True,
|
||||
headers={"User-Agent": "Mozilla/5.0 (compatible; OSINT-Monitor/1.0)"},
|
||||
) as client:
|
||||
for i in range(0, len(sources_with_url), 5):
|
||||
batch = sources_with_url[i:i + 5]
|
||||
tasks = [_check_source_reachability(client, s) for s in batch]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
for source, result in zip(batch, results):
|
||||
if isinstance(result, Exception):
|
||||
await _save_check(
|
||||
db, source["id"], "reachability", "error",
|
||||
f"Prüfung fehlgeschlagen: {result}",
|
||||
)
|
||||
issues_found += 1
|
||||
else:
|
||||
for check in result:
|
||||
await _save_check(
|
||||
db, source["id"], check["type"], check["status"],
|
||||
check["message"], check.get("details"),
|
||||
)
|
||||
if check["status"] != "ok":
|
||||
issues_found += 1
|
||||
checks_done += 1
|
||||
|
||||
# 2. Veraltete Quellen (kein Artikel seit >30 Tagen)
|
||||
for source in sources:
|
||||
if source["source_type"] in ("excluded", "web_source"):
|
||||
continue
|
||||
stale_check = _check_stale(source)
|
||||
if stale_check:
|
||||
await _save_check(
|
||||
db, source["id"], stale_check["type"],
|
||||
stale_check["status"], stale_check["message"],
|
||||
)
|
||||
if stale_check["status"] != "ok":
|
||||
issues_found += 1
|
||||
|
||||
# 3. Duplikate erkennen
|
||||
duplicates = _find_duplicates(sources)
|
||||
for dup in duplicates:
|
||||
await _save_check(
|
||||
db, dup["source_id"], "duplicate", "warning",
|
||||
dup["message"], json.dumps(dup.get("details", {})),
|
||||
)
|
||||
issues_found += 1
|
||||
|
||||
await db.commit()
|
||||
logger.info(
|
||||
f"Health-Check abgeschlossen: {checks_done} Quellen geprüft, "
|
||||
f"{issues_found} Probleme gefunden"
|
||||
)
|
||||
return {"checked": checks_done, "issues": issues_found}
|
||||
|
||||
|
||||
async def _check_source_reachability(
|
||||
client: httpx.AsyncClient, source: dict,
|
||||
) -> list[dict]:
|
||||
"""Prüft Erreichbarkeit und Feed-Validität einer Quelle."""
|
||||
checks = []
|
||||
url = source["url"]
|
||||
|
||||
try:
|
||||
resp = await client.get(url)
|
||||
|
||||
if resp.status_code >= 400:
|
||||
checks.append({
|
||||
"type": "reachability",
|
||||
"status": "error",
|
||||
"message": f"HTTP {resp.status_code} - nicht erreichbar",
|
||||
"details": json.dumps({"status_code": resp.status_code, "url": url}),
|
||||
})
|
||||
return checks
|
||||
|
||||
if resp.status_code >= 300:
|
||||
checks.append({
|
||||
"type": "reachability",
|
||||
"status": "warning",
|
||||
"message": f"HTTP {resp.status_code} - Weiterleitung",
|
||||
"details": json.dumps({
|
||||
"status_code": resp.status_code,
|
||||
"final_url": str(resp.url),
|
||||
}),
|
||||
})
|
||||
else:
|
||||
checks.append({
|
||||
"type": "reachability",
|
||||
"status": "ok",
|
||||
"message": "Erreichbar",
|
||||
})
|
||||
|
||||
# Feed-Validität nur für RSS-Feeds
|
||||
if source["source_type"] == "rss_feed":
|
||||
text = resp.text[:20000]
|
||||
if "<rss" not in text and "<feed" not in text and "<channel" not in text:
|
||||
checks.append({
|
||||
"type": "feed_validity",
|
||||
"status": "error",
|
||||
"message": "Kein gültiger RSS/Atom-Feed",
|
||||
})
|
||||
else:
|
||||
feed = await asyncio.to_thread(feedparser.parse, text)
|
||||
if feed.get("bozo") and not feed.entries:
|
||||
checks.append({
|
||||
"type": "feed_validity",
|
||||
"status": "error",
|
||||
"message": "Feed fehlerhaft (bozo)",
|
||||
"details": json.dumps({
|
||||
"bozo_exception": str(feed.get("bozo_exception", "")),
|
||||
}),
|
||||
})
|
||||
elif not feed.entries:
|
||||
checks.append({
|
||||
"type": "feed_validity",
|
||||
"status": "warning",
|
||||
"message": "Feed erreichbar aber leer",
|
||||
})
|
||||
else:
|
||||
checks.append({
|
||||
"type": "feed_validity",
|
||||
"status": "ok",
|
||||
"message": f"Feed gültig ({len(feed.entries)} Einträge)",
|
||||
})
|
||||
|
||||
except httpx.TimeoutException:
|
||||
checks.append({
|
||||
"type": "reachability",
|
||||
"status": "error",
|
||||
"message": "Timeout (15s)",
|
||||
})
|
||||
except httpx.ConnectError as e:
|
||||
checks.append({
|
||||
"type": "reachability",
|
||||
"status": "error",
|
||||
"message": f"Verbindung fehlgeschlagen: {e}",
|
||||
})
|
||||
except Exception as e:
|
||||
checks.append({
|
||||
"type": "reachability",
|
||||
"status": "error",
|
||||
"message": f"{type(e).__name__}: {e}",
|
||||
})
|
||||
|
||||
return checks
|
||||
|
||||
|
||||
def _check_stale(source: dict) -> dict | None:
|
||||
"""Prüft ob eine Quelle veraltet ist (keine Artikel seit >30 Tagen)."""
|
||||
if source["source_type"] == "excluded":
|
||||
return None
|
||||
|
||||
article_count = source.get("article_count") or 0
|
||||
last_seen = source.get("last_seen_at")
|
||||
|
||||
if article_count == 0:
|
||||
return {
|
||||
"type": "stale",
|
||||
"status": "warning",
|
||||
"message": "Noch nie Artikel geliefert",
|
||||
}
|
||||
|
||||
if last_seen:
|
||||
try:
|
||||
from datetime import datetime
|
||||
last_dt = datetime.fromisoformat(last_seen)
|
||||
now = datetime.now()
|
||||
age_days = (now - last_dt).days
|
||||
if age_days > 30:
|
||||
return {
|
||||
"type": "stale",
|
||||
"status": "warning",
|
||||
"message": f"Letzter Artikel vor {age_days} Tagen",
|
||||
}
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _find_duplicates(sources: list[dict]) -> list[dict]:
|
||||
"""Findet doppelte Quellen (gleiche URL)."""
|
||||
duplicates = []
|
||||
url_map = {}
|
||||
|
||||
for s in sources:
|
||||
if not s["url"]:
|
||||
continue
|
||||
url_norm = s["url"].lower().rstrip("/")
|
||||
if url_norm in url_map:
|
||||
existing = url_map[url_norm]
|
||||
duplicates.append({
|
||||
"source_id": s["id"],
|
||||
"message": f"Doppelte URL wie '{existing['name']}' (ID {existing['id']})",
|
||||
"details": {"duplicate_of": existing["id"], "type": "url"},
|
||||
})
|
||||
else:
|
||||
url_map[url_norm] = s
|
||||
|
||||
return duplicates
|
||||
|
||||
|
||||
async def _save_check(
|
||||
db: aiosqlite.Connection, source_id: int, check_type: str,
|
||||
status: str, message: str, details: str = None,
|
||||
):
|
||||
"""Speichert ein Health-Check-Ergebnis."""
|
||||
await db.execute(
|
||||
"INSERT INTO source_health_checks "
|
||||
"(source_id, check_type, status, message, details) "
|
||||
"VALUES (?, ?, ?, ?, ?)",
|
||||
(source_id, check_type, status, message, details),
|
||||
)
|
||||
|
||||
|
||||
async def get_health_summary(db: aiosqlite.Connection) -> dict:
|
||||
"""Gibt eine Zusammenfassung der letzten Health-Check-Ergebnisse zurück."""
|
||||
cursor = await db.execute("""
|
||||
SELECT
|
||||
h.id, h.source_id, s.name, s.domain, s.url, s.source_type,
|
||||
h.check_type, h.status, h.message, h.details, h.checked_at
|
||||
FROM source_health_checks h
|
||||
JOIN sources s ON s.id = h.source_id
|
||||
ORDER BY
|
||||
CASE h.status WHEN 'error' THEN 0 WHEN 'warning' THEN 1 ELSE 2 END,
|
||||
s.name
|
||||
""")
|
||||
checks = [dict(row) for row in await cursor.fetchall()]
|
||||
|
||||
error_count = sum(1 for c in checks if c["status"] == "error")
|
||||
warning_count = sum(1 for c in checks if c["status"] == "warning")
|
||||
ok_count = sum(1 for c in checks if c["status"] == "ok")
|
||||
|
||||
cursor = await db.execute(
|
||||
"SELECT MAX(checked_at) as last_check FROM source_health_checks"
|
||||
)
|
||||
row = await cursor.fetchone()
|
||||
last_check = row["last_check"] if row else None
|
||||
|
||||
return {
|
||||
"last_check": last_check,
|
||||
"total_checks": len(checks),
|
||||
"errors": error_count,
|
||||
"warnings": warning_count,
|
||||
"ok": ok_count,
|
||||
"checks": checks,
|
||||
}
|
||||
"""Quellen-Health-Check Engine - prüft Erreichbarkeit, Feed-Validität, Duplikate."""
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
import feedparser
|
||||
import aiosqlite
|
||||
|
||||
logger = logging.getLogger("osint.source_health")
|
||||
|
||||
|
||||
async def run_health_checks(db: aiosqlite.Connection) -> dict:
    """Run every health check for the active base sources.

    Loads all active, tenant-independent sources, wipes the previous
    check results and re-evaluates reachability/feed validity,
    staleness and duplicate URLs.

    Args:
        db: Open aiosqlite connection (rows must be dict-convertible).

    Returns:
        {"checked": <sources probed via HTTP>, "issues": <non-ok findings>}
    """
    logger.info("Starte Quellen-Health-Check...")

    # Load all active base sources (tenant_id IS NULL).
    cursor = await db.execute(
        "SELECT id, name, url, domain, source_type, article_count, last_seen_at "
        "FROM sources WHERE status = 'active' AND tenant_id IS NULL"
    )
    sources = [dict(row) for row in await cursor.fetchall()]

    # Old results are replaced wholesale by this run.
    await db.execute("DELETE FROM source_health_checks")
    await db.commit()

    sources_checked = 0
    problem_count = 0

    # 1. Reachability + feed validity (only sources that have a URL),
    #    probed in small batches to limit concurrent connections.
    candidates = [s for s in sources if s["url"]]

    async with httpx.AsyncClient(
        timeout=15.0,
        follow_redirects=True,
        headers={"User-Agent": "Mozilla/5.0 (compatible; OSINT-Monitor/1.0)"},
    ) as client:
        batch_size = 5
        for offset in range(0, len(candidates), batch_size):
            chunk = candidates[offset:offset + batch_size]
            outcomes = await asyncio.gather(
                *(_check_source_reachability(client, s) for s in chunk),
                return_exceptions=True,
            )

            for src, outcome in zip(chunk, outcomes):
                if isinstance(outcome, Exception):
                    # The probe itself failed -> record as a hard error.
                    await _save_check(
                        db, src["id"], "reachability", "error",
                        f"Prüfung fehlgeschlagen: {outcome}",
                    )
                    problem_count += 1
                    continue
                for check in outcome:
                    await _save_check(
                        db, src["id"], check["type"], check["status"],
                        check["message"], check.get("details"),
                    )
                    if check["status"] != "ok":
                        problem_count += 1
                sources_checked += 1

    # 2. Stale sources (no article for >30 days).
    for src in sources:
        if src["source_type"] in ("excluded", "web_source"):
            continue
        finding = _check_stale(src)
        if finding:
            await _save_check(
                db, src["id"], finding["type"],
                finding["status"], finding["message"],
            )
            if finding["status"] != "ok":
                problem_count += 1

    # 3. Duplicate URLs.
    for dup in _find_duplicates(sources):
        await _save_check(
            db, dup["source_id"], "duplicate", "warning",
            dup["message"], json.dumps(dup.get("details", {})),
        )
        problem_count += 1

    await db.commit()
    logger.info(
        f"Health-Check abgeschlossen: {sources_checked} Quellen geprüft, "
        f"{problem_count} Probleme gefunden"
    )
    return {"checked": sources_checked, "issues": problem_count}
|
||||
|
||||
|
||||
async def _check_source_reachability(
    client: httpx.AsyncClient, source: dict,
) -> list[dict]:
    """Check reachability and feed validity of a single source.

    Args:
        client: Shared httpx client (follow_redirects, 15s timeout).
        source: Row dict with at least "url" and "source_type".

    Returns:
        List of check dicts with keys type/status/message and an
        optional "details" JSON string.
    """
    results: list[dict] = []
    url = source["url"]

    def add(check_type: str, status: str, message: str, details=None) -> None:
        # Append one check result; "details" is only set when provided,
        # mirroring how callers read it via check.get("details").
        entry = {"type": check_type, "status": status, "message": message}
        if details is not None:
            entry["details"] = details
        results.append(entry)

    try:
        resp = await client.get(url)

        if resp.status_code >= 400:
            # Hard failure: no point in inspecting the body any further.
            add(
                "reachability", "error",
                f"HTTP {resp.status_code} - nicht erreichbar",
                json.dumps({"status_code": resp.status_code, "url": url}),
            )
            return results

        if resp.status_code >= 300:
            add(
                "reachability", "warning",
                f"HTTP {resp.status_code} - Weiterleitung",
                json.dumps({
                    "status_code": resp.status_code,
                    "final_url": str(resp.url),
                }),
            )
        else:
            add("reachability", "ok", "Erreichbar")

        # Feed validity only applies to RSS feeds.
        if source["source_type"] == "rss_feed":
            body = resp.text[:20000]
            looks_like_feed = (
                "<rss" in body or "<feed" in body or "<channel" in body
            )
            if not looks_like_feed:
                add("feed_validity", "error", "Kein gültiger RSS/Atom-Feed")
            else:
                # feedparser is blocking -> run it off the event loop.
                parsed = await asyncio.to_thread(feedparser.parse, body)
                if parsed.get("bozo") and not parsed.entries:
                    add(
                        "feed_validity", "error", "Feed fehlerhaft (bozo)",
                        json.dumps({
                            "bozo_exception": str(parsed.get("bozo_exception", "")),
                        }),
                    )
                elif not parsed.entries:
                    add("feed_validity", "warning", "Feed erreichbar aber leer")
                else:
                    add(
                        "feed_validity", "ok",
                        f"Feed gültig ({len(parsed.entries)} Einträge)",
                    )

    except httpx.TimeoutException:
        add("reachability", "error", "Timeout (15s)")
    except httpx.ConnectError as e:
        add("reachability", "error", f"Verbindung fehlgeschlagen: {e}")
    except Exception as e:
        add("reachability", "error", f"{type(e).__name__}: {e}")

    return results
|
||||
|
||||
|
||||
def _check_stale(source: dict) -> dict | None:
|
||||
"""Prüft ob eine Quelle veraltet ist (keine Artikel seit >30 Tagen)."""
|
||||
if source["source_type"] == "excluded":
|
||||
return None
|
||||
|
||||
article_count = source.get("article_count") or 0
|
||||
last_seen = source.get("last_seen_at")
|
||||
|
||||
if article_count == 0:
|
||||
return {
|
||||
"type": "stale",
|
||||
"status": "warning",
|
||||
"message": "Noch nie Artikel geliefert",
|
||||
}
|
||||
|
||||
if last_seen:
|
||||
try:
|
||||
from datetime import datetime
|
||||
last_dt = datetime.fromisoformat(last_seen)
|
||||
now = datetime.now()
|
||||
age_days = (now - last_dt).days
|
||||
if age_days > 30:
|
||||
return {
|
||||
"type": "stale",
|
||||
"status": "warning",
|
||||
"message": f"Letzter Artikel vor {age_days} Tagen",
|
||||
}
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _find_duplicates(sources: list[dict]) -> list[dict]:
|
||||
"""Findet doppelte Quellen (gleiche URL)."""
|
||||
duplicates = []
|
||||
url_map = {}
|
||||
|
||||
for s in sources:
|
||||
if not s["url"]:
|
||||
continue
|
||||
url_norm = s["url"].lower().rstrip("/")
|
||||
if url_norm in url_map:
|
||||
existing = url_map[url_norm]
|
||||
duplicates.append({
|
||||
"source_id": s["id"],
|
||||
"message": f"Doppelte URL wie '{existing['name']}' (ID {existing['id']})",
|
||||
"details": {"duplicate_of": existing["id"], "type": "url"},
|
||||
})
|
||||
else:
|
||||
url_map[url_norm] = s
|
||||
|
||||
return duplicates
|
||||
|
||||
|
||||
async def _save_check(
    db: aiosqlite.Connection, source_id: int, check_type: str,
    status: str, message: str, details: str | None = None,
):
    """Persist a single health-check result.

    Args:
        db: Open aiosqlite connection (the caller commits).
        source_id: ID of the checked source.
        check_type: e.g. "reachability", "feed_validity", "stale", "duplicate".
        status: "ok", "warning" or "error".
        message: Human-readable result text.
        details: Optional JSON string with extra context.
            (Annotation fixed from implicit-Optional `str = None` to
            `str | None = None`, matching the module's union style.)
    """
    await db.execute(
        "INSERT INTO source_health_checks "
        "(source_id, check_type, status, message, details) "
        "VALUES (?, ?, ?, ?, ?)",
        (source_id, check_type, status, message, details),
    )
|
||||
|
||||
|
||||
async def get_health_summary(db: aiosqlite.Connection) -> dict:
    """Return a summary of the latest health-check results.

    Args:
        db: Open aiosqlite connection (rows must be dict-convertible).

    Returns:
        Dict with "last_check", "total_checks", "errors", "warnings",
        "ok" and the full "checks" list (errors first, then warnings,
        then ok, each group sorted by source name).
    """
    cursor = await db.execute("""
        SELECT
            h.id, h.source_id, s.name, s.domain, s.url, s.source_type,
            h.check_type, h.status, h.message, h.details, h.checked_at
        FROM source_health_checks h
        JOIN sources s ON s.id = h.source_id
        ORDER BY
            CASE h.status WHEN 'error' THEN 0 WHEN 'warning' THEN 1 ELSE 2 END,
            s.name
    """)
    checks = [dict(row) for row in await cursor.fetchall()]

    # Tally the three known status values in a single pass.
    totals = {"error": 0, "warning": 0, "ok": 0}
    for check in checks:
        status = check["status"]
        if status in totals:
            totals[status] += 1

    cursor = await db.execute(
        "SELECT MAX(checked_at) as last_check FROM source_health_checks"
    )
    row = await cursor.fetchone()
    last_check = row["last_check"] if row else None

    return {
        "last_check": last_check,
        "total_checks": len(checks),
        "errors": totals["error"],
        "warnings": totals["warning"],
        "ok": totals["ok"],
        "checks": checks,
    }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""KI-gestützte Quellen-Vorschläge via Haiku."""
|
||||
"""KI-gestützte Quellen-Vorschläge via Haiku."""
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
@@ -12,8 +12,8 @@ logger = logging.getLogger("osint.source_suggester")
|
||||
|
||||
|
||||
async def generate_suggestions(db: aiosqlite.Connection) -> int:
|
||||
"""Generiert Quellen-Vorschläge basierend auf Health-Checks und Lückenanalyse."""
|
||||
logger.info("Starte Quellen-Vorschläge via Haiku...")
|
||||
"""Generiert Quellen-Vorschläge basierend auf Health-Checks und Lückenanalyse."""
|
||||
logger.info("Starte Quellen-Vorschläge via Haiku...")
|
||||
|
||||
# 1. Aktuelle Quellen laden
|
||||
cursor = await db.execute(
|
||||
@@ -33,13 +33,13 @@ async def generate_suggestions(db: aiosqlite.Connection) -> int:
|
||||
""")
|
||||
issues = [dict(row) for row in await cursor.fetchall()]
|
||||
|
||||
# 3. Alte pending-Vorschläge entfernen (älter als 30 Tage)
|
||||
# 3. Alte pending-Vorschläge entfernen (älter als 30 Tage)
|
||||
await db.execute(
|
||||
"DELETE FROM source_suggestions "
|
||||
"WHERE status = 'pending' AND created_at < datetime('now', '-30 days')"
|
||||
)
|
||||
|
||||
# 4. Quellen-Zusammenfassung für Haiku
|
||||
# 4. Quellen-Zusammenfassung für Haiku
|
||||
categories = {}
|
||||
for s in sources:
|
||||
cat = s["category"]
|
||||
@@ -67,7 +67,7 @@ async def generate_suggestions(db: aiosqlite.Connection) -> int:
|
||||
f"{issue['check_type']} = {issue['status']} - {issue['message']}\n"
|
||||
)
|
||||
|
||||
prompt = f"""Du bist ein OSINT-Analyst und verwaltest die Quellensammlung eines Lagebildmonitors für Sicherheitsbehörden.
|
||||
prompt = f"""Du bist ein OSINT-Analyst und verwaltest die Quellensammlung eines Lagebildmonitors für Sicherheitsbehörden.
|
||||
|
||||
Aktuelle Quellensammlung:{source_summary}{issues_summary}
|
||||
|
||||
@@ -78,13 +78,13 @@ Beachte:
|
||||
2. Fehlende wichtige OSINT-Quellen: Schlage "add_source" mit konkreter RSS-Feed-URL vor
|
||||
3. Fokus auf deutschsprachige + wichtige internationale Nachrichtenquellen
|
||||
4. Nur Quellen vorschlagen, die NICHT bereits vorhanden sind
|
||||
5. Maximal 5 Vorschläge
|
||||
5. Maximal 5 Vorschläge
|
||||
|
||||
Antworte NUR mit einem JSON-Array. Jedes Element:
|
||||
{{
|
||||
"type": "add_source|deactivate_source|fix_url|remove_source",
|
||||
"title": "Kurzer Titel",
|
||||
"description": "Begründung",
|
||||
"description": "Begründung",
|
||||
"priority": "low|medium|high",
|
||||
"source_id": null,
|
||||
"data": {{
|
||||
@@ -104,7 +104,7 @@ Nur das JSON-Array, kein anderer Text."""
|
||||
|
||||
json_match = re.search(r'\[.*\]', response, re.DOTALL)
|
||||
if not json_match:
|
||||
logger.warning("Keine Vorschläge von Haiku erhalten (kein JSON)")
|
||||
logger.warning("Keine Vorschläge von Haiku erhalten (kein JSON)")
|
||||
return 0
|
||||
|
||||
suggestions = json.loads(json_match.group(0))
|
||||
@@ -164,14 +164,14 @@ Nur das JSON-Array, kein anderer Text."""
|
||||
|
||||
await db.commit()
|
||||
logger.info(
|
||||
f"Quellen-Vorschläge: {count} neue Vorschläge generiert "
|
||||
f"Quellen-Vorschläge: {count} neue Vorschläge generiert "
|
||||
f"(Haiku: {usage.input_tokens} in / {usage.output_tokens} out / "
|
||||
f"${usage.cost_usd:.4f})"
|
||||
)
|
||||
return count
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler bei Quellen-Vorschlägen: {e}", exc_info=True)
|
||||
logger.error(f"Fehler bei Quellen-Vorschlägen: {e}", exc_info=True)
|
||||
return 0
|
||||
|
||||
|
||||
@@ -218,7 +218,7 @@ async def apply_suggestion(
|
||||
(url,),
|
||||
)
|
||||
if await cursor.fetchone():
|
||||
result["action"] = "übersprungen (URL bereits vorhanden)"
|
||||
result["action"] = "übersprungen (URL bereits vorhanden)"
|
||||
new_status = "rejected"
|
||||
else:
|
||||
await db.execute(
|
||||
@@ -230,7 +230,7 @@ async def apply_suggestion(
|
||||
)
|
||||
result["action"] = f"Quelle '{name}' angelegt"
|
||||
else:
|
||||
result["action"] = "übersprungen (keine URL)"
|
||||
result["action"] = "übersprungen (keine URL)"
|
||||
new_status = "rejected"
|
||||
|
||||
elif stype == "deactivate_source":
|
||||
@@ -242,7 +242,7 @@ async def apply_suggestion(
|
||||
)
|
||||
result["action"] = "Quelle deaktiviert"
|
||||
else:
|
||||
result["action"] = "übersprungen (keine source_id)"
|
||||
result["action"] = "übersprungen (keine source_id)"
|
||||
|
||||
elif stype == "remove_source":
|
||||
source_id = suggestion["source_id"]
|
||||
@@ -250,9 +250,9 @@ async def apply_suggestion(
|
||||
await db.execute(
|
||||
"DELETE FROM sources WHERE id = ?", (source_id,),
|
||||
)
|
||||
result["action"] = "Quelle gelöscht"
|
||||
result["action"] = "Quelle gelöscht"
|
||||
else:
|
||||
result["action"] = "übersprungen (keine source_id)"
|
||||
result["action"] = "übersprungen (keine source_id)"
|
||||
|
||||
elif stype == "fix_url":
|
||||
source_id = suggestion["source_id"]
|
||||
@@ -264,7 +264,7 @@ async def apply_suggestion(
|
||||
)
|
||||
result["action"] = f"URL aktualisiert auf {new_url}"
|
||||
else:
|
||||
result["action"] = "übersprungen (keine source_id oder URL)"
|
||||
result["action"] = "übersprungen (keine source_id oder URL)"
|
||||
|
||||
await db.execute(
|
||||
"UPDATE source_suggestions SET status = ?, reviewed_at = CURRENT_TIMESTAMP "
|
||||
|
||||
@@ -649,14 +649,14 @@ async def get_feeds_with_metadata(tenant_id: int = None, source_type: str = "rss
|
||||
try:
|
||||
if tenant_id:
|
||||
cursor = await db.execute(
|
||||
"SELECT name, url, domain, category, COALESCE(article_count, 0) AS article_count FROM sources "
|
||||
"SELECT name, url, domain, category, notes, COALESCE(article_count, 0) AS article_count FROM sources "
|
||||
"WHERE source_type = ? AND status = 'active' "
|
||||
"AND (tenant_id IS NULL OR tenant_id = ?)",
|
||||
(source_type, tenant_id),
|
||||
)
|
||||
else:
|
||||
cursor = await db.execute(
|
||||
"SELECT name, url, domain, category, COALESCE(article_count, 0) AS article_count FROM sources "
|
||||
"SELECT name, url, domain, category, notes, COALESCE(article_count, 0) AS article_count FROM sources "
|
||||
"WHERE source_type = ? AND status = 'active'",
|
||||
(source_type,),
|
||||
)
|
||||
|
||||
@@ -549,6 +549,31 @@ a:hover {
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.header-dropdown-action {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
width: 100%;
|
||||
background: transparent;
|
||||
border: 0;
|
||||
padding: 8px 12px;
|
||||
color: var(--text-secondary);
|
||||
font-size: 12px;
|
||||
font-family: inherit;
|
||||
cursor: pointer;
|
||||
border-radius: 6px;
|
||||
text-align: left;
|
||||
transition: background 0.15s ease, color 0.15s ease;
|
||||
}
|
||||
.header-dropdown-action:hover {
|
||||
background: var(--bg-hover, rgba(255, 255, 255, 0.04));
|
||||
color: var(--text-primary);
|
||||
}
|
||||
.header-dropdown-action svg {
|
||||
flex-shrink: 0;
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.header-license-badge {
|
||||
display: inline-block;
|
||||
font-size: 10px;
|
||||
@@ -1704,6 +1729,108 @@ a.dev-source-pill:hover {
|
||||
border-radius: var(--radius);
|
||||
background: var(--bg-primary);
|
||||
border: 1px solid var(--border);
|
||||
cursor: pointer;
|
||||
transition: border-color 0.15s ease, background 0.15s ease;
|
||||
outline: none;
|
||||
}
|
||||
.source-overview-item:hover {
|
||||
border-color: var(--accent);
|
||||
background: var(--bg-elevated);
|
||||
}
|
||||
.source-overview-item:focus-visible {
|
||||
box-shadow: 0 0 0 2px var(--tint-accent-strong);
|
||||
}
|
||||
.source-overview-item.active {
|
||||
border-color: var(--accent);
|
||||
background: var(--tint-accent-subtle);
|
||||
box-shadow: var(--glow-accent);
|
||||
}
|
||||
|
||||
/* Inline-Aufklapp-Bereich (volle Reihen-Breite, direkt unter dem geklickten Item) */
|
||||
.source-overview-detail {
|
||||
grid-column: 1 / -1;
|
||||
padding: var(--sp-md) var(--sp-lg);
|
||||
background: var(--bg-elevated);
|
||||
border: 1px solid var(--accent);
|
||||
border-radius: var(--radius);
|
||||
animation: source-detail-in 0.18s ease;
|
||||
}
|
||||
@keyframes source-detail-in {
|
||||
from { opacity: 0; transform: translateY(-4px); }
|
||||
to { opacity: 1; transform: translateY(0); }
|
||||
}
|
||||
.source-overview-detail-empty {
|
||||
font-size: 12px;
|
||||
color: var(--text-tertiary);
|
||||
font-style: italic;
|
||||
}
|
||||
.source-overview-detail-list {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
max-height: 320px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
.source-overview-detail-list::-webkit-scrollbar { width: 6px; }
|
||||
.source-overview-detail-list::-webkit-scrollbar-track { background: var(--bg-primary); border-radius: 3px; }
|
||||
.source-overview-detail-list::-webkit-scrollbar-thumb { background: var(--text-disabled); border-radius: 3px; }
|
||||
.source-overview-detail-list li {
|
||||
font-size: 12px;
|
||||
line-height: 1.4;
|
||||
padding: 4px 0;
|
||||
border-top: 1px dashed var(--border);
|
||||
display: grid;
|
||||
grid-template-columns: auto auto 1fr;
|
||||
gap: var(--sp-md);
|
||||
align-items: baseline;
|
||||
}
|
||||
.source-overview-detail-list li:first-child { border-top: none; }
|
||||
.source-overview-detail-list li a {
|
||||
color: var(--text-primary);
|
||||
text-decoration: none;
|
||||
}
|
||||
.source-overview-detail-list li a:hover {
|
||||
color: var(--accent);
|
||||
text-decoration: underline;
|
||||
}
|
||||
.source-overview-detail-num {
|
||||
font-family: var(--font-mono);
|
||||
font-size: 11px;
|
||||
font-weight: 700;
|
||||
color: var(--accent);
|
||||
min-width: 36px;
|
||||
text-align: right;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.source-overview-detail-num--none {
|
||||
color: var(--text-disabled);
|
||||
font-weight: 400;
|
||||
}
|
||||
.source-overview-detail-date {
|
||||
font-family: var(--font-mono);
|
||||
font-size: 11px;
|
||||
color: var(--text-tertiary);
|
||||
white-space: nowrap;
|
||||
}
|
||||
.source-overview-detail-headline {
|
||||
min-width: 0;
|
||||
overflow-wrap: anywhere;
|
||||
}
|
||||
@media (max-width: 600px) {
|
||||
.source-overview-detail-list li {
|
||||
grid-template-columns: auto 1fr;
|
||||
}
|
||||
.source-overview-detail-date {
|
||||
grid-column: 1 / -1;
|
||||
margin-left: 32px;
|
||||
}
|
||||
}
|
||||
@media (prefers-reduced-motion: reduce) {
|
||||
.source-overview-detail { animation: none; }
|
||||
.source-overview-item { transition: none; }
|
||||
}
|
||||
|
||||
.source-overview-name {
|
||||
@@ -2450,213 +2577,113 @@ a.dev-source-pill:hover {
|
||||
padding: 12px 20px 8px;
|
||||
}
|
||||
|
||||
/* Achsen-Container */
|
||||
.ht-axis {
|
||||
position: relative;
|
||||
height: 110px;
|
||||
/* === Timeline: Heatmap-Strip oben + vertikaler Newsfeed-Stream darunter === */
|
||||
.ht-tl {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--sp-md);
|
||||
}
|
||||
|
||||
/* Stündliches Layout: höher wegen Datums-Markern oben */
|
||||
.ht-axis--hourly {
|
||||
height: 130px;
|
||||
/* Heatmap-Strip */
|
||||
.ht-strip {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
padding: 4px 0 6px;
|
||||
}
|
||||
|
||||
/* Punkte-Bereich (über der Linie) */
|
||||
.ht-points {
|
||||
position: absolute;
|
||||
left: 4%;
|
||||
right: 4%;
|
||||
top: 0;
|
||||
height: 56px;
|
||||
.ht-strip-cells {
|
||||
display: grid;
|
||||
grid-auto-flow: column;
|
||||
grid-auto-columns: minmax(8px, 1fr);
|
||||
gap: 2px;
|
||||
height: 14px;
|
||||
}
|
||||
|
||||
.ht-axis--hourly .ht-points {
|
||||
top: 20px;
|
||||
}
|
||||
|
||||
/* Achsenlinie */
|
||||
.ht-axis-line {
|
||||
position: absolute;
|
||||
left: 2%;
|
||||
right: 2%;
|
||||
top: 60px;
|
||||
height: 2px;
|
||||
background: var(--border);
|
||||
}
|
||||
|
||||
.ht-axis--hourly .ht-axis-line {
|
||||
top: 80px;
|
||||
}
|
||||
|
||||
/* Datums-Marker (vertikale Linie + Datum oben, nur bei Stunden-Granularität) */
|
||||
.ht-day-markers {
|
||||
position: absolute;
|
||||
left: 4%;
|
||||
right: 4%;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.ht-day-marker {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
}
|
||||
|
||||
.ht-day-marker-label {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
transform: translateX(-50%);
|
||||
font-size: 10px;
|
||||
font-family: var(--font-mono);
|
||||
font-weight: 600;
|
||||
color: var(--accent);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.ht-day-marker-line {
|
||||
position: absolute;
|
||||
top: 14px;
|
||||
height: 66px;
|
||||
width: 1px;
|
||||
left: 0;
|
||||
background: var(--accent);
|
||||
opacity: 0.2;
|
||||
}
|
||||
|
||||
/* Punkt (Basis) */
|
||||
.ht-point {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
transform: translateX(-50%);
|
||||
border-radius: 50%;
|
||||
background: var(--text-disabled);
|
||||
border: 2px solid var(--bg-card);
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
z-index: 2;
|
||||
}
|
||||
|
||||
.ht-point:hover {
|
||||
box-shadow: var(--glow-accent);
|
||||
z-index: 4;
|
||||
}
|
||||
|
||||
.ht-point.active {
|
||||
box-shadow: var(--glow-accent-strong);
|
||||
z-index: 4;
|
||||
}
|
||||
|
||||
/* Dimmen: nicht-aktive Punkte verblassen wenn ein Punkt aktiv ist */
|
||||
.ht-points:has(.ht-point.active) .ht-point:not(.active) {
|
||||
opacity: 0.3;
|
||||
transition: opacity 0.3s ease;
|
||||
}
|
||||
|
||||
/* Pfeil über dem aktiven Punkt */
|
||||
.ht-point.active::after {
|
||||
content: '▼';
|
||||
position: absolute;
|
||||
bottom: calc(100% + 2px);
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
font-size: 10px;
|
||||
color: var(--accent);
|
||||
pointer-events: none;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
/* Snapshot-Punkt (Raute) */
|
||||
.ht-point.ht-snapshot-point {
|
||||
.ht-strip-cell {
|
||||
background: color-mix(in srgb, var(--accent) calc(var(--intensity) * 70%), var(--border));
|
||||
border-radius: 2px;
|
||||
transform: translateX(-50%) rotate(45deg);
|
||||
background: var(--accent);
|
||||
cursor: pointer;
|
||||
transition: transform 0.15s ease, box-shadow 0.15s ease;
|
||||
min-height: 12px;
|
||||
}
|
||||
.ht-strip-cell.empty {
|
||||
background: var(--border);
|
||||
opacity: 0.4;
|
||||
cursor: default;
|
||||
}
|
||||
.ht-strip-cell:hover:not(.empty) {
|
||||
transform: scaleY(1.6);
|
||||
box-shadow: var(--glow-accent);
|
||||
}
|
||||
|
||||
.ht-point.ht-snapshot-point .ht-tooltip,
|
||||
.ht-point.ht-snapshot-point .ht-point-count {
|
||||
transform: rotate(-45deg);
|
||||
.ht-strip-cell.has-snapshot {
|
||||
box-shadow: inset 0 -3px 0 var(--accent);
|
||||
}
|
||||
|
||||
.ht-point.ht-snapshot-point .ht-tooltip {
|
||||
transform: rotate(-45deg) translateX(-50%);
|
||||
transform-origin: bottom left;
|
||||
}
|
||||
|
||||
/* Gemischter Punkt (Gold-Kreis) */
|
||||
.ht-point.ht-mixed-point {
|
||||
.ht-strip-cell.active {
|
||||
background: var(--accent);
|
||||
border: 2px solid var(--bg-card);
|
||||
transform: scaleY(1.6);
|
||||
box-shadow: var(--glow-accent-strong), inset 0 -3px 0 var(--accent);
|
||||
z-index: 2;
|
||||
position: relative;
|
||||
}
|
||||
.ht-strip:has(.ht-strip-cell.active) .ht-strip-cell:not(.active):not(.empty) {
|
||||
opacity: 0.4;
|
||||
}
|
||||
|
||||
/* Tooltip (über dem Punkt) */
|
||||
.ht-tooltip {
|
||||
position: absolute;
|
||||
bottom: calc(100% + 6px);
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: var(--bg-secondary);
|
||||
color: var(--text-primary);
|
||||
font-size: 11px;
|
||||
padding: 3px 8px;
|
||||
/* Banner: aktiver Strip-Filter */
|
||||
.ht-strip-banner {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--sp-md);
|
||||
padding: 6px 12px;
|
||||
background: var(--tint-accent);
|
||||
border: 1px solid var(--accent);
|
||||
border-radius: var(--radius);
|
||||
white-space: nowrap;
|
||||
pointer-events: none;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
transition: opacity 0.15s ease, visibility 0.15s ease;
|
||||
border: 1px solid var(--border);
|
||||
z-index: 10;
|
||||
font-size: 12px;
|
||||
color: var(--text-primary);
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.ht-point:hover .ht-tooltip {
|
||||
opacity: 1;
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
/* Zahl unter dem Punkt */
|
||||
.ht-point-count {
|
||||
position: absolute;
|
||||
top: calc(100% + 6px);
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
font-size: 10px;
|
||||
font-family: var(--font-mono);
|
||||
color: var(--text-disabled);
|
||||
white-space: nowrap;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.ht-point.active .ht-point-count,
|
||||
.ht-point:hover .ht-point-count {
|
||||
.ht-strip-banner-icon {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
/* Achsen-Labels (unter der Linie) */
|
||||
.ht-axis-labels {
|
||||
position: absolute;
|
||||
left: 4%;
|
||||
right: 4%;
|
||||
top: 72px;
|
||||
height: 20px;
|
||||
}
|
||||
|
||||
.ht-axis--hourly .ht-axis-labels {
|
||||
top: 90px;
|
||||
}
|
||||
|
||||
.ht-axis-label {
|
||||
position: absolute;
|
||||
transform: translateX(-50%);
|
||||
font-size: 10px;
|
||||
}
|
||||
.ht-strip-banner-text {
|
||||
flex: 1;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
.ht-strip-banner-text strong {
|
||||
color: var(--accent);
|
||||
font-family: var(--font-mono);
|
||||
}
|
||||
.ht-strip-banner-close {
|
||||
border: 1px solid var(--accent);
|
||||
background: transparent;
|
||||
color: var(--accent);
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
padding: 2px 10px;
|
||||
border-radius: var(--radius);
|
||||
cursor: pointer;
|
||||
transition: background 0.15s ease;
|
||||
}
|
||||
.ht-strip-banner-close:hover {
|
||||
background: var(--accent);
|
||||
color: var(--bg-card);
|
||||
}
|
||||
.ht-strip-labels {
|
||||
display: grid;
|
||||
gap: 2px;
|
||||
font-size: 9px;
|
||||
font-family: var(--font-mono);
|
||||
color: var(--text-tertiary);
|
||||
}
|
||||
.ht-strip-label {
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
/* Leerer Zustand */
|
||||
/* Stream-Container */
|
||||
.ht-stream {
|
||||
margin-top: var(--sp-md);
|
||||
}
|
||||
.ht-empty {
|
||||
padding: 20px;
|
||||
text-align: center;
|
||||
@@ -2664,60 +2691,19 @@ a.dev-source-pill:hover {
|
||||
color: var(--text-tertiary);
|
||||
}
|
||||
|
||||
/* Detail-Panel */
|
||||
.ht-detail-panel {
|
||||
margin-top: 8px;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: var(--radius);
|
||||
background: var(--bg-secondary);
|
||||
animation: ht-slide-down 0.2s ease;
|
||||
/* Time-Group Flash beim Scrollen vom Strip */
|
||||
.vt-time-group--flash {
|
||||
animation: vt-group-flash 1.2s ease-out;
|
||||
}
|
||||
@keyframes vt-group-flash {
|
||||
0% { background: var(--tint-accent-strong); }
|
||||
100% { background: transparent; }
|
||||
}
|
||||
|
||||
@keyframes ht-slide-down {
|
||||
from { opacity: 0; transform: translateY(-8px); }
|
||||
to { opacity: 1; transform: translateY(0); }
|
||||
@media (prefers-reduced-motion: reduce) {
|
||||
.vt-time-group--flash { animation: none; }
|
||||
}
|
||||
|
||||
.ht-detail-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: 8px 12px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.ht-detail-title {
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
color: var(--accent);
|
||||
font-family: var(--font-mono);
|
||||
}
|
||||
|
||||
.ht-detail-close {
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--text-disabled);
|
||||
font-size: 18px;
|
||||
cursor: pointer;
|
||||
padding: 0 4px;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.ht-detail-close:hover {
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.ht-detail-content {
|
||||
max-height: 350px;
|
||||
overflow-y: auto;
|
||||
padding: 4px 12px;
|
||||
}
|
||||
|
||||
.ht-detail-content::-webkit-scrollbar { width: 6px; }
|
||||
.ht-detail-content::-webkit-scrollbar-track { background: var(--bg-primary); border-radius: 3px; }
|
||||
.ht-detail-content::-webkit-scrollbar-thumb { background: var(--text-disabled); border-radius: 3px; }
|
||||
.ht-detail-content::-webkit-scrollbar-thumb:hover { background: var(--text-secondary); }
|
||||
|
||||
/* === Briefing Listen === */
|
||||
.briefing-content ul {
|
||||
margin: 8px 0;
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<link rel="stylesheet" href="/static/vendor/leaflet.css">
|
||||
<link rel="stylesheet" href="/static/vendor/MarkerCluster.css">
|
||||
<link rel="stylesheet" href="/static/vendor/MarkerCluster.Default.css">
|
||||
<link rel="stylesheet" href="/static/css/style.css?v=20260316k">
|
||||
<link rel="stylesheet" href="/static/css/style.css?v=20260501h">
|
||||
<style>
|
||||
/* Export Modal Radio */
|
||||
.export-radio { display:flex; align-items:center; gap:10px; padding:8px 12px; cursor:pointer; border-radius:var(--radius-sm); transition:background 0.15s; border:1px solid transparent; margin-bottom:4px; }
|
||||
@@ -72,6 +72,11 @@
|
||||
<span class="credits-percent" id="credits-percent"></span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="credits-divider"></div>
|
||||
<button class="header-dropdown-action" type="button" onclick="AIDisclaimer && AIDisclaimer.show()">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true"><circle cx="12" cy="12" r="10"/><path d="M12 16v-4"/><path d="M12 8h.01"/></svg>
|
||||
<span>Über KI-Inhalte</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="header-license-warning" id="header-license-warning"></div>
|
||||
@@ -118,8 +123,14 @@
|
||||
<div id="archived-incidents" aria-live="polite" style="display:none;"></div>
|
||||
</div>
|
||||
<div class="sidebar-sources-link">
|
||||
<button class="btn btn-secondary btn-full btn-small" onclick="App.openSourceManagement()">Quellen verwalten</button>
|
||||
<button class="btn btn-secondary btn-full btn-small sidebar-feedback-btn" onclick="App.openFeedback()">Feedback senden</button>
|
||||
<button class="btn btn-secondary btn-full btn-small" onclick="App.openSourceManagement()" title="Quellen verwalten">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true"><ellipse cx="12" cy="5" rx="9" ry="3"/><path d="M3 5v14c0 1.66 4.03 3 9 3s9-1.34 9-3V5"/><path d="M3 12c0 1.66 4.03 3 9 3s9-1.34 9-3"/></svg>
|
||||
<span>Quellen</span>
|
||||
</button>
|
||||
<button class="btn btn-secondary btn-full btn-small sidebar-feedback-btn" onclick="App.openFeedback()" title="Feedback senden">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true"><rect width="20" height="16" x="2" y="4" rx="2"/><path d="m22 7-10 5L2 7"/></svg>
|
||||
<span>Feedback</span>
|
||||
</button>
|
||||
<!-- Tutorial-Einstieg temporaer deaktiviert (Ueberarbeitung) - reaktivieren durch Entfernen der Kommentarzeichen:
|
||||
<button class="btn btn-secondary btn-full btn-small" onclick="Tutorial.start()" title="Interaktiven Rundgang starten">Rundgang starten</button>
|
||||
-->
|
||||
@@ -646,8 +657,8 @@
|
||||
<script src="/static/js/ws.js?v=20260316b"></script>
|
||||
<script src="/static/js/components.js?v=20260427a"></script>
|
||||
<script src="/static/js/layout.js?v=20260316b"></script>
|
||||
<script src="/static/js/pipeline.js?v=20260501a"></script>
|
||||
<script src="/static/js/app.js?v=20260427c"></script>
|
||||
<script src="/static/js/pipeline.js?v=20260501i"></script>
|
||||
<script src="/static/js/app.js?v=20260501h"></script>
|
||||
<script src="/static/js/cluster-data.js?v=20260322f"></script>
|
||||
<script src="/static/js/tutorial.js?v=20260316z"></script>
|
||||
<script src="/static/js/chat.js?v=20260422a"></script>
|
||||
@@ -738,5 +749,6 @@
|
||||
</div>
|
||||
|
||||
<script src="/static/js/update-system.js"></script>
|
||||
<script src="/static/js/ai-disclaimer.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
195
src/static/js/ai-disclaimer.js
Normale Datei
195
src/static/js/ai-disclaimer.js
Normale Datei
@@ -0,0 +1,195 @@
|
||||
/**
|
||||
* AI-Hallucination-Disclaimer fuer den AegisSight Monitor.
|
||||
*
|
||||
* Zeigt:
|
||||
* 1) Beim ersten Besuch (oder bei neuem v-Bump) ein Modal mit Hinweisen
|
||||
* zur Fehlbarkeit von KI-Modellen.
|
||||
* 2) Im Header-User-Dropdown immer einen Eintrag "Ueber KI-Inhalte",
|
||||
* ueber den der User das Modal jederzeit erneut oeffnen kann.
|
||||
*
|
||||
* Persistenz:
|
||||
* localStorage 'aegis_ai_disclaimer_seen' -> Versionsstring (z.B. "v1").
|
||||
* Wenn die Version sich aendert (Wortlaut-Update), erscheint das Modal
|
||||
* beim naechsten Login erneut.
|
||||
*/
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
const STORAGE_KEY = 'aegis_ai_disclaimer_seen';
|
||||
const CURRENT_VERSION = 'v1';
|
||||
|
||||
// ---- DOM-Helpers (analog zu update-system.js) ----
|
||||
function el(tag, attrs, ...children) {
|
||||
const e = document.createElement(tag);
|
||||
for (const k in (attrs || {})) {
|
||||
if (k === 'class') e.className = attrs[k];
|
||||
else if (k === 'html') e.innerHTML = attrs[k];
|
||||
else if (k.startsWith('on')) e.addEventListener(k.slice(2), attrs[k]);
|
||||
else e.setAttribute(k, attrs[k]);
|
||||
}
|
||||
for (const c of children) {
|
||||
if (c == null) continue;
|
||||
e.appendChild(typeof c === 'string' ? document.createTextNode(c) : c);
|
||||
}
|
||||
return e;
|
||||
}
|
||||
|
||||
function injectStyles() {
|
||||
if (document.getElementById('aegis-aidisc-styles')) return;
|
||||
const css = `
|
||||
#aegis-aidisc-overlay {
|
||||
position: fixed; inset: 0; background: rgba(0,0,0,0.55); z-index: 99998;
|
||||
backdrop-filter: blur(3px);
|
||||
display: flex; align-items: center; justify-content: center; padding: 24px;
|
||||
animation: aegis-aidisc-fade 0.25s ease;
|
||||
}
|
||||
@keyframes aegis-aidisc-fade { from { opacity: 0; } to { opacity: 1; } }
|
||||
#aegis-aidisc-modal {
|
||||
background: var(--bg-card);
|
||||
color: var(--text-primary);
|
||||
border-radius: 14px;
|
||||
border: 1px solid var(--border);
|
||||
box-shadow: 0 24px 80px rgba(0,0,0,0.4);
|
||||
font-family: 'Inter', -apple-system, sans-serif;
|
||||
max-width: 580px; width: 100%; max-height: 85vh; overflow: hidden;
|
||||
display: flex; flex-direction: column;
|
||||
}
|
||||
#aegis-aidisc-modal header {
|
||||
padding: 22px 28px 18px; border-bottom: 1px solid var(--border);
|
||||
display: flex; align-items: center; gap: 12px;
|
||||
}
|
||||
#aegis-aidisc-modal header svg { color: var(--accent); flex-shrink: 0; }
|
||||
#aegis-aidisc-modal h2 { margin: 0; color: var(--accent); font-size: 1.25rem; font-weight: 700; }
|
||||
#aegis-aidisc-modal .body { padding: 18px 28px; overflow-y: auto; line-height: 1.55; }
|
||||
#aegis-aidisc-modal .body p { margin: 0 0 12px; color: var(--text-primary); font-size: 0.94rem; }
|
||||
#aegis-aidisc-modal .body strong { color: var(--accent); }
|
||||
#aegis-aidisc-modal .body ul { margin: 8px 0 14px; padding-left: 22px; }
|
||||
#aegis-aidisc-modal .body li { margin-bottom: 6px; color: var(--text-secondary); font-size: 0.92rem; }
|
||||
#aegis-aidisc-modal .footnote {
|
||||
margin-top: 10px; padding-top: 12px; border-top: 1px solid var(--border);
|
||||
color: var(--text-tertiary); font-size: 0.82rem;
|
||||
}
|
||||
#aegis-aidisc-modal footer {
|
||||
padding: 14px 28px 20px; border-top: 1px solid var(--border);
|
||||
display: flex; justify-content: flex-end; gap: 10px;
|
||||
}
|
||||
#aegis-aidisc-modal footer button {
|
||||
background: var(--accent); color: #fff; border: 0; padding: 10px 22px;
|
||||
border-radius: 6px; font: inherit; font-size: 0.92rem; font-weight: 600;
|
||||
cursor: pointer;
|
||||
}
|
||||
#aegis-aidisc-modal footer button:hover { background: var(--accent-hover); }
|
||||
#aegis-aidisc-modal footer button.secondary {
|
||||
background: transparent; color: var(--text-secondary); border: 1px solid var(--border);
|
||||
}
|
||||
#aegis-aidisc-modal footer button.secondary:hover {
|
||||
background: var(--bg-hover, rgba(255,255,255,0.04)); color: var(--text-primary);
|
||||
}`;
|
||||
document.head.appendChild(el('style', { id: 'aegis-aidisc-styles', html: css }));
|
||||
}
|
||||
|
||||
// ---- Modal-Aufbau ----
|
||||
function buildModal(opts) {
|
||||
const isFromUser = !!(opts && opts.fromUserAction);
|
||||
|
||||
// Lucide info-Icon (gleiches Pattern wie .info-icon im Repo)
|
||||
const headerIcon = el('span', {
|
||||
html: '<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" '
|
||||
+ 'viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" '
|
||||
+ 'stroke-linecap="round" stroke-linejoin="round">'
|
||||
+ '<circle cx="12" cy="12" r="10"/>'
|
||||
+ '<path d="M12 16v-4"/><path d="M12 8h.01"/></svg>'
|
||||
});
|
||||
|
||||
const body = el('div', { class: 'body' });
|
||||
body.appendChild(el('p', null,
|
||||
'Der AegisSight Monitor nutzt Künstliche Intelligenz '
|
||||
+ 'zur Analyse, Übersetzung und Zusammenfassung von Nachrichten.'));
|
||||
|
||||
const warn = el('p');
|
||||
warn.innerHTML = '<strong>KI-Modelle können Fehler machen</strong> '
|
||||
+ '(sogenannte „Halluzinationen"): erfundene Details, falsche Verbindungen oder '
|
||||
+ 'ungenaue Zusammenfassungen sind möglich, auch wenn der Text plausibel klingt.';
|
||||
body.appendChild(warn);
|
||||
|
||||
body.appendChild(el('p', null, 'Wir empfehlen daher:'));
|
||||
body.appendChild(el('ul', null,
|
||||
el('li', null, 'Wichtige Informationen mit den verlinkten Quellen verifizieren'),
|
||||
el('li', null, 'Bei kritischen Entscheidungen die Originalartikel prüfen'),
|
||||
el('li', null, 'Faktenchecks als Hinweis verstehen, nicht als endgültige Wahrheit')
|
||||
));
|
||||
|
||||
body.appendChild(el('p', { class: 'footnote' },
|
||||
'Diesen Hinweis findest du jederzeit wieder im Menü oben rechts unter „Über KI-Inhalte".'));
|
||||
|
||||
const closeAndStore = () => {
|
||||
try { localStorage.setItem(STORAGE_KEY, CURRENT_VERSION); } catch (e) {}
|
||||
overlay.remove();
|
||||
document.removeEventListener('keydown', escHandler);
|
||||
};
|
||||
const closeOnly = () => {
|
||||
overlay.remove();
|
||||
document.removeEventListener('keydown', escHandler);
|
||||
};
|
||||
|
||||
const footer = el('footer', null);
|
||||
if (!isFromUser) {
|
||||
footer.appendChild(el('button', { class: 'secondary', onclick: closeOnly }, 'Später nochmal'));
|
||||
}
|
||||
footer.appendChild(el('button', { onclick: closeAndStore }, 'Verstanden'));
|
||||
|
||||
const overlay = el('div', { id: 'aegis-aidisc-overlay' },
|
||||
el('div', { id: 'aegis-aidisc-modal' },
|
||||
el('header', null, headerIcon, el('h2', null, 'Hinweis zu KI-generierten Inhalten')),
|
||||
body,
|
||||
footer
|
||||
)
|
||||
);
|
||||
|
||||
function escHandler(ev) {
|
||||
if (ev.key === 'Escape' && document.getElementById('aegis-aidisc-overlay')) {
|
||||
// ESC = wie "Verstanden" beim erstmaligen Anzeigen, sonst nur schliessen
|
||||
if (isFromUser) closeOnly(); else closeAndStore();
|
||||
}
|
||||
}
|
||||
overlay.addEventListener('click', (ev) => {
|
||||
if (ev.target === overlay) {
|
||||
if (isFromUser) closeOnly(); else closeAndStore();
|
||||
}
|
||||
});
|
||||
document.addEventListener('keydown', escHandler);
|
||||
|
||||
return overlay;
|
||||
}
|
||||
|
||||
function show(opts) {
|
||||
if (document.getElementById('aegis-aidisc-overlay')) return;
|
||||
injectStyles();
|
||||
document.body.appendChild(buildModal(opts));
|
||||
}
|
||||
|
||||
function init() {
|
||||
// Nur auf der Dashboard-Seite zeigen, nicht auf der Login-Seite
|
||||
if (!document.body || document.body.classList.contains('login-page')) return;
|
||||
|
||||
injectStyles();
|
||||
let seenVersion = '';
|
||||
try { seenVersion = localStorage.getItem(STORAGE_KEY) || ''; } catch (e) {}
|
||||
if (seenVersion !== CURRENT_VERSION) {
|
||||
// Etwas verzoegern, damit Hauptdashboard sichtbar ist bevor Modal kommt
|
||||
setTimeout(() => show({ fromUserAction: false }), 600);
|
||||
}
|
||||
}
|
||||
|
||||
// Globaler Zugriff zum manuellen Oeffnen aus dem Header-Dropdown
|
||||
window.AIDisclaimer = {
|
||||
show: () => show({ fromUserAction: true }),
|
||||
VERSION: CURRENT_VERSION,
|
||||
};
|
||||
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', init);
|
||||
} else {
|
||||
init();
|
||||
}
|
||||
})();
|
||||
@@ -67,6 +67,29 @@ const API = {
|
||||
} else if (typeof detail === 'object' && detail !== null) {
|
||||
detail = JSON.stringify(detail);
|
||||
}
|
||||
|
||||
// Lizenz-Status aus Header auslesen (vom Backend gesetzt bei 403)
|
||||
const licStatus = response.headers.get('X-License-Status');
|
||||
if (response.status === 403 && licStatus && typeof App !== 'undefined') {
|
||||
if (!App.user) App.user = {};
|
||||
App.user.read_only = true;
|
||||
App.user.read_only_reason = licStatus;
|
||||
const warningEl = document.getElementById('header-license-warning');
|
||||
if (warningEl) {
|
||||
let text = 'Nur Lesezugriff';
|
||||
if (licStatus === 'budget_exceeded') text = 'Token-Budget aufgebraucht – nur Lesezugriff. Bitte Verwaltung kontaktieren.';
|
||||
else if (licStatus === 'expired') text = 'Lizenz abgelaufen – nur Lesezugriff';
|
||||
else if (licStatus === 'no_license') text = 'Keine aktive Lizenz – nur Lesezugriff';
|
||||
else if (licStatus === 'org_disabled') text = 'Organisation deaktiviert – nur Lesezugriff';
|
||||
warningEl.textContent = text;
|
||||
warningEl.classList.add('visible');
|
||||
}
|
||||
if (typeof App._updateRefreshButton === 'function') App._updateRefreshButton(false);
|
||||
if (typeof UI !== 'undefined' && UI.showToast) {
|
||||
UI.showToast(detail || 'Lizenz-Beschränkung – nur Lesezugriff', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
throw new ApiError(response.status, detail);
|
||||
}
|
||||
|
||||
|
||||
@@ -433,7 +433,7 @@ const App = {
|
||||
_editingSourceId: null,
|
||||
_timelineFilter: 'all',
|
||||
_timelineRange: 'all',
|
||||
_activePointIndex: null,
|
||||
_activeStripWindow: null,
|
||||
_timelineSearchTimer: null,
|
||||
_pendingComplete: null,
|
||||
_pendingCompleteTimer: null,
|
||||
@@ -450,6 +450,7 @@ const App = {
|
||||
|
||||
try {
|
||||
const user = await API.getMe();
|
||||
this.user = user;
|
||||
this._currentUsername = user.email;
|
||||
document.getElementById('header-user').textContent = user.email;
|
||||
|
||||
@@ -515,11 +516,27 @@ const App = {
|
||||
});
|
||||
}
|
||||
|
||||
// Warnung bei abgelaufener Lizenz
|
||||
// Warnung bei Read-Only (Lizenz abgelaufen oder Token-Budget aufgebraucht)
|
||||
const warningEl = document.getElementById('header-license-warning');
|
||||
if (warningEl && user.read_only) {
|
||||
warningEl.textContent = 'Lizenz abgelaufen – nur Lesezugriff';
|
||||
warningEl.classList.add('visible');
|
||||
if (warningEl) {
|
||||
if (user.read_only) {
|
||||
let text = 'Nur Lesezugriff';
|
||||
const reason = user.read_only_reason;
|
||||
if (reason === 'budget_exceeded') {
|
||||
text = 'Token-Budget aufgebraucht – nur Lesezugriff. Für Aufstockung oder Upgrade bitte info@aegis-sight.de kontaktieren.';
|
||||
} else if (reason === 'expired') {
|
||||
text = 'Lizenz abgelaufen – nur Lesezugriff';
|
||||
} else if (reason === 'no_license') {
|
||||
text = 'Keine aktive Lizenz – nur Lesezugriff';
|
||||
} else if (reason === 'org_disabled') {
|
||||
text = 'Organisation deaktiviert – nur Lesezugriff';
|
||||
}
|
||||
warningEl.textContent = text;
|
||||
warningEl.classList.add('visible');
|
||||
} else {
|
||||
warningEl.textContent = '';
|
||||
warningEl.classList.remove('visible');
|
||||
}
|
||||
}
|
||||
|
||||
// --- Global Admin: Org-Switcher (herausnehmbar) ---
|
||||
@@ -601,6 +618,10 @@ const App = {
|
||||
const inc = this.incidents.find(i => i.id === id);
|
||||
const isFirst = inc && !inc.has_summary;
|
||||
UI.showProgress('queued', { queue_position: idx + 1 }, id, isFirst);
|
||||
// Pipeline-Reset auch nach F5: aktive Lage in Queue -> Icons grau
|
||||
if (id === this.currentIncidentId && typeof Pipeline !== 'undefined' && Pipeline.beginQueue) {
|
||||
Pipeline.beginQueue(id);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -866,6 +887,97 @@ const App = {
|
||||
}
|
||||
},
|
||||
|
||||
/** Klick auf eine Quellen-Box: Liste der Artikel inline aufklappen (mutual-exclusive). */
|
||||
toggleSourceOverviewDetail(el) {
|
||||
if (!el) return;
|
||||
const grid = el.parentElement;
|
||||
if (!grid) return;
|
||||
const sourceName = el.dataset.source || '';
|
||||
const wasActive = el.classList.contains('active');
|
||||
|
||||
// Alle anderen schliessen + bestehendes Detail entfernen
|
||||
grid.querySelectorAll('.source-overview-item.active').forEach(it => {
|
||||
it.classList.remove('active');
|
||||
it.setAttribute('aria-expanded', 'false');
|
||||
});
|
||||
const existingDetail = grid.querySelector('.source-overview-detail');
|
||||
if (existingDetail) existingDetail.remove();
|
||||
|
||||
// Wenn das geklickte Item bereits aktiv war: nur schliessen
|
||||
if (wasActive) return;
|
||||
|
||||
// Neues Detail einfuegen direkt nach dem geklickten Item
|
||||
el.classList.add('active');
|
||||
el.setAttribute('aria-expanded', 'true');
|
||||
|
||||
const type = this._currentIncidentType;
|
||||
const getDate = (a) => (type === 'research' && a.published_at) ? a.published_at : (a.collected_at || a.published_at);
|
||||
const articles = (this._currentArticles || [])
|
||||
.filter(a => (a.source || 'Unbekannt') === sourceName)
|
||||
.sort((a, b) => {
|
||||
const ta = new Date(getDate(a) || 0).getTime();
|
||||
const tb = new Date(getDate(b) || 0).getTime();
|
||||
return tb - ta;
|
||||
});
|
||||
|
||||
// Lagebild-Quellennummer pro Artikel ermitteln (matcht Artikel zu sources_json)
|
||||
const normalize = (s) => (s || '').toLowerCase().replace(/^(der|die|das)\s+/, '').replace(/\s+/g, ' ').trim();
|
||||
const sourcesList = this._currentSources || [];
|
||||
const urlToNr = new Map();
|
||||
sourcesList.forEach(s => {
|
||||
if (s.url && s.nr != null) urlToNr.set(String(s.url).trim(), s.nr);
|
||||
});
|
||||
const findNr = (a) => {
|
||||
// 1) Exakter URL-Match
|
||||
if (a.source_url) {
|
||||
const exact = urlToNr.get(String(a.source_url).trim());
|
||||
if (exact != null) return exact;
|
||||
}
|
||||
// 2) Fallback: Match via Quellen-Namen (kann mehrfach treffen, nimm erstes)
|
||||
if (a.source) {
|
||||
const target = normalize(a.source);
|
||||
const hit = sourcesList.find(s => s.nr != null && normalize(s.name) === target);
|
||||
if (hit) return hit.nr;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const detail = document.createElement('div');
|
||||
detail.className = 'source-overview-detail';
|
||||
if (articles.length === 0) {
|
||||
detail.innerHTML = '<div class="source-overview-detail-empty">Keine Artikel gefunden.</div>';
|
||||
} else {
|
||||
const fmtDate = (ts) => {
|
||||
if (!ts) return '—';
|
||||
try {
|
||||
const d = new Date(ts);
|
||||
if (isNaN(d.getTime())) return '—';
|
||||
return d.toLocaleDateString('de-DE', { day: '2-digit', month: '2-digit', year: '2-digit', timeZone: TIMEZONE })
|
||||
+ ' '
|
||||
+ d.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit', timeZone: TIMEZONE });
|
||||
} catch (e) { return '—'; }
|
||||
};
|
||||
const items = articles.map(a => {
|
||||
const nr = findNr(a);
|
||||
const numHtml = nr != null
|
||||
? `<span class="source-overview-detail-num">[${UI.escape(String(nr))}]</span>`
|
||||
: `<span class="source-overview-detail-num source-overview-detail-num--none" title="Nicht im Lagebild zitiert">—</span>`;
|
||||
const dateStr = fmtDate(getDate(a));
|
||||
const headline = UI.escape(a.headline_de || a.headline || '(ohne Titel)');
|
||||
const inner = a.source_url
|
||||
? `<a href="${UI.escape(a.source_url)}" target="_blank" rel="noopener">${headline}</a>`
|
||||
: headline;
|
||||
return `<li>
|
||||
${numHtml}
|
||||
<span class="source-overview-detail-date">${UI.escape(dateStr)}</span>
|
||||
<span class="source-overview-detail-headline">${inner}</span>
|
||||
</li>`;
|
||||
}).join('');
|
||||
detail.innerHTML = `<ul class="source-overview-detail-list">${items}</ul>`;
|
||||
}
|
||||
el.insertAdjacentElement('afterend', detail);
|
||||
},
|
||||
|
||||
/** Restliche Artikel seitenweise im Hintergrund nachladen und in _currentArticles mergen. */
|
||||
async _loadRemainingArticlesInBackground(incidentId) {
|
||||
const BATCH = 500;
|
||||
@@ -1038,7 +1150,7 @@ const App = {
|
||||
}
|
||||
this._timelineFilter = 'all';
|
||||
this._timelineRange = 'all';
|
||||
this._activePointIndex = null;
|
||||
this._activeStripWindow = null;
|
||||
const _tsEl = document.getElementById('timeline-search'); if (_tsEl) _tsEl.value = '';
|
||||
document.querySelectorAll('.ht-filter-btn').forEach(btn => {
|
||||
const isActive = btn.dataset.filter === 'all';
|
||||
@@ -1114,6 +1226,9 @@ const App = {
|
||||
this._timelineSearchTimer = setTimeout(() => this.rerenderTimeline(), 250);
|
||||
},
|
||||
|
||||
/** Heatmap-Strip oben + vertikaler Newsfeed-Stream darunter.
|
||||
* Klick auf Heatmap-Balken: Stream filtert auf das Zeitfenster (aktive Balken hervorgehoben).
|
||||
*/
|
||||
rerenderTimeline() {
|
||||
const container = document.getElementById('timeline');
|
||||
if (!container) return;
|
||||
@@ -1124,271 +1239,216 @@ const App = {
|
||||
let entries = this._collectEntries(filterType, searchTerm, range);
|
||||
this._updateTimelineCount(entries);
|
||||
|
||||
// Strip nutzt IMMER alle Eintraege im Range (unabhaengig von Filter/Search/Strip-Window)
|
||||
const stripEntries = this._collectEntries('all', '', range);
|
||||
stripEntries.sort((a, b) => new Date(a.timestamp || 0) - new Date(b.timestamp || 0));
|
||||
|
||||
// Wenn ein Heatmap-Balken aktiv ist: Stream zusaetzlich auf dieses Zeitfenster filtern
|
||||
const win = this._activeStripWindow;
|
||||
if (win && entries.length > 0) {
|
||||
entries = entries.filter(e => {
|
||||
const ts = new Date(e.timestamp || 0).getTime();
|
||||
return ts >= win.start && ts < win.end;
|
||||
});
|
||||
}
|
||||
|
||||
let html = '<div class="ht-tl">';
|
||||
if (stripEntries.length > 0) {
|
||||
html += this._renderTimelineStrip(stripEntries);
|
||||
}
|
||||
|
||||
// Banner mit aktivem Filter
|
||||
if (win) {
|
||||
html += `<div class="ht-strip-banner">
|
||||
<span class="ht-strip-banner-icon" aria-hidden="true">▼</span>
|
||||
<span class="ht-strip-banner-text">Gefiltert auf <strong>${UI.escape(win.label)}</strong> · ${entries.length} Eintr${entries.length === 1 ? 'ag' : 'äge'}</span>
|
||||
<button class="ht-strip-banner-close" onclick="App.clearStripWindow()" aria-label="Filter aufheben">Filter aufheben</button>
|
||||
</div>`;
|
||||
}
|
||||
|
||||
html += '<div class="ht-stream">';
|
||||
if (entries.length === 0) {
|
||||
this._activePointIndex = null;
|
||||
container.innerHTML = (searchTerm || range !== 'all')
|
||||
? '<div class="ht-empty">Keine Einträge im gewählten Zeitraum.</div>'
|
||||
: '<div class="ht-empty">Noch keine Meldungen. Starte eine Recherche mit "Aktualisieren".</div>';
|
||||
return;
|
||||
html += win
|
||||
? '<div class="ht-empty">Keine Einträge in diesem Zeitfenster.</div>'
|
||||
: (searchTerm || range !== 'all')
|
||||
? '<div class="ht-empty">Keine Einträge im gewählten Zeitraum.</div>'
|
||||
: '<div class="ht-empty">Noch keine Meldungen. Starte eine Recherche mit "Aktualisieren".</div>';
|
||||
} else {
|
||||
html += this._renderVerticalStream(entries);
|
||||
}
|
||||
|
||||
entries.sort((a, b) => new Date(a.timestamp || 0) - new Date(b.timestamp || 0));
|
||||
|
||||
const granularity = this._calcGranularity(entries, range);
|
||||
let buckets = this._buildBuckets(entries, granularity);
|
||||
buckets = this._mergeCloseBuckets(buckets);
|
||||
|
||||
// Aktiven Index validieren
|
||||
if (this._activePointIndex !== null && this._activePointIndex >= buckets.length) {
|
||||
this._activePointIndex = null;
|
||||
}
|
||||
|
||||
// Achsen-Bereich
|
||||
const rangeStart = buckets[0].timestamp;
|
||||
const rangeEnd = buckets[buckets.length - 1].timestamp;
|
||||
const maxCount = Math.max(...buckets.map(b => b.entries.length));
|
||||
|
||||
// Stunden- vs. Tages-Granularität
|
||||
const isHourly = granularity === 'hour';
|
||||
const axisLabels = this._buildAxisLabels(buckets, granularity, true);
|
||||
|
||||
// HTML aufbauen
|
||||
let html = `<div class="ht-axis${isHourly ? ' ht-axis--hourly' : ''}">`;
|
||||
|
||||
// Datums-Marker (immer anzeigen, ausgedünnt)
|
||||
const dayMarkers = this._thinLabels(this._buildDayMarkers(buckets, rangeStart, rangeEnd), 10);
|
||||
html += '<div class="ht-day-markers">';
|
||||
dayMarkers.forEach(m => {
|
||||
html += `<div class="ht-day-marker" style="left:${m.pos}%;">`;
|
||||
html += `<div class="ht-day-marker-label">${UI.escape(m.text)}</div>`;
|
||||
html += `<div class="ht-day-marker-line"></div>`;
|
||||
html += `</div>`;
|
||||
});
|
||||
html += '</div>';
|
||||
|
||||
// Punkte
|
||||
html += '<div class="ht-points">';
|
||||
buckets.forEach((bucket, idx) => {
|
||||
const pos = this._bucketPositionPercent(bucket, rangeStart, rangeEnd, buckets.length);
|
||||
const size = this._calcPointSize(bucket.entries.length, maxCount);
|
||||
const hasSnapshots = bucket.entries.some(e => e.kind === 'snapshot');
|
||||
const hasArticles = bucket.entries.some(e => e.kind === 'article');
|
||||
|
||||
let pointClass = 'ht-point';
|
||||
if (filterType === 'snapshots') {
|
||||
pointClass += ' ht-snapshot-point';
|
||||
} else if (hasSnapshots) {
|
||||
pointClass += ' ht-mixed-point';
|
||||
}
|
||||
if (this._activePointIndex === idx) pointClass += ' active';
|
||||
|
||||
const tooltip = `${bucket.label}: ${bucket.entries.length} Eintr${bucket.entries.length === 1 ? 'ag' : 'äge'}`;
|
||||
|
||||
html += `<div class="${pointClass}" style="left:${pos}%;width:${size}px;height:${size}px;" onclick="App.openTimelineDetail(${idx})" data-idx="${idx}">`;
|
||||
html += `<div class="ht-tooltip">${UI.escape(tooltip)}</div>`;
|
||||
html += `</div>`;
|
||||
});
|
||||
html += '</div>';
|
||||
|
||||
// Achsenlinie
|
||||
html += '<div class="ht-axis-line"></div>';
|
||||
|
||||
// Achsen-Labels (ausgedünnt um Überlappung zu vermeiden)
|
||||
const thinned = this._thinLabels(axisLabels);
|
||||
html += '<div class="ht-axis-labels">';
|
||||
thinned.forEach(lbl => {
|
||||
html += `<div class="ht-axis-label" style="left:${lbl.pos}%;">${UI.escape(lbl.text)}</div>`;
|
||||
});
|
||||
html += '</div>';
|
||||
html += '</div>';
|
||||
|
||||
// Detail-Panel (wenn ein Punkt aktiv ist)
|
||||
if (this._activePointIndex !== null && this._activePointIndex < buckets.length) {
|
||||
html += this._renderDetailPanel(buckets[this._activePointIndex]);
|
||||
}
|
||||
|
||||
container.innerHTML = html;
|
||||
},
|
||||
|
||||
_calcGranularity(entries, range) {
|
||||
if (entries.length < 2) return 'day';
|
||||
const timestamps = entries.map(e => new Date(e.timestamp || 0).getTime()).filter(t => t > 0);
|
||||
if (timestamps.length < 2) return 'day';
|
||||
const span = Math.max(...timestamps) - Math.min(...timestamps);
|
||||
if (range === '24h' || span <= 48 * 60 * 60 * 1000) return 'hour';
|
||||
/** Granularitaets-Heuristik fuer den Newsfeed: Stunden bei kurzen Spannen, sonst Tage. */
|
||||
_calcGranularity(entries) {
|
||||
if (!entries || entries.length < 2) return 'day';
|
||||
const ts = entries.map(e => new Date(e.timestamp || 0).getTime()).filter(t => t > 0);
|
||||
if (ts.length < 2) return 'day';
|
||||
const span = Math.max(...ts) - Math.min(...ts);
|
||||
if (span <= 48 * 60 * 60 * 1000) return 'hour';
|
||||
return 'day';
|
||||
},
|
||||
|
||||
_buildBuckets(entries, granularity) {
|
||||
const bucketMap = {};
|
||||
entries.forEach(e => {
|
||||
const d = new Date(e.timestamp || 0);
|
||||
const b = _tz(d);
|
||||
let key, label, ts;
|
||||
if (granularity === 'hour') {
|
||||
key = `${b.year}-${b.month + 1}-${b.date}-${b.hours}`;
|
||||
label = d.toLocaleDateString('de-DE', { day: '2-digit', month: 'short', timeZone: TIMEZONE }) + ', ' + b.hours.toString().padStart(2, '0') + ':00';
|
||||
ts = new Date(b.year, b.month, b.date, b.hours).getTime();
|
||||
} else {
|
||||
key = `${b.year}-${b.month + 1}-${b.date}`;
|
||||
label = d.toLocaleDateString('de-DE', { weekday: 'short', day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
ts = new Date(b.year, b.month, b.date, 12).getTime();
|
||||
}
|
||||
if (!bucketMap[key]) {
|
||||
bucketMap[key] = { key, label, timestamp: ts, entries: [] };
|
||||
}
|
||||
bucketMap[key].entries.push(e);
|
||||
});
|
||||
return Object.values(bucketMap).sort((a, b) => a.timestamp - b.timestamp);
|
||||
},
|
||||
|
||||
_mergeCloseBuckets(buckets) {
|
||||
if (buckets.length < 2) return buckets;
|
||||
const rangeStart = buckets[0].timestamp;
|
||||
const rangeEnd = buckets[buckets.length - 1].timestamp;
|
||||
if (rangeEnd <= rangeStart) return buckets;
|
||||
|
||||
const container = document.getElementById('timeline');
|
||||
const axisWidth = (container ? container.offsetWidth : 800) * 0.92;
|
||||
const maxCount = Math.max(...buckets.map(b => b.entries.length));
|
||||
const result = [buckets[0]];
|
||||
|
||||
for (let i = 1; i < buckets.length; i++) {
|
||||
const prev = result[result.length - 1];
|
||||
const curr = buckets[i];
|
||||
|
||||
const distPx = ((curr.timestamp - prev.timestamp) / (rangeEnd - rangeStart)) * axisWidth;
|
||||
const prevSize = Math.min(32, this._calcPointSize(prev.entries.length, maxCount));
|
||||
const currSize = Math.min(32, this._calcPointSize(curr.entries.length, maxCount));
|
||||
const minDistPx = (prevSize + currSize) / 2 + 6;
|
||||
|
||||
if (distPx < minDistPx) {
|
||||
prev.entries = prev.entries.concat(curr.entries);
|
||||
} else {
|
||||
result.push(curr);
|
||||
}
|
||||
/** Vertikaler Stream: Datums-Trennzeilen + Lagebericht-Sektionen + Meldungen. */
|
||||
_renderVerticalStream(entries) {
|
||||
if (!entries || entries.length === 0) {
|
||||
return '<div class="ht-empty">Keine Einträge.</div>';
|
||||
}
|
||||
return result;
|
||||
// Neueste oben
|
||||
const sorted = [...entries].sort((a, b) => new Date(b.timestamp || 0) - new Date(a.timestamp || 0));
|
||||
const granularity = this._calcGranularity(sorted);
|
||||
const groups = this._groupByTimePeriod(sorted, granularity);
|
||||
|
||||
let html = '<div class="vt-timeline">';
|
||||
groups.forEach(g => {
|
||||
const groupId = 'vt-grp-' + g.key.replace(/[^a-z0-9]/gi, '-');
|
||||
html += `<div class="vt-time-group" id="${groupId}" data-time-key="${UI.escape(g.key)}">`;
|
||||
html += `<div class="vt-time-label"><span class="vt-time-label-text">${UI.escape(g.label)}</span></div>`;
|
||||
html += this._renderTimeGroupEntries(g.entries, this._currentIncidentType);
|
||||
html += `</div>`;
|
||||
});
|
||||
html += '</div>';
|
||||
return html;
|
||||
},
|
||||
|
||||
_bucketPositionPercent(bucket, rangeStart, rangeEnd, totalBuckets) {
|
||||
if (totalBuckets === 1) return 50;
|
||||
if (rangeEnd === rangeStart) return 50;
|
||||
return ((bucket.timestamp - rangeStart) / (rangeEnd - rangeStart)) * 100;
|
||||
/* ======= Quanti-Strip ======= */
|
||||
_stripGranularity(stripEntries) {
|
||||
if (stripEntries.length < 2) return 'day';
|
||||
const ts = stripEntries.map(e => new Date(e.timestamp || 0).getTime()).filter(t => t > 0);
|
||||
if (ts.length < 2) return 'day';
|
||||
const span = Math.max(...ts) - Math.min(...ts);
|
||||
const DAY = 86400000;
|
||||
if (span <= 2 * DAY) return 'hour';
|
||||
if (span <= 60 * DAY) return 'day';
|
||||
if (span <= 365 * DAY) return 'week';
|
||||
return 'month';
|
||||
},
|
||||
|
||||
_calcPointSize(count, maxCount) {
|
||||
if (maxCount <= 1) return 16;
|
||||
const minSize = 12;
|
||||
const maxSize = 32;
|
||||
const logScale = Math.log(count + 1) / Math.log(maxCount + 1);
|
||||
return Math.round(minSize + logScale * (maxSize - minSize));
|
||||
},
|
||||
_buildStripBuckets(stripEntries, granularity) {
|
||||
if (stripEntries.length === 0) return [];
|
||||
const ts = stripEntries.map(e => new Date(e.timestamp || 0).getTime()).filter(t => t > 0);
|
||||
if (ts.length === 0) return [];
|
||||
const minTs = Math.min(...ts);
|
||||
const maxTs = Math.max(...ts);
|
||||
|
||||
_buildAxisLabels(buckets, granularity, timeOnly) {
|
||||
if (buckets.length === 0) return [];
|
||||
const maxLabels = 8;
|
||||
const labels = [];
|
||||
const rangeStart = buckets[0].timestamp;
|
||||
const rangeEnd = buckets[buckets.length - 1].timestamp;
|
||||
// Bucket-Start fuer minTs ermitteln
|
||||
const minDate = new Date(minTs);
|
||||
const tzMin = _tz(minDate);
|
||||
let firstStart;
|
||||
let stepMs;
|
||||
if (granularity === 'hour') {
|
||||
firstStart = new Date(tzMin.year, tzMin.month, tzMin.date, tzMin.hours).getTime();
|
||||
stepMs = 3600000;
|
||||
} else if (granularity === 'day') {
|
||||
firstStart = new Date(tzMin.year, tzMin.month, tzMin.date).getTime();
|
||||
stepMs = 86400000;
|
||||
} else if (granularity === 'week') {
|
||||
const dow = (minDate.getDay() + 6) % 7; // 0=Mo
|
||||
firstStart = new Date(tzMin.year, tzMin.month, tzMin.date - dow).getTime();
|
||||
stepMs = 7 * 86400000;
|
||||
} else {
|
||||
firstStart = new Date(tzMin.year, tzMin.month, 1).getTime();
|
||||
stepMs = null; // dynamisch (Monatsgrenzen)
|
||||
}
|
||||
|
||||
const getLabelText = (b) => {
|
||||
if (timeOnly) {
|
||||
// Bei Tages-Granularität: Uhrzeit des ersten Eintrags nehmen
|
||||
const ts = (granularity === 'day' && b.entries && b.entries.length > 0)
|
||||
? new Date(b.entries[0].timestamp || b.timestamp)
|
||||
: new Date(b.timestamp);
|
||||
const tp = _tz(ts);
|
||||
return tp.hours.toString().padStart(2, '0') + ':' + tp.minutes.toString().padStart(2, '0');
|
||||
}
|
||||
return b.label;
|
||||
const buckets = [];
|
||||
const fmt = (t) => {
|
||||
const d = new Date(t);
|
||||
if (granularity === 'hour') return d.toLocaleString('de-DE', { day: '2-digit', month: 'short', hour: '2-digit', minute: '2-digit', timeZone: TIMEZONE });
|
||||
if (granularity === 'day') return d.toLocaleDateString('de-DE', { weekday: 'short', day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
if (granularity === 'week') return 'Woche ab ' + d.toLocaleDateString('de-DE', { day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
return d.toLocaleDateString('de-DE', { month: 'long', year: 'numeric', timeZone: TIMEZONE });
|
||||
};
|
||||
|
||||
if (buckets.length <= maxLabels) {
|
||||
buckets.forEach(b => {
|
||||
labels.push({ text: getLabelText(b), pos: this._bucketPositionPercent(b, rangeStart, rangeEnd, buckets.length) });
|
||||
});
|
||||
if (granularity === 'month') {
|
||||
let d = new Date(firstStart);
|
||||
while (d.getTime() <= maxTs && buckets.length < 240) {
|
||||
const start = d.getTime();
|
||||
const next = new Date(d.getFullYear(), d.getMonth() + 1, 1).getTime();
|
||||
buckets.push({ start, end: next, label: fmt(start), articles: 0, snapshots: 0 });
|
||||
d = new Date(next);
|
||||
}
|
||||
} else {
|
||||
const step = (buckets.length - 1) / (maxLabels - 1);
|
||||
for (let i = 0; i < maxLabels; i++) {
|
||||
const idx = Math.round(i * step);
|
||||
const b = buckets[idx];
|
||||
labels.push({ text: getLabelText(b), pos: this._bucketPositionPercent(b, rangeStart, rangeEnd, buckets.length) });
|
||||
for (let t = firstStart; t <= maxTs && buckets.length < 240; t += stepMs) {
|
||||
buckets.push({ start: t, end: t + stepMs, label: fmt(t), articles: 0, snapshots: 0 });
|
||||
}
|
||||
}
|
||||
return labels;
|
||||
},
|
||||
|
||||
_thinLabels(labels, minGapPercent) {
|
||||
if (!labels || labels.length <= 1) return labels;
|
||||
const gap = minGapPercent || 8;
|
||||
const result = [labels[0]];
|
||||
for (let i = 1; i < labels.length; i++) {
|
||||
if (labels[i].pos - result[result.length - 1].pos >= gap) {
|
||||
result.push(labels[i]);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
},
|
||||
|
||||
_buildDayMarkers(buckets, rangeStart, rangeEnd) {
|
||||
const seen = {};
|
||||
const markers = [];
|
||||
buckets.forEach(b => {
|
||||
const d = new Date(b.timestamp);
|
||||
const bp = _tz(d);
|
||||
const dayKey = `${bp.year}-${bp.month}-${bp.date}`;
|
||||
if (!seen[dayKey]) {
|
||||
seen[dayKey] = true;
|
||||
const np = _tz(new Date());
|
||||
const todayKey = `${np.year}-${np.month}-${np.date}`;
|
||||
const yp = _tz(new Date(Date.now() - 86400000));
|
||||
const yesterdayKey = `${yp.year}-${yp.month}-${yp.date}`;
|
||||
let label;
|
||||
const dateStr = d.toLocaleDateString('de-DE', { day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
if (dayKey === todayKey) {
|
||||
label = 'Heute, ' + dateStr;
|
||||
} else if (dayKey === yesterdayKey) {
|
||||
label = 'Gestern, ' + dateStr;
|
||||
} else {
|
||||
label = d.toLocaleDateString('de-DE', { weekday: 'short', day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
// Eintraege zaehlen
|
||||
stripEntries.forEach(e => {
|
||||
const ets = new Date(e.timestamp || 0).getTime();
|
||||
// Linear-Suche, da Buckets sortiert; bei vielen Buckets ggf. Binary
|
||||
for (let i = 0; i < buckets.length; i++) {
|
||||
if (ets >= buckets[i].start && ets < buckets[i].end) {
|
||||
if (e.kind === 'article') buckets[i].articles++;
|
||||
else if (e.kind === 'snapshot') buckets[i].snapshots++;
|
||||
break;
|
||||
}
|
||||
const pos = this._bucketPositionPercent(b, rangeStart, rangeEnd, buckets.length);
|
||||
markers.push({ text: label, pos });
|
||||
}
|
||||
});
|
||||
return markers;
|
||||
|
||||
return buckets;
|
||||
},
|
||||
|
||||
_renderDetailPanel(bucket) {
|
||||
const type = this._currentIncidentType;
|
||||
const sorted = [...bucket.entries].sort((a, b) => {
|
||||
if (a.kind === 'snapshot' && b.kind !== 'snapshot') return -1;
|
||||
if (a.kind !== 'snapshot' && b.kind === 'snapshot') return 1;
|
||||
return new Date(b.timestamp || 0) - new Date(a.timestamp || 0);
|
||||
});
|
||||
_renderTimelineStrip(stripEntries) {
|
||||
const granularity = this._stripGranularity(stripEntries);
|
||||
const buckets = this._buildStripBuckets(stripEntries, granularity);
|
||||
if (buckets.length === 0) return '';
|
||||
|
||||
let entriesHtml = '';
|
||||
sorted.forEach(e => {
|
||||
if (e.kind === 'snapshot') {
|
||||
entriesHtml += this._renderSnapshotEntry(e.data);
|
||||
} else {
|
||||
entriesHtml += this._renderArticleEntry(e.data, type, 0);
|
||||
const maxCount = Math.max(1, ...buckets.map(b => b.articles));
|
||||
const win = this._activeStripWindow;
|
||||
|
||||
let html = '<div class="ht-strip">';
|
||||
html += '<div class="ht-strip-cells">';
|
||||
buckets.forEach(b => {
|
||||
const intensity = b.articles > 0 ? Math.min(1, b.articles / maxCount) : 0;
|
||||
const cls = ['ht-strip-cell'];
|
||||
if (b.snapshots > 0) cls.push('has-snapshot');
|
||||
if (b.articles === 0 && b.snapshots === 0) cls.push('empty');
|
||||
if (win && win.start === b.start && win.end === b.end) cls.push('active');
|
||||
const tip = `${b.label}: ${b.articles} Meldung${b.articles === 1 ? '' : 'en'}` +
|
||||
(b.snapshots > 0 ? ` + ${b.snapshots} Lagebericht${b.snapshots === 1 ? '' : 'e'}` : '');
|
||||
// data-Attribute statt JSON-String im onclick-Inline (vermeidet Quote-Konflikte bei Labels mit Komma/Anführungszeichen)
|
||||
html += `<div class="${cls.join(' ')}" style="--intensity:${intensity.toFixed(3)};" title="${UI.escape(tip)}" data-start="${b.start}" data-end="${b.end}" data-label="${UI.escape(b.label || '')}" onclick="App.handleStripClick(this)"></div>`;
|
||||
});
|
||||
html += '</div>';
|
||||
|
||||
// Wenige Datums-Labels unter dem Strip
|
||||
const labelCount = Math.min(buckets.length, 6);
|
||||
const stride = Math.max(1, Math.floor(buckets.length / labelCount));
|
||||
const labelTexts = [];
|
||||
for (let i = 0; i < buckets.length; i += stride) {
|
||||
const b = buckets[i];
|
||||
const d = new Date(b.start);
|
||||
let txt;
|
||||
if (granularity === 'hour') txt = d.toLocaleTimeString('de-DE', { hour: '2-digit', minute: '2-digit', timeZone: TIMEZONE });
|
||||
else if (granularity === 'day') txt = d.toLocaleDateString('de-DE', { day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
else if (granularity === 'week') txt = 'KW ' + d.toLocaleDateString('de-DE', { day: '2-digit', month: 'short', timeZone: TIMEZONE });
|
||||
else txt = d.toLocaleDateString('de-DE', { month: 'short', year: '2-digit', timeZone: TIMEZONE });
|
||||
labelTexts.push({ text: txt, idx: i });
|
||||
}
|
||||
if (labelTexts.length) {
|
||||
html += '<div class="ht-strip-labels" style="grid-template-columns: repeat(' + buckets.length + ', 1fr);">';
|
||||
const seen = new Set(labelTexts.map(l => l.idx));
|
||||
for (let i = 0; i < buckets.length; i++) {
|
||||
if (seen.has(i)) {
|
||||
const t = labelTexts.find(l => l.idx === i).text;
|
||||
html += `<div class="ht-strip-label">${UI.escape(t)}</div>`;
|
||||
} else {
|
||||
html += '<div></div>';
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return `<div class="ht-detail-panel">
|
||||
<div class="ht-detail-header">
|
||||
<span class="ht-detail-title">${UI.escape(bucket.label)} (${bucket.entries.length} Eintr${bucket.entries.length === 1 ? 'ag' : 'äge'})</span>
|
||||
<button class="ht-detail-close" onclick="App.closeTimelineDetail()">×</button>
|
||||
</div>
|
||||
<div class="ht-detail-content">${entriesHtml}</div>
|
||||
</div>`;
|
||||
html += '</div>';
|
||||
}
|
||||
html += '</div>';
|
||||
return html;
|
||||
},
|
||||
|
||||
setTimelineFilter(filter) {
|
||||
this._timelineFilter = filter;
|
||||
this._activePointIndex = null;
|
||||
this._activeStripWindow = null;
|
||||
document.querySelectorAll('.ht-filter-btn').forEach(btn => {
|
||||
const isActive = btn.dataset.filter === filter;
|
||||
btn.classList.toggle('active', isActive);
|
||||
@@ -1399,7 +1459,7 @@ const App = {
|
||||
|
||||
setTimelineRange(range) {
|
||||
this._timelineRange = range;
|
||||
this._activePointIndex = null;
|
||||
this._activeStripWindow = null;
|
||||
document.querySelectorAll('.ht-range-btn').forEach(btn => {
|
||||
const isActive = btn.dataset.range === range;
|
||||
btn.classList.toggle('active', isActive);
|
||||
@@ -1408,20 +1468,34 @@ const App = {
|
||||
this.rerenderTimeline();
|
||||
},
|
||||
|
||||
openTimelineDetail(bucketIndex) {
|
||||
if (this._activePointIndex === bucketIndex) {
|
||||
this._activePointIndex = null;
|
||||
} else {
|
||||
this._activePointIndex = bucketIndex;
|
||||
/** Robuster Click-Handler fuer Heatmap-Cells (vermeidet Quote-Konflikte). */
|
||||
handleStripClick(el) {
|
||||
if (!el) return;
|
||||
const start = parseInt(el.dataset.start, 10);
|
||||
const end = parseInt(el.dataset.end, 10);
|
||||
const label = el.dataset.label || '';
|
||||
if (!isNaN(start) && !isNaN(end)) {
|
||||
this.openTimelineWindow(start, end, label);
|
||||
}
|
||||
this.rerenderTimeline();
|
||||
this._resizeTimelineTile();
|
||||
},
|
||||
|
||||
closeTimelineDetail() {
|
||||
this._activePointIndex = null;
|
||||
/** Klick auf Heatmap-Balken: Stream auf dieses Zeitfenster filtern.
|
||||
* Zweiter Klick auf denselben Balken hebt den Filter auf.
|
||||
*/
|
||||
openTimelineWindow(startMs, endMs, label) {
|
||||
const win = this._activeStripWindow;
|
||||
if (win && win.start === startMs && win.end === endMs) {
|
||||
this._activeStripWindow = null;
|
||||
} else {
|
||||
this._activeStripWindow = { start: startMs, end: endMs, label: label || '' };
|
||||
}
|
||||
this.rerenderTimeline();
|
||||
},
|
||||
|
||||
/** Strip-Filter aufheben (z.B. via Banner-Button). */
|
||||
clearStripWindow() {
|
||||
this._activeStripWindow = null;
|
||||
this.rerenderTimeline();
|
||||
this._resizeTimelineTile();
|
||||
},
|
||||
|
||||
_resizeTimelineTile() {
|
||||
@@ -1856,6 +1930,11 @@ async handleRefresh() {
|
||||
this._updateRefreshButton(true);
|
||||
// showProgress called via handleStatusUpdate
|
||||
const result = await API.refreshIncident(this.currentIncidentId);
|
||||
// Pipeline auf "pending" setzen, damit alte gruene Haekchen nicht
|
||||
// faelschlich "schon fertig" suggerieren waehrend die Lage in der Queue steht
|
||||
if (typeof Pipeline !== 'undefined' && Pipeline.beginQueue) {
|
||||
Pipeline.beginQueue(this.currentIncidentId);
|
||||
}
|
||||
if (result && result.status === 'skipped') {
|
||||
UI.showToast('Aktualisierung ist in der Warteschlange und wird ausgefuehrt, sobald die aktuelle Recherche abgeschlossen ist.', 'info');
|
||||
} else {
|
||||
@@ -2077,8 +2156,19 @@ async handleRefresh() {
|
||||
_updateRefreshButton(disabled) {
|
||||
const btn = document.getElementById('refresh-btn');
|
||||
if (!btn) return;
|
||||
// Hard-Stop: Lese-Modus (Budget aufgebraucht / Lizenz abgelaufen) -> immer disabled
|
||||
if (this.user && this.user.read_only) {
|
||||
btn.disabled = true;
|
||||
const reason = this.user.read_only_reason;
|
||||
btn.textContent = reason === 'budget_exceeded' ? 'Budget aufgebraucht' : 'Nur Lesezugriff';
|
||||
btn.title = reason === 'budget_exceeded'
|
||||
? 'Token-Budget aufgebraucht. Bitte Verwaltung kontaktieren.'
|
||||
: 'Lizenz erlaubt keinen Schreibzugriff';
|
||||
return;
|
||||
}
|
||||
btn.disabled = disabled;
|
||||
btn.textContent = disabled ? 'Läuft...' : 'Aktualisieren';
|
||||
btn.title = '';
|
||||
},
|
||||
|
||||
async handleDelete() {
|
||||
|
||||
@@ -354,9 +354,22 @@ const UI = {
|
||||
const minBtn = document.getElementById('progress-popup-minimize');
|
||||
if (minBtn) minBtn.style.display = state.isFirst ? 'none' : '';
|
||||
|
||||
// Title
|
||||
// Title - haengt von Status ab (queued = wartet, cancelling = bricht ab, sonst laeuft)
|
||||
const titleEl = document.getElementById('progress-popup-title');
|
||||
if (titleEl) titleEl.textContent = state.isFirst ? 'Erste Recherche l\u00e4uft' : 'Aktualisierung l\u00e4uft';
|
||||
if (titleEl) {
|
||||
let title;
|
||||
if (status === 'queued') {
|
||||
const pos = (state && state._queuePos) ? ' (#' + state._queuePos + ')' : '';
|
||||
title = 'In Warteschlange' + pos;
|
||||
} else if (status === 'cancelling') {
|
||||
title = 'Wird abgebrochen\u2026';
|
||||
} else if (state.isFirst) {
|
||||
title = 'Erste Recherche l\u00e4uft';
|
||||
} else {
|
||||
title = 'Aktualisierung l\u00e4uft';
|
||||
}
|
||||
titleEl.textContent = title;
|
||||
}
|
||||
|
||||
// Multi-pass info
|
||||
const passEl = document.getElementById('progress-popup-pass');
|
||||
@@ -971,8 +984,9 @@ const UI = {
|
||||
html += '<div class="source-overview-grid">';
|
||||
data.sources.forEach(s => {
|
||||
const langs = (s.languages || ['de']).map(l => (l || 'de').toUpperCase()).join('/');
|
||||
html += `<div class="source-overview-item">
|
||||
<span class="source-overview-name">${this.escape(s.source || 'Unbekannt')}</span>
|
||||
const sourceName = this.escape(s.source || 'Unbekannt');
|
||||
html += `<div class="source-overview-item" data-source="${sourceName}" tabindex="0" role="button" aria-expanded="false" onclick="App.toggleSourceOverviewDetail(this)" onkeydown="if(event.key==='Enter'||event.key===' '){event.preventDefault();App.toggleSourceOverviewDetail(this);}">
|
||||
<span class="source-overview-name">${sourceName}</span>
|
||||
<span class="source-overview-lang">${langs}</span>
|
||||
<span class="source-overview-count">${s.article_count}</span>
|
||||
</div>`;
|
||||
|
||||
@@ -19,6 +19,7 @@ const Pipeline = {
|
||||
_incidentId: null,
|
||||
_definition: null, // PIPELINE_STEPS vom Backend
|
||||
_stateByKey: {}, // step_key -> {status, count_value, count_secondary, pass_number}
|
||||
_snapshotState: null, // deep-copy von _stateByKey vor Refresh-Start (fuer Cancel-Restore)
|
||||
_isResearch: false,
|
||||
_passTotal: 1,
|
||||
_lastRefreshHeader: null,
|
||||
@@ -42,10 +43,11 @@ const Pipeline = {
|
||||
if (this._wsBound) return;
|
||||
if (typeof WS !== 'undefined' && WS.on) {
|
||||
WS.on('pipeline_step', (msg) => this._onWsStep(msg));
|
||||
// Bei Refresh-Complete den finalen Stand neu laden, damit Zahlen gefroren sichtbar bleiben
|
||||
WS.on('refresh_complete', (msg) => this._onRefreshDone(msg));
|
||||
WS.on('refresh_cancelled', (msg) => this._onRefreshDone(msg));
|
||||
WS.on('refresh_error', (msg) => this._onRefreshDone(msg));
|
||||
// Erfolg: API-State neu laden (finaler Stand sichtbar)
|
||||
WS.on('refresh_complete', (msg) => this._onRefreshDoneSuccess(msg));
|
||||
// Cancel/Error: vor-Refresh-Snapshot zurueckspielen, damit Pipeline nicht im Mix-Zustand stehen bleibt
|
||||
WS.on('refresh_cancelled', (msg) => this._onRefreshDoneCancel(msg));
|
||||
WS.on('refresh_error', (msg) => this._onRefreshDoneError(msg));
|
||||
this._wsBound = true;
|
||||
}
|
||||
// Hover-Tooltip-Element vorbereiten
|
||||
@@ -68,6 +70,7 @@ const Pipeline = {
|
||||
async bindToIncident(incidentId) {
|
||||
this._incidentId = incidentId;
|
||||
this._stateByKey = {};
|
||||
this._snapshotState = null; // Snapshot ist immer lagen-spezifisch
|
||||
this._isResearch = false;
|
||||
this._passTotal = 1;
|
||||
this._lastRefreshHeader = null;
|
||||
@@ -101,6 +104,20 @@ const Pipeline = {
|
||||
|
||||
this._render();
|
||||
this._renderMini();
|
||||
|
||||
// Edge-Case: Lage ist gerade in Queue (z.B. via Lagen-Wechsel beim
|
||||
// Klick in der Sidebar). API liefert den LETZTEN gespeicherten Stand
|
||||
// (alles done = gruen), aber tatsaechlich wartet ein neuer Refresh.
|
||||
// -> beginQueue() selbst ausloesen, damit Icons grau zeigen.
|
||||
try {
|
||||
if (typeof App !== 'undefined' && App._refreshingIncidents
|
||||
&& App._refreshingIncidents.has(incidentId)
|
||||
&& typeof UI !== 'undefined' && UI._progressState
|
||||
&& UI._progressState[incidentId]
|
||||
&& UI._progressState[incidentId].step === 'queued') {
|
||||
this.beginQueue(incidentId);
|
||||
}
|
||||
} catch (e) { /* tolerant */ }
|
||||
} catch (e) {
|
||||
console.warn('Pipeline laden fehlgeschlagen:', e);
|
||||
this._renderEmpty('Pipeline-Daten konnten nicht geladen werden.');
|
||||
@@ -141,30 +158,90 @@ const Pipeline = {
|
||||
}
|
||||
}
|
||||
|
||||
// Wenn ein neuer Pass startet (pass_number > prev und status="active" beim ERSTEN step):
|
||||
// alle Schritte zurück auf pending setzen, damit die Animation neu durchläuft.
|
||||
// Wenn der ERSTE Schritt (sources_review) auf "active" geht, beginnt ein neuer
|
||||
// Refresh oder ein neuer Multi-Pass-Durchlauf — alle nachfolgenden Schritte auf
|
||||
// "pending" (grau) zuruecksetzen, damit der User sieht: das ist neu und
|
||||
// noch nicht durchlaufen. Sonst stehen sie als "done" vom letzten Mal da.
|
||||
let didReset = false;
|
||||
if (d.status === 'active' && this._definition && this._definition.length
|
||||
&& key === this._definition[0].key && passNr > 1 && (!prev || prev.pass_number < passNr)) {
|
||||
// Alle anderen Steps in "pending" zurueck (visuell), Werte behalten wir
|
||||
&& key === this._definition[0].key) {
|
||||
this._definition.forEach(s => {
|
||||
if (s.key !== key && this._stateByKey[s.key]) {
|
||||
this._stateByKey[s.key].status = 'pending';
|
||||
didReset = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
this._patchBlock(key);
|
||||
this._patchMiniBlock(key);
|
||||
if (didReset) {
|
||||
// Beim Reset alle Bloecke neu zeichnen, nicht nur den aktuellen
|
||||
this._render();
|
||||
this._renderMini();
|
||||
} else {
|
||||
this._patchBlock(key);
|
||||
this._patchMiniBlock(key);
|
||||
}
|
||||
},
|
||||
|
||||
_onRefreshDone(msg) {
|
||||
/**
|
||||
* Wird vom Frontend gerufen, wenn ein Refresh angestossen wurde (queued).
|
||||
* Macht einen Snapshot des aktuellen Pipeline-Stands (zur spaeteren Wiederherstellung
|
||||
* bei Cancel/Error) und setzt dann alle Steps auf "pending" - damit der User sieht:
|
||||
* "neuer Refresh laeuft an, alte gruene Haekchen sind nicht mehr aktuell".
|
||||
*/
|
||||
beginQueue(incidentId) {
|
||||
if (this._incidentId !== incidentId) return; // andere Lage offen
|
||||
if (!this._definition) return; // noch keine Pipeline-Definition geladen
|
||||
// Aktuellen Stand sichern (deep-copy). Bei Mehrfach-Refresh ohne Cancel
|
||||
// dazwischen wird der Snapshot bewusst ueberschrieben - er soll immer
|
||||
// der "Stand kurz vor diesem Refresh" sein.
|
||||
this._snapshotState = JSON.parse(JSON.stringify(this._stateByKey));
|
||||
// Alle Steps auf pending setzen
|
||||
this._definition.forEach(s => {
|
||||
if (this._stateByKey[s.key]) {
|
||||
this._stateByKey[s.key].status = 'pending';
|
||||
} else {
|
||||
this._stateByKey[s.key] = { status: 'pending', count_value: null, count_secondary: null, pass_number: 1 };
|
||||
}
|
||||
});
|
||||
this._render();
|
||||
this._renderMini();
|
||||
},
|
||||
|
||||
/** Restauriert den letzten Snapshot. Rueckgabe: true bei Erfolg, false wenn keiner da war. */
|
||||
_restoreSnapshot() {
|
||||
if (!this._snapshotState) return false;
|
||||
this._stateByKey = this._snapshotState;
|
||||
this._snapshotState = null;
|
||||
this._render();
|
||||
this._renderMini();
|
||||
return true;
|
||||
},
|
||||
|
||||
_onRefreshDoneSuccess(msg) {
|
||||
if (this._incidentId == null || (msg && msg.incident_id !== this._incidentId)) return;
|
||||
this._snapshotState = null; // verworfen, neuer Stand wird vom API geladen
|
||||
// Daten frisch nachladen, damit Header (Dauer) und finale Zahlen passen
|
||||
setTimeout(() => {
|
||||
if (this._incidentId != null) this.bindToIncident(this._incidentId);
|
||||
}, 600);
|
||||
},
|
||||
|
||||
_onRefreshDoneCancel(msg) {
|
||||
if (this._incidentId == null || (msg && msg.incident_id !== this._incidentId)) return;
|
||||
if (!this._restoreSnapshot()) {
|
||||
// Kein Snapshot vorhanden (z.B. Page-Reload mitten im Refresh) -> wie bisher API-Reload
|
||||
setTimeout(() => {
|
||||
if (this._incidentId != null) this.bindToIncident(this._incidentId);
|
||||
}, 600);
|
||||
}
|
||||
},
|
||||
|
||||
_onRefreshDoneError(msg) {
|
||||
// Wie Cancel: vorheriger Stand zurueck (nicht im Mix-Zustand stehenbleiben)
|
||||
this._onRefreshDoneCancel(msg);
|
||||
},
|
||||
|
||||
/** Vollbild-Pipeline (Tab "Analysepipeline") als 3x3-Snake rendern. */
|
||||
_render() {
|
||||
const stage = document.getElementById('pipeline-stage');
|
||||
|
||||
In neuem Issue referenzieren
Einen Benutzer sperren