Per-User Domain-Ausschlüsse + Grundquellen-Schutz

- Neue Tabelle user_excluded_domains für benutzerspezifische Ausschlüsse
- Domain-Ausschlüsse wirken nur für den jeweiligen User, nicht org-weit
- user_id wird durch die gesamte Pipeline geschleust (Orchestrator → Researcher → RSS-Parser)
- Grundquellen (is_global) können im Frontend nicht mehr bearbeitet/gelöscht werden
- Grundquelle-Badge bei globalen Quellen statt Edit/Delete-Buttons
- Filter "Von mir ausgeschlossen" im Quellen-Modal

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Dieser Commit ist enthalten in:
claude-dev
2026-03-08 14:30:21 +01:00
Ursprung 18954cf70e
Commit 5e19736a25
13 geänderte Dateien mit 149 neuen und 108 gelöschten Zeilen

Datei anzeigen

@@ -332,7 +332,7 @@ class AgentOrchestrator:
self._running = False
logger.info("Agenten-Orchestrator gestoppt")
async def enqueue_refresh(self, incident_id: int, trigger_type: str = "manual") -> bool:
async def enqueue_refresh(self, incident_id: int, trigger_type: str = "manual", user_id: int = None) -> bool:
"""Refresh-Auftrag in die Queue stellen. Gibt False zurueck wenn bereits in Queue/aktiv."""
if incident_id in self._queued_ids or self._current_task == incident_id:
logger.info(f"Refresh fuer Lage {incident_id} uebersprungen: bereits aktiv/in Queue")
@@ -341,7 +341,7 @@ class AgentOrchestrator:
visibility, created_by, tenant_id = await self._get_incident_visibility(incident_id)
self._queued_ids.add(incident_id)
await self._queue.put((incident_id, trigger_type))
await self._queue.put((incident_id, trigger_type, user_id))
queue_size = self._queue.qsize()
logger.info(f"Refresh fuer Lage {incident_id} eingereiht (Queue: {queue_size}, Trigger: {trigger_type})")
@@ -386,7 +386,11 @@ class AgentOrchestrator:
except asyncio.TimeoutError:
continue
incident_id, trigger_type = item
if len(item) == 3:
incident_id, trigger_type, user_id = item
else:
incident_id, trigger_type = item
user_id = None
self._queued_ids.discard(incident_id)
self._current_task = incident_id
logger.info(f"Starte Refresh für Lage {incident_id} (Trigger: {trigger_type})")
@@ -398,7 +402,7 @@ class AgentOrchestrator:
try:
for attempt in range(3):
try:
await self._run_refresh(incident_id, trigger_type=trigger_type, retry_count=attempt)
await self._run_refresh(incident_id, trigger_type=trigger_type, retry_count=attempt, user_id=user_id)
last_error = None
break # Erfolg
except asyncio.CancelledError:
@@ -509,7 +513,7 @@ class AgentOrchestrator:
await db.close()
return visibility, created_by, tenant_id
async def _run_refresh(self, incident_id: int, trigger_type: str = "manual", retry_count: int = 0):
async def _run_refresh(self, incident_id: int, trigger_type: str = "manual", retry_count: int = 0, user_id: int = None):
"""Führt einen kompletten Refresh-Zyklus durch."""
import aiosqlite
from database import get_db
@@ -604,7 +608,7 @@ class AgentOrchestrator:
keywords = feed_sel_keywords
articles = await rss_parser.search_feeds_selective(title, selected_feeds, keywords=keywords)
else:
articles = await rss_parser.search_feeds(title, international=international, tenant_id=tenant_id, keywords=keywords)
articles = await rss_parser.search_feeds(title, international=international, tenant_id=tenant_id, keywords=keywords, user_id=user_id)
logger.info(f"RSS: {len(articles)} relevante Artikel gefunden (international={international})")
return articles, feed_usage
@@ -612,7 +616,7 @@ class AgentOrchestrator:
async def _web_search_pipeline():
"""Claude WebSearch-Recherche."""
researcher = ResearcherAgent()
results, usage = await researcher.search(title, description, incident_type, international=international)
results, usage = await researcher.search(title, description, incident_type, international=international, user_id=user_id)
logger.info(f"Claude-Recherche: {len(results)} Ergebnisse")
return results, usage

Datei anzeigen

@@ -269,7 +269,7 @@ class ResearcherAgent:
logger.warning(f"Keyword-Extraktion fehlgeschlagen: {e}")
return None, None
async def search(self, title: str, description: str = "", incident_type: str = "adhoc", international: bool = True) -> tuple[list[dict], ClaudeUsage | None]:
async def search(self, title: str, description: str = "", incident_type: str = "adhoc", international: bool = True, user_id: int = None) -> tuple[list[dict], ClaudeUsage | None]:
"""Sucht nach Informationen zu einem Vorfall."""
from config import OUTPUT_LANGUAGE
if incident_type == "research":
@@ -290,7 +290,7 @@ class ResearcherAgent:
articles = self._parse_response(result)
# Ausgeschlossene Quellen dynamisch aus DB laden
excluded_sources = await self._get_excluded_sources()
excluded_sources = await self._get_excluded_sources(user_id=user_id)
# Ausgeschlossene Quellen filtern
filtered = []
@@ -317,14 +317,23 @@ class ResearcherAgent:
logger.error(f"Recherche-Fehler: {e}")
return [], None
async def _get_excluded_sources(self) -> list[str]:
"""Lädt ausgeschlossene Quellen aus der Datenbank."""
async def _get_excluded_sources(self, user_id: int = None) -> list[str]:
"""Laedt ausgeschlossene Quellen (global + per-User)."""
try:
from source_rules import get_source_rules
from source_rules import get_source_rules, get_user_excluded_domains
rules = await get_source_rules()
return rules.get("excluded_domains", [])
excluded = list(rules.get("excluded_domains", []))
# User-spezifische Ausschluesse hinzufuegen
if user_id:
user_excluded = await get_user_excluded_domains(user_id)
for domain in user_excluded:
if domain not in excluded:
excluded.append(domain)
return excluded
except Exception as e:
logger.warning(f"Fallback auf config.py für Excluded Sources: {e}")
logger.warning(f"Fallback auf config.py fuer Excluded Sources: {e}")
from config import EXCLUDED_SOURCES
return list(EXCLUDED_SOURCES)