Telegram-Kanäle als Quelle: Parser, Pipeline, UI-Checkbox, Validate-Endpoint

- Neuer source_type telegram_channel in models.py (Source + Incident)
- DB-Migration: include_telegram Spalte für incidents
- feeds/telegram_parser.py: Telethon-basierter Parser (analog RSS)
- Orchestrator: Telegram-Pipeline parallel zu RSS + WebSearch
- sources.py: POST /api/sources/telegram/validate Endpoint
- incidents.py: include_telegram in Create/Update/Response
- dashboard.html: Telegram-Checkbox + Filter-Option
- app.js: FormData, EditModal, SourceStats, TypeLabels
- config.py: TELEGRAM_API_ID, API_HASH, SESSION_PATH
- requirements.txt: telethon hinzugefügt
Dieser Commit ist enthalten in:
@@ -535,6 +535,7 @@ class AgentOrchestrator:
|
||||
description = incident["description"] or ""
|
||||
incident_type = incident["type"] or "adhoc"
|
||||
international = bool(incident["international_sources"]) if "international_sources" in incident.keys() else True
|
||||
include_telegram = bool(incident["include_telegram"]) if "include_telegram" in incident.keys() else False
|
||||
visibility = incident["visibility"] if "visibility" in incident.keys() else "public"
|
||||
created_by = incident["created_by"] if "created_by" in incident.keys() else None
|
||||
tenant_id = incident["tenant_id"] if "tenant_id" in incident.keys() else None
|
||||
@@ -620,11 +621,24 @@ class AgentOrchestrator:
|
||||
logger.info(f"Claude-Recherche: {len(results)} Ergebnisse")
|
||||
return results, usage
|
||||
|
||||
# Beide Pipelines parallel starten
|
||||
(rss_articles, rss_feed_usage), (search_results, search_usage) = await asyncio.gather(
|
||||
_rss_pipeline(),
|
||||
_web_search_pipeline(),
|
||||
)
|
||||
async def _telegram_pipeline():
    """Search Telegram channels for material matching the incident title.

    Runs alongside the RSS and web-search pipelines. Returns a tuple of
    (messages, usage); usage is always None here — presumably the Telegram
    search incurs no LLM token cost, unlike the sibling pipelines.
    """
    # Lazy import: telethon is only needed when include_telegram is set,
    # so the dependency is not loaded for RSS/web-search-only runs.
    from feeds.telegram_parser import TelegramParser

    parser = TelegramParser()
    # title / tenant_id are closed over from the enclosing orchestrator method.
    messages = await parser.search_channels(title, tenant_id=tenant_id, keywords=None)
    logger.info(f"Telegram-Pipeline: {len(messages)} Nachrichten")
    return messages, None
|
||||
|
||||
# Pipelines parallel starten (RSS + WebSearch + optional Telegram)
|
||||
pipelines = [_rss_pipeline(), _web_search_pipeline()]
|
||||
if include_telegram:
|
||||
pipelines.append(_telegram_pipeline())
|
||||
|
||||
pipeline_results = await asyncio.gather(*pipelines)
|
||||
|
||||
(rss_articles, rss_feed_usage) = pipeline_results[0]
|
||||
(search_results, search_usage) = pipeline_results[1]
|
||||
telegram_articles = pipeline_results[2][0] if include_telegram else []
|
||||
|
||||
if rss_feed_usage:
|
||||
usage_acc.add(rss_feed_usage)
|
||||
@@ -635,7 +649,7 @@ class AgentOrchestrator:
|
||||
self._check_cancelled(incident_id)
|
||||
|
||||
# Alle Ergebnisse zusammenführen
|
||||
all_results = rss_articles + search_results
|
||||
all_results = rss_articles + search_results + telegram_articles
|
||||
|
||||
# Duplikate entfernen (normalisierte URL + Headline-Ähnlichkeit)
|
||||
seen_urls = set()
|
||||
|
||||
In neuem Issue referenzieren
Einen Benutzer sperren