GEOINT mode removed from the monitor

Will be rebuilt as a standalone application on a separate subdomain.
All GEOINT files removed; dashboard.html/components.js/main.py
reset to their pre-GEOINT state.
This commit is contained in:
Claude Dev
2026-03-24 11:06:19 +01:00
Parent 8212617276
Commit e64447ab7f
14 changed files with 4112 additions and 1999 deletions

@@ -1,300 +0,0 @@
"""GEOINT-Router: Proxy fuer externe Echtzeit-Datenquellen (Flugverkehr, Schiffsverkehr, GDELT)."""
import asyncio
import json as _json
import logging
import time
from typing import Optional
import httpx
import websockets
from fastapi import APIRouter, Depends, Query
from auth import get_current_user
logger = logging.getLogger("osint.geoint")
router = APIRouter(tags=["geoint"])
# ---------------------------------------------------------------------------
# Einfacher In-Memory-Cache
# ---------------------------------------------------------------------------
_cache: dict[str, tuple[float, dict]] = {}
def _get_cached(key: str, ttl: float) -> Optional[dict]:
if key in _cache:
ts, data = _cache[key]
if time.time() - ts < ttl:
return data
return None
def _set_cache(key: str, data: dict):
_cache[key] = (time.time(), data)
if len(_cache) > 50:
oldest = min(_cache, key=lambda k: _cache[k][0])
del _cache[oldest]
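
# A minimal usage sketch of the two cache helpers above: check, fetch on
# miss, store. The key name and the stand-in payload are hypothetical.
def _cache_usage_example() -> dict:
    data = _get_cached("example", ttl=60)
    if data is None:
        data = {"fetched_at": time.time()}  # stand-in for a real upstream fetch
        _set_cache("example", data)
    return data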
# ---------------------------------------------------------------------------
# Air traffic: global snapshot (airplanes.live)
# ---------------------------------------------------------------------------
_FLIGHT_GRID = [
    # Europe
    (48.0, 2.0),    # Western Europe (Paris)
    (48.0, 16.0),   # Central Europe (Vienna)
    (55.0, 10.0),   # Northern Europe (Denmark)
    (40.0, -4.0),   # Iberian Peninsula
    (41.0, 12.0),   # Southern Europe (Rome)
    (38.0, 24.0),   # Southeastern Europe (Greece)
    (55.0, 25.0),   # Baltics
    (60.0, 25.0),   # Eastern Scandinavia
    (52.0, 30.0),   # Eastern Europe
    (45.0, 37.0),   # Black Sea / eastern Turkey
    # UK / Iceland
    (54.0, -2.0),   # UK
    (63.0, -19.0),  # Iceland
    # Middle East (extended)
    (33.0, 36.0),   # Levant (Syria/Lebanon/Israel)
    (30.0, 31.0),   # Egypt / Cairo
    (25.0, 45.0),   # central Saudi Arabia
    (26.5, 56.0),   # Strait of Hormuz / UAE
    (25.0, 51.5),   # Qatar / Bahrain
    (33.0, 44.0),   # Iraq (Baghdad)
    (33.0, 52.0),   # Iran (Tehran)
    (15.0, 45.0),   # Yemen / Red Sea
    (21.0, 40.0),   # western Saudi Arabia (Jeddah)
    # North Africa
    (34.0, 2.0),    # Maghreb (Algiers)
    (33.0, -7.0),   # Morocco (Casablanca)
    (32.0, 13.0),   # Libya (Tripoli)
    # Central Asia
    (41.0, 69.0),   # Uzbekistan (Tashkent)
    (39.0, 63.0),   # Turkmenistan
    # North America, East Coast
    (40.0, -74.0),  # New York
    (33.0, -84.0),  # Atlanta
    (42.0, -88.0),  # Chicago
    (26.0, -80.0),  # Florida (Miami)
    (45.0, -74.0),  # Montreal
    # North America, West Coast
    (34.0, -118.0), # Los Angeles
    (47.0, -122.0), # Seattle
    (37.0, -122.0), # San Francisco
    # North America, central
    (30.0, -97.0),  # Texas (Austin)
    (39.0, -105.0), # Denver
    # East Asia
    (35.0, 140.0),  # Japan (Tokyo)
    (37.0, 127.0),  # Korea (Seoul)
    (31.0, 121.0),  # Shanghai
    (40.0, 117.0),  # Beijing
    (22.0, 114.0),  # Hong Kong
    (25.0, 121.0),  # Taiwan
    # South Asia
    (19.0, 73.0),   # Mumbai
    (28.0, 77.0),   # Delhi
    (13.0, 80.0),   # Chennai
    (7.0, 80.0),    # Sri Lanka
    # Southeast Asia
    (1.0, 104.0),   # Singapore
    (14.0, 101.0),  # Bangkok
    (-6.0, 107.0),  # Jakarta
    (10.0, 107.0),  # Ho Chi Minh City
    # Oceania
    (-34.0, 151.0), # Sydney
    (-37.0, 175.0), # New Zealand
    # Africa
    (-1.0, 37.0),   # Nairobi
    (-34.0, 18.0),  # Cape Town
    (6.0, 3.0),     # Lagos
    (9.0, 39.0),    # Addis Ababa
    # South America
    (-23.0, -43.0), # Rio de Janeiro
    (-34.0, -58.0), # Buenos Aires
    (-12.0, -77.0), # Lima
    (4.0, -74.0),   # Bogota
]
_flight_lock = asyncio.Lock()


async def _fetch_global_flights() -> dict:
    """Fetches flight data for all grid points in parallel."""
    cached = _get_cached("flights_global", ttl=30)
    if cached:
        return cached
    async with _flight_lock:
        # Re-check inside the lock: a concurrent request may have filled the cache.
        cached = _get_cached("flights_global", ttl=30)
        if cached:
            return cached
        seen: dict[str, dict] = {}
        errors = 0
        async with httpx.AsyncClient(timeout=10) as client:
            # Query the grid in batches of 8 requests with a short pause between batches.
            for i in range(0, len(_FLIGHT_GRID), 8):
                batch = _FLIGHT_GRID[i:i + 8]
                tasks = [client.get(f"https://api.airplanes.live/v2/point/{lat:.2f}/{lon:.2f}/250")
                         for lat, lon in batch]
                results = await asyncio.gather(*tasks, return_exceptions=True)
                for r in results:
                    if isinstance(r, Exception):
                        errors += 1
                        continue
                    try:
                        data = r.json()
                        # Deduplicate aircraft seen by overlapping grid circles via ICAO hex ID.
                        for ac in data.get("ac", []):
                            hex_id = ac.get("hex")
                            if hex_id and hex_id not in seen:
                                seen[hex_id] = ac
                    except Exception:
                        errors += 1
                if i + 8 < len(_FLIGHT_GRID):
                    await asyncio.sleep(0.3)
        result = {"ac": list(seen.values()), "total": len(seen), "errors": errors}
        logger.info(f"GEOINT flights: {len(seen)} aircraft ({errors} errors)")
        _set_cache("flights_global", result)
        return result


@router.get("/flights")
async def get_flights(_user: dict = Depends(get_current_user)):
    """Global air traffic snapshot, cached for 30 s."""
    return await _fetch_global_flights()
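
# A minimal client-side sketch for calling the endpoint above. The
# /api/geoint prefix and the bearer-token scheme are assumptions; the diff
# does not show how the router is mounted or how get_current_user
# authenticates.
async def _example_fetch_flights(base_url: str, token: str) -> dict:
    async with httpx.AsyncClient(timeout=15) as client:
        resp = await client.get(
            f"{base_url}/api/geoint/flights",
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        return resp.json()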
# ---------------------------------------------------------------------------
# Ship traffic: AISStream.io (global real-time AIS via WebSocket)
# ---------------------------------------------------------------------------
_AISSTREAM_KEY = "1a56b078db829727abd4d617937bae51c6f9973e"
_AISSTREAM_URL = "wss://stream.aisstream.io/v0/stream"

# Global ship store: {mmsi: {lat, lon, sog, cog, heading, name, ts}}
_ships_store: dict[int, dict] = {}
_ships_lock = asyncio.Lock()
_ships_ws_task: Optional[asyncio.Task] = None
_ships_connected = False


async def _aisstream_listener():
    """Long-lived WebSocket client for AISStream; collects ship positions."""
    global _ships_connected
    while True:
        try:
            logger.info("AISStream: connecting...")
            async with websockets.connect(_AISSTREAM_URL, ping_interval=30, ping_timeout=10,
                                          close_timeout=5) as ws:
                # Subscription: global bounding box, position reports only
                sub = {
                    "APIKey": _AISSTREAM_KEY,
                    "BoundingBoxes": [[[-90, -180], [90, 180]]],
                    "FilterMessageTypes": ["PositionReport"],
                }
                await ws.send(_json.dumps(sub))
                _ships_connected = True
                logger.info("AISStream: connected, receiving ship data...")
                async for raw in ws:
                    try:
                        text = raw.decode("utf-8") if isinstance(raw, bytes) else raw
                        msg = _json.loads(text)
                        meta = msg.get("MetaData", {})
                        mmsi = meta.get("MMSI")
                        if not mmsi:
                            continue
                        pos = msg.get("Message", {}).get("PositionReport", {})
                        lat = meta.get("latitude") or pos.get("Latitude")
                        lon = meta.get("longitude") or pos.get("Longitude")
                        # `is None` checks keep valid 0.0 coordinates (equator / prime meridian).
                        if lat is None or lon is None or not (-90 <= lat <= 90 and -180 <= lon <= 180):
                            continue
                        _ships_store[mmsi] = {
                            "mmsi": mmsi,
                            "lat": round(lat, 5),
                            "lon": round(lon, 5),
                            "sog": round(pos.get("Sog", 0), 1),
                            "cog": round(pos.get("Cog", 0), 1),
                            "heading": pos.get("TrueHeading", 0),
                            "name": (meta.get("ShipName") or "").strip(),
                            "ts": time.time(),
                        }
                        if len(_ships_store) % 1000 == 0:
                            logger.info(f"AISStream: {len(_ships_store)} ships collected")
                        # Prune stale entries (>15 min old) whenever the store size
                        # hits a multiple of 500.
                        if len(_ships_store) % 500 == 0:
                            cutoff = time.time() - 900
                            stale = [k for k, v in _ships_store.items() if v["ts"] < cutoff]
                            for k in stale:
                                del _ships_store[k]
                    except Exception as parse_err:
                        if len(_ships_store) < 5:
                            logger.warning(f"AISStream parse error: {parse_err}, raw type: {type(raw)}, first 100: {str(raw)[:100]}")
                        continue
        except Exception as e:
            _ships_connected = False
            logger.warning(f"AISStream error: {e}. Reconnecting in 10 s...")
            await asyncio.sleep(10)


def _start_aisstream():
    """Starts the AISStream listener as a background task."""
    global _ships_ws_task
    if _ships_ws_task is None or _ships_ws_task.done():
        _ships_ws_task = asyncio.create_task(_aisstream_listener())
        logger.info("AISStream background task started")


@router.get("/ships")
async def get_ships(_user: dict = Depends(get_current_user)):
    """Global ship traffic from AISStream; real-time positions."""
    # Lazy start: launch the WebSocket listener on first request
    _start_aisstream()
    ships = list(_ships_store.values())
    return {
        "ships": ships,
        "total": len(ships),
        "connected": _ships_connected,
    }
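
# Sketch of an alternative to the lazy start in get_ships: launch the
# listener eagerly from a FastAPI startup hook. The `app` object lives in
# main.py and is not importable here, so the hook is shown commented out;
# it is an assumption, not part of the original design.
#
# @app.on_event("startup")
# async def _eager_aisstream_start():
#     _start_aisstream()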
# ---------------------------------------------------------------------------
# GDELT news
# ---------------------------------------------------------------------------
@router.get("/gdelt")
async def get_gdelt(
    query: str = Query("conflict", max_length=200),
    _user: dict = Depends(get_current_user),
):
    """Proxy for the GDELT GEO 2.0 API, cached for 60 s."""
    cache_key = f"gdelt:{query[:50]}"
    cached = _get_cached(cache_key, ttl=60)
    if cached:
        return cached
    url = (
        "https://api.gdeltproject.org/api/v2/geo/geo"
        f"?query={query}&mode=PointData&format=GeoJSON"
        "&timespan=24h&maxrows=200"
    )
    try:
        async with httpx.AsyncClient(timeout=12) as client:
            resp = await client.get(url)
            resp.raise_for_status()
            data = resp.json()
    except Exception as e:
        logger.warning(f"GDELT error: {e}")
        # Return an empty FeatureCollection so the map layer degrades gracefully.
        return {"type": "FeatureCollection", "features": []}
    _set_cache(cache_key, data)
    return data
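
For context, a router module like this is typically mounted from main.py. The commit message only says that main.py was reset to its pre-GEOINT state, so the wiring below is a hedged sketch: the module name and the /api/geoint prefix are assumptions, not confirmed by the diff.

    # Hypothetical wiring in main.py (module name and prefix assumed)
    from fastapi import FastAPI
    import geoint

    app = FastAPI()
    app.include_router(geoint.router, prefix="/api/geoint")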