GEOINT: Globaler Flugverkehr + Schiffsverkehr-Layer
Flugverkehr: Globaler Snapshot ueber 29 Stuetzpunkte weltweit. Backend aggregiert parallel, 30s Cache, kein Flackern (atomarer Swap). Keine regionale Begrenzung mehr. Schiffsverkehr: Neuer Layer via Digitraffic AIS API (kostenlos, kein Key). 18.000+ Schiffe global, 60s Refresh. Blaue Schiffs-Icons mit Heading-Rotation. Popup zeigt MMSI, SOG, COG, Navigationsstatus. Backend: Batch-Fetching mit asyncio.Lock gegen Race Conditions.
Dieser Commit ist enthalten in:
@@ -1,4 +1,5 @@
|
||||
"""GEOINT-Router: Proxy fuer externe Echtzeit-Datenquellen (Flugverkehr, GDELT)."""
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
from typing import Optional
|
||||
@@ -28,44 +29,187 @@ def _get_cached(key: str, ttl: float) -> Optional[dict]:
|
||||
|
||||
def _set_cache(key: str, data: dict):
    """Store *data* under *key*, stamped with the current time.

    Once the cache holds more than 50 entries, the entry with the
    oldest timestamp is evicted to bound memory use.
    """
    _cache[key] = (time.time(), data)
    # Bound the cache size (max 50 entries): drop the stalest entry,
    # i.e. the one with the smallest stored timestamp.
    if len(_cache) > 50:
        stalest_key = min(_cache.items(), key=lambda kv: kv[1][0])[0]
        del _cache[stalest_key]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Flugverkehr: Globaler Snapshot (airplanes.live)
# ---------------------------------------------------------------------------
|
||||
# NOTE(review): a truncated fragment of the old regional /flights endpoint
# (lat/lon/radius query parameters, 20 s per-tile cache) was left here by the
# diff. It duplicated the route path of the new global snapshot endpoint
# defined further down and referenced state that no longer exists, so the
# fragment was removed.
|
||||
|
||||
# Grid anchor points for global coverage (each point is queried with a
# 250 nm radius, roughly 460 km).
# Covers: Europe, Middle East, North Africa, North America, East Asia,
# South Asia and Southeast Asia.
_FLIGHT_GRID = [
    # Europe
    (48.0, 2.0),     # Western Europe (Paris)
    (48.0, 16.0),    # Central Europe (Vienna)
    (55.0, 10.0),    # Northern Europe (Denmark)
    (40.0, -4.0),    # Iberian Peninsula
    (41.0, 12.0),    # Southern Europe (Rome)
    (38.0, 24.0),    # Southeastern Europe (Greece)
    (55.0, 25.0),    # Baltics
    (60.0, 25.0),    # Eastern Scandinavia
    (52.0, 30.0),    # Eastern Europe
    # Middle East
    (33.0, 36.0),    # Levant
    (25.0, 45.0),    # Arabia
    (33.0, 52.0),    # Iran
    # North Africa
    (34.0, 2.0),     # Maghreb
    (30.0, 31.0),    # Egypt
    # UK / Iceland
    (54.0, -2.0),    # UK
    (63.0, -19.0),   # Iceland
    # North America, East Coast
    (40.0, -74.0),   # New York
    (33.0, -84.0),   # Atlanta
    (42.0, -88.0),   # Chicago
    # North America, West Coast
    (34.0, -118.0),  # Los Angeles
    (47.0, -122.0),  # Seattle
    # East Asia
    (35.0, 140.0),   # Japan
    (37.0, 127.0),   # Korea
    (31.0, 121.0),   # Shanghai
    (22.0, 114.0),   # Hong Kong
    # South Asia
    (19.0, 73.0),    # Mumbai
    (28.0, 77.0),    # Delhi
    # Southeast Asia
    (1.0, 104.0),    # Singapore
    (14.0, 101.0),   # Bangkok
]
|
||||
|
||||
_flight_lock = asyncio.Lock()


async def _fetch_global_flights() -> dict:
    """Fetch aircraft for every grid anchor in parallel and merge them.

    Queries airplanes.live for each point in ``_FLIGHT_GRID`` (batches of 8
    with a short pause between batches to stay under rate limits),
    deduplicates aircraft by their ICAO hex id (first occurrence wins) and
    caches the merged snapshot for 30 s. The lock plus the double-checked
    cache read ensure only one task refreshes the snapshot at a time.

    Returns:
        ``{"ac": [...], "total": <unique aircraft>, "errors": <failed fetches>}``
    """
    cached = _get_cached("flights_global", ttl=30)
    if cached:
        return cached

    async with _flight_lock:
        # Re-check after acquiring the lock: another task may have
        # refreshed the cache while we were waiting.
        cached = _get_cached("flights_global", ttl=30)
        if cached:
            return cached

        seen: dict[str, dict] = {}  # hex id -> aircraft record (first wins)
        errors = 0

        async with httpx.AsyncClient(timeout=10) as client:
            # Fetch in batches of 8 to avoid rate limits.
            for i in range(0, len(_FLIGHT_GRID), 8):
                batch = _FLIGHT_GRID[i:i + 8]
                tasks = []
                for lat, lon in batch:
                    url = f"https://api.airplanes.live/v2/point/{lat:.2f}/{lon:.2f}/250"
                    tasks.append(client.get(url))

                results = await asyncio.gather(*tasks, return_exceptions=True)
                for r in results:
                    if isinstance(r, Exception):
                        errors += 1
                        continue
                    try:
                        # BUGFIX: check the HTTP status before parsing;
                        # an error response body must not be counted as
                        # valid aircraft data.
                        r.raise_for_status()
                        data = r.json()
                        for ac in data.get("ac", []):
                            hex_id = ac.get("hex")
                            if hex_id and hex_id not in seen:
                                seen[hex_id] = ac
                    except Exception:
                        errors += 1

                # Short pause between batches.
                if i + 8 < len(_FLIGHT_GRID):
                    await asyncio.sleep(0.3)

        result = {"ac": list(seen.values()), "total": len(seen), "errors": errors}
        logger.info(
            f"GEOINT Flights: {len(seen)} Flugzeuge aus {len(_FLIGHT_GRID)} Punkten"
            f" ({errors} Fehler)"
        )
        _set_cache("flights_global", result)
        return result
|
||||
|
||||
|
||||
@router.get("/flights")
async def get_flights(
    _user: dict = Depends(get_current_user),
):
    """Return the global air-traffic snapshot (30 s cache, deduplicated)."""
    snapshot = await _fetch_global_flights()
    return snapshot
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Schiffsverkehr: Digitraffic AIS (kostenlos, global, kein API-Key)
# ---------------------------------------------------------------------------

_ships_lock = asyncio.Lock()


async def _fetch_global_ships() -> dict:
    """Fetch global AIS ship positions from Digitraffic.

    Downloads the full location feed, keeps only features with a valid
    coordinate pair, and caches the result for 60 s. The lock plus the
    double-checked cache read prevent concurrent tasks from issuing
    duplicate requests.

    Returns:
        ``{"ships": [...], "total": int}`` — each ship entry carries
        mmsi, lat, lon, sog, cog, heading and navStat. The same shape is
        returned (empty) on fetch errors.
    """
    cached = _get_cached("ships_global", ttl=60)
    if cached:
        return cached

    async with _ships_lock:
        # Re-check after acquiring the lock: another task may have
        # populated the cache while we were waiting.
        cached = _get_cached("ships_global", ttl=60)
        if cached:
            return cached

        url = "https://meri.digitraffic.fi/api/ais/v1/locations"
        try:
            async with httpx.AsyncClient(timeout=20) as client:
                resp = await client.get(
                    url,
                    headers={
                        # Digitraffic asks clients to identify themselves.
                        "Digitraffic-User": "AegisSight-GEOINT",
                        "Accept-Encoding": "gzip",
                    },
                )
                resp.raise_for_status()
                data = resp.json()
        except Exception as e:
            logger.warning(f"Digitraffic AIS Fehler: {e}")
            # BUGFIX: use the same payload shape as the success path
            # (was {"features": [], "total": 0}) so clients can always
            # read the "ships" key.
            return {"ships": [], "total": 0}

        # Keep only features that carry a plausible coordinate pair.
        ships = []
        for feature in data.get("features", []):
            geom = feature.get("geometry")
            props = feature.get("properties", {})
            if not geom or not geom.get("coordinates"):
                continue
            coords = geom["coordinates"]
            # BUGFIX: defensive length check — GeoJSON positions may carry
            # extra elements (e.g. altitude); bare tuple-unpacking would
            # raise and abort the whole snapshot.
            if len(coords) < 2:
                continue
            lon, lat = coords[0], coords[1]
            if not (-180 <= lon <= 180 and -90 <= lat <= 90):
                continue
            ships.append({
                "mmsi": props.get("mmsi"),
                "lat": lat,
                "lon": lon,
                "sog": props.get("sog", 0),
                "cog": props.get("cog", 0),
                "heading": props.get("heading", 0),
                "navStat": props.get("navStat", 0),
            })

        result = {"ships": ships, "total": len(ships)}
        logger.info(f"GEOINT Ships: {len(ships)} Schiffe geladen")
        _set_cache("ships_global", result)
        return result
|
||||
|
||||
|
||||
@router.get("/ships")
async def get_ships(
    _user: dict = Depends(get_current_user),
):
    """Return the global ship-traffic snapshot (60 s cache)."""
    snapshot = await _fetch_global_ships()
    return snapshot
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
In neuem Issue referenzieren
Einen Benutzer sperren