AegisSight Globe: Initiales Release
Eigenstaendige GEOINT-Anwendung mit CesiumJS 3D-Globus. Echtzeit-Datenlayer: Flugverkehr (airplanes.live, 64 Stuetzpunkte), Schiffsverkehr (AISStream.io WebSocket), Erdbeben (USGS), Nachrichten (GDELT GEO). FastAPI Backend, taktisches Dark-UI.
Dieser Commit ist enthalten in:
89
src/data_flights.py
Normale Datei
89
src/data_flights.py
Normale Datei
@@ -0,0 +1,89 @@
|
||||
"""Flugverkehr-Collector: Globaler Snapshot via airplanes.live."""
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter
|
||||
|
||||
logger = logging.getLogger("globe.flights")
|
||||
router = APIRouter()
|
||||
|
||||
# Grid anchor points (lat, lon) for global coverage; each is queried with a
# 250 nm radius against airplanes.live.
# NOTE(review): the original comment claimed 64 points, but the list below
# contains 60 — confirm intended coverage.
# Rows group regions: Europe, UK/Iceland, Middle East, N. Africa/Central Asia,
# North America, East Asia, South/Southeast Asia, Oceania/Africa, South America.
_GRID = [
    (48,2),(48,16),(55,10),(40,-4),(41,12),(38,24),(55,25),(60,25),(52,30),(45,37),
    (54,-2),(63,-19),
    (33,36),(30,31),(25,45),(26.5,56),(25,51.5),(33,44),(33,52),(15,45),(21,40),
    (34,2),(33,-7),(32,13),(41,69),(39,63),
    (40,-74),(33,-84),(42,-88),(26,-80),(45,-74),(34,-118),(47,-122),(37,-122),(30,-97),(39,-105),
    (35,140),(37,127),(31,121),(40,117),(22,114),(25,121),
    (19,73),(28,77),(13,80),(7,80),(1,104),(14,101),(-6,107),(10,107),
    (-34,151),(-37,175),(-1,37),(-34,18),(6,3),(9,39),
    (-23,-43),(-34,-58),(-12,-77),(4,-74),
]
|
||||
|
||||
# Snapshot cache: "data" holds the last fetched result dict, "ts" its unix time.
_cache: dict = {"data": None, "ts": 0}
# Serializes cache refreshes so concurrent requests trigger only one fetch.
_lock = asyncio.Lock()
# Handle of the background collector task (None until started).
_task = None
|
||||
|
||||
|
||||
async def _fetch_all():
    """Fetch aircraft for every grid point, deduplicated by ICAO hex code.

    Results are cached for 25 s. Concurrent callers share a single refresh:
    the lock is double-checked so only one task actually hits the API.

    Returns:
        dict with keys "ac" (list of aircraft dicts), "total" (unique count)
        and "errors" (failed/invalid responses).
    """
    now = time.time()
    if _cache["data"] and now - _cache["ts"] < 25:
        return _cache["data"]

    async with _lock:
        # Re-check under the lock: another task may have refreshed meanwhile.
        if _cache["data"] and time.time() - _cache["ts"] < 25:
            return _cache["data"]

        seen: dict = {}
        errors = 0
        async with httpx.AsyncClient(timeout=10) as client:
            # Query in batches of 10 to bound concurrent requests.
            for i in range(0, len(_GRID), 10):
                batch = _GRID[i:i+10]
                tasks = [client.get(f"https://api.airplanes.live/v2/point/{lat:.2f}/{lon:.2f}/250")
                         for lat, lon in batch]
                results = await asyncio.gather(*tasks, return_exceptions=True)
                for r in results:
                    if isinstance(r, Exception):
                        errors += 1
                        continue
                    try:
                        # FIX: count HTTP error statuses as errors instead of
                        # silently parsing an error body as an empty result.
                        r.raise_for_status()
                        for ac in r.json().get("ac", []):
                            h = ac.get("hex")
                            # Overlapping 250 nm circles return duplicates;
                            # keep the first sighting per airframe.
                            if h and h not in seen:
                                seen[h] = ac
                    except Exception:
                        errors += 1
                if i + 10 < len(_GRID):
                    await asyncio.sleep(0.2)  # brief pause between batches (politeness/rate limit)

        _cache["data"] = {"ac": list(seen.values()), "total": len(seen), "errors": errors}
        _cache["ts"] = time.time()
        # Lazy %-args: message is only formatted if INFO is enabled.
        logger.info("Flights: %d Flugzeuge (%d Fehler)", len(seen), errors)
        return _cache["data"]
|
||||
|
||||
|
||||
async def _collector_loop():
    """Background loop that pre-warms the flight cache roughly every 30 s."""
    # Short grace period so the server finishes starting up first.
    await asyncio.sleep(5)
    while True:
        try:
            await _fetch_all()
        except Exception as e:
            # Never let a transient failure kill the loop — log and retry.
            logger.warning(f"Flight collector error: {e}")
        await asyncio.sleep(30)
|
||||
|
||||
|
||||
def start_flight_collector():
    """Launch the background flight collector; idempotent if already running."""
    global _task
    already_running = _task is not None and not _task.done()
    if already_running:
        return
    _task = asyncio.create_task(_collector_loop())
    logger.info("Flight collector gestartet")
|
||||
|
||||
|
||||
@router.get("/flights")
async def get_flights():
    """API endpoint: current global flight snapshot (served from cache)."""
    return await _fetch_all()
|
||||
32
src/data_gdelt.py
Normale Datei
32
src/data_gdelt.py
Normale Datei
@@ -0,0 +1,32 @@
|
||||
"""GDELT GEO 2.0: Geokodierte Echtzeit-Nachrichten."""
|
||||
import logging
|
||||
import time
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, Query
|
||||
|
||||
logger = logging.getLogger("globe.gdelt")
router = APIRouter()

# Per-query response cache: {query-prefix: (fetch unix time, GeoJSON payload)}
_cache: dict[str, tuple] = {}
|
||||
|
||||
|
||||
@router.get("/gdelt")
async def get_gdelt(query: str = Query("conflict OR crisis", max_length=200)):
    """Return geocoded news points from GDELT GEO 2.0 as GeoJSON.

    Responses are cached for 60 s per query prefix; the cache is bounded to
    30 entries. On any upstream failure an empty FeatureCollection is
    returned so the frontend layer degrades gracefully.
    """
    # NOTE(review): distinct queries sharing a 50-char prefix collide here.
    key = query[:50]
    if key in _cache and time.time() - _cache[key][0] < 60:
        return _cache[key][1]
    # FIX: pass the query via `params=` so httpx URL-encodes it. The previous
    # raw f-string interpolation broke on characters like `&` and `#` and
    # allowed query-parameter injection into the upstream request.
    params = {
        "query": query,
        "mode": "PointData",
        "format": "GeoJSON",
        "timespan": "24h",
        "maxrows": "250",
    }
    try:
        async with httpx.AsyncClient(timeout=12) as client:
            r = await client.get("https://api.gdeltproject.org/api/v2/geo/geo", params=params)
            r.raise_for_status()
            data = r.json()
            _cache[key] = (time.time(), data)
            # Bound memory: evict the oldest entry once we exceed 30 queries.
            if len(_cache) > 30:
                oldest = min(_cache, key=lambda k: _cache[k][0])
                del _cache[oldest]
            return data
    except Exception as e:
        logger.warning(f"GDELT Fehler: {e}")
        return {"type": "FeatureCollection", "features": []}
|
||||
29
src/data_quakes.py
Normale Datei
29
src/data_quakes.py
Normale Datei
@@ -0,0 +1,29 @@
|
||||
"""Erdbeben-Daten: USGS GeoJSON API."""
|
||||
import logging
|
||||
import time
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter
|
||||
|
||||
logger = logging.getLogger("globe.quakes")
router = APIRouter()

# Single-entry cache: "data" is the last USGS GeoJSON payload, "ts" its unix time.
_cache: dict = {"data": None, "ts": 0}
|
||||
|
||||
|
||||
@router.get("/quakes")
async def get_quakes():
    """USGS earthquakes (M2.5+, past 24 h) as GeoJSON, cached for 5 minutes."""
    cache_age = time.time() - _cache["ts"]
    if _cache["data"] and cache_age < 300:
        return _cache["data"]
    feed_url = "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.geojson"
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(feed_url)
            resp.raise_for_status()
            payload = resp.json()
    except Exception as e:
        logger.warning(f"USGS Fehler: {e}")
        # Serve stale data if we have any; otherwise an empty collection.
        return _cache["data"] or {"type": "FeatureCollection", "features": []}
    _cache["data"] = payload
    _cache["ts"] = time.time()
    return payload
|
||||
89
src/data_ships.py
Normale Datei
89
src/data_ships.py
Normale Datei
@@ -0,0 +1,89 @@
|
||||
"""Schiffsverkehr-Collector: AISStream.io WebSocket (global, Echtzeit)."""
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
|
||||
import websockets
|
||||
from fastapi import APIRouter
|
||||
|
||||
logger = logging.getLogger("globe.ships")
|
||||
router = APIRouter()
|
||||
|
||||
# SECURITY NOTE(review): a real-looking API key is committed as the env-var
# fallback below — rotate the key and remove the hard-coded default.
_AISSTREAM_KEY = os.getenv("AISSTREAM_KEY", "1a56b078db829727abd4d617937bae51c6f9973e")
_AISSTREAM_URL = "wss://stream.aisstream.io/v0/stream"

# Latest known position per vessel:
# {mmsi: {mmsi, lat, lon, sog, cog, heading, name, ts}}
_store: dict[int, dict] = {}
# True while the WebSocket subscription is live.
_connected = False
# Handle of the background listener task (None until started).
_task = None
|
||||
|
||||
|
||||
async def _listener():
    """Persistent WebSocket client for AISStream.

    Subscribes globally to PositionReport messages, keeps the latest position
    per MMSI in _store, and reconnects after 10 s on any failure.
    """
    global _connected
    while True:
        try:
            logger.info("AISStream: Verbinde...")
            async with websockets.connect(
                _AISSTREAM_URL, ping_interval=30, ping_timeout=10, close_timeout=5
            ) as ws:
                sub = {
                    "APIKey": _AISSTREAM_KEY,
                    # Single bounding box covering the whole globe.
                    "BoundingBoxes": [[[-90, -180], [90, 180]]],
                    "FilterMessageTypes": ["PositionReport"],
                }
                await ws.send(json.dumps(sub))
                _connected = True
                logger.info("AISStream: Verbunden")

                async for raw in ws:
                    try:
                        text = raw.decode("utf-8") if isinstance(raw, bytes) else raw
                        msg = json.loads(text)
                        meta = msg.get("MetaData", {})
                        mmsi = meta.get("MMSI")
                        if not mmsi:
                            continue
                        pos = msg.get("Message", {}).get("PositionReport", {})
                        # FIX: explicit None checks. The previous
                        # `meta.get(...) or pos.get(...)` / `if not lat`
                        # logic discarded valid 0.0 coordinates on the
                        # equator and prime meridian.
                        lat = meta.get("latitude")
                        if lat is None:
                            lat = pos.get("Latitude")
                        lon = meta.get("longitude")
                        if lon is None:
                            lon = pos.get("Longitude")
                        if lat is None or lon is None:
                            continue
                        if not (-90 <= lat <= 90 and -180 <= lon <= 180):
                            continue
                        _store[mmsi] = {
                            "mmsi": mmsi,
                            "lat": round(lat, 5),
                            "lon": round(lon, 5),
                            "sog": round(pos.get("Sog", 0), 1),
                            "cog": round(pos.get("Cog", 0), 1),
                            "heading": pos.get("TrueHeading", 0),
                            "name": (meta.get("ShipName") or "").strip(),
                            "ts": time.time(),
                        }
                        # Amortized stale cleanup: drop vessels silent for
                        # 15 min, checked when the store size is a multiple
                        # of 1000 updates.
                        if len(_store) % 1000 == 0:
                            cutoff = time.time() - 900
                            stale = [k for k, v in _store.items() if v["ts"] < cutoff]
                            for k in stale:
                                del _store[k]
                        if len(_store) % 5000 == 0:
                            # Lazy %-args: this runs on a hot message path.
                            logger.info("AISStream: %d Schiffe", len(_store))
                    except Exception:
                        # Malformed frame: skip it, keep the stream alive.
                        continue
        except Exception as e:
            _connected = False
            logger.warning(f"AISStream Fehler: {e}. Reconnect in 10s...")
            await asyncio.sleep(10)
|
||||
|
||||
|
||||
def start_ais_collector():
    """Launch the background AIS listener; idempotent if already running."""
    global _task
    already_running = _task is not None and not _task.done()
    if already_running:
        return
    _task = asyncio.create_task(_listener())
    logger.info("AIS collector gestartet")
|
||||
|
||||
|
||||
@router.get("/ships")
async def get_ships():
    """API endpoint: all currently tracked vessels plus connection state."""
    vessels = list(_store.values())
    return {"ships": vessels, "total": len(vessels), "connected": _connected}
|
||||
49
src/main.py
Normale Datei
49
src/main.py
Normale Datei
@@ -0,0 +1,49 @@
|
||||
"""AegisSight Globe — GEOINT 3D-Globus mit Echtzeit-Datenfusion."""
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
# Logging: file handler plus console.
# FIX: create the log directory first — logging.FileHandler raises
# FileNotFoundError when the target directory does not exist.
_log_dir = Path(__file__).parent.parent / "logs"
_log_dir.mkdir(parents=True, exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
    handlers=[
        logging.FileHandler(_log_dir / "globe.log"),
        logging.StreamHandler(),
    ],
)
|
||||
logger = logging.getLogger("globe")

# Docs endpoints disabled: this is a closed, single-page application.
app = FastAPI(title="AegisSight Globe", docs_url=None, redoc_url=None)

# --- Data modules ---
# Imported after app creation; each contributes an APIRouter, and
# flights/ships additionally expose a background-collector starter
# invoked from the startup hook below.
from data_flights import router as flights_router, start_flight_collector
from data_ships import router as ships_router, start_ais_collector
from data_quakes import router as quakes_router
from data_gdelt import router as gdelt_router

app.include_router(flights_router, prefix="/api")
app.include_router(ships_router, prefix="/api")
app.include_router(quakes_router, prefix="/api")
app.include_router(gdelt_router, prefix="/api")

# --- Static files ---
static_dir = Path(__file__).parent.parent / "static"
app.mount("/static", StaticFiles(directory=str(static_dir)), name="static")
|
||||
|
||||
|
||||
@app.get("/")
async def index():
    """Serve the single-page application shell."""
    index_path = static_dir / "index.html"
    return FileResponse(str(index_path))
|
||||
|
||||
|
||||
# --- Startup ---
|
||||
@app.on_event("startup")
async def startup():
    """Start the background collectors once the event loop is running."""
    logger.info("AegisSight Globe gestartet")
    start_ais_collector()
    start_flight_collector()
|
||||
In neuem Issue referenzieren
Einen Benutzer sperren