Fix: use the Europe/Berlin timezone consistently for all timestamps

Resolves an inconsistency: some timestamps were stored in UTC, others in Berlin time. This broke auto-refresh and fact-checking, because time comparisons returned wrong results.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
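For illustration only (not part of the commit): a minimal sketch of the failure mode, assuming expiry times were written as naive UTC strings and later compared against Berlin wall-clock time. In winter the two clocks differ by one hour, in summer by two, so a fresh 15-minute magic link can look expired on arrival.

    # Sketch of the bug, not repo code: a naive UTC string read back as Berlin time.
    from datetime import datetime, timedelta, timezone
    from zoneinfo import ZoneInfo

    BERLIN = ZoneInfo("Europe/Berlin")

    # Expiry computed in UTC, stored as a naive string (as the old code did):
    stored = (datetime.now(timezone.utc) + timedelta(minutes=15)).strftime('%Y-%m-%d %H:%M:%S')

    # Re-read later and mislabelled as Berlin wall-clock time:
    expires = datetime.fromisoformat(stored).replace(tzinfo=BERLIN)
    print(datetime.now(BERLIN) > expires)  # True: "expired", although 15 minutes have not passed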
@@ -1,6 +1,6 @@
 """Auth router: magic-link login and user management."""
 import logging
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta
 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from models import (
     MagicLinkRequest,
@@ -78,7 +78,7 @@ async def request_magic_link(
     # Generate token + code
     token = generate_magic_token()
     code = generate_magic_code()
-    expires_at = (datetime.now(timezone.utc) + timedelta(minutes=MAGIC_LINK_EXPIRE_MINUTES)).strftime('%Y-%m-%d %H:%M:%S')
+    expires_at = (datetime.now(TIMEZONE) + timedelta(minutes=MAGIC_LINK_EXPIRE_MINUTES)).strftime('%Y-%m-%d %H:%M:%S')

     # Invalidate old, unused magic links for this e-mail address
     await db.execute(
@@ -124,10 +124,10 @@ async def verify_magic_link(
         raise HTTPException(status_code=400, detail="Ungueltiger oder bereits verwendeter Link")

     # Check expiry
-    now = datetime.now(timezone.utc)
+    now = datetime.now(TIMEZONE)
     expires = datetime.fromisoformat(ml["expires_at"])
     if expires.tzinfo is None:
-        expires = expires.replace(tzinfo=timezone.utc)
+        expires = expires.replace(tzinfo=TIMEZONE)
     if now > expires:
         raise HTTPException(status_code=400, detail="Link abgelaufen. Bitte neuen Link anfordern.")

@@ -200,10 +200,10 @@ async def verify_magic_code(
         raise HTTPException(status_code=400, detail="Ungueltiger Code")

     # Check expiry
-    now = datetime.now(timezone.utc)
+    now = datetime.now(TIMEZONE)
     expires = datetime.fromisoformat(ml["expires_at"])
     if expires.tzinfo is None:
-        expires = expires.replace(tzinfo=timezone.utc)
+        expires = expires.replace(tzinfo=TIMEZONE)
     if now > expires:
         raise HTTPException(status_code=400, detail="Code abgelaufen. Bitte neuen Code anfordern.")

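The hunks above attach TIMEZONE to naive stored timestamps instead of timezone.utc. The constant itself comes from config, whose definition is not shown in this diff; presumably it is a zoneinfo object for Europe/Berlin, along the lines of this sketch (the commit title only confirms the zone name, everything else is an assumption):

    # Hypothetical config.py definition -- not shown in this commit.
    from zoneinfo import ZoneInfo  # stdlib, Python 3.9+

    TIMEZONE = ZoneInfo("Europe/Berlin")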
@@ -5,7 +5,8 @@ from models import IncidentCreate, IncidentUpdate, IncidentResponse, Subscriptio
 from auth import get_current_user
 from middleware.license_check import require_writable_license
 from database import db_dependency, get_db
-from datetime import datetime, timezone
+from datetime import datetime
+from config import TIMEZONE
 import asyncio
 import aiosqlite
 import json
@@ -101,7 +102,7 @@ async def create_incident(
 ):
     """Create a new incident."""
     tenant_id = current_user.get("tenant_id")
-    now = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
+    now = datetime.now(TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')
     cursor = await db.execute(
         """INSERT INTO incidents (title, description, type, refresh_mode, refresh_interval,
                                   retention_days, international_sources, visibility,
@@ -183,7 +184,7 @@ async def update_incident(
     if not updates:
         return await _enrich_incident(db, row)

-    updates["updated_at"] = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
+    updates["updated_at"] = datetime.now(TIMEZONE).strftime('%Y-%m-%d %H:%M:%S')
     set_clause = ", ".join(f"{k} = ?" for k in updates)
     values = list(updates.values()) + [incident_id]

@@ -286,7 +287,7 @@ async def get_locations(
     await _check_incident_access(db, incident_id, current_user["id"], tenant_id)
     cursor = await db.execute(
         """SELECT al.location_name, al.location_name_normalized, al.country_code,
-                  al.latitude, al.longitude, al.confidence,
+                  al.latitude, al.longitude, al.confidence, al.category,
                   a.id as article_id, a.headline, a.headline_de, a.source, a.source_url
            FROM article_locations al
            JOIN articles a ON a.id = al.article_id
@@ -310,8 +311,11 @@ async def get_locations(
                 "confidence": row["confidence"],
                 "article_count": 0,
                 "articles": [],
+                "categories": {},
             }
         loc_map[key]["article_count"] += 1
+        cat = row["category"] or "mentioned"
+        loc_map[key]["categories"][cat] = loc_map[key]["categories"].get(cat, 0) + 1
         # Include at most 10 articles per location
         if len(loc_map[key]["articles"]) < 10:
             loc_map[key]["articles"].append({
@@ -321,7 +325,18 @@ async def get_locations(
                 "source_url": row["source_url"],
             })

-    return list(loc_map.values())
+    # Determine the dominant category per location (priority: target > retaliation > actor > mentioned)
+    priority = {"target": 4, "retaliation": 3, "actor": 2, "mentioned": 1}
+    result = []
+    for loc in loc_map.values():
+        cats = loc.pop("categories")
+        if cats:
+            best_cat = max(cats, key=lambda c: (priority.get(c, 0), cats[c]))
+        else:
+            best_cat = "mentioned"
+        loc["category"] = best_cat
+        result.append(loc)
+    return result


 # Geoparse status per incident (in-memory)
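A worked example of the selection just added (illustrative, not repo code): max() ranks candidates first by the priority table and only breaks ties by count, so a single "target" hit outweighs any number of "mentioned" hits.

    # Illustrative: dominant-category pick for one location.
    priority = {"target": 4, "retaliation": 3, "actor": 2, "mentioned": 1}
    cats = {"mentioned": 7, "target": 1}
    best = max(cats, key=lambda c: (priority.get(c, 0), cats[c]))
    print(best)  # "target": higher priority beats the larger "mentioned" count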
@@ -364,11 +379,11 @@ async def _run_geoparse_background(incident_id: int, tenant_id: int | None):
         await db.execute(
             """INSERT INTO article_locations
                (article_id, incident_id, location_name, location_name_normalized,
-                country_code, latitude, longitude, confidence, source_text, tenant_id)
-               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+                country_code, latitude, longitude, confidence, source_text, tenant_id, category)
+               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
             (art_id, incident_id, loc["location_name"], loc["location_name_normalized"],
              loc["country_code"], loc["lat"], loc["lon"], loc["confidence"],
-             loc.get("source_text", ""), tenant_id),
+             loc.get("source_text", ""), tenant_id, loc.get("category", "mentioned")),
         )
         geo_count += 1
     await db.commit()
@@ -652,7 +667,7 @@ def _build_markdown_export(
         lines.append(snap_summary)
         lines.append("")

-    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M UTC")
+    now = datetime.now(TIMEZONE).strftime("%Y-%m-%d %H:%M %Z")
     lines.append("---")
     lines.append(f"*Exportiert am {now} aus AegisSight Monitor*")
     return "\n".join(lines)
@@ -663,7 +678,7 @@ def _build_json_export(
     snapshots: list, scope: str, creator: str
 ) -> dict:
     """Structured JSON for export."""
-    now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+    now = datetime.now(TIMEZONE).strftime("%Y-%m-%dT%H:%M:%S%z")

     sources = []
     sources_json = incident.get("sources_json")
@@ -772,7 +787,7 @@ async def export_incident(
     snapshots = [dict(r) for r in await cursor.fetchall()]

     # File name
-    date_str = datetime.now(timezone.utc).strftime("%Y%m%d")
+    date_str = datetime.now(TIMEZONE).strftime("%Y%m%d")
     slug = _slugify(incident["title"])
     scope_suffix = "_vollexport" if scope == "full" else ""
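With a Europe/Berlin tzinfo, the export format directives above expand to the real zone instead of a hardcoded label; an illustrative sketch of what the stamps render as:

    # Illustrative: export timestamps under Europe/Berlin.
    from datetime import datetime
    from zoneinfo import ZoneInfo

    now = datetime(2025, 6, 15, 14, 30, tzinfo=ZoneInfo("Europe/Berlin"))  # June: CEST
    print(now.strftime("%Y-%m-%d %H:%M %Z"))    # 2025-06-15 14:30 CEST
    print(now.strftime("%Y-%m-%dT%H:%M:%S%z"))  # 2025-06-15T14:30:00+0200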
src/routers/public_api.py (new file, 156 lines)
@@ -0,0 +1,156 @@
+"""Public API for the Lagebild (situation report) page on aegissight.de.
+
+Authentication via X-API-Key header (separate from the JWT auth).
+Exposes the Iran conflict (all related incidents) as read-only.
+"""
+import json
+import logging
+import os
+import time
+from collections import defaultdict
+from datetime import datetime
+from config import TIMEZONE
+from fastapi import APIRouter, Depends, Header, HTTPException, Request
+from database import db_dependency
+
+logger = logging.getLogger("osint.public_api")
+
+router = APIRouter(prefix="/api/public", tags=["public"])
+
+VALID_API_KEY = os.environ.get("AEGIS_PUBLIC_API_KEY")
+
+# All Iran incident IDs (main incident #6 plus offshoots)
+IRAN_INCIDENT_IDS = [6, 18, 19, 20]
+PRIMARY_INCIDENT_ID = 6
+
+# Simple in-memory rate limiter: max 120 requests per hour per IP
+_rate_limit: dict[str, list[float]] = defaultdict(list)
+RATE_LIMIT_MAX = 120
+RATE_LIMIT_WINDOW = 3600  # 1 hour
+
+
+def _check_rate_limit(ip: str):
+    now = time.time()
+    _rate_limit[ip] = [t for t in _rate_limit[ip] if now - t < RATE_LIMIT_WINDOW]
+    if len(_rate_limit[ip]) >= RATE_LIMIT_MAX:
+        raise HTTPException(status_code=429, detail="Rate limit exceeded")
+    _rate_limit[ip].append(now)
+
+
+async def verify_api_key(request: Request, x_api_key: str = Header(...)):
+    """Checks the API key and the rate limit."""
+    if not VALID_API_KEY or x_api_key != VALID_API_KEY:
+        logger.warning(f"Ungültiger API-Key von {request.client.host}")
+        raise HTTPException(status_code=403, detail="Invalid API key")
+    _check_rate_limit(request.client.host)
+
+
+def _in_clause(ids):
+    """Builds a safe IN clause for multiple IDs."""
+    return ",".join(str(int(i)) for i in ids)
+
+
+@router.get("/lagebild", dependencies=[Depends(verify_api_key)])
+async def get_lagebild(db=Depends(db_dependency)):
+    """Returns the current Lagebild (Iran conflict) with all data."""
+    ids = _in_clause(IRAN_INCIDENT_IDS)
+
+    # Load the main incident (for summary, sources)
+    cursor = await db.execute(
+        "SELECT * FROM incidents WHERE id = ?", (PRIMARY_INCIDENT_ID,)
+    )
+    incident = await cursor.fetchone()
+    if not incident:
+        raise HTTPException(status_code=404, detail="Incident not found")
+    incident = dict(incident)
+
+    # Load all articles from all Iran incidents
+    cursor = await db.execute(
+        f"""SELECT id, headline, headline_de, source, source_url, language,
+                   published_at, collected_at, verification_status, incident_id
+            FROM articles WHERE incident_id IN ({ids})
+            ORDER BY published_at DESC, collected_at DESC"""
+    )
+    articles = [dict(r) for r in await cursor.fetchall()]
+
+    # Load all fact checks from all Iran incidents
+    cursor = await db.execute(
+        f"""SELECT id, claim, status, sources_count, evidence, status_history, checked_at, incident_id
+            FROM fact_checks WHERE incident_id IN ({ids})
+            ORDER BY checked_at DESC"""
+    )
+    fact_checks = []
+    for r in await cursor.fetchall():
+        fc = dict(r)
+        try:
+            fc["status_history"] = json.loads(fc.get("status_history") or "[]")
+        except (json.JSONDecodeError, TypeError):
+            fc["status_history"] = []
+        fact_checks.append(fc)
+
+    # Source count across all incidents
+    cursor = await db.execute(
+        f"SELECT COUNT(DISTINCT source) as cnt FROM articles WHERE incident_id IN ({ids})"
+    )
+    source_count = (await cursor.fetchone())["cnt"]
+
+    # Snapshots from all Iran incidents
+    cursor = await db.execute(
+        f"""SELECT id, incident_id, article_count, fact_check_count, created_at
+            FROM incident_snapshots WHERE incident_id IN ({ids})
+            ORDER BY created_at DESC"""
+    )
+    available_snapshots = [dict(r) for r in await cursor.fetchall()]
+
+    # Sources JSON from the main incident
+    try:
+        sources_json = json.loads(incident.get("sources_json") or "[]")
+    except (json.JSONDecodeError, TypeError):
+        sources_json = []
+
+    return {
+        "generated_at": datetime.now(TIMEZONE).isoformat(),
+        "incident": {
+            "id": incident["id"],
+            "title": incident["title"],
+            "description": incident.get("description", ""),
+            "status": incident["status"],
+            "type": incident.get("type", "adhoc"),
+            "created_at": incident["created_at"],
+            "updated_at": incident["updated_at"],
+            "article_count": len(articles),
+            "source_count": source_count,
+            "factcheck_count": len(fact_checks),
+        },
+        "current_lagebild": {
+            "summary_markdown": incident.get("summary", ""),
+            "sources_json": sources_json,
+            "updated_at": incident["updated_at"],
+        },
+        "articles": articles,
+        "fact_checks": fact_checks,
+        "available_snapshots": available_snapshots,
+    }
+
+
+@router.get("/lagebild/snapshot/{snapshot_id}", dependencies=[Depends(verify_api_key)])
+async def get_snapshot(snapshot_id: int, db=Depends(db_dependency)):
+    """Returns a historical snapshot."""
+    ids = _in_clause(IRAN_INCIDENT_IDS)
+    cursor = await db.execute(
+        f"""SELECT id, summary, sources_json, article_count, fact_check_count, created_at
+            FROM incident_snapshots
+            WHERE id = ? AND incident_id IN ({ids})""",
+        (snapshot_id,),
+    )
+    snap = await cursor.fetchone()
+    if not snap:
+        raise HTTPException(status_code=404, detail="Snapshot not found")
+
+    snap = dict(snap)
+    try:
+        snap["sources_json"] = json.loads(snap.get("sources_json") or "[]")
+    except (json.JSONDecodeError, TypeError):
+        snap["sources_json"] = []
+
+    return snap
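A minimal client sketch for the new endpoint (hypothetical values; the path, header name, and key env var come from the code above, the base URL is assumed from the module docstring):

    # Hypothetical consumer of GET /api/public/lagebild -- requires the httpx package.
    import os
    import httpx

    resp = httpx.get(
        "https://aegissight.de/api/public/lagebild",
        headers={"X-API-Key": os.environ["AEGIS_PUBLIC_API_KEY"]},
        timeout=30,
    )
    resp.raise_for_status()
    data = resp.json()
    print(data["incident"]["title"], data["incident"]["article_count"])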