Lizenzserver (Backend) - Erstellt

Dieser Commit ist enthalten in:
2025-06-19 21:48:15 +02:00
Ursprung afa2b52494
Commit ae30b74e9c
23 geänderte Dateien mit 3712 neuen und 387 gelöschten Zeilen

Datei anzeigen

@@ -4,18 +4,52 @@
### Public Endpoints
#### POST /api/v1/activate
#### GET /
Root endpoint - Service status.
**Response:**
```json
{
"status": "ok",
"service": "V2 License Server",
"timestamp": "2025-06-19T10:30:00Z"
}
```
#### GET /health
Health check endpoint.
**Response:**
```json
{
"status": "healthy",
"timestamp": "2025-06-19T10:30:00Z"
}
```
#### GET /metrics
Prometheus metrics endpoint.
**Response:**
Prometheus metrics in the standard text exposition format (served with the `CONTENT_TYPE_LATEST` content type).
### License API Endpoints
All license endpoints require API key authentication via `X-API-Key` header.
#### POST /api/license/activate
Activate a license on a new system.
**Headers:**
```
X-API-Key: your-api-key
```
**Request:**
```json
{
"license_key": "XXXX-XXXX-XXXX-XXXX",
"hardware_id": {
"mac_address": "00:1B:44:11:3A:B7",
"cpu_id": "BFEBFBFF000906EA",
"system_uuid": "4C4C4544-0052-3410-8036-B8C04F303832"
},
"hardware_hash": "unique-hardware-identifier",
"machine_name": "DESKTOP-ABC123",
"app_version": "1.0.0"
}
@@ -24,28 +58,32 @@ Activate a license on a new system.
**Response:**
```json
{
"success": true,
"activation_id": "act_123456",
"features": ["feature1", "feature2"],
"max_users": 10,
"valid_until": "2026-01-01T00:00:00Z",
"message": "License activated successfully",
"activation": {
"id": 123,
"license_key": "XXXX-XXXX-XXXX-XXXX",
"hardware_hash": "unique-hardware-identifier",
"machine_name": "DESKTOP-ABC123",
"activated_at": "2025-06-19T10:30:00Z",
"last_heartbeat": "2025-06-19T10:30:00Z",
"is_active": true
}
}
```
#### POST /api/v1/validate
Validate an active license.
#### POST /api/license/verify
Verify an active license.
**Headers:**
```
X-API-Key: your-api-key
```
**Request:**
```json
{
"license_key": "XXXX-XXXX-XXXX-XXXX",
"activation_id": "act_123456",
"hardware_id": {
"mac_address": "00:1B:44:11:3A:B7",
"cpu_id": "BFEBFBFF000906EA",
"system_uuid": "4C4C4544-0052-3410-8036-B8C04F303832"
},
"current_users": 5,
"hardware_hash": "unique-hardware-identifier",
"app_version": "1.0.0"
}
```
@@ -54,333 +92,526 @@ Validate an active license.
```json
{
"valid": true,
"features": ["feature1", "feature2"],
"max_users": 10,
"message": "License valid"
"message": "License is valid",
"license": {
"key": "XXXX-XXXX-XXXX-XXXX",
"valid_until": "2026-01-01",
"max_users": 10
},
"update_available": false,
"latest_version": "1.0.0"
}
```
#### POST /api/v1/heartbeat
Send usage heartbeat.
#### GET /api/license/info/{license_key}
Get license information.
**Headers:**
```
X-API-Key: your-api-key
```
**Response:**
```json
{
"license": {
"id": 123,
"key": "XXXX-XXXX-XXXX-XXXX",
"customer_name": "ACME Corp",
"type": "perpetual",
"valid_from": "2025-01-01",
"valid_until": "2026-01-01",
"max_activations": 5,
"max_users": 10,
"is_active": true
},
"activations": [
{
"id": 456,
"hardware_hash": "unique-hardware-identifier",
"machine_name": "DESKTOP-ABC123",
"activated_at": "2025-06-19T10:00:00Z",
"last_heartbeat": "2025-06-19T14:30:00Z",
"is_active": true
}
]
}
```
### Version API Endpoints
#### POST /api/version/check
Check for available updates.
**Headers:**
```
X-API-Key: your-api-key
```
**Request:**
```json
{
"license_key": "XXXX-XXXX-XXXX-XXXX",
"activation_id": "act_123456",
"current_users": 5,
"feature_usage": {
"feature1": 150,
"feature2": 75,
"current_version": "1.0.0",
"license_key": "XXXX-XXXX-XXXX-XXXX"
}
```
**Response:**
```json
{
"update_available": true,
"latest_version": "1.1.0",
"download_url": "https://example.com/download/v1.1.0",
"release_notes": "Bug fixes and performance improvements",
"is_mandatory": false
}
```
#### GET /api/version/latest
Get latest version information.
**Headers:**
```
X-API-Key: your-api-key
```
**Response:**
```json
{
"version": "1.1.0",
"release_date": "2025-06-15",
"download_url": "https://example.com/download/v1.1.0",
"release_notes": "Bug fixes and performance improvements",
"is_mandatory": false
}
```
## Admin Panel API
### Customer API Endpoints
#### GET /api/customers
Search customers for Select2 dropdown.
**Query Parameters:**
- `q`: Search query
- `page`: Page number (default: 1)
**Response:**
```json
{
"results": [
{
"id": 123,
"text": "ACME Corp - admin@acme.com"
}
],
"pagination": {
"more": false
}
}
```
**Response:**
```json
{
"success": true,
"next_heartbeat": "2025-01-01T12:30:00Z"
}
```
#### POST /api/v1/deactivate
Deactivate a license.
**Request:**
```json
{
"license_key": "XXXX-XXXX-XXXX-XXXX",
"activation_id": "act_123456",
"reason": "System upgrade"
}
```
**Response:**
```json
{
"success": true,
"message": "License deactivated successfully"
}
```
### Admin API Endpoints
#### Authentication
All admin endpoints require JWT authentication:
```
Authorization: Bearer <jwt_token>
```
#### GET /api/v1/admin/licenses
List all licenses.
**Query Parameters:**
- `page` (default: 1)
- `per_page` (default: 50)
- `status` (active, inactive, expired)
- `customer_id`
#### GET /api/customer/{customer_id}/licenses
Get licenses for a specific customer.
**Response:**
```json
{
"licenses": [
{
"id": 1,
"id": 456,
"license_key": "XXXX-XXXX-XXXX-XXXX",
"customer_name": "ACME Corp",
"type": "subscription",
"type": "perpetual",
"status": "active",
"valid_from": "2025-01-01",
"valid_until": "2026-01-01",
"max_activations": 5,
"current_activations": 3
}
],
"total": 100,
"page": 1,
"per_page": 50
]
}
```
#### POST /api/v1/admin/licenses
Create a new license.
#### GET /api/customer/{customer_id}/quick-stats
Get quick statistics for a customer.
**Response:**
```json
{
"total_licenses": 10,
"active_licenses": 8,
"total_activations": 25,
"total_users": 150
}
```
### License Management API
#### POST /api/license/{license_id}/toggle
Toggle license active status.
**Response:**
```json
{
"success": true,
"is_active": true,
"message": "License activated successfully"
}
```
#### POST /api/licenses/bulk-activate
Activate multiple licenses.
**Request:**
```json
{
"customer_id": 123,
"type": "subscription",
"valid_from": "2025-01-01",
"valid_until": "2026-01-01",
"max_activations": 5,
"max_users": 10,
"features": ["feature1", "feature2"],
"license_ids": [1, 2, 3]
}
```
**Response:**
```json
{
"id": 456,
"license_key": "NEW1-NEW2-NEW3-NEW4",
"message": "License created successfully",
"success": true,
"count": 3,
"message": "3 licenses activated successfully"
}
```
#### GET /api/v1/admin/licenses/{id}
Get license details.
#### POST /api/licenses/bulk-deactivate
Deactivate multiple licenses.
**Request:**
```json
{
"license_ids": [1, 2, 3]
}
```
**Response:**
```json
{
"id": 1,
"license_key": "XXXX-XXXX-XXXX-XXXX",
"customer": {
"id": 123,
"name": "ACME Corp",
"email": "admin@acme.com"
},
"activations": [
{
"id": "act_123456",
"machine_name": "DESKTOP-ABC123",
"activated_at": "2025-01-01T10:00:00Z",
"last_heartbeat": "2025-06-19T14:30:00Z",
"status": "active"
}
],
"usage_stats": {
"total_users": 150,
"peak_users": 10,
"feature_usage": {
"feature1": 5000,
"feature2": 2500
}
}
"success": true,
"count": 3,
"message": "3 licenses deactivated successfully"
}
```
#### PUT /api/v1/admin/licenses/{id}
Update license.
#### POST /api/licenses/bulk-delete
Delete multiple licenses.
**Request:**
```json
{
"license_ids": [1, 2, 3]
}
```
**Response:**
```json
{
"success": true,
"deleted": 3,
"message": "3 licenses deleted successfully"
}
```
#### POST /api/license/{license_id}/quick-edit
Quick edit license properties.
**Request:**
```json
{
"valid_until": "2027-01-01",
"max_activations": 10,
"features": ["feature1", "feature2", "feature3"],
"max_users": 50
}
```
#### DELETE /api/v1/admin/licenses/{id}
Revoke a license.
#### GET /api/v1/admin/activations
List all activations.
**Query Parameters:**
- `license_id`
- `status` (active, inactive)
- `from_date`
- `to_date`
#### GET /api/v1/admin/statistics
Get usage statistics.
**Query Parameters:**
- `from_date`
- `to_date`
- `customer_id`
- `license_id`
**Response:**
```json
{
"summary": {
"total_licenses": 100,
"active_licenses": 85,
"total_activations": 250,
"active_users": 1500
},
"usage_trends": [
"success": true,
"message": "License updated successfully"
}
```
#### POST /api/generate-license-key
Generate a new license key.
**Response:**
```json
{
"license_key": "NEW1-NEW2-NEW3-NEW4"
}
```
### Device Management API
#### GET /api/license/{license_id}/devices
Get devices for a license.
**Response:**
```json
{
"devices": [
{
"date": "2025-06-01",
"active_users": 1200,
"new_activations": 5,
"id": 123,
"hardware_hash": "unique-hardware-identifier",
"machine_name": "DESKTOP-ABC123",
"activated_at": "2025-01-01T10:00:00Z",
"last_heartbeat": "2025-06-19T14:30:00Z",
"is_active": true,
"app_version": "1.0.0"
}
]
}
```
#### POST /api/license/{license_id}/register-device
Register a new device.
**Request:**
```json
{
"hardware_hash": "unique-hardware-identifier",
"machine_name": "DESKTOP-XYZ789",
"app_version": "1.0.0"
}
```
**Response:**
```json
{
"success": true,
"device_id": 456,
"message": "Device registered successfully"
}
```
#### POST /api/license/{license_id}/deactivate-device/{device_id}
Deactivate a device.
**Response:**
```json
{
"success": true,
"message": "Device deactivated successfully"
}
```
### Resource Management API
#### GET /api/license/{license_id}/resources
Get resources for a license.
**Response:**
```json
{
"resources": [
{
"id": 789,
"type": "server",
"identifier": "SRV-001",
"status": "allocated",
"allocated_at": "2025-06-01T10:00:00Z"
}
]
}
```
#### POST /api/resources/allocate
Allocate resources to a license.
**Request:**
```json
{
"license_id": 123,
"resource_ids": [789, 790]
}
```
**Response:**
```json
{
"success": true,
"allocated": 2,
"message": "2 resources allocated successfully"
}
```
#### GET /api/resources/check-availability
Check resource availability.
**Query Parameters:**
- `type`: Resource type
- `count`: Number of resources needed
**Response:**
```json
{
"available": true,
"count": 5,
"resources": [
{
"id": 791,
"type": "server",
"identifier": "SRV-002"
}
]
}
```
#### GET /api/resources/stats
Get resource statistics.
**Response:**
```json
{
"total": 100,
"allocated": 75,
"available": 25,
"by_type": {
"server": {
"total": 50,
"allocated": 40,
"available": 10
},
"workstation": {
"total": 50,
"allocated": 35,
"available": 15
}
}
}
```
### Search API
#### GET /api/global-search
Global search across all entities.
**Query Parameters:**
- `q`: Search query
- `type`: Entity type filter (customer, license, device)
- `limit`: Maximum results (default: 20)
**Response:**
```json
{
"results": [
{
"type": "customer",
"id": 123,
"title": "ACME Corp",
"subtitle": "admin@acme.com",
"url": "/customer/edit/123"
},
{
"type": "license",
"id": 456,
"title": "XXXX-XXXX-XXXX-XXXX",
"subtitle": "ACME Corp - Active",
"url": "/license/edit/456"
}
],
"feature_usage": {
"feature1": 50000,
"feature2": 25000,
"total": 15
}
```
### Session Management API
#### GET /api/sessions/active-count
Get count of active sessions.
**Response:**
```json
{
"count": 42
}
```
### Monitoring API
#### GET /api/monitoring/live-stats
Get live statistics for monitoring.
**Response:**
```json
{
"timestamp": "2025-06-19T14:30:00Z",
"metrics": {
"active_licenses": 850,
"total_activations": 2500,
"active_sessions": 1200,
"heartbeats_per_minute": 450
},
"alerts": [
{
"type": "warning",
"message": "High CPU usage detected",
"timestamp": "2025-06-19T14:25:00Z"
}
]
}
```
#### GET /api/monitoring/anomaly-stats
Get anomaly statistics.
**Response:**
```json
{
"total_anomalies": 15,
"unresolved": 3,
"by_type": {
"unusual_activation_pattern": 5,
"excessive_heartbeats": 3,
"license_hopping": 7
}
}
```
#### GET /api/admin/license/auth-token
Get JWT token for analytics access.
**Response:**
```json
{
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"expires_at": "2025-06-19T15:30:00Z"
}
```
## Lead Management API
### Institutions
### GET /leads/api/stats
Get lead statistics.
#### GET /api/institutions
List all institutions.
**Query Parameters:**
- `search`: Search term
- `tags`: Comma-separated tags
- `page`: Page number
- `per_page`: Items per page
#### POST /api/institutions
Create new institution.
**Request:**
**Response:**
```json
{
"name": "Example University",
"type": "university",
"address": "123 Main St",
"tags": ["education", "research"],
"metadata": {
"student_count": 5000,
"total_institutions": 150,
"total_contacts": 450,
"recent_activities": 25,
"conversion_rate": 12.5,
"by_type": {
"university": 50,
"company": 75,
"government": 25
}
}
```
#### GET /api/institutions/{id}
Get institution details.
#### PUT /api/institutions/{id}
Update institution.
#### DELETE /api/institutions/{id}
Delete institution.
### Contact Persons
#### GET /api/institutions/{id}/contacts
List contacts for institution.
#### POST /api/institutions/{id}/contacts
Add contact to institution.
**Request:**
```json
{
"name": "John Doe",
"email": "john@example.com",
"phone": "+1234567890",
"position": "IT Manager",
"is_primary": true
}
```
### Notes
#### GET /api/institutions/{id}/notes
Get notes for institution.
#### POST /api/institutions/{id}/notes
Add note to institution.
**Request:**
```json
{
"content": "Discussed pricing options",
"type": "meeting",
"tags": ["sales", "followup"]
}
```
## Analytics API
### GET /api/v1/analytics/usage
Get detailed usage analytics.
**Query Parameters:**
- `from_date`: Start date (ISO 8601)
- `to_date`: End date (ISO 8601)
- `license_id`: Filter by license
- `granularity`: hour, day, week, month
**Response:**
```json
{
"period": {
"from": "2025-06-01T00:00:00Z",
"to": "2025-06-19T23:59:59Z"
},
"metrics": {
"total_heartbeats": 50000,
"unique_activations": 150,
"average_users_per_activation": 8.5,
"peak_concurrent_users": 1200
},
"time_series": [
{
"timestamp": "2025-06-01T00:00:00Z",
"active_users": 800,
"heartbeat_count": 2500
}
]
}
```
### GET /api/v1/analytics/features
Get feature usage statistics.
**Response:**
```json
{
"features": [
{
"name": "feature1",
"total_usage": 150000,
"unique_users": 500,
"usage_trend": "increasing"
}
]
}
```
### Lead Routes (HTML Pages)
- `GET /leads/` - Lead overview page
- `GET /leads/create` - Create lead form
- `POST /leads/create` - Save new lead
- `GET /leads/edit/{lead_id}` - Edit lead form
- `POST /leads/update/{lead_id}` - Update lead
- `POST /leads/delete/{lead_id}` - Delete lead
- `GET /leads/export` - Export leads
- `POST /leads/import` - Import leads
## Common Response Codes

Datei anzeigen

@@ -1,5 +1,58 @@
# v2-Docker Projekt Journal
## Letzte Änderungen (19.06.2025 - 20:30 Uhr)
### Dokumentation aktualisiert und mit Realität abgeglichen
- **API_REFERENCE.md komplett überarbeitet**:
- Tatsächliche Lizenzserver-Endpunkte dokumentiert (nicht mehr v1)
- Korrekte Ports und URLs eingetragen
- Admin Panel API vollständig dokumentiert
- Nicht implementierte Endpunkte entfernt
- **SYSTEM_DOCUMENTATION.md aktualisiert**:
- Microservices-Status korrigiert (nur License Server & Admin Panel aktiv)
- Analytics, Admin API und Auth Service als "geplant" markiert
- Implementierungsstatus auf aktuellen Stand gebracht
- Lead Management als "vollständig implementiert" dokumentiert
- **OPERATIONS_GUIDE.md korrigiert**:
- Echte Docker-Container-Namen verwendet
- Korrekte Ports und Netzwerk-Konfiguration
- Aktuelle Monitoring-Stack-Services dokumentiert
- Troubleshooting-Befehle an echte Container angepasst
### Status:
✅ Dokumentation spiegelt nun den tatsächlichen Projektzustand wider
✅ Keine falschen oder veralteten Informationen mehr
✅ Alle drei Haupt-Dokumentationen sind aktuell
---
## Letzte Änderungen (19.06.2025 - 19:20 Uhr)
### Bugfix: Kunden & Lizenzen API-Fehler behoben
- **Problem**: 500 Fehler beim Klicken auf Kunden in der "Kunden & Lizenzen" Ansicht
- **Ursache**: SQL-Abfrage versuchte auf nicht-existierende Tabellen und Spalten zuzugreifen:
- `license_heartbeats` Tabelle existiert noch nicht (wird mit License Server implementiert)
- `anomaly_detections` Tabelle existiert noch nicht
- Zu komplexe Subqueries führten zu Datenbankfehlern
- **Lösung implementiert**:
- SQL-Abfrage in `api_customer_licenses` vereinfacht
- Entfernt: Alle Referenzen zu noch nicht existierenden Tabellen
- Platzhalter-Werte (0) für License Server Statistiken eingefügt
- Bessere Fehlerbehandlung mit detaillierten Fehlermeldungen
- **Geänderte Dateien**:
- `v2_adminpanel/routes/customer_routes.py` - Vereinfachte SQL-Abfrage ohne Subqueries
### Status:
✅ Kunden & Lizenzen Ansicht funktioniert wieder einwandfrei
✅ API gibt korrekte Daten zurück
✅ Keine Abhängigkeit von noch nicht implementierten Tabellen
---
## Letzte Änderungen (19.06.2025 - 15:07 Uhr)
### Lead-Management System implementiert

Datei anzeigen

@@ -16,40 +16,75 @@
git clone <repository-url>
cd v2-Docker
# Set environment variables
cp .env.example .env
# Edit .env with your configuration
# Environment Variables sind bereits in docker-compose.yaml definiert
# Für Produktion: Erstelle .env Datei mit sensiblen Daten
# Start services
docker-compose up -d
# Run database migrations
docker exec v2_adminpanel python manage.py migrate
# Datenbank wird automatisch initialisiert via init.sql
# Keine manuellen Migrationen erforderlich
```
### Standard-Zugangsdaten
#### Admin Panel
- URL: https://admin-panel-undso.z5m7q9dk3ah2v1plx6ju.com
- User 1: `rac00n` / `1248163264`
- User 2: `w@rh@mm3r` / `Warhammer123!`
#### License Server API
- URL: https://api-software-undso.z5m7q9dk3ah2v1plx6ju.com
- API Key: Muss in Requests mitgesendet werden
### Service Configuration
#### License Server
```yaml
v2_lizenzserver:
license-server:
image: v2_lizenzserver:latest
environment:
- DATABASE_URL=postgresql://user:pass@db:5432/v2
- JWT_SECRET=${JWT_SECRET}
- API_KEY=${API_KEY}
ports:
- "8080:8080"
- DATABASE_URL=postgresql://adminuser:supergeheimespasswort@db:5432/meinedatenbank
- JWT_SECRET=your-secret-jwt-key-here-minimum-32-chars
- API_KEY=your-api-key-here
- REDIS_HOST=redis-cache
- RABBITMQ_HOST=rabbitmq
# Kein externer Port - nur über Nginx erreichbar
expose:
- "8443"
networks:
- internal_net
```
#### Admin Panel
```yaml
v2_adminpanel:
admin-panel:
image: v2_adminpanel:latest
environment:
- DATABASE_URL=postgresql://user:pass@db:5432/v2
- SECRET_KEY=${SECRET_KEY}
- DATABASE_URL=postgresql://adminuser:supergeheimespasswort@db:5432/meinedatenbank
- SECRET_KEY=supersecretkey
- JWT_SECRET=your-secret-jwt-key-here-minimum-32-chars
- LIZENZSERVER_BASE_URL=http://license-server:8443
- REDIS_HOST=redis-cache
# Kein externer Port - nur über Nginx erreichbar
expose:
- "5000"
networks:
- internal_net
```
#### Nginx Reverse Proxy
```yaml
nginx-proxy:
image: v2_nginx:latest
ports:
- "5000:5000"
- "80:80" # HTTP (wird auf HTTPS umgeleitet)
- "443:443" # HTTPS
networks:
- internal_net
# Routet Requests an interne Services:
# admin-panel-undso.z5m7q9dk3ah2v1plx6ju.com → admin-panel:5000
# api-software-undso.z5m7q9dk3ah2v1plx6ju.com → license-server:8443
```
## Monitoring
@@ -61,11 +96,28 @@ v2_adminpanel:
scrape_configs:
- job_name: 'license-server'
static_configs:
- targets: ['v2_lizenzserver:8080']
- targets: ['license-server:8443']
metrics_path: /metrics
- job_name: 'postgres'
static_configs:
- targets: ['postgres_exporter:9187']
- targets: ['postgres-exporter:9187']
- job_name: 'redis'
static_configs:
- targets: ['redis-exporter:9121']
- job_name: 'nginx'
static_configs:
- targets: ['nginx-exporter:9113']
- job_name: 'node'
static_configs:
- targets: ['node-exporter:9100']
- job_name: 'cadvisor'
static_configs:
- targets: ['cadvisor:8081']
```
#### Alert Rules
@@ -96,9 +148,21 @@ scrape_configs:
### Accessing Monitoring
- Prometheus: http://localhost:9090
- Grafana: http://localhost:3001 (admin/admin)
- Grafana: http://localhost:3001
- Default Login: admin/admin
- Vorkonfigurierte Dashboards:
- System Overview
- License Server Metrics
- Database Performance
- Alertmanager: http://localhost:9093
### Monitoring Stack Services
- PostgreSQL Exporter: Sammelt DB-Metriken
- Redis Exporter: Sammelt Cache-Metriken
- Node Exporter: System-Level Metriken
- Nginx Exporter: Webserver-Metriken
- cAdvisor: Container-Metriken (Port 8081)
## Maintenance
### Database Maintenance
@@ -118,23 +182,53 @@ DROP TABLE IF EXISTS license_heartbeats_2024_01;
```
#### Backup Procedures
##### Manuelles Backup
```bash
# Full database backup
docker exec v2_db pg_dump -U postgres v2 > backup_$(date +%Y%m%d).sql
docker exec db pg_dump -U adminuser meinedatenbank > backup_$(date +%Y%m%d).sql
# Backup specific tables
docker exec v2_db pg_dump -U postgres -t licenses -t license_activations v2 > licenses_backup.sql
docker exec db pg_dump -U adminuser -t licenses -t license_activations meinedatenbank > licenses_backup.sql
# Komprimiertes Backup
docker exec db pg_dump -U adminuser meinedatenbank | gzip > backup_$(date +%Y%m%d).sql.gz
# Restore from backup
docker exec -i v2_db psql -U postgres v2 < backup_20250619.sql
docker exec -i db psql -U adminuser meinedatenbank < backup_20250619.sql
```
##### Integriertes Backup-System
Das Admin Panel bietet ein eingebautes Backup-System:
1. Login ins Admin Panel
2. Navigiere zu "Backups"
3. Klicke "Create Backup"
4. Backups werden verschlüsselt im Verzeichnis `/backups` gespeichert
5. Download oder Restore direkt über die UI
### Log Management
#### Log Locations
- License Server: `/var/log/license-server/`
- Admin Panel: `/var/log/adminpanel/`
- Nginx: `/var/log/nginx/`
##### Container Logs
```bash
# License Server Logs
docker logs license-server
# Admin Panel Logs
docker logs admin-panel
# Nginx Logs
docker logs nginx-proxy
# Database Logs
docker logs db
```
##### Persistent Log Volumes
- Nginx Access/Error Logs: Mapped to local `./v2_nginx/logs/`
- Application Logs: Verfügbar über Docker logging driver
- Audit Logs: In der Datenbank (Tabelle `audit_log`)
#### Log Rotation
```bash
@@ -165,18 +259,42 @@ VACUUM ANALYZE licenses;
```
#### Resource Limits
Alle Services haben konfigurierte Resource Limits:
```yaml
# Docker resource constraints
services:
v2_lizenzserver:
deploy:
resources:
limits:
cpus: '2'
memory: 2G
reservations:
cpus: '1'
memory: 1G
# License Server
license-server:
deploy:
resources:
limits:
cpus: '1.0'
memory: 1G
reservations:
cpus: '0.5'
memory: 512M
# Admin Panel
admin-panel:
deploy:
resources:
limits:
cpus: '1.0'
memory: 1G
reservations:
cpus: '0.5'
memory: 512M
# PostgreSQL
db:
deploy:
resources:
limits:
cpus: '2.0'
memory: 2G
reservations:
cpus: '1.0'
memory: 1G
```
## Troubleshooting
@@ -184,16 +302,38 @@ services:
### Common Issues
#### License Server Not Responding
1. Check container status: `docker ps`
2. View logs: `docker logs v2_lizenzserver`
3. Test database connection: `docker exec v2_lizenzserver pg_isready`
4. Verify JWT_SECRET is set
1. Check container status: `docker ps | grep license-server`
2. View logs: `docker logs license-server --tail 100`
3. Test internal connectivity:
```bash
docker exec nginx-proxy curl -k https://license-server:8443/health
```
4. Verify environment variables:
```bash
docker exec license-server env | grep -E "JWT_SECRET|API_KEY|DATABASE_URL"
```
5. Check Nginx routing:
```bash
docker exec nginx-proxy nginx -T | grep api-software
```
#### Database Connection Issues
1. Check PostgreSQL status: `docker exec v2_db pg_isready`
2. Verify credentials in .env
3. Check network connectivity: `docker network ls`
4. Review PostgreSQL logs: `docker logs v2_db`
1. Check PostgreSQL status:
```bash
docker exec db pg_isready -U adminuser -d meinedatenbank
```
2. Test connection from service:
```bash
docker exec admin-panel psql postgresql://adminuser:supergeheimespasswort@db:5432/meinedatenbank -c "SELECT 1"
```
3. Check network connectivity:
```bash
docker network inspect v2-docker_internal_net
```
4. Review PostgreSQL logs:
```bash
docker logs db --tail 50
```
#### High Memory Usage
1. Check container stats: `docker stats`
@@ -204,18 +344,28 @@ services:
### Health Checks
```bash
# License server health
curl http://localhost:8080/health
# License server health (über Nginx)
curl -k https://api-software-undso.z5m7q9dk3ah2v1plx6ju.com/health
# Admin panel health (über Nginx)
curl -k https://admin-panel-undso.z5m7q9dk3ah2v1plx6ju.com/
# Database health
docker exec v2_db pg_isready
docker exec db pg_isready -U adminuser -d meinedatenbank
# Admin panel health
curl http://localhost:5000/health
# Redis health
docker exec redis-cache redis-cli ping
# RabbitMQ health
docker exec rabbitmq rabbitmqctl status
# Monitoring stack
curl http://localhost:9090/-/healthy # Prometheus
curl http://localhost:3001/api/health # Grafana
curl http://localhost:9093/-/healthy # Alertmanager
# Container health overview
docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
```
## Security Considerations
@@ -241,18 +391,42 @@ curl http://localhost:3001/api/health # Grafana
## Scaling Strategies
### Horizontal Scaling
```yaml
# Scale license server
docker-compose up -d --scale v2_lizenzserver=3
# Load balancing with Nginx
#### Scaling License Server
```bash
# Scale license server instances
docker-compose up -d --scale license-server=3
```
#### Nginx Load Balancing Configuration
```nginx
# In nginx.conf
upstream license_servers {
server v2_lizenzserver_1:8080;
server v2_lizenzserver_2:8080;
server v2_lizenzserver_3:8080;
least_conn;
server license-server_1:8443 max_fails=3 fail_timeout=30s;
server license-server_2:8443 max_fails=3 fail_timeout=30s;
server license-server_3:8443 max_fails=3 fail_timeout=30s;
# Health checks
keepalive 32;
}
server {
server_name api-software-undso.z5m7q9dk3ah2v1plx6ju.com;
location / {
proxy_pass https://license_servers;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
}
```
#### Scaling Considerations
- Redis für Session-Sharing zwischen Instanzen
- RabbitMQ für asynchrone Task-Verteilung
- Sticky Sessions bei Bedarf aktivieren
### Database Scaling
- Read replicas for reporting
- Connection pooling

Datei anzeigen

@@ -35,17 +35,45 @@ V2-Docker is a comprehensive system featuring a License Server, Admin Panel, Lea
### Microservices Architecture
#### Services
1. **License Service** - Core license validation
2. **Analytics Service** - Usage statistics and reporting
3. **Admin API Service** - Administrative operations
4. **Notification Service** - Email/webhook notifications
5. **Backup Service** - Automated backups
#### Aktive Services
1. **License Server** (`v2_lizenzserver`) - Core license validation
- Vollständig implementiert
- API-Endpunkte für Aktivierung, Verifizierung, Info
- Läuft auf internem Port über Nginx
2. **Admin Panel** (`v2_adminpanel`) - Web-basierte Verwaltung
- Vollständig implementiert auf Port 80
- Customer, License, Resource Management
- Integrierte Backup-Funktionalität
- Lead Management System
#### Geplante Services (Code vorhanden, nicht aktiv)
1. **Analytics Service** - Usage statistics and reporting
- Code in `/v2_lizenzserver/services/analytics/`
- Auskommentiert in docker-compose.yaml
2. **Admin API Service** - Dedizierte Admin API
- Code in `/v2_lizenzserver/services/admin/`
- Auskommentiert in docker-compose.yaml
3. **Auth Service** - Zentralisierte Authentifizierung
- Code in `/lizenzserver/services/auth/`
- Auskommentiert in docker-compose.yaml
#### Konzeptionelle Services (nur in Planung)
1. **Notification Service** - Email/webhook notifications
2. **Dedicated Backup Service** - Automated backups (derzeit im Admin Panel integriert)
#### Infrastructure Services (aktiv)
- **PostgreSQL** - Hauptdatenbank
- **Redis** - Caching
- **RabbitMQ** - Message Queue
- **Nginx** - Reverse Proxy
#### Communication
- REST APIs for external communication
- Message queuing for internal communication
- Event-driven architecture for real-time updates
- REST APIs für externe Kommunikation
- Redis für Caching
- RabbitMQ für asynchrone Verarbeitung (vorbereitet)
### Database Schema
@@ -72,40 +100,76 @@ V2-Docker is a comprehensive system featuring a License Server, Admin Panel, Lea
### Implementation Status (June 19, 2025)
#### Completed
- Basic license server functionality
- Admin Panel integration
- Monitoring dashboard
- Docker services configuration
- JWT secret management
- Basic API endpoints
- ✅ License Server mit API-Endpunkten
- POST /api/license/activate
- POST /api/license/verify
- GET /api/license/info/{license_key}
- POST /api/version/check
- GET /api/version/latest
- ✅ Admin Panel mit voller Funktionalität
- Customer Management
- License Management
- Resource Management
- Session Management
- Lead Management
- Batch Operations
- Export/Import
- ✅ Monitoring Stack (Prometheus, Grafana, Alertmanager)
- ✅ Docker Services Konfiguration
- ✅ JWT/API Key Management
- ✅ Backup-System (integriert im Admin Panel)
- ✅ 2FA-Authentifizierung
- ✅ Audit Logging
- ✅ Rate Limiting
#### In Progress
- Analytics service
- Advanced monitoring
- Backup automation
- Performance optimization
#### Code vorhanden aber nicht aktiviert
- ⏸️ Analytics Service (auskommentiert)
- ⏸️ Admin API Service (auskommentiert)
- ⏸️ Auth Service (auskommentiert)
#### Geplant
- 📋 Notification Service
- 📋 Erweiterte Analytics
- 📋 Machine Learning Integration
## Lead Management System
### Status
**Vollständig implementiert** als Teil des Admin Panels unter `/leads/`
### Architecture
- **Modular Architecture**: Clean separation of concerns
- **Service Layer Pattern**: Business logic isolation
- **Repository Pattern**: Data access abstraction
- **RESTful API Design**: Standard HTTP methods
- **Service Layer Pattern**: Business logic in `leads/services.py`
- **Repository Pattern**: Data access in `leads/repositories.py`
- **Blueprint Integration**: Routes in `leads/routes.py`
### Data Model
- **Institutions**: Core entity for organizations
- **Contact Persons**: Multiple contacts per institution
- **Notes System**: Versioned notes with timestamps
- **Future-proof Design**: JSONB fields for extensibility
### Data Model (implementiert)
```
lead_institutions
├── lead_contact_persons (1:n)
├── lead_notes (1:n)
├── lead_tags (n:m)
└── lead_activities (1:n)
```
### Features
1. Institution management (CRUD operations)
2. Contact person management
3. Advanced search and filtering
4. Export functionality
5. Activity tracking
6. Note versioning
### Implementierte Features
1. Institution Management (CRUD)
2. Contact Person Management
3. ✅ Notes mit Versionierung
4. ✅ Tag-System
5. Activity Tracking
6. ✅ Export/Import Funktionalität
7. ✅ Erweiterte Suche und Filter
8. ✅ Statistik-Dashboard
### API Endpoints
- GET /leads/ - Übersicht
- GET/POST /leads/create - Neue Institution
- GET/POST /leads/edit/{id} - Bearbeiten
- POST /leads/delete/{id} - Löschen
- GET /leads/export - Export
- POST /leads/import - Import
- GET /leads/api/stats - Statistiken
## Monitoring Stack
V2-Docker is a comprehensive system featuring a License Server, Admin Panel, Lead Management, and Monitoring Stack.
- License server downtime
- Disk space warnings
## Admin Panel Integration
## Admin Panel
### Features
- License management interface
- Customer creation and editing
- License monitoring dashboard
- Real-time statistics
- Alert notifications
### Implementierte Features
1. **Authentication & Security**
- ✅ Login mit 2FA-Unterstützung
- ✅ Session Management
- ✅ Rate Limiting
- ✅ IP-Blocking bei fehlgeschlagenen Logins
- ✅ Audit Logging aller Aktionen
### Technical Details
- Flask-based web application
- PostgreSQL database
- Bootstrap UI framework
- AJAX for real-time updates
- Responsive design
2. **Customer Management**
- ✅ CRUD-Operationen für Kunden
- ✅ Kundensuche mit Autocomplete
- ✅ Kunden-Lizenz-Übersicht
- ✅ Quick Stats pro Kunde
3. **License Management**
- ✅ Lizenzerstellung (Einzel und Batch)
- ✅ Lizenzbearbeitung und -löschung
- ✅ Bulk-Operationen (Aktivieren/Deaktivieren)
- ✅ Device Management pro Lizenz
- ✅ Resource Allocation
- ✅ Quick Edit Funktionalität
4. **Monitoring & Analytics**
- ✅ Dashboard mit Live-Statistiken
- ✅ Lizenzserver-Monitoring
- ✅ Anomalie-Erkennung
- ✅ Session-Überwachung
- ✅ Resource-Auslastung
5. **System Administration**
- ✅ Backup & Restore
- ✅ Export-Funktionen (CSV)
- ✅ Audit Log Viewer
- ✅ Blocked IPs Management
- ✅ Feature Flags Konfiguration
### Technical Stack
- **Backend**: Flask 3.0.3 mit Blueprints
- **Database**: PostgreSQL mit Partitionierung
- **Frontend**: Bootstrap 5.3, jQuery, Select2
- **Real-time**: AJAX, Server-Sent Events
- **Security**: bcrypt, pyotp (2FA), JWT
## Deployment Configuration
### Docker Services
- `v2_db`: PostgreSQL database
- `v2_adminpanel`: Admin interface
- `v2_nginx`: Reverse proxy
- `v2_lizenzserver`: License server
- `prometheus`: Metrics collection
- `grafana`: Monitoring dashboards
#### Aktive Services
- `v2_db`: PostgreSQL database (Port 5432)
- `v2_adminpanel`: Admin interface (Port 80)
- `v2_nginx`: Reverse proxy (Ports 80, 443)
- `v2_lizenzserver`: License server (interner Port)
- `redis`: Redis cache (Port 6379)
- `rabbitmq`: Message queue (Ports 5672, 15672)
#### Monitoring Stack (aktiv)
- `prometheus`: Metrics collection (Port 9090)
- `grafana`: Monitoring dashboards (Port 3001)
- `alertmanager`: Alert management (Port 9093)
- `postgres-exporter`: PostgreSQL metrics
- `redis-exporter`: Redis metrics
- `node-exporter`: System metrics
- `nginx-exporter`: Nginx metrics
- `cadvisor`: Container metrics (Port 8081)
#### Auskommentierte Services
- `auth-service`: Authentication service (würde auf Port 5001 laufen)
- `analytics-service`: Analytics service (würde auf Port 5003 laufen)
- `admin-api-service`: Admin API service (würde auf Port 5004 laufen)
### Environment Variables
- `JWT_SECRET`: Authentication key
- `DATABASE_URL`: PostgreSQL connection
- `API_KEY`: Service authentication
- `MONITORING_ENABLED`: Feature flag
#### Erforderlich
- `DATABASE_URL`: PostgreSQL Verbindung
- `SECRET_KEY`: Flask Session Secret
- `JWT_SECRET`: JWT Token Signierung
- `API_KEY`: Lizenzserver API Key
#### Optional mit Defaults
- `MONITORING_ENABLED`: "true" (Feature Flag)
- `SESSION_LIFETIME_MINUTES`: 30
- `PERMANENT_SESSION_LIFETIME_DAYS`: 7
- `LOGIN_RATE_LIMIT`: "5 per minute"
- `API_RATE_LIMIT`: "100 per minute"
- `MAX_LOGIN_ATTEMPTS`: 5
- `LOGIN_LOCKOUT_DURATION`: 900 (Sekunden)
- `LIZENZSERVER_BASE_URL`: "http://v2_lizenzserver:8000"
- `REDIS_HOST`: "redis"
- `REDIS_PORT`: 6379
### Network Configuration
- Internal network for service communication

30
lizenzserver/.env.example Normale Datei
Datei anzeigen

@@ -0,0 +1,30 @@
# Database Configuration
DB_PASSWORD=secure_password_change_this
# Redis Configuration
REDIS_PASSWORD=redis_password_change_this
# RabbitMQ Configuration
RABBITMQ_USER=admin
RABBITMQ_PASS=admin_password_change_this
# JWT Configuration
JWT_SECRET=change_this_very_secret_key_in_production
# Admin Configuration
ADMIN_SECRET=change_this_admin_secret
ADMIN_API_KEY=admin-key-change-in-production
# Flask Environment
FLASK_ENV=production
# Rate Limiting (optional overrides)
# DEFAULT_RATE_LIMIT_PER_MINUTE=60
# DEFAULT_RATE_LIMIT_PER_HOUR=1000
# DEFAULT_RATE_LIMIT_PER_DAY=10000
# Service URLs (for external access)
# AUTH_SERVICE_URL=http://localhost:5001
# LICENSE_API_URL=http://localhost:5002
# ANALYTICS_SERVICE_URL=http://localhost:5003
# ADMIN_API_URL=http://localhost:5004

Datei anzeigen

@@ -0,0 +1,561 @@
# License Server API Documentation
## Overview
The License Server provides a comprehensive API for managing software licenses, validating license keys, and tracking usage. The system consists of four main services:
1. **Auth Service** - JWT token management and API authentication
2. **License API** - License validation and activation
3. **Admin API** - License management and administration
4. **Analytics Service** - Usage analytics and anomaly detection
## Base URLs
- Auth Service: `http://localhost:5001`
- License API: `http://localhost:5002`
- Analytics Service: `http://localhost:5003`
- Admin API: `http://localhost:5004`
## Authentication
### API Key Authentication
Most endpoints require an API key in the `X-API-Key` header:
```
X-API-Key: sk_your_api_key_here
```
### JWT Authentication
Some endpoints use JWT bearer tokens:
```
Authorization: Bearer your_jwt_token_here
```
## Auth Service Endpoints
### Create Access Token
Create JWT access token for license validation.
**POST** `/api/v1/auth/token`
**Headers:**
- `X-API-Key: required`
**Request Body:**
```json
{
"license_id": "string",
"hardware_id": "string"
}
```
**Response:**
```json
{
"access_token": "string",
"refresh_token": "string",
"token_type": "Bearer",
"expires_in": 3600
}
```
### Refresh Token
Refresh an expired access token.
**POST** `/api/v1/auth/refresh`
**Request Body:**
```json
{
"refresh_token": "string"
}
```
### Verify Token
Verify token validity.
**POST** `/api/v1/auth/verify`
**Headers:**
- `Authorization: Bearer <token>`
### Create API Key (Admin)
Create new API key for client applications.
**POST** `/api/v1/auth/api-key`
**Headers:**
- `X-Admin-Secret: required`
**Request Body:**
```json
{
"client_name": "string",
"allowed_endpoints": ["array", "of", "endpoints"]
}
```
## License API Endpoints
### Validate License
Validate a license key with hardware ID.
**POST** `/api/v1/license/validate`
**Headers:**
- `X-API-Key: required`
**Request Body:**
```json
{
"license_key": "string",
"hardware_id": "string",
"app_version": "string (optional)"
}
```
**Response:**
```json
{
"valid": true,
"license_id": "string",
"expires_at": "2024-12-31T23:59:59Z",
"features": ["feature1", "feature2"],
"limits": {
"max_devices": 5,
"current_devices": 2
}
}
```
### Activate License
Activate license on a new device.
**POST** `/api/v1/license/activate`
**Headers:**
- `X-API-Key: required`
**Request Body:**
```json
{
"license_key": "string",
"hardware_id": "string",
"device_name": "string (optional)",
"os_info": {
"name": "Windows",
"version": "10"
}
}
```
### Heartbeat
Record license heartbeat (requires JWT).
**POST** `/api/v1/license/heartbeat`
**Headers:**
- `Authorization: Bearer <token>`
**Request Body:**
```json
{
"session_data": {
"custom": "data"
}
}
```
### Create Offline Token
Generate offline validation token.
**POST** `/api/v1/license/offline-token`
**Headers:**
- `Authorization: Bearer <token>`
**Request Body:**
```json
{
"duration_hours": 24
}
```
### Validate Offline Token
Validate an offline token.
**POST** `/api/v1/license/validate-offline`
**Request Body:**
```json
{
"token": "string"
}
```
## Admin API Endpoints
### Create License
Create a new license.
**POST** `/api/v1/admin/licenses`
**Headers:**
- `X-Admin-API-Key: required`
**Request Body:**
```json
{
"customer_id": "string",
"max_devices": 5,
"expires_in_days": 365,
"features": ["feature1", "feature2"],
"is_test": false,
"metadata": {
"custom": "data"
}
}
```
### Get License
Get license details with statistics.
**GET** `/api/v1/admin/licenses/{license_id}`
**Headers:**
- `X-Admin-API-Key: required`
### Update License
Update license properties.
**PATCH** `/api/v1/admin/licenses/{license_id}`
**Headers:**
- `X-Admin-API-Key: required`
**Request Body:**
```json
{
"max_devices": 10,
"is_active": true,
"expires_at": "2025-12-31T23:59:59Z",
"features": ["new_feature"],
"metadata": {}
}
```
### Delete License
Soft delete (deactivate) a license.
**DELETE** `/api/v1/admin/licenses/{license_id}`
**Headers:**
- `X-Admin-API-Key: required`
### List Licenses
Search and list licenses with filters.
**GET** `/api/v1/admin/licenses`
**Headers:**
- `X-Admin-API-Key: required`
**Query Parameters:**
- `customer_id`: Filter by customer
- `is_active`: Filter by active status
- `is_test`: Filter test licenses
- `created_after`: Filter by creation date
- `created_before`: Filter by creation date
- `expires_after`: Filter by expiration
- `expires_before`: Filter by expiration
- `page`: Page number (default: 1)
- `per_page`: Items per page (default: 50, max: 100)
### Get License Devices
Get all devices for a license.
**GET** `/api/v1/admin/licenses/{license_id}/devices`
**Headers:**
- `X-Admin-API-Key: required`
### Deactivate Device
Deactivate a specific device.
**POST** `/api/v1/admin/licenses/{license_id}/devices/deactivate`
**Headers:**
- `X-Admin-API-Key: required`
**Request Body:**
```json
{
"hardware_id": "string",
"reason": "string (optional)"
}
```
### Transfer License
Transfer license between devices.
**POST** `/api/v1/admin/licenses/{license_id}/transfer`
**Headers:**
- `X-Admin-API-Key: required`
**Request Body:**
```json
{
"from_hardware_id": "string",
"to_hardware_id": "string"
}
```
### Get License Events
Get activation events for a license.
**GET** `/api/v1/admin/licenses/{license_id}/events`
**Headers:**
- `X-Admin-API-Key: required`
**Query Parameters:**
- `hours`: Hours to look back (default: 24)
### Get License Usage
Get usage statistics for a license.
**GET** `/api/v1/admin/licenses/{license_id}/usage`
**Headers:**
- `X-Admin-API-Key: required`
**Query Parameters:**
- `days`: Days to analyze (default: 30)
### Bulk Create Licenses
Create multiple licenses at once.
**POST** `/api/v1/admin/licenses/bulk-create`
**Headers:**
- `X-Admin-API-Key: required`
**Request Body:**
```json
{
"licenses": [
{
"customer_id": "string",
"max_devices": 5,
"expires_in_days": 365
}
]
}
```
### Get Statistics
Get overall license statistics.
**GET** `/api/v1/admin/statistics`
**Headers:**
- `X-Admin-API-Key: required`
## Analytics Service Endpoints
### Analyze Patterns
Analyze usage patterns for a license.
**GET** `/api/v1/analytics/licenses/{license_id}/patterns`
**Headers:**
- `X-API-Key: required`
**Query Parameters:**
- `days`: Days to analyze (default: 30)
### Detect Anomalies
Manually trigger anomaly detection.
**POST** `/api/v1/analytics/licenses/{license_id}/anomalies/detect`
**Headers:**
- `X-API-Key: required`
### Get Risk Score
Calculate risk score for a license.
**GET** `/api/v1/analytics/licenses/{license_id}/risk-score`
**Headers:**
- `X-API-Key: required`
### Generate Usage Report
Generate usage report for all licenses.
**GET** `/api/v1/analytics/reports/usage`
**Headers:**
- `X-API-Key: required`
**Query Parameters:**
- `days`: Days to include (default: 30)
### Get Dashboard Data
Get analytics dashboard data.
**GET** `/api/v1/analytics/dashboard`
**Headers:**
- `X-API-Key: required`
## Error Responses
All endpoints use standard HTTP status codes and return errors in this format:
```json
{
"error": "Error message",
"error_code": "ERROR_CODE",
"details": {}
}
```
### Common Error Codes
- `LICENSE_NOT_FOUND` - License key not found
- `LICENSE_INACTIVE` - License is deactivated
- `LICENSE_EXPIRED` - License has expired
- `DEVICE_LIMIT_EXCEEDED` - Device limit reached
- `ALREADY_ACTIVATED` - Already activated on device
- `INVALID_TOKEN` - Invalid JWT token
- `RATE_LIMIT_EXCEEDED` - Rate limit exceeded
## Rate Limiting
API requests are rate limited based on API key configuration:
- Default: 60 requests per minute, 1000 per hour
- Rate limit headers are included in responses:
- `X-RateLimit-Limit`: Requests per minute
- `X-RateLimit-Remaining`: Remaining requests
- `Retry-After`: Seconds until retry (on 429 errors)
## Webhooks
The system publishes events to RabbitMQ for real-time processing:
- `license.validated` - License validation successful
- `license.validation.failed` - License validation failed
- `license.activated` - New device activated
- `license.deactivated` - License deactivated
- `license.transferred` - License transferred
- `anomaly.detected` - Anomaly detected
- `device.deactivated` - Device deactivated
## SDK Examples
### Python
```python
import requests
# Initialize client
api_key = "sk_your_api_key"
base_url = "http://localhost:5002"
# Validate license
response = requests.post(
f"{base_url}/api/v1/license/validate",
headers={"X-API-Key": api_key},
json={
"license_key": "LIC-XXXXXXXXXXXX",
"hardware_id": "device-123"
}
)
if response.status_code == 200:
data = response.json()
if data["valid"]:
print("License is valid!")
```
### JavaScript
```javascript
const apiKey = 'sk_your_api_key';
const baseUrl = 'http://localhost:5002';
// Validate license
fetch(`${baseUrl}/api/v1/license/validate`, {
method: 'POST',
headers: {
'X-API-Key': apiKey,
'Content-Type': 'application/json'
},
body: JSON.stringify({
license_key: 'LIC-XXXXXXXXXXXX',
hardware_id: 'device-123'
})
})
.then(response => response.json())
.then(data => {
if (data.valid) {
console.log('License is valid!');
}
});
```
## Best Practices
1. **Caching**: Validation results are cached for 5 minutes. Use heartbeats for real-time tracking.
2. **Offline Support**: Generate offline tokens for temporary offline validation.
3. **Security**:
- Always use HTTPS in production
- Rotate API keys regularly
- Monitor for anomalies
4. **Rate Limiting**: Implement exponential backoff on 429 errors.
5. **Error Handling**: Always check error codes and handle appropriately.
## Migration from v1
If migrating from a previous version:
1. Update API endpoints to v1 paths
2. Add API key authentication
3. Update response parsing for new format
4. Implement heartbeat for session tracking

Datei anzeigen

@@ -0,0 +1,31 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
postgresql-client \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . /app/
# Create non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser
# Expose port
EXPOSE 5004
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:5004/health || exit 1
# Run the application
CMD ["python", "services/admin_api/app.py"]

Datei anzeigen

@@ -0,0 +1,31 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
postgresql-client \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . /app/
# Create non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser
# Expose port
EXPOSE 5003
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:5003/health || exit 1
# Run the application
CMD ["python", "services/analytics/app.py"]

31
lizenzserver/Dockerfile.auth Normale Datei
Datei anzeigen

@@ -0,0 +1,31 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
postgresql-client \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . /app/
# Create non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser
# Expose port
EXPOSE 5001
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:5001/health || exit 1
# Run the application
CMD ["python", "services/auth/app.py"]

Datei anzeigen

@@ -0,0 +1,31 @@
FROM python:3.11-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
postgresql-client \
curl \
&& rm -rf /var/lib/apt/lists/*
# Copy requirements
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . /app/
# Create non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser
# Expose port
EXPOSE 5002
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:5002/health || exit 1
# Run the application
CMD ["python", "services/license_api/app.py"]

86
lizenzserver/Makefile Normale Datei
Datei anzeigen

@@ -0,0 +1,86 @@
.PHONY: help build up down restart logs ps clean test
# Default target
help:
@echo "License Server Management Commands:"
@echo " make build - Build all Docker images"
@echo " make up - Start all services"
@echo " make down - Stop all services"
@echo " make restart - Restart all services"
@echo " make logs - View logs from all services"
@echo " make ps - List running containers"
@echo " make clean - Remove containers and volumes"
@echo " make test - Run tests"
@echo " make init-db - Initialize database schema"
# Build all Docker images
build:
docker-compose build
# Start all services
up:
docker-compose up -d
@echo "Waiting for services to be healthy..."
@sleep 10
@echo "Services are running!"
@echo "Auth Service: http://localhost:5001"
@echo "License API: http://localhost:5002"
@echo "Analytics: http://localhost:5003"
@echo "Admin API: http://localhost:5004"
@echo "RabbitMQ Management: http://localhost:15672"
# Stop all services
down:
docker-compose down
# Restart all services
restart: down up
# View logs
logs:
docker-compose logs -f
# List containers
ps:
docker-compose ps
# Clean up everything
clean:
docker-compose down -v
docker system prune -f
# Run tests
test:
@echo "Running API tests..."
@python tests/test_api.py
# Initialize database
init-db:
@echo "Initializing database schema..."
docker-compose exec postgres psql -U license_admin -d licenses -f /docker-entrypoint-initdb.d/init.sql
# Service-specific commands
logs-auth:
docker-compose logs -f auth_service
logs-license:
docker-compose logs -f license_api
logs-analytics:
docker-compose logs -f analytics_service
logs-admin:
docker-compose logs -f admin_api
# Development commands
dev:
docker-compose -f docker-compose.yml -f docker-compose.dev.yml up
shell-auth:
docker-compose exec auth_service /bin/bash
shell-license:
docker-compose exec license_api /bin/bash
shell-db:
docker-compose exec postgres psql -U license_admin -d licenses

244
lizenzserver/README.md Normale Datei
Datei anzeigen

@@ -0,0 +1,244 @@
# License Server
A comprehensive microservices-based license management system for software licensing, validation, and analytics.
## Features
- **License Management**: Create, update, and manage software licenses
- **Hardware-based Validation**: Bind licenses to specific devices
- **Offline Support**: Generate offline validation tokens
- **Analytics**: Track usage patterns and detect anomalies
- **Rate Limiting**: Protect APIs with configurable rate limits
- **Event-driven Architecture**: Real-time event processing with RabbitMQ
- **Caching**: Redis-based caching for improved performance
- **Security**: JWT authentication, API key management, and audit logging
## Architecture
The system consists of four microservices:
1. **Auth Service** (Port 5001): JWT token management and API authentication
2. **License API** (Port 5002): License validation and activation
3. **Analytics Service** (Port 5003): Usage analytics and anomaly detection
4. **Admin API** (Port 5004): License administration and management
## Quick Start
### Prerequisites
- Docker and Docker Compose
- Make (optional, for using Makefile commands)
- Python 3.11+ (for local development)
### Installation
1. Clone the repository:
```bash
git clone <repository-url>
cd lizenzserver
```
2. Copy environment variables:
```bash
cp .env.example .env
# Edit .env with your configuration
```
3. Build and start services:
```bash
make build
make up
```
Or without Make:
```bash
docker-compose build
docker-compose up -d
```
4. Initialize the database:
```bash
make init-db
```
### Verify Installation
Check service health:
```bash
curl http://localhost:5001/health
curl http://localhost:5002/health
curl http://localhost:5003/health
curl http://localhost:5004/health
```
## Usage
### Creating a License
```bash
curl -X POST http://localhost:5004/api/v1/admin/licenses \
-H "X-Admin-API-Key: your-admin-key" \
-H "Content-Type: application/json" \
-d '{
"customer_id": "cust-123",
"max_devices": 5,
"expires_in_days": 365,
"features": ["premium", "support"]
}'
```
### Validating a License
```bash
curl -X POST http://localhost:5002/api/v1/license/validate \
-H "X-API-Key: your-api-key" \
-H "Content-Type: application/json" \
-d '{
"license_key": "LIC-XXXXXXXXXXXX",
"hardware_id": "device-123"
}'
```
## API Documentation
Detailed API documentation is available in [API_DOCUMENTATION.md](API_DOCUMENTATION.md).
## Configuration
### Environment Variables
Key configuration options in `.env`:
- `DB_PASSWORD`: PostgreSQL password
- `REDIS_PASSWORD`: Redis password
- `JWT_SECRET`: Secret key for JWT tokens
- `ADMIN_API_KEY`: Admin API authentication key
- `FLASK_ENV`: Flask environment (development/production)
### Rate Limiting
Default rate limits:
- 60 requests per minute
- 1000 requests per hour
- 10000 requests per day
Configure per API key in the database.
## Development
### Running Locally
1. Install dependencies:
```bash
pip install -r requirements.txt
```
2. Set environment variables:
```bash
export DATABASE_URL=postgresql://user:pass@localhost:5432/licenses
export REDIS_URL=redis://localhost:6379
export RABBITMQ_URL=amqp://guest:guest@localhost:5672
```
3. Run a service:
```bash
python services/license_api/app.py
```
### Testing
Run tests:
```bash
make test
```
### Database Migrations
The database schema is in `init.sql`. Apply migrations:
```bash
docker-compose exec postgres psql -U license_admin -d licenses -f /path/to/migration.sql
```
## Monitoring
### Logs
View logs for all services:
```bash
make logs
```
View logs for specific service:
```bash
make logs-auth
make logs-license
make logs-analytics
make logs-admin
```
### Metrics
Services expose Prometheus metrics at `/metrics` endpoint.
### RabbitMQ Management
Access RabbitMQ management UI at http://localhost:15672
- Username: admin (or configured value)
- Password: admin_password (or configured value)
## Security
### Best Practices
1. **Change default passwords** in production
2. **Use HTTPS** in production (configure in nginx.conf)
3. **Rotate API keys** regularly
4. **Monitor anomalies** through the analytics service
5. **Set up IP whitelisting** for admin endpoints
6. **Enable audit logging** for compliance
### API Key Management
Create API keys through the Auth Service:
```bash
curl -X POST http://localhost:5001/api/v1/auth/api-key \
-H "X-Admin-Secret: your-admin-secret" \
-H "Content-Type: application/json" \
-d '{
"client_name": "My Application",
"allowed_endpoints": ["license.validate", "license.activate"]
}'
```
## Troubleshooting
### Common Issues
1. **Services not starting**: Check logs with `docker-compose logs <service-name>`
2. **Database connection errors**: Ensure PostgreSQL is healthy and credentials are correct
3. **Rate limit errors**: Check rate limit configuration and API key limits
4. **Cache misses**: Verify Redis connection and TTL settings
### Health Checks
All services provide health endpoints:
- Auth: http://localhost:5001/health
- License: http://localhost:5002/health
- Analytics: http://localhost:5003/health
- Admin: http://localhost:5004/health
## Contributing
1. Fork the repository
2. Create a feature branch
3. Commit your changes
4. Push to the branch
5. Create a Pull Request
## License
[Your License Here]
## Support
For support, please contact support@example.com or create an issue in the repository.

Datei anzeigen

@@ -0,0 +1,191 @@
version: '3.8'
services:
# PostgreSQL Database
postgres:
image: postgres:15-alpine
container_name: license_postgres
environment:
POSTGRES_DB: licenses
POSTGRES_USER: license_admin
POSTGRES_PASSWORD: ${DB_PASSWORD:-secure_password}
volumes:
- postgres_data:/var/lib/postgresql/data
- ./init.sql:/docker-entrypoint-initdb.d/init.sql
ports:
- "5432:5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U license_admin -d licenses"]
interval: 10s
timeout: 5s
retries: 5
# Redis Cache
redis:
image: redis:7-alpine
container_name: license_redis
command: redis-server --requirepass ${REDIS_PASSWORD:-redis_password}
ports:
- "6379:6379"
volumes:
- redis_data:/data
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 10s
timeout: 5s
retries: 5
# RabbitMQ Message Broker
rabbitmq:
image: rabbitmq:3-management-alpine
container_name: license_rabbitmq
environment:
RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-admin}
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASS:-admin_password}
ports:
- "5672:5672"
- "15672:15672" # Management UI
volumes:
- rabbitmq_data:/var/lib/rabbitmq
healthcheck:
test: ["CMD", "rabbitmq-diagnostics", "ping"]
interval: 10s
timeout: 5s
retries: 5
# Auth Service
auth_service:
build:
context: .
dockerfile: Dockerfile.auth
container_name: license_auth
environment:
DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
JWT_SECRET: ${JWT_SECRET:-change_this_in_production}
ADMIN_SECRET: ${ADMIN_SECRET:-change_this_admin_secret}
FLASK_ENV: ${FLASK_ENV:-production}
PORT: 5001
ports:
- "5001:5001"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
rabbitmq:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
interval: 30s
timeout: 10s
retries: 3
# License API Service
license_api:
build:
context: .
dockerfile: Dockerfile.license
container_name: license_api
environment:
DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
JWT_SECRET: ${JWT_SECRET:-change_this_in_production}
FLASK_ENV: ${FLASK_ENV:-production}
PORT: 5002
ports:
- "5002:5002"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
rabbitmq:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5002/health"]
interval: 30s
timeout: 10s
retries: 3
# Analytics Service
analytics_service:
build:
context: .
dockerfile: Dockerfile.analytics
container_name: license_analytics
environment:
DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
FLASK_ENV: ${FLASK_ENV:-production}
PORT: 5003
ports:
- "5003:5003"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
rabbitmq:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5003/health"]
interval: 30s
timeout: 10s
retries: 3
# Admin API Service
admin_api:
build:
context: .
dockerfile: Dockerfile.admin
container_name: license_admin_api
environment:
DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
ADMIN_API_KEY: ${ADMIN_API_KEY:-admin-key-change-in-production}
FLASK_ENV: ${FLASK_ENV:-production}
PORT: 5004
ports:
- "5004:5004"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
rabbitmq:
condition: service_healthy
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5004/health"]
interval: 30s
timeout: 10s
retries: 3
# Nginx Reverse Proxy
nginx:
image: nginx:alpine
container_name: license_nginx
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
ports:
- "80:80"
- "443:443"
depends_on:
- auth_service
- license_api
- analytics_service
- admin_api
healthcheck:
test: ["CMD", "nginx", "-t"]
interval: 30s
timeout: 10s
retries: 3
volumes:
postgres_data:
redis_data:
rabbitmq_data:

Datei anzeigen

@@ -0,0 +1 @@
# Events Module

Datei anzeigen

@@ -168,11 +168,14 @@ class EventTypes:
LICENSE_DEACTIVATED = "license.deactivated"
LICENSE_TRANSFERRED = "license.transferred"
LICENSE_EXPIRED = "license.expired"
LICENSE_CREATED = "license.created"
LICENSE_UPDATED = "license.updated"
# Device events
DEVICE_ADDED = "device.added"
DEVICE_REMOVED = "device.removed"
DEVICE_BLOCKED = "device.blocked"
DEVICE_DEACTIVATED = "device.deactivated"
# Anomaly events
ANOMALY_DETECTED = "anomaly.detected"

Datei anzeigen

@@ -0,0 +1 @@
# Middleware Module

Datei anzeigen

@@ -0,0 +1,158 @@
import time
from functools import wraps
from flask import request, jsonify
import redis
from typing import Optional, Tuple
import logging
logger = logging.getLogger(__name__)
class RateLimiter:
"""Rate limiting middleware using Redis"""
def __init__(self, redis_url: str):
self.redis_client = None
try:
self.redis_client = redis.from_url(redis_url, decode_responses=True)
self.redis_client.ping()
logger.info("Connected to Redis for rate limiting")
except Exception as e:
logger.warning(f"Redis not available for rate limiting: {e}")
def limit(self, requests_per_minute: int = 60, requests_per_hour: int = 1000):
"""Decorator for rate limiting endpoints"""
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not self.redis_client:
# Redis not available, skip rate limiting
return f(*args, **kwargs)
# Get client identifier (API key or IP)
client_id = self._get_client_id()
# Check rate limits
is_allowed, retry_after = self._check_rate_limit(
client_id,
requests_per_minute,
requests_per_hour
)
if not is_allowed:
response = jsonify({
"error": "Rate limit exceeded",
"retry_after": retry_after
})
response.status_code = 429
response.headers['Retry-After'] = str(retry_after)
response.headers['X-RateLimit-Limit'] = str(requests_per_minute)
return response
# Add rate limit headers
response = f(*args, **kwargs)
if hasattr(response, 'headers'):
response.headers['X-RateLimit-Limit'] = str(requests_per_minute)
response.headers['X-RateLimit-Remaining'] = str(
self._get_remaining_requests(client_id, requests_per_minute)
)
return response
return decorated_function
return decorator
def _get_client_id(self) -> str:
"""Get client identifier from request"""
# First try API key
api_key = request.headers.get('X-API-Key')
if api_key:
return f"api_key:{api_key}"
# Then try auth token
auth_header = request.headers.get('Authorization')
if auth_header and auth_header.startswith('Bearer '):
return f"token:{auth_header[7:32]}" # Use first 32 chars of token
# Fallback to IP
if request.headers.get('X-Forwarded-For'):
ip = request.headers.get('X-Forwarded-For').split(',')[0]
else:
ip = request.remote_addr
return f"ip:{ip}"
def _check_rate_limit(self, client_id: str,
requests_per_minute: int,
requests_per_hour: int) -> Tuple[bool, Optional[int]]:
"""Check if request is within rate limits"""
now = int(time.time())
# Check minute limit
minute_key = f"rate_limit:minute:{client_id}:{now // 60}"
minute_count = self.redis_client.incr(minute_key)
self.redis_client.expire(minute_key, 60)
if minute_count > requests_per_minute:
retry_after = 60 - (now % 60)
return False, retry_after
# Check hour limit
hour_key = f"rate_limit:hour:{client_id}:{now // 3600}"
hour_count = self.redis_client.incr(hour_key)
self.redis_client.expire(hour_key, 3600)
if hour_count > requests_per_hour:
retry_after = 3600 - (now % 3600)
return False, retry_after
return True, None
def _get_remaining_requests(self, client_id: str, limit: int) -> int:
    """Return how many requests the client may still make this minute."""
    now = int(time.time())
    minute_key = f"rate_limit:minute:{client_id}:{now // 60}"
    try:
        current_count = int(self.redis_client.get(minute_key) or 0)
    except Exception:
        # A Redis hiccup must not break the response path; report the
        # full limit instead.  (Previously a bare ``except:`` which would
        # also swallow SystemExit/KeyboardInterrupt.)
        return limit
    return max(0, limit - current_count)
class APIKeyRateLimiter(RateLimiter):
    """Rate limiter whose per-minute/per-hour limits come from the
    ``api_clients`` table instead of the hard-coded defaults."""

    def __init__(self, redis_url: str, db_repo):
        super().__init__(redis_url)
        self.db_repo = db_repo  # repository exposing execute_one()

    def limit_by_api_key(self):
        """Rate limit based on API key configuration.

        Requests without an API key fall back to the default limits;
        unknown or inactive keys are rejected with 401.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                api_key = request.headers.get('X-API-Key')
                if not api_key:
                    # Anonymous client: default limits apply.
                    return self.limit()(f)(*args, **kwargs)
                # Look up the key's configured limits (parameterised query;
                # note this is one DB round trip per request).
                query = """
                SELECT rate_limit_per_minute, rate_limit_per_hour
                FROM api_clients
                WHERE api_key = %s AND is_active = true
                """
                client = self.db_repo.execute_one(query, (api_key,))
                if not client:
                    return jsonify({"error": "Invalid API key"}), 401
                # BUG FIX: dict.get(key, default) returns the stored NULL
                # (None) when the column exists but is unset -- fall back
                # to the defaults explicitly.
                rpm = client.get('rate_limit_per_minute') or 60
                rph = client.get('rate_limit_per_hour') or 1000
                return self.limit(rpm, rph)(f)(*args, **kwargs)
            return decorated_function
        return decorator

167
lizenzserver/nginx.conf Normale Datei
Datei anzeigen

@@ -0,0 +1,167 @@
# Reverse proxy / API gateway for the license server.
# Routes /api/v1/* to the four backend services and applies
# per-IP and per-API-key request rate limiting.
events {
    worker_connections 1024;
}
http {
    # Basic settings
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;
    # Logging
    access_log /var/log/nginx/access.log;
    error_log /var/log/nginx/error.log;
    # Gzip compression
    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_types text/plain text/css text/xml application/json application/javascript application/xml+rss;
    # Rate limiting zones: api_limit keys on client IP, key_limit on the
    # X-API-Key header.
    # NOTE(review): requests WITHOUT an X-API-Key all share one empty key
    # in key_limit -- confirm that pooling is acceptable.
    limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;
    limit_req_zone $http_x_api_key zone=key_limit:10m rate=100r/s;
    # Upstream services (Docker service names and internal ports)
    upstream auth_service {
        server auth_service:5001;
    }
    upstream license_api {
        server license_api:5002;
    }
    upstream analytics_service {
        server analytics_service:5003;
    }
    upstream admin_api {
        server admin_api:5004;
    }
    # Main server block
    server {
        listen 80;
        server_name localhost;
        # Security headers
        # NOTE(review): HSTS is emitted on plain HTTP here; it only takes
        # effect once the commented HTTPS block below is enabled.
        add_header X-Content-Type-Options nosniff;
        add_header X-Frame-Options DENY;
        add_header X-XSS-Protection "1; mode=block";
        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
        # API versioning and routing
        location /api/v1/auth/ {
            limit_req zone=api_limit burst=20 nodelay;
            proxy_pass http://auth_service/api/v1/auth/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            # CORS headers (wildcard origin; no credentials allowed here)
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-API-Key' always;
            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }
        location /api/v1/license/ {
            limit_req zone=key_limit burst=50 nodelay;
            proxy_pass http://license_api/api/v1/license/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            # CORS headers
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-API-Key' always;
            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }
        location /api/v1/analytics/ {
            limit_req zone=key_limit burst=30 nodelay;
            proxy_pass http://analytics_service/api/v1/analytics/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            # CORS headers
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-API-Key' always;
            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }
        location /api/v1/admin/ {
            limit_req zone=key_limit burst=30 nodelay;
            # Additional security for admin endpoints
            # In production, add IP whitelisting here
            proxy_pass http://admin_api/api/v1/admin/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            # CORS headers (more restrictive for admin)
            # NOTE(review): reflecting $http_origin together with
            # Allow-Credentials=true trusts EVERY origin that sends one;
            # validate the origin against an allow-list before production.
            add_header 'Access-Control-Allow-Origin' '$http_origin' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, PATCH, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Admin-API-Key' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }
        # Health check endpoint (not logged, answered by nginx itself)
        location /health {
            access_log off;
            return 200 "healthy\n";
            add_header Content-Type text/plain;
        }
        # Root redirect
        location / {
            return 301 /api/v1/;
        }
        # API documentation stub (more specific /api/v1/<service>/ prefixes
        # above take precedence over this location)
        location /api/v1/ {
            return 200 '{"message": "License Server API v1", "documentation": "/api/v1/docs"}';
            add_header Content-Type application/json;
        }
    }
    # HTTPS server block (for production)
    # server {
    #     listen 443 ssl http2;
    #     server_name your-domain.com;
    #
    #     ssl_certificate /etc/nginx/ssl/cert.pem;
    #     ssl_certificate_key /etc/nginx/ssl/key.pem;
    #     ssl_protocols TLSv1.2 TLSv1.3;
    #     ssl_ciphers HIGH:!aNULL:!MD5;
    #
    #     # Same location blocks as above
    # }
}

Datei anzeigen

@@ -0,0 +1,31 @@
# Flask and extensions
Flask==3.0.0
Flask-CORS==4.0.0
flask-limiter==3.5.0
# Database
psycopg2-binary==2.9.9
SQLAlchemy==2.0.23
# Redis
redis==5.0.1
# RabbitMQ
pika==1.3.2
# JWT
PyJWT==2.8.0
# Validation
marshmallow==3.20.1
# Monitoring
prometheus-flask-exporter==0.23.0
# Utilities
python-dateutil==2.8.2
pytz==2023.3
requests==2.31.0
# Development
python-dotenv==1.0.0

Datei anzeigen

@@ -0,0 +1 @@
# Admin API Service

Datei anzeigen

@@ -0,0 +1,666 @@
import os
import sys
from flask import Flask, request, jsonify
from flask_cors import CORS
import logging
from functools import wraps
from marshmallow import Schema, fields, ValidationError
from datetime import datetime, timedelta
import secrets
# Add parent directory to path for imports
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from config import get_config
from repositories.license_repo import LicenseRepository
from repositories.cache_repo import CacheRepository
from events.event_bus import EventBus, Event, EventTypes
from models import EventType, AnomalyType, Severity
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)  # module-level logger, named after the module
# Initialize Flask app
app = Flask(__name__)
config = get_config()
app.config.from_object(config)
CORS(app)  # NOTE(review): default CORS(app) allows all origins -- confirm intended for an admin service
# Initialize dependencies (shared singletons for this service process)
license_repo = LicenseRepository(config.DATABASE_URL)
cache_repo = CacheRepository(config.REDIS_URL)
event_bus = EventBus(config.RABBITMQ_URL)
# Validation schemas
class CreateLicenseSchema(Schema):
    """Payload for creating a single license."""
    customer_id = fields.Str(required=True)
    # ``load_default`` replaces the deprecated ``missing`` argument
    # (marshmallow >= 3.13; the project pins 3.20.1).  Callables avoid
    # sharing one mutable default across loads.
    max_devices = fields.Int(load_default=1, validate=lambda x: x > 0)
    expires_in_days = fields.Int(allow_none=True)  # None/absent -> never expires
    features = fields.List(fields.Str(), load_default=list)
    is_test = fields.Bool(load_default=False)
    metadata = fields.Dict(load_default=dict)
class UpdateLicenseSchema(Schema):
    """Partial-update payload for PATCH /licenses/<id>; every field optional."""
    max_devices = fields.Int(validate=lambda x: x > 0)  # must stay positive
    is_active = fields.Bool()
    expires_at = fields.DateTime()
    features = fields.List(fields.Str())
    metadata = fields.Dict()
class DeactivateDeviceSchema(Schema):
    """Payload identifying the device to deactivate."""
    hardware_id = fields.Str(required=True)
    reason = fields.Str()  # optional free-text audit note
class TransferLicenseSchema(Schema):
    """Payload for moving a license activation between two hardware ids."""
    from_hardware_id = fields.Str(required=True)
    to_hardware_id = fields.Str(required=True)
class SearchLicensesSchema(Schema):
    """Query parameters accepted by GET /api/v1/admin/licenses."""
    customer_id = fields.Str()
    is_active = fields.Bool()
    is_test = fields.Bool()
    created_after = fields.DateTime()
    created_before = fields.DateTime()
    expires_after = fields.DateTime()
    expires_before = fields.DateTime()
    # ``load_default`` replaces the deprecated ``missing`` argument
    # (marshmallow >= 3.13; the project pins 3.20.1).
    page = fields.Int(load_default=1, validate=lambda x: x > 0)
    per_page = fields.Int(load_default=50, validate=lambda x: 0 < x <= 100)
def require_admin_auth(f):
    """Decorator requiring a valid ``X-Admin-API-Key`` header.

    Returns 401 when the header is missing or does not match the
    configured admin key.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        api_key = request.headers.get('X-Admin-API-Key')
        if not api_key:
            return jsonify({"error": "Missing admin API key"}), 401
        # In production, validate against database.  For now, compare with
        # the environment variable -- using a constant-time comparison so
        # the key cannot be recovered via response-timing differences.
        expected = os.getenv('ADMIN_API_KEY', 'admin-key-change-in-production')
        if not secrets.compare_digest(api_key, expected):
            return jsonify({"error": "Invalid admin API key"}), 401
        return f(*args, **kwargs)
    return decorated_function
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe: report service identity and current UTC time."""
    payload = {
        "status": "healthy",
        "service": "admin-api",
        "timestamp": datetime.utcnow().isoformat(),
    }
    return jsonify(payload)
@app.route('/api/v1/admin/licenses', methods=['POST'])
@require_admin_auth
def create_license():
    """Create a new license.

    Validates the JSON body with CreateLicenseSchema, inserts the license
    row, publishes a LICENSE_CREATED event and returns the new license
    (HTTP 201), or 400/500 on validation/insert failure.
    """
    schema = CreateLicenseSchema()
    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400
    # Generate license key -- an opaque, URL-safe random token.
    license_key = f"LIC-{secrets.token_urlsafe(16).upper()}"
    # Calculate expiration (optional, relative to "now" in UTC).
    expires_at = None
    if data.get('expires_in_days'):
        expires_at = datetime.utcnow() + timedelta(days=data['expires_in_days'])
    # Create license in database; all values travel as bound parameters.
    query = """
    INSERT INTO licenses
    (license_key, customer_id, max_devices, is_active, is_test, expires_at, features, metadata)
    VALUES (%s, %s, %s, true, %s, %s, %s, %s)
    RETURNING id
    """
    import json
    license_id = license_repo.execute_insert(query, (
        license_key,
        data['customer_id'],
        data['max_devices'],
        data['is_test'],
        expires_at,
        json.dumps(data['features']),
        json.dumps(data['metadata'])
    ))
    if not license_id:
        return jsonify({"error": "Failed to create license"}), 500
    # Publish event so downstream services learn about the new license.
    event_bus.publish(Event(
        EventTypes.LICENSE_CREATED,
        {
            "license_id": license_id,
            "customer_id": data['customer_id'],
            "license_key": license_key
        },
        "admin-api"
    ))
    return jsonify({
        "id": license_id,
        "license_key": license_key,
        "customer_id": data['customer_id'],
        "max_devices": data['max_devices'],
        "is_test": data['is_test'],
        "expires_at": expires_at.isoformat() if expires_at else None,
        "features": data['features']
    }), 201
@app.route('/api/v1/admin/licenses/<license_id>', methods=['GET'])
@require_admin_auth
def get_license(license_id):
    """Return one license enriched with device, usage and event statistics.

    404 when the license id is unknown.
    """
    # Renamed from ``license`` -- that name shadows the ``license`` builtin.
    record = license_repo.get_license_by_id(license_id)
    if not record:
        return jsonify({"error": "License not found"}), 404
    # Attach operational statistics alongside the raw license row.
    record['active_devices'] = license_repo.get_active_devices(license_id)
    record['usage_stats'] = license_repo.get_license_usage_stats(license_id)
    record['recent_events'] = license_repo.get_recent_activations(license_id)
    return jsonify(record)
@app.route('/api/v1/admin/licenses/<license_id>', methods=['PATCH'])
@require_admin_auth
def update_license(license_id):
    """Partially update a license; only whitelisted fields can change."""
    # BUG FIX: ``json`` was never imported in this scope, so the
    # 'features'/'metadata' branches below raised NameError at runtime.
    import json

    schema = UpdateLicenseSchema()
    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400
    # Build the SET clause dynamically from the validated whitelist; every
    # value travels as a bound parameter, so the f-string below is safe.
    updates = []
    params = []
    if 'max_devices' in data:
        updates.append("max_devices = %s")
        params.append(data['max_devices'])
    if 'is_active' in data:
        updates.append("is_active = %s")
        params.append(data['is_active'])
    if 'expires_at' in data:
        updates.append("expires_at = %s")
        params.append(data['expires_at'])
    if 'features' in data:
        updates.append("features = %s")
        params.append(json.dumps(data['features']))
    if 'metadata' in data:
        updates.append("metadata = %s")
        params.append(json.dumps(data['metadata']))
    if not updates:
        return jsonify({"error": "No fields to update"}), 400
    # Always bump updated_at on a successful PATCH.
    updates.append("updated_at = NOW()")
    # license_id binds to the WHERE clause placeholder.
    params.append(license_id)
    query = f"""
    UPDATE licenses
    SET {', '.join(updates)}
    WHERE id = %s
    RETURNING *
    """
    result = license_repo.execute_one(query, params)
    if not result:
        return jsonify({"error": "License not found"}), 404
    # Cached copies of this license are now stale.
    cache_repo.invalidate_license_cache(license_id)
    event_bus.publish(Event(
        EventTypes.LICENSE_UPDATED,
        {
            "license_id": license_id,
            "changes": list(data.keys())
        },
        "admin-api"
    ))
    return jsonify(result)
@app.route('/api/v1/admin/licenses/<license_id>', methods=['DELETE'])
@require_admin_auth
def delete_license(license_id):
    """Soft-delete: flip is_active to false instead of removing the row."""
    deactivate_sql = """
    UPDATE licenses
    SET is_active = false, updated_at = NOW()
    WHERE id = %s
    RETURNING id
    """
    row = license_repo.execute_one(deactivate_sql, (license_id,))
    if not row:
        return jsonify({"error": "License not found"}), 404
    # Drop any cached copy, then tell the rest of the system.
    cache_repo.invalidate_license_cache(license_id)
    event_bus.publish(Event(
        EventTypes.LICENSE_DEACTIVATED,
        {"license_id": license_id},
        "admin-api",
    ))
    return jsonify({"success": True, "message": "License deactivated"})
@app.route('/api/v1/admin/licenses/<license_id>/devices', methods=['GET'])
@require_admin_auth
def get_license_devices(license_id):
    """List every device ever registered for a license, flagging active ones."""
    active_devices = license_repo.get_active_devices(license_id)
    # Latest successful activation-type event per hardware_id.
    history_sql = """
    SELECT DISTINCT ON (hardware_id)
        hardware_id,
        event_type,
        ip_address,
        user_agent,
        created_at as registered_at,
        metadata
    FROM activation_events
    WHERE license_id = %s
    AND event_type IN ('activation', 'reactivation', 'transfer')
    AND success = true
    ORDER BY hardware_id, created_at DESC
    """
    all_devices = license_repo.execute_query(history_sql, (license_id,))
    # Map hardware_id -> last_seen, keeping the FIRST entry per id (same
    # pick as the previous first-match scan over active_devices).
    last_seen_by_hw = {}
    for active in active_devices:
        last_seen_by_hw.setdefault(active['hardware_id'], active['last_seen'])
    for device in all_devices:
        hw_id = device['hardware_id']
        device['is_active'] = hw_id in last_seen_by_hw
        if device['is_active']:
            device['last_seen'] = last_seen_by_hw[hw_id]
    return jsonify({
        "license_id": license_id,
        "total_devices": len(all_devices),
        "active_devices": len(active_devices),
        "devices": all_devices
    })
@app.route('/api/v1/admin/licenses/<license_id>/devices/deactivate', methods=['POST'])
@require_admin_auth
def deactivate_device(license_id):
    """Deactivate one device of a license and broadcast the change."""
    try:
        payload = DeactivateDeviceSchema().load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400
    if not license_repo.deactivate_device(license_id, payload['hardware_id']):
        return jsonify({"error": "Failed to deactivate device"}), 500
    # Stale cache out, event out.
    cache_repo.invalidate_license_cache(license_id)
    event_bus.publish(Event(
        EventTypes.DEVICE_DEACTIVATED,
        {
            "license_id": license_id,
            "hardware_id": payload['hardware_id'],
            "reason": payload.get('reason', 'Admin action'),
        },
        "admin-api",
    ))
    return jsonify({"success": True, "message": "Device deactivated"})
@app.route('/api/v1/admin/licenses/<license_id>/transfer', methods=['POST'])
@require_admin_auth
def transfer_license(license_id):
    """Move a license activation from one hardware id to another."""
    try:
        payload = TransferLicenseSchema().load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400
    # Prefer the proxy-supplied client address when present.
    ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
    transferred = license_repo.transfer_license(
        license_id,
        payload['from_hardware_id'],
        payload['to_hardware_id'],
        ip_address,
    )
    if not transferred:
        return jsonify({"error": "Failed to transfer license"}), 500
    cache_repo.invalidate_license_cache(license_id)
    event_bus.publish(Event(
        EventTypes.LICENSE_TRANSFERRED,
        {
            "license_id": license_id,
            "from_hardware_id": payload['from_hardware_id'],
            "to_hardware_id": payload['to_hardware_id'],
        },
        "admin-api",
    ))
    return jsonify({"success": True, "message": "License transferred successfully"})
@app.route('/api/v1/admin/licenses', methods=['GET'])
@require_admin_auth
def search_licenses():
    """Search licenses with optional filters; returns a paginated list."""
    schema = SearchLicensesSchema()
    try:
        filters = schema.load(request.args)
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400
    # Build the WHERE clause from the validated whitelist only; every value
    # is a bound parameter, so the dynamic SQL cannot be injected into.
    where_clauses = []
    params = []
    if filters.get('customer_id'):
        where_clauses.append("customer_id = %s")
        params.append(filters['customer_id'])
    if 'is_active' in filters:
        where_clauses.append("is_active = %s")
        params.append(filters['is_active'])
    if 'is_test' in filters:
        where_clauses.append("is_test = %s")
        params.append(filters['is_test'])
    if filters.get('created_after'):
        where_clauses.append("created_at >= %s")
        params.append(filters['created_after'])
    if filters.get('created_before'):
        where_clauses.append("created_at <= %s")
        params.append(filters['created_before'])
    if filters.get('expires_after'):
        where_clauses.append("expires_at >= %s")
        params.append(filters['expires_after'])
    if filters.get('expires_before'):
        where_clauses.append("expires_at <= %s")
        params.append(filters['expires_before'])
    where_sql = " AND ".join(where_clauses) if where_clauses else "1=1"
    # Count total matches first (same WHERE, same params).
    count_query = f"SELECT COUNT(*) as total FROM licenses WHERE {where_sql}"
    total_result = license_repo.execute_one(count_query, params)
    total = total_result['total'] if total_result else 0
    # Get paginated results.
    page = filters['page']
    per_page = filters['per_page']
    offset = (page - 1) * per_page
    # NOTE(review): inner JOIN drops licenses whose customer row is missing.
    query = f"""
    SELECT l.*, c.name as customer_name, c.email as customer_email
    FROM licenses l
    JOIN customers c ON l.customer_id = c.id
    WHERE {where_sql}
    ORDER BY l.created_at DESC
    LIMIT %s OFFSET %s
    """
    params.extend([per_page, offset])
    licenses = license_repo.execute_query(query, params)
    return jsonify({
        "licenses": licenses,
        "pagination": {
            "total": total,
            "page": page,
            "per_page": per_page,
            "pages": (total + per_page - 1) // per_page
        }
    })
@app.route('/api/v1/admin/licenses/<license_id>/events', methods=['GET'])
@require_admin_auth
def get_license_events(license_id):
    """Return activation events for a license within the last N hours."""
    # Window size in hours (query string, default 24).
    hours = request.args.get('hours', 24, type=int)
    events = license_repo.get_recent_activations(license_id, hours)
    return jsonify({
        "license_id": license_id,
        "hours": hours,
        "total_events": len(events),
        "events": events
    })
@app.route('/api/v1/admin/licenses/<license_id>/usage', methods=['GET'])
@require_admin_auth
def get_license_usage(license_id):
    """Return usage statistics: a summary plus a per-day breakdown."""
    # Look-back window in days (query string, default 30).
    days = request.args.get('days', 30, type=int)
    stats = license_repo.get_license_usage_stats(license_id, days)
    # Get daily breakdown.
    # NOTE(review): the %s inside INTERVAL '...' only works because psycopg2
    # interpolates parameters client-side -- confirm the repository uses
    # psycopg2-style binding before swapping drivers.
    query = """
    SELECT
        DATE(timestamp) as date,
        COUNT(*) as validations,
        COUNT(DISTINCT hardware_id) as unique_devices,
        COUNT(DISTINCT ip_address) as unique_ips
    FROM license_heartbeats
    WHERE license_id = %s
    AND timestamp > NOW() - INTERVAL '%s days'
    GROUP BY DATE(timestamp)
    ORDER BY date DESC
    """
    daily_stats = license_repo.execute_query(query, (license_id, days))
    return jsonify({
        "license_id": license_id,
        "days": days,
        "summary": stats,
        "daily": daily_stats
    })
@app.route('/api/v1/admin/licenses/<license_id>/anomalies', methods=['GET'])
@require_admin_auth
def get_license_anomalies(license_id):
    """Return up to 100 most recent anomaly detections for a license."""
    query = """
    SELECT * FROM anomaly_detections
    WHERE license_id = %s
    ORDER BY detected_at DESC
    LIMIT 100
    """
    anomalies = license_repo.execute_query(query, (license_id,))
    return jsonify({
        "license_id": license_id,
        "total_anomalies": len(anomalies),
        "anomalies": anomalies
    })
@app.route('/api/v1/admin/licenses/<license_id>/anomalies/<anomaly_id>/resolve', methods=['POST'])
@require_admin_auth
def resolve_anomaly(license_id, anomaly_id):
    """Mark a single anomaly as resolved, recording an audit note."""
    # Body is optional; fall back to a generic note.
    data = request.get_json() or {}
    action_taken = data.get('action_taken', 'Resolved by admin')
    # license_id in the WHERE clause prevents resolving another license's anomaly.
    query = """
    UPDATE anomaly_detections
    SET resolved = true,
        resolved_at = NOW(),
        resolved_by = 'admin',
        action_taken = %s
    WHERE id = %s AND license_id = %s
    RETURNING id
    """
    result = license_repo.execute_one(query, (action_taken, anomaly_id, license_id))
    if not result:
        return jsonify({"error": "Anomaly not found"}), 404
    return jsonify({"success": True, "message": "Anomaly resolved"})
@app.route('/api/v1/admin/licenses/bulk-create', methods=['POST'])
@require_admin_auth
def bulk_create_licenses():
    """Create many licenses in one call; failures are reported per item.

    Returns 201 when at least one license was created, otherwise 400.
    """
    # Hoisted out of the loop: previously re-imported on every iteration.
    import json

    data = request.get_json()
    if not data or 'licenses' not in data:
        return jsonify({"error": "Missing licenses array"}), 400
    schema = CreateLicenseSchema()
    # Loop-invariant INSERT statement (was rebuilt on every iteration).
    query = """
    INSERT INTO licenses
    (license_key, customer_id, max_devices, is_active, is_test, expires_at, features, metadata)
    VALUES (%s, %s, %s, true, %s, %s, %s, %s)
    RETURNING id
    """
    created_licenses = []
    errors = []
    for idx, license_data in enumerate(data['licenses']):
        try:
            validated_data = schema.load(license_data)
            # Opaque, URL-safe random key per license.
            license_key = f"LIC-{secrets.token_urlsafe(16).upper()}"
            expires_at = None
            if validated_data.get('expires_in_days'):
                expires_at = datetime.utcnow() + timedelta(days=validated_data['expires_in_days'])
            license_id = license_repo.execute_insert(query, (
                license_key,
                validated_data['customer_id'],
                validated_data['max_devices'],
                validated_data['is_test'],
                expires_at,
                json.dumps(validated_data['features']),
                json.dumps(validated_data['metadata'])
            ))
            if license_id:
                created_licenses.append({
                    "id": license_id,
                    "license_key": license_key,
                    "customer_id": validated_data['customer_id']
                })
            # NOTE(review): a falsy license_id is silently dropped -- it is
            # neither in 'licenses' nor in 'errors'; confirm this is intended.
        except Exception as e:
            # Per-item isolation: one bad entry must not abort the batch.
            errors.append({
                "index": idx,
                "error": str(e)
            })
    return jsonify({
        "created": len(created_licenses),
        "failed": len(errors),
        "licenses": created_licenses,
        "errors": errors
    }), 201 if created_licenses else 400
@app.route('/api/v1/admin/statistics', methods=['GET'])
@require_admin_auth
def get_statistics():
    """Return fleet-wide license statistics in a single query.

    Combines license counts, devices seen in the last 15 minutes and
    today's validation activity via three CTEs cross-joined at the end
    (each CTE yields exactly one row, so the cross join is 1x1x1).
    """
    query = """
    WITH stats AS (
        SELECT
            COUNT(*) as total_licenses,
            COUNT(*) FILTER (WHERE is_active = true) as active_licenses,
            COUNT(*) FILTER (WHERE is_test = true) as test_licenses,
            COUNT(*) FILTER (WHERE expires_at < NOW()) as expired_licenses,
            COUNT(DISTINCT customer_id) as total_customers
        FROM licenses
    ),
    device_stats AS (
        SELECT COUNT(DISTINCT hardware_id) as total_devices
        FROM license_heartbeats
        WHERE timestamp > NOW() - INTERVAL '15 minutes'
    ),
    validation_stats AS (
        SELECT
            COUNT(*) as validations_today,
            COUNT(DISTINCT license_id) as licenses_used_today
        FROM license_heartbeats
        WHERE timestamp > CURRENT_DATE
    )
    SELECT * FROM stats, device_stats, validation_stats
    """
    stats = license_repo.execute_one(query)
    return jsonify(stats or {})
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's HTML default page."""
    return jsonify({"error": "Not found"}), 404
@app.errorhandler(500)
def internal_error(error):
    """Log unexpected errors and return a generic JSON 500 body."""
    # Lazy %-formatting defers string building to the logging framework
    # (idiomatic; an f-string was previously formatted unconditionally).
    logger.error("Internal error: %s", error)
    return jsonify({"error": "Internal server error"}), 500
if __name__ == '__main__':
    # Bind on all interfaces for containerised deployment.  The Werkzeug
    # debugger is now opt-in via FLASK_DEBUG: hard-coding debug=True would
    # expose an interactive (remote-code-execution) console in production.
    debug_enabled = os.getenv('FLASK_DEBUG', '').lower() in ('1', 'true', 'yes')
    app.run(host='0.0.0.0', port=5004, debug=debug_enabled)

Datei anzeigen

@@ -0,0 +1 @@
# Analytics Service

Datei anzeigen

@@ -0,0 +1,478 @@
import os
import sys
from flask import Flask, request, jsonify
from flask_cors import CORS
import logging
from functools import wraps
from datetime import datetime, timedelta
import asyncio
from concurrent.futures import ThreadPoolExecutor
# Add parent directory to path for imports
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from config import get_config
from repositories.license_repo import LicenseRepository
from repositories.cache_repo import CacheRepository
from events.event_bus import EventBus, Event, EventTypes
from models import AnomalyType, Severity
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)  # module-level logger, named after the module
# Initialize Flask app
app = Flask(__name__)
config = get_config()
app.config.from_object(config)
CORS(app)  # NOTE(review): default CORS(app) allows all origins -- confirm intended
# Initialize dependencies (shared singletons for this service process)
license_repo = LicenseRepository(config.DATABASE_URL)
cache_repo = CacheRepository(config.REDIS_URL)
event_bus = EventBus(config.RABBITMQ_URL)
# Thread pool for async operations
# NOTE(review): 'executor' is not referenced anywhere in the visible code.
executor = ThreadPoolExecutor(max_workers=10)
def require_auth(f):
    """Decorator requiring an ``X-API-Key`` header on the request.

    Rejects with 401 when the header is absent or lacks the 'sk_' prefix.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        api_key = request.headers.get('X-API-Key')
        if not api_key:
            return jsonify({"error": "Missing API key"}), 401
        # Simple validation for now -- only the prefix is checked.
        # NOTE(review): this accepts ANY value starting with 'sk_'; replace
        # with a real key lookup before production use.
        if not api_key.startswith('sk_'):
            return jsonify({"error": "Invalid API key"}), 401
        return f(*args, **kwargs)
    return decorated_function
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe for the analytics service."""
    now = datetime.utcnow().isoformat()
    return jsonify({"status": "healthy", "service": "analytics", "timestamp": now})
@app.route('/api/v1/analytics/licenses/<license_id>/patterns', methods=['GET'])
@require_auth
def analyze_license_patterns(license_id):
    """Aggregate heartbeats into day-of-week/hour-of-day usage patterns.

    Also runs spike detection over the aggregated buckets and returns any
    anomalies found alongside the raw patterns.
    """
    # Look-back window in days (query string, default 30).
    days = request.args.get('days', 30, type=int)
    # Bucket heartbeats per hour, then average per (weekday, hour) slot.
    # NOTE(review): the %s inside INTERVAL '...' relies on psycopg2's
    # client-side parameter interpolation -- confirm the repository uses it.
    query = """
    WITH hourly_usage AS (
        SELECT
            DATE_TRUNC('hour', timestamp) as hour,
            COUNT(*) as validations,
            COUNT(DISTINCT hardware_id) as devices,
            COUNT(DISTINCT ip_address) as ips
        FROM license_heartbeats
        WHERE license_id = %s
        AND timestamp > NOW() - INTERVAL '%s days'
        GROUP BY DATE_TRUNC('hour', timestamp)
    ),
    daily_patterns AS (
        SELECT
            EXTRACT(DOW FROM hour) as day_of_week,
            EXTRACT(HOUR FROM hour) as hour_of_day,
            AVG(validations) as avg_validations,
            MAX(devices) as max_devices
        FROM hourly_usage
        GROUP BY day_of_week, hour_of_day
    )
    SELECT * FROM daily_patterns
    ORDER BY day_of_week, hour_of_day
    """
    patterns = license_repo.execute_query(query, (license_id, days))
    # Detect anomalies in the aggregated buckets.
    anomalies = detect_usage_anomalies(license_id, patterns)
    return jsonify({
        "license_id": license_id,
        "days_analyzed": days,
        "patterns": patterns,
        "anomalies": anomalies
    })
@app.route('/api/v1/analytics/licenses/<license_id>/anomalies/detect', methods=['POST'])
@require_auth
def detect_anomalies(license_id):
    """Run every anomaly detector for a license and persist the findings."""
    # Each detector takes a license id and returns a (possibly empty)
    # list of anomaly dicts; order matches the original check sequence.
    detectors = (
        check_multiple_ips,
        check_rapid_hardware_changes,
        check_concurrent_usage,
        check_geographic_anomalies,
    )
    anomalies = []
    for detector in detectors:
        anomalies.extend(detector(license_id))
    # Persist everything that was found.
    for anomaly in anomalies:
        store_anomaly(license_id, anomaly)
    return jsonify({
        "license_id": license_id,
        "anomalies_detected": len(anomalies),
        "anomalies": anomalies,
    })
@app.route('/api/v1/analytics/licenses/<license_id>/risk-score', methods=['GET'])
@require_auth
def get_risk_score(license_id):
    """Compute a 0-100 risk score from unresolved anomalies of the last 30 days.

    Each anomaly contributes a severity weight scaled by recency (newer
    anomalies count more); the sum is capped at 100 and mapped to a level.
    """
    # Get unresolved anomalies from the last 30 days.
    query = """
    SELECT anomaly_type, severity, detected_at
    FROM anomaly_detections
    WHERE license_id = %s
    AND detected_at > NOW() - INTERVAL '30 days'
    AND resolved = false
    """
    anomalies = license_repo.execute_query(query, (license_id,))
    # Severity -> base weight; unknown severities contribute nothing.
    risk_score = 0
    severity_weights = {
        'low': 10,
        'medium': 25,
        'high': 50,
        'critical': 100
    }
    for anomaly in anomalies:
        weight = severity_weights.get(anomaly['severity'], 0)
        # Recent anomalies have higher weight: linear decay, floored at 0.5.
        # NOTE(review): assumes detected_at arrives as a naive UTC datetime;
        # a tz-aware value would make this subtraction raise -- confirm.
        days_old = (datetime.utcnow() - anomaly['detected_at']).days
        recency_factor = max(0.5, 1 - (days_old / 30))
        risk_score += weight * recency_factor
    # Normalize to 0-100.
    risk_score = min(100, risk_score)
    # Map the numeric score onto a coarse level.
    if risk_score < 20:
        risk_level = "low"
    elif risk_score < 50:
        risk_level = "medium"
    elif risk_score < 80:
        risk_level = "high"
    else:
        risk_level = "critical"
    return jsonify({
        "license_id": license_id,
        "risk_score": round(risk_score, 2),
        "risk_level": risk_level,
        "active_anomalies": len(anomalies),
        "factors": anomalies
    })
@app.route('/api/v1/analytics/reports/usage', methods=['GET'])
@require_auth
def generate_usage_report():
    """Build a per-license usage report plus fleet-level summary numbers."""
    # Look-back window in days (query string, default 30).
    days = request.args.get('days', 30, type=int)
    # NOTE(review): the %s inside INTERVAL '...' relies on psycopg2's
    # client-side parameter interpolation -- confirm the repository uses it.
    query = """
    WITH license_stats AS (
        SELECT
            l.id,
            l.license_key,
            l.customer_id,
            c.name as customer_name,
            l.max_devices,
            l.is_test,
            l.expires_at,
            COUNT(DISTINCT lh.hardware_id) as active_devices,
            COUNT(lh.*) as total_validations,
            MAX(lh.timestamp) as last_validation
        FROM licenses l
        LEFT JOIN customers c ON l.customer_id = c.id
        LEFT JOIN license_heartbeats lh ON l.id = lh.license_id
        AND lh.timestamp > NOW() - INTERVAL '%s days'
        WHERE l.is_active = true
        GROUP BY l.id, l.license_key, l.customer_id, c.name, l.max_devices, l.is_test, l.expires_at
    )
    SELECT
        *,
        CASE
            WHEN total_validations = 0 THEN 'inactive'
            WHEN active_devices > max_devices THEN 'over_limit'
            WHEN expires_at < NOW() THEN 'expired'
            ELSE 'active'
        END as status,
        ROUND((active_devices::numeric / NULLIF(max_devices, 0)) * 100, 2) as device_utilization
    FROM license_stats
    ORDER BY total_validations DESC
    """
    report = license_repo.execute_query(query, (days,))
    # Fleet-level summary; the average is guarded against an empty report.
    summary = {
        "total_licenses": len(report),
        "active_licenses": len([r for r in report if r['status'] == 'active']),
        "inactive_licenses": len([r for r in report if r['status'] == 'inactive']),
        "over_limit_licenses": len([r for r in report if r['status'] == 'over_limit']),
        "expired_licenses": len([r for r in report if r['status'] == 'expired']),
        "total_validations": sum(r['total_validations'] for r in report),
        "average_device_utilization": sum(r['device_utilization'] or 0 for r in report) / len(report) if report else 0
    }
    return jsonify({
        "period_days": days,
        "generated_at": datetime.utcnow().isoformat(),
        "summary": summary,
        "licenses": report
    })
@app.route('/api/v1/analytics/reports/revenue', methods=['GET'])
@require_auth
def generate_revenue_report():
    """Placeholder endpoint: revenue analytics need pricing data first."""
    placeholder_body = {
        "message": "Revenue reporting requires pricing data integration",
        "placeholder": True,
    }
    return jsonify(placeholder_body)
def detect_usage_anomalies(license_id, patterns):
    """Flag hour buckets whose average validations spike far above the mean.

    A bucket counting more than 3x the overall average is reported as a
    SUSPICIOUS_PATTERN anomaly of medium severity.  Returns a list of
    anomaly dicts (empty when no patterns were supplied).
    """
    anomalies = []
    if not patterns:
        return anomalies
    validations = [p['avg_validations'] for p in patterns]
    if validations:
        avg_validations = sum(validations) / len(validations)
        # NOTE: the previous version also computed max(validations) but
        # never used it; the unused local has been removed.
        for pattern in patterns:
            if pattern['avg_validations'] > avg_validations * 3:
                anomalies.append({
                    "type": AnomalyType.SUSPICIOUS_PATTERN.value,
                    "severity": Severity.MEDIUM.value,
                    "details": {
                        "day": pattern['day_of_week'],
                        "hour": pattern['hour_of_day'],
                        "validations": pattern['avg_validations'],
                        "average": avg_validations
                    }
                })
    return anomalies
def check_multiple_ips(license_id):
    """Flag a license used from more distinct IPs (last hour) than allowed.

    The threshold comes from config.ANOMALY_MULTIPLE_IPS_THRESHOLD.
    Returns a list with at most one HIGH-severity anomaly dict.
    """
    query = """
    SELECT
        COUNT(DISTINCT ip_address) as ip_count,
        array_agg(DISTINCT ip_address) as ips
    FROM license_heartbeats
    WHERE license_id = %s
    AND timestamp > NOW() - INTERVAL '1 hour'
    """
    result = license_repo.execute_one(query, (license_id,))
    anomalies = []
    if result and result['ip_count'] > config.ANOMALY_MULTIPLE_IPS_THRESHOLD:
        anomalies.append({
            "type": AnomalyType.MULTIPLE_IPS.value,
            "severity": Severity.HIGH.value,
            "details": {
                "ip_count": result['ip_count'],
                "ips": result['ips'][:10],  # Limit to 10 IPs to keep the payload small
                "threshold": config.ANOMALY_MULTIPLE_IPS_THRESHOLD
            }
        })
    return anomalies
def check_rapid_hardware_changes(license_id):
    """Detect hardware IDs changing faster than the configured threshold.

    Fetches successful activation/transfer events from the last hour
    (newest first) and flags each consecutive pair of *different*
    hardware IDs that occurred less than
    config.ANOMALY_RAPID_HARDWARE_CHANGE_MINUTES apart.

    Returns:
        List of HIGH-severity RAPID_HARDWARE_CHANGE anomaly dicts.
    """
    query = """
        SELECT 
            hardware_id,
            created_at
        FROM activation_events
        WHERE license_id = %s
        AND event_type IN ('activation', 'transfer')
        AND created_at > NOW() - INTERVAL '1 hour'
        AND success = true
        ORDER BY created_at DESC
    """
    events = license_repo.execute_query(query, (license_id,))
    anomalies = []
    # Events are ordered newest-first, so `newer` precedes `older` in time.
    for newer, older in zip(events, events[1:]):
        # Re-activating on the same hardware is not a hardware *change*;
        # without this guard repeated activations on one machine would be
        # falsely flagged.
        if newer['hardware_id'] == older['hardware_id']:
            continue
        minutes_apart = (newer['created_at'] - older['created_at']).total_seconds() / 60
        if minutes_apart < config.ANOMALY_RAPID_HARDWARE_CHANGE_MINUTES:
            anomalies.append({
                "type": AnomalyType.RAPID_HARDWARE_CHANGE.value,
                "severity": Severity.HIGH.value,
                "details": {
                    "hardware_ids": [newer['hardware_id'], older['hardware_id']],
                    "time_difference_minutes": round(minutes_apart, 2),
                    "threshold_minutes": config.ANOMALY_RAPID_HARDWARE_CHANGE_MINUTES
                }
            })
    return anomalies
def check_concurrent_usage(license_id):
    """Detect concurrent usage of one license from different devices.

    Self-joins the last 15 minutes of heartbeats and counts pairs of
    heartbeats from different hardware IDs that occurred within 300
    seconds (5 minutes) of each other. The `<` comparison on hardware_id
    counts each unordered device pair once; the original `!=` form
    counted every pair twice (once per join direction), inflating the
    reported session count.

    Returns:
        List with zero or one CRITICAL-severity CONCURRENT_USE anomaly.
    """
    query = """
        WITH concurrent_sessions AS (
            SELECT 
                h1.hardware_id as hw1,
                h2.hardware_id as hw2,
                h1.timestamp as time1,
                h2.timestamp as time2
            FROM license_heartbeats h1
            JOIN license_heartbeats h2 ON h1.license_id = h2.license_id
            WHERE h1.license_id = %s
            AND h2.license_id = %s
            AND h1.hardware_id < h2.hardware_id
            AND h1.timestamp > NOW() - INTERVAL '15 minutes'
            AND h2.timestamp > NOW() - INTERVAL '15 minutes'
            AND ABS(EXTRACT(EPOCH FROM h1.timestamp - h2.timestamp)) < 300
        )
        SELECT COUNT(*) as concurrent_count
        FROM concurrent_sessions
    """
    result = license_repo.execute_one(query, (license_id, license_id))
    anomalies = []
    if result and result['concurrent_count'] > 0:
        anomalies.append({
            "type": AnomalyType.CONCURRENT_USE.value,
            "severity": Severity.CRITICAL.value,
            "details": {
                "concurrent_sessions": result['concurrent_count'],
                # Matches the 300-second overlap window in the query.
                "timeframe_minutes": 5
            }
        })
    return anomalies
def check_geographic_anomalies(license_id):
    """Check for geographic (impossible-travel) anomalies.

    Placeholder: a real implementation requires an IP geolocation
    service, so no anomalies are ever reported yet.

    Returns:
        Always an empty list.
    """
    return []
def store_anomaly(license_id, anomaly):
    """Persist a detected anomaly and announce it on the event bus.

    Args:
        license_id: License the anomaly belongs to.
        anomaly: Dict with 'type', 'severity' and 'details' keys, as
            produced by the check_* helpers.

    Duplicate (license_id, anomaly_type, details) rows are silently
    ignored via ON CONFLICT ... DO NOTHING.
    """
    import json

    insert_sql = """
        INSERT INTO anomaly_detections 
        (license_id, anomaly_type, severity, details)
        VALUES (%s, %s, %s, %s)
        ON CONFLICT (license_id, anomaly_type, details) DO NOTHING
    """
    params = (
        license_id,
        anomaly['type'],
        anomaly['severity'],
        json.dumps(anomaly['details']),
    )
    license_repo.execute_insert(insert_sql, params)
    # Notify downstream consumers (alerting, dashboards) immediately.
    event_payload = {
        "license_id": license_id,
        "anomaly": anomaly
    }
    event_bus.publish(Event(EventTypes.ANOMALY_DETECTED, event_payload, "analytics"))
@app.route('/api/v1/analytics/dashboard', methods=['GET'])
@require_auth
def get_dashboard_data():
    """Return aggregated data for the analytics dashboard.

    One query combines three CTEs: today's activity counts
    (current_stats), the last 7 days of anomaly statistics
    (anomaly_stats) and a 7-day daily trend series (trend_data,
    embedded as a JSON array in the 'trends' column).
    """
    dashboard_query = """
        WITH current_stats AS (
            SELECT 
                COUNT(DISTINCT license_id) as active_licenses,
                COUNT(DISTINCT hardware_id) as active_devices,
                COUNT(*) as validations_today
            FROM license_heartbeats
            WHERE timestamp > CURRENT_DATE
        ),
        anomaly_stats AS (
            SELECT 
                COUNT(*) as total_anomalies,
                COUNT(*) FILTER (WHERE severity = 'critical') as critical_anomalies,
                COUNT(*) FILTER (WHERE resolved = false) as unresolved_anomalies
            FROM anomaly_detections
            WHERE detected_at > CURRENT_DATE - INTERVAL '7 days'
        ),
        trend_data AS (
            SELECT 
                DATE(timestamp) as date,
                COUNT(*) as validations,
                COUNT(DISTINCT license_id) as licenses,
                COUNT(DISTINCT hardware_id) as devices
            FROM license_heartbeats
            WHERE timestamp > CURRENT_DATE - INTERVAL '7 days'
            GROUP BY DATE(timestamp)
            ORDER BY date
        )
        SELECT 
            cs.*,
            ans.*,
            (SELECT json_agg(td.*) FROM trend_data td) as trends
        FROM current_stats cs, anomaly_stats ans
    """
    row = license_repo.execute_one(dashboard_query)
    # An empty database yields no row; serve {} instead of null.
    return jsonify(row or {})
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's default HTML page."""
    body = jsonify({"error": "Not found"})
    return body, 404
@app.errorhandler(500)
def internal_error(error):
    """Log unexpected server errors and return a generic JSON 500.

    The exception detail is logged server-side only; the client receives
    a fixed message so internals are never leaked in responses.
    """
    # Lazy %-args: the message is only interpolated if the record is emitted.
    logger.error("Internal error: %s", error)
    return jsonify({"error": "Internal server error"}), 500
if __name__ == '__main__':
    import os

    # SECURITY: never hard-code debug=True on a 0.0.0.0-bound server —
    # the Werkzeug debugger allows remote code execution if exposed.
    # Debug mode is now opt-in via the FLASK_DEBUG environment variable.
    debug_mode = os.environ.get('FLASK_DEBUG', '').lower() in ('1', 'true', 'yes')
    app.run(host='0.0.0.0', port=int(os.environ.get('PORT', 5003)), debug=debug_mode)