Initial commit
Dieser Commit ist enthalten in:
30
lizenzserver/.env.example
Normale Datei
30
lizenzserver/.env.example
Normale Datei
@ -0,0 +1,30 @@
|
||||
# Database Configuration
|
||||
DB_PASSWORD=secure_password_change_this
|
||||
|
||||
# Redis Configuration
|
||||
REDIS_PASSWORD=redis_password_change_this
|
||||
|
||||
# RabbitMQ Configuration
|
||||
RABBITMQ_USER=admin
|
||||
RABBITMQ_PASS=admin_password_change_this
|
||||
|
||||
# JWT Configuration
|
||||
JWT_SECRET=change_this_very_secret_key_in_production
|
||||
|
||||
# Admin Configuration
|
||||
ADMIN_SECRET=change_this_admin_secret
|
||||
ADMIN_API_KEY=admin-key-change-in-production
|
||||
|
||||
# Flask Environment
|
||||
FLASK_ENV=production
|
||||
|
||||
# Rate Limiting (optional overrides)
|
||||
# DEFAULT_RATE_LIMIT_PER_MINUTE=60
|
||||
# DEFAULT_RATE_LIMIT_PER_HOUR=1000
|
||||
# DEFAULT_RATE_LIMIT_PER_DAY=10000
|
||||
|
||||
# Service URLs (for external access)
|
||||
# AUTH_SERVICE_URL=http://localhost:5001
|
||||
# LICENSE_API_URL=http://localhost:5002
|
||||
# ANALYTICS_SERVICE_URL=http://localhost:5003
|
||||
# ADMIN_API_URL=http://localhost:5004
|
||||
561
lizenzserver/API_DOCUMENTATION.md
Normale Datei
561
lizenzserver/API_DOCUMENTATION.md
Normale Datei
@ -0,0 +1,561 @@
|
||||
# License Server API Documentation
|
||||
|
||||
## Overview
|
||||
|
||||
The License Server provides a comprehensive API for managing software licenses, validating license keys, and tracking usage. The system consists of four main services:
|
||||
|
||||
1. **Auth Service** - JWT token management and API authentication
|
||||
2. **License API** - License validation and activation
|
||||
3. **Admin API** - License management and administration
|
||||
4. **Analytics Service** - Usage analytics and anomaly detection
|
||||
|
||||
## Base URLs
|
||||
|
||||
- Auth Service: `http://localhost:5001`
|
||||
- License API: `http://localhost:5002`
|
||||
- Analytics Service: `http://localhost:5003`
|
||||
- Admin API: `http://localhost:5004`
|
||||
|
||||
## Authentication
|
||||
|
||||
### API Key Authentication
|
||||
|
||||
Most endpoints require an API key in the `X-API-Key` header:
|
||||
|
||||
```
|
||||
X-API-Key: sk_your_api_key_here
|
||||
```
|
||||
|
||||
### JWT Authentication
|
||||
|
||||
Some endpoints use JWT bearer tokens:
|
||||
|
||||
```
|
||||
Authorization: Bearer your_jwt_token_here
|
||||
```
|
||||
|
||||
## Auth Service Endpoints
|
||||
|
||||
### Create Access Token
|
||||
|
||||
Create JWT access token for license validation.
|
||||
|
||||
**POST** `/api/v1/auth/token`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"license_id": "string",
|
||||
"hardware_id": "string"
|
||||
}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"access_token": "string",
|
||||
"refresh_token": "string",
|
||||
"token_type": "Bearer",
|
||||
"expires_in": 3600
|
||||
}
|
||||
```
|
||||
|
||||
### Refresh Token
|
||||
|
||||
Refresh an expired access token.
|
||||
|
||||
**POST** `/api/v1/auth/refresh`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"refresh_token": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Verify Token
|
||||
|
||||
Verify token validity.
|
||||
|
||||
**POST** `/api/v1/auth/verify`
|
||||
|
||||
**Headers:**
|
||||
- `Authorization: Bearer <token>`
|
||||
|
||||
### Create API Key (Admin)
|
||||
|
||||
Create new API key for client applications.
|
||||
|
||||
**POST** `/api/v1/auth/api-key`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-Secret: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"client_name": "string",
|
||||
"allowed_endpoints": ["array", "of", "endpoints"]
|
||||
}
|
||||
```
|
||||
|
||||
## License API Endpoints
|
||||
|
||||
### Validate License
|
||||
|
||||
Validate a license key with hardware ID.
|
||||
|
||||
**POST** `/api/v1/license/validate`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"license_key": "string",
|
||||
"hardware_id": "string",
|
||||
"app_version": "string (optional)"
|
||||
}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"valid": true,
|
||||
"license_id": "string",
|
||||
"expires_at": "2024-12-31T23:59:59Z",
|
||||
"features": ["feature1", "feature2"],
|
||||
"limits": {
|
||||
"max_devices": 5,
|
||||
"current_devices": 2
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Activate License
|
||||
|
||||
Activate license on a new device.
|
||||
|
||||
**POST** `/api/v1/license/activate`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"license_key": "string",
|
||||
"hardware_id": "string",
|
||||
"device_name": "string (optional)",
|
||||
"os_info": {
|
||||
"name": "Windows",
|
||||
"version": "10"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Heartbeat
|
||||
|
||||
Record license heartbeat (requires JWT).
|
||||
|
||||
**POST** `/api/v1/license/heartbeat`
|
||||
|
||||
**Headers:**
|
||||
- `Authorization: Bearer <token>`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"session_data": {
|
||||
"custom": "data"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Create Offline Token
|
||||
|
||||
Generate offline validation token.
|
||||
|
||||
**POST** `/api/v1/license/offline-token`
|
||||
|
||||
**Headers:**
|
||||
- `Authorization: Bearer <token>`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"duration_hours": 24
|
||||
}
|
||||
```
|
||||
|
||||
### Validate Offline Token
|
||||
|
||||
Validate an offline token.
|
||||
|
||||
**POST** `/api/v1/license/validate-offline`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"token": "string"
|
||||
}
|
||||
```
|
||||
|
||||
## Admin API Endpoints
|
||||
|
||||
### Create License
|
||||
|
||||
Create a new license.
|
||||
|
||||
**POST** `/api/v1/admin/licenses`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"customer_id": "string",
|
||||
"max_devices": 5,
|
||||
"expires_in_days": 365,
|
||||
"features": ["feature1", "feature2"],
|
||||
"is_test": false,
|
||||
"metadata": {
|
||||
"custom": "data"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Get License
|
||||
|
||||
Get license details with statistics.
|
||||
|
||||
**GET** `/api/v1/admin/licenses/{license_id}`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
### Update License
|
||||
|
||||
Update license properties.
|
||||
|
||||
**PATCH** `/api/v1/admin/licenses/{license_id}`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"max_devices": 10,
|
||||
"is_active": true,
|
||||
"expires_at": "2025-12-31T23:59:59Z",
|
||||
"features": ["new_feature"],
|
||||
"metadata": {}
|
||||
}
|
||||
```
|
||||
|
||||
### Delete License
|
||||
|
||||
Soft delete (deactivate) a license.
|
||||
|
||||
**DELETE** `/api/v1/admin/licenses/{license_id}`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
### List Licenses
|
||||
|
||||
Search and list licenses with filters.
|
||||
|
||||
**GET** `/api/v1/admin/licenses`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Query Parameters:**
|
||||
- `customer_id`: Filter by customer
|
||||
- `is_active`: Filter by active status
|
||||
- `is_test`: Filter test licenses
|
||||
- `created_after`: Filter by creation date
|
||||
- `created_before`: Filter by creation date
|
||||
- `expires_after`: Filter by expiration
|
||||
- `expires_before`: Filter by expiration
|
||||
- `page`: Page number (default: 1)
|
||||
- `per_page`: Items per page (default: 50, max: 100)
|
||||
|
||||
### Get License Devices
|
||||
|
||||
Get all devices for a license.
|
||||
|
||||
**GET** `/api/v1/admin/licenses/{license_id}/devices`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
### Deactivate Device
|
||||
|
||||
Deactivate a specific device.
|
||||
|
||||
**POST** `/api/v1/admin/licenses/{license_id}/devices/deactivate`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"hardware_id": "string",
|
||||
"reason": "string (optional)"
|
||||
}
|
||||
```
|
||||
|
||||
### Transfer License
|
||||
|
||||
Transfer license between devices.
|
||||
|
||||
**POST** `/api/v1/admin/licenses/{license_id}/transfer`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"from_hardware_id": "string",
|
||||
"to_hardware_id": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Get License Events
|
||||
|
||||
Get activation events for a license.
|
||||
|
||||
**GET** `/api/v1/admin/licenses/{license_id}/events`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Query Parameters:**
|
||||
- `hours`: Hours to look back (default: 24)
|
||||
|
||||
### Get License Usage
|
||||
|
||||
Get usage statistics for a license.
|
||||
|
||||
**GET** `/api/v1/admin/licenses/{license_id}/usage`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Query Parameters:**
|
||||
- `days`: Days to analyze (default: 30)
|
||||
|
||||
### Bulk Create Licenses
|
||||
|
||||
Create multiple licenses at once.
|
||||
|
||||
**POST** `/api/v1/admin/licenses/bulk-create`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
**Request Body:**
|
||||
```json
|
||||
{
|
||||
"licenses": [
|
||||
{
|
||||
"customer_id": "string",
|
||||
"max_devices": 5,
|
||||
"expires_in_days": 365
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Get Statistics
|
||||
|
||||
Get overall license statistics.
|
||||
|
||||
**GET** `/api/v1/admin/statistics`
|
||||
|
||||
**Headers:**
|
||||
- `X-Admin-API-Key: required`
|
||||
|
||||
## Analytics Service Endpoints
|
||||
|
||||
### Analyze Patterns
|
||||
|
||||
Analyze usage patterns for a license.
|
||||
|
||||
**GET** `/api/v1/analytics/licenses/{license_id}/patterns`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
**Query Parameters:**
|
||||
- `days`: Days to analyze (default: 30)
|
||||
|
||||
### Detect Anomalies
|
||||
|
||||
Manually trigger anomaly detection.
|
||||
|
||||
**POST** `/api/v1/analytics/licenses/{license_id}/anomalies/detect`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
### Get Risk Score
|
||||
|
||||
Calculate risk score for a license.
|
||||
|
||||
**GET** `/api/v1/analytics/licenses/{license_id}/risk-score`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
### Generate Usage Report
|
||||
|
||||
Generate usage report for all licenses.
|
||||
|
||||
**GET** `/api/v1/analytics/reports/usage`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
**Query Parameters:**
|
||||
- `days`: Days to include (default: 30)
|
||||
|
||||
### Get Dashboard Data
|
||||
|
||||
Get analytics dashboard data.
|
||||
|
||||
**GET** `/api/v1/analytics/dashboard`
|
||||
|
||||
**Headers:**
|
||||
- `X-API-Key: required`
|
||||
|
||||
## Error Responses
|
||||
|
||||
All endpoints use standard HTTP status codes and return errors in this format:
|
||||
|
||||
```json
|
||||
{
|
||||
"error": "Error message",
|
||||
"error_code": "ERROR_CODE",
|
||||
"details": {}
|
||||
}
|
||||
```
|
||||
|
||||
### Common Error Codes
|
||||
|
||||
- `LICENSE_NOT_FOUND` - License key not found
|
||||
- `LICENSE_INACTIVE` - License is deactivated
|
||||
- `LICENSE_EXPIRED` - License has expired
|
||||
- `DEVICE_LIMIT_EXCEEDED` - Device limit reached
|
||||
- `ALREADY_ACTIVATED` - Already activated on device
|
||||
- `INVALID_TOKEN` - Invalid JWT token
|
||||
- `RATE_LIMIT_EXCEEDED` - Rate limit exceeded
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
API requests are rate limited based on API key configuration:
|
||||
|
||||
- Default: 60 requests per minute, 1000 per hour
|
||||
- Rate limit headers are included in responses:
|
||||
- `X-RateLimit-Limit`: Requests per minute
|
||||
- `X-RateLimit-Remaining`: Remaining requests
|
||||
- `Retry-After`: Seconds until retry (on 429 errors)
|
||||
|
||||
## Webhooks
|
||||
|
||||
The system publishes events to RabbitMQ for real-time processing:
|
||||
|
||||
- `license.validated` - License validation successful
|
||||
- `license.validation.failed` - License validation failed
|
||||
- `license.activated` - New device activated
|
||||
- `license.deactivated` - License deactivated
|
||||
- `license.transferred` - License transferred
|
||||
- `anomaly.detected` - Anomaly detected
|
||||
- `device.deactivated` - Device deactivated
|
||||
|
||||
## SDK Examples
|
||||
|
||||
### Python
|
||||
|
||||
```python
|
||||
import requests
|
||||
|
||||
# Initialize client
|
||||
api_key = "sk_your_api_key"
|
||||
base_url = "http://localhost:5002"
|
||||
|
||||
# Validate license
|
||||
response = requests.post(
|
||||
f"{base_url}/api/v1/license/validate",
|
||||
headers={"X-API-Key": api_key},
|
||||
json={
|
||||
"license_key": "LIC-XXXXXXXXXXXX",
|
||||
"hardware_id": "device-123"
|
||||
}
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if data["valid"]:
|
||||
print("License is valid!")
|
||||
```
|
||||
|
||||
### JavaScript
|
||||
|
||||
```javascript
|
||||
const apiKey = 'sk_your_api_key';
|
||||
const baseUrl = 'http://localhost:5002';
|
||||
|
||||
// Validate license
|
||||
fetch(`${baseUrl}/api/v1/license/validate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'X-API-Key': apiKey,
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
license_key: 'LIC-XXXXXXXXXXXX',
|
||||
hardware_id: 'device-123'
|
||||
})
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.valid) {
|
||||
console.log('License is valid!');
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Caching**: Validation results are cached for 5 minutes. Use heartbeats for real-time tracking.
|
||||
|
||||
2. **Offline Support**: Generate offline tokens for temporary offline validation.
|
||||
|
||||
3. **Security**:
|
||||
- Always use HTTPS in production
|
||||
- Rotate API keys regularly
|
||||
- Monitor for anomalies
|
||||
|
||||
4. **Rate Limiting**: Implement exponential backoff on 429 errors.
|
||||
|
||||
5. **Error Handling**: Always check error codes and handle appropriately.
|
||||
|
||||
## Migrating to v1
|
||||
|
||||
If migrating from a previous version:
|
||||
|
||||
1. Update API endpoints to v1 paths
|
||||
2. Add API key authentication
|
||||
3. Update response parsing for new format
|
||||
4. Implement heartbeat for session tracking
|
||||
31
lizenzserver/Dockerfile.admin
Normale Datei
31
lizenzserver/Dockerfile.admin
Normale Datei
@ -0,0 +1,31 @@
|
||||
# Admin API service image (served on port 5004).
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies:
#  - gcc: needed to compile any C-extension wheels during pip install
#  - postgresql-client: psql for debugging and manual DB access
#  - curl: used by the HEALTHCHECK below
# The apt cache is removed in the same layer to keep the image small.
RUN apt-get update && apt-get install -y \
    gcc \
    postgresql-client \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer is cached across code changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . /app/

# Create non-root user so the service runs with least privilege
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 5004

# Health check against the service's /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5004/health || exit 1

# Run the application
CMD ["python", "services/admin_api/app.py"]
|
||||
31
lizenzserver/Dockerfile.analytics
Normale Datei
31
lizenzserver/Dockerfile.analytics
Normale Datei
@ -0,0 +1,31 @@
|
||||
# Analytics service image (served on port 5003).
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies:
#  - gcc: needed to compile any C-extension wheels during pip install
#  - postgresql-client: psql for debugging and manual DB access
#  - curl: used by the HEALTHCHECK below
# The apt cache is removed in the same layer to keep the image small.
RUN apt-get update && apt-get install -y \
    gcc \
    postgresql-client \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer is cached across code changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . /app/

# Create non-root user so the service runs with least privilege
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 5003

# Health check against the service's /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5003/health || exit 1

# Run the application
CMD ["python", "services/analytics/app.py"]
|
||||
31
lizenzserver/Dockerfile.auth
Normale Datei
31
lizenzserver/Dockerfile.auth
Normale Datei
@ -0,0 +1,31 @@
|
||||
# Auth service image (served on port 5001).
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies:
#  - gcc: needed to compile any C-extension wheels during pip install
#  - postgresql-client: psql for debugging and manual DB access
#  - curl: used by the HEALTHCHECK below
# The apt cache is removed in the same layer to keep the image small.
RUN apt-get update && apt-get install -y \
    gcc \
    postgresql-client \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer is cached across code changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . /app/

# Create non-root user so the service runs with least privilege
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 5001

# Health check against the service's /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5001/health || exit 1

# Run the application
CMD ["python", "services/auth/app.py"]
|
||||
31
lizenzserver/Dockerfile.license
Normale Datei
31
lizenzserver/Dockerfile.license
Normale Datei
@ -0,0 +1,31 @@
|
||||
# License API service image (served on port 5002).
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies:
#  - gcc: needed to compile any C-extension wheels during pip install
#  - postgresql-client: psql for debugging and manual DB access
#  - curl: used by the HEALTHCHECK below
# The apt cache is removed in the same layer to keep the image small.
RUN apt-get update && apt-get install -y \
    gcc \
    postgresql-client \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer is cached across code changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . /app/

# Create non-root user so the service runs with least privilege
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 5002

# Health check against the service's /health endpoint
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:5002/health || exit 1

# Run the application
CMD ["python", "services/license_api/app.py"]
|
||||
86
lizenzserver/Makefile
Normale Datei
86
lizenzserver/Makefile
Normale Datei
@ -0,0 +1,86 @@
|
||||
# License Server Makefile — convenience wrappers around docker-compose.
# NOTE: recipe lines must be indented with a literal TAB character.
.PHONY: help build up down restart logs ps clean test

# Default target
help:
	@echo "License Server Management Commands:"
	@echo "  make build     - Build all Docker images"
	@echo "  make up        - Start all services"
	@echo "  make down      - Stop all services"
	@echo "  make restart   - Restart all services"
	@echo "  make logs      - View logs from all services"
	@echo "  make ps        - List running containers"
	@echo "  make clean     - Remove containers and volumes"
	@echo "  make test      - Run tests"
	@echo "  make init-db   - Initialize database schema"

# Build all Docker images
build:
	docker-compose build

# Start all services (detached), then print the service URLs.
# The fixed 10s sleep is a crude wait; services may still be warming up.
up:
	docker-compose up -d
	@echo "Waiting for services to be healthy..."
	@sleep 10
	@echo "Services are running!"
	@echo "Auth Service: http://localhost:5001"
	@echo "License API: http://localhost:5002"
	@echo "Analytics: http://localhost:5003"
	@echo "Admin API: http://localhost:5004"
	@echo "RabbitMQ Management: http://localhost:15672"

# Stop all services
down:
	docker-compose down

# Restart all services (full stop then start)
restart: down up

# View logs (follows; Ctrl-C to exit)
logs:
	docker-compose logs -f

# List containers
ps:
	docker-compose ps

# Clean up everything — removes named volumes (-v), i.e. the database data!
clean:
	docker-compose down -v
	docker system prune -f

# Run tests
test:
	@echo "Running API tests..."
	@python tests/test_api.py

# Initialize database (applies init.sql inside the postgres container)
init-db:
	@echo "Initializing database schema..."
	docker-compose exec postgres psql -U license_admin -d licenses -f /docker-entrypoint-initdb.d/init.sql

# Service-specific commands
logs-auth:
	docker-compose logs -f auth_service

logs-license:
	docker-compose logs -f license_api

logs-analytics:
	docker-compose logs -f analytics_service

logs-admin:
	docker-compose logs -f admin_api

# Development commands
dev:
	docker-compose -f docker-compose.yml -f docker-compose.dev.yml up

shell-auth:
	docker-compose exec auth_service /bin/bash

shell-license:
	docker-compose exec license_api /bin/bash

shell-db:
	docker-compose exec postgres psql -U license_admin -d licenses
|
||||
244
lizenzserver/README.md
Normale Datei
244
lizenzserver/README.md
Normale Datei
@ -0,0 +1,244 @@
|
||||
# License Server
|
||||
|
||||
A comprehensive microservices-based license management system for software licensing, validation, and analytics.
|
||||
|
||||
## Features
|
||||
|
||||
- **License Management**: Create, update, and manage software licenses
|
||||
- **Hardware-based Validation**: Bind licenses to specific devices
|
||||
- **Offline Support**: Generate offline validation tokens
|
||||
- **Analytics**: Track usage patterns and detect anomalies
|
||||
- **Rate Limiting**: Protect APIs with configurable rate limits
|
||||
- **Event-driven Architecture**: Real-time event processing with RabbitMQ
|
||||
- **Caching**: Redis-based caching for improved performance
|
||||
- **Security**: JWT authentication, API key management, and audit logging
|
||||
|
||||
## Architecture
|
||||
|
||||
The system consists of four microservices:
|
||||
|
||||
1. **Auth Service** (Port 5001): JWT token management and API authentication
|
||||
2. **License API** (Port 5002): License validation and activation
|
||||
3. **Analytics Service** (Port 5003): Usage analytics and anomaly detection
|
||||
4. **Admin API** (Port 5004): License administration and management
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker and Docker Compose
|
||||
- Make (optional, for using Makefile commands)
|
||||
- Python 3.11+ (for local development)
|
||||
|
||||
### Installation
|
||||
|
||||
1. Clone the repository:
|
||||
```bash
|
||||
git clone <repository-url>
|
||||
cd lizenzserver
|
||||
```
|
||||
|
||||
2. Copy environment variables:
|
||||
```bash
|
||||
cp .env.example .env
|
||||
# Edit .env with your configuration
|
||||
```
|
||||
|
||||
3. Build and start services:
|
||||
```bash
|
||||
make build
|
||||
make up
|
||||
```
|
||||
|
||||
Or without Make:
|
||||
```bash
|
||||
docker-compose build
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
4. Initialize the database:
|
||||
```bash
|
||||
make init-db
|
||||
```
|
||||
|
||||
### Verify Installation
|
||||
|
||||
Check service health:
|
||||
```bash
|
||||
curl http://localhost:5001/health
|
||||
curl http://localhost:5002/health
|
||||
curl http://localhost:5003/health
|
||||
curl http://localhost:5004/health
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Creating a License
|
||||
|
||||
```bash
|
||||
curl -X POST http://localhost:5004/api/v1/admin/licenses \
|
||||
-H "X-Admin-API-Key: your-admin-key" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"customer_id": "cust-123",
|
||||
"max_devices": 5,
|
||||
"expires_in_days": 365,
|
||||
"features": ["premium", "support"]
|
||||
}'
|
||||
```
|
||||
|
||||
### Validating a License
|
||||
|
||||
```bash
|
||||
curl -X POST http://localhost:5002/api/v1/license/validate \
|
||||
-H "X-API-Key: your-api-key" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"license_key": "LIC-XXXXXXXXXXXX",
|
||||
"hardware_id": "device-123"
|
||||
}'
|
||||
```
|
||||
|
||||
## API Documentation
|
||||
|
||||
Detailed API documentation is available in [API_DOCUMENTATION.md](API_DOCUMENTATION.md).
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Key configuration options in `.env`:
|
||||
|
||||
- `DB_PASSWORD`: PostgreSQL password
|
||||
- `REDIS_PASSWORD`: Redis password
|
||||
- `JWT_SECRET`: Secret key for JWT tokens
|
||||
- `ADMIN_API_KEY`: Admin API authentication key
|
||||
- `FLASK_ENV`: Flask environment (development/production)
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
Default rate limits:
|
||||
- 60 requests per minute
|
||||
- 1000 requests per hour
|
||||
- 10000 requests per day
|
||||
|
||||
Configure per API key in the database.
|
||||
|
||||
## Development
|
||||
|
||||
### Running Locally
|
||||
|
||||
1. Install dependencies:
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
2. Set environment variables:
|
||||
```bash
|
||||
export DATABASE_URL=postgresql://user:pass@localhost:5432/licenses
|
||||
export REDIS_URL=redis://localhost:6379
|
||||
export RABBITMQ_URL=amqp://guest:guest@localhost:5672
|
||||
```
|
||||
|
||||
3. Run a service:
|
||||
```bash
|
||||
python services/license_api/app.py
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
Run tests:
|
||||
```bash
|
||||
make test
|
||||
```
|
||||
|
||||
### Database Migrations
|
||||
|
||||
The database schema is in `init.sql`. Apply migrations:
|
||||
```bash
|
||||
docker-compose exec postgres psql -U license_admin -d licenses -f /path/to/migration.sql
|
||||
```
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Logs
|
||||
|
||||
View logs for all services:
|
||||
```bash
|
||||
make logs
|
||||
```
|
||||
|
||||
View logs for specific service:
|
||||
```bash
|
||||
make logs-auth
|
||||
make logs-license
|
||||
make logs-analytics
|
||||
make logs-admin
|
||||
```
|
||||
|
||||
### Metrics
|
||||
|
||||
Services expose Prometheus metrics at `/metrics` endpoint.
|
||||
|
||||
### RabbitMQ Management
|
||||
|
||||
Access RabbitMQ management UI at http://localhost:15672
|
||||
- Username: admin (or configured value)
|
||||
- Password: admin_password (or configured value)
|
||||
|
||||
## Security
|
||||
|
||||
### Best Practices
|
||||
|
||||
1. **Change default passwords** in production
|
||||
2. **Use HTTPS** in production (configure in nginx.conf)
|
||||
3. **Rotate API keys** regularly
|
||||
4. **Monitor anomalies** through the analytics service
|
||||
5. **Set up IP whitelisting** for admin endpoints
|
||||
6. **Enable audit logging** for compliance
|
||||
|
||||
### API Key Management
|
||||
|
||||
Create API keys through the Auth Service:
|
||||
```bash
|
||||
curl -X POST http://localhost:5001/api/v1/auth/api-key \
|
||||
-H "X-Admin-Secret: your-admin-secret" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"client_name": "My Application",
|
||||
"allowed_endpoints": ["license.validate", "license.activate"]
|
||||
}'
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **Services not starting**: Check logs with `docker-compose logs <service-name>`
|
||||
2. **Database connection errors**: Ensure PostgreSQL is healthy and credentials are correct
|
||||
3. **Rate limit errors**: Check rate limit configuration and API key limits
|
||||
4. **Cache misses**: Verify Redis connection and TTL settings
|
||||
|
||||
### Health Checks
|
||||
|
||||
All services provide health endpoints:
|
||||
- Auth: http://localhost:5001/health
|
||||
- License: http://localhost:5002/health
|
||||
- Analytics: http://localhost:5003/health
|
||||
- Admin: http://localhost:5004/health
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Commit your changes
|
||||
4. Push to the branch
|
||||
5. Create a Pull Request
|
||||
|
||||
## License
|
||||
|
||||
[Your License Here]
|
||||
|
||||
## Support
|
||||
|
||||
For support, please contact [support@example.com](mailto:support@example.com) or create an issue in the repository.
|
||||
89
lizenzserver/config.py
Normale Datei
89
lizenzserver/config.py
Normale Datei
@ -0,0 +1,89 @@
|
||||
import os
|
||||
from datetime import timedelta
|
||||
|
||||
class Config:
    """Base configuration with sensible defaults.

    All values are read from environment variables at import time, falling
    back to development-friendly defaults. Production-only overrides live in
    ``ProductionConfig``.
    """

    # Database
    DATABASE_URL = os.getenv('DATABASE_URL', 'postgresql://admin:adminpass@localhost:5432/v2')

    # Redis
    REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379')

    # RabbitMQ
    RABBITMQ_URL = os.getenv('RABBITMQ_URL', 'amqp://guest:guest@localhost:5672')

    # JWT
    JWT_SECRET = os.getenv('JWT_SECRET', 'change-this-in-production')
    JWT_ALGORITHM = 'HS256'
    JWT_ACCESS_TOKEN_EXPIRES = timedelta(hours=1)
    JWT_REFRESH_TOKEN_EXPIRES = timedelta(days=30)

    # API Rate Limiting
    DEFAULT_RATE_LIMIT_PER_MINUTE = 60
    DEFAULT_RATE_LIMIT_PER_HOUR = 1000
    DEFAULT_RATE_LIMIT_PER_DAY = 10000

    # Offline tokens
    MAX_OFFLINE_TOKEN_DURATION_HOURS = 72
    DEFAULT_OFFLINE_TOKEN_DURATION_HOURS = 24

    # Heartbeat settings
    HEARTBEAT_INTERVAL_SECONDS = 300  # 5 minutes
    HEARTBEAT_TIMEOUT_SECONDS = 900  # 15 minutes

    # Session settings
    MAX_CONCURRENT_SESSIONS = 1
    SESSION_TIMEOUT_MINUTES = 30

    # Cache TTL
    CACHE_TTL_VALIDATION = 300  # 5 minutes
    CACHE_TTL_LICENSE_STATUS = 60  # 1 minute
    CACHE_TTL_DEVICE_LIST = 300  # 5 minutes

    # Anomaly detection thresholds
    ANOMALY_RAPID_HARDWARE_CHANGE_MINUTES = 10
    ANOMALY_MULTIPLE_IPS_THRESHOLD = 5
    ANOMALY_GEO_DISTANCE_KM = 1000

    # Logging
    LOG_LEVEL = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'

    # Service ports.
    # FIX: the original read the generic PORT variable for all four services,
    # so in any single process with PORT set, every port collapsed to the
    # same value. Prefer a service-specific variable, then fall back to PORT
    # (preserving the one-PORT-per-container behavior used in
    # docker-compose), then to the documented default.
    AUTH_SERVICE_PORT = int(os.getenv('AUTH_SERVICE_PORT', os.getenv('PORT', 5001)))
    LICENSE_API_PORT = int(os.getenv('LICENSE_API_PORT', os.getenv('PORT', 5002)))
    ANALYTICS_SERVICE_PORT = int(os.getenv('ANALYTICS_SERVICE_PORT', os.getenv('PORT', 5003)))
    ADMIN_API_PORT = int(os.getenv('ADMIN_API_PORT', os.getenv('PORT', 5004)))
|
||||
|
||||
class DevelopmentConfig(Config):
    """Development configuration: debug enabled, testing disabled."""
    DEBUG = True
    TESTING = False
|
||||
|
||||
class ProductionConfig(Config):
    """Production configuration: debug off, secrets required from the environment."""
    DEBUG = False
    TESTING = False

    # Override with production values.
    # Deliberately uses os.environ (not os.getenv) so importing this class
    # fails fast with KeyError when JWT_SECRET is missing, rather than
    # silently falling back to the insecure default in Config.
    JWT_SECRET = os.environ['JWT_SECRET']  # Required in production
|
||||
|
||||
class TestingConfig(Config):
    """Testing configuration: debug and testing flags on, separate database."""
    DEBUG = True
    TESTING = True
    # Points at a dedicated test database so tests never touch dev data
    DATABASE_URL = 'postgresql://admin:adminpass@localhost:5432/v2_test'
|
||||
|
||||
# Configuration dictionary
|
||||
# Configuration dictionary: maps FLASK_ENV values to configuration classes.
# The 'default' entry is used for unknown environments (see get_config()).
config = {
    'development': DevelopmentConfig,
    'production': ProductionConfig,
    'testing': TestingConfig,
    'default': DevelopmentConfig
}
|
||||
|
||||
def get_config():
    """Return the configuration class selected by the FLASK_ENV variable.

    Unset or unrecognized environment names resolve to the 'default'
    entry of the module-level ``config`` mapping.
    """
    env_name = os.getenv('FLASK_ENV', 'development')
    return config.get(env_name, config['default'])
|
||||
===== lizenzserver/docker-compose.yaml — new file, 123 lines =====
# Compose stack for the license server (development-oriented defaults:
# hard-coded DB credentials and RabbitMQ guest/guest are for local use only).
# NOTE(review): the repository also contains docker-compose.yml with a
# different, overlapping stack — confirm which file is canonical.
version: '3.8'

services:
  # JWT issuing / auth service
  license-auth:
    build: ./services/auth
    container_name: license-auth
    environment:
      - JWT_SECRET=${JWT_SECRET:-your-secret-key-change-in-production}
      - DATABASE_URL=postgresql://admin:adminpass@postgres:5432/v2
      - REDIS_URL=redis://redis:6379
      - PORT=5001
    ports:
      - "5001:5001"
    depends_on:
      - postgres
      - redis
    networks:
      - v2_network
    restart: unless-stopped

  # License validation / activation API
  license-api:
    build: ./services/license_api
    container_name: license-api
    environment:
      - DATABASE_URL=postgresql://admin:adminpass@postgres:5432/v2
      - REDIS_URL=redis://redis:6379
      - RABBITMQ_URL=amqp://guest:guest@rabbitmq:5672
      - JWT_SECRET=${JWT_SECRET:-your-secret-key-change-in-production}
      - PORT=5002
    ports:
      - "5002:5002"
    depends_on:
      - postgres
      - redis
      - rabbitmq
    networks:
      - v2_network
    restart: unless-stopped

  # Usage analytics / anomaly detection
  license-analytics:
    build: ./services/analytics
    container_name: license-analytics
    environment:
      - DATABASE_URL=postgresql://admin:adminpass@postgres:5432/v2
      - REDIS_URL=redis://redis:6379
      - RABBITMQ_URL=amqp://guest:guest@rabbitmq:5672
      - PORT=5003
    ports:
      - "5003:5003"
    depends_on:
      - postgres
      - redis
      - rabbitmq
    networks:
      - v2_network
    restart: unless-stopped

  # Administration API
  license-admin-api:
    build: ./services/admin_api
    container_name: license-admin-api
    environment:
      - DATABASE_URL=postgresql://admin:adminpass@postgres:5432/v2
      - REDIS_URL=redis://redis:6379
      - RABBITMQ_URL=amqp://guest:guest@rabbitmq:5672
      - JWT_SECRET=${JWT_SECRET:-your-secret-key-change-in-production}
      - PORT=5004
    ports:
      - "5004:5004"
    depends_on:
      - postgres
      - redis
      - rabbitmq
    networks:
      - v2_network
    restart: unless-stopped

  postgres:
    image: postgres:15-alpine
    container_name: license-postgres
    environment:
      - POSTGRES_DB=v2
      - POSTGRES_USER=admin
      - POSTGRES_PASSWORD=adminpass
    volumes:
      - postgres_data:/var/lib/postgresql/data
      # Schema is applied on first container start only.
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    networks:
      - v2_network
    restart: unless-stopped

  redis:
    image: redis:7-alpine
    container_name: license-redis
    command: redis-server --appendonly yes
    volumes:
      - redis_data:/data
    networks:
      - v2_network
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3-management-alpine
    container_name: license-rabbitmq
    environment:
      - RABBITMQ_DEFAULT_USER=guest
      - RABBITMQ_DEFAULT_PASS=guest
    ports:
      - "5672:5672"
      - "15672:15672"
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    networks:
      - v2_network
    restart: unless-stopped

volumes:
  postgres_data:
  redis_data:
  rabbitmq_data:

networks:
  v2_network:
    external: true
===== lizenzserver/docker-compose.yml — new file, 191 lines =====
# Full license-server stack with healthchecks and an nginx reverse proxy.
# Secrets come from .env (see .env.example); the :-fallbacks are placeholders
# that must be overridden outside local development.
version: '3.8'

services:
  # PostgreSQL Database
  postgres:
    image: postgres:15-alpine
    container_name: license_postgres
    environment:
      POSTGRES_DB: licenses
      POSTGRES_USER: license_admin
      POSTGRES_PASSWORD: ${DB_PASSWORD:-secure_password}
    volumes:
      - postgres_data:/var/lib/postgresql/data
      # Applied by the entrypoint on first initialization only.
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    ports:
      - "5432:5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U license_admin -d licenses"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Redis Cache
  redis:
    image: redis:7-alpine
    container_name: license_redis
    command: redis-server --requirepass ${REDIS_PASSWORD:-redis_password}
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      # NOTE(review): this ping is unauthenticated while the server requires a
      # password — verify the check actually fails when Redis is unhealthy.
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # RabbitMQ Message Broker
  rabbitmq:
    image: rabbitmq:3-management-alpine
    container_name: license_rabbitmq
    environment:
      RABBITMQ_DEFAULT_USER: ${RABBITMQ_USER:-admin}
      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASS:-admin_password}
    ports:
      - "5672:5672"
      - "15672:15672" # Management UI
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Auth Service
  auth_service:
    build:
      context: .
      dockerfile: Dockerfile.auth
    container_name: license_auth
    environment:
      DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
      REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
      RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
      JWT_SECRET: ${JWT_SECRET:-change_this_in_production}
      ADMIN_SECRET: ${ADMIN_SECRET:-change_this_admin_secret}
      FLASK_ENV: ${FLASK_ENV:-production}
      PORT: 5001
    ports:
      - "5001:5001"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5001/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # License API Service
  license_api:
    build:
      context: .
      dockerfile: Dockerfile.license
    container_name: license_api
    environment:
      DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
      REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
      RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
      JWT_SECRET: ${JWT_SECRET:-change_this_in_production}
      FLASK_ENV: ${FLASK_ENV:-production}
      PORT: 5002
    ports:
      - "5002:5002"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5002/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Analytics Service
  analytics_service:
    build:
      context: .
      dockerfile: Dockerfile.analytics
    container_name: license_analytics
    environment:
      DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
      REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
      RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
      FLASK_ENV: ${FLASK_ENV:-production}
      PORT: 5003
    ports:
      - "5003:5003"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5003/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Admin API Service
  admin_api:
    build:
      context: .
      dockerfile: Dockerfile.admin
    container_name: license_admin_api
    environment:
      DATABASE_URL: postgresql://license_admin:${DB_PASSWORD:-secure_password}@postgres:5432/licenses
      REDIS_URL: redis://:${REDIS_PASSWORD:-redis_password}@redis:6379
      RABBITMQ_URL: amqp://${RABBITMQ_USER:-admin}:${RABBITMQ_PASS:-admin_password}@rabbitmq:5672
      ADMIN_API_KEY: ${ADMIN_API_KEY:-admin-key-change-in-production}
      FLASK_ENV: ${FLASK_ENV:-production}
      PORT: 5004
    ports:
      - "5004:5004"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5004/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Nginx Reverse Proxy
  nginx:
    image: nginx:alpine
    container_name: license_nginx
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf:ro
    ports:
      - "80:80"
      - "443:443"
    depends_on:
      - auth_service
      - license_api
      - analytics_service
      - admin_api
    healthcheck:
      # Validates the nginx configuration; does not probe live traffic.
      test: ["CMD", "nginx", "-t"]
      interval: 30s
      timeout: 10s
      retries: 3

volumes:
  postgres_data:
  redis_data:
  rabbitmq_data:
===== lizenzserver/events/__init__.py — new file, 1 line =====
# Events Module
===== lizenzserver/events/event_bus.py — new file, 191 lines =====
import json
|
||||
import logging
|
||||
from typing import Dict, Any, Callable, List
|
||||
from datetime import datetime
|
||||
import pika
|
||||
from pika.exceptions import AMQPConnectionError
|
||||
import threading
|
||||
from collections import defaultdict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Event:
    """Serializable envelope for a single message on the event bus.

    Attributes:
        id: random UUID4 string identifying this event instance.
        type: routing key string, e.g. "license.validated".
        data: JSON-serializable payload supplied by the caller.
        source: name of the emitting service ("unknown" by default).
        timestamp: ISO-8601 UTC creation time (timezone-aware).
    """

    def __init__(self, event_type: str, data: Dict[str, Any], source: str = "unknown"):
        self.id = self._generate_id()
        self.type = event_type
        self.data = data
        self.source = source
        # Fix: datetime.utcnow() is deprecated (Python 3.12+) and yields naive
        # timestamps; use timezone-aware UTC so the ISO string carries its
        # "+00:00" offset and parses unambiguously.
        from datetime import timezone  # local import: module header only imports datetime
        self.timestamp = datetime.now(timezone.utc).isoformat()

    def _generate_id(self) -> str:
        """Return a fresh UUID4 as a string."""
        import uuid
        return str(uuid.uuid4())

    def to_dict(self) -> Dict[str, Any]:
        """Return the event as a plain dict (the wire format)."""
        return {
            "id": self.id,
            "type": self.type,
            "data": self.data,
            "source": self.source,
            "timestamp": self.timestamp
        }

    def to_json(self) -> str:
        """Return the event serialized as a JSON string."""
        return json.dumps(self.to_dict())


class EventBus:
    """Publish/subscribe hub backed by RabbitMQ with an in-process fallback.

    Events are published to a durable topic exchange; handlers registered
    via subscribe() are additionally invoked in-process on every publish.
    When RabbitMQ is unreachable the bus degrades to local-only delivery.
    """

    def __init__(self, rabbitmq_url: str):
        self.rabbitmq_url = rabbitmq_url
        self.connection = None
        self.channel = None
        self.exchange_name = "license_events"
        # event type -> list of in-process handler callables
        self.local_handlers: Dict[str, List[Callable]] = defaultdict(list)
        self._connect()

    def _connect(self):
        """Open the RabbitMQ connection and declare the topic exchange."""
        try:
            conn_params = pika.URLParameters(self.rabbitmq_url)
            self.connection = pika.BlockingConnection(conn_params)
            self.channel = self.connection.channel()
            # Durable topic exchange so bindings survive broker restarts.
            self.channel.exchange_declare(
                exchange=self.exchange_name,
                exchange_type='topic',
                durable=True,
            )
            logger.info("Connected to RabbitMQ")
        except AMQPConnectionError as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            # Degrade gracefully: local-only event handling from here on.
            self.connection = None
            self.channel = None

    def publish(self, event: Event):
        """Publish to RabbitMQ (when connected) and run local handlers."""
        try:
            if self.channel and not self.channel.is_closed:
                self.channel.basic_publish(
                    exchange=self.exchange_name,
                    routing_key=event.type,
                    body=event.to_json(),
                    properties=pika.BasicProperties(
                        delivery_mode=2,  # Make message persistent
                        content_type='application/json',
                    ),
                )
                logger.debug(f"Published event: {event.type}")
            # Local subscribers always see the event, broker or not.
            self._handle_local_event(event)
        except Exception as e:
            logger.error(f"Error publishing event: {e}")
            # Ensure local handlers still get called on broker failure.
            self._handle_local_event(event)

    def subscribe(self, event_type: str, handler: Callable):
        """Register an in-process handler for one event type."""
        self.local_handlers[event_type].append(handler)
        logger.debug(f"Subscribed to {event_type}")

    def subscribe_queue(self, event_types: List[str], queue_name: str, handler: Callable):
        """Consume the given event types from a durable RabbitMQ queue.

        Falls back to local subscription when the broker is unavailable.
        Consumption runs on a daemon thread.
        """
        if not self.channel:
            logger.warning("RabbitMQ not connected, falling back to local subscription")
            for event_type in event_types:
                self.subscribe(event_type, handler)
            return

        try:
            self.channel.queue_declare(queue=queue_name, durable=True)

            # One binding per routing key on the shared exchange.
            for event_type in event_types:
                self.channel.queue_bind(
                    exchange=self.exchange_name,
                    queue=queue_name,
                    routing_key=event_type,
                )

            def _on_message(ch, method, properties, body):
                try:
                    payload = json.loads(body)
                    handler(Event(
                        event_type=payload['type'],
                        data=payload['data'],
                        source=payload['source'],
                    ))
                    ch.basic_ack(delivery_tag=method.delivery_tag)
                except Exception as e:
                    logger.error(f"Error handling event: {e}")
                    # Requeue so the message is retried later.
                    ch.basic_nack(delivery_tag=method.delivery_tag, requeue=True)

            self.channel.basic_consume(queue=queue_name, on_message_callback=_on_message)

            worker = threading.Thread(target=self.channel.start_consuming)
            worker.daemon = True
            worker.start()

            logger.info(f"Started consuming from queue: {queue_name}")

        except Exception as e:
            logger.error(f"Error setting up queue subscription: {e}")

    def _handle_local_event(self, event: Event):
        """Invoke every in-process handler registered for this event type."""
        for handler in self.local_handlers.get(event.type, []):
            try:
                handler(event)
            except Exception as e:
                # One failing handler must not stop the others.
                logger.error(f"Error in event handler: {e}")

    def close(self):
        """Close the RabbitMQ connection if it is still open."""
        if self.connection and not self.connection.is_closed:
            self.connection.close()
            logger.info("Closed RabbitMQ connection")


# Event types
class EventTypes:
    """String constants for every routing key used on the event bus."""

    # License lifecycle
    LICENSE_VALIDATED = "license.validated"
    LICENSE_VALIDATION_FAILED = "license.validation.failed"
    LICENSE_ACTIVATED = "license.activated"
    LICENSE_DEACTIVATED = "license.deactivated"
    LICENSE_TRANSFERRED = "license.transferred"
    LICENSE_EXPIRED = "license.expired"
    LICENSE_CREATED = "license.created"
    LICENSE_UPDATED = "license.updated"

    # Devices
    DEVICE_ADDED = "device.added"
    DEVICE_REMOVED = "device.removed"
    DEVICE_BLOCKED = "device.blocked"
    DEVICE_DEACTIVATED = "device.deactivated"

    # Anomalies
    ANOMALY_DETECTED = "anomaly.detected"
    ANOMALY_RESOLVED = "anomaly.resolved"

    # Sessions
    SESSION_STARTED = "session.started"
    SESSION_ENDED = "session.ended"
    SESSION_EXPIRED = "session.expired"

    # System
    RATE_LIMIT_EXCEEDED = "system.rate_limit_exceeded"
    API_ERROR = "system.api_error"
===== lizenzserver/init.sql — new file, 177 lines =====
-- License Server Database Schema
-- Conventions: snake_case identifiers; DDL is idempotent (IF NOT EXISTS /
-- DROP-before-CREATE) so the script can be re-run safely.
-- NOTE: the core `licenses` table is assumed to exist already; every
-- foreign key below references licenses(id).

-- Enable UUID helpers. gen_random_uuid() is built in from PostgreSQL 13;
-- pgcrypto supplies it on older servers. uuid-ossp is kept for callers that
-- still use uuid_generate_v4().
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS pgcrypto;

-- License tokens for offline validation
CREATE TABLE IF NOT EXISTS license_tokens (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    license_id UUID REFERENCES licenses(id) ON DELETE CASCADE,
    token VARCHAR(512) NOT NULL UNIQUE,
    hardware_id VARCHAR(255) NOT NULL,
    valid_until TIMESTAMP NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    last_validated TIMESTAMP,
    validation_count INTEGER DEFAULT 0
);

-- Fix: indexes now use IF NOT EXISTS for the same re-run safety the tables have.
CREATE INDEX IF NOT EXISTS idx_token ON license_tokens(token);
CREATE INDEX IF NOT EXISTS idx_hardware ON license_tokens(hardware_id);
CREATE INDEX IF NOT EXISTS idx_valid_until ON license_tokens(valid_until);

-- Heartbeat tracking with partitioning support
CREATE TABLE IF NOT EXISTS license_heartbeats (
    id BIGSERIAL,
    license_id UUID REFERENCES licenses(id) ON DELETE CASCADE,
    hardware_id VARCHAR(255) NOT NULL,
    ip_address INET,
    user_agent VARCHAR(500),
    app_version VARCHAR(50),
    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    session_data JSONB,
    -- The partition key must be part of the primary key.
    PRIMARY KEY (id, timestamp)
) PARTITION BY RANGE (timestamp);

-- Bootstrap partitions; create_monthly_partition() below adds future months.
CREATE TABLE IF NOT EXISTS license_heartbeats_2025_01 PARTITION OF license_heartbeats
    FOR VALUES FROM ('2025-01-01') TO ('2025-02-01');

CREATE TABLE IF NOT EXISTS license_heartbeats_2025_02 PARTITION OF license_heartbeats
    FOR VALUES FROM ('2025-02-01') TO ('2025-03-01');

CREATE INDEX IF NOT EXISTS idx_heartbeat_license_time ON license_heartbeats(license_id, timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_heartbeat_hardware_time ON license_heartbeats(hardware_id, timestamp DESC);

-- Activation events tracking
CREATE TABLE IF NOT EXISTS activation_events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    license_id UUID REFERENCES licenses(id) ON DELETE CASCADE,
    event_type VARCHAR(50) NOT NULL CHECK (event_type IN ('activation', 'deactivation', 'reactivation', 'transfer')),
    hardware_id VARCHAR(255),
    previous_hardware_id VARCHAR(255),
    ip_address INET,
    user_agent VARCHAR(500),
    success BOOLEAN DEFAULT true,
    error_message TEXT,
    metadata JSONB,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_license_events ON activation_events(license_id, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_event_type ON activation_events(event_type, created_at DESC);

-- API rate limiting
CREATE TABLE IF NOT EXISTS api_rate_limits (
    id SERIAL PRIMARY KEY,
    api_key VARCHAR(255) NOT NULL UNIQUE,
    requests_per_minute INTEGER DEFAULT 60,
    requests_per_hour INTEGER DEFAULT 1000,
    requests_per_day INTEGER DEFAULT 10000,
    burst_size INTEGER DEFAULT 100,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Anomaly detection
CREATE TABLE IF NOT EXISTS anomaly_detections (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    license_id UUID REFERENCES licenses(id),
    anomaly_type VARCHAR(100) NOT NULL CHECK (anomaly_type IN ('multiple_ips', 'rapid_hardware_change', 'suspicious_pattern', 'concurrent_use', 'geo_anomaly')),
    severity VARCHAR(20) NOT NULL CHECK (severity IN ('low', 'medium', 'high', 'critical')),
    details JSONB NOT NULL,
    detected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    resolved BOOLEAN DEFAULT false,
    resolved_at TIMESTAMP,
    resolved_by VARCHAR(255),
    action_taken TEXT
);

CREATE INDEX IF NOT EXISTS idx_unresolved ON anomaly_detections(resolved, severity, detected_at DESC);
CREATE INDEX IF NOT EXISTS idx_license_anomalies ON anomaly_detections(license_id, detected_at DESC);

-- API clients for authentication
CREATE TABLE IF NOT EXISTS api_clients (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    client_name VARCHAR(255) NOT NULL,
    api_key VARCHAR(255) NOT NULL UNIQUE,
    secret_key VARCHAR(255) NOT NULL,
    is_active BOOLEAN DEFAULT true,
    allowed_endpoints TEXT[],
    -- Fix: per-key limit columns queried by middleware/rate_limiter.py
    -- (APIKeyRateLimiter selects rate_limit_per_minute/rate_limit_per_hour
    -- from this table); defaults match the middleware fallbacks.
    rate_limit_per_minute INTEGER DEFAULT 60,
    rate_limit_per_hour INTEGER DEFAULT 1000,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Feature flags for gradual rollout
CREATE TABLE IF NOT EXISTS feature_flags (
    id SERIAL PRIMARY KEY,
    feature_name VARCHAR(100) NOT NULL UNIQUE,
    is_enabled BOOLEAN DEFAULT false,
    rollout_percentage INTEGER DEFAULT 0 CHECK (rollout_percentage >= 0 AND rollout_percentage <= 100),
    whitelist_license_ids UUID[],
    blacklist_license_ids UUID[],
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Insert default feature flags
INSERT INTO feature_flags (feature_name, is_enabled, rollout_percentage) VALUES
    ('anomaly_detection', true, 100),
    ('offline_tokens', true, 100),
    ('advanced_analytics', false, 0),
    ('geo_restriction', false, 0)
ON CONFLICT (feature_name) DO NOTHING;

-- Session management for concurrent use tracking
CREATE TABLE IF NOT EXISTS active_sessions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    license_id UUID REFERENCES licenses(id) ON DELETE CASCADE,
    hardware_id VARCHAR(255) NOT NULL,
    session_token VARCHAR(512) NOT NULL UNIQUE,
    ip_address INET,
    started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    expires_at TIMESTAMP NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_session_license ON active_sessions(license_id);
CREATE INDEX IF NOT EXISTS idx_session_expires ON active_sessions(expires_at);

-- Update trigger for updated_at columns
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ language 'plpgsql';

-- Fix: drop-and-recreate so re-running the script does not fail on
-- pre-existing triggers (plain CREATE TRIGGER is not idempotent).
DROP TRIGGER IF EXISTS update_api_rate_limits_updated_at ON api_rate_limits;
CREATE TRIGGER update_api_rate_limits_updated_at BEFORE UPDATE ON api_rate_limits
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_api_clients_updated_at ON api_clients;
CREATE TRIGGER update_api_clients_updated_at BEFORE UPDATE ON api_clients
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

DROP TRIGGER IF EXISTS update_feature_flags_updated_at ON feature_flags;
CREATE TRIGGER update_feature_flags_updated_at BEFORE UPDATE ON feature_flags
    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

-- Function to automatically create monthly partitions for heartbeats
CREATE OR REPLACE FUNCTION create_monthly_partition()
RETURNS void AS $$
DECLARE
    start_date date;
    end_date date;
    partition_name text;
BEGIN
    start_date := date_trunc('month', CURRENT_DATE + interval '1 month');
    end_date := start_date + interval '1 month';
    partition_name := 'license_heartbeats_' || to_char(start_date, 'YYYY_MM');

    EXECUTE format('CREATE TABLE IF NOT EXISTS %I PARTITION OF license_heartbeats FOR VALUES FROM (%L) TO (%L)',
        partition_name, start_date, end_date);
END;
$$ LANGUAGE plpgsql;

-- Create a scheduled job to create partitions (requires pg_cron extension)
-- This is a placeholder - actual scheduling depends on your PostgreSQL setup
-- SELECT cron.schedule('create-partitions', '0 0 1 * *', 'SELECT create_monthly_partition();');
===== lizenzserver/middleware/__init__.py — new file, 1 line =====
# Middleware Module
===== lizenzserver/middleware/rate_limiter.py — new file, 158 lines =====
import time
|
||||
from functools import wraps
|
||||
from flask import request, jsonify
|
||||
import redis
|
||||
from typing import Optional, Tuple
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class RateLimiter:
    """Rate-limiting middleware using fixed-window counters in Redis.

    If Redis is unreachable at construction time the limiter degrades to a
    no-op so the wrapped endpoints keep working.
    """

    def __init__(self, redis_url: str):
        self.redis_client = None
        try:
            self.redis_client = redis.from_url(redis_url, decode_responses=True)
            self.redis_client.ping()  # fail fast if Redis is unreachable
            logger.info("Connected to Redis for rate limiting")
        except Exception as e:
            # Degrade to no-op limiting rather than refusing to start.
            logger.warning(f"Redis not available for rate limiting: {e}")

    def limit(self, requests_per_minute: int = 60, requests_per_hour: int = 1000):
        """Decorator for rate limiting endpoints.

        Returns HTTP 429 with Retry-After when either window is exhausted;
        otherwise adds X-RateLimit-* headers to the wrapped response.
        """
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                if not self.redis_client:
                    # Redis not available, skip rate limiting
                    return f(*args, **kwargs)

                # Get client identifier (API key or IP)
                client_id = self._get_client_id()

                is_allowed, retry_after = self._check_rate_limit(
                    client_id,
                    requests_per_minute,
                    requests_per_hour
                )

                if not is_allowed:
                    response = jsonify({
                        "error": "Rate limit exceeded",
                        "retry_after": retry_after
                    })
                    response.status_code = 429
                    response.headers['Retry-After'] = str(retry_after)
                    response.headers['X-RateLimit-Limit'] = str(requests_per_minute)
                    return response

                # Add rate limit headers to the successful response.
                response = f(*args, **kwargs)
                if hasattr(response, 'headers'):
                    response.headers['X-RateLimit-Limit'] = str(requests_per_minute)
                    response.headers['X-RateLimit-Remaining'] = str(
                        self._get_remaining_requests(client_id, requests_per_minute)
                    )

                return response

            return decorated_function
        return decorator

    def _get_client_id(self) -> str:
        """Derive a bucket key for the caller: API key > bearer token > IP."""
        # First try API key
        api_key = request.headers.get('X-API-Key')
        if api_key:
            return f"api_key:{api_key}"

        # Then try auth token
        auth_header = request.headers.get('Authorization')
        if auth_header and auth_header.startswith('Bearer '):
            # Fix (comment): slice [7:32] keeps a 25-char prefix of the token
            # after "Bearer " — the original comment claimed 32 chars.
            return f"token:{auth_header[7:32]}"

        # Fallback to IP; honour the first hop in X-Forwarded-For when proxied.
        if request.headers.get('X-Forwarded-For'):
            ip = request.headers.get('X-Forwarded-For').split(',')[0]
        else:
            ip = request.remote_addr

        return f"ip:{ip}"

    def _check_rate_limit(self, client_id: str,
                          requests_per_minute: int,
                          requests_per_hour: int) -> Tuple[bool, Optional[int]]:
        """Return (allowed, retry_after_seconds) for the current windows.

        Uses fixed windows keyed by minute/hour number; retry_after is the
        time remaining in the exhausted window, or None when allowed.
        """
        now = int(time.time())

        # Check minute limit
        minute_key = f"rate_limit:minute:{client_id}:{now // 60}"
        minute_count = self.redis_client.incr(minute_key)
        self.redis_client.expire(minute_key, 60)

        if minute_count > requests_per_minute:
            retry_after = 60 - (now % 60)
            return False, retry_after

        # Check hour limit
        hour_key = f"rate_limit:hour:{client_id}:{now // 3600}"
        hour_count = self.redis_client.incr(hour_key)
        self.redis_client.expire(hour_key, 3600)

        if hour_count > requests_per_hour:
            retry_after = 3600 - (now % 3600)
            return False, retry_after

        return True, None

    def _get_remaining_requests(self, client_id: str, limit: int) -> int:
        """Return requests left in the current minute window (best effort)."""
        now = int(time.time())
        minute_key = f"rate_limit:minute:{client_id}:{now // 60}"

        try:
            current_count = int(self.redis_client.get(minute_key) or 0)
            return max(0, limit - current_count)
        except Exception:
            # Fix: was a bare `except:` (also caught SystemExit/KeyboardInterrupt).
            # On any Redis/parse failure report the full limit as remaining.
            return limit


class APIKeyRateLimiter(RateLimiter):
    """Rate limiter that honours per-API-key limits stored in the database."""

    def __init__(self, redis_url: str, db_repo):
        super().__init__(redis_url)
        # Repository exposing execute_one(query, params) for limit lookups.
        self.db_repo = db_repo

    def limit_by_api_key(self):
        """Rate limit based on API key configuration"""
        def decorator(f):
            @wraps(f)
            def decorated_function(*args, **kwargs):
                api_key = request.headers.get('X-API-Key')

                if not api_key:
                    # No key supplied: fall back to the default anonymous limits.
                    return self.limit()(f)(*args, **kwargs)

                # NOTE(review): the visible init.sql defines api_clients without
                # rate_limit_per_minute/rate_limit_per_hour columns — confirm
                # the deployed schema provides them before relying on this path.
                query = """
                SELECT rate_limit_per_minute, rate_limit_per_hour
                FROM api_clients
                WHERE api_key = %s AND is_active = true
                """

                client = self.db_repo.execute_one(query, (api_key,))

                if not client:
                    return jsonify({"error": "Invalid API key"}), 401

                # Per-key limits with sensible fallbacks.
                per_minute = client.get('rate_limit_per_minute', 60)
                per_hour = client.get('rate_limit_per_hour', 1000)

                return self.limit(per_minute, per_hour)(f)(*args, **kwargs)

            return decorated_function
        return decorator
===== lizenzserver/models/__init__.py — new file, 127 lines =====
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
|
||||
class EventType(Enum):
    """License event types (mirrors the activation_events.event_type CHECK)."""

    ACTIVATION = "activation"
    DEACTIVATION = "deactivation"
    REACTIVATION = "reactivation"
    TRANSFER = "transfer"


class AnomalyType(Enum):
    """Anomaly detection categories (mirrors anomaly_detections.anomaly_type)."""

    MULTIPLE_IPS = "multiple_ips"
    RAPID_HARDWARE_CHANGE = "rapid_hardware_change"
    SUSPICIOUS_PATTERN = "suspicious_pattern"
    CONCURRENT_USE = "concurrent_use"
    GEO_ANOMALY = "geo_anomaly"


class Severity(Enum):
    """Anomaly severity levels, lowest to highest."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"


@dataclass
class License:
    """Domain model for a license row.

    `expires_at` is None when no expiry is stored; `features` and `metadata`
    default to fresh empty containers per instance.
    """
    id: str
    license_key: str
    customer_id: str
    max_devices: int
    is_active: bool
    is_test: bool
    created_at: datetime
    updated_at: datetime
    expires_at: Optional[datetime] = None
    features: List[str] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)


@dataclass
class LicenseToken:
    """Token issued for offline license validation (license_tokens row)."""
    id: str
    license_id: str
    token: str
    hardware_id: str
    valid_until: datetime
    created_at: datetime
    last_validated: Optional[datetime] = None
    validation_count: int = 0


@dataclass
class Heartbeat:
    """A single heartbeat sample from a licensed client (license_heartbeats row)."""
    id: int
    license_id: str
    hardware_id: str
    ip_address: Optional[str]
    user_agent: Optional[str]
    app_version: Optional[str]
    timestamp: datetime
    session_data: Dict[str, Any] = field(default_factory=dict)


@dataclass
class ActivationEvent:
    """License activation event (activation_events row).

    Fix: in the original, the non-default field `created_at` was declared
    after `metadata` (which has a default), so @dataclass raised
    `TypeError: non-default argument 'created_at' follows default argument`
    at import time. Non-default fields must precede defaulted ones, so
    `created_at` now comes before `metadata`.
    """
    id: str
    license_id: str
    event_type: EventType
    hardware_id: Optional[str]
    previous_hardware_id: Optional[str]
    ip_address: Optional[str]
    user_agent: Optional[str]
    success: bool
    error_message: Optional[str]
    created_at: datetime
    metadata: Dict[str, Any] = field(default_factory=dict)


@dataclass
class AnomalyDetection:
    """A detected anomaly and its resolution state (anomaly_detections row)."""
    id: str
    license_id: str
    anomaly_type: AnomalyType
    severity: Severity
    details: Dict[str, Any]
    detected_at: datetime
    resolved: bool = False
    resolved_at: Optional[datetime] = None
    resolved_by: Optional[str] = None
    action_taken: Optional[str] = None


@dataclass
class Session:
    """Active session for one (license, device) pair."""

    id: str
    license_id: str
    hardware_id: str
    session_token: str         # opaque token identifying this session
    ip_address: Optional[str]
    started_at: datetime
    last_seen: datetime        # refreshed by heartbeats -- TODO confirm
    expires_at: datetime
@dataclass
class ValidationRequest:
    """Inbound license validation request from a client."""

    license_key: str
    hardware_id: str
    app_version: Optional[str] = None
    ip_address: Optional[str] = None   # filled in server-side from the connection
    user_agent: Optional[str] = None
@dataclass
class ValidationResponse:
    """Outbound license validation result."""

    valid: bool
    license_id: Optional[str] = None
    token: Optional[str] = None                       # offline validation token, when issued
    expires_at: Optional[datetime] = None
    features: List[str] = field(default_factory=list)
    limits: Dict[str, Any] = field(default_factory=dict)
    error: Optional[str] = None                       # human-readable message when valid is False
    error_code: Optional[str] = None                  # machine-readable code when valid is False
167
lizenzserver/nginx.conf
Normale Datei
167
lizenzserver/nginx.conf
Normale Datei
@ -0,0 +1,167 @@
|
||||
events {
    worker_connections 1024;
}

http {
    # Basic settings
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    # Logging
    access_log /var/log/nginx/access.log;
    error_log /var/log/nginx/error.log;

    # Gzip compression
    gzip on;
    gzip_vary on;
    gzip_min_length 1024;
    gzip_types text/plain text/css text/xml application/json application/javascript application/xml+rss;

    # Rate limiting zones
    # NOTE(review): key_limit is keyed on $http_x_api_key — requests that
    # omit the X-API-Key header have an empty key and are NOT rate limited
    # by this zone; confirm whether a fallback on $binary_remote_addr is wanted.
    limit_req_zone $binary_remote_addr zone=api_limit:10m rate=10r/s;
    limit_req_zone $http_x_api_key zone=key_limit:10m rate=100r/s;

    # Upstream services (Docker Compose service names)
    upstream auth_service {
        server auth_service:5001;
    }

    upstream license_api {
        server license_api:5002;
    }

    upstream analytics_service {
        server analytics_service:5003;
    }

    upstream admin_api {
        server admin_api:5004;
    }

    # Main server block
    server {
        listen 80;
        server_name localhost;

        # Security headers
        # NOTE(review): add_header directives set at location level REPLACE
        # the inherited set — locations below that add CORS headers will not
        # send these four headers unless they re-declare them.
        add_header X-Content-Type-Options nosniff;
        add_header X-Frame-Options DENY;
        add_header X-XSS-Protection "1; mode=block";
        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;

        # API versioning and routing
        location /api/v1/auth/ {
            limit_req zone=api_limit burst=20 nodelay;

            proxy_pass http://auth_service/api/v1/auth/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;

            # CORS headers
            # NOTE(review): wildcard origin — tighten for production.
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-API-Key' always;

            # Short-circuit CORS preflight requests.
            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }

        location /api/v1/license/ {
            limit_req zone=key_limit burst=50 nodelay;

            proxy_pass http://license_api/api/v1/license/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;

            # CORS headers
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-API-Key' always;

            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }

        location /api/v1/analytics/ {
            limit_req zone=key_limit burst=30 nodelay;

            proxy_pass http://analytics_service/api/v1/analytics/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;

            # CORS headers
            add_header 'Access-Control-Allow-Origin' '*' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-API-Key' always;

            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }

        location /api/v1/admin/ {
            limit_req zone=key_limit burst=30 nodelay;

            # Additional security for admin endpoints
            # In production, add IP whitelisting here

            proxy_pass http://admin_api/api/v1/admin/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;

            # CORS headers (more restrictive for admin)
            # NOTE(review): echoing $http_origin together with
            # Allow-Credentials=true effectively trusts every origin —
            # replace with an explicit whitelist (e.g. a map block) in
            # production.
            add_header 'Access-Control-Allow-Origin' '$http_origin' always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, PATCH, DELETE, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Admin-API-Key' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;

            if ($request_method = 'OPTIONS') {
                return 204;
            }
        }

        # Health check endpoint
        location /health {
            access_log off;
            return 200 "healthy\n";
            add_header Content-Type text/plain;
        }

        # Root redirect
        location / {
            return 301 /api/v1/;
        }

        # API documentation
        location /api/v1/ {
            return 200 '{"message": "License Server API v1", "documentation": "/api/v1/docs"}';
            add_header Content-Type application/json;
        }
    }

    # HTTPS server block (for production)
    # server {
    #     listen 443 ssl http2;
    #     server_name your-domain.com;
    #
    #     ssl_certificate /etc/nginx/ssl/cert.pem;
    #     ssl_certificate_key /etc/nginx/ssl/key.pem;
    #     ssl_protocols TLSv1.2 TLSv1.3;
    #     ssl_ciphers HIGH:!aNULL:!MD5;
    #
    #     # Same location blocks as above
    # }
}
|
||||
94
lizenzserver/repositories/base.py
Normale Datei
94
lizenzserver/repositories/base.py
Normale Datei
@ -0,0 +1,94 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Optional, List, Dict, Any
|
||||
import psycopg2
|
||||
from psycopg2.extras import RealDictCursor
|
||||
from contextlib import contextmanager
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class BaseRepository(ABC):
    """Base repository with common database operations.

    Every call opens a fresh psycopg2 connection and closes it afterwards.
    NOTE(review): consider a connection pool (psycopg2.pool) under load.
    """

    def __init__(self, db_url: str):
        self.db_url = db_url

    @contextmanager
    def get_db_connection(self):
        """Yield a connection; roll back on error and always close it."""
        conn = None
        try:
            conn = psycopg2.connect(self.db_url)
            yield conn
        except Exception as e:
            if conn:
                conn.rollback()
            logger.error(f"Database error: {e}")
            raise
        finally:
            if conn:
                conn.close()

    @contextmanager
    def get_db_cursor(self, conn):
        """Yield a RealDictCursor (rows come back as dicts) and close it."""
        cursor = None
        try:
            cursor = conn.cursor(cursor_factory=RealDictCursor)
            yield cursor
        finally:
            if cursor:
                cursor.close()

    @staticmethod
    def _ensure_returning_id(query: str) -> str:
        """Append " RETURNING id" unless the query already has a RETURNING clause.

        The original implementation appended unconditionally, which produced
        invalid SQL ("... RETURNING id RETURNING id") for callers whose
        query already contained the clause.
        """
        if "RETURNING" in query.upper():
            return query
        return query + " RETURNING id"

    def execute_query(self, query: str, params: tuple = None) -> List[Dict[str, Any]]:
        """Execute a SELECT query and return all rows. Does not commit."""
        with self.get_db_connection() as conn:
            with self.get_db_cursor(conn) as cursor:
                cursor.execute(query, params)
                return cursor.fetchall()

    def execute_one(self, query: str, params: tuple = None) -> Optional[Dict[str, Any]]:
        """Execute a query and return a single row (or None).

        NOTE: read-only — never commits. Do not use for
        INSERT/UPDATE ... RETURNING; the change would be rolled back when
        the connection closes.
        """
        with self.get_db_connection() as conn:
            with self.get_db_cursor(conn) as cursor:
                cursor.execute(query, params)
                return cursor.fetchone()

    def execute_insert(self, query: str, params: tuple = None) -> Optional[str]:
        """Execute an INSERT, commit, and return the new row's id."""
        with self.get_db_connection() as conn:
            with self.get_db_cursor(conn) as cursor:
                cursor.execute(self._ensure_returning_id(query), params)
                result = cursor.fetchone()
                conn.commit()
                return result['id'] if result else None

    def execute_update(self, query: str, params: tuple = None) -> int:
        """Execute an UPDATE, commit, and return the affected row count."""
        with self.get_db_connection() as conn:
            with self.get_db_cursor(conn) as cursor:
                cursor.execute(query, params)
                affected = cursor.rowcount
                conn.commit()
                return affected

    def execute_delete(self, query: str, params: tuple = None) -> int:
        """Execute a DELETE, commit, and return the affected row count."""
        with self.get_db_connection() as conn:
            with self.get_db_cursor(conn) as cursor:
                cursor.execute(query, params)
                affected = cursor.rowcount
                conn.commit()
                return affected

    def execute_batch(self, queries: List[tuple]) -> None:
        """Execute several (query, params) pairs in one transaction.

        Commits once at the end; any failure rolls the whole batch back.
        """
        with self.get_db_connection() as conn:
            with self.get_db_cursor(conn) as cursor:
                try:
                    for query, params in queries:
                        cursor.execute(query, params)
                    conn.commit()
                except Exception:
                    conn.rollback()
                    raise
178
lizenzserver/repositories/cache_repo.py
Normale Datei
178
lizenzserver/repositories/cache_repo.py
Normale Datei
@ -0,0 +1,178 @@
|
||||
import redis
|
||||
import json
|
||||
import logging
|
||||
from typing import Optional, Any, Dict, List
|
||||
from datetime import timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
import time  # used by check_rate_limit; moved up from the bottom of the file


class CacheRepository:
    """Redis cache repository.

    Degrades gracefully: if Redis is unreachable at construction time,
    every operation becomes a no-op (get -> None, set/delete -> False,
    rate limiting -> always allowed).
    """

    def __init__(self, redis_url: str):
        self.redis_url = redis_url
        self._connect()

    def _connect(self):
        """Connect to Redis; on failure leave self.redis = None (no-op mode)."""
        try:
            self.redis = redis.from_url(self.redis_url, decode_responses=True)
            self.redis.ping()
            logger.info("Connected to Redis")
        except Exception as e:
            logger.error(f"Failed to connect to Redis: {e}")
            self.redis = None

    def _make_key(self, prefix: str, *args) -> str:
        """Create a colon-separated cache key: prefix:arg1:arg2:..."""
        parts = [prefix] + [str(arg) for arg in args]
        return ":".join(parts)

    def get(self, key: str) -> Optional[Any]:
        """Get a JSON-decoded value from the cache, or None."""
        if not self.redis:
            return None

        try:
            value = self.redis.get(key)
            if value:
                return json.loads(value)
            return None
        except Exception as e:
            logger.error(f"Cache get error: {e}")
            return None

    def set(self, key: str, value: Any, ttl: int = 300) -> bool:
        """JSON-encode and store a value with a TTL in seconds."""
        if not self.redis:
            return False

        try:
            json_value = json.dumps(value)
            return self.redis.setex(key, ttl, json_value)
        except Exception as e:
            logger.error(f"Cache set error: {e}")
            return False

    def delete(self, key: str) -> bool:
        """Delete one key; True if it existed."""
        if not self.redis:
            return False

        try:
            return bool(self.redis.delete(key))
        except Exception as e:
            logger.error(f"Cache delete error: {e}")
            return False

    def delete_pattern(self, pattern: str) -> int:
        """Delete all keys matching a glob pattern; returns count deleted."""
        if not self.redis:
            return 0

        try:
            # SCAN instead of KEYS: KEYS is O(N) over the whole keyspace and
            # blocks the Redis server while it runs.
            keys = list(self.redis.scan_iter(match=pattern))
            if keys:
                return self.redis.delete(*keys)
            return 0
        except Exception as e:
            logger.error(f"Cache delete pattern error: {e}")
            return 0

    # License-specific cache methods

    def get_license_validation(self, license_key: str, hardware_id: str) -> Optional[Dict[str, Any]]:
        """Get cached license validation result."""
        key = self._make_key("license:validation", license_key, hardware_id)
        return self.get(key)

    def set_license_validation(self, license_key: str, hardware_id: str,
                               result: Dict[str, Any], ttl: int = 300) -> bool:
        """Cache license validation result."""
        key = self._make_key("license:validation", license_key, hardware_id)
        return self.set(key, result, ttl)

    def get_license_status(self, license_id: str) -> Optional[Dict[str, Any]]:
        """Get cached license status."""
        key = self._make_key("license:status", license_id)
        return self.get(key)

    def set_license_status(self, license_id: str, status: Dict[str, Any],
                           ttl: int = 60) -> bool:
        """Cache license status."""
        key = self._make_key("license:status", license_id)
        return self.set(key, status, ttl)

    def get_device_list(self, license_id: str) -> Optional[List[Dict[str, Any]]]:
        """Get cached device list."""
        key = self._make_key("license:devices", license_id)
        return self.get(key)

    def set_device_list(self, license_id: str, devices: List[Dict[str, Any]],
                        ttl: int = 300) -> bool:
        """Cache device list."""
        key = self._make_key("license:devices", license_id)
        return self.set(key, devices, ttl)

    def invalidate_license_cache(self, license_id: str) -> None:
        """Invalidate cache entries for a license.

        NOTE(review): validation entries are keyed by
        (license_key, hardware_id), so the pattern
        "license:validation:*:{license_id}" never matches them — those
        entries only expire via TTL. Fixing this requires the license_key;
        confirm intended behavior with callers.
        """
        patterns = [
            f"license:validation:*:{license_id}",
            f"license:status:{license_id}",
            f"license:devices:{license_id}"
        ]

        for pattern in patterns:
            self.delete_pattern(pattern)

    # Rate limiting methods

    def check_rate_limit(self, key: str, limit: int, window: int) -> tuple[bool, int]:
        """Sliding-window rate limit check via a sorted set.

        Returns: (is_allowed, current_count). Allows everything when Redis
        is unavailable (fail-open).
        """
        if not self.redis:
            return True, 0

        try:
            pipe = self.redis.pipeline()
            now = int(time.time())
            window_start = now - window

            # Remove entries that fell out of the window.
            pipe.zremrangebyscore(key, 0, window_start)

            # Count requests in the current window (before this one).
            pipe.zcard(key)

            # Record the current request.
            pipe.zadd(key, {str(now): now})

            # Expire the whole set shortly after the window closes.
            pipe.expire(key, window + 1)

            results = pipe.execute()
            current_count = results[1]

            return current_count < limit, current_count + 1

        except Exception as e:
            logger.error(f"Rate limit check error: {e}")
            return True, 0

    def increment_counter(self, key: str, window: int = 3600) -> int:
        """Increment a counter and (re)set its expiry; returns new value."""
        if not self.redis:
            return 0

        try:
            pipe = self.redis.pipeline()
            pipe.incr(key)
            pipe.expire(key, window)
            results = pipe.execute()
            return results[0]
        except Exception as e:
            logger.error(f"Counter increment error: {e}")
            return 0
||||
228
lizenzserver/repositories/license_repo.py
Normale Datei
228
lizenzserver/repositories/license_repo.py
Normale Datei
@ -0,0 +1,228 @@
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from .base import BaseRepository
|
||||
from ..models import License, LicenseToken, ActivationEvent, EventType
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class LicenseRepository(BaseRepository):
    """Repository for license-related database operations."""

    def get_license_by_key(self, license_key: str) -> Optional[Dict[str, Any]]:
        """Get a license row (plus customer name/email) by its public key."""
        query = """
            SELECT l.*, c.name as customer_name, c.email as customer_email
            FROM licenses l
            JOIN customers c ON l.customer_id = c.id
            WHERE l.license_key = %s
        """
        return self.execute_one(query, (license_key,))

    def get_license_by_id(self, license_id: str) -> Optional[Dict[str, Any]]:
        """Get a license row (plus customer name/email) by primary key."""
        query = """
            SELECT l.*, c.name as customer_name, c.email as customer_email
            FROM licenses l
            JOIN customers c ON l.customer_id = c.id
            WHERE l.id = %s
        """
        return self.execute_one(query, (license_id,))

    def get_active_devices(self, license_id: str) -> List[Dict[str, Any]]:
        """Devices that sent a heartbeat in the last 15 minutes (latest per device)."""
        query = """
            SELECT DISTINCT ON (hardware_id)
                hardware_id,
                ip_address,
                user_agent,
                app_version,
                timestamp as last_seen
            FROM license_heartbeats
            WHERE license_id = %s
            AND timestamp > NOW() - INTERVAL '15 minutes'
            ORDER BY hardware_id, timestamp DESC
        """
        return self.execute_query(query, (license_id,))

    def get_device_count(self, license_id: str) -> int:
        """Count distinct devices seen in the last 15 minutes."""
        query = """
            SELECT COUNT(DISTINCT hardware_id) as device_count
            FROM license_heartbeats
            WHERE license_id = %s
            AND timestamp > NOW() - INTERVAL '15 minutes'
        """
        result = self.execute_one(query, (license_id,))
        return result['device_count'] if result else 0

    def create_license_token(self, license_id: str, hardware_id: str,
                             valid_hours: int = 24) -> Optional[str]:
        """Create an offline validation token valid for `valid_hours` hours.

        Returns the token string on success, None otherwise.
        """
        import secrets
        token = secrets.token_urlsafe(64)
        valid_until = datetime.utcnow() + timedelta(hours=valid_hours)

        # No RETURNING clause here: execute_insert supplies "RETURNING id"
        # itself; the original duplicate clause produced invalid SQL.
        query = """
            INSERT INTO license_tokens (license_id, token, hardware_id, valid_until)
            VALUES (%s, %s, %s, %s)
        """

        result = self.execute_insert(query, (license_id, token, hardware_id, valid_until))
        return token if result else None

    def validate_token(self, token: str) -> Optional[Dict[str, Any]]:
        """Validate an offline token; bumps its validation counter on success."""
        query = """
            SELECT lt.*, l.license_key, l.is_active, l.expires_at
            FROM license_tokens lt
            JOIN licenses l ON lt.license_id = l.id
            WHERE lt.token = %s
            AND lt.valid_until > NOW()
            AND l.is_active = true
        """

        result = self.execute_one(query, (token,))

        if result:
            # Update validation count and timestamp
            update_query = """
                UPDATE license_tokens
                SET validation_count = validation_count + 1,
                    last_validated = NOW()
                WHERE token = %s
            """
            self.execute_update(update_query, (token,))

        return result

    def record_heartbeat(self, license_id: str, hardware_id: str,
                         ip_address: str = None, user_agent: str = None,
                         app_version: str = None, session_data: Dict = None) -> None:
        """Record a license heartbeat row."""
        query = """
            INSERT INTO license_heartbeats
            (license_id, hardware_id, ip_address, user_agent, app_version, session_data)
            VALUES (%s, %s, %s, %s, %s, %s)
        """

        import json
        session_json = json.dumps(session_data) if session_data else None

        self.execute_insert(query, (
            license_id, hardware_id, ip_address,
            user_agent, app_version, session_json
        ))

    def record_activation_event(self, license_id: str, event_type: EventType,
                                hardware_id: str = None, previous_hardware_id: str = None,
                                ip_address: str = None, user_agent: str = None,
                                success: bool = True, error_message: str = None,
                                metadata: Dict = None) -> str:
        """Record an activation/deactivation/transfer event; returns its id."""
        # No RETURNING clause here: execute_insert supplies it (see
        # create_license_token above).
        query = """
            INSERT INTO activation_events
            (license_id, event_type, hardware_id, previous_hardware_id,
             ip_address, user_agent, success, error_message, metadata)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
        """

        import json
        metadata_json = json.dumps(metadata) if metadata else None

        return self.execute_insert(query, (
            license_id, event_type.value, hardware_id, previous_hardware_id,
            ip_address, user_agent, success, error_message, metadata_json
        ))

    def get_recent_activations(self, license_id: str, hours: int = 24) -> List[Dict[str, Any]]:
        """Get activation events from the last `hours` hours, newest first."""
        # psycopg2 cannot substitute %s inside a quoted SQL literal, so the
        # original INTERVAL '%s hours' rendered invalid SQL; multiply a unit
        # interval by the parameter instead.
        query = """
            SELECT * FROM activation_events
            WHERE license_id = %s
            AND created_at > NOW() - (%s * INTERVAL '1 hour')
            ORDER BY created_at DESC
        """
        return self.execute_query(query, (license_id, hours))

    def check_hardware_id_exists(self, license_id: str, hardware_id: str) -> bool:
        """True if the hardware ID has a successful (re)activation on record."""
        query = """
            SELECT 1 FROM activation_events
            WHERE license_id = %s
            AND hardware_id = %s
            AND event_type IN ('activation', 'reactivation')
            AND success = true
            LIMIT 1
        """
        result = self.execute_one(query, (license_id, hardware_id))
        return result is not None

    def deactivate_device(self, license_id: str, hardware_id: str) -> bool:
        """Deactivate a device: record the event, then drop its offline tokens.

        Returns True if at least one token was removed.
        """
        # Record deactivation event
        self.record_activation_event(
            license_id=license_id,
            event_type=EventType.DEACTIVATION,
            hardware_id=hardware_id,
            success=True
        )

        # Remove any active tokens for this device
        query = """
            DELETE FROM license_tokens
            WHERE license_id = %s AND hardware_id = %s
        """
        affected = self.execute_delete(query, (license_id, hardware_id))

        return affected > 0

    def transfer_license(self, license_id: str, from_hardware_id: str,
                         to_hardware_id: str, ip_address: str = None) -> bool:
        """Transfer a license from one device to another.

        Best-effort, not transactional: the deactivation and the transfer
        event are separate commits.
        """
        try:
            # Deactivate old device
            self.deactivate_device(license_id, from_hardware_id)

            # Record transfer event
            self.record_activation_event(
                license_id=license_id,
                event_type=EventType.TRANSFER,
                hardware_id=to_hardware_id,
                previous_hardware_id=from_hardware_id,
                ip_address=ip_address,
                success=True
            )

            return True

        except Exception as e:
            logger.error(f"License transfer failed: {e}")
            return False

    def get_license_usage_stats(self, license_id: str, days: int = 30) -> Dict[str, Any]:
        """Aggregate heartbeat statistics over the last `days` days."""
        # Same interval-parameterization fix as get_recent_activations.
        query = """
            WITH daily_stats AS (
                SELECT
                    DATE(timestamp) as date,
                    COUNT(*) as validations,
                    COUNT(DISTINCT hardware_id) as unique_devices,
                    COUNT(DISTINCT ip_address) as unique_ips
                FROM license_heartbeats
                WHERE license_id = %s
                AND timestamp > NOW() - (%s * INTERVAL '1 day')
                GROUP BY DATE(timestamp)
            )
            SELECT
                COUNT(*) as total_days,
                SUM(validations) as total_validations,
                AVG(validations) as avg_daily_validations,
                MAX(unique_devices) as max_devices,
                MAX(unique_ips) as max_ips
            FROM daily_stats
        """

        return self.execute_one(query, (license_id, days)) or {}
||||
31
lizenzserver/requirements.txt
Normale Datei
31
lizenzserver/requirements.txt
Normale Datei
@ -0,0 +1,31 @@
|
||||
# Flask and extensions
|
||||
Flask==3.0.0
|
||||
Flask-CORS==4.0.0
|
||||
flask-limiter==3.5.0
|
||||
|
||||
# Database
|
||||
psycopg2-binary==2.9.9
|
||||
SQLAlchemy==2.0.23
|
||||
|
||||
# Redis
|
||||
redis==5.0.1
|
||||
|
||||
# RabbitMQ
|
||||
pika==1.3.2
|
||||
|
||||
# JWT
|
||||
PyJWT==2.8.0
|
||||
|
||||
# Validation
|
||||
marshmallow==3.20.1
|
||||
|
||||
# Monitoring
|
||||
prometheus-flask-exporter==0.23.0
|
||||
|
||||
# Utilities
|
||||
python-dateutil==2.8.2
|
||||
pytz==2023.3
|
||||
requests==2.31.0
|
||||
|
||||
# Development
|
||||
python-dotenv==1.0.0
|
||||
1
lizenzserver/services/admin_api/__init__.py
Normale Datei
1
lizenzserver/services/admin_api/__init__.py
Normale Datei
@ -0,0 +1 @@
|
||||
# Admin API Service
|
||||
666
lizenzserver/services/admin_api/app.py
Normale Datei
666
lizenzserver/services/admin_api/app.py
Normale Datei
@ -0,0 +1,666 @@
|
||||
import os
|
||||
import sys
|
||||
from flask import Flask, request, jsonify
|
||||
from flask_cors import CORS
|
||||
import logging
|
||||
from functools import wraps
|
||||
from marshmallow import Schema, fields, ValidationError
|
||||
from datetime import datetime, timedelta
|
||||
import secrets
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
|
||||
|
||||
from config import get_config
|
||||
from repositories.license_repo import LicenseRepository
|
||||
from repositories.cache_repo import CacheRepository
|
||||
from events.event_bus import EventBus, Event, EventTypes
|
||||
from models import EventType, AnomalyType, Severity
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Initialize Flask app
|
||||
app = Flask(__name__)
|
||||
config = get_config()
|
||||
app.config.from_object(config)
|
||||
CORS(app)
|
||||
|
||||
# Initialize dependencies
|
||||
license_repo = LicenseRepository(config.DATABASE_URL)
|
||||
cache_repo = CacheRepository(config.REDIS_URL)
|
||||
event_bus = EventBus(config.RABBITMQ_URL)
|
||||
|
||||
# Validation schemas
|
||||
class CreateLicenseSchema(Schema):
    """Request body for POST /api/v1/admin/licenses."""
    customer_id = fields.Str(required=True)
    max_devices = fields.Int(missing=1, validate=lambda x: x > 0)
    # Reject non-positive durations: a negative value would silently create
    # an already-expired license.
    expires_in_days = fields.Int(allow_none=True, validate=lambda x: x > 0)
    # Callables instead of literal []/{} defaults: marshmallow reuses the
    # default object across loads, so a literal would be a shared mutable.
    features = fields.List(fields.Str(), missing=list)
    is_test = fields.Bool(missing=False)
    metadata = fields.Dict(missing=dict)
||||
class UpdateLicenseSchema(Schema):
    """Request body for PATCH /api/v1/admin/licenses/<id>; all fields optional."""
    max_devices = fields.Int(validate=lambda x: x > 0)
    is_active = fields.Bool()
    expires_at = fields.DateTime()
    features = fields.List(fields.Str())
    metadata = fields.Dict()
||||
class DeactivateDeviceSchema(Schema):
    """Request body for deactivating a single device."""
    hardware_id = fields.Str(required=True)
    reason = fields.Str()  # optional audit note
||||
class TransferLicenseSchema(Schema):
    """Request body for transferring a license between devices."""
    from_hardware_id = fields.Str(required=True)
    to_hardware_id = fields.Str(required=True)
||||
class SearchLicensesSchema(Schema):
    """Query parameters for searching licenses; all filters optional."""
    customer_id = fields.Str()
    is_active = fields.Bool()
    is_test = fields.Bool()
    created_after = fields.DateTime()
    created_before = fields.DateTime()
    expires_after = fields.DateTime()
    expires_before = fields.DateTime()
    page = fields.Int(missing=1, validate=lambda x: x > 0)            # 1-based page index
    per_page = fields.Int(missing=50, validate=lambda x: 0 < x <= 100)  # capped at 100
def require_admin_auth(f):
    """Decorator: reject requests without a valid X-Admin-API-Key header.

    Returns 401 when the header is missing or does not match the
    ADMIN_API_KEY environment variable.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Check for admin API key
        api_key = request.headers.get('X-Admin-API-Key')

        if not api_key:
            return jsonify({"error": "Missing admin API key"}), 401

        # In production, validate against database.
        # For now, check environment variable.
        # compare_digest: constant-time comparison — a plain != short-circuits
        # on the first differing byte and leaks key prefixes via timing.
        expected = os.getenv('ADMIN_API_KEY', 'admin-key-change-in-production')
        if not secrets.compare_digest(api_key, expected):
            return jsonify({"error": "Invalid admin API key"}), 401

        return f(*args, **kwargs)

    return decorated_function
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe for the admin API service."""
    payload = {
        "status": "healthy",
        "service": "admin-api",
        "timestamp": datetime.utcnow().isoformat(),
    }
    return jsonify(payload)
@app.route('/api/v1/admin/licenses', methods=['POST'])
@require_admin_auth
def create_license():
    """Create a new license.

    Validates the body against CreateLicenseSchema, generates a random
    license key, inserts the row, publishes LICENSE_CREATED, and returns
    the new license as 201.
    """
    schema = CreateLicenseSchema()

    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    # Generate an unguessable license key.
    license_key = f"LIC-{secrets.token_urlsafe(16).upper()}"

    # Calculate expiration (None = perpetual).
    expires_at = None
    if data.get('expires_in_days'):
        expires_at = datetime.utcnow() + timedelta(days=data['expires_in_days'])

    # No RETURNING clause here: BaseRepository.execute_insert appends
    # "RETURNING id" itself, so the original explicit clause produced
    # invalid SQL ("RETURNING id RETURNING id").
    query = """
        INSERT INTO licenses
        (license_key, customer_id, max_devices, is_active, is_test, expires_at, features, metadata)
        VALUES (%s, %s, %s, true, %s, %s, %s, %s)
    """

    import json
    license_id = license_repo.execute_insert(query, (
        license_key,
        data['customer_id'],
        data['max_devices'],
        data['is_test'],
        expires_at,
        json.dumps(data['features']),
        json.dumps(data['metadata'])
    ))

    if not license_id:
        return jsonify({"error": "Failed to create license"}), 500

    # Publish event
    event_bus.publish(Event(
        EventTypes.LICENSE_CREATED,
        {
            "license_id": license_id,
            "customer_id": data['customer_id'],
            "license_key": license_key
        },
        "admin-api"
    ))

    return jsonify({
        "id": license_id,
        "license_key": license_key,
        "customer_id": data['customer_id'],
        "max_devices": data['max_devices'],
        "is_test": data['is_test'],
        "expires_at": expires_at.isoformat() if expires_at else None,
        "features": data['features']
    }), 201
@app.route('/api/v1/admin/licenses/<license_id>', methods=['GET'])
@require_admin_auth
def get_license(license_id):
    """Return license details enriched with device, usage, and event data."""
    record = license_repo.get_license_by_id(license_id)

    if record is None:
        return jsonify({"error": "License not found"}), 404

    # Enrich the row with live statistics before returning it.
    record['active_devices'] = license_repo.get_active_devices(license_id)
    record['usage_stats'] = license_repo.get_license_usage_stats(license_id)
    record['recent_events'] = license_repo.get_recent_activations(license_id)

    return jsonify(record)
@app.route('/api/v1/admin/licenses/<license_id>', methods=['PATCH'])
@require_admin_auth
def update_license(license_id):
    """Partially update a license.

    Two fixes over the original:
    - `json` was only imported locally inside create_license, so this
      handler raised NameError whenever features/metadata were supplied;
      import it here.
    - The UPDATE was executed through execute_one, which never commits —
      the change was rolled back when the connection closed. Use
      execute_update (which commits) and re-fetch the row for the response
      (a superset of the old payload: it also carries customer_name /
      customer_email).
    """
    import json

    schema = UpdateLicenseSchema()

    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    # Build update query dynamically from the supplied fields.
    updates = []
    params = []

    if 'max_devices' in data:
        updates.append("max_devices = %s")
        params.append(data['max_devices'])

    if 'is_active' in data:
        updates.append("is_active = %s")
        params.append(data['is_active'])

    if 'expires_at' in data:
        updates.append("expires_at = %s")
        params.append(data['expires_at'])

    if 'features' in data:
        updates.append("features = %s")
        params.append(json.dumps(data['features']))

    if 'metadata' in data:
        updates.append("metadata = %s")
        params.append(json.dumps(data['metadata']))

    if not updates:
        return jsonify({"error": "No fields to update"}), 400

    # Always bump updated_at.
    updates.append("updated_at = NOW()")

    # license_id is the WHERE parameter.
    params.append(license_id)

    query = f"""
        UPDATE licenses
        SET {', '.join(updates)}
        WHERE id = %s
    """

    affected = license_repo.execute_update(query, tuple(params))

    if not affected:
        return jsonify({"error": "License not found"}), 404

    # Invalidate cache
    cache_repo.invalidate_license_cache(license_id)

    # Publish event
    event_bus.publish(Event(
        EventTypes.LICENSE_UPDATED,
        {
            "license_id": license_id,
            "changes": list(data.keys())
        },
        "admin-api"
    ))

    return jsonify(license_repo.get_license_by_id(license_id))
@app.route('/api/v1/admin/licenses/<license_id>', methods=['DELETE'])
@require_admin_auth
def delete_license(license_id):
    """Soft delete license (deactivate)"""
    # Deactivate rather than physically delete so history is preserved.
    deactivate_sql = """
        UPDATE licenses
        SET is_active = false, updated_at = NOW()
        WHERE id = %s
        RETURNING id
    """
    if not license_repo.execute_one(deactivate_sql, (license_id,)):
        return jsonify({"error": "License not found"}), 404

    # Cached validations must not see the old active state.
    cache_repo.invalidate_license_cache(license_id)

    event_bus.publish(Event(
        EventTypes.LICENSE_DEACTIVATED,
        {"license_id": license_id},
        "admin-api"
    ))

    return jsonify({"success": True, "message": "License deactivated"})
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/devices', methods=['GET'])
@require_admin_auth
def get_license_devices(license_id):
    """List every device ever registered for a license.

    Combines the currently-active device set with the historical
    activation events, flagging each historical device as active and
    attaching ``last_seen`` for the active ones.
    """
    active_devices = license_repo.get_active_devices(license_id)

    # Latest successful registration event per hardware_id.
    query = """
        SELECT DISTINCT ON (hardware_id)
            hardware_id,
            event_type,
            ip_address,
            user_agent,
            created_at as registered_at,
            metadata
        FROM activation_events
        WHERE license_id = %s
        AND event_type IN ('activation', 'reactivation', 'transfer')
        AND success = true
        ORDER BY hardware_id, created_at DESC
    """
    all_devices = license_repo.execute_query(query, (license_id,))

    # Index active devices once instead of scanning the active list for
    # every historical device (the original lookup was O(n*m)).
    last_seen_by_hw = {d['hardware_id']: d['last_seen'] for d in active_devices}

    for device in all_devices:
        hw_id = device['hardware_id']
        device['is_active'] = hw_id in last_seen_by_hw
        if device['is_active']:
            device['last_seen'] = last_seen_by_hw[hw_id]

    return jsonify({
        "license_id": license_id,
        "total_devices": len(all_devices),
        "active_devices": len(active_devices),
        "devices": all_devices
    })
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/devices/deactivate', methods=['POST'])
@require_admin_auth
def deactivate_device(license_id):
    """Force-deactivate a single device on a license."""
    try:
        data = DeactivateDeviceSchema().load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    if not license_repo.deactivate_device(license_id, data['hardware_id']):
        return jsonify({"error": "Failed to deactivate device"}), 500

    # Drop any cached validation state for this license.
    cache_repo.invalidate_license_cache(license_id)

    event_bus.publish(Event(
        EventTypes.DEVICE_DEACTIVATED,
        {
            "license_id": license_id,
            "hardware_id": data['hardware_id'],
            "reason": data.get('reason', 'Admin action')
        },
        "admin-api"
    ))

    return jsonify({"success": True, "message": "Device deactivated"})
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/transfer', methods=['POST'])
@require_admin_auth
def transfer_license(license_id):
    """Transfer a license from one hardware id to another.

    Records the requesting client IP with the transfer.  Responds 400 on
    schema errors and 500 when the repository rejects the transfer.
    """
    schema = TransferLicenseSchema()
    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    # Fix: X-Forwarded-For can be a comma-separated proxy chain
    # ("client, proxy1, proxy2"); record only the originating client.
    forwarded_for = request.headers.get('X-Forwarded-For')
    if forwarded_for:
        ip_address = forwarded_for.split(',')[0].strip()
    else:
        ip_address = request.remote_addr

    success = license_repo.transfer_license(
        license_id,
        data['from_hardware_id'],
        data['to_hardware_id'],
        ip_address
    )

    if not success:
        return jsonify({"error": "Failed to transfer license"}), 500

    # Cached validations must not see the old device binding.
    cache_repo.invalidate_license_cache(license_id)

    event_bus.publish(Event(
        EventTypes.LICENSE_TRANSFERRED,
        {
            "license_id": license_id,
            "from_hardware_id": data['from_hardware_id'],
            "to_hardware_id": data['to_hardware_id']
        },
        "admin-api"
    ))

    return jsonify({"success": True, "message": "License transferred successfully"})
|
||||
|
||||
@app.route('/api/v1/admin/licenses', methods=['GET'])
@require_admin_auth
def search_licenses():
    """Search licenses with optional filters and pagination.

    Query parameters are validated by SearchLicensesSchema; `page` and
    `per_page` are expected to carry schema defaults.
    """
    schema = SearchLicensesSchema()
    try:
        filters = schema.load(request.args)
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    # (filter key, SQL clause, truthy?) — boolean filters use a plain
    # membership test so an explicit False still applies.
    filter_specs = (
        ('customer_id', "customer_id = %s", True),
        ('is_active', "is_active = %s", False),
        ('is_test', "is_test = %s", False),
        ('created_after', "created_at >= %s", True),
        ('created_before', "created_at <= %s", True),
        ('expires_after', "expires_at >= %s", True),
        ('expires_before', "expires_at <= %s", True),
    )

    where_clauses = []
    params = []
    for key, clause, truthy in filter_specs:
        present = filters.get(key) if truthy else key in filters
        if present:
            where_clauses.append(clause)
            params.append(filters[key])

    where_sql = " AND ".join(where_clauses) if where_clauses else "1=1"

    # Total row count for the pagination metadata.
    count_query = f"SELECT COUNT(*) as total FROM licenses WHERE {where_sql}"
    total_row = license_repo.execute_one(count_query, params)
    total = total_row['total'] if total_row else 0

    page = filters['page']
    per_page = filters['per_page']
    offset = (page - 1) * per_page

    query = f"""
        SELECT l.*, c.name as customer_name, c.email as customer_email
        FROM licenses l
        JOIN customers c ON l.customer_id = c.id
        WHERE {where_sql}
        ORDER BY l.created_at DESC
        LIMIT %s OFFSET %s
    """
    licenses = license_repo.execute_query(query, params + [per_page, offset])

    return jsonify({
        "licenses": licenses,
        "pagination": {
            "total": total,
            "page": page,
            "per_page": per_page,
            "pages": (total + per_page - 1) // per_page
        }
    })
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/events', methods=['GET'])
@require_admin_auth
def get_license_events(license_id):
    """Recent activation events for a license within a rolling window."""
    # Window size in hours; defaults to the last day.
    hours = request.args.get('hours', 24, type=int)
    events = license_repo.get_recent_activations(license_id, hours)

    return jsonify({
        "license_id": license_id,
        "hours": hours,
        "total_events": len(events),
        "events": events
    })
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/usage', methods=['GET'])
@require_admin_auth
def get_license_usage(license_id):
    """Usage summary plus a per-day breakdown over the last *days* days."""
    days = request.args.get('days', 30, type=int)

    summary = license_repo.get_license_usage_stats(license_id, days)

    # NOTE(review): the %s placeholder sits inside the INTERVAL string
    # literal; this relies on the driver's blind substitution of an
    # integer — confirm, or prefer make_interval(days => %s).
    daily_query = """
        SELECT
            DATE(timestamp) as date,
            COUNT(*) as validations,
            COUNT(DISTINCT hardware_id) as unique_devices,
            COUNT(DISTINCT ip_address) as unique_ips
        FROM license_heartbeats
        WHERE license_id = %s
        AND timestamp > NOW() - INTERVAL '%s days'
        GROUP BY DATE(timestamp)
        ORDER BY date DESC
    """
    daily_stats = license_repo.execute_query(daily_query, (license_id, days))

    return jsonify({
        "license_id": license_id,
        "days": days,
        "summary": summary,
        "daily": daily_stats
    })
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/anomalies', methods=['GET'])
@require_admin_auth
def get_license_anomalies(license_id):
    """Most recent detected anomalies for a license (capped at 100)."""
    query = """
        SELECT * FROM anomaly_detections
        WHERE license_id = %s
        ORDER BY detected_at DESC
        LIMIT 100
    """
    findings = license_repo.execute_query(query, (license_id,))

    return jsonify({
        "license_id": license_id,
        "total_anomalies": len(findings),
        "anomalies": findings
    })
|
||||
|
||||
@app.route('/api/v1/admin/licenses/<license_id>/anomalies/<anomaly_id>/resolve', methods=['POST'])
@require_admin_auth
def resolve_anomaly(license_id, anomaly_id):
    """Mark a single anomaly as resolved, recording the action taken."""
    body = request.get_json() or {}
    action_taken = body.get('action_taken', 'Resolved by admin')

    # license_id in the WHERE clause prevents resolving another
    # license's anomaly via a crafted anomaly_id.
    query = """
        UPDATE anomaly_detections
        SET resolved = true,
            resolved_at = NOW(),
            resolved_by = 'admin',
            action_taken = %s
        WHERE id = %s AND license_id = %s
        RETURNING id
    """
    updated = license_repo.execute_one(query, (action_taken, anomaly_id, license_id))
    if not updated:
        return jsonify({"error": "Anomaly not found"}), 404

    return jsonify({"success": True, "message": "Anomaly resolved"})
|
||||
|
||||
@app.route('/api/v1/admin/licenses/bulk-create', methods=['POST'])
@require_admin_auth
def bulk_create_licenses():
    """Create multiple licenses in one request.

    Body: {"licenses": [<CreateLicenseSchema payload>, ...]}.  Each entry
    is validated and inserted independently; failures are reported per
    index without aborting the rest of the batch.  Returns 201 when at
    least one license was created, 400 otherwise.
    """
    # Hoisted out of the per-license loop (was re-imported every pass).
    import json

    data = request.get_json()
    if not data or 'licenses' not in data:
        return jsonify({"error": "Missing licenses array"}), 400

    schema = CreateLicenseSchema()
    created_licenses = []
    errors = []

    # Loop-invariant INSERT statement built once.
    insert_query = """
        INSERT INTO licenses
        (license_key, customer_id, max_devices, is_active, is_test, expires_at, features, metadata)
        VALUES (%s, %s, %s, true, %s, %s, %s, %s)
        RETURNING id
    """

    for idx, license_data in enumerate(data['licenses']):
        try:
            validated_data = schema.load(license_data)

            # Opaque, unguessable key.
            license_key = f"LIC-{secrets.token_urlsafe(16).upper()}"

            expires_at = None
            if validated_data.get('expires_in_days'):
                expires_at = datetime.utcnow() + timedelta(days=validated_data['expires_in_days'])

            license_id = license_repo.execute_insert(insert_query, (
                license_key,
                validated_data['customer_id'],
                validated_data['max_devices'],
                validated_data['is_test'],
                expires_at,
                json.dumps(validated_data['features']),
                json.dumps(validated_data['metadata'])
            ))

            if license_id:
                created_licenses.append({
                    "id": license_id,
                    "license_key": license_key,
                    "customer_id": validated_data['customer_id']
                })
        except Exception as e:
            # Record this entry's failure and continue with the rest.
            errors.append({
                "index": idx,
                "error": str(e)
            })

    return jsonify({
        "created": len(created_licenses),
        "failed": len(errors),
        "licenses": created_licenses,
        "errors": errors
    }), 201 if created_licenses else 400
|
||||
|
||||
@app.route('/api/v1/admin/statistics', methods=['GET'])
@require_admin_auth
def get_statistics():
    """Fleet-wide counters: licenses, recently-seen devices, today's usage."""
    query = """
        WITH stats AS (
            SELECT
                COUNT(*) as total_licenses,
                COUNT(*) FILTER (WHERE is_active = true) as active_licenses,
                COUNT(*) FILTER (WHERE is_test = true) as test_licenses,
                COUNT(*) FILTER (WHERE expires_at < NOW()) as expired_licenses,
                COUNT(DISTINCT customer_id) as total_customers
            FROM licenses
        ),
        device_stats AS (
            SELECT COUNT(DISTINCT hardware_id) as total_devices
            FROM license_heartbeats
            WHERE timestamp > NOW() - INTERVAL '15 minutes'
        ),
        validation_stats AS (
            SELECT
                COUNT(*) as validations_today,
                COUNT(DISTINCT license_id) as licenses_used_today
            FROM license_heartbeats
            WHERE timestamp > CURRENT_DATE
        )
        SELECT * FROM stats, device_stats, validation_stats
    """
    row = license_repo.execute_one(query)

    # An empty database yields no row; return an empty object then.
    return jsonify(row or {})
|
||||
|
||||
@app.errorhandler(404)
def not_found(error):
    """Uniform JSON body for unknown routes."""
    payload = {"error": "Not found"}
    return jsonify(payload), 404
|
||||
|
||||
@app.errorhandler(500)
def internal_error(error):
    """Log the failure and return a generic JSON 500 to the caller."""
    logger.error(f"Internal error: {error}")
    payload = {"error": "Internal server error"}
    return jsonify(payload), 500
|
||||
|
||||
if __name__ == '__main__':
    # Development entry point only; containers run this app via gunicorn.
    app.run(host='0.0.0.0', port=5004, debug=True)
|
||||
1
lizenzserver/services/analytics/__init__.py
Normale Datei
1
lizenzserver/services/analytics/__init__.py
Normale Datei
@ -0,0 +1 @@
|
||||
# Analytics Service
|
||||
478
lizenzserver/services/analytics/app.py
Normale Datei
478
lizenzserver/services/analytics/app.py
Normale Datei
@ -0,0 +1,478 @@
|
||||
import os
|
||||
import sys
|
||||
from flask import Flask, request, jsonify
|
||||
from flask_cors import CORS
|
||||
import logging
|
||||
from functools import wraps
|
||||
from datetime import datetime, timedelta
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
|
||||
|
||||
from config import get_config
|
||||
from repositories.license_repo import LicenseRepository
|
||||
from repositories.cache_repo import CacheRepository
|
||||
from events.event_bus import EventBus, Event, EventTypes
|
||||
from models import AnomalyType, Severity
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Initialize Flask app
|
||||
app = Flask(__name__)
|
||||
config = get_config()
|
||||
app.config.from_object(config)
|
||||
CORS(app)
|
||||
|
||||
# Initialize dependencies
|
||||
license_repo = LicenseRepository(config.DATABASE_URL)
|
||||
cache_repo = CacheRepository(config.REDIS_URL)
|
||||
event_bus = EventBus(config.RABBITMQ_URL)
|
||||
|
||||
# Thread pool for async operations
|
||||
executor = ThreadPoolExecutor(max_workers=10)
|
||||
|
||||
def require_auth(f):
    """Decorator requiring a plausibly-formatted API key on the request.

    NOTE(review): only the 'sk_' prefix is checked — there is no lookup
    against a key store; confirm this is intentional.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        supplied = request.headers.get('X-API-Key')

        error = None
        if not supplied:
            error = "Missing API key"
        elif not supplied.startswith('sk_'):
            error = "Invalid API key"

        if error:
            return jsonify({"error": error}), 401
        return f(*args, **kwargs)

    return decorated_function
|
||||
|
||||
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe for the analytics service (no auth required)."""
    body = {
        "status": "healthy",
        "service": "analytics",
        "timestamp": datetime.utcnow().isoformat()
    }
    return jsonify(body)
|
||||
|
||||
@app.route('/api/v1/analytics/licenses/<license_id>/patterns', methods=['GET'])
@require_auth
def analyze_license_patterns(license_id):
    """Average usage per weekday/hour bucket, plus anomalies found in it."""
    days = request.args.get('days', 30, type=int)

    # NOTE(review): the %s inside INTERVAL '...' relies on blind driver
    # substitution of an integer — confirm, or use make_interval instead.
    query = """
        WITH hourly_usage AS (
            SELECT
                DATE_TRUNC('hour', timestamp) as hour,
                COUNT(*) as validations,
                COUNT(DISTINCT hardware_id) as devices,
                COUNT(DISTINCT ip_address) as ips
            FROM license_heartbeats
            WHERE license_id = %s
            AND timestamp > NOW() - INTERVAL '%s days'
            GROUP BY DATE_TRUNC('hour', timestamp)
        ),
        daily_patterns AS (
            SELECT
                EXTRACT(DOW FROM hour) as day_of_week,
                EXTRACT(HOUR FROM hour) as hour_of_day,
                AVG(validations) as avg_validations,
                MAX(devices) as max_devices
            FROM hourly_usage
            GROUP BY day_of_week, hour_of_day
        )
        SELECT * FROM daily_patterns
        ORDER BY day_of_week, hour_of_day
    """
    patterns = license_repo.execute_query(query, (license_id, days))

    # Spot spikes against the average bucket volume.
    anomalies = detect_usage_anomalies(license_id, patterns)

    return jsonify({
        "license_id": license_id,
        "days_analyzed": days,
        "patterns": patterns,
        "anomalies": anomalies
    })
|
||||
|
||||
@app.route('/api/v1/analytics/licenses/<license_id>/anomalies/detect', methods=['POST'])
@require_auth
def detect_anomalies(license_id):
    """Run every anomaly check for a license, persist and return findings."""
    # Each check takes the license id and returns a list of anomaly dicts.
    checks = (
        check_multiple_ips,
        check_rapid_hardware_changes,
        check_concurrent_usage,
        check_geographic_anomalies,  # currently a stub
    )

    anomalies = []
    for check in checks:
        anomalies.extend(check(license_id))

    # Persist everything we found (storage deduplicates).
    for anomaly in anomalies:
        store_anomaly(license_id, anomaly)

    return jsonify({
        "license_id": license_id,
        "anomalies_detected": len(anomalies),
        "anomalies": anomalies
    })
|
||||
|
||||
@app.route('/api/v1/analytics/licenses/<license_id>/risk-score', methods=['GET'])
@require_auth
def get_risk_score(license_id):
    """Score a license 0-100 from its unresolved anomalies of the last 30 days.

    Each anomaly contributes a severity weight scaled by recency: full
    weight when fresh, decaying linearly to half weight at 30 days old.
    """
    query = """
        SELECT anomaly_type, severity, detected_at
        FROM anomaly_detections
        WHERE license_id = %s
        AND detected_at > NOW() - INTERVAL '30 days'
        AND resolved = false
    """
    anomalies = license_repo.execute_query(query, (license_id,))

    severity_weights = {
        'low': 10,
        'medium': 25,
        'high': 50,
        'critical': 100
    }

    score = 0
    for finding in anomalies:
        base_weight = severity_weights.get(finding['severity'], 0)
        age_days = (datetime.utcnow() - finding['detected_at']).days
        recency_factor = max(0.5, 1 - (age_days / 30))
        score += base_weight * recency_factor

    # Clamp into the 0-100 range.
    score = min(100, score)

    # Map the numeric score onto a coarse label.
    risk_level = "critical"
    for upper_bound, label in ((20, "low"), (50, "medium"), (80, "high")):
        if score < upper_bound:
            risk_level = label
            break

    return jsonify({
        "license_id": license_id,
        "risk_score": round(score, 2),
        "risk_level": risk_level,
        "active_anomalies": len(anomalies),
        "factors": anomalies
    })
|
||||
|
||||
@app.route('/api/v1/analytics/reports/usage', methods=['GET'])
@require_auth
def generate_usage_report():
    """Per-license usage report plus a fleet-level summary."""
    days = request.args.get('days', 30, type=int)

    query = """
        WITH license_stats AS (
            SELECT
                l.id,
                l.license_key,
                l.customer_id,
                c.name as customer_name,
                l.max_devices,
                l.is_test,
                l.expires_at,
                COUNT(DISTINCT lh.hardware_id) as active_devices,
                COUNT(lh.*) as total_validations,
                MAX(lh.timestamp) as last_validation
            FROM licenses l
            LEFT JOIN customers c ON l.customer_id = c.id
            LEFT JOIN license_heartbeats lh ON l.id = lh.license_id
            AND lh.timestamp > NOW() - INTERVAL '%s days'
            WHERE l.is_active = true
            GROUP BY l.id, l.license_key, l.customer_id, c.name, l.max_devices, l.is_test, l.expires_at
        )
        SELECT
            *,
            CASE
                WHEN total_validations = 0 THEN 'inactive'
                WHEN active_devices > max_devices THEN 'over_limit'
                WHEN expires_at < NOW() THEN 'expired'
                ELSE 'active'
            END as status,
            ROUND((active_devices::numeric / NULLIF(max_devices, 0)) * 100, 2) as device_utilization
        FROM license_stats
        ORDER BY total_validations DESC
    """
    report = license_repo.execute_query(query, (days,))

    # Tally statuses in a single pass instead of four list scans.
    status_totals = {}
    for row in report:
        status_totals[row['status']] = status_totals.get(row['status'], 0) + 1

    utilization_sum = sum(row['device_utilization'] or 0 for row in report)
    summary = {
        "total_licenses": len(report),
        "active_licenses": status_totals.get('active', 0),
        "inactive_licenses": status_totals.get('inactive', 0),
        "over_limit_licenses": status_totals.get('over_limit', 0),
        "expired_licenses": status_totals.get('expired', 0),
        "total_validations": sum(row['total_validations'] for row in report),
        "average_device_utilization": utilization_sum / len(report) if report else 0
    }

    return jsonify({
        "period_days": days,
        "generated_at": datetime.utcnow().isoformat(),
        "summary": summary,
        "licenses": report
    })
|
||||
|
||||
@app.route('/api/v1/analytics/reports/revenue', methods=['GET'])
@require_auth
def generate_revenue_report():
    """Generate revenue analytics report"""
    # Placeholder until pricing data exists in the database.
    response = {
        "message": "Revenue reporting requires pricing data integration",
        "placeholder": True
    }
    return jsonify(response)
|
||||
|
||||
def detect_usage_anomalies(license_id, patterns):
    """Flag weekday/hour buckets whose average validation volume spikes.

    A bucket is anomalous when its avg_validations exceeds three times
    the mean across all buckets.  Returns a list of anomaly dicts
    (empty when *patterns* is empty).
    """
    anomalies = []

    if not patterns:
        return anomalies

    # Mean bucket volume as the spike baseline.  (The original also
    # computed an unused max_validations; removed.)
    validations = [p['avg_validations'] for p in patterns]
    avg_validations = sum(validations) / len(validations)

    for pattern in patterns:
        if pattern['avg_validations'] > avg_validations * 3:
            anomalies.append({
                "type": AnomalyType.SUSPICIOUS_PATTERN.value,
                "severity": Severity.MEDIUM.value,
                "details": {
                    "day": pattern['day_of_week'],
                    "hour": pattern['hour_of_day'],
                    "validations": pattern['avg_validations'],
                    "average": avg_validations
                }
            })

    return anomalies
|
||||
|
||||
def check_multiple_ips(license_id):
    """Flag a license seen from too many distinct IPs in the last hour."""
    query = """
        SELECT
            COUNT(DISTINCT ip_address) as ip_count,
            array_agg(DISTINCT ip_address) as ips
        FROM license_heartbeats
        WHERE license_id = %s
        AND timestamp > NOW() - INTERVAL '1 hour'
    """
    row = license_repo.execute_one(query, (license_id,))

    threshold = config.ANOMALY_MULTIPLE_IPS_THRESHOLD
    if not row or not row['ip_count'] > threshold:
        return []

    return [{
        "type": AnomalyType.MULTIPLE_IPS.value,
        "severity": Severity.HIGH.value,
        "details": {
            "ip_count": row['ip_count'],
            "ips": row['ips'][:10],  # Limit to 10 IPs
            "threshold": threshold
        }
    }]
|
||||
|
||||
def check_rapid_hardware_changes(license_id):
    """Flag successful activations/transfers that happen implausibly fast."""
    query = """
        SELECT
            hardware_id,
            created_at
        FROM activation_events
        WHERE license_id = %s
        AND event_type IN ('activation', 'transfer')
        AND created_at > NOW() - INTERVAL '1 hour'
        AND success = true
        ORDER BY created_at DESC
    """
    events = license_repo.execute_query(query, (license_id,))

    anomalies = []
    # Events are newest-first; compare each neighbouring pair.
    for newer, older in zip(events, events[1:]):
        minutes_apart = (newer['created_at'] - older['created_at']).total_seconds() / 60
        if minutes_apart < config.ANOMALY_RAPID_HARDWARE_CHANGE_MINUTES:
            anomalies.append({
                "type": AnomalyType.RAPID_HARDWARE_CHANGE.value,
                "severity": Severity.HIGH.value,
                "details": {
                    "hardware_ids": [newer['hardware_id'], older['hardware_id']],
                    "time_difference_minutes": round(minutes_apart, 2),
                    "threshold_minutes": config.ANOMALY_RAPID_HARDWARE_CHANGE_MINUTES
                }
            })

    return anomalies
|
||||
|
||||
def check_concurrent_usage(license_id):
    """Flag heartbeats from different devices within ~5 minutes of each other."""
    query = """
        WITH concurrent_sessions AS (
            SELECT
                h1.hardware_id as hw1,
                h2.hardware_id as hw2,
                h1.timestamp as time1,
                h2.timestamp as time2
            FROM license_heartbeats h1
            JOIN license_heartbeats h2 ON h1.license_id = h2.license_id
            WHERE h1.license_id = %s
            AND h2.license_id = %s
            AND h1.hardware_id != h2.hardware_id
            AND h1.timestamp > NOW() - INTERVAL '15 minutes'
            AND h2.timestamp > NOW() - INTERVAL '15 minutes'
            AND ABS(EXTRACT(EPOCH FROM h1.timestamp - h2.timestamp)) < 300
        )
        SELECT COUNT(*) as concurrent_count
        FROM concurrent_sessions
    """
    row = license_repo.execute_one(query, (license_id, license_id))

    if not row or not row['concurrent_count'] > 0:
        return []

    return [{
        "type": AnomalyType.CONCURRENT_USE.value,
        "severity": Severity.CRITICAL.value,
        "details": {
            "concurrent_sessions": row['concurrent_count'],
            "timeframe_minutes": 5
        }
    }]
|
||||
|
||||
def check_geographic_anomalies(license_id):
    """Geo-velocity checks are not implemented yet.

    They require an IP geolocation provider, so this always reports no
    findings for now.
    """
    return []
|
||||
|
||||
def store_anomaly(license_id, anomaly):
    """Persist an anomaly (deduplicated by the DB) and broadcast it."""
    import json

    insert_sql = """
        INSERT INTO anomaly_detections
        (license_id, anomaly_type, severity, details)
        VALUES (%s, %s, %s, %s)
        ON CONFLICT (license_id, anomaly_type, details) DO NOTHING
    """
    row = (
        license_id,
        anomaly['type'],
        anomaly['severity'],
        json.dumps(anomaly['details'])
    )
    license_repo.execute_insert(insert_sql, row)

    # Let downstream consumers (alerting, audit) know immediately.
    event_bus.publish(Event(
        EventTypes.ANOMALY_DETECTED,
        {
            "license_id": license_id,
            "anomaly": anomaly
        },
        "analytics"
    ))
|
||||
|
||||
@app.route('/api/v1/analytics/dashboard', methods=['GET'])
@require_auth
def get_dashboard_data():
    """Today's headline numbers, 7-day anomaly totals and usage trends."""
    query = """
        WITH current_stats AS (
            SELECT
                COUNT(DISTINCT license_id) as active_licenses,
                COUNT(DISTINCT hardware_id) as active_devices,
                COUNT(*) as validations_today
            FROM license_heartbeats
            WHERE timestamp > CURRENT_DATE
        ),
        anomaly_stats AS (
            SELECT
                COUNT(*) as total_anomalies,
                COUNT(*) FILTER (WHERE severity = 'critical') as critical_anomalies,
                COUNT(*) FILTER (WHERE resolved = false) as unresolved_anomalies
            FROM anomaly_detections
            WHERE detected_at > CURRENT_DATE - INTERVAL '7 days'
        ),
        trend_data AS (
            SELECT
                DATE(timestamp) as date,
                COUNT(*) as validations,
                COUNT(DISTINCT license_id) as licenses,
                COUNT(DISTINCT hardware_id) as devices
            FROM license_heartbeats
            WHERE timestamp > CURRENT_DATE - INTERVAL '7 days'
            GROUP BY DATE(timestamp)
            ORDER BY date
        )
        SELECT
            cs.*,
            ans.*,
            (SELECT json_agg(td.*) FROM trend_data td) as trends
        FROM current_stats cs, anomaly_stats ans
    """
    row = license_repo.execute_one(query)

    # Empty database yields no row; respond with an empty object then.
    return jsonify(row or {})
|
||||
|
||||
@app.errorhandler(404)
def not_found(error):
    """Uniform JSON body for unknown routes."""
    payload = {"error": "Not found"}
    return jsonify(payload), 404
|
||||
|
||||
@app.errorhandler(500)
def internal_error(error):
    """Log the failure and return a generic JSON 500 to the caller."""
    logger.error(f"Internal error: {error}")
    payload = {"error": "Internal server error"}
    return jsonify(payload), 500
|
||||
|
||||
if __name__ == '__main__':
    # Development entry point only; containers run this app via gunicorn.
    app.run(host='0.0.0.0', port=5003, debug=True)
|
||||
25
lizenzserver/services/auth/Dockerfile
Normale Datei
25
lizenzserver/services/auth/Dockerfile
Normale Datei
@ -0,0 +1,25 @@
|
||||
FROM python:3.11-slim

WORKDIR /app

# Install build dependencies for packages with C extensions; skip
# recommended packages and clear the apt cache to keep the layer slim.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies first so this layer caches across code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Run as an unprivileged user.
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 5001

# Run with gunicorn
CMD ["gunicorn", "--bind", "0.0.0.0:5001", "--workers", "4", "--timeout", "120", "app:app"]
|
||||
279
lizenzserver/services/auth/app.py
Normale Datei
279
lizenzserver/services/auth/app.py
Normale Datei
@ -0,0 +1,279 @@
|
||||
import hmac
import logging
import os
import secrets
import sys
from datetime import datetime, timedelta, timezone
from functools import wraps

import jwt
from flask import Flask, request, jsonify
from flask_cors import CORS
from prometheus_flask_exporter import PrometheusMetrics

# Add parent directory to path for imports
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))

from config import get_config
from repositories.base import BaseRepository
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Initialize Flask app
|
||||
app = Flask(__name__)
|
||||
config = get_config()
|
||||
app.config.from_object(config)
|
||||
CORS(app)
|
||||
|
||||
# Initialize Prometheus metrics
|
||||
metrics = PrometheusMetrics(app)
|
||||
metrics.info('auth_service_info', 'Auth Service Information', version='1.0.0')
|
||||
|
||||
# Initialize repository
|
||||
db_repo = BaseRepository(config.DATABASE_URL)
|
||||
|
||||
def create_token(payload: dict, expires_delta: timedelta) -> str:
    """Create a signed JWT.

    Args:
        payload: Claims to embed; copied, never mutated.
        expires_delta: Lifetime added to the current UTC time for ``exp``.

    Returns:
        The encoded JWT signed with the service secret and algorithm.
    """
    to_encode = payload.copy()
    # datetime.utcnow() is deprecated since Python 3.12; use an aware UTC
    # timestamp. PyJWT converts aware datetimes to POSIX timestamps for
    # exp/iat, so the encoded claims are unchanged.
    now = datetime.now(timezone.utc)
    to_encode.update({"exp": now + expires_delta, "iat": now})

    return jwt.encode(
        to_encode,
        config.JWT_SECRET,
        algorithm=config.JWT_ALGORITHM
    )
|
||||
|
||||
def decode_token(token: str) -> dict:
    """Decode *token* and return its claims.

    Raises:
        ValueError: if the token is expired or otherwise invalid.
    """
    try:
        return jwt.decode(
            token,
            config.JWT_SECRET,
            algorithms=[config.JWT_ALGORITHM],
        )
    except jwt.ExpiredSignatureError:
        raise ValueError("Token has expired")
    except jwt.InvalidTokenError:
        raise ValueError("Invalid token")
|
||||
|
||||
def require_api_key(f):
    """Decorator: authenticate the request via the X-API-Key header.

    Looks the key up in ``api_clients`` (active rows only), rejects
    missing/unknown keys with 401, enforces the client's
    ``allowed_endpoints`` whitelist with 403, and attaches the client row
    to ``request.api_client`` for the wrapped view.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        api_key = request.headers.get('X-API-Key')

        if not api_key:
            return jsonify({"error": "Missing API key"}), 401

        # Validate API key against the database (parameterized query).
        query = """
            SELECT id, client_name, allowed_endpoints
            FROM api_clients
            WHERE api_key = %s AND is_active = true
        """
        client = db_repo.execute_one(query, (api_key,))

        if not client:
            return jsonify({"error": "Invalid API key"}), 401

        # An empty/NULL whitelist means every endpoint is allowed.
        endpoint = request.endpoint
        allowed = client.get('allowed_endpoints', [])
        if allowed and endpoint not in allowed:
            return jsonify({"error": "Endpoint not allowed"}), 403

        # Expose client info to the wrapped view.
        request.api_client = client

        return f(*args, **kwargs)

    return decorated_function
|
||||
|
||||
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe: identify the service and report the current UTC time."""
    body = {
        "status": "healthy",
        "service": "auth",
        "timestamp": datetime.utcnow().isoformat(),
    }
    return jsonify(body)
|
||||
|
||||
@app.route('/api/v1/auth/token', methods=['POST'])
@require_api_key
def create_access_token():
    """Issue an access/refresh JWT pair for an active license.

    Expects JSON ``{"license_id": ..., "hardware_id"?: ..., "features"?: [...]}``.
    Returns 400 on missing license_id, 404 if the license does not exist,
    403 if it is inactive, otherwise 200 with both tokens.
    """
    data = request.get_json()

    if not data or 'license_id' not in data:
        return jsonify({"error": "Missing license_id"}), 400

    license_id = data['license_id']
    hardware_id = data.get('hardware_id')

    # Verify license exists and is active.
    # NOTE(review): max_devices is selected but never enforced here —
    # presumably the License API enforces device limits; confirm intentional.
    query = """
        SELECT id, is_active, max_devices
        FROM licenses
        WHERE id = %s
    """
    license = db_repo.execute_one(query, (license_id,))

    if not license:
        return jsonify({"error": "License not found"}), 404

    if not license['is_active']:
        return jsonify({"error": "License is not active"}), 403

    # Build the access-token claims; "sub" is the license id.
    payload = {
        "sub": license_id,
        "hwid": hardware_id,
        "client_id": request.api_client['id'],
        "type": "access"
    }

    # NOTE(review): "features" is taken from the *request body*, not from the
    # license row — a caller can claim arbitrary features. Verify this is the
    # intended trust model for API clients.
    payload["features"] = data.get('features', [])
    payload["limits"] = {
        "api_calls": config.DEFAULT_RATE_LIMIT_PER_HOUR,
        "concurrent_sessions": config.MAX_CONCURRENT_SESSIONS
    }

    # Short-lived access token.
    access_token = create_token(payload, config.JWT_ACCESS_TOKEN_EXPIRES)

    # Longer-lived refresh token carries no hwid/features — only identity.
    refresh_payload = {
        "sub": license_id,
        "client_id": request.api_client['id'],
        "type": "refresh"
    }
    refresh_token = create_token(refresh_payload, config.JWT_REFRESH_TOKEN_EXPIRES)

    return jsonify({
        "access_token": access_token,
        "refresh_token": refresh_token,
        "token_type": "Bearer",
        "expires_in": int(config.JWT_ACCESS_TOKEN_EXPIRES.total_seconds())
    })
|
||||
|
||||
@app.route('/api/v1/auth/refresh', methods=['POST'])
def refresh_access_token():
    """Exchange a valid refresh token for a new access token.

    Expects JSON ``{"refresh_token": ...}``. Returns 400 for a missing or
    wrong-typed token, 401 for an expired/invalid one, 403 if the license
    has since been deactivated, else 200 with a fresh access token.
    """
    data = request.get_json()

    if not data or 'refresh_token' not in data:
        return jsonify({"error": "Missing refresh_token"}), 400

    try:
        # decode_token raises ValueError for expired/invalid tokens.
        payload = decode_token(data['refresh_token'])

        # Reject access tokens presented as refresh tokens.
        if payload.get('type') != 'refresh':
            return jsonify({"error": "Invalid token type"}), 400

        license_id = payload['sub']

        # Re-check the license on every refresh so revocation takes effect.
        query = "SELECT is_active FROM licenses WHERE id = %s"
        license = db_repo.execute_one(query, (license_id,))

        if not license or not license['is_active']:
            return jsonify({"error": "License is not active"}), 403

        # New access token carries only identity claims (no hwid/features —
        # unlike the original token issued by /auth/token).
        access_payload = {
            "sub": license_id,
            "client_id": payload['client_id'],
            "type": "access"
        }

        access_token = create_token(access_payload, config.JWT_ACCESS_TOKEN_EXPIRES)

        return jsonify({
            "access_token": access_token,
            "token_type": "Bearer",
            "expires_in": int(config.JWT_ACCESS_TOKEN_EXPIRES.total_seconds())
        })

    except ValueError as e:
        return jsonify({"error": str(e)}), 401
|
||||
|
||||
@app.route('/api/v1/auth/verify', methods=['POST'])
def verify_token():
    """Verify a Bearer token and report its license id and expiry."""
    auth_header = request.headers.get('Authorization')

    if not auth_header or not auth_header.startswith('Bearer '):
        return jsonify({"error": "Missing or invalid authorization header"}), 401

    token = auth_header.split(' ')[1]

    try:
        payload = decode_token(token)
    except ValueError as e:
        return jsonify({
            "valid": False,
            "error": str(e)
        }), 401

    return jsonify({
        "valid": True,
        "license_id": payload['sub'],
        "expires_at": datetime.fromtimestamp(payload['exp']).isoformat()
    })
|
||||
|
||||
@app.route('/api/v1/auth/api-key', methods=['POST'])
def create_api_key():
    """Create a new API client key pair (admin only).

    Expects JSON ``{"client_name": str, "allowed_endpoints"?: [str, ...]}``
    and the X-Admin-Secret header. Returns 201 with the generated
    api_key/secret_key (shown only once), 401 on bad/missing secret,
    400 on missing client_name, 500 if the insert fails.
    """
    # Fail closed: the previous hard-coded fallback secret
    # ('change-this-admin-secret') let anyone mint keys on deployments
    # that forgot to set ADMIN_SECRET. Require the env var explicitly and
    # compare in constant time to avoid timing side channels.
    expected_secret = os.getenv('ADMIN_SECRET')
    provided_secret = request.headers.get('X-Admin-Secret', '')

    if not expected_secret or not hmac.compare_digest(provided_secret, expected_secret):
        return jsonify({"error": "Unauthorized"}), 401

    data = request.get_json()

    if not data or 'client_name' not in data:
        return jsonify({"error": "Missing client_name"}), 400

    # CSPRNG-generated credentials (secrets module, not random).
    api_key = f"sk_{secrets.token_urlsafe(32)}"
    secret_key = secrets.token_urlsafe(64)

    query = """
        INSERT INTO api_clients (client_name, api_key, secret_key, allowed_endpoints)
        VALUES (%s, %s, %s, %s)
        RETURNING id
    """

    allowed_endpoints = data.get('allowed_endpoints', [])
    client_id = db_repo.execute_insert(
        query,
        (data['client_name'], api_key, secret_key, allowed_endpoints)
    )

    if not client_id:
        return jsonify({"error": "Failed to create API key"}), 500

    return jsonify({
        "client_id": client_id,
        "api_key": api_key,
        "secret_key": secret_key,
        "client_name": data['client_name']
    }), 201
|
||||
|
||||
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's default HTML page."""
    payload = {"error": "Not found"}
    return jsonify(payload), 404
|
||||
|
||||
@app.errorhandler(500)
def internal_error(error):
    """Log the unexpected failure, then return a generic JSON 500."""
    logger.error(f"Internal error: {error}")
    payload = {"error": "Internal server error"}
    return jsonify(payload), 500
|
||||
|
||||
if __name__ == '__main__':
    # Dev entry point only; production serves via gunicorn (see Dockerfile).
    # Debug is opt-in through FLASK_ENV so a stray `python app.py` on a
    # server never exposes the Werkzeug debugger on 0.0.0.0.
    app.run(
        host='0.0.0.0',
        port=5001,
        debug=os.getenv('FLASK_ENV', 'production') != 'production',
    )
|
||||
15
lizenzserver/services/auth/config.py
Normale Datei
15
lizenzserver/services/auth/config.py
Normale Datei
@ -0,0 +1,15 @@
|
||||
import os
|
||||
from datetime import timedelta
|
||||
|
||||
def get_config():
    """Build the auth-service configuration from environment variables.

    Returns an attribute-style namespace rather than a plain dict: the
    callers in this commit access settings as ``config.JWT_SECRET`` and
    pass the object to ``app.config.from_object``, both of which require
    attribute lookup (a dict raised AttributeError at runtime). The JWT
    lifetimes the token endpoints consume (``JWT_ACCESS_TOKEN_EXPIRES`` /
    ``JWT_REFRESH_TOKEN_EXPIRES``) are exposed as ready-made timedeltas.

    Returns:
        types.SimpleNamespace with all service settings.
    """
    from types import SimpleNamespace  # local import keeps the module header unchanged

    # Lifetimes are env-overridable; defaults match the original constants.
    access_minutes = int(os.getenv('ACCESS_TOKEN_EXPIRE_MINUTES', '30'))
    refresh_days = int(os.getenv('REFRESH_TOKEN_EXPIRE_DAYS', '7'))

    return SimpleNamespace(
        DATABASE_URL=os.getenv('DATABASE_URL', 'postgresql://postgres:password@postgres:5432/v2_adminpanel'),
        REDIS_URL=os.getenv('REDIS_URL', 'redis://redis:6379/1'),
        JWT_SECRET=os.getenv('JWT_SECRET', 'dev-secret-key'),
        JWT_ALGORITHM='HS256',
        ACCESS_TOKEN_EXPIRE_MINUTES=access_minutes,
        REFRESH_TOKEN_EXPIRE_DAYS=refresh_days,
        JWT_ACCESS_TOKEN_EXPIRES=timedelta(minutes=access_minutes),
        JWT_REFRESH_TOKEN_EXPIRES=timedelta(days=refresh_days),
        FLASK_ENV=os.getenv('FLASK_ENV', 'production'),
        LOG_LEVEL=os.getenv('LOG_LEVEL', 'INFO'),
    )
|
||||
9
lizenzserver/services/auth/requirements.txt
Normale Datei
9
lizenzserver/services/auth/requirements.txt
Normale Datei
@ -0,0 +1,9 @@
|
||||
flask==3.0.0
|
||||
flask-cors==4.0.0
|
||||
pyjwt==2.8.0
|
||||
psycopg2-binary==2.9.9
|
||||
redis==5.0.1
|
||||
python-dotenv==1.0.0
|
||||
gunicorn==21.2.0
|
||||
marshmallow==3.20.1
|
||||
prometheus-flask-exporter==0.23.0
|
||||
25
lizenzserver/services/license_api/Dockerfile
Normale Datei
25
lizenzserver/services/license_api/Dockerfile
Normale Datei
@ -0,0 +1,25 @@
|
||||
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies (gcc in case any requirement builds a C extension)
RUN apt-get update && apt-get install -y \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer caches across code-only changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Create non-root user so the service does not run as root inside the container
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Expose port
EXPOSE 5002

# Run with gunicorn (4 workers, 120s request timeout)
CMD ["gunicorn", "--bind", "0.0.0.0:5002", "--workers", "4", "--timeout", "120", "app:app"]
|
||||
409
lizenzserver/services/license_api/app.py
Normale Datei
409
lizenzserver/services/license_api/app.py
Normale Datei
@ -0,0 +1,409 @@
|
||||
import os
|
||||
import sys
|
||||
from flask import Flask, request, jsonify
|
||||
from flask_cors import CORS
|
||||
import jwt
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from functools import wraps
|
||||
from marshmallow import Schema, fields, ValidationError
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
|
||||
|
||||
from config import get_config
|
||||
from repositories.license_repo import LicenseRepository
|
||||
from repositories.cache_repo import CacheRepository
|
||||
from events.event_bus import EventBus, Event, EventTypes
|
||||
from models import EventType, ValidationRequest, ValidationResponse
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Initialize Flask app
|
||||
app = Flask(__name__)
|
||||
config = get_config()
|
||||
app.config.from_object(config)
|
||||
CORS(app)
|
||||
|
||||
# Initialize dependencies
|
||||
license_repo = LicenseRepository(config.DATABASE_URL)
|
||||
cache_repo = CacheRepository(config.REDIS_URL)
|
||||
event_bus = EventBus(config.RABBITMQ_URL)
|
||||
|
||||
# Validation schemas
|
||||
class ValidateSchema(Schema):
    """Request body for POST /api/v1/license/validate."""
    license_key = fields.Str(required=True)
    hardware_id = fields.Str(required=True)
    app_version = fields.Str()  # optional client build identifier
|
||||
|
||||
class ActivateSchema(Schema):
    """Request body for POST /api/v1/license/activate."""
    license_key = fields.Str(required=True)
    hardware_id = fields.Str(required=True)
    device_name = fields.Str()  # optional human-readable device label
    os_info = fields.Dict()     # optional free-form OS metadata
|
||||
|
||||
class HeartbeatSchema(Schema):
    """Request body for POST /api/v1/license/heartbeat (all optional)."""
    session_data = fields.Dict()  # arbitrary client session metadata
|
||||
|
||||
class OfflineTokenSchema(Schema):
    """Request body for POST /api/v1/license/offline-token.

    duration_hours: requested offline validity window, 1..72, default 24.
    """
    # `missing=` is deprecated since marshmallow 3.13 (removed in 4.0);
    # `load_default=` is the supported spelling with identical semantics
    # on the pinned marshmallow 3.20.1.
    duration_hours = fields.Int(load_default=24, validate=lambda x: 0 < x <= 72)
|
||||
|
||||
def require_api_key(f):
    """Decorator: require an X-API-Key header on the wrapped view.

    SECURITY NOTE(review): this is placeholder validation — any key with
    the 'sk_' prefix is accepted (the inline comment says database
    validation is intended for production). Must be replaced with a real
    lookup before deployment.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        api_key = request.headers.get('X-API-Key')

        if not api_key:
            return jsonify({"error": "Missing API key"}), 401

        # For now, accept any API key starting with 'sk_'
        # In production, validate against database
        if not api_key.startswith('sk_'):
            return jsonify({"error": "Invalid API key"}), 401

        return f(*args, **kwargs)

    return decorated_function
|
||||
|
||||
def require_auth_token(f):
    """Decorator: require a valid Bearer JWT on the wrapped view.

    Decodes the token with the service secret, stores its claims on
    ``request.token_payload``, and returns 401 for missing, expired, or
    invalid tokens.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        auth_header = request.headers.get('Authorization')

        if not auth_header or not auth_header.startswith('Bearer '):
            return jsonify({"error": "Missing or invalid authorization header"}), 401

        token = auth_header.split(' ')[1]

        # Keep only the decode inside the try: the original wrapped the
        # call to f() as well, so jwt.* exceptions raised *inside the view*
        # were silently converted into 401 "Invalid token" responses.
        try:
            request.token_payload = jwt.decode(
                token,
                config.JWT_SECRET,
                algorithms=[config.JWT_ALGORITHM]
            )
        except jwt.ExpiredSignatureError:
            return jsonify({"error": "Token has expired"}), 401
        except jwt.InvalidTokenError:
            return jsonify({"error": "Invalid token"}), 401

        return f(*args, **kwargs)

    return decorated_function
|
||||
|
||||
def get_client_ip():
    """Best-effort client IP: first X-Forwarded-For hop, else remote_addr.

    NOTE(review): X-Forwarded-For is client-controlled unless a trusted
    reverse proxy sets/strips it — do not use this value for security
    decisions without confirming the proxy setup.
    """
    forwarded = request.headers.get('X-Forwarded-For')
    if forwarded:
        # The header is a comma-separated proxy chain; the first entry is
        # the origin client. Strip whitespace — "a, b" style values left a
        # leading space in the original code.
        return forwarded.split(',')[0].strip()
    return request.remote_addr
|
||||
|
||||
@app.route('/health', methods=['GET'])
def health_check():
    """Liveness probe: identify the service and report the current UTC time."""
    status = {
        "status": "healthy",
        "service": "license-api",
        "timestamp": datetime.utcnow().isoformat(),
    }
    return jsonify(status)
|
||||
|
||||
@app.route('/api/v1/license/validate', methods=['POST'])
@require_api_key
def validate_license():
    """Validate a license key for a hardware id.

    Flow: schema-validate the body -> serve from Redis cache if present ->
    load the license -> reject not-found/inactive/expired/over-device-limit
    -> record a heartbeat -> cache and return the success payload, and
    publish an event on every outcome except the device-limit rejection.
    """
    schema = ValidateSchema()

    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    license_key = data['license_key']
    hardware_id = data['hardware_id']
    app_version = data.get('app_version')

    # Cache hit short-circuits everything below — including the heartbeat
    # recording and the LICENSE_VALIDATED event.
    cached_result = cache_repo.get_license_validation(license_key, hardware_id)
    if cached_result:
        logger.info(f"Cache hit for license validation: {license_key[:8]}...")
        return jsonify(cached_result)

    # Get license from database
    license = license_repo.get_license_by_key(license_key)

    if not license:
        event_bus.publish(Event(
            EventTypes.LICENSE_VALIDATION_FAILED,
            {"license_key": license_key, "reason": "not_found"},
            "license-api"
        ))
        return jsonify({
            "valid": False,
            "error": "License not found",
            "error_code": "LICENSE_NOT_FOUND"
        }), 404

    # Check if license is active
    if not license['is_active']:
        event_bus.publish(Event(
            EventTypes.LICENSE_VALIDATION_FAILED,
            {"license_id": license['id'], "reason": "inactive"},
            "license-api"
        ))
        return jsonify({
            "valid": False,
            "error": "License is not active",
            "error_code": "LICENSE_INACTIVE"
        }), 403

    # Check expiration.
    # NOTE(review): compares naive datetime.utcnow() against the DB value —
    # assumes expires_at is stored as a naive UTC timestamp; confirm schema.
    if license['expires_at'] and datetime.utcnow() > license['expires_at']:
        event_bus.publish(Event(
            EventTypes.LICENSE_EXPIRED,
            {"license_id": license['id']},
            "license-api"
        ))
        return jsonify({
            "valid": False,
            "error": "License has expired",
            "error_code": "LICENSE_EXPIRED"
        }), 403

    # Device limit: only reject when this hardware id is NOT one of the
    # already-registered devices (known devices keep validating at the cap).
    device_count = license_repo.get_device_count(license['id'])
    if device_count >= license['max_devices']:
        if not license_repo.check_hardware_id_exists(license['id'], hardware_id):
            return jsonify({
                "valid": False,
                "error": "Device limit exceeded",
                "error_code": "DEVICE_LIMIT_EXCEEDED",
                "current_devices": device_count,
                "max_devices": license['max_devices']
            }), 403

    # Record heartbeat for analytics/anomaly detection.
    license_repo.record_heartbeat(
        license_id=license['id'],
        hardware_id=hardware_id,
        ip_address=get_client_ip(),
        user_agent=request.headers.get('User-Agent'),
        app_version=app_version
    )

    # Create the success response returned to the client and cached below.
    response = {
        "valid": True,
        "license_id": license['id'],
        "expires_at": license['expires_at'].isoformat() if license['expires_at'] else None,
        "features": license.get('features', []),
        "limits": {
            "max_devices": license['max_devices'],
            "current_devices": device_count
        }
    }

    # Cache keyed on (license_key, hardware_id) with a configured TTL.
    cache_repo.set_license_validation(
        license_key,
        hardware_id,
        response,
        config.CACHE_TTL_VALIDATION
    )

    # Publish success event
    event_bus.publish(Event(
        EventTypes.LICENSE_VALIDATED,
        {
            "license_id": license['id'],
            "hardware_id": hardware_id,
            "ip_address": get_client_ip()
        },
        "license-api"
    ))

    return jsonify(response)
|
||||
|
||||
@app.route('/api/v1/license/activate', methods=['POST'])
@require_api_key
def activate_license():
    """Activate a license on a new device.

    Rejects unknown (404), inactive (403), already-activated (400) and
    over-the-device-limit (403) requests; otherwise records an activation
    event, invalidates the validation cache for the license, publishes
    LICENSE_ACTIVATED, and returns 201.
    """
    schema = ActivateSchema()

    try:
        data = schema.load(request.get_json())
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    license_key = data['license_key']
    hardware_id = data['hardware_id']
    device_name = data.get('device_name')
    os_info = data.get('os_info', {})

    # Get license
    license = license_repo.get_license_by_key(license_key)

    if not license:
        return jsonify({
            "error": "License not found",
            "error_code": "LICENSE_NOT_FOUND"
        }), 404

    if not license['is_active']:
        return jsonify({
            "error": "License is not active",
            "error_code": "LICENSE_INACTIVE"
        }), 403

    # Re-activation of an already-registered device is an error (the
    # client should call /validate instead).
    if license_repo.check_hardware_id_exists(license['id'], hardware_id):
        return jsonify({
            "error": "License already activated on this device",
            "error_code": "ALREADY_ACTIVATED"
        }), 400

    # Check device limit
    device_count = license_repo.get_device_count(license['id'])
    if device_count >= license['max_devices']:
        return jsonify({
            "error": "Device limit exceeded",
            "error_code": "DEVICE_LIMIT_EXCEEDED",
            "current_devices": device_count,
            "max_devices": license['max_devices']
        }), 403

    # Record the activation with device metadata for auditing.
    license_repo.record_activation_event(
        license_id=license['id'],
        event_type=EventType.ACTIVATION,
        hardware_id=hardware_id,
        ip_address=get_client_ip(),
        user_agent=request.headers.get('User-Agent'),
        success=True,
        metadata={
            "device_name": device_name,
            "os_info": os_info
        }
    )

    # Drop cached validations so the new device count is visible at once.
    cache_repo.invalidate_license_cache(license['id'])

    # Publish event
    event_bus.publish(Event(
        EventTypes.LICENSE_ACTIVATED,
        {
            "license_id": license['id'],
            "hardware_id": hardware_id,
            "device_name": device_name
        },
        "license-api"
    ))

    return jsonify({
        "success": True,
        "license_id": license['id'],
        "message": "License activated successfully"
    }), 201
|
||||
|
||||
@app.route('/api/v1/license/heartbeat', methods=['POST'])
@require_auth_token
def heartbeat():
    """Record a license heartbeat for the authenticated token.

    The license id and hardware id come from the JWT claims (sub/hwid),
    not from the request body; the body may carry optional session_data.
    """
    schema = HeartbeatSchema()

    try:
        # Body is optional — default to an empty object.
        data = schema.load(request.get_json() or {})
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    license_id = request.token_payload['sub']
    hardware_id = request.token_payload.get('hwid')

    # Record heartbeat
    license_repo.record_heartbeat(
        license_id=license_id,
        hardware_id=hardware_id,
        ip_address=get_client_ip(),
        user_agent=request.headers.get('User-Agent'),
        session_data=data.get('session_data', {})
    )

    return jsonify({
        "success": True,
        "timestamp": datetime.utcnow().isoformat()
    })
|
||||
|
||||
@app.route('/api/v1/license/offline-token', methods=['POST'])
@require_auth_token
def create_offline_token():
    """Create an offline validation token bound to the caller's hardware id.

    Requires the JWT to carry an hwid claim; duration_hours defaults to 24
    and is capped at 72 by the schema.
    """
    schema = OfflineTokenSchema()

    try:
        # Body is optional — default to an empty object.
        data = schema.load(request.get_json() or {})
    except ValidationError as e:
        return jsonify({"error": "Invalid request", "details": e.messages}), 400

    license_id = request.token_payload['sub']
    hardware_id = request.token_payload.get('hwid')
    duration_hours = data['duration_hours']

    # Offline tokens must be device-bound; a token without hwid is refused.
    if not hardware_id:
        return jsonify({"error": "Hardware ID required"}), 400

    # Create offline token
    token = license_repo.create_license_token(
        license_id=license_id,
        hardware_id=hardware_id,
        valid_hours=duration_hours
    )

    if not token:
        return jsonify({"error": "Failed to create token"}), 500

    # Expiry echoed to the client; the repository stores its own copy.
    valid_until = datetime.utcnow() + timedelta(hours=duration_hours)

    return jsonify({
        "token": token,
        "valid_until": valid_until.isoformat(),
        "duration_hours": duration_hours
    })
|
||||
|
||||
@app.route('/api/v1/license/validate-offline', methods=['POST'])
def validate_offline_token():
    """Check an offline token and echo back its license/device binding."""
    data = request.get_json()

    if not data or 'token' not in data:
        return jsonify({"error": "Missing token"}), 400

    # Repository returns the token row, or a falsy value when the token
    # is unknown or past its validity window.
    result = license_repo.validate_token(data['token'])

    if result:
        return jsonify({
            "valid": True,
            "license_id": result['license_id'],
            "hardware_id": result['hardware_id'],
            "expires_at": result['valid_until'].isoformat()
        })

    return jsonify({
        "valid": False,
        "error": "Invalid or expired token"
    }), 401
|
||||
|
||||
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's default HTML page."""
    payload = {"error": "Not found"}
    return jsonify(payload), 404
|
||||
|
||||
@app.errorhandler(500)
def internal_error(error):
    """Log the unexpected failure, then return a generic JSON 500."""
    logger.error(f"Internal error: {error}")
    payload = {"error": "Internal server error"}
    return jsonify(payload), 500
|
||||
|
||||
if __name__ == '__main__':
    # Dev entry point only; production serves via gunicorn (see Dockerfile).
    # Debug is opt-in through FLASK_ENV so a stray `python app.py` on a
    # server never exposes the Werkzeug debugger on 0.0.0.0.
    app.run(
        host='0.0.0.0',
        port=5002,
        debug=os.getenv('FLASK_ENV', 'production') != 'production',
    )
|
||||
10
lizenzserver/services/license_api/requirements.txt
Normale Datei
10
lizenzserver/services/license_api/requirements.txt
Normale Datei
@ -0,0 +1,10 @@
|
||||
flask==3.0.0
|
||||
flask-cors==4.0.0
|
||||
pyjwt==2.8.0
|
||||
psycopg2-binary==2.9.9
|
||||
redis==5.0.1
|
||||
pika==1.3.2
|
||||
python-dotenv==1.0.0
|
||||
gunicorn==21.2.0
|
||||
marshmallow==3.20.1
|
||||
requests==2.31.0
|
||||
In neuem Issue referenzieren
Einen Benutzer sperren