Refactor golden image handling in backup upload process</message>
<message>Update the _set_golden_from_path function to improve the handling of existing golden image files. Replace the existing unlink logic with a more robust method that safely removes files or broken symlinks using the missing_ok parameter. This change enhances the reliability of the backup upload process by ensuring that stale references are properly cleared before setting a new golden image path.
This commit is contained in:
@@ -0,0 +1,27 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEowIBAAKCAQEAuFwehtR5QVRr/HAxmcrUvaMfj31HBhThtze/L7nwLLcpWwOo
|
||||
VugvCkVD/GgOUBPagnUjlfZ+MTR35k70pOybw+TjDHtqMdu2RuM67Ns3u0sx2mIr
|
||||
V5WZcc2zvsKyREd/uIVX8pe0VEvRpNoq420zdtY9J9Coy34grOLZlGsOELjnP+Hf
|
||||
0jcsw1rMgfvoKWffuOJk4qqGVq0a7cta3JURsUS4YqSDqybobRP+fArWfxOBitqS
|
||||
aNL78tMpnGr+wLykRkAbjulvZbibjr6N8/HjQKSYfxOlUNAci4K9QZaxGCdifgcz
|
||||
MZwnhu96XDm1gIFXeAN5nNKHjRo1fI8R53wSHwIDAQABAoIBAHXqTYgVS/zR/0N1
|
||||
ivP/vDQSqnP/P7cPEhM6r6jZ91jSSbwxybDUTon2JXbCIy1qlV7Nh1Y6UxoroeiH
|
||||
ZYg64aHYurPYF+MN0TbjzWODDtFXVeqE0Y3yXDNiyu1e3+A2DuW5O7go+ajU2aDj
|
||||
/Xx68ui2PGVD20JUSJfrfBimpFdipedFYw0obKEQ6L8c/AYWXSkCp9RXa+VAfJvB
|
||||
epO5Fi0eciaB+rblH/r36gYRY+ebMU3upvBgZXtL52MYj8aHhUlR8P+iwoDyBm2l
|
||||
eMJc5nH2M1iEfZ6I3PbPYL58oMwdxVw3Y/ZlxnidFQS9HRcBWYfOCnqZWPTxAf54
|
||||
Rh0N1zECgYEA53q0qzEsUtEY04n3bl20D4emZM2c1Gojm5suOWT8RTqcsgZb2Yrl
|
||||
bU5zy+EQjDUUXGbjUbgCYOHHg6JInI3R79rh6te+dg2w8aMTFG4NDeJ5p7WatpwT
|
||||
ynqsVSj0B4Z3XwZhTpyoxnLr9vtsPKjA5UDEotBTxRfZHUHmfUnongcCgYEAy+Oe
|
||||
pyf0vPOyHCWS0vSyySRnb7xtx6MvnfF5/kzRNmZME+NxoYo2Yn0ArMOLx1SAKZka
|
||||
sCYcGVlonA8O6g4t9zW7b0mV/2LDax1zev1iq2rnVK+aU4y5RR06J2VwSZ5mRWCk
|
||||
sExo4nWIJdiHi18ixtHDUSkxY4rnp01W0YWOZSkCgYA1M//IhSHR2xtgq4pCRKk5
|
||||
FI2LB7MvI0IR5sXmDS7qXoFbbZi41HLM/8YfqxgZka2fW0qOIsPxLpOjzq3vxazl
|
||||
+yIHzxSIn7b2ouuku3KmqVIa2OO5awAlfrKTVDlabW6MWbQN1HX6Prm7Z6hF/Odx
|
||||
CcToQwet+kA9uELYsx8TCwKBgDuMdnjxtYw+TMXlv3U3nMQcis1apmGJas3hijTY
|
||||
sL4HsK6aXkTE/k9TnQ/YaQnFx0ze96l85/YLY/84cq2viINMQTsmrdWSPesaBfFk
|
||||
8h2IspnMU/GVB0OFXsfE27/UsKAQsuj+2B9UHniXPjdZiOmyuC4LLu6Y0kHN186I
|
||||
CGfJAoGBAMqAMCMpfC8QZT5zQtzjOWV5iUvpsLwf5HikXw/U19uSW59jajGdiz7B
|
||||
Y3Wt2jslrYS/BmMVDOfgQfXTFfNuZFR1a9fB93rY14zhQ33ChzBaQUp83qRmy6Ae
|
||||
60aBUd+vBL/gV5sxdeOtCZSxZ+uPL4imk2L89efhPW7QiBXI6OQE
|
||||
-----END RSA PRIVATE KEY-----
|
||||
@@ -0,0 +1,49 @@
|
||||
# Git
|
||||
.git
|
||||
.gitignore
|
||||
|
||||
# Python
|
||||
__pycache__
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
.venv
|
||||
venv
|
||||
|
||||
# Environment files (uploaded separately)
|
||||
.env
|
||||
.env.*
|
||||
env.example
|
||||
|
||||
# Docker
|
||||
Dockerfile
|
||||
docker-compose*.yml
|
||||
.dockerignore
|
||||
|
||||
# IDE
|
||||
.vscode
|
||||
.idea
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# SSH keys
|
||||
.cert/
|
||||
*.pem
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
logs/
|
||||
|
||||
# Data
|
||||
data/
|
||||
*.db
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
# Local server configuration (auto-generated)
|
||||
# SQLite database for local testing
|
||||
|
||||
GNSS_SERVER_DATABASE_URL=sqlite:////Users/alexandershulman/projects2/tm-gnss-guard/server/data/server_local.db
|
||||
GNSS_SERVER_WEB_USERNAME=test
|
||||
GNSS_SERVER_WEB_PASSWORD=Tototheo.25!
|
||||
GNSS_SERVER_SECRET_KEY=local-dev-secret-key-change-in-production
|
||||
GNSS_SERVER_DEBUG=true
|
||||
GNSS_SERVER_HOST=127.0.0.1
|
||||
GNSS_SERVER_PORT=8000
|
||||
|
||||
# ============================================================================
|
||||
# Telegram Bot Configuration (Optional)
|
||||
# ============================================================================
|
||||
# 1. Create bot: Open Telegram → Search @BotFather → /newbot
|
||||
# 2. Get chat ID:
|
||||
# - Start chat with your bot, send any message
|
||||
# - Visit: https://api.telegram.org/bot<YOUR_TOKEN>/getUpdates
|
||||
# - Find "chat":{"id":123456789} (positive for DM, negative for groups)
|
||||
# 3. Fill in values below
|
||||
#
|
||||
# Each asset can override the chat_id to send to a different chat/group.
|
||||
# ============================================================================
|
||||
|
||||
GNSS_SERVER_TELEGRAM_BOT_TOKEN=8319259186:AAGfg2tHPlnHduAPvsnODLPA1kaRDIsbx0A
|
||||
GNSS_SERVER_TELEGRAM_CHAT_ID=-4863784324
|
||||
|
||||
# =============================================================================
|
||||
# ASSET OFFLINE DETECTION
|
||||
# =============================================================================
|
||||
|
||||
# Seconds without updates before an asset is considered offline (default: 120)
|
||||
# Triggers Telegram notification when asset goes offline/online
|
||||
GNSS_SERVER_ASSET_OFFLINE_SECONDS=120
|
||||
93
backup-from-device/gnss-guard/tm-gnss-guard/server/.env.prod
Normal file
93
backup-from-device/gnss-guard/tm-gnss-guard/server/.env.prod
Normal file
@@ -0,0 +1,93 @@
|
||||
# =============================================================================
|
||||
# GNSS Guard Server Configuration
|
||||
# =============================================================================
|
||||
|
||||
# =============================================================================
|
||||
# SERVER SETTINGS
|
||||
# =============================================================================
|
||||
|
||||
# Host to bind to (127.0.0.1 when behind Nginx proxy)
|
||||
GNSS_SERVER_HOST=127.0.0.1
|
||||
|
||||
# Port to bind to
|
||||
GNSS_SERVER_PORT=8000
|
||||
|
||||
# Enable debug mode (set to false in production)
|
||||
GNSS_SERVER_DEBUG=false
|
||||
|
||||
# =============================================================================
|
||||
# DATABASE (PostgreSQL RDS)
|
||||
# =============================================================================
|
||||
|
||||
# Full database connection URL
|
||||
# Format: postgresql://USER:PASSWORD@HOST:PORT/DATABASE
|
||||
GNSS_SERVER_DATABASE_URL=postgresql://postgres:!ks-hUe8@gnss-guard.cn06uuuk8ttq.eu-west-1.rds.amazonaws.com:5432/gnss_guard
|
||||
|
||||
# =============================================================================
|
||||
# SECURITY
|
||||
# =============================================================================
|
||||
|
||||
# Secret key for session encryption (generate with: python -c "import secrets; print(secrets.token_urlsafe(32))")
|
||||
GNSS_SERVER_SECRET_KEY=e0QnYxAvisgbOqzTIl-rlLyczsNOpP7hEc26ea22ikI
|
||||
|
||||
# Session expiration in minutes (default: 24 hours)
|
||||
GNSS_SERVER_SESSION_EXPIRE_MINUTES=1440
|
||||
|
||||
# =============================================================================
|
||||
# WEB UI AUTHENTICATION
|
||||
# =============================================================================
|
||||
|
||||
# Username for web dashboard login
|
||||
GNSS_SERVER_WEB_USERNAME=test
|
||||
|
||||
# Password for web dashboard login
|
||||
GNSS_SERVER_WEB_PASSWORD=Tototheo.25!
|
||||
|
||||
# =============================================================================
|
||||
# DOMAIN (for SSL/HTTPS)
|
||||
# =============================================================================
|
||||
|
||||
# Server domain name (for Let's Encrypt SSL)
|
||||
GNSS_SERVER_DOMAIN=gnss.tototheo.com
|
||||
|
||||
# =============================================================================
|
||||
# =============================================================================
|
||||
# VALIDATION
|
||||
# =============================================================================
|
||||
|
||||
# Staleness threshold in seconds (data older than this is considered stale)
|
||||
GNSS_SERVER_STALE_THRESHOLD_SECONDS=60
|
||||
|
||||
# DATA RETENTION
|
||||
# =============================================================================
|
||||
|
||||
# Days to keep validation history (default: 90)
|
||||
GNSS_SERVER_VALIDATION_HISTORY_DAYS=90
|
||||
|
||||
# Email for Let's Encrypt certificate notifications
|
||||
LETSENCRYPT_EMAIL=alexander.s@tototheo.com
|
||||
|
||||
# ============================================================================
|
||||
# Telegram Bot Configuration (Optional)
|
||||
# ============================================================================
|
||||
# 1. Create bot: Open Telegram → Search @BotFather → /newbot
|
||||
# 2. Get chat ID:
|
||||
# - Start chat with your bot, send any message
|
||||
# - Visit: https://api.telegram.org/bot<YOUR_TOKEN>/getUpdates
|
||||
# - Find "chat":{"id":123456789} (positive for DM, negative for groups)
|
||||
# 3. Fill in values below
|
||||
#
|
||||
# Each asset can override the chat_id to send to a different chat/group.
|
||||
# ============================================================================
|
||||
|
||||
GNSS_SERVER_TELEGRAM_BOT_TOKEN=8319259186:AAGfg2tHPlnHduAPvsnODLPA1kaRDIsbx0A
|
||||
GNSS_SERVER_TELEGRAM_CHAT_ID=-4863784324
|
||||
|
||||
# =============================================================================
|
||||
# ASSET OFFLINE DETECTION
|
||||
# =============================================================================
|
||||
|
||||
# Seconds without updates before an asset is considered offline (default: 120)
|
||||
# Triggers Telegram notification when asset goes offline/online
|
||||
GNSS_SERVER_ASSET_OFFLINE_SECONDS=120
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
# GNSS Guard Server - Dockerfile
|
||||
FROM python:3.11-slim
|
||||
|
||||
# Set environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
libpq-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy requirements first (for better caching)
|
||||
COPY requirements.txt .
|
||||
|
||||
# Install Python dependencies
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy application code
|
||||
COPY . .
|
||||
|
||||
# Create non-root user for security
|
||||
RUN useradd --create-home --shell /bin/bash appuser && \
|
||||
chown -R appuser:appuser /app
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD python -c "import requests; requests.get('http://localhost:8000/auth/check', timeout=5)" || exit 1
|
||||
|
||||
# Run uvicorn
|
||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
"""
|
||||
GNSS Guard Server - Centralized monitoring server for multiple assets
|
||||
"""
|
||||
|
||||
75
backup-from-device/gnss-guard/tm-gnss-guard/server/config.py
Normal file
75
backup-from-device/gnss-guard/tm-gnss-guard/server/config.py
Normal file
@@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Server configuration management for GNSS Guard Server
|
||||
Loads configuration from environment variables
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from pydantic_settings import BaseSettings
|
||||
from pydantic import field_validator
|
||||
|
||||
|
||||
class ServerConfig(BaseSettings):
|
||||
"""Server configuration loaded from environment variables"""
|
||||
|
||||
# Server settings
|
||||
server_host: str = "0.0.0.0"
|
||||
server_port: int = 8000
|
||||
debug: bool = False
|
||||
|
||||
# Database settings (PostgreSQL) - REQUIRED, no insecure default
|
||||
database_url: str
|
||||
|
||||
# Security settings
|
||||
secret_key: str = "change-this-in-production-to-a-random-secret-key"
|
||||
session_expire_minutes: int = 1440 # 24 hours
|
||||
|
||||
# Web UI authentication - REQUIRED, no insecure defaults
|
||||
# Must be set via environment variables GNSS_SERVER_WEB_USERNAME and GNSS_SERVER_WEB_PASSWORD
|
||||
web_username: str
|
||||
web_password: str
|
||||
|
||||
@field_validator('web_password')
|
||||
@classmethod
|
||||
def password_strength(cls, v: str) -> str:
|
||||
"""Ensure password meets minimum security requirements"""
|
||||
if len(v) < 10:
|
||||
raise ValueError('Password must be at least 10 characters long')
|
||||
if v.lower() in ['password', 'admin', 'test', '123456', 'tototheo']:
|
||||
raise ValueError('Password is too common/weak')
|
||||
return v
|
||||
|
||||
# Validation settings
|
||||
stale_threshold_seconds: int = 60 # Data older than this is considered stale
|
||||
|
||||
# Asset offline detection
|
||||
asset_offline_seconds: int = 120 # Consider asset offline after this many seconds without updates
|
||||
|
||||
# Data retention
|
||||
validation_history_days: int = 90 # Keep 90 days of validation history
|
||||
|
||||
# Domain for SSL (optional)
|
||||
server_domain: Optional[str] = None
|
||||
|
||||
# Telegram notification settings (optional)
|
||||
telegram_bot_token: Optional[str] = None
|
||||
telegram_chat_id: Optional[str] = None # Default chat ID for all assets
|
||||
|
||||
@property
|
||||
def telegram_enabled(self) -> bool:
|
||||
"""Check if Telegram notifications are configured"""
|
||||
return bool(self.telegram_bot_token and self.telegram_chat_id)
|
||||
|
||||
class Config:
|
||||
env_file = ".env"
|
||||
env_prefix = "GNSS_SERVER_"
|
||||
case_sensitive = False
|
||||
|
||||
|
||||
def get_config() -> ServerConfig:
|
||||
"""Get server configuration instance"""
|
||||
return ServerConfig()
|
||||
|
||||
105
backup-from-device/gnss-guard/tm-gnss-guard/server/database.py
Normal file
105
backup-from-device/gnss-guard/tm-gnss-guard/server/database.py
Normal file
@@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Database connection and session management for GNSS Guard Server
|
||||
"""
|
||||
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
from typing import Generator
|
||||
|
||||
from sqlalchemy import create_engine, event
|
||||
from sqlalchemy.orm import sessionmaker, Session
|
||||
from sqlalchemy.pool import QueuePool
|
||||
|
||||
from config import get_config
|
||||
from models import Base
|
||||
|
||||
logger = logging.getLogger("gnss_guard.server.database")
|
||||
|
||||
# Global engine and session factory
|
||||
_engine = None
|
||||
_SessionLocal = None
|
||||
|
||||
|
||||
def get_engine():
|
||||
"""Get or create the database engine"""
|
||||
global _engine
|
||||
|
||||
if _engine is None:
|
||||
config = get_config()
|
||||
|
||||
# Check if using SQLite (local development)
|
||||
is_sqlite = config.database_url.startswith("sqlite")
|
||||
|
||||
if is_sqlite:
|
||||
# SQLite-specific settings
|
||||
from sqlalchemy.pool import StaticPool
|
||||
_engine = create_engine(
|
||||
config.database_url,
|
||||
connect_args={"check_same_thread": False},
|
||||
poolclass=StaticPool,
|
||||
echo=config.debug,
|
||||
)
|
||||
logger.info(f"SQLite database engine created: {config.database_url}")
|
||||
else:
|
||||
# PostgreSQL with connection pooling
|
||||
_engine = create_engine(
|
||||
config.database_url,
|
||||
poolclass=QueuePool,
|
||||
pool_size=5,
|
||||
max_overflow=10,
|
||||
pool_pre_ping=True, # Verify connections before using
|
||||
echo=config.debug,
|
||||
)
|
||||
logger.info(f"Database engine created for: {config.database_url.split('@')[-1]}")
|
||||
|
||||
return _engine
|
||||
|
||||
|
||||
def get_session_factory():
|
||||
"""Get or create the session factory"""
|
||||
global _SessionLocal
|
||||
|
||||
if _SessionLocal is None:
|
||||
engine = get_engine()
|
||||
_SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
return _SessionLocal
|
||||
|
||||
|
||||
def init_db():
|
||||
"""Initialize database - create all tables"""
|
||||
engine = get_engine()
|
||||
Base.metadata.create_all(bind=engine)
|
||||
logger.info("Database tables created/verified")
|
||||
|
||||
|
||||
def get_db() -> Generator[Session, None, None]:
|
||||
"""
|
||||
Dependency for FastAPI to get database session.
|
||||
Yields a session and ensures it's closed after use.
|
||||
"""
|
||||
SessionLocal = get_session_factory()
|
||||
db = SessionLocal()
|
||||
try:
|
||||
yield db
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
@contextmanager
|
||||
def get_db_session() -> Generator[Session, None, None]:
|
||||
"""
|
||||
Context manager for database sessions (for use outside FastAPI dependencies).
|
||||
"""
|
||||
SessionLocal = get_session_factory()
|
||||
db = SessionLocal()
|
||||
try:
|
||||
yield db
|
||||
db.commit()
|
||||
except Exception:
|
||||
db.rollback()
|
||||
raise
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
# GNSS Guard Server - Development Docker Compose
|
||||
# No nginx, no SSL - direct access to FastAPI on port 8000
|
||||
#
|
||||
# Usage:
|
||||
# cp env.example .env.dev
|
||||
# # Edit .env.dev (can use SQLite for dev: sqlite:///./data/gnss_guard.db)
|
||||
# docker compose -f docker-compose.dev.yml up -d
|
||||
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
gnss-server:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
container_name: gnss-guard-server-dev
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env.dev
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
# Mount source code for live reload (development only)
|
||||
- .:/app
|
||||
environment:
|
||||
- GNSS_SERVER_DEBUG=true
|
||||
command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/auth/check"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
# GNSS Guard Server - Docker Compose with Nginx + SSL
|
||||
#
|
||||
# Usage:
|
||||
# 1. cp env.example .env.prod
|
||||
# 2. Edit .env.prod with your configuration
|
||||
# 3. docker compose up -d
|
||||
# 4. Run SSL setup: docker compose exec certbot certbot certonly ...
|
||||
#
|
||||
# For development (no SSL): use docker-compose.dev.yml
|
||||
|
||||
services:
|
||||
# ==========================================================================
|
||||
# GNSS Guard Server (FastAPI/Uvicorn)
|
||||
# ==========================================================================
|
||||
gnss-server:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
container_name: gnss-guard-server
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env.prod
|
||||
expose:
|
||||
- "8000"
|
||||
networks:
|
||||
- gnss-network
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import requests; requests.get('http://localhost:8000/auth/check', timeout=5)"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
# ==========================================================================
|
||||
# Nginx Reverse Proxy
|
||||
# ==========================================================================
|
||||
nginx:
|
||||
image: nginx:alpine
|
||||
container_name: gnss-nginx
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
volumes:
|
||||
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
||||
- ./nginx/conf.d:/etc/nginx/conf.d:ro
|
||||
- certbot-etc:/etc/letsencrypt:ro
|
||||
- certbot-var:/var/lib/letsencrypt
|
||||
- certbot-webroot:/var/www/certbot
|
||||
# Mount nginx logs to host for fail2ban monitoring
|
||||
- /var/log/nginx:/var/log/nginx
|
||||
depends_on:
|
||||
- gnss-server
|
||||
networks:
|
||||
- gnss-network
|
||||
|
||||
# ==========================================================================
|
||||
# Certbot (SSL Certificate Management)
|
||||
# ==========================================================================
|
||||
certbot:
|
||||
image: certbot/certbot
|
||||
container_name: gnss-certbot
|
||||
volumes:
|
||||
- certbot-etc:/etc/letsencrypt
|
||||
- certbot-var:/var/lib/letsencrypt
|
||||
- certbot-webroot:/var/www/certbot
|
||||
entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew; sleep 12h & wait $${!}; done;'"
|
||||
|
||||
networks:
|
||||
gnss-network:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
certbot-etc:
|
||||
certbot-var:
|
||||
certbot-webroot:
|
||||
103
backup-from-device/gnss-guard/tm-gnss-guard/server/env.example
Normal file
103
backup-from-device/gnss-guard/tm-gnss-guard/server/env.example
Normal file
@@ -0,0 +1,103 @@
|
||||
# =============================================================================
|
||||
# GNSS Guard Server Configuration
|
||||
# =============================================================================
|
||||
# Copy this file to .env.prod and configure for your environment
|
||||
# Example: cp env.example .env.prod
|
||||
|
||||
# =============================================================================
|
||||
# SERVER SETTINGS
|
||||
# =============================================================================
|
||||
|
||||
# Host to bind to (127.0.0.1 when behind Nginx proxy)
|
||||
GNSS_SERVER_HOST=127.0.0.1
|
||||
|
||||
# Port to bind to
|
||||
GNSS_SERVER_PORT=8000
|
||||
|
||||
# Enable debug mode (set to false in production)
|
||||
GNSS_SERVER_DEBUG=false
|
||||
|
||||
# =============================================================================
|
||||
# DATABASE (PostgreSQL RDS) - REQUIRED!
|
||||
# =============================================================================
|
||||
# The server will NOT start without a valid database URL!
|
||||
|
||||
# Full database connection URL
|
||||
# Format: postgresql://USER:PASSWORD@HOST:PORT/DATABASE
|
||||
GNSS_SERVER_DATABASE_URL=postgresql://gnss_admin:your-password@your-rds-endpoint.rds.amazonaws.com:5432/gnss_guard
|
||||
|
||||
# =============================================================================
|
||||
# SECURITY
|
||||
# =============================================================================
|
||||
|
||||
# Secret key for session encryption (generate with: python -c "import secrets; print(secrets.token_urlsafe(32))")
|
||||
GNSS_SERVER_SECRET_KEY=change-this-to-a-random-secret-key
|
||||
|
||||
# Session expiration in minutes (default: 24 hours)
|
||||
GNSS_SERVER_SESSION_EXPIRE_MINUTES=1440
|
||||
|
||||
# =============================================================================
|
||||
# WEB UI AUTHENTICATION (REQUIRED - no defaults!)
|
||||
# =============================================================================
|
||||
# These credentials are used to login to the web dashboard.
|
||||
# The server will NOT start without these being set!
|
||||
|
||||
# Username for web dashboard login (REQUIRED)
|
||||
GNSS_SERVER_WEB_USERNAME=your_username_here
|
||||
|
||||
# Password for web dashboard login (REQUIRED)
|
||||
# Requirements:
|
||||
# - At least 12 characters long
|
||||
# - Cannot be common passwords like 'password', 'admin', 'test'
|
||||
# Generate a secure password: python -c "import secrets; print(secrets.token_urlsafe(16))"
|
||||
GNSS_SERVER_WEB_PASSWORD=your_secure_password_here
|
||||
|
||||
# =============================================================================
|
||||
# DOMAIN (for SSL/HTTPS)
|
||||
# =============================================================================
|
||||
|
||||
# Server domain name (for Let's Encrypt SSL)
|
||||
GNSS_SERVER_DOMAIN=gnss.yourdomain.com
|
||||
|
||||
# =============================================================================
|
||||
# VALIDATION
|
||||
# =============================================================================
|
||||
|
||||
# Staleness threshold in seconds (data older than this is considered stale)
|
||||
GNSS_SERVER_STALE_THRESHOLD_SECONDS=60
|
||||
|
||||
# =============================================================================
|
||||
# ASSET OFFLINE DETECTION
|
||||
# =============================================================================
|
||||
|
||||
# Seconds without updates before an asset is considered offline (default: 120)
|
||||
# Triggers Telegram notification when asset goes offline/online
|
||||
GNSS_SERVER_ASSET_OFFLINE_SECONDS=120
|
||||
|
||||
# =============================================================================
|
||||
# DATA RETENTION
|
||||
# =============================================================================
|
||||
|
||||
# Days to keep validation history (default: 90)
|
||||
GNSS_SERVER_VALIDATION_HISTORY_DAYS=90
|
||||
|
||||
# =============================================================================
|
||||
# TELEGRAM NOTIFICATIONS (Optional)
|
||||
# =============================================================================
|
||||
# Server-side Telegram notifications for all assets.
|
||||
# Each asset can override the chat_id to send to a different chat/group.
|
||||
|
||||
# Telegram bot token (from @BotFather)
|
||||
GNSS_SERVER_TELEGRAM_BOT_TOKEN=
|
||||
|
||||
# Default Telegram chat ID (negative for groups)
|
||||
# Individual assets can override this in the database
|
||||
GNSS_SERVER_TELEGRAM_CHAT_ID=
|
||||
|
||||
# =============================================================================
|
||||
# SSL (for Docker deployment with Traefik)
|
||||
# =============================================================================
|
||||
|
||||
# Email for Let's Encrypt certificate notifications
|
||||
LETSENCRYPT_EMAIL=admin@yourdomain.com
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
# Keep this directory for importing client database files
|
||||
# Place .db files here with format: {id}_{name}.db
|
||||
# Example: 2_msc_charlotte.db
|
||||
408
backup-from-device/gnss-guard/tm-gnss-guard/server/main.py
Normal file
408
backup-from-device/gnss-guard/tm-gnss-guard/server/main.py
Normal file
@@ -0,0 +1,408 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
FastAPI main application for GNSS Guard Server
|
||||
Centralized monitoring server for multiple GNSS Guard assets
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
import random
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import FastAPI, Request, Depends, HTTPException
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sqlalchemy.orm import Session
|
||||
from slowapi import Limiter, _rate_limit_exceeded_handler
|
||||
from slowapi.util import get_remote_address
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
|
||||
from config import get_config
|
||||
from database import init_db, get_db, get_session_factory
|
||||
from routes import api, auth
|
||||
from routes.auth import get_optional_user, get_current_user
|
||||
from services.asset_service import AssetService
|
||||
from services.telegram_service import get_telegram_service
|
||||
from models import Asset, AssetNotificationState
|
||||
|
||||
# Initialize rate limiter
|
||||
limiter = Limiter(key_func=get_remote_address)
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
)
|
||||
logger = logging.getLogger("gnss_guard.server")
|
||||
|
||||
# Create FastAPI app
|
||||
app = FastAPI(
|
||||
title="GNSS Guard Server",
|
||||
description="Centralized monitoring server for GNSS Guard assets",
|
||||
version="1.0.0"
|
||||
)
|
||||
|
||||
# Setup rate limiting
|
||||
app.state.limiter = limiter
|
||||
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
|
||||
|
||||
# Add CORS middleware - restricted to same-origin only
|
||||
# Since the dashboard is served from the same domain, we only need
|
||||
# to allow requests from the same origin. This prevents CSRF attacks.
|
||||
config = get_config()
|
||||
allowed_origins = []
|
||||
if config.server_domain:
|
||||
allowed_origins = [
|
||||
f"https://{config.server_domain}",
|
||||
f"http://{config.server_domain}", # For initial setup before SSL
|
||||
]
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=allowed_origins,
|
||||
allow_credentials=True,
|
||||
allow_methods=["GET", "POST", "DELETE"],
|
||||
allow_headers=["Content-Type", "Authorization", "Cookie"],
|
||||
)
|
||||
|
||||
# Setup static files and templates
|
||||
static_path = Path(__file__).parent / "static"
|
||||
templates_path = Path(__file__).parent / "templates"
|
||||
|
||||
if static_path.exists():
|
||||
app.mount("/static", StaticFiles(directory=str(static_path)), name="static")
|
||||
|
||||
templates = Jinja2Templates(directory=str(templates_path)) if templates_path.exists() else None
|
||||
|
||||
# Include routers
|
||||
app.include_router(api.router)
|
||||
app.include_router(auth.router)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Health Check Endpoint (public, no auth required)
|
||||
# =============================================================================
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""Health check endpoint - always accessible"""
|
||||
return {"status": "ok", "timestamp": datetime.utcnow().isoformat()}
|
||||
|
||||
|
||||
async def check_offline_assets():
|
||||
"""Background task to check for assets that have gone offline"""
|
||||
config = get_config()
|
||||
telegram_service = get_telegram_service()
|
||||
|
||||
if not telegram_service.enabled:
|
||||
return
|
||||
|
||||
threshold = datetime.utcnow() - timedelta(seconds=config.asset_offline_seconds)
|
||||
|
||||
SessionLocal = get_session_factory()
|
||||
db = SessionLocal()
|
||||
try:
|
||||
# Find assets that are marked online but haven't reported recently
|
||||
states = db.query(AssetNotificationState).join(Asset).filter(
|
||||
AssetNotificationState.is_online == True,
|
||||
AssetNotificationState.last_validation_at != None,
|
||||
AssetNotificationState.last_validation_at < threshold,
|
||||
Asset.is_active == True,
|
||||
Asset.telegram_enabled == True
|
||||
).all()
|
||||
|
||||
for state in states:
|
||||
chat_id = state.asset.telegram_chat_id or telegram_service.default_chat_id
|
||||
if chat_id:
|
||||
logger.info(f"Asset '{state.asset.name}' detected as offline (last seen: {state.last_validation_at})")
|
||||
telegram_service.send_asset_offline_alert(
|
||||
chat_id=chat_id,
|
||||
asset_name=state.asset.name,
|
||||
last_seen=state.last_validation_at,
|
||||
offline_threshold_seconds=config.asset_offline_seconds
|
||||
)
|
||||
state.is_online = False
|
||||
|
||||
if states:
|
||||
db.commit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking offline assets: {e}")
|
||||
db.rollback()
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
async def offline_checker_loop():
|
||||
"""Background loop that periodically checks for offline assets"""
|
||||
while True:
|
||||
await asyncio.sleep(30) # Check every 30 seconds
|
||||
try:
|
||||
await check_offline_assets()
|
||||
except Exception as e:
|
||||
logger.error(f"Error in offline checker loop: {e}")
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup_event():
|
||||
"""Initialize database and background tasks on startup"""
|
||||
logger.info("Starting GNSS Guard Server...")
|
||||
init_db()
|
||||
logger.info("Database initialized")
|
||||
|
||||
# Start background task for offline detection
|
||||
asyncio.create_task(offline_checker_loop())
|
||||
logger.info("Offline asset checker started")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Web UI Routes
|
||||
# =============================================================================
|
||||
|
||||
@app.get("/", response_class=HTMLResponse)
async def index(request: Request, user: Optional[str] = Depends(get_optional_user)):
    """Render the main dashboard, or bounce unauthenticated users to /login."""
    if not user:
        return RedirectResponse(url="/login", status_code=302)

    if not templates:
        # Minimal fallback when the Jinja2 environment was not configured.
        return HTMLResponse("<h1>GNSS Guard Server</h1><p>Templates not configured</p>")

    context = {
        "request": request,
        "username": user,
        # Random value the template appends to asset URLs to defeat caching.
        "cache_buster": random.randint(100000, 999999),
    }
    return templates.TemplateResponse("dashboard.html", context)
|
||||
|
||||
|
||||
@app.get("/login", response_class=HTMLResponse)
async def login_page(request: Request, user: Optional[str] = Depends(get_optional_user)):
    """Render the login form; already-authenticated users go to the dashboard."""
    if user:
        return RedirectResponse(url="/", status_code=302)

    if not templates:
        # Bare-bones fallback form when Jinja2 templates are unavailable.
        return HTMLResponse("""
        <h1>GNSS Guard Server - Login</h1>
        <form method="post" action="/login">
            <input name="username" placeholder="Username"><br>
            <input name="password" type="password" placeholder="Password"><br>
            <button type="submit">Login</button>
        </form>
        """)

    context = {
        "request": request,
        # Random value the template appends to asset URLs to defeat caching.
        "cache_buster": random.randint(100000, 999999),
    }
    return templates.TemplateResponse("login.html", context)
|
||||
|
||||
|
||||
@app.get("/api/dashboard/assets")
async def dashboard_assets(
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Return the status summary for every asset (dashboard overview list)."""
    return AssetService(db).get_all_assets_status()
|
||||
|
||||
|
||||
@app.get("/api/dashboard/asset/{asset_name}/status")
async def dashboard_asset_status(
    asset_name: str,
    at: Optional[float] = None,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """
    Get detailed status for a specific asset (for dashboard display).
    Matches the format expected by the client dashboard.

    Args:
        at: Optional Unix timestamp to get historical data at that time.
            If not provided, returns the latest data.

    Raises:
        HTTPException: 404 if the asset name is unknown.
    """
    service = AssetService(db)
    asset = service.get_asset_by_name(asset_name)

    if not asset:
        raise HTTPException(status_code=404, detail=f"Asset '{asset_name}' not found")

    if at is not None:
        # Get historical validation at specified timestamp
        latest = service.get_validation_at_timestamp(asset.id, at)
    else:
        latest = service.get_latest_validation(asset.id)

    if not latest:
        return {
            "error": "No validation data available",
            "timestamp": datetime.utcnow().isoformat()
        }

    # JSON fields are stored as text columns; decode with safe empties.
    sources_missing = json.loads(latest.sources_missing or "[]")
    sources_stale = json.loads(latest.sources_stale or "[]")
    coordinate_differences = json.loads(latest.coordinate_differences or "{}")
    source_coordinates = json.loads(latest.source_coordinates or "{}")
    validation_details = json.loads(latest.validation_details or "{}")

    # Sources the client was configured to report, per the submitted details.
    expected_sources = validation_details.get("expected_sources", [])

    # Build per-source status entries (matching client format).
    source_display_names = {
        "nmea_primary": "Primary GPS",
        "nmea_secondary": "Secondary GPS",
        "tm_ais": "TM AIS GPS",
        "starlink_gps": "Starlink GPS",
        "starlink_location": "Starlink Location"
    }

    sources = {}
    all_source_names = ["nmea_primary", "nmea_secondary", "tm_ais", "starlink_gps", "starlink_location"]

    for source_name in all_source_names:
        display_name = source_display_names.get(source_name, source_name)

        if source_name not in expected_sources:
            # Source not enabled for this asset: report as not configured.
            sources[source_name] = {
                "display_name": display_name,
                "enabled": False,
                "status": "not_configured",
                "is_stale": False,
                "coordinates": None,
                "last_update": None,
                "last_update_unix": None
            }
            continue

        source_data = source_coordinates.get(source_name)
        is_stale = source_name in sources_stale

        if not source_data:
            # Expected but absent from the submitted coordinates.
            sources[source_name] = {
                "display_name": display_name,
                "enabled": True,
                "status": "missing",
                "is_stale": is_stale,
                "coordinates": None,
                "last_update": None,
                "last_update_unix": None
            }
        else:
            status = "stale" if is_stale else "ok"
            sources[source_name] = {
                "display_name": display_name,
                "enabled": True,
                "status": status,
                "is_stale": is_stale,
                "coordinates": {
                    "latitude": source_data.get("latitude"),
                    "longitude": source_data.get("longitude")
                },
                "last_update": source_data.get("timestamp"),
                "last_update_unix": source_data.get("timestamp_unix")
            }

    # Largest inter-source disagreement; only reported when the
    # validation failed AND the breach exceeds the configured threshold.
    threshold_meters = validation_details.get("threshold_meters", 200.0)
    max_distance_km = None
    max_distance_m = 0.0

    if not latest.is_valid and coordinate_differences:
        for diff_data in coordinate_differences.values():
            if isinstance(diff_data, dict):
                # Accept either key spelling used by older/newer clients.
                distance = diff_data.get("distance_meters", diff_data.get("distance_m", 0))
                if distance > max_distance_m:
                    max_distance_m = distance

        if max_distance_m > threshold_meters:
            max_distance_km = max_distance_m / 1000.0

    has_alert = (not latest.is_valid and max_distance_km is not None) or len(sources_missing) > 0

    # Pick a map center: preferred sources first, then any source with a fix.
    # NOTE: compare against None explicitly — a latitude/longitude of
    # exactly 0.0 (equator / prime meridian) is falsy but perfectly valid.
    map_center = None
    for priority_source in ["nmea_primary", "tm_ais", "starlink_location"]:
        if sources.get(priority_source, {}).get("coordinates"):
            coords = sources[priority_source]["coordinates"]
            if coords.get("latitude") is not None and coords.get("longitude") is not None:
                map_center = coords
                break

    if not map_center:
        for source_data in sources.values():
            if source_data.get("coordinates"):
                coords = source_data["coordinates"]
                if coords.get("latitude") is not None and coords.get("longitude") is not None:
                    map_center = coords
                    break

    return {
        "timestamp": datetime.utcnow().isoformat(),
        "validation_timestamp": latest.validation_timestamp,
        "validation_timestamp_unix": latest.validation_timestamp_unix,
        "is_valid": latest.is_valid,
        "has_alert": has_alert,
        "max_distance_km": max_distance_km,
        "threshold_meters": threshold_meters,
        "sources": sources,
        "sources_stale": sources_stale,
        "map_center": map_center,
        "asset_name": asset_name
    }
|
||||
|
||||
|
||||
@app.get("/api/dashboard/asset/{asset_name}/route")
async def dashboard_asset_route(
    asset_name: str,
    hours: int = 72,
    until: Optional[float] = None,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Return route (track) data for the map view.

    Args:
        hours: Size of the history window in hours (default 72).
        until: Optional Unix timestamp; the route is rendered up to this
            moment instead of "now".

    Raises:
        HTTPException: 404 if the asset name is unknown.
    """
    service = AssetService(db)
    asset = service.get_asset_by_name(asset_name)

    if not asset:
        raise HTTPException(status_code=404, detail=f"Asset '{asset_name}' not found")

    return service.get_route_data(asset.id, hours, until_timestamp=until)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Main entry point
|
||||
# =============================================================================
|
||||
|
||||
def run_server():
    """Start the app under uvicorn with host/port/reload from get_config()."""
    import uvicorn  # local import keeps uvicorn optional for library use

    cfg = get_config()
    uvicorn.run(
        "server.main:app",
        host=cfg.server_host,
        port=cfg.server_port,
        reload=cfg.debug,
        log_level="info",
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this module directly (python main.py).
    run_server()
|
||||
|
||||
211
backup-from-device/gnss-guard/tm-gnss-guard/server/models.py
Normal file
211
backup-from-device/gnss-guard/tm-gnss-guard/server/models.py
Normal file
@@ -0,0 +1,211 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
SQLAlchemy and Pydantic models for GNSS Guard Server
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional
|
||||
from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime, ForeignKey, Text, Index
|
||||
from sqlalchemy.orm import relationship, declarative_base
|
||||
from pydantic import BaseModel, Field
|
||||
import hashlib
|
||||
import secrets
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# SQLAlchemy Database Models
|
||||
# =============================================================================
|
||||
|
||||
class Asset(Base):
    """Asset (client device) registered with the server.

    The plaintext token is never stored — only its SHA-256 hash — so a
    database leak does not expose usable credentials.
    """
    __tablename__ = "assets"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(255), unique=True, nullable=False, index=True)
    token_hash = Column(String(64), nullable=False)  # SHA-256 hash of token (hex)
    created_at = Column(DateTime, default=datetime.utcnow)
    is_active = Column(Boolean, default=True)  # inactive assets cannot authenticate
    description = Column(String(500), nullable=True)

    # Telegram notification settings (optional override for this asset)
    telegram_chat_id = Column(String(100), nullable=True)  # Override default chat ID
    telegram_enabled = Column(Boolean, default=True)  # Enable/disable notifications for this asset

    # Relationship to validation history
    validations = relationship("ValidationHistory", back_populates="asset", cascade="all, delete-orphan")

    # Relationship to notification state
    notification_state = relationship("AssetNotificationState", back_populates="asset", uselist=False, cascade="all, delete-orphan")

    @staticmethod
    def hash_token(token: str) -> str:
        """Hash a token using SHA-256 (hex digest)."""
        return hashlib.sha256(token.encode()).hexdigest()

    @staticmethod
    def generate_token() -> str:
        """Generate a secure random token (~43 URL-safe characters)."""
        return secrets.token_urlsafe(32)

    def verify_token(self, token: str) -> bool:
        """Verify if provided token matches stored hash.

        Uses a constant-time comparison so the check does not leak how
        many leading characters of the hash matched (timing side channel).
        """
        return secrets.compare_digest(self.token_hash, self.hash_token(token))
|
||||
|
||||
|
||||
class AssetNotificationState(Base):
    """Tracks the previous notification state for each asset to detect changes.

    One row per asset (asset_id is unique). The prev_* columns hold the
    last-seen validation state — presumably compared against new submissions
    to decide whether a notification is warranted (confirm against the
    notification service). is_online / last_validation_at drive the server's
    offline-asset detection.
    """
    __tablename__ = "asset_notification_state"

    id = Column(Integer, primary_key=True, index=True)
    # One state row per asset; deleted together with the asset (CASCADE).
    asset_id = Column(Integer, ForeignKey("assets.id", ondelete="CASCADE"), unique=True, nullable=False)

    # Previous state (JSON arrays stored as text)
    prev_sources_missing = Column(Text, nullable=True)  # JSON array
    prev_sources_stale = Column(Text, nullable=True)  # JSON array
    prev_threshold_breached = Column(Boolean, default=False)

    # Timestamp of the last notification sent for this asset.
    last_notification_at = Column(DateTime, nullable=True)

    # Asset online/offline tracking
    is_online = Column(Boolean, default=True)  # Whether asset is currently reporting
    last_validation_at = Column(DateTime, nullable=True)  # Last time we received validation data

    # Relationship
    asset = relationship("Asset", back_populates="notification_state")
|
||||
|
||||
|
||||
class ValidationHistory(Base):
    """Historical validation records from assets.

    Append-only log of every validation an asset submits. The structured
    payload fields (lists/dicts) are serialized to JSON text columns;
    consumers must json.loads() them.
    """
    __tablename__ = "validation_history"

    id = Column(Integer, primary_key=True, index=True)
    # Records are deleted together with their asset (CASCADE).
    asset_id = Column(Integer, ForeignKey("assets.id", ondelete="CASCADE"), nullable=False)

    # Validation timestamps (client-reported)
    validation_timestamp = Column(String(50), nullable=False)  # ISO format
    validation_timestamp_unix = Column(Float, nullable=False, index=True)

    # Overall validation result
    is_valid = Column(Boolean, nullable=False)

    # JSON fields stored as text
    sources_missing = Column(Text, nullable=True)  # JSON array
    sources_stale = Column(Text, nullable=True)  # JSON array
    coordinate_differences = Column(Text, nullable=True)  # JSON object
    source_coordinates = Column(Text, nullable=True)  # JSON object
    validation_details = Column(Text, nullable=True)  # JSON object

    # Server-side metadata: when the record arrived (server clock).
    received_at = Column(DateTime, default=datetime.utcnow, index=True)

    # Relationship
    asset = relationship("Asset", back_populates="validations")

    # Composite index for the common "history of one asset, by time" query.
    __table_args__ = (
        Index('ix_validation_asset_timestamp', 'asset_id', 'validation_timestamp_unix'),
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pydantic Request/Response Models
|
||||
# =============================================================================
|
||||
|
||||
class AssetCreate(BaseModel):
    """Request model for creating a new asset (the token is issued server-side;
    see AssetWithToken for the response)."""
    name: str = Field(..., min_length=1, max_length=255)
    description: Optional[str] = Field(None, max_length=500)
    telegram_chat_id: Optional[str] = Field(None, max_length=100)  # Override default chat ID
    telegram_enabled: bool = True  # Enable notifications for this asset
|
||||
|
||||
|
||||
class AssetResponse(BaseModel):
    """Response model for asset data (deliberately excludes token/token_hash)."""
    id: int
    name: str
    is_active: bool
    created_at: datetime
    description: Optional[str] = None
    telegram_chat_id: Optional[str] = None
    telegram_enabled: bool = True

    class Config:
        # Allow constructing directly from SQLAlchemy Asset rows.
        from_attributes = True
|
||||
|
||||
|
||||
class AssetWithToken(AssetResponse):
    """Response model for newly created asset (includes the plaintext token).

    The server stores only the token's hash, so this response is the
    caller's only opportunity to capture the credential.
    """
    token: str  # Only returned when asset is created
|
||||
|
||||
|
||||
class AssetImport(BaseModel):
    """Request model for importing an asset with a specific, pre-existing token."""
    name: str = Field(..., min_length=1, max_length=255)
    token: str = Field(..., min_length=32, max_length=128)  # plaintext token to adopt
    description: Optional[str] = Field(None, max_length=500)
    telegram_chat_id: Optional[str] = Field(None, max_length=100)
    telegram_enabled: bool = True
|
||||
|
||||
|
||||
class AssetBatchImport(BaseModel):
    """Request model for batch importing several assets in one call."""
    assets: List[AssetImport]
|
||||
|
||||
|
||||
class ValidationSubmission(BaseModel):
    """Request model for submitting one validation record.

    The list/dict fields are persisted verbatim (JSON-serialized) into
    ValidationHistory's text columns.
    """
    validation_timestamp: str  # ISO-format timestamp string from the client
    validation_timestamp_unix: float
    is_valid: bool
    sources_missing: List[str] = []
    sources_stale: List[str] = []
    coordinate_differences: Dict[str, Any] = {}
    source_coordinates: Dict[str, Any] = {}
    validation_details: Dict[str, Any] = {}
|
||||
|
||||
|
||||
class ValidationBatchSubmission(BaseModel):
    """Request model for submitting multiple validation records in one request."""
    records: List[ValidationSubmission]
|
||||
|
||||
|
||||
class ValidationResponse(BaseModel):
    """Response model for a validation record, with JSON text columns
    already decoded back into lists/dicts."""
    id: int
    asset_name: str
    validation_timestamp: str
    validation_timestamp_unix: float
    is_valid: bool
    sources_missing: List[str]
    sources_stale: List[str]
    coordinate_differences: Dict[str, Any]
    source_coordinates: Dict[str, Any]
    validation_details: Dict[str, Any]
    received_at: datetime  # server-side arrival time

    class Config:
        # Allow constructing from ORM objects / attribute access.
        from_attributes = True
|
||||
|
||||
|
||||
class AssetStatus(BaseModel):
    """Current status of an asset (latest validation)."""
    asset_name: str
    is_online: bool  # Has reported in last 5 minutes
    last_seen: Optional[datetime] = None  # None if the asset never reported
    latest_validation: Optional[ValidationResponse] = None
|
||||
|
||||
|
||||
class LoginRequest(BaseModel):
    """Request model for user login (dashboard session authentication)."""
    username: str
    password: str  # plaintext over the wire; rely on TLS termination
|
||||
|
||||
|
||||
class LoginResponse(BaseModel):
    """Response model for successful login."""
    message: str
    username: str  # echoed back for the client UI
|
||||
|
||||
@@ -0,0 +1,101 @@
|
||||
# GNSS Guard Server - Nginx Configuration
|
||||
# This file is used for initial setup (HTTP only)
|
||||
# After SSL setup, this file is replaced with the SSL configuration
|
||||
|
||||
upstream gnss_server {
|
||||
server gnss-server:8000;
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# IP WHITELIST FOR DASHBOARD ACCESS
|
||||
# =============================================================================
|
||||
# These IPs can access the web dashboard and admin endpoints.
|
||||
# The validation API endpoints (/api/v1/validation*) are open to all.
|
||||
#
|
||||
# To update: edit this file and run ./deploy_server.sh --restart
|
||||
# =============================================================================
|
||||
|
||||
geo $ip_whitelist {
|
||||
default 0;
|
||||
|
||||
# Office IPs - Whitelisted for dashboard access
|
||||
213.149.164.73 1; # Socrates Office 5G
|
||||
87.228.228.45 1; # Thaleias Office
|
||||
93.109.218.195 1; # HQ Cyta
|
||||
65.18.217.50 1; # HQ Cablenet
|
||||
93.109.218.196 1; # HQ Cyta 2
|
||||
62.228.7.94 1; # Socrates Home 3
|
||||
195.97.70.162 1; # Piraeus Office
|
||||
|
||||
# Localhost only (for internal health checks)
|
||||
127.0.0.1 1;
|
||||
# NOTE: Docker internal networks (10.0.0.0/8, 172.16.0.0/12) are NOT whitelisted
|
||||
# to prevent privilege escalation if an attacker gains container access
|
||||
}
|
||||
|
||||
# HTTP server
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
|
||||
# Let's Encrypt challenge location - always open
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
# =========================================================================
|
||||
# PUBLIC ENDPOINTS - Open to all (asset token authentication)
|
||||
# =========================================================================
|
||||
|
||||
# Validation API - accessible from anywhere (clients authenticate with tokens)
|
||||
location /api/v1/validation {
|
||||
proxy_pass http://gnss_server;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_read_timeout 300;
|
||||
proxy_connect_timeout 300;
|
||||
}
|
||||
|
||||
# Health check endpoint - open
|
||||
location /health {
|
||||
proxy_pass http://gnss_server;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
}
|
||||
|
||||
# =========================================================================
|
||||
# RESTRICTED ENDPOINTS - Office IPs only (session authentication)
|
||||
# =========================================================================
|
||||
|
||||
# All other endpoints require IP whitelist
|
||||
location / {
|
||||
# Check IP whitelist
|
||||
# TEMPORARILY DISABLED - uncomment to re-enable IP whitelisting
|
||||
# if ($ip_whitelist = 0) {
|
||||
# return 403;
|
||||
# }
|
||||
|
||||
proxy_pass http://gnss_server;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_read_timeout 300;
|
||||
proxy_connect_timeout 300;
|
||||
}
|
||||
|
||||
# Custom error page for 403
|
||||
error_page 403 /403.html;
|
||||
location = /403.html {
|
||||
internal;
|
||||
default_type text/html;
|
||||
return 403 '<!DOCTYPE html><html><head><title>Access Denied</title><style>body{font-family:sans-serif;display:flex;justify-content:center;align-items:center;height:100vh;margin:0;background:#060b10;color:#e5e9f5;}.container{text-align:center;}.title{font-size:48px;margin-bottom:20px;color:#c62828;}.msg{font-size:18px;color:#9aa3b8;}</style></head><body><div class="container"><div class="title">403</div><div class="msg">Access Denied<br>Your IP is not authorized to access this resource.</div></div></body></html>';
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,148 @@
|
||||
# GNSS Guard Server - Nginx Configuration with SSL
|
||||
#
|
||||
# After obtaining SSL certificate, copy this file:
|
||||
# cp gnss-guard-ssl.conf.template gnss-guard-ssl.conf
|
||||
# Then edit and set your domain, and restart nginx
|
||||
|
||||
upstream gnss_server {
|
||||
server gnss-server:8000;
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# IP WHITELIST FOR DASHBOARD ACCESS
|
||||
# =============================================================================
|
||||
# These IPs can access the web dashboard and admin endpoints.
|
||||
# The validation API endpoints (/api/v1/validation*) are open to all.
|
||||
#
|
||||
# To update: edit this file and run ./deploy_server.sh --restart
|
||||
# =============================================================================
|
||||
|
||||
geo $ip_whitelist {
|
||||
default 0;
|
||||
|
||||
# Office IPs - Whitelisted for dashboard access
|
||||
213.149.164.73 1; # Socrates Office 5G
|
||||
87.228.228.45 1; # Thaleias Office
|
||||
93.109.218.195 1; # HQ Cyta
|
||||
65.18.217.50 1; # HQ Cablenet
|
||||
93.109.218.196 1; # HQ Cyta 2
|
||||
62.228.7.94 1; # Socrates Home 3
|
||||
195.97.70.162 1; # Piraeus Office
|
||||
|
||||
# Localhost only (for internal health checks)
|
||||
127.0.0.1 1;
|
||||
# NOTE: Docker internal networks (10.0.0.0/8, 172.16.0.0/12) are NOT whitelisted
|
||||
# to prevent privilege escalation if an attacker gains container access
|
||||
}
|
||||
|
||||
# HTTP -> HTTPS redirect
|
||||
server {
|
||||
listen 80;
|
||||
server_name YOUR_DOMAIN_HERE;
|
||||
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
location / {
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
}
|
||||
|
||||
# HTTPS server
|
||||
server {
|
||||
listen 443 ssl;
|
||||
http2 on;
|
||||
server_name YOUR_DOMAIN_HERE;
|
||||
|
||||
# SSL certificates (Let's Encrypt)
|
||||
ssl_certificate /etc/letsencrypt/live/YOUR_DOMAIN_HERE/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/YOUR_DOMAIN_HERE/privkey.pem;
|
||||
|
||||
# SSL configuration
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:SSL:50m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
# Modern TLS configuration
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
# HSTS - Force HTTPS for 2 years, include subdomains
|
||||
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains" always;
|
||||
|
||||
# Content Security Policy - restrict resource loading
|
||||
# Allows: self, Leaflet from unpkg, map tiles, marker icons
|
||||
add_header Content-Security-Policy "default-src 'self'; script-src 'self' https://unpkg.com 'unsafe-inline'; style-src 'self' https://unpkg.com 'unsafe-inline'; img-src 'self' data: https://*.basemaps.cartocdn.com https://raw.githubusercontent.com https://cdnjs.cloudflare.com https://*.openstreetmap.org; font-src 'self'; connect-src 'self'; frame-ancestors 'self'; base-uri 'self'; form-action 'self'" always;
|
||||
|
||||
# =========================================================================
|
||||
# PUBLIC ENDPOINTS - Open to all (asset token authentication)
|
||||
# =========================================================================
|
||||
|
||||
# Validation API - accessible from anywhere (clients authenticate with tokens)
|
||||
location /api/v1/validation {
|
||||
proxy_pass http://gnss_server;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_read_timeout 300;
|
||||
proxy_connect_timeout 300;
|
||||
}
|
||||
|
||||
# Health check endpoint - open
|
||||
location /health {
|
||||
proxy_pass http://gnss_server;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
}
|
||||
|
||||
# =========================================================================
|
||||
# RESTRICTED ENDPOINTS - Office IPs only (session authentication)
|
||||
# =========================================================================
|
||||
|
||||
# All other endpoints require IP whitelist
|
||||
location / {
|
||||
# Check IP whitelist
|
||||
# TEMPORARILY DISABLED - uncomment to re-enable IP whitelisting
|
||||
# if ($ip_whitelist = 0) {
|
||||
# return 403;
|
||||
# }
|
||||
|
||||
proxy_pass http://gnss_server;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_read_timeout 300;
|
||||
proxy_connect_timeout 300;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Static files - also restricted
|
||||
location /static/ {
|
||||
# TEMPORARILY DISABLED - uncomment to re-enable IP whitelisting
|
||||
# if ($ip_whitelist = 0) {
|
||||
# return 403;
|
||||
# }
|
||||
|
||||
proxy_pass http://gnss_server/static/;
|
||||
proxy_cache_valid 200 1d;
|
||||
expires 1d;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# Custom error page for 403
|
||||
error_page 403 /403.html;
|
||||
location = /403.html {
|
||||
internal;
|
||||
default_type text/html;
|
||||
return 403 '<!DOCTYPE html><html><head><title>Access Denied</title><style>body{font-family:sans-serif;display:flex;justify-content:center;align-items:center;height:100vh;margin:0;background:#060b10;color:#e5e9f5;}.container{text-align:center;}.title{font-size:48px;margin-bottom:20px;color:#c62828;}.msg{font-size:18px;color:#9aa3b8;}</style></head><body><div class="container"><div class="title">403</div><div class="msg">Access Denied<br>Your IP is not authorized to access this resource.</div></div></body></html>';
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    # Gzip compression
    gzip on;
    gzip_vary on;
    gzip_proxied any;
    gzip_comp_level 6;
    gzip_types text/plain text/css text/xml application/json application/javascript application/xml;

    # Security headers.
    # NOTE: server/location blocks that declare their own add_header
    # directives do NOT inherit these; repeat them there if needed.
    add_header X-Frame-Options "SAMEORIGIN" always;
    add_header X-Content-Type-Options "nosniff" always;
    # The legacy XSS auditor is deprecated and its filtering mode can be
    # abused for cross-site information leaks; explicitly disable it.
    add_header X-XSS-Protection "0" always;

    include /etc/nginx/conf.d/*.conf;
}
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
# GNSS Guard Server Dependencies
|
||||
|
||||
# Web framework
|
||||
fastapi>=0.104.0
|
||||
uvicorn[standard]>=0.24.0
|
||||
|
||||
# Database
|
||||
sqlalchemy>=2.0.0
|
||||
psycopg2-binary>=2.9.9 # PostgreSQL driver
|
||||
alembic>=1.12.0 # Database migrations (optional)
|
||||
|
||||
# Configuration
|
||||
pydantic>=2.5.0
|
||||
pydantic-settings>=2.1.0
|
||||
python-dotenv>=1.0.0
|
||||
|
||||
# Templates and static files
|
||||
jinja2>=3.1.2
|
||||
python-multipart>=0.0.6 # For form data
|
||||
|
||||
# Security
|
||||
passlib[bcrypt]>=1.7.4 # Password hashing
|
||||
slowapi>=0.1.9 # Rate limiting
|
||||
|
||||
# HTTP client (for health checks and Telegram API)
|
||||
httpx>=0.25.0
|
||||
requests>=2.31.0
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
"""
|
||||
API routes for GNSS Guard Server
|
||||
"""
|
||||
|
||||
488
backup-from-device/gnss-guard/tm-gnss-guard/server/routes/api.py
Normal file
488
backup-from-device/gnss-guard/tm-gnss-guard/server/routes/api.py
Normal file
@@ -0,0 +1,488 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
REST API endpoints for GNSS Guard Server
|
||||
Handles validation data submission and retrieval
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Header, Query
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import desc
|
||||
|
||||
from database import get_db
|
||||
from models import (
|
||||
Asset, ValidationHistory, AssetNotificationState,
|
||||
ValidationSubmission, ValidationBatchSubmission,
|
||||
ValidationResponse, AssetStatus, AssetResponse, AssetCreate, AssetWithToken,
|
||||
AssetImport, AssetBatchImport
|
||||
)
|
||||
from routes.auth import get_current_user
|
||||
from services.telegram_service import get_telegram_service
|
||||
|
||||
logger = logging.getLogger("gnss_guard.server.api")
|
||||
|
||||
router = APIRouter(prefix="/api/v1", tags=["api"])
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Asset Token Authentication Dependency
|
||||
# =============================================================================
|
||||
|
||||
async def get_current_asset(
    authorization: str = Header(..., description="Bearer token for asset authentication"),
    db: Session = Depends(get_db)
) -> Asset:
    """FastAPI dependency: authenticate an asset from its Bearer token.

    The presented token is hashed and looked up against active assets;
    any mismatch raises 401.
    """
    prefix = "Bearer "
    if not authorization.startswith(prefix):
        raise HTTPException(status_code=401, detail="Invalid authorization header format")

    # Tokens are stored only as SHA-256 hashes; hash before lookup.
    presented_hash = Asset.hash_token(authorization[len(prefix):])

    asset = (
        db.query(Asset)
        .filter(
            Asset.token_hash == presented_hash,
            Asset.is_active == True,
        )
        .first()
    )

    if not asset:
        raise HTTPException(status_code=401, detail="Invalid or inactive token")

    return asset
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Validation Endpoints (Asset Authentication Required)
|
||||
# =============================================================================
|
||||
|
||||
@router.post("/validation", status_code=201)
async def submit_validation(
    data: ValidationSubmission,
    asset: Asset = Depends(get_current_asset),
    db: Session = Depends(get_db)
) -> dict:
    """
    Submit a single validation record from an asset.
    Also triggers Telegram notifications if state changed.

    The record is committed BEFORE the notification step, so a Telegram
    failure can never lose data; notification errors are logged as
    warnings and the request still succeeds. Database errors roll back
    and surface as HTTP 500.
    """
    try:
        # Structured payload fields are serialized to JSON text columns.
        validation = ValidationHistory(
            asset_id=asset.id,
            validation_timestamp=data.validation_timestamp,
            validation_timestamp_unix=data.validation_timestamp_unix,
            is_valid=data.is_valid,
            sources_missing=json.dumps(data.sources_missing),
            sources_stale=json.dumps(data.sources_stale),
            coordinate_differences=json.dumps(data.coordinate_differences),
            source_coordinates=json.dumps(data.source_coordinates),
            validation_details=json.dumps(data.validation_details),
        )

        db.add(validation)
        db.commit()  # persist first; notification below is best-effort

        logger.info(f"Validation received from asset '{asset.name}' at {data.validation_timestamp}")

        # Process Telegram notification (will only send if state changed)
        try:
            telegram_service = get_telegram_service()
            # Subset of the submission the notification logic needs.
            validation_data = {
                "sources_missing": data.sources_missing,
                "sources_stale": data.sources_stale,
                "validation_details": data.validation_details,
                "source_coordinates": data.source_coordinates,
            }
            telegram_service.process_validation(db, asset, validation_data)
        except Exception as e:
            # Deliberate best-effort: never fail the submission over Telegram.
            logger.warning(f"Telegram notification error for {asset.name}: {e}")

        return {
            "status": "success",
            "message": "Validation record saved",
            "id": validation.id
        }

    except Exception as e:
        logger.error(f"Error saving validation from {asset.name}: {e}")
        db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/validation/batch", status_code=201)
async def submit_validation_batch(
    data: ValidationBatchSubmission,
    asset: Asset = Depends(get_current_asset),
    db: Session = Depends(get_db)
) -> dict:
    """
    Submit multiple validation records (for catching up after offline period).
    Only sends Telegram notification for the most recent record to avoid spam.

    Returns:
        dict with counts of records "saved" and "skipped" (duplicate timestamps).

    Raises:
        HTTPException: 500 (after rollback) if the batch cannot be saved.
    """
    try:
        saved_count = 0
        skipped_count = 0
        latest_record = None  # most recent record in the batch, for notification
        latest_timestamp = 0

        for record in data.records:
            # Check if this timestamp already exists for this asset
            # (one query per record; acceptable for catch-up sized batches)
            existing = db.query(ValidationHistory).filter(
                ValidationHistory.asset_id == asset.id,
                ValidationHistory.validation_timestamp_unix == record.validation_timestamp_unix
            ).first()

            if existing:
                skipped_count += 1
                continue

            # Nested structures are persisted as JSON text columns
            validation = ValidationHistory(
                asset_id=asset.id,
                validation_timestamp=record.validation_timestamp,
                validation_timestamp_unix=record.validation_timestamp_unix,
                is_valid=record.is_valid,
                sources_missing=json.dumps(record.sources_missing),
                sources_stale=json.dumps(record.sources_stale),
                coordinate_differences=json.dumps(record.coordinate_differences),
                source_coordinates=json.dumps(record.source_coordinates),
                validation_details=json.dumps(record.validation_details),
            )
            db.add(validation)
            saved_count += 1

            # Track the most recent record for notification
            if record.validation_timestamp_unix > latest_timestamp:
                latest_timestamp = record.validation_timestamp_unix
                latest_record = record

        # Single commit for the whole batch
        db.commit()

        logger.info(f"Batch validation from '{asset.name}': {saved_count} saved, {skipped_count} skipped")

        # Process Telegram notification for the most recent record only;
        # failures are logged and swallowed so they never fail the request.
        if latest_record:
            try:
                telegram_service = get_telegram_service()
                validation_data = {
                    "sources_missing": latest_record.sources_missing,
                    "sources_stale": latest_record.sources_stale,
                    "validation_details": latest_record.validation_details,
                    "source_coordinates": latest_record.source_coordinates,
                }
                telegram_service.process_validation(db, asset, validation_data)
            except Exception as e:
                logger.warning(f"Telegram notification error for {asset.name}: {e}")

        return {
            "status": "success",
            "saved": saved_count,
            "skipped": skipped_count
        }

    except Exception as e:
        logger.error(f"Error saving batch validation from {asset.name}: {e}")
        db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Read Endpoints (Session Authentication Required)
|
||||
# =============================================================================
|
||||
|
||||
@router.get("/assets", response_model=List[AssetResponse])
async def list_assets(
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> List[AssetResponse]:
    """
    List every active asset.

    Requires user session authentication; inactive (soft-deleted) assets
    are excluded.
    """
    return db.query(Asset).filter(Asset.is_active == True).all()
|
||||
|
||||
|
||||
@router.get("/assets/{asset_name}/status")
async def get_asset_status(
    asset_name: str,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> AssetStatus:
    """
    Get current status of an asset (latest validation).
    Requires user session authentication.

    Raises:
        HTTPException: 404 if the asset does not exist or is inactive.
    """
    asset = db.query(Asset).filter(
        Asset.name == asset_name,
        Asset.is_active == True
    ).first()

    if not asset:
        raise HTTPException(status_code=404, detail=f"Asset '{asset_name}' not found")

    # Get latest validation (newest by unix timestamp)
    latest = db.query(ValidationHistory).filter(
        ValidationHistory.asset_id == asset.id
    ).order_by(desc(ValidationHistory.validation_timestamp_unix)).first()

    # Get online status from notification state (consistent with Telegram alerts)
    notification_state = db.query(AssetNotificationState).filter(
        AssetNotificationState.asset_id == asset.id
    ).first()

    # An asset with no notification state yet is reported as offline
    is_online = notification_state.is_online if notification_state else False
    last_seen = notification_state.last_validation_at if notification_state else None

    # Fall back to validation timestamp if no notification state
    if not last_seen and latest and latest.received_at:
        last_seen = latest.received_at

    latest_validation = None
    if latest:
        # Decode the JSON text columns back into structures for the response
        latest_validation = ValidationResponse(
            id=latest.id,
            asset_name=asset.name,
            validation_timestamp=latest.validation_timestamp,
            validation_timestamp_unix=latest.validation_timestamp_unix,
            is_valid=latest.is_valid,
            sources_missing=json.loads(latest.sources_missing or "[]"),
            sources_stale=json.loads(latest.sources_stale or "[]"),
            coordinate_differences=json.loads(latest.coordinate_differences or "{}"),
            source_coordinates=json.loads(latest.source_coordinates or "{}"),
            validation_details=json.loads(latest.validation_details or "{}"),
            received_at=latest.received_at
        )

    return AssetStatus(
        asset_name=asset.name,
        is_online=is_online,
        last_seen=last_seen,
        latest_validation=latest_validation
    )
|
||||
|
||||
|
||||
@router.get("/assets/{asset_name}/history")
async def get_asset_history(
    asset_name: str,
    hours: int = Query(default=72, ge=1, le=168, description="Hours of history (max 168 = 7 days)"),
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> List[ValidationResponse]:
    """
    Get validation history for an asset (default: 72 hours).
    Requires user session authentication.

    Raises:
        HTTPException: 404 if the asset does not exist or is inactive.
    """
    asset = db.query(Asset).filter(
        Asset.name == asset_name,
        Asset.is_active == True
    ).first()

    if not asset:
        raise HTTPException(status_code=404, detail=f"Asset '{asset_name}' not found")

    # Calculate cutoff timestamp.
    # BUG FIX: this previously used datetime.utcnow() (naive) followed by
    # .timestamp(). Python interprets naive datetimes in the server's *local*
    # timezone, so on any non-UTC host the history window was shifted by the
    # UTC offset. time.time() is unambiguous (seconds since the Unix epoch).
    import time
    cutoff_unix = time.time() - hours * 3600

    # Get validation history, newest first
    validations = db.query(ValidationHistory).filter(
        ValidationHistory.asset_id == asset.id,
        ValidationHistory.validation_timestamp_unix >= cutoff_unix
    ).order_by(desc(ValidationHistory.validation_timestamp_unix)).all()

    # Decode the stored JSON text columns back into structures for the response
    return [
        ValidationResponse(
            id=v.id,
            asset_name=asset.name,
            validation_timestamp=v.validation_timestamp,
            validation_timestamp_unix=v.validation_timestamp_unix,
            is_valid=v.is_valid,
            sources_missing=json.loads(v.sources_missing or "[]"),
            sources_stale=json.loads(v.sources_stale or "[]"),
            coordinate_differences=json.loads(v.coordinate_differences or "{}"),
            source_coordinates=json.loads(v.source_coordinates or "{}"),
            validation_details=json.loads(v.validation_details or "{}"),
            received_at=v.received_at
        )
        for v in validations
    ]
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Admin Endpoints (Session Authentication Required)
|
||||
# =============================================================================
|
||||
|
||||
@router.post("/admin/assets", response_model=AssetWithToken, status_code=201)
async def create_asset(
    data: AssetCreate,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> AssetWithToken:
    """
    Create a new asset and return its token.
    Requires user session authentication.

    Raises:
        HTTPException: 400 if an asset with the same name already exists.
    """
    # Check if asset already exists (asset names act as unique identifiers)
    existing = db.query(Asset).filter(Asset.name == data.name).first()
    if existing:
        raise HTTPException(status_code=400, detail=f"Asset '{data.name}' already exists")

    # Generate token; only its hash is persisted
    token = Asset.generate_token()
    token_hash = Asset.hash_token(token)

    asset = Asset(
        name=data.name,
        token_hash=token_hash,
        description=data.description,
        telegram_chat_id=data.telegram_chat_id,
        telegram_enabled=data.telegram_enabled
    )

    db.add(asset)
    db.commit()
    db.refresh(asset)  # populate server-generated fields (id, created_at)

    logger.info(f"Created new asset: {data.name}")

    # Return asset with the unhashed token (only shown once!)
    return AssetWithToken(
        id=asset.id,
        name=asset.name,
        is_active=asset.is_active,
        created_at=asset.created_at,
        description=asset.description,
        telegram_chat_id=asset.telegram_chat_id,
        telegram_enabled=asset.telegram_enabled,
        token=token
    )
|
||||
|
||||
|
||||
@router.delete("/admin/assets/{asset_name}")
async def deactivate_asset(
    asset_name: str,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> dict:
    """
    Soft-delete an asset by clearing its active flag.

    The row (and its validation history) is kept; only `is_active` changes.
    Requires user session authentication.

    Raises:
        HTTPException: 404 if no asset with that name exists.
    """
    target = db.query(Asset).filter(Asset.name == asset_name).first()
    if target is None:
        raise HTTPException(status_code=404, detail=f"Asset '{asset_name}' not found")

    target.is_active = False
    db.commit()

    logger.info(f"Deactivated asset: {asset_name}")

    return {"status": "success", "message": f"Asset '{asset_name}' deactivated"}
|
||||
|
||||
|
||||
@router.post("/admin/assets/import", response_model=AssetResponse, status_code=201)
async def import_asset(
    data: AssetImport,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> AssetResponse:
    """
    Import an asset with a specific token.
    If asset exists, updates its token. If not, creates it.
    Requires user session authentication.
    """
    # Hash the provided token; only the hash is ever stored
    token_hash = Asset.hash_token(data.token)

    # Check if asset already exists (names act as unique identifiers)
    existing = db.query(Asset).filter(Asset.name == data.name).first()

    if existing:
        # Update existing asset's token and reactivate it if it was soft-deleted
        existing.token_hash = token_hash
        existing.is_active = True
        # Description is only overwritten when a non-empty one is supplied;
        # telegram_chat_id uses an explicit None check so an empty string can clear it
        if data.description:
            existing.description = data.description
        if data.telegram_chat_id is not None:
            existing.telegram_chat_id = data.telegram_chat_id
        existing.telegram_enabled = data.telegram_enabled
        db.commit()
        db.refresh(existing)
        logger.info(f"Updated token for existing asset: {data.name}")
        return existing
    else:
        # Create new asset with provided token
        asset = Asset(
            name=data.name,
            token_hash=token_hash,
            description=data.description,
            telegram_chat_id=data.telegram_chat_id,
            telegram_enabled=data.telegram_enabled
        )
        db.add(asset)
        db.commit()
        db.refresh(asset)
        logger.info(f"Imported new asset: {data.name}")
        return asset
|
||||
|
||||
|
||||
@router.post("/admin/assets/import/batch")
async def import_assets_batch(
    data: AssetBatchImport,
    user: str = Depends(get_current_user),
    db: Session = Depends(get_db)
) -> dict:
    """
    Batch import assets with specific tokens.
    Creates new assets or updates existing ones.
    Requires user session authentication.

    Returns:
        dict with "created"/"updated" counts and a per-asset "errors" list.
    """
    created = 0
    updated = 0
    errors = []

    # Per-asset failures are collected in `errors` so one bad entry does not
    # abort the rest of the batch.
    # NOTE(review): the single db.commit() below is outside the per-asset try,
    # so a commit-time failure would raise unhandled -- confirm intended.
    for asset_data in data.assets:
        try:
            token_hash = Asset.hash_token(asset_data.token)
            existing = db.query(Asset).filter(Asset.name == asset_data.name).first()

            if existing:
                # Update token and reactivate; same field semantics as import_asset
                existing.token_hash = token_hash
                existing.is_active = True
                if asset_data.description:
                    existing.description = asset_data.description
                if asset_data.telegram_chat_id is not None:
                    existing.telegram_chat_id = asset_data.telegram_chat_id
                existing.telegram_enabled = asset_data.telegram_enabled
                updated += 1
                logger.info(f"Updated token for asset: {asset_data.name}")
            else:
                asset = Asset(
                    name=asset_data.name,
                    token_hash=token_hash,
                    description=asset_data.description,
                    telegram_chat_id=asset_data.telegram_chat_id,
                    telegram_enabled=asset_data.telegram_enabled
                )
                db.add(asset)
                created += 1
                logger.info(f"Created asset: {asset_data.name}")
        except Exception as e:
            errors.append({"name": asset_data.name, "error": str(e)})
            logger.error(f"Failed to import asset {asset_data.name}: {e}")

    # Single commit for the whole batch
    db.commit()

    return {
        "status": "success",
        "created": created,
        "updated": updated,
        "errors": errors
    }
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Authentication routes for GNSS Guard Server
|
||||
Handles user session authentication for the web UI
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Response, Request
|
||||
from fastapi.responses import RedirectResponse
|
||||
from pydantic import BaseModel
|
||||
from slowapi import Limiter
|
||||
from slowapi.util import get_remote_address
|
||||
|
||||
from config import get_config
|
||||
|
||||
logger = logging.getLogger("gnss_guard.server.auth")
|
||||
|
||||
router = APIRouter(tags=["auth"])
|
||||
|
||||
# Rate limiter instance (uses app.state.limiter set in main.py)
|
||||
limiter = Limiter(key_func=get_remote_address)
|
||||
|
||||
# Simple in-memory session storage (for single-user scenario)
|
||||
# In production with multiple servers, use Redis or database
|
||||
_sessions: dict = {}
|
||||
|
||||
|
||||
class LoginRequest(BaseModel):
    """Request body for the /login endpoint (plain username/password)."""
    username: str
    password: str
|
||||
|
||||
|
||||
def create_session(username: str) -> str:
    """Register a fresh in-memory session for *username* and return its ID."""
    import secrets

    sid = secrets.token_urlsafe(32)
    cfg = get_config()

    # Expiry is fixed at creation time; validate_session() enforces it lazily.
    lifetime = timedelta(minutes=cfg.session_expire_minutes)
    _sessions[sid] = {
        "username": username,
        "created_at": datetime.utcnow(),
        "expires_at": datetime.utcnow() + lifetime,
    }

    return sid
|
||||
|
||||
|
||||
def validate_session(session_id: str) -> Optional[str]:
    """Return the username for a live session, or None if missing or expired."""
    if not session_id:
        return None

    entry = _sessions.get(session_id)
    if entry is None:
        return None

    # Expired sessions are evicted lazily, on first access after expiry.
    if datetime.utcnow() > entry["expires_at"]:
        del _sessions[session_id]
        return None

    return entry["username"]
|
||||
|
||||
|
||||
def get_current_user(request: Request) -> str:
    """
    FastAPI dependency: resolve the session cookie to a username.

    Raises:
        HTTPException: 401 when there is no valid, unexpired session.
    """
    username = validate_session(request.cookies.get("session_id"))
    if username:
        return username

    raise HTTPException(
        status_code=401,
        detail="Not authenticated",
        headers={"WWW-Authenticate": "Bearer"}
    )
|
||||
|
||||
|
||||
def get_optional_user(request: Request) -> Optional[str]:
    """
    Dependency to get current user if authenticated, None otherwise.
    """
    # Same cookie lookup as get_current_user, but never raises.
    session_id = request.cookies.get("session_id")
    return validate_session(session_id)
|
||||
|
||||
|
||||
@router.post("/login")
@limiter.limit("5/minute")  # Rate limit: 5 login attempts per minute per IP
async def login(request: Request, data: LoginRequest, response: Response):
    """
    Login endpoint - validates credentials and sets session cookie.
    Rate limited to prevent brute force attacks.

    Raises:
        HTTPException: 401 on invalid credentials.
    """
    config = get_config()

    # Verify credentials against hardcoded user
    # NOTE(review): plain != comparison is not constant-time; the rate limiter
    # mitigates brute force, but consider secrets.compare_digest -- confirm.
    if data.username != config.web_username or data.password != config.web_password:
        logger.warning(f"Failed login attempt for user: {data.username} from IP: {request.client.host}")
        raise HTTPException(status_code=401, detail="Invalid credentials")

    # Create session
    session_id = create_session(data.username)

    # Set session cookie
    # secure=True ensures cookie only sent over HTTPS
    response.set_cookie(
        key="session_id",
        value=session_id,
        httponly=True,  # not readable from page JavaScript
        secure=True,  # Only send over HTTPS
        samesite="lax",
        max_age=config.session_expire_minutes * 60
    )

    logger.info(f"User logged in: {data.username}")

    return {"message": "Login successful", "username": data.username}
|
||||
|
||||
|
||||
@router.post("/logout")
async def logout(request: Request, response: Response):
    """Invalidate the caller's session (if any) and clear the cookie."""
    sid = request.cookies.get("session_id")

    if sid:
        # pop() tolerates a session that already expired or was never created.
        _sessions.pop(sid, None)

    response.delete_cookie("session_id")

    return {"message": "Logged out successfully"}
|
||||
|
||||
|
||||
@router.get("/auth/check")
async def check_auth(request: Request):
    """Report whether the current session cookie maps to a live session."""
    username = validate_session(request.cookies.get("session_id"))

    if username:
        return {"authenticated": True, "username": username}
    return {"authenticated": False}
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
"""
|
||||
Services for GNSS Guard Server
|
||||
"""
|
||||
|
||||
@@ -0,0 +1,225 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Asset management service for GNSS Guard Server
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional, Dict, Any
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import desc, func
|
||||
|
||||
from models import Asset, ValidationHistory, AssetNotificationState
|
||||
|
||||
logger = logging.getLogger("gnss_guard.server.asset_service")
|
||||
|
||||
|
||||
class AssetService:
    """Service layer for asset-related queries (status, history, map routes).

    All methods run against the SQLAlchemy session supplied at construction;
    only cleanup_old_validations() commits.
    """

    def __init__(self, db: Session):
        self.db = db

    def get_all_assets(self, include_inactive: bool = False) -> List[Asset]:
        """Get all assets; active-only unless include_inactive is True."""
        query = self.db.query(Asset)
        if not include_inactive:
            query = query.filter(Asset.is_active == True)
        return query.all()

    def get_asset_by_name(self, name: str) -> Optional[Asset]:
        """Get asset by name (active or not)."""
        return self.db.query(Asset).filter(Asset.name == name).first()

    def get_asset_by_token(self, token: str) -> Optional[Asset]:
        """Get active asset by token; only the token hash is compared."""
        token_hash = Asset.hash_token(token)
        return self.db.query(Asset).filter(
            Asset.token_hash == token_hash,
            Asset.is_active == True
        ).first()

    def get_latest_validation(self, asset_id: int) -> Optional[ValidationHistory]:
        """Get the latest validation record for an asset."""
        return self.db.query(ValidationHistory).filter(
            ValidationHistory.asset_id == asset_id
        ).order_by(desc(ValidationHistory.validation_timestamp_unix)).first()

    def get_validation_at_timestamp(
        self,
        asset_id: int,
        target_timestamp: float
    ) -> Optional[ValidationHistory]:
        """
        Get the validation record closest to (but not after) the specified timestamp.
        This is useful for viewing historical data at a specific point in time.
        """
        return self.db.query(ValidationHistory).filter(
            ValidationHistory.asset_id == asset_id,
            ValidationHistory.validation_timestamp_unix <= target_timestamp
        ).order_by(desc(ValidationHistory.validation_timestamp_unix)).first()

    def get_validation_history(
        self,
        asset_id: int,
        hours: int = 72,
        limit: Optional[int] = None
    ) -> List[ValidationHistory]:
        """Get validation history for an asset, newest first.

        Args:
            asset_id: The asset ID.
            hours: Size of the lookback window.
            limit: Optional cap on the number of rows returned.
        """
        import time

        # BUG FIX: this previously derived the cutoff from
        # datetime.utcnow().timestamp(). utcnow() is naive, and .timestamp()
        # interprets naive datetimes in the server's *local* timezone, so the
        # window was shifted by the UTC offset on non-UTC hosts. time.time()
        # is unambiguous (seconds since the Unix epoch).
        cutoff_unix = time.time() - hours * 3600

        query = self.db.query(ValidationHistory).filter(
            ValidationHistory.asset_id == asset_id,
            ValidationHistory.validation_timestamp_unix >= cutoff_unix
        ).order_by(desc(ValidationHistory.validation_timestamp_unix))

        if limit:
            query = query.limit(limit)

        return query.all()

    def get_all_assets_status(self) -> List[Dict[str, Any]]:
        """Get status summary for all active assets."""
        assets = self.get_all_assets()
        statuses = []

        for asset in assets:
            latest = self.get_latest_validation(asset.id)

            # Get online status from notification state (consistent with Telegram alerts)
            notification_state = self.db.query(AssetNotificationState).filter(
                AssetNotificationState.asset_id == asset.id
            ).first()

            is_online = notification_state.is_online if notification_state else False
            last_seen = notification_state.last_validation_at if notification_state else None

            # Fall back to validation timestamp if no notification state
            if not last_seen and latest and latest.received_at:
                last_seen = latest.received_at

            is_valid = None
            has_distance_alert = False  # True if distance threshold exceeded

            if latest:
                is_valid = latest.is_valid

                # Check if there's a distance alert (AT RISK vs DEGRADED)
                if not is_valid:
                    validation_details = json.loads(latest.validation_details or "{}")
                    coordinate_differences = json.loads(latest.coordinate_differences or "{}")
                    threshold = validation_details.get("threshold_meters", 200)
                    max_distance = validation_details.get("max_distance_meters", 0)

                    # Also check coordinate_differences for max distance
                    if not max_distance and coordinate_differences:
                        for diff_data in coordinate_differences.values():
                            if isinstance(diff_data, dict):
                                dist = diff_data.get("distance_meters", 0)
                                if dist > max_distance:
                                    max_distance = dist

                    has_distance_alert = max_distance > threshold

            statuses.append({
                "name": asset.name,
                "is_online": is_online,
                "is_valid": is_valid,
                "has_distance_alert": has_distance_alert,
                "last_seen": last_seen.isoformat() if last_seen else None,
                "description": asset.description
            })

        return statuses

    def get_route_data(
        self,
        asset_id: int,
        hours: int = 72,
        until_timestamp: Optional[float] = None
    ) -> List[Dict[str, Any]]:
        """
        Get route data for map visualization.
        Returns list of points with coordinates and validation status.

        Args:
            asset_id: The asset ID
            hours: Number of hours of history to retrieve
            until_timestamp: Optional Unix timestamp to show route up to this time.
                If provided, returns `hours` of history ending at this timestamp.
        """
        if until_timestamp is not None:
            # Get history ending at the specified timestamp
            cutoff_unix = until_timestamp - (hours * 3600)
            validations = self.db.query(ValidationHistory).filter(
                ValidationHistory.asset_id == asset_id,
                ValidationHistory.validation_timestamp_unix >= cutoff_unix,
                ValidationHistory.validation_timestamp_unix <= until_timestamp
            ).order_by(desc(ValidationHistory.validation_timestamp_unix)).all()
        else:
            validations = self.get_validation_history(asset_id, hours)

        route_points = []

        for v in validations:
            source_coordinates = json.loads(v.source_coordinates or "{}")

            # Get primary coordinate (prefer nmea_primary, then tm_ais, then any)
            coord = None
            for source in ["nmea_primary", "tm_ais", "starlink_location"]:
                if source in source_coordinates:
                    coord = source_coordinates[source]
                    break

            if not coord and source_coordinates:
                # Use first available
                coord = list(source_coordinates.values())[0]

            # NOTE(review): truthiness check drops points with lat/lon exactly
            # 0 (equator / prime meridian) -- confirm whether intended.
            if coord and coord.get("latitude") and coord.get("longitude"):
                # Determine status color for the map marker
                sources_missing = json.loads(v.sources_missing or "[]")
                sources_stale = json.loads(v.sources_stale or "[]")
                validation_details = json.loads(v.validation_details or "{}")

                threshold = validation_details.get("threshold_meters", 200)
                max_distance = validation_details.get("max_distance_meters", 0)

                if not v.is_valid and max_distance > threshold:
                    status = "alert"  # Red - distance exceeded
                elif sources_missing or sources_stale:
                    status = "degraded"  # Orange - missing/stale
                else:
                    status = "valid"  # Green - all OK

                route_points.append({
                    "id": v.id,
                    "timestamp": v.validation_timestamp,
                    "timestamp_unix": v.validation_timestamp_unix,
                    "latitude": coord["latitude"],
                    "longitude": coord["longitude"],
                    "status": status,
                    "is_valid": v.is_valid,
                    "sources_missing": sources_missing,
                    "sources_stale": sources_stale,
                    "max_distance_m": max_distance,
                    "threshold_m": threshold
                })

        return route_points

    def cleanup_old_validations(self, days: int = 90) -> int:
        """Remove validation records older than the given number of days.

        Commits the deletion and returns the number of rows removed.
        """
        import time

        # Same timezone fix as get_validation_history: avoid the local-time
        # dependent naive utcnow().timestamp() pattern.
        cutoff_unix = time.time() - days * 86400

        deleted = self.db.query(ValidationHistory).filter(
            ValidationHistory.validation_timestamp_unix < cutoff_unix
        ).delete()

        self.db.commit()

        logger.info(f"Cleaned up {deleted} old validation records")
        return deleted
|
||||
|
||||
@@ -0,0 +1,366 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Server-side Telegram Notification Service for GNSS Guard
|
||||
|
||||
Sends alerts to Telegram for GPS validation state changes:
|
||||
- Sources becoming missing or recovering
|
||||
- Sources becoming stale or recovering
|
||||
- Distance threshold breaches (possible jamming/spoofing)
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional, Set
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from config import get_config
|
||||
from models import Asset, AssetNotificationState
|
||||
|
||||
logger = logging.getLogger("gnss_guard.server.telegram")
|
||||
|
||||
|
||||
class TelegramService:
|
||||
"""Server-side Telegram notification service"""
|
||||
|
||||
    def __init__(self):
        """Initialize Telegram service with config"""
        config = get_config()
        # Bot credentials and the master enable switch come from server config
        self.bot_token = config.telegram_bot_token
        self.default_chat_id = config.telegram_chat_id
        self.enabled = config.telegram_enabled

        if self.enabled:
            self.api_url = f"https://api.telegram.org/bot{self.bot_token}"
            logger.info("Telegram service initialized")
        else:
            # Disabled: api_url stays None; _send_message() checks self.enabled
            # and returns False without touching it
            self.api_url = None
            logger.info("Telegram service disabled (no bot token or chat ID configured)")
|
||||
|
||||
@staticmethod
|
||||
def escape_html(text: str) -> str:
|
||||
"""Escape HTML special characters for Telegram HTML parsing"""
|
||||
text = str(text)
|
||||
text = text.replace('&', '&')
|
||||
text = text.replace('<', '<')
|
||||
text = text.replace('>', '>')
|
||||
return text
|
||||
|
||||
    def _send_message(self, chat_id: str, message: str) -> bool:
        """Send a message to Telegram.

        Returns True on HTTP 200 from the Bot API; False when the service is
        disabled or on any error. Never raises.
        """
        if not self.enabled:
            return False

        try:
            url = f"{self.api_url}/sendMessage"
            payload = {
                "chat_id": chat_id,
                # HTML parse mode: callers are expected to pre-escape user
                # text (see escape_html) -- TODO confirm at call sites
                "text": message,
                "parse_mode": "HTML",
                "disable_web_page_preview": True
            }

            # 10s timeout keeps a slow Telegram API from blocking the caller
            response = requests.post(url, json=payload, timeout=10)

            if response.status_code == 200:
                return True
            else:
                logger.error(f"Telegram API error: {response.status_code} - {response.text}")
                return False

        except Exception as e:
            logger.error(f"Failed to send Telegram message: {e}")
            return False
|
||||
|
||||
def _get_chat_id_for_asset(self, asset: Asset) -> Optional[str]:
|
||||
"""Get the chat ID to use for an asset (asset-specific or default)"""
|
||||
if not asset.telegram_enabled:
|
||||
return None
|
||||
return asset.telegram_chat_id or self.default_chat_id
|
||||
|
||||
def process_validation(
|
||||
self,
|
||||
db: Session,
|
||||
asset: Asset,
|
||||
validation_data: Dict[str, Any]
|
||||
) -> bool:
|
||||
"""
|
||||
Process a validation submission and send notification if state changed.
|
||||
Also handles online/offline state transitions.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
asset: Asset that submitted the validation
|
||||
validation_data: Validation data from the submission
|
||||
|
||||
Returns:
|
||||
bool: True if notification was sent
|
||||
"""
|
||||
chat_id = self._get_chat_id_for_asset(asset)
|
||||
|
||||
# Get or create notification state for this asset
|
||||
state = db.query(AssetNotificationState).filter(
|
||||
AssetNotificationState.asset_id == asset.id
|
||||
).first()
|
||||
|
||||
if not state:
|
||||
state = AssetNotificationState(asset_id=asset.id)
|
||||
db.add(state)
|
||||
db.flush()
|
||||
|
||||
notification_sent = False
|
||||
now = datetime.utcnow()
|
||||
|
||||
# Check if asset was offline and is now back online
|
||||
was_offline = state.is_online == False and state.last_validation_at is not None
|
||||
|
||||
if was_offline and self.enabled and chat_id:
|
||||
# Calculate how long it was offline
|
||||
offline_duration = (now - state.last_validation_at).total_seconds() if state.last_validation_at else None
|
||||
|
||||
notification_sent = self.send_asset_online_alert(
|
||||
chat_id=chat_id,
|
||||
asset_name=asset.name,
|
||||
offline_duration_seconds=offline_duration
|
||||
)
|
||||
|
||||
# Update online status and last validation time
|
||||
state.is_online = True
|
||||
state.last_validation_at = now
|
||||
|
||||
# Skip further processing if Telegram is disabled
|
||||
if not self.enabled or not chat_id:
|
||||
db.commit()
|
||||
return notification_sent
|
||||
|
||||
# Parse current state from validation
|
||||
sources_missing = set(validation_data.get("sources_missing", []))
|
||||
sources_stale = set(validation_data.get("sources_stale", []))
|
||||
validation_details = validation_data.get("validation_details", {})
|
||||
threshold = validation_details.get("threshold_meters", 0)
|
||||
max_distance = validation_details.get("max_distance_meters", 0)
|
||||
threshold_breached = max_distance > threshold if max_distance and threshold else False
|
||||
|
||||
# Parse previous state
|
||||
prev_missing = set(json.loads(state.prev_sources_missing or "[]"))
|
||||
prev_stale = set(json.loads(state.prev_sources_stale or "[]"))
|
||||
prev_threshold_breached = state.prev_threshold_breached or False
|
||||
|
||||
# Detect changes
|
||||
missing_added = sources_missing - prev_missing
|
||||
missing_removed = prev_missing - sources_missing
|
||||
stale_added = sources_stale - prev_stale
|
||||
stale_removed = prev_stale - sources_stale
|
||||
threshold_changed = threshold_breached != prev_threshold_breached
|
||||
|
||||
has_state_change = (
|
||||
missing_added or missing_removed or
|
||||
stale_added or stale_removed or
|
||||
threshold_changed
|
||||
)
|
||||
|
||||
if has_state_change:
|
||||
logger.info(f"State change detected for {asset.name}")
|
||||
|
||||
# Build and send notification
|
||||
source_coordinates = validation_data.get("source_coordinates", {})
|
||||
|
||||
message = self._build_state_change_message(
|
||||
asset_name=asset.name,
|
||||
missing_added=missing_added,
|
||||
missing_removed=missing_removed,
|
||||
stale_added=stale_added,
|
||||
stale_removed=stale_removed,
|
||||
threshold_breached=threshold_breached,
|
||||
prev_threshold_breached=prev_threshold_breached,
|
||||
max_distance_meters=max_distance,
|
||||
threshold_meters=threshold,
|
||||
source_coordinates=source_coordinates
|
||||
)
|
||||
|
||||
if self._send_message(chat_id, message):
|
||||
state.last_notification_at = now
|
||||
logger.info(f"Notification sent for {asset.name}")
|
||||
notification_sent = True
|
||||
|
||||
# Update state
|
||||
state.prev_sources_missing = json.dumps(list(sources_missing))
|
||||
state.prev_sources_stale = json.dumps(list(sources_stale))
|
||||
state.prev_threshold_breached = threshold_breached
|
||||
|
||||
db.commit()
|
||||
|
||||
return notification_sent
|
||||
|
||||
def _build_state_change_message(
|
||||
self,
|
||||
asset_name: str,
|
||||
missing_added: Set[str],
|
||||
missing_removed: Set[str],
|
||||
stale_added: Set[str],
|
||||
stale_removed: Set[str],
|
||||
threshold_breached: bool,
|
||||
prev_threshold_breached: bool,
|
||||
max_distance_meters: float,
|
||||
threshold_meters: float,
|
||||
source_coordinates: Dict[str, Any]
|
||||
) -> str:
|
||||
"""Build the state change notification message"""
|
||||
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
|
||||
# Determine if this is a degradation or recovery
|
||||
is_degradation = missing_added or stale_added or (threshold_breached and not prev_threshold_breached)
|
||||
is_recovery = missing_removed or stale_removed or (not threshold_breached and prev_threshold_breached)
|
||||
|
||||
if is_degradation and not is_recovery:
|
||||
emoji = "🚨"
|
||||
title = "GNSS STATE DEGRADED"
|
||||
elif is_recovery and not is_degradation:
|
||||
emoji = "✅"
|
||||
title = "GNSS STATE RECOVERED"
|
||||
else:
|
||||
emoji = "⚠️"
|
||||
title = "GNSS STATE CHANGED"
|
||||
|
||||
message = (
|
||||
f"{emoji} <b>{title}</b>\n\n"
|
||||
f"📍 <b>Asset:</b> {self.escape_html(asset_name)}\n"
|
||||
f"⏰ <b>Time:</b> {timestamp}\n\n"
|
||||
)
|
||||
|
||||
# Missing sources changes
|
||||
if missing_added:
|
||||
message += f"❌ <b>Sources now MISSING:</b> {', '.join(sorted(missing_added))}\n"
|
||||
if missing_removed:
|
||||
message += f"✅ <b>Sources RECOVERED (was missing):</b> {', '.join(sorted(missing_removed))}\n"
|
||||
|
||||
# Stale sources changes
|
||||
if stale_added:
|
||||
message += f"⏱️ <b>Sources now STALE:</b> {', '.join(sorted(stale_added))}\n"
|
||||
if stale_removed:
|
||||
message += f"✅ <b>Sources RECOVERED (was stale):</b> {', '.join(sorted(stale_removed))}\n"
|
||||
|
||||
# Threshold breach changes
|
||||
if threshold_breached and not prev_threshold_breached:
|
||||
message += (
|
||||
f"\n🚨 <b>DISTANCE THRESHOLD BREACHED!</b>\n"
|
||||
f" Max distance: {max_distance_meters:.1f}m (threshold: {threshold_meters:.1f}m)\n"
|
||||
f" ⚠️ Possible GPS jamming or spoofing!\n"
|
||||
)
|
||||
elif not threshold_breached and prev_threshold_breached:
|
||||
message += (
|
||||
f"\n✅ <b>Distance threshold OK</b>\n"
|
||||
f" Max distance: {max_distance_meters:.1f}m (threshold: {threshold_meters:.1f}m)\n"
|
||||
)
|
||||
|
||||
# Current coordinates summary
|
||||
if source_coordinates:
|
||||
message += f"\n📍 <b>Current Coordinates:</b>\n"
|
||||
for source, coords in source_coordinates.items():
|
||||
lat = coords.get("latitude", "N/A")
|
||||
lon = coords.get("longitude", "N/A")
|
||||
message += f" • {self.escape_html(source)}: {lat}, {lon}\n"
|
||||
|
||||
return message
|
||||
|
||||
def send_asset_offline_alert(
|
||||
self,
|
||||
chat_id: str,
|
||||
asset_name: str,
|
||||
last_seen: datetime,
|
||||
offline_threshold_seconds: int = 120
|
||||
) -> bool:
|
||||
"""Send notification when an asset goes offline (no updates received)"""
|
||||
if not self.enabled:
|
||||
return False
|
||||
|
||||
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
last_seen_str = last_seen.strftime("%Y-%m-%d %H:%M:%S UTC") if last_seen else "Unknown"
|
||||
|
||||
message = (
|
||||
f"📴 <b>ASSET OFFLINE</b>\n\n"
|
||||
f"📍 <b>Asset:</b> {self.escape_html(asset_name)}\n"
|
||||
f"⏰ <b>Detected at:</b> {timestamp}\n"
|
||||
f"🕐 <b>Last seen:</b> {last_seen_str}\n\n"
|
||||
f"⚠️ No updates received for over {offline_threshold_seconds} seconds.\n"
|
||||
f"Check client connectivity and service status."
|
||||
)
|
||||
|
||||
result = self._send_message(chat_id, message)
|
||||
if result:
|
||||
logger.info(f"Offline alert sent for {asset_name}")
|
||||
return result
|
||||
|
||||
def send_asset_online_alert(
|
||||
self,
|
||||
chat_id: str,
|
||||
asset_name: str,
|
||||
offline_duration_seconds: Optional[float] = None
|
||||
) -> bool:
|
||||
"""Send notification when an asset comes back online"""
|
||||
if not self.enabled:
|
||||
return False
|
||||
|
||||
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
|
||||
|
||||
duration_str = ""
|
||||
if offline_duration_seconds:
|
||||
if offline_duration_seconds < 60:
|
||||
duration_str = f"{int(offline_duration_seconds)} seconds"
|
||||
elif offline_duration_seconds < 3600:
|
||||
duration_str = f"{int(offline_duration_seconds / 60)} minutes"
|
||||
else:
|
||||
hours = offline_duration_seconds / 3600
|
||||
duration_str = f"{hours:.1f} hours"
|
||||
|
||||
message = (
|
||||
f"📶 <b>ASSET BACK ONLINE</b>\n\n"
|
||||
f"📍 <b>Asset:</b> {self.escape_html(asset_name)}\n"
|
||||
f"⏰ <b>Time:</b> {timestamp}\n"
|
||||
)
|
||||
|
||||
if duration_str:
|
||||
message += f"⏱️ <b>Was offline for:</b> {duration_str}\n"
|
||||
|
||||
message += f"\n✅ Asset is now reporting normally."
|
||||
|
||||
result = self._send_message(chat_id, message)
|
||||
if result:
|
||||
logger.info(f"Online alert sent for {asset_name}")
|
||||
return result
|
||||
|
||||
def test_connection(self) -> bool:
|
||||
"""Test Telegram bot connection"""
|
||||
if not self.enabled:
|
||||
return False
|
||||
|
||||
try:
|
||||
url = f"{self.api_url}/getMe"
|
||||
response = requests.get(url, timeout=10)
|
||||
|
||||
if response.status_code == 200:
|
||||
bot_info = response.json()
|
||||
logger.info(f"Telegram bot connected: @{bot_info['result']['username']}")
|
||||
return True
|
||||
else:
|
||||
logger.error(f"Telegram connection failed: {response.status_code}")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Telegram connection error: {e}")
|
||||
return False
|
||||
|
||||
|
||||
# Process-wide singleton holder for the Telegram service
_telegram_service: Optional[TelegramService] = None


def get_telegram_service() -> TelegramService:
    """Return the singleton Telegram service, creating it lazily on first use."""
    global _telegram_service
    if _telegram_service is not None:
        return _telegram_service
    _telegram_service = TelegramService()
    return _telegram_service
|
||||
|
||||
976
backup-from-device/gnss-guard/tm-gnss-guard/server/static/app.js
Normal file
976
backup-from-device/gnss-guard/tm-gnss-guard/server/static/app.js
Normal file
@@ -0,0 +1,976 @@
|
||||
/**
 * GNSS Guard Server - Dashboard JavaScript
 * Multi-asset monitoring with 72h route visualization
 */

// ---- Global state ----
let map = null;                    // Leaflet map instance
let currentAsset = null;           // name of the asset currently shown
let currentData = null;            // last status payload for currentAsset
let assets = [];                   // asset list from the dashboard API
let routeMarkers = [];             // Leaflet markers for the 72h route
let sourceMarkers = {};            // per-source Leaflet markers
let showRouteEnabled = true;
let lastFetchSucceeded = false;
let lastValidationTimestamp = null;
let isInitialMapLoad = true;       // Only fit bounds on initial load or asset change

// ---- Time mode state ----
let timeMode = 'now';              // 'now' or 'select'
let selectedTimestamp = null;      // Unix timestamp when in 'select' mode
let autoRefreshInterval = null;

// =============================================================================
// AUTO-REFRESH PAGE (every 1 hour to pick up deployments)
// =============================================================================
const PAGE_LOAD_TIME = Date.now();
const AUTO_REFRESH_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
let lastVisibilityCheck = Date.now();
|
||||
|
||||
// Reload the page once it has been open longer than the refresh interval,
// so long-lived dashboard tabs pick up new deployments.
function checkAutoRefresh() {
  if (Date.now() - PAGE_LOAD_TIME >= AUTO_REFRESH_INTERVAL_MS) {
    console.log('Auto-refreshing page after 1 hour...');
    window.location.reload();
  }
}

// Re-check when the tab becomes visible again, throttled to once per 10s
// to prevent rapid refreshes.
document.addEventListener('visibilitychange', () => {
  if (document.visibilityState !== 'visible') return;
  const now = Date.now();
  if (now - lastVisibilityCheck > 10000) {
    lastVisibilityCheck = now;
    checkAutoRefresh();
  }
});

// Periodic check every 5 minutes while the tab stays active
setInterval(checkAutoRefresh, 5 * 60 * 1000);
|
||||
|
||||
// Marker icons for each coordinate source (leaflet-color-markers palette)
const iconPrimary = makeIcon('violet');
const iconSecondary = makeIcon('grey');
const iconAis = makeIcon('blue');
const iconStarlinkGps = makeIcon('yellow');
const iconStarlinkLocation = makeIcon('green');

// Display name + icon per source key
const sourceConfig = {
  nmea_primary:      { icon: iconPrimary,          name: 'Primary GPS' },
  nmea_secondary:    { icon: iconSecondary,        name: 'Secondary GPS' },
  tm_ais:            { icon: iconAis,              name: 'TM AIS GPS' },
  starlink_gps:      { icon: iconStarlinkGps,      name: 'Starlink GPS' },
  starlink_location: { icon: iconStarlinkLocation, name: 'Starlink Location' }
};
|
||||
|
||||
// Bootstrap the dashboard once the DOM is ready
document.addEventListener('DOMContentLoaded', () => {
  initMap();
  initTabs();
  initTimePicker();
  loadAssets();

  // Poll every 10 seconds (only fetches while in 'now' mode)
  startAutoRefresh();
});
|
||||
|
||||
// =============================================================================
|
||||
// MAP INITIALIZATION
|
||||
// =============================================================================
|
||||
|
||||
// Create the Leaflet map with dark CartoDB tiles and wire zoom handling.
function initMap() {
  map = L.map('map', { zoomControl: true }).setView([34.665151, 33.016326], 11);

  L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', {
    maxZoom: 19,
    attribution: '© OpenStreetMap & CARTO'
  }).addTo(map);

  // Redraw markers after zoom so their offsets are recalculated
  map.on('zoomend', () => {
    if (!currentData) return;
    updateMap(currentData);
  });
}
|
||||
|
||||
// Build a colored Leaflet marker icon from the leaflet-color-markers image set.
function makeIcon(color) {
  const base = 'https://raw.githubusercontent.com/pointhi/leaflet-color-markers/master/img';
  return new L.Icon({
    iconUrl: `${base}/marker-icon-${color}.png`,
    shadowUrl: 'https://cdnjs.cloudflare.com/ajax/libs/leaflet/1.9.4/images/marker-shadow.png',
    iconSize: [25, 41],
    iconAnchor: [12, 41],
    popupAnchor: [1, -34],
    shadowSize: [41, 41]
  });
}
|
||||
|
||||
// =============================================================================
|
||||
// TABS (Mobile)
|
||||
// =============================================================================
|
||||
|
||||
// Wire up the mobile tab buttons: toggle active states and show the
// matching tab panel; the map needs a size invalidation when revealed.
function initTabs() {
  const tabButtons = document.querySelectorAll('.tab-btn');
  for (const btn of tabButtons) {
    btn.addEventListener('click', () => {
      const tabName = btn.dataset.tab;

      // Highlight only the clicked button
      for (const other of tabButtons) other.classList.remove('active');
      btn.classList.add('active');

      // Show only the matching tab panel
      for (const panel of document.querySelectorAll('.tab-content')) {
        panel.classList.remove('active');
      }
      document.getElementById(`tab-${tabName}`).classList.add('active');

      // Leaflet must recompute its size after the map container becomes visible
      if (tabName === 'map' && map) {
        setTimeout(() => map.invalidateSize(), 100);
      }
    });
  }
}
|
||||
|
||||
// =============================================================================
|
||||
// TIME SELECTOR
|
||||
// =============================================================================
|
||||
|
||||
// Seed both datetime pickers (desktop and mobile) with the current time.
function initTimePicker() {
  const nowValue = formatDatetimeLocal(new Date());
  for (const id of ['selectedDatetime', 'mobileSelectedDatetime']) {
    const picker = document.getElementById(id);
    if (picker) picker.value = nowValue;
  }
}
|
||||
|
||||
// Format a Date as YYYY-MM-DDTHH:mm (local time) for datetime-local inputs.
function formatDatetimeLocal(date) {
  const pad = (n) => String(n).padStart(2, '0');
  const datePart = `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())}`;
  const timePart = `${pad(date.getHours())}:${pad(date.getMinutes())}`;
  return `${datePart}T${timePart}`;
}
|
||||
|
||||
// Switch between live ('now') and historical ('select') viewing modes.
// Live mode clears the selected time, refits the map, restarts polling,
// and refetches; select mode just reveals the picker and stops polling.
function setTimeMode(mode) {
  timeMode = mode;

  // Sync both the desktop and mobile radio groups
  document.querySelectorAll('input[name="timeMode"], input[name="timeModeM"]').forEach(radio => {
    radio.checked = radio.value === mode;
  });

  // Reveal the datetime pickers only in 'select' mode
  for (const id of ['datetimePicker', 'mobileDatetimePicker']) {
    const el = document.getElementById(id);
    if (el) el.classList.toggle('hidden', mode === 'now');
  }

  if (mode !== 'now') {
    // Historical view: freeze auto-refresh while browsing
    stopAutoRefresh();
    return;
  }

  // Back to live view: hide the "selected time" displays
  for (const id of ['selectedTimeDisplay', 'mobileSelectedTimeDisplay']) {
    const el = document.getElementById(id);
    if (el) el.classList.add('hidden');
  }

  selectedTimestamp = null;
  isInitialMapLoad = true; // refit bounds on return to live view

  startAutoRefresh();
  fetchData();
  loadRouteData();
}
|
||||
|
||||
/**
 * Keep the desktop and mobile datetime pickers in sync.
 *
 * Bug fix: the old code always preferred the desktop picker's value
 * (`desktopPicker?.value || mobilePicker?.value`), so an edit made on the
 * mobile picker was immediately overwritten by the stale desktop value.
 * When invoked as an event handler we now take the value from the element
 * that actually changed; the parameter is optional, so existing
 * no-argument callers keep the previous behavior.
 */
function onDatetimeChange(event) {
  const desktopPicker = document.getElementById('selectedDatetime');
  const mobilePicker = document.getElementById('mobileSelectedDatetime');

  // Prefer the picker the user actually edited, then fall back as before
  const value = event?.target?.value || desktopPicker?.value || mobilePicker?.value;

  if (desktopPicker) desktopPicker.value = value;
  if (mobilePicker) mobilePicker.value = value;
}
|
||||
|
||||
/**
 * Apply the datetime chosen in the picker: switch the dashboard to that
 * historical timestamp and reload status + route data.
 *
 * Bug fix: previously only the desktop picker was read, so applying a time
 * when only the mobile picker holds a value could fail; we now fall back
 * to the mobile picker's value.
 */
function applySelectedTime() {
  const value = document.getElementById('selectedDatetime')?.value
    || document.getElementById('mobileSelectedDatetime')?.value;

  if (!value) {
    alert('Please select a date and time');
    return;
  }

  // Convert the local datetime string to a Unix timestamp (seconds)
  const date = new Date(value);
  selectedTimestamp = date.getTime() / 1000;

  // Human-readable confirmation text for the "viewing" badges
  const displayText = date.toLocaleString('en-US', {
    month: 'short',
    day: 'numeric',
    year: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
    hour12: false
  });

  // Reveal the "selected time" displays and set their text
  for (const id of ['selectedTimeDisplay', 'mobileSelectedTimeDisplay']) {
    const el = document.getElementById(id);
    if (el) el.classList.remove('hidden');
  }
  for (const id of ['selectedTimeText', 'mobileSelectedTimeText']) {
    const el = document.getElementById(id);
    if (el) el.textContent = displayText;
  }

  // Refit map bounds for the newly selected time window
  isInitialMapLoad = true;

  // Fetch historical data
  fetchData();
  loadRouteData();

  logEvent('info', `Viewing data at ${displayText}`);
}
|
||||
|
||||
// Begin the 10-second polling loop; fetches only while in live ('now') mode.
function startAutoRefresh() {
  if (autoRefreshInterval) return; // already running
  autoRefreshInterval = setInterval(() => {
    if (timeMode !== 'now') return;
    fetchData();
  }, 10000);
}
|
||||
|
||||
// Cancel the polling loop if it is active.
function stopAutoRefresh() {
  if (!autoRefreshInterval) return;
  clearInterval(autoRefreshInterval);
  autoRefreshInterval = null;
}
|
||||
|
||||
// Reset the time selector to live ('now') mode; called when switching assets.
function resetTimeMode() {
  timeMode = 'now';
  selectedTimestamp = null;

  // Sync both radio groups back to 'now'
  document.querySelectorAll('input[name="timeMode"], input[name="timeModeM"]').forEach(radio => {
    radio.checked = radio.value === 'now';
  });

  // Hide the pickers and the "selected time" displays
  const hideIds = [
    'datetimePicker', 'mobileDatetimePicker',
    'selectedTimeDisplay', 'mobileSelectedTimeDisplay'
  ];
  for (const id of hideIds) {
    const el = document.getElementById(id);
    if (el) el.classList.add('hidden');
  }

  // Reseed both datetime pickers with the current time
  const nowValue = formatDatetimeLocal(new Date());
  for (const id of ['selectedDatetime', 'mobileSelectedDatetime']) {
    const picker = document.getElementById(id);
    if (picker) picker.value = nowValue;
  }

  // Resume live polling
  startAutoRefresh();
}
|
||||
|
||||
// =============================================================================
|
||||
// ASSET MANAGEMENT
|
||||
// =============================================================================
|
||||
|
||||
// Fetch the asset list, render it, and auto-select the most recently
// added asset (last entry in the list).
async function loadAssets() {
  try {
    const res = await fetch('/api/dashboard/assets');
    if (!res.ok) throw new Error('Failed to load assets');

    assets = await res.json();
    renderAssetList();
    populateMobileDropdown();

    if (assets.length > 0) {
      selectAsset(assets[assets.length - 1].name);
    }
  } catch (err) {
    console.error('Error loading assets:', err);
    document.getElementById('assetList').innerHTML =
      '<div class="asset-loading">Failed to load assets</div>';
  }
}
|
||||
|
||||
/**
 * Render the sidebar asset list.
 *
 * Status dot: online+valid = green, online+invalid+distance alert = red,
 * online+invalid otherwise = amber, offline = gray (no class).
 *
 * Security fix: asset names are now escaped before being interpolated into
 * markup and into the inline onclick attribute; previously a name containing
 * quotes or angle brackets could break the markup or inject script (XSS).
 */
function renderAssetList() {
  const container = document.getElementById('assetList');

  if (assets.length === 0) {
    container.innerHTML = '<div class="asset-loading">No assets registered</div>';
    return;
  }

  // Escape for HTML text/attribute contexts
  const escapeHtml = (s) => String(s)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');

  container.innerHTML = assets.map(asset => {
    let statusClass = '';
    if (asset.is_online) {
      if (asset.is_valid === true) {
        statusClass = 'online'; // green
      } else if (asset.is_valid === false) {
        statusClass = asset.has_distance_alert ? 'alert' : 'degraded'; // red or amber
      } else {
        statusClass = 'online'; // null/unknown - assume ok
      }
    }

    const isActive = currentAsset === asset.name;
    const safeName = escapeHtml(asset.name);
    // For the inline onclick: escape for the JS string literal first, then
    // for the surrounding HTML attribute (which is entity-decoded before JS parse)
    const jsName = escapeHtml(String(asset.name).replace(/\\/g, '\\\\').replace(/'/g, "\\'"));

    return `
      <div class="asset-item ${isActive ? 'active' : ''} ${!asset.is_online ? 'offline' : ''}"
           onclick="selectAsset('${jsName}')">
        <div class="asset-name">${safeName}</div>
        <div class="asset-status">
          <span class="status-dot ${statusClass}"></span>
          <span>${asset.is_online ? 'Online' : 'Offline'}</span>
        </div>
      </div>
    `;
  }).join('');
}
|
||||
|
||||
// Rebuild the mobile asset <select>, preserving the current selection.
function populateMobileDropdown() {
  const select = document.getElementById('mobileAssetSelect');
  const options = assets.map(
    asset => `<option value="${asset.name}">${asset.name}</option>`
  );
  select.innerHTML = '<option value="">Select Asset...</option>' + options.join('');

  if (currentAsset) {
    select.value = currentAsset;
  }
}
|
||||
|
||||
// Switch the dashboard to a different asset and reload everything.
function selectAsset(assetName) {
  if (!assetName) return;

  currentAsset = assetName;

  // Reflect the selection in the sidebar and the mobile dropdown
  renderAssetList();
  document.getElementById('mobileAssetSelect').value = assetName;

  // Always return to live ('now') mode when changing assets
  resetTimeMode();

  // Drop stale data/markers and refit the map for the new asset
  currentData = null;
  clearSourceMarkers();
  clearRouteMarkers();
  isInitialMapLoad = true;

  // Show a loading placeholder immediately while fetching
  showLoadingState();

  fetchData();
  loadRouteData();
}
|
||||
|
||||
// =============================================================================
|
||||
// DATA FETCHING
|
||||
// =============================================================================
|
||||
|
||||
// Fetch the current (or historical) status of the selected asset and
// refresh the UI and map. Server/network errors fall through to the
// degraded state rather than throwing.
async function fetchData() {
  if (!currentAsset) return;

  try {
    // Append ?at=<ts> when browsing a selected historical time
    const atParam = (timeMode === 'select' && selectedTimestamp)
      ? `?at=${selectedTimestamp}`
      : '';
    const response = await fetch(`/api/dashboard/asset/${currentAsset}/status${atParam}`);

    if (!response.ok) {
      showDegradedState(`Server error: ${response.status}`);
      return;
    }

    const data = await response.json();
    if (data.error) {
      showDegradedState(data.error);
      return;
    }

    currentData = data;
    lastFetchSucceeded = true;

    updateUI(data);
    updateMap(data);

    // In live mode, log an event whenever a new validation arrives
    if (timeMode === 'now' && data.validation_timestamp !== lastValidationTimestamp) {
      lastValidationTimestamp = data.validation_timestamp;
      if (data.has_alert && !data.is_valid && data.max_distance_km !== null) {
        logEvent('crit', `Alert: distance ${data.max_distance_km.toFixed(1)} km`);
      } else if (!data.is_valid) {
        logEvent('warn', 'Validation issue detected');
      } else {
        logEvent('info', 'Cloud status OK');
      }
    }

  } catch (error) {
    console.error('Fetch error:', error);
    showDegradedState('Connection failed: ' + error.message);
  }
}
|
||||
|
||||
// Load the 72h route for the current asset — optionally ending at the
// selected historical timestamp — and render it. Errors are logged only.
async function loadRouteData() {
  if (!currentAsset) return;

  try {
    let url = `/api/dashboard/asset/${currentAsset}/route?hours=72`;
    if (timeMode === 'select' && selectedTimestamp) {
      url += `&until=${selectedTimestamp}`;
    }

    const res = await fetch(url);
    if (!res.ok) return;

    renderRoute(await res.json());

  } catch (err) {
    console.error('Error loading route:', err);
  }
}
|
||||
|
||||
// =============================================================================
|
||||
// UI UPDATES
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Update both GNSS status pills (desktop and mobile).
 * @param {?string} status - 'ok' | 'warn' | 'crit', or null for a neutral pill
 * @param {string} text - label to show in the pill
 */
function updateStatusPills(status, text) {
  for (const id of ['desktopStatusPill', 'mobileStatusPill']) {
    const pill = document.getElementById(id);
    if (!pill) continue;
    pill.classList.remove('ok', 'warn', 'crit');
    pill.textContent = text;
    if (status) pill.classList.add(status);
  }
}
|
||||
|
||||
// Refresh the status pills, alert banner, and source cards from a status payload.
function updateUI(data) {
  // A distance alert requires both the alert flag and a concrete distance
  const hasDistanceAlert = data.has_alert && data.max_distance_km !== null;

  // GNSS status pills
  if (hasDistanceAlert) {
    updateStatusPills('crit', 'GNSS Integrity: At Risk');
  } else if (!data.is_valid) {
    updateStatusPills('warn', 'GNSS Integrity: Degraded');
  } else {
    updateStatusPills('ok', 'GNSS Integrity: Stable');
  }

  // Alert banner with the offending distance
  const alertBanner = document.getElementById('alertBanner');
  const alertDistance = document.getElementById('alert-distance-value');
  if (hasDistanceAlert) {
    alertBanner.classList.remove('hidden');
    alertDistance.textContent = `${data.max_distance_km.toFixed(1)} km`;
  } else {
    alertBanner.classList.add('hidden');
  }

  // Source cards — the distance alert state changes card styling
  renderSources(data.sources, hasDistanceAlert);
}
|
||||
|
||||
// Render one status card per GNSS source, in a fixed order. When a distance
// alert is active, otherwise-healthy sources with a fix are flagged AT RISK.
function renderSources(sources, hasDistanceAlert = false) {
  const container = document.getElementById('sourcesContainer');
  const order = ['nmea_primary', 'nmea_secondary', 'tm_ais', 'starlink_gps', 'starlink_location'];

  const fmtCoords = (c) => `${c.latitude.toFixed(6)}, ${c.longitude.toFixed(6)}`;

  const cards = order.map(key => {
    const source = sources[key];
    if (!source) return '';

    // Defaults: healthy card awaiting data
    let cardClass = 'ok';
    let badgeClass = 'badge-healthy';
    let badgeText = 'HEALTHY';
    let coordsText = 'Loading...';
    let updateText = '-';
    let updateClass = '';

    if (!source.enabled) {
      cardClass = 'offline';
      badgeClass = 'badge-offline';
      badgeText = 'NOT CONFIGURED';
      coordsText = 'No data source configured.';
    } else if (source.status === 'missing') {
      cardClass = 'crit';
      badgeClass = 'badge-danger';
      badgeText = 'MISSING';
      coordsText = 'No coordinates received.';
      updateClass = 'stale-text';
    } else if (source.status === 'stale' || source.is_stale) {
      cardClass = 'stale';
      badgeClass = 'badge-stale';
      badgeText = 'STALE';
      if (source.coordinates) coordsText = fmtCoords(source.coordinates);
      updateClass = 'stale-text';
    } else {
      if (source.coordinates) coordsText = fmtCoords(source.coordinates);
      // Distance alert flips healthy sources with a fix to AT RISK
      if (hasDistanceAlert && source.coordinates) {
        cardClass = 'crit';
        badgeClass = 'badge-danger';
        badgeText = 'AT RISK';
      }
    }

    if (source.last_update_unix) {
      updateText = formatRelativeTime(source.last_update_unix);
    }

    return `
      <div class="card ${cardClass}">
        <div class="card-header">
          <div class="card-title">${source.display_name}</div>
          <div class="badge ${badgeClass}">${badgeText}</div>
        </div>
        <div class="card-line"><strong>Lat/Lon</strong>: ${coordsText}</div>
        <div class="card-line"><strong>Updated</strong>: <span class="${updateClass}">${updateText}</span></div>
      </div>
    `;
  });

  container.innerHTML = cards.join('');
}
|
||||
|
||||
/**
 * Show a neutral loading state while fetching data for a newly selected asset.
 */
function showLoadingState() {
  updateStatusPills(null, 'GNSS Integrity: Loading...');
  document.getElementById('alertBanner').classList.add('hidden');
  renderPlaceholderSources('loading');
}
|
||||
|
||||
/**
 * Show the state for an asset that has never pushed any validation data.
 */
function showNoDataState() {
  lastFetchSucceeded = false;

  updateStatusPills(null, 'GNSS Integrity: No Data');
  document.getElementById('alertBanner').classList.add('hidden');
  renderPlaceholderSources('nodata');

  logEvent('warn', 'Asset has not pushed any validation data yet');
}
|
||||
|
||||
/**
 * Render placeholder cards for all sources.
 * @param {string} mode - 'loading' or 'nodata'
 */
function renderPlaceholderSources(mode) {
    const container = document.getElementById('sourcesContainer');

    // Display names keyed by internal source identifier.
    const sourceNames = {
        'nmea_primary': 'Primary GPS',
        'nmea_secondary': 'Secondary GPS',
        'tm_ais': 'TM AIS GPS',
        'starlink_gps': 'Starlink GPS',
        'starlink_location': 'Starlink Location'
    };
    // Fixed render order for the cards.
    const sourceOrder = ['nmea_primary', 'nmea_secondary', 'tm_ais', 'starlink_gps', 'starlink_location'];

    const isLoading = mode === 'loading';
    const badgeText = isLoading ? 'LOADING' : 'AWAITING';
    const coordsText = isLoading ? 'Loading...' : 'Awaiting first update...';
    const updateText = isLoading ? '...' : '—';

    // Build one identical placeholder card per source, in order.
    const cards = [];
    for (const sourceName of sourceOrder) {
        cards.push(`
            <div class="card">
                <div class="card-header">
                    <div class="card-title">${sourceNames[sourceName]}</div>
                    <div class="badge badge-offline">${badgeText}</div>
                </div>
                <div class="card-line"><strong>Lat/Lon</strong>: ${coordsText}</div>
                <div class="card-line"><strong>Updated</strong>: <span>${updateText}</span></div>
            </div>
        `);
    }
    container.innerHTML = cards.join('');
}
|
||||
|
||||
/**
 * Show the degraded UI state after a failed or incomplete data fetch.
 * "No validation data" errors are routed to the dedicated no-data state.
 * @param {string} errorMessage - error text to inspect and log
 */
function showDegradedState(errorMessage) {
    lastFetchSucceeded = false;

    // An asset with no data at all is a distinct state, not a degradation.
    if (errorMessage && errorMessage.includes('No validation data')) {
        showNoDataState();
        return;
    }

    // Flip status pills to the degraded (warning) state.
    updateStatusPills('warn', 'GNSS Integrity: Degraded');

    // Mark every "Updated" timestamp on the cards as stale.
    for (const line of document.querySelectorAll('.card-line')) {
        if (!line.textContent.includes('Updated')) continue;
        const span = line.querySelector('span');
        if (span) span.classList.add('stale-text');
    }

    logEvent('crit', errorMessage);
}
|
||||
|
||||
// =============================================================================
|
||||
// MAP UPDATES
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Calculate display positions for source markers so that markers reporting
 * nearly identical coordinates are spread in a circle instead of stacking.
 *
 * @param {Object} sourceCoords - map of sourceName -> {lat, lon}
 * @param {number} zoomLevel - current map zoom (used to scale the spread)
 * @returns {Object} map of sourceName -> {lat, lon, offsetLat, offsetLon,
 *   [originalLat, originalLon]} — original* present only when offsets applied
 */
function calculateMarkerOffsets(sourceCoords, zoomLevel) {
    // Identity mapping: every marker stays at its reported position.
    // (Extracted helper — the original duplicated this loop in two branches.)
    const passthrough = () => {
        const result = {};
        for (const [name, coord] of Object.entries(sourceCoords)) {
            result[name] = { lat: coord.lat, lon: coord.lon, offsetLat: 0, offsetLon: 0 };
        }
        return result;
    };

    const names = Object.keys(sourceCoords);
    // Zero or one marker: nothing can overlap.
    if (names.length <= 1) return passthrough();

    // Centroid of all reported coordinates.
    let sumLat = 0, sumLon = 0;
    for (const coord of Object.values(sourceCoords)) {
        sumLat += coord.lat;
        sumLon += coord.lon;
    }
    const centroidLat = sumLat / names.length;
    const centroidLon = sumLon / names.length;

    // Markers are "close together" when all fall within ~50 m of the centroid.
    const closeThreshold = 0.0005; // ~50m in degrees
    let maxDist = 0;
    for (const coord of Object.values(sourceCoords)) {
        const dist = Math.sqrt(
            Math.pow(coord.lat - centroidLat, 2) +
            Math.pow(coord.lon - centroidLon, 2)
        );
        maxDist = Math.max(maxDist, dist);
    }
    // Spread out enough already — no artificial offset needed.
    if (maxDist > closeThreshold) return passthrough();

    // Offset radius shrinks as the user zooms in.
    // At zoom 15, offset ~30m; at zoom 10, offset ~100m.
    const baseOffset = 0.0003; // ~30m base offset
    const zoomFactor = Math.pow(2, 15 - Math.min(zoomLevel, 18));
    const offsetRadius = baseOffset * zoomFactor;

    // Arrange markers evenly on a circle around the centroid.
    const result = {};
    const angleStep = (2 * Math.PI) / names.length;
    names.forEach((name, index) => {
        const angle = angleStep * index - Math.PI / 2; // start from top
        const offsetLat = offsetRadius * Math.cos(angle);
        const offsetLon = offsetRadius * Math.sin(angle) * 1.5; // adjust for latitude distortion
        result[name] = {
            lat: centroidLat + offsetLat,
            lon: centroidLon + offsetLon,
            offsetLat: offsetLat,
            offsetLon: offsetLon,
            originalLat: sourceCoords[name].lat,
            originalLon: sourceCoords[name].lon
        };
    });
    return result;
}
|
||||
|
||||
/**
 * Redraw all per-source markers from a fresh data payload and, on the very
 * first render for an asset, fit the map viewport around them.
 * @param {Object} data - payload whose `sources` map holds per-source coordinates
 */
function updateMap(data) {
    clearSourceMarkers();

    const sources = data.sources || {};
    const allCoords = [];
    const sourceCoords = {};

    // Pass 1: collect coordinates for every configured source with a fix.
    for (const [sourceName, sourceData] of Object.entries(sources)) {
        const coords = sourceData.coordinates;
        if (!coords || !coords.latitude || !coords.longitude) continue;
        if (!sourceConfig[sourceName]) continue;
        sourceCoords[sourceName] = { lat: coords.latitude, lon: coords.longitude };
        allCoords.push([coords.latitude, coords.longitude]);
    }

    // Spread markers that would otherwise overlap at the current zoom.
    const zoomLevel = map.getZoom() || 13;
    const offsetPositions = calculateMarkerOffsets(sourceCoords, zoomLevel);

    // Pass 2: place markers; popups always show the *original* coordinates.
    for (const [sourceName, sourceData] of Object.entries(sources)) {
        const coords = sourceData.coordinates;
        if (!coords || !coords.latitude || !coords.longitude) continue;

        const config = sourceConfig[sourceName];
        const position = offsetPositions[sourceName];
        if (!config || !position) continue;

        const popupContent = `<b>${config.name}</b><br>Lat: ${coords.latitude.toFixed(6)}<br>Lon: ${coords.longitude.toFixed(6)}`;
        sourceMarkers[sourceName] = L.marker([position.lat, position.lon], { icon: config.icon })
            .bindPopup(popupContent)
            .addTo(map);
    }

    // Auto-fit only on initial load or asset change, never on refresh.
    if (isInitialMapLoad) {
        if (allCoords.length > 0) {
            map.fitBounds(L.latLngBounds(allCoords), {
                padding: [50, 50], // Add padding around markers
                maxZoom: 15 // Don't zoom in too much when markers are close
            });
        } else if (currentData && currentData.map_center && currentData.map_center.latitude && currentData.map_center.longitude) {
            // Fallback to the payload's map center when no markers exist.
            map.setView([currentData.map_center.latitude, currentData.map_center.longitude], 13);
        }
        isInitialMapLoad = false; // Don't auto-zoom on subsequent refreshes
    }
}
|
||||
|
||||
// Remove every per-source marker from the map and reset the registry.
function clearSourceMarkers() {
    for (const marker of Object.values(sourceMarkers)) {
        map.removeLayer(marker);
    }
    sourceMarkers = {};
}
|
||||
|
||||
// =============================================================================
|
||||
// ROUTE VISUALIZATION
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Draw the historical route as small colored circle markers, one per point,
 * each with a detail popup. No-op when the toggle is off or there is no data.
 * @param {Array} routeData - route points with latitude/longitude/status
 */
function renderRoute(routeData) {
    clearRouteMarkers();

    if (!showRouteEnabled || !routeData || routeData.length === 0) return;

    // Marker color and label per validation status; anything else is grey.
    const styles = {
        valid: { color: '#1fad3a', statusText: 'Valid' },
        degraded: { color: '#ffa726', statusText: 'Degraded' },
        alert: { color: '#c62828', statusText: 'Alert' }
    };
    const fallback = { color: '#9aa3b8', statusText: 'Unknown' };

    for (const point of routeData) {
        const { color, statusText } = styles[point.status] || fallback;

        const marker = L.circleMarker([point.latitude, point.longitude], {
            radius: 5,
            fillColor: color,
            color: color,
            weight: 1,
            opacity: 0.8,
            fillOpacity: 0.6
        }).addTo(map);

        // Detail popup; optional rows appear only when the data warrants them.
        const popupContent = `
            <div class="route-popup">
                <div class="popup-header">${formatTimestamp(point.timestamp)}</div>
                <div class="popup-row"><strong>Status:</strong> <span class="status-${point.status}">${statusText}</span></div>
                <div class="popup-row"><strong>Lat/Lon:</strong> ${point.latitude.toFixed(6)}, ${point.longitude.toFixed(6)}</div>
                ${point.sources_missing?.length ? `<div class="popup-row"><strong>Missing:</strong> ${point.sources_missing.join(', ')}</div>` : ''}
                ${point.sources_stale?.length ? `<div class="popup-row"><strong>Stale:</strong> ${point.sources_stale.join(', ')}</div>` : ''}
                ${point.max_distance_m > point.threshold_m ? `<div class="popup-row"><strong>Distance:</strong> ${(point.max_distance_m/1000).toFixed(2)} km</div>` : ''}
            </div>
        `;

        marker.bindPopup(popupContent);
        routeMarkers.push(marker);
    }
}
|
||||
|
||||
// Remove all route markers from the map and empty the list.
function clearRouteMarkers() {
    for (const marker of routeMarkers) {
        map.removeLayer(marker);
    }
    routeMarkers = [];
}
|
||||
|
||||
// Sync route rendering with the "Show 72h Route" checkbox state.
function toggleRoute() {
    showRouteEnabled = document.getElementById('showRoute').checked;
    if (!showRouteEnabled) {
        clearRouteMarkers();
        return;
    }
    loadRouteData();
}
|
||||
|
||||
// =============================================================================
|
||||
// EVENT LOGGING
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Prepend an event to the on-page event stream, keeping only the 3 newest.
 * @param {string} level - 'info' | 'warn' | 'crit' (maps to display label and CSS class)
 * @param {string} message - text to display
 */
function logEvent(level, message) {
    const log = document.getElementById('eventLog');
    const now = new Date();
    const time = now.toTimeString().slice(0, 8);

    const levelMap = {
        'info': 'INFO',
        'warn': 'WARN',
        'crit': 'CRIT'
    };

    const event = document.createElement('div');
    event.className = `event level-${level}`;
    event.innerHTML = `<span class="level">${levelMap[level]}</span> [${time}] ${message}`;

    // Newest first: insert immediately after the title element.
    const title = log.querySelector('.event-log-title');
    if (title.nextSibling) {
        log.insertBefore(event, title.nextSibling);
    } else {
        log.appendChild(event);
    }

    // BUG FIX: the original looped on a *static* NodeList's length
    // (`while (events.length > 3)`), which never decreases, so once the cap
    // was exceeded the loop removed EVERY event (it only exited when the
    // `:last-of-type` lookup found nothing). Instead, trim only the entries
    // beyond the 3 most recent (newest are at the top).
    const events = Array.from(log.querySelectorAll('.event'));
    for (const stale of events.slice(3)) {
        stale.remove();
    }
}
|
||||
|
||||
// =============================================================================
|
||||
// UTILITIES
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Format a unix timestamp as a coarse relative-time string.
 * @param {number} unixTimestamp - seconds since the epoch
 * @returns {string} e.g. "42s ago", "5m ago", "3h ago", "2d ago"
 */
function formatRelativeTime(unixTimestamp) {
    const elapsed = Date.now() / 1000 - unixTimestamp;

    // [upper bound in seconds, divisor, unit suffix], smallest unit first.
    const units = [
        [60, 1, 's'],
        [3600, 60, 'm'],
        [86400, 3600, 'h']
    ];
    for (const [limit, divisor, suffix] of units) {
        if (elapsed < limit) return `${Math.floor(elapsed / divisor)}${suffix} ago`;
    }
    return `${Math.floor(elapsed / 86400)}d ago`;
}
|
||||
|
||||
function formatTimestamp(isoString) {
|
||||
const date = new Date(isoString);
|
||||
return date.toLocaleString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
hour12: false
|
||||
});
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// AUTHENTICATION
|
||||
// =============================================================================
|
||||
|
||||
/**
 * End the session server-side, then return to the login page.
 * Redirects even when the request fails so the UI never stays "logged in".
 */
async function logout() {
    try {
        await fetch('/logout', { method: 'POST' });
    } catch (error) {
        console.error('Logout error:', error);
    }
    window.location.href = '/login';
}
|
||||
|
||||
1023
backup-from-device/gnss-guard/tm-gnss-guard/server/static/style.css
Normal file
1023
backup-from-device/gnss-guard/tm-gnss-guard/server/static/style.css
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,160 @@
|
||||
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>TM GNSS Guard Cloud</title>

  <!-- Leaflet CSS -->
  <link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css" />

  <!-- cache_buster is a server-injected template var forcing asset reload on deploy -->
  <link rel="stylesheet" href="/static/style.css?v={{ cache_buster }}">
</head>
<body>

  <!-- HEADER -->
  <div class="header">
    <div class="header-left">
      <div class="header-title">TM GNSS Guard</div>
      <div class="header-sub">Multi-Asset Monitoring Cloud</div>
    </div>
    <div class="header-right">
      <div class="user-menu">
        <!-- username rendered server-side; logout() is defined in app.js -->
        <span class="user-name">{{ username }}</span>
        <button class="logout-btn" onclick="logout()">Logout</button>
      </div>
    </div>
  </div>

  <!-- ALERT BANNER (dynamic) — shown/hidden by app.js via the 'hidden' class -->
  <div class="alert-banner alert-critical hidden" id="alertBanner">
    <div class="alert-indicator" id="alertIndicator"></div>
    <div id="alertText">GPS Jamming or Spoofing Alert! Location Distance: <span id="alert-distance-value">-</span></div>
  </div>

  <!-- MOBILE ASSET DROPDOWN — options populated by app.js -->
  <div class="mobile-asset-dropdown" id="mobileAssetDropdown">
    <select id="mobileAssetSelect" onchange="selectAsset(this.value)">
      <option value="">Select Asset...</option>
    </select>
  </div>

  <!-- MOBILE TIME SELECTOR — radio name 'timeModeM' is distinct from the
       desktop group's 'timeMode' so the two widgets don't interfere -->
  <div class="mobile-time-selector" id="mobileTimeSelector">
    <div class="time-radio-group">
      <label class="time-radio">
        <input type="radio" name="timeModeM" value="now" checked onchange="setTimeMode('now')">
        <span>Now</span>
      </label>
      <label class="time-radio">
        <input type="radio" name="timeModeM" value="select" onchange="setTimeMode('select')">
        <span>Select Day/Time</span>
      </label>
    </div>
    <div class="datetime-picker hidden" id="mobileDatetimePicker">
      <input type="datetime-local" id="mobileSelectedDatetime" onchange="onDatetimeChange()">
      <button class="apply-time-btn" onclick="applySelectedTime()">Apply</button>
    </div>
    <div class="selected-time-display hidden" id="mobileSelectedTimeDisplay">
      Viewing: <span id="mobileSelectedTimeText"></span>
    </div>
  </div>

  <!-- MOBILE GNSS STATUS (visible only in mobile view) -->
  <div class="mobile-gnss-status" id="mobileGnssStatus">
    <div class="status-pill" id="mobileStatusPill">GNSS Integrity: —</div>
  </div>

  <!-- MOBILE TAB BAR (only visible in portrait mode) -->
  <div class="mobile-tabs">
    <button class="tab-btn active" data-tab="status">Status</button>
    <button class="tab-btn" data-tab="map">Map</button>
  </div>

  <!-- MAIN LAYOUT -->
  <div class="layout">
    <!-- ASSET PANEL (desktop only) -->
    <div class="asset-panel" id="assetPanel">
      <div class="panel-title">Assets</div>
      <div class="asset-list" id="assetList">
        <!-- Assets populated by JavaScript -->
        <div class="asset-loading">Loading assets...</div>
      </div>

      <!-- TIME SELECTOR (desktop counterpart of the mobile widget above) -->
      <div class="time-selector" id="timeSelector">
        <div class="panel-title">Time</div>
        <div class="time-radio-group">
          <label class="time-radio">
            <input type="radio" name="timeMode" value="now" checked onchange="setTimeMode('now')">
            <span>Now</span>
          </label>
          <label class="time-radio">
            <input type="radio" name="timeMode" value="select" onchange="setTimeMode('select')">
            <span>Select Day/Time</span>
          </label>
        </div>
        <div class="datetime-picker hidden" id="datetimePicker">
          <input type="datetime-local" id="selectedDatetime" onchange="onDatetimeChange()">
          <button class="apply-time-btn" onclick="applySelectedTime()">Apply</button>
        </div>
        <div class="selected-time-display hidden" id="selectedTimeDisplay">
          Viewing: <span id="selectedTimeText"></span>
        </div>
      </div>
    </div>

    <!-- STATUS TAB CONTENT (Sources + Event Log) -->
    <div class="tab-content tab-status active" id="tab-status">
      <div class="left-panel">
        <!-- DESKTOP GNSS STATUS (visible only in desktop view) -->
        <div class="desktop-gnss-status" id="desktopGnssStatus">
          <div class="status-pill" id="desktopStatusPill">GNSS Integrity: —</div>
        </div>
        <div class="panel-title">GNSS Sources</div>
        <!-- Source cards rendered here by app.js -->
        <div id="sourcesContainer">
          <div class="no-asset-selected">Select an asset to view GNSS sources</div>
        </div>
      </div>

      <!-- EVENT LOG — entries are prepended after .event-log-title by logEvent() -->
      <div class="event-log" id="eventLog">
        <div class="event-log-title">Event Stream</div>
      </div>

      <!-- COPYRIGHT -->
      <div class="copyright">Tototheo Global © 2025</div>
    </div>

    <!-- MAP TAB CONTENT -->
    <div class="tab-content tab-map" id="tab-map">
      <div class="map-panel">
        <!-- Leaflet mounts into #map -->
        <div id="map"></div>
        <div class="map-overlay-legend">
          <div class="legend-section">Sources</div>
          <div><span class="legend-dot legend-primary"></span>Primary GPS</div>
          <div><span class="legend-dot legend-secondary"></span>Secondary GPS</div>
          <div><span class="legend-dot legend-ais"></span>TM AIS GPS</div>
          <div><span class="legend-dot legend-starlink-gps"></span>Starlink GPS</div>
          <div><span class="legend-dot legend-starlink-location"></span>Starlink Location</div>
          <div class="legend-section">72h Route</div>
          <div><span class="legend-dot legend-valid"></span>Valid</div>
          <div><span class="legend-dot legend-degraded"></span>Degraded</div>
          <div><span class="legend-dot legend-alert"></span>Alert</div>
        </div>
        <div class="map-route-toggle">
          <label>
            <input type="checkbox" id="showRoute" checked onchange="toggleRoute()">
            Show 72h Route
          </label>
        </div>
      </div>
    </div>
  </div>

  <!-- Leaflet JS -->
  <script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
  <script src="/static/app.js?v={{ cache_buster }}"></script>
</body>
</html>
|
||||
|
||||
@@ -0,0 +1,71 @@
|
||||
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Login - GNSS Guard Cloud</title>
  <!-- cache_buster is a server-injected template var forcing asset reload on deploy -->
  <link rel="stylesheet" href="/static/style.css?v={{ cache_buster }}">
</head>
<body class="login-page">

  <div class="login-container">
    <div class="login-box">
      <div class="login-header">
        <div class="login-title">TM GNSS Guard</div>
        <div class="login-subtitle">Cloud Dashboard</div>
      </div>

      <!-- Submission is intercepted by the inline script below (JSON POST) -->
      <form id="loginForm" class="login-form">
        <div class="form-group">
          <label for="username">Username</label>
          <input type="text" id="username" name="username" required autocomplete="username">
        </div>

        <div class="form-group">
          <label for="password">Password</label>
          <input type="password" id="password" name="password" required autocomplete="current-password">
        </div>

        <!-- Error message area; revealed by removing the 'hidden' class -->
        <div class="form-error hidden" id="loginError">Invalid credentials</div>

        <button type="submit" class="login-btn">Sign In</button>
      </form>

      <div class="login-footer">
        Tototheo Global © 2025
      </div>
    </div>
  </div>

  <script>
    // Intercept the native form submit and POST credentials as JSON.
    document.getElementById('loginForm').addEventListener('submit', async (e) => {
      e.preventDefault();

      const username = document.getElementById('username').value;
      const password = document.getElementById('password').value;
      const errorEl = document.getElementById('loginError');

      // Clear any error from a previous attempt before retrying.
      errorEl.classList.add('hidden');

      try {
        const response = await fetch('/login', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ username, password })
        });

        if (response.ok) {
          // Session cookie is set by the server; go to the dashboard.
          window.location.href = '/';
        } else {
          // Any non-2xx status is treated as bad credentials.
          errorEl.classList.remove('hidden');
          errorEl.textContent = 'Invalid username or password';
        }
      } catch (error) {
        // Network failure (server unreachable, offline, etc.).
        errorEl.classList.remove('hidden');
        errorEl.textContent = 'Connection error. Please try again.';
      }
    });
  </script>
</body>
</html>
|
||||
|
||||
Reference in New Issue
Block a user