Refactor golden image handling in backup upload process

Update the _set_golden_from_path function to improve the handling of existing golden image files. Replace the existing unlink logic with a more robust method that safely removes files or broken symlinks using the missing_ok parameter. This change enhances the reliability of the backup upload process by ensuring that stale references are properly cleared before setting a new golden image path.
This commit is contained in:
nearxos
2026-02-24 00:19:40 +02:00
parent df180120aa
commit 808fbf5c7c
136 changed files with 407837 additions and 2 deletions

View File

@@ -0,0 +1,225 @@
#!/usr/bin/env python3
"""
Asset management service for GNSS Guard Server
"""
import json
import logging
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional

from sqlalchemy import desc, func
from sqlalchemy.orm import Session

from models import Asset, ValidationHistory, AssetNotificationState
logger = logging.getLogger("gnss_guard.server.asset_service")
class AssetService:
    """Service layer for asset-related read and maintenance operations.

    Wraps a SQLAlchemy session and exposes queries over ``Asset``,
    ``ValidationHistory`` and ``AssetNotificationState`` records. The
    session's lifecycle (commit/rollback/close) is owned by the caller
    except for :meth:`cleanup_old_validations`, which commits its delete.
    """

    def __init__(self, db: Session):
        """Store the SQLAlchemy session used by all queries.

        Args:
            db: An active SQLAlchemy session.
        """
        self.db = db

    def get_all_assets(self, include_inactive: bool = False) -> List[Asset]:
        """Return all assets, filtered to active ones by default.

        Args:
            include_inactive: When True, also return deactivated assets.
        """
        query = self.db.query(Asset)
        if not include_inactive:
            # `== True` (not `is True`) is required so SQLAlchemy builds
            # a SQL comparison rather than a Python identity check.
            query = query.filter(Asset.is_active == True)  # noqa: E712
        return query.all()

    def get_asset_by_name(self, name: str) -> Optional[Asset]:
        """Return the asset with the given name, or None if not found."""
        return self.db.query(Asset).filter(Asset.name == name).first()

    def get_asset_by_token(self, token: str) -> Optional[Asset]:
        """Return the *active* asset matching the given API token, or None.

        The raw token is hashed before lookup; only hashes are stored.
        """
        token_hash = Asset.hash_token(token)
        return self.db.query(Asset).filter(
            Asset.token_hash == token_hash,
            Asset.is_active == True  # noqa: E712
        ).first()

    def get_latest_validation(self, asset_id: int) -> Optional[ValidationHistory]:
        """Return the most recent validation record for an asset, or None."""
        return self.db.query(ValidationHistory).filter(
            ValidationHistory.asset_id == asset_id
        ).order_by(desc(ValidationHistory.validation_timestamp_unix)).first()

    def get_validation_at_timestamp(
        self,
        asset_id: int,
        target_timestamp: float
    ) -> Optional[ValidationHistory]:
        """
        Get the validation record closest to (but not after) the specified timestamp.

        Useful for viewing historical data at a specific point in time.

        Args:
            asset_id: The asset ID.
            target_timestamp: Unix timestamp (seconds) upper bound.
        """
        return self.db.query(ValidationHistory).filter(
            ValidationHistory.asset_id == asset_id,
            ValidationHistory.validation_timestamp_unix <= target_timestamp
        ).order_by(desc(ValidationHistory.validation_timestamp_unix)).first()

    def get_validation_history(
        self,
        asset_id: int,
        hours: int = 72,
        limit: Optional[int] = None
    ) -> List[ValidationHistory]:
        """Return validation records within the last ``hours`` hours, newest first.

        Args:
            asset_id: The asset ID.
            hours: Size of the lookback window in hours.
            limit: Optional maximum number of records to return.
        """
        # BUG FIX: datetime.utcnow() returns a *naive* datetime, so
        # .timestamp() interprets it in the host's local timezone and
        # skews the cutoff on non-UTC servers. Use an aware UTC datetime.
        cutoff = datetime.now(timezone.utc) - timedelta(hours=hours)
        cutoff_unix = cutoff.timestamp()
        query = self.db.query(ValidationHistory).filter(
            ValidationHistory.asset_id == asset_id,
            ValidationHistory.validation_timestamp_unix >= cutoff_unix
        ).order_by(desc(ValidationHistory.validation_timestamp_unix))
        # `is not None` so an explicit limit of 0 is honored rather than
        # silently treated as "no limit".
        if limit is not None:
            query = query.limit(limit)
        return query.all()

    def get_all_assets_status(self) -> List[Dict[str, Any]]:
        """Return a status summary dict for every active asset.

        Each entry contains: name, is_online, is_valid, has_distance_alert,
        last_seen (ISO-8601 string or None), and description.
        """
        assets = self.get_all_assets()
        statuses = []
        for asset in assets:
            latest = self.get_latest_validation(asset.id)
            # Online status comes from the notification state so the
            # dashboard agrees with Telegram alerting.
            notification_state = self.db.query(AssetNotificationState).filter(
                AssetNotificationState.asset_id == asset.id
            ).first()
            is_online = notification_state.is_online if notification_state else False
            last_seen = notification_state.last_validation_at if notification_state else None
            # Fall back to the validation timestamp if no notification state.
            if not last_seen and latest and latest.received_at:
                last_seen = latest.received_at
            is_valid = None
            has_distance_alert = False  # True when distance threshold exceeded
            if latest:
                is_valid = latest.is_valid
                # Distinguish AT RISK (distance exceeded) from DEGRADED
                # (merely invalid) by inspecting the stored JSON details.
                if not is_valid:
                    validation_details = json.loads(latest.validation_details or "{}")
                    coordinate_differences = json.loads(latest.coordinate_differences or "{}")
                    threshold = validation_details.get("threshold_meters", 200)
                    max_distance = validation_details.get("max_distance_meters", 0)
                    # Also scan coordinate_differences for the max distance
                    # when the summary field is absent or zero.
                    if not max_distance and coordinate_differences:
                        for diff_data in coordinate_differences.values():
                            if isinstance(diff_data, dict):
                                dist = diff_data.get("distance_meters", 0)
                                if dist > max_distance:
                                    max_distance = dist
                    has_distance_alert = max_distance > threshold
            statuses.append({
                "name": asset.name,
                "is_online": is_online,
                "is_valid": is_valid,
                "has_distance_alert": has_distance_alert,
                "last_seen": last_seen.isoformat() if last_seen else None,
                "description": asset.description
            })
        return statuses

    def get_route_data(
        self,
        asset_id: int,
        hours: int = 72,
        until_timestamp: Optional[float] = None
    ) -> List[Dict[str, Any]]:
        """
        Get route data for map visualization.

        Returns a list of points with coordinates and validation status,
        newest first.

        Args:
            asset_id: The asset ID.
            hours: Number of hours of history to retrieve.
            until_timestamp: Optional Unix timestamp to show route up to this
                time. If provided, returns `hours` of history ending at this
                timestamp.
        """
        if until_timestamp is not None:
            # Window of `hours` ending at the requested timestamp.
            cutoff_unix = until_timestamp - (hours * 3600)
            validations = self.db.query(ValidationHistory).filter(
                ValidationHistory.asset_id == asset_id,
                ValidationHistory.validation_timestamp_unix >= cutoff_unix,
                ValidationHistory.validation_timestamp_unix <= until_timestamp
            ).order_by(desc(ValidationHistory.validation_timestamp_unix)).all()
        else:
            validations = self.get_validation_history(asset_id, hours)
        route_points = []
        for v in validations:
            source_coordinates = json.loads(v.source_coordinates or "{}")
            # Pick the primary coordinate (prefer nmea_primary, then tm_ais,
            # then starlink_location, then any available source).
            coord = None
            for source in ["nmea_primary", "tm_ais", "starlink_location"]:
                if source in source_coordinates:
                    coord = source_coordinates[source]
                    break
            if not coord and source_coordinates:
                coord = next(iter(source_coordinates.values()))
            # BUG FIX: the previous truthiness check dropped valid points at
            # latitude 0 (equator) or longitude 0 (prime meridian); compare
            # against None explicitly instead.
            if (coord
                    and coord.get("latitude") is not None
                    and coord.get("longitude") is not None):
                # Determine status color for the map marker.
                sources_missing = json.loads(v.sources_missing or "[]")
                sources_stale = json.loads(v.sources_stale or "[]")
                validation_details = json.loads(v.validation_details or "{}")
                threshold = validation_details.get("threshold_meters", 200)
                max_distance = validation_details.get("max_distance_meters", 0)
                if not v.is_valid and max_distance > threshold:
                    status = "alert"      # Red - distance exceeded
                elif sources_missing or sources_stale:
                    status = "degraded"   # Orange - missing/stale sources
                else:
                    status = "valid"      # Green - all OK
                route_points.append({
                    "id": v.id,
                    "timestamp": v.validation_timestamp,
                    "timestamp_unix": v.validation_timestamp_unix,
                    "latitude": coord["latitude"],
                    "longitude": coord["longitude"],
                    "status": status,
                    "is_valid": v.is_valid,
                    "sources_missing": sources_missing,
                    "sources_stale": sources_stale,
                    "max_distance_m": max_distance,
                    "threshold_m": threshold
                })
        return route_points

    def cleanup_old_validations(self, days: int = 90) -> int:
        """Delete validation records older than ``days`` days and commit.

        Args:
            days: Retention window in days.

        Returns:
            Number of rows deleted.
        """
        # BUG FIX: same naive-utcnow timestamp skew as get_validation_history;
        # use an aware UTC datetime so the unix cutoff is correct everywhere.
        cutoff = datetime.now(timezone.utc) - timedelta(days=days)
        cutoff_unix = cutoff.timestamp()
        deleted = self.db.query(ValidationHistory).filter(
            ValidationHistory.validation_timestamp_unix < cutoff_unix
        ).delete()
        self.db.commit()
        logger.info(f"Cleaned up {deleted} old validation records")
        return deleted