Refactor golden image handling in backup upload process

Update the _set_golden_from_path function to improve the handling of existing golden image files. Replace the existing unlink logic with a more robust method that safely removes files or broken symlinks using the missing_ok parameter. This change enhances the reliability of the backup upload process by ensuring that stale references are properly cleared before setting a new golden image path.
This commit is contained in:
nearxos
2026-02-24 00:19:40 +02:00
parent df180120aa
commit 808fbf5c7c
136 changed files with 407837 additions and 2 deletions

View File

@@ -0,0 +1,4 @@
"""
Validation modules for GNSS Guard
"""

View File

@@ -0,0 +1,155 @@
#!/usr/bin/env python3
"""
Coordinate validation logic for GNSS Guard
Validates coordinates across multiple sources
"""
import logging
from datetime import datetime, timezone
from typing import Dict, Any, List, Optional
from utils.distance import haversine_distance
logger = logging.getLogger("gnss_guard.validation")
class CoordinateValidator:
    """Validator for GPS coordinates across multiple sources.

    A set of positions is considered valid only when every expected source
    is present, none are stale, none report "Null Island" (0, 0), and every
    pair of sources agrees to within ``threshold_meters``.
    """

    def __init__(self, threshold_meters: float, stale_threshold_seconds: int, expected_sources: List[str]):
        """
        Initialize coordinate validator

        Args:
            threshold_meters: Maximum allowed distance difference in meters
            stale_threshold_seconds: Threshold in seconds after which data is considered stale
            expected_sources: List of expected source names
        """
        self.threshold_meters = threshold_meters
        self.stale_threshold_seconds = stale_threshold_seconds
        self.expected_sources = expected_sources

    def _find_stale_sources(self, positions: Dict[str, Dict[str, Any]], current_time: float) -> List[str]:
        """Return the sources whose last fix is older than the stale threshold.

        Args:
            positions: Dictionary mapping source names to position dictionaries
            current_time: Current Unix time in seconds

        Returns:
            List of source names whose reported timestamp is too old
        """
        stale_sources: List[str] = []
        for source, position in positions.items():
            timestamp_unix = position.get("timestamp_unix")
            # Fix: compare against None rather than relying on truthiness.
            # A timestamp of exactly 0 (the Unix epoch) is falsy but is still
            # a real -- and extremely stale -- fix that must not be skipped.
            if timestamp_unix is not None:
                if current_time - timestamp_unix > self.stale_threshold_seconds:
                    stale_sources.append(source)
        return stale_sources

    def _extract_coordinates(self, positions: Dict[str, Dict[str, Any]]):
        """Split positions into usable coordinates and "Null Island" sources.

        Args:
            positions: Dictionary mapping source names to position dictionaries

        Returns:
            Tuple of (source -> coordinate dict for sources with a usable fix,
            list of sources that reported exactly (0, 0)).
        """
        source_coordinates: Dict[str, Dict[str, Any]] = {}
        null_island_sources: List[str] = []
        for source, position in positions.items():
            lat = position.get("latitude")
            lon = position.get("longitude")
            if lat is None or lon is None:
                continue
            # Filter out "Null Island" coordinates (0, 0) - indicates no valid GPS fix
            if lat == 0.0 and lon == 0.0:
                logger.warning(f"Source {source} reported (0, 0) coordinates - treating as missing/invalid")
                null_island_sources.append(source)
                continue
            source_coordinates[source] = {
                "latitude": lat,
                "longitude": lon,
                "altitude": position.get("altitude"),
                "position_uncertainty_m": position.get("position_uncertainty_m"),
                "timestamp": position.get("timestamp"),
                "timestamp_unix": position.get("timestamp_unix"),
            }
        return source_coordinates, null_island_sources

    def _pairwise_distances(self, source_coordinates: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
        """Compute the haversine distance for every unordered pair of sources.

        Args:
            source_coordinates: Source name -> coordinate dictionary

        Returns:
            Dictionary keyed by "source1_source2" with distance details
        """
        coordinate_differences: Dict[str, Dict[str, Any]] = {}
        sources = list(source_coordinates.keys())
        for i, source1 in enumerate(sources):
            for source2 in sources[i + 1:]:
                coord1 = source_coordinates[source1]
                coord2 = source_coordinates[source2]
                distance = haversine_distance(
                    coord1["latitude"],
                    coord1["longitude"],
                    coord2["latitude"],
                    coord2["longitude"]
                )
                # haversine_distance may return None for unusable inputs; skip those pairs.
                if distance is not None:
                    coordinate_differences[f"{source1}_{source2}"] = {
                        "distance_meters": distance,
                        "source1": source1,
                        "source2": source2,
                    }
        return coordinate_differences

    def validate_positions(self, positions: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
        """
        Validate positions from multiple sources

        Args:
            positions: Dictionary mapping source names to position dictionaries

        Returns:
            Validation result dictionary with validity flag, missing/stale
            sources, pairwise coordinate differences, and validation details
        """
        validation_timestamp = datetime.now(timezone.utc)
        current_time = validation_timestamp.timestamp()

        # Expected sources that reported nothing at all
        missing_sources = [src for src in self.expected_sources if src not in positions]
        stale_sources = self._find_stale_sources(positions, current_time)

        source_coordinates, null_island_sources = self._extract_coordinates(positions)
        # Null-island sources are effectively missing for validation purposes
        missing_sources.extend(null_island_sources)

        coordinate_differences = self._pairwise_distances(source_coordinates)
        sources_with_coords = list(source_coordinates.keys())

        # Valid only when nothing is missing or stale and no pair of sources
        # disagrees by more than the configured threshold.
        is_valid = (
            not missing_sources
            and not stale_sources
            and all(
                diff["distance_meters"] <= self.threshold_meters
                for diff in coordinate_differences.values()
            )
        )

        # Per-source reported uncertainty, where available
        position_uncertainties = {
            source: position["position_uncertainty_m"]
            for source, position in positions.items()
            if position.get("position_uncertainty_m") is not None
        }

        validation_details = {
            "threshold_meters": self.threshold_meters,
            "stale_threshold_seconds": self.stale_threshold_seconds,
            "expected_sources": self.expected_sources,
            "sources_found": list(positions.keys()),
            "sources_with_coordinates": sources_with_coords,
            "sources_null_island": null_island_sources,  # Sources reporting (0,0)
            "max_distance_meters": max(
                (diff["distance_meters"] for diff in coordinate_differences.values()),
                default=0.0
            ),
            "position_uncertainties": position_uncertainties,
        }

        return {
            "validation_timestamp": validation_timestamp.isoformat(),
            "validation_timestamp_unix": current_time,
            "is_valid": is_valid,
            "sources_missing": missing_sources,
            "sources_stale": stale_sources,
            "coordinate_differences": coordinate_differences,
            "source_coordinates": source_coordinates,
            "validation_details": validation_details,
        }