<message>Update the _set_golden_from_path function to improve the handling of existing golden image files. Replace the existing unlink logic with a more robust method that safely removes files or broken symlinks using the missing_ok parameter. This change enhances the reliability of the backup upload process by ensuring that stale references are properly cleared before setting a new golden image path.
157 lines
5.4 KiB
Python
#!/usr/bin/env python3
"""
Structured JSON logging for GNSS Guard

Logs to date-based folders with daily rotation and cleanup
"""

import json
import logging
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Any, Optional

# Module-level logger for this subsystem's own diagnostics (not the
# structured application log written by StructuredLogger).
logger = logging.getLogger("gnss_guard.logger")
|
|
|
|
|
|
class StructuredLogger:
    """Structured JSON logger with date-based folders.

    Entries are written one JSON object per line to
    ``<base>/YYYY/MM/DD/app_YYYY-MM-DD.json``.  Files rotate daily; on each
    rotation, log files older than ``retention_days`` are deleted.
    """

    # Diagnostics about the logger itself go to the module's own logger,
    # never into the structured log file it manages.
    _logger = logging.getLogger("gnss_guard.logger")

    def __init__(self, logs_base_path: Path, retention_days: int = 14):
        """
        Initialize structured logger

        Args:
            logs_base_path: Base path for logs directory
            retention_days: Number of days to retain logs
        """
        self.logs_base_path = Path(logs_base_path)
        self.retention_days = retention_days
        self.current_log_file: Optional[Path] = None
        # Date string ("YYYY-MM-DD") the open handle belongs to; used to
        # detect day rollover.
        self.current_date: Optional[str] = None
        self.log_file_handle = None
        self._closed = False

    def _get_log_path(self, date: datetime) -> Path:
        """Return the log file path for *date*, creating its directory."""
        log_dir = (
            self.logs_base_path
            / date.strftime("%Y")
            / date.strftime("%m")
            / date.strftime("%d")
        )
        log_dir.mkdir(parents=True, exist_ok=True)
        return log_dir / f"app_{date.strftime('%Y-%m-%d')}.json"

    def _ensure_log_file(self):
        """Ensure a log file is open for the current date, rotating if needed."""
        today = datetime.now()
        today_str = today.strftime("%Y-%m-%d")

        if self.current_date != today_str or self.current_log_file is None:
            # Close the previous day's file, if any.
            if self.log_file_handle:
                self.log_file_handle.close()
                self.log_file_handle = None

            # Rotation is a natural point to enforce retention.
            self._cleanup_old_logs()

            self.current_log_file = self._get_log_path(today)
            self.current_date = today_str

            # Append mode so restarts on the same day extend one file;
            # explicit UTF-8 avoids locale-dependent encodings.
            self.log_file_handle = open(self.current_log_file, "a", encoding="utf-8")
            self._logger.info("Opened log file: %s", self.current_log_file)

    def _cleanup_old_logs(self):
        """Delete log files older than ``retention_days`` (by mtime)."""
        try:
            cutoff = datetime.now() - timedelta(days=self.retention_days)
            cutoff_timestamp = cutoff.timestamp()
            deleted_count = 0

            # Walk every dated subfolder; only files matching our naming
            # scheme are candidates for deletion.
            if self.logs_base_path.exists():
                for log_file in self.logs_base_path.rglob("app_*.json"):
                    try:
                        if log_file.stat().st_mtime < cutoff_timestamp:
                            log_file.unlink()
                            deleted_count += 1
                    except OSError as e:
                        # Best effort: a locked/vanished file must not
                        # abort the whole sweep.
                        self._logger.debug(
                            "Failed to delete old log file %s: %s", log_file, e
                        )

            if deleted_count > 0:
                self._logger.info(
                    "Cleaned up %d old log file(s) (> %d days)",
                    deleted_count,
                    self.retention_days,
                )

        except Exception as e:
            # Cleanup is non-critical; never let it break logging.
            self._logger.error("Error during log cleanup: %s", e)

    def log(self, level: str, source: str, message: str, data: Optional[Dict[str, Any]] = None):
        """
        Write structured log entry

        Args:
            level: Log level (INFO, WARNING, ERROR, DEBUG)
            source: Source identifier
            message: Log message
            data: Optional additional data dictionary
        """
        try:
            # Silently drop entries once the logger was explicitly closed.
            if self._closed:
                return

            self._ensure_log_file()

            # Defensive: _ensure_log_file should always leave a handle open.
            if self.log_file_handle is None:
                self._logger.warning("Cannot write log entry: logger file handle is None")
                return

            log_entry: Dict[str, Any] = {
                "timestamp": datetime.now().isoformat(),
                "level": level,
                "source": source,
                "message": message,
            }
            if data:
                log_entry["data"] = data

            # One compact JSON object per line; flush so entries survive
            # an abrupt process exit.
            self.log_file_handle.write(
                json.dumps(log_entry, separators=(",", ":")) + "\n"
            )
            self.log_file_handle.flush()

        except Exception as e:
            # A failing application log must never crash the caller.
            self._logger.error("Failed to write log entry: %s", e)

    def info(self, source: str, message: str, data: Optional[Dict[str, Any]] = None):
        """Log info message"""
        self.log("INFO", source, message, data)

    def warning(self, source: str, message: str, data: Optional[Dict[str, Any]] = None):
        """Log warning message"""
        self.log("WARNING", source, message, data)

    def error(self, source: str, message: str, data: Optional[Dict[str, Any]] = None):
        """Log error message"""
        self.log("ERROR", source, message, data)

    def debug(self, source: str, message: str, data: Optional[Dict[str, Any]] = None):
        """Log debug message"""
        self.log("DEBUG", source, message, data)

    def close(self):
        """Close log file handle; subsequent log() calls become no-ops."""
        self._closed = True
        if self.log_file_handle:
            self.log_file_handle.close()
            self.log_file_handle = None
|
|
|